_recursive.py

# mypy: allow-untyped-defs
import collections
import functools
import inspect
import sys
import textwrap
import types
import warnings
from typing import Dict, List, Set, Type

import torch
import torch._jit_internal as _jit_internal
from torch._sources import fake_range
from torch.jit._builtins import _find_builtin
from torch.jit._check import AttributeTypeIsSupportedChecker
from torch.jit._state import _add_script_class, _get_script_class, _python_cu
from torch.jit.frontend import (
    get_class_properties,
    get_default_args,
    get_jit_class_def,
    get_jit_def,
)
from torch.nn import Module


ScriptMethodStub = collections.namedtuple(
    "ScriptMethodStub", ("resolution_callback", "def_", "original_method")
)
PropertyStub = collections.namedtuple("PropertyStub", ("resolution_callback", "def_"))


# TODO: there should be a more principled way of doing this.
ignored_attributes = [
    "_version",
    "_parameters",
    "_buffers",
    "_non_persistent_buffers_set",
    "_backward_hooks",
    "_backward_pre_hooks",
    "_forward_hooks",
    "_forward_hooks_with_kwargs",
    "_forward_pre_hooks",
    "_forward_pre_hooks_with_kwargs",
    "_forward_hooks_always_called",
    "_state_dict_hooks",
    "_state_dict_pre_hooks",
    "_load_state_dict_pre_hooks",
    "_load_state_dict_post_hooks",
    "_modules",
    "_initializing",
    "dump_patches",
]

def _compile_and_register_class(obj, rcb, qualified_name):
    script_class = _get_script_class(obj)

    if not script_class:
        ast = get_jit_class_def(obj, obj.__name__)
        defaults = torch.jit.frontend.get_default_args_for_class(obj)
        script_class = torch._C._jit_script_class_compile(
            qualified_name, ast, defaults, rcb
        )
        _add_script_class(obj, script_class)

    return script_class

def make_stub(func, name):
    rcb = _jit_internal.createResolutionCallbackFromClosure(func)
    ast = get_jit_def(func, name, self_name="RecursiveScriptModule")
    return ScriptMethodStub(rcb, ast, func)


def make_stub_from_method(nn_module, method_name):
    func = getattr(nn_module, method_name)
    if isinstance(func, ScriptMethodStub):
        return func
    # Make sure the name present in the resulting AST will match the name
    # requested here. The only time they don't match is if you do something
    # like:
    #   def _forward(self):
    #       pass
    #   forward = _forward
    # In this case, the actual function object will have the name `_forward`,
    # even though we requested a stub for `forward`.
    return make_stub(func, method_name)

def make_stubs_from_exported_methods(mod):
    stubs = []
    for name in dir(mod):
        item = getattr(mod, name, None)
        if (
            _jit_internal.get_torchscript_modifier(item)
            is _jit_internal.FunctionModifiers.EXPORT
        ):
            stubs.append(make_stub_from_method(mod, name))

    return stubs

def jit_ignored_properties(module):
    user_annotated_ignored_attributes = getattr(
        module, "__jit_ignored_attributes__", list()
    )

    def get_properties_names(module):
        return {k for k, v in vars(module).items() if isinstance(v, property)}

    properties = get_properties_names(type(module))
    user_annotated_ignored_properties = set()

    for ignored_attr in user_annotated_ignored_attributes:
        if ignored_attr in properties:
            user_annotated_ignored_properties.add(ignored_attr)
    return user_annotated_ignored_properties

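# A minimal usage sketch of the `__jit_ignored_attributes__` hook consumed above
# (the `device` property is hypothetical): names listed there are kept out of
# compilation, and `jit_ignored_properties` picks out which listed names are
# actually properties.
#
#     class MyModule(torch.nn.Module):
#         __jit_ignored_attributes__ = ["device"]
#
#         @property
#         def device(self):  # Python-only behavior; keep out of TorchScript
#             return next(self.parameters()).device
#
#     jit_ignored_properties(MyModule())  # == {"device"}
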
# base types that can be constants
# in addition, tuples and lists of these base types are also considered constants
# If you edit this list, then you also need to edit the handlers in
# ConstantValue in jit/script/init.cpp
_constant_types = (
    bool,
    float,
    int,
    str,
    type(None),
    torch.device,
    torch.layout,
    torch.dtype,
)

def _get_valid_constant(attr, v, owner_type):
    if isinstance(v, _constant_types):
        return v
    elif isinstance(v, (tuple, list)):
        return tuple(_get_valid_constant(attr, x, owner_type) for x in v)
    constants = ", ".join(torch.typename(typ) for typ in _constant_types)
    raise TypeError(
        textwrap.dedent(
            f"""
            '{torch.typename(type(v))}' object in attribute '{owner_type}.{attr}' is not a valid constant.
            Valid constants are:
            1. a nn.ModuleList
            2. a value of type {{{constants}}}
            3. a list or tuple of (2)
            """
        )
    )

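# A minimal sketch of what `_get_valid_constant` accepts (illustrative values;
# "n", "dims", and "M" are hypothetical attribute/owner names):
#
#     _get_valid_constant("n", 4, "M")              # -> 4
#     _get_valid_constant("dims", [1, 2], "M")      # -> (1, 2); lists become tuples
#     _get_valid_constant("t", torch.ones(1), "M")  # raises TypeError: Tensors
#                                                   # are not valid constants
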
class SourceContext(torch._C._jit_tree_views.SourceRangeFactory):
    def __init__(self, source, filename, file_lineno, leading_whitespace_len):
        super().__init__(source, filename, file_lineno, leading_whitespace_len)

def get_annotations(obj):
    if sys.version_info < (3, 10):
        return getattr(obj, "__annotations__", {})
    # In Python 3.10+ it is recommended to use inspect.get_annotations.
    # See https://docs.python.org/3.10/howto/annotations.html
    # Note that in 3.10+ annotations from a base class are no longer inherited
    # by an unannotated derived class, so they must be extracted manually.
    annotations = inspect.get_annotations(obj)
    if annotations:
        return annotations

    def get_cls_annotations(cls):
        cls_annotations = inspect.get_annotations(cls)
        if cls_annotations:
            return cls_annotations
        for base in cls.__bases__:
            cls_annotations = get_cls_annotations(base)
            if cls_annotations:
                return cls_annotations
        return {}

    cls = obj if isinstance(obj, type) else type(obj)
    return get_cls_annotations(cls)

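# A minimal sketch of the 3.10+ inheritance case handled above (hypothetical
# classes): annotations are looked up on the class itself first, then on bases.
#
#     class Base(torch.nn.Module):
#         hidden: int
#
#     class Derived(Base):  # adds no annotations of its own
#         pass
#
#     # On Python 3.10+, inspect.get_annotations(Derived) == {}, but
#     # get_annotations(Derived) walks the bases and returns {"hidden": int}.
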
def infer_concrete_type_builder(nn_module, share_types=True):
    """
    Build a ConcreteModuleTypeBuilder from an nn.Module.

    This ConcreteModuleType doesn't have a JIT type associated with it yet; it
    must be filled in by the caller.
    """
    concrete_type_builder = torch._C.ConcreteModuleTypeBuilder(type(nn_module))
    if isinstance(nn_module, (torch.nn.ModuleDict)):
        concrete_type_builder.set_module_dict()
    if isinstance(nn_module, (torch.nn.ModuleList, torch.nn.Sequential)):
        concrete_type_builder.set_module_list()
    if isinstance(nn_module, (torch.nn.ParameterList)):
        concrete_type_builder.set_parameter_list()
    if isinstance(nn_module, (torch.nn.ParameterDict)):
        concrete_type_builder.set_parameter_dict()

    class_annotations = get_annotations(nn_module)
    if isinstance(nn_module, (torch.ao.quantization.QuantWrapper)):
        class_annotations = {}

    # Get user-annotated ignored attributes.
    user_annotated_ignored_attributes = getattr(
        nn_module, "__jit_ignored_attributes__", list()
    )
    concrete_type_builder.add_ignored_attributes(user_annotated_ignored_attributes)
    ignored_properties = jit_ignored_properties(nn_module)

    # try to infer the type from type annotation or from the object itself
    def infer_type(name, item):
        # The forward function from Module is special; never use these
        # annotations; we need to infer the type directly using JIT.
        # I originally wanted to write this test as
        # isinstance(class_annotations[name], Callable), but isinstance on
        # typing things doesn't seem to work: isinstance(list, Callable) is
        # also true!
        inferred = False
        try:
            if (
                name in class_annotations
                and class_annotations[name]
                != torch.nn.Module.__annotations__["forward"]
            ):
                ann_to_type = torch.jit.annotations.ann_to_type(
                    class_annotations[name], fake_range()
                )
                attr_type = torch._C.InferredType(ann_to_type)
            elif isinstance(item, torch.jit.Attribute):
                ann_to_type = torch.jit.annotations.ann_to_type(item.type, fake_range())
                attr_type = torch._C.InferredType(ann_to_type)
            else:
                attr_type = torch._C._jit_try_infer_type(item)
                inferred = True
        except RuntimeError as re:
            raise RuntimeError(f"Error inferring type for {name}: {item}: {re}") from re

        return attr_type, inferred

    added_names = set()

    for name, item in nn_module._parameters.items():
        if name in user_annotated_ignored_attributes:
            continue

        assert item is None or isinstance(item, torch.Tensor)
        attr_type, _ = infer_type(name, item)
        # We currently have the invariant in various places in our code
        # that parameters must be Tensors. However, the nn.Module API also
        # allows NoneType parameters. These parameters are not returned as
        # part of `parameters()` and its variants, but are available
        # through direct attribute access.
        concrete_type_builder.add_attribute(name, attr_type.type(), True, False)
        added_names.add(name)

    for name, item in nn_module._buffers.items():
        if name in user_annotated_ignored_attributes:
            continue

        assert item is None or isinstance(item, torch.Tensor)
        attr_type, _ = infer_type(name, item)
        concrete_type_builder.add_attribute(name, attr_type.type(), False, True)
        added_names.add(name)

    for name, item in nn_module._modules.items():
        if name in user_annotated_ignored_attributes:
            continue

        attr_type, _ = infer_type(name, item)
        if item is None:
            # Modules can be None. We don't have direct support for optional
            # Modules, so we register it as a NoneType attribute instead.
            concrete_type_builder.add_attribute(name, attr_type.type(), False, False)
            continue
        if attr_type.success():
            assert attr_type.type().is_interface_type()
            # if the type can be inferred, it should be a module interface type
            sub_concrete_type = torch._C.ConcreteModuleType.from_jit_type(
                attr_type.type()
            )
        else:
            # otherwise we get the concrete module type for item and add it to concrete_type
            sub_concrete_type = get_module_concrete_type(item, share_types)
        concrete_type_builder.add_module(name, sub_concrete_type)

        added_names.add(name)

    # populate constants_set
    constants_set = set(getattr(nn_module, "__constants__", ()))

    # Constants annotated via `Final[T]` rather than being added to `__constants__`
    for name, ann in class_annotations.items():
        if torch._jit_internal.is_final(ann):
            constants_set.add(name)

    for name in constants_set:
        if name in added_names:
            # TODO: We should really error in this case, but it's BC-breaking so
            # we need to warn for at least one release
            if name in nn_module._modules:
                hint = "submodule"
            elif name in nn_module._buffers:
                hint = "buffer"
            elif name in nn_module._parameters:
                hint = "parameter"
            else:
                raise AssertionError(
                    "added_names must be submodule, parameter, or buffer"
                )
            warnings.warn(
                f"'{name}' was found in ScriptModule constants, "
                f"but it is a non-constant {hint}. Consider removing it."
            )
            continue
        if not hasattr(nn_module, name):
            # TODO: We should really error in this case, but it's BC-breaking so
            # we need to warn for at least one release
            warnings.warn(
                f"'{name}' was found in ScriptModule constants, "
                "but was not actually set in __init__. "
                "Consider removing it."
            )
            continue
        value = getattr(nn_module, name)
        concrete_type_builder.add_constant(
            name, _get_valid_constant(name, value, type(nn_module).__name__)
        )
        added_names.add(name)

    # populate overloads
    overloads = getattr(nn_module, "__overloads__", {})
    # update with any annotated overloads
    overloads.update(
        get_overload_name_mapping(
            get_overload_annotations(nn_module, ignored_properties)
        )
    )
    for name, overloaded_names in overloads.items():
        concrete_type_builder.add_overload(name, overloaded_names)

    for name, value in nn_module.__dict__.items():
        if name in ignored_attributes or name.startswith("__"):
            # Python objects have lots of random attributes attached to them;
            # PyTorch adds a few more. Prevent these from getting compiled.
            continue

        if name in user_annotated_ignored_attributes:
            continue

        if name in added_names:
            # Don't re-add anything we already added
            continue

        isoverloadpacket = isinstance(value, torch._ops.OpOverloadPacket)
        if isoverloadpacket:
            value = value.op
        # Handle Python function attributes
        if inspect.isfunction(value):
            try:
                scripted_fn = torch.jit.script(value)
                concrete_type_builder.add_function_attribute(
                    name, torch._C._jit_try_infer_type(scripted_fn).type(), value
                )
            except Exception as e:
                # If we fail to script the function, it isn't a hard error.
                # Instead, we will add it to the list of attributes we failed
                # to convert, with the compilation error.
                hint = (
                    "(This function exists as an attribute on the Python module, "
                    "but we failed to compile it to a TorchScript function. "
                    f"\nThe error stack is reproduced here:\n{e})"
                )
                concrete_type_builder.add_failed_attribute(name, hint)

            continue

        # Handle calls to builtin functions (either bespoke builtins from torch.jit._builtins or
        # a call to an aten function like torch.add)
        builtin_symbol_name = _find_builtin(value)
        if builtin_symbol_name:
            concrete_type_builder.add_builtin_function(name, builtin_symbol_name)
            continue

        # Handle Script function attributes
        if isinstance(value, torch.jit.ScriptFunction):
            concrete_type_builder.add_function_attribute(
                name, torch._C._jit_try_infer_type(value).type(), value
            )
            continue

        # If we got here, this is a regular "data" attribute, add it to the concrete type
        attr_type, inferred = infer_type(name, value)
        if attr_type.success():
            concrete_type_builder.add_attribute(name, attr_type.type(), False, False)
        else:
            # TODO: could add more detail here. For example, what the user should do
            # when the pytype is `list` or `NoneType`
            inferred_msg = (
                "Its type was inferred; try adding a type annotation for the attribute."
                if inferred
                else ""
            )
            additional_info = f"{attr_type.reason()}. {inferred_msg}"
            hint = (
                "(This attribute exists on the Python module, "
                f"but we failed to convert Python type: '{torch.typename(type(value))}' "
                f"to a TorchScript type. {additional_info})"
            )
            concrete_type_builder.add_failed_attribute(name, hint)

    # add hooks to concrete type
    for hook in nn_module._forward_hooks.values():
        concrete_type_builder.add_forward_hook(hook)
    for pre_hook in nn_module._forward_pre_hooks.values():
        concrete_type_builder.add_forward_pre_hook(pre_hook)

    return concrete_type_builder

class ConcreteTypeStore:
    type_store: Dict[Type[Module], List[torch._C.ConcreteModuleType]]
    methods_compiled: Set[torch._C.ConcreteModuleType]

    def __init__(self):
        # Python module type => List[ConcreteModuleType]
        self.type_store = {}
        # ConcreteTypes that have had their methods already compiled
        self.methods_compiled = set()

    def get_or_create_concrete_type(self, nn_module):
        """Infer a ConcreteType from this `nn.Module` instance. Underlying JIT types are re-used if possible."""
        concrete_type_builder = infer_concrete_type_builder(nn_module)

        nn_module_type = type(nn_module)
        if nn_module_type not in self.type_store:
            self.type_store[nn_module_type] = []

        # Search the type store for an already-available JIT type
        known_types = self.type_store[nn_module_type]
        for known_type in known_types:
            if known_type.equals(concrete_type_builder):
                return known_type

        # We didn't find anything; generate a new JIT type from this concrete type
        concrete_type = concrete_type_builder.build()
        self.type_store[nn_module_type].append(concrete_type)
        return concrete_type


concrete_type_store = ConcreteTypeStore()

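# A minimal sketch of the type sharing the store enables (hypothetical module).
# Two instances with the same attribute/constant layout compare equal at the
# ConcreteModuleTypeBuilder level, so they share one JIT type and their methods
# are compiled only once.
#
#     class Scale(torch.nn.Module):
#         def __init__(self, n):
#             super().__init__()
#             self.w = torch.nn.Parameter(torch.randn(n))
#
#         def forward(self, x):
#             return x * self.w
#
#     a = concrete_type_store.get_or_create_concrete_type(Scale(4))
#     b = concrete_type_store.get_or_create_concrete_type(Scale(8))
#     assert a is b  # same structure => shared type, despite different sizes
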
def create_methods_and_properties_from_stubs(
    concrete_type, method_stubs, property_stubs
):
    method_defs = [m.def_ for m in method_stubs]
    method_rcbs = [m.resolution_callback for m in method_stubs]
    method_defaults = [get_default_args(m.original_method) for m in method_stubs]

    property_defs = [p.def_ for p in property_stubs]
    property_rcbs = [p.resolution_callback for p in property_stubs]

    concrete_type._create_methods_and_properties(
        property_defs, property_rcbs, method_defs, method_rcbs, method_defaults
    )


def create_hooks_from_stubs(concrete_type, hook_stubs, pre_hook_stubs):
    hook_defs = [h.def_ for h in hook_stubs]
    hook_rcbs = [h.resolution_callback for h in hook_stubs]

    pre_hook_defs = [h.def_ for h in pre_hook_stubs]
    pre_hook_rcbs = [h.resolution_callback for h in pre_hook_stubs]

    concrete_type._create_hooks(hook_defs, hook_rcbs, pre_hook_defs, pre_hook_rcbs)

def get_module_concrete_type(nn_module, share_types=True):
    """
    Get a concrete type for nn_modules.

    If share_types is True, the concrete type is fetched from concrete_type_store.
    If it is False, a new concrete type is created without first searching concrete_type_store.

    Args:
        nn_module: The original Python nn.Module that we are creating a ScriptModule for.
        share_types: Whether to share underlying JIT types between modules (if possible).

    Returns:
        A concrete type for nn_module.
    """
    assert isinstance(nn_module, Module)
    if isinstance(nn_module, torch.jit.ScriptModule) and hasattr(
        nn_module, "_concrete_type"
    ):
        return nn_module._concrete_type

    if share_types:
        # Look into the store of cached JIT types
        concrete_type = concrete_type_store.get_or_create_concrete_type(nn_module)
    else:
        # Get a concrete type directly, without trying to re-use an existing JIT
        # type from the type store.
        concrete_type_builder = infer_concrete_type_builder(nn_module, share_types)
        concrete_type_builder.set_poisoned()
        concrete_type = concrete_type_builder.build()

    return concrete_type

def create_script_class(obj):
    """
    Create and return a RecursiveScriptClass instance from a Python object.

    Arguments:
        obj: A Python object.
    """
    qualified_class_name = _jit_internal._qualified_name(type(obj))
    rcb = _jit_internal.createResolutionCallbackForClassMethods(type(obj))
    # Script the type of obj if it hasn't already been scripted.
    _compile_and_register_class(type(obj), rcb, qualified_class_name)
    class_ty = _python_cu.get_class(qualified_class_name)
    # Create an empty torch._C.ScriptObject with the scripted type.
    cpp_object = torch._C._create_object_with_type(class_ty)
    # Copy all of the attributes over to the torch._C.ScriptObject.
    for name, value in obj.__dict__.items():
        cpp_object.setattr(name, value)

    # Wrap the torch._C.ScriptObject in a RecursiveScriptClass instance.
    return wrap_cpp_class(cpp_object)

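# A minimal usage sketch (the `Point` class is hypothetical): scripting an
# instance compiles its class and copies the instance attributes into a C++
# ScriptObject of the scripted type.
#
#     class Point:
#         def __init__(self, x: int, y: int):
#             self.x = x
#             self.y = y
#
#     scripted = create_script_class(Point(1, 2))
#     # `scripted` is a RecursiveScriptClass wrapping the ScriptObject;
#     # its `x` and `y` attributes were copied from the original instance.
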
def create_script_module(nn_module, stubs_fn, share_types=True, is_tracing=False):
    """
    Create a new ScriptModule from an nn.Module.

    Args:
        nn_module: The original Python nn.Module that we are creating a ScriptModule for.
        stubs_fn: Lambda that takes an nn.Module and generates a list of ScriptMethodStubs to compile.
        share_types: Whether to share underlying JIT types between modules (if possible).
            NOTE: Only set this to False when we cannot guarantee type sharing will work
                correctly. This only happens today for traced modules, where the same
                module can produce different traced methods depending on the inputs.
        is_tracing: Whether this function is called during tracing or scripting. If tracing,
                we don't need to do AttributeTypeIsSupportedChecker because all the unsupported
                attributes will be baked as constants in the tracing graph. In addition,
                this check significantly slows down traced modules when the module size is big.
    """
    assert not isinstance(nn_module, torch.jit.RecursiveScriptModule)
    check_module_initialized(nn_module)
    concrete_type = get_module_concrete_type(nn_module, share_types)
    if not is_tracing:
        AttributeTypeIsSupportedChecker().check(nn_module)
    return create_script_module_impl(nn_module, concrete_type, stubs_fn)

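# A minimal sketch of how this entry point is typically reached (the module is
# hypothetical): `torch.jit.script` on an nn.Module lands here with
# `infer_methods_to_compile` from this file as the stubs_fn.
#
#     class MyModule(torch.nn.Module):
#         def forward(self, x):
#             return x + 1
#
#     scripted = create_script_module(MyModule(), infer_methods_to_compile)
#     # roughly equivalent, in user code: torch.jit.script(MyModule())
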
def create_script_module_impl(nn_module, concrete_type, stubs_fn):
    """
    Convert an nn.Module to a RecursiveScriptModule.

    Args:
        nn_module: The original Python nn.Module that we are creating a ScriptModule for.
        concrete_type: The fully initialized ConcreteType of the module.
        stubs_fn: Lambda that takes an nn.Module and generates a list of ScriptMethodStubs to compile.
    """
    cpp_module = torch._C._create_module_with_type(concrete_type.jit_type)
    method_stubs = stubs_fn(nn_module)
    property_stubs = get_property_stubs(nn_module)
    hook_stubs, pre_hook_stubs = get_hook_stubs(nn_module)

    user_annotated_ignored_attributes = getattr(
        nn_module, "__jit_ignored_attributes__", list()
    )
    ignored_properties = jit_ignored_properties(nn_module)

    def init_fn(script_module):
        # Initialize the ScriptModule:
        # 1. Copy the attributes/parameters/buffers from the original `nn_module` to the new ScriptModule.
        for name in concrete_type.get_attributes().keys():
            orig_value = getattr(nn_module, name)
            orig_value = (
                orig_value.value
                if isinstance(orig_value, torch.jit.Attribute)
                else orig_value
            )
            cpp_module.setattr(name, orig_value)

        # 2. Copy the submodules from the original `nn_module` to the new ScriptModule,
        #    recursively scripting them.
        for name, sub_concrete_type in concrete_type.get_modules():
            orig_value = getattr(nn_module, name)
            assert isinstance(
                orig_value, Module
            ), f"Expected Module but got {type(orig_value)}"
            module_type = sub_concrete_type.jit_type
            if isinstance(module_type, torch._C.InterfaceType):
                # use the interface inference rule to compile the module
                scripted = interface_script(module_type, orig_value)
            elif isinstance(orig_value, torch.jit.ScriptModule):
                scripted = orig_value
            else:
                # always reuse the provided stubs_fn to infer the methods to compile
                scripted = create_script_module_impl(
                    orig_value, sub_concrete_type, stubs_fn
                )

            cpp_module.setattr(name, scripted)
            script_module._modules[name] = scripted

        # 3. Copy @ignored/@unused methods and attrs from the original `nn_module` to the new ScriptModule.
        #    This ensures we can access these Python methods on the ScriptModule.
        for name in dir(nn_module):
            if name in ignored_properties:
                continue
            item = getattr(nn_module, name, None)
            if inspect.ismethod(item) and _jit_internal.is_ignored_fn(item):
                unbound_function = getattr(nn_module, name).__func__
                bound_method = unbound_function.__get__(script_module)
                setattr(script_module, name, bound_method)
            elif concrete_type.is_ignored_attribute(name):
                setattr(script_module, name, item)

        # For convenience, attach the concrete type to the new ScriptModule
        script_module._concrete_type = concrete_type

    # Actually create the ScriptModule, initializing it with the function we just defined
    script_module = torch.jit.RecursiveScriptModule._construct(cpp_module, init_fn)

    # Compile methods if necessary
    if concrete_type not in concrete_type_store.methods_compiled:
        create_methods_and_properties_from_stubs(
            concrete_type, method_stubs, property_stubs
        )
        # Create hooks after methods to ensure no name collisions between hooks and methods.
        # If done before, hooks can overshadow methods that aren't exported.
        create_hooks_from_stubs(concrete_type, hook_stubs, pre_hook_stubs)
        torch._C._run_emit_module_hook(cpp_module)
        concrete_type_store.methods_compiled.add(concrete_type)

    # Copy the forward hooks and pre-hooks to the new ScriptModule
    # to allow the hooks to be run from eager as ScriptFunctions
    for idx, fn in enumerate(script_module._c._get_forward_pre_hooks()):
        script_module._forward_pre_hooks[idx] = fn
    for idx, fn in enumerate(script_module._c._get_forward_hooks()):
        script_module._forward_hooks[idx] = fn

    # Special handling so methods like __len__ work in script methods on classes derived from containers
    if (
        isinstance(
            nn_module, (torch.nn.ModuleList, torch.nn.Sequential, torch.nn.ModuleDict)
        )
        and "__len__" not in cpp_module._method_names()
    ):
        script_module.define(f"def __len__(self):\n return {len(nn_module)}\n")
    if (
        isinstance(nn_module, torch.nn.ModuleDict)
        and "__contains__" not in cpp_module._method_names()
    ):
        if len(nn_module.keys()):
            keys = repr(list(nn_module.keys()))
            script_module.define(
                f"def __contains__(self, key: str):\n return key in {keys}\n"
            )
        else:
            script_module.define("def __contains__(self, key: str):\n return False\n")

    # Make the compiled methods available to the Python ScriptModule class.
    for method_stub in method_stubs:
        if method_stub.original_method is None:
            # define()'d methods don't have a Python original_method, so we
            # don't need to do any Python re-wrapping stuff
            continue

        name = method_stub.original_method.__name__
        if name != method_stub.def_.name().name:
            # TODO: Why skip this? Because @torch.jit._overload_method will
            # mangle the name of the function.
            continue
        script_method = cpp_module._get_method(name)

        # Wrap the original to propagate docstrings and such.
        # TODO: we don't currently do this for functions that are recursively
        # compiled, but we should.
        wrapped_script_method = functools.wraps(method_stub.original_method)(
            script_method
        )

        # Add the methods to the script_module directly. This ensures they will
        # be found first when `name` is looked up (as opposed to the stubs or
        # nn.Module.forward)
        script_module.__dict__[name] = wrapped_script_method

    # Make module properties available on the Python ScriptModule class.
    for property_stub in property_stubs:
        property_name = property_stub.def_.name().name
        fget = cpp_module._get_method(property_stub.def_.getter_name().name)
        # Setter is optional, so it may not exist.
        setter_name = property_stub.def_.setter_name()
        fset = cpp_module._get_method(setter_name.name) if setter_name else None
        script_module.__dict__[property_name] = property(fget, fset)

    # copy over python methods to script module if they aren't defined on the script module
    # this is currently an internal api used only on module containers
    for name in dir(nn_module):
        if name in ignored_properties:
            continue
        item = getattr(nn_module, name, None)
        if (
            _jit_internal.get_torchscript_modifier(item)
            is _jit_internal.FunctionModifiers.COPY_TO_SCRIPT_WRAPPER
        ):
            add_python_attr_to_scripted_model(script_module, nn_module, name)

    return script_module

# We define shims of certain attributes on the RecursiveScriptModule to support
# magic methods. To check if a script model defines an attribute, we need
# to also check that the attribute is not the shim.
def script_model_defines_attr(script_model, attr):
    script_attr = getattr(script_model, attr, None)
    if script_attr is None:
        return False
    default_attr = getattr(torch.jit.RecursiveScriptModule, attr, None)
    if default_attr is None:
        return False
    return script_attr != default_attr


def add_python_attr_to_scripted_model(script_model, orig, attr):
    if hasattr(orig, attr) and script_model_defines_attr(script_model, attr):
        setattr(script_model, attr, getattr(orig, attr))

def get_overload_annotations(mod, jit_ignored_properties):
    # original function => [(mangled overload name, overload function)]
    overloads = {}

    for name in dir(type(mod)):
        if name in jit_ignored_properties:
            continue
        item = getattr(mod, name, None)
        if not callable(item):
            continue

        # builtin functions like repr() in Python 2 do not have __module__ defined
        if hasattr(item, "__module__") and item.__module__ is not None:
            method_overloads = _jit_internal._get_overloaded_methods(
                item, mod.__class__
            )
            if method_overloads is None:
                continue

            if item.__func__ in method_overloads:
                raise RuntimeError(
                    _jit_internal.get_overload_no_implementation_error_message(
                        "method", item.__func__
                    )
                )

            names = [name + "__" + str(i) for i in range(len(method_overloads))]
            overloads[item] = list(zip(names, method_overloads))

    return overloads

def get_overload_name_mapping(overload_info):
    # Same format as __overloads__
    # original function => [overload names]
    overload_name_mappings: Dict[str, List[str]] = {}
    for orig_fn, overloads in overload_info.items():
        original_name = orig_fn.__name__
        if original_name not in overload_name_mappings:
            overload_name_mappings[original_name] = []

        for overload_name, _ in overloads:
            overload_name_mappings[original_name].append(overload_name)
    return overload_name_mappings

def _check_no_signature(func):
    signature = torch.jit.annotations.get_signature(
        func, None, fake_range(), inspect.ismethod(func)
    )
    if signature is None:
        qual_name = _jit_internal._qualified_name(func)
        raise RuntimeError(
            f"Must explicitly add type annotations to overloaded functions: {qual_name}"
        )


def make_stubs_for_overloads(overload_info):
    overload_stubs = []
    for orig_fn, overloads in overload_info.items():
        orig_ast = get_jit_def(
            orig_fn, orig_fn.__name__, self_name="RecursiveScriptModule"
        )
        for overload_name, overload_fn in overloads:
            _check_no_signature(overload_fn)
            over_ast = get_jit_def(
                overload_fn, overload_fn.__name__, self_name="RecursiveScriptModule"
            )
            new_ast = torch._C._replace_overloaded_method_decl(
                over_ast.decl(), orig_ast, overload_name
            )
            _rcb = _jit_internal.createResolutionCallbackFromClosure(orig_fn)
            overload_stubs.append(ScriptMethodStub(_rcb, new_ast, overload_fn))
    return overload_stubs

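# A minimal sketch of the overload pattern these helpers support (module and
# signatures are hypothetical; `torch.jit._overload_method` is the decorator
# this machinery pairs with):
#
#     class M(torch.nn.Module):
#         @torch.jit._overload_method  # noqa: F811
#         def forward(self, x: int) -> int:
#             pass
#
#         @torch.jit._overload_method  # noqa: F811
#         def forward(self, x: str) -> str:
#             pass
#
#         def forward(self, x):  # the actual implementation
#             return x
#
#     # get_overload_annotations finds the two annotated declarations, and
#     # make_stubs_for_overloads emits stubs mangled as forward__0, forward__1.
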
def check_module_initialized(mod):
    assert isinstance(mod, torch.nn.Module)
    if not hasattr(mod, "_parameters"):
        raise RuntimeError(
            f"'{torch.typename(type(mod))}' has not been initialized, did you forget to call 'super()'?"
        )

    # This is to avoid importing torch.distributed.nn
    if not hasattr(mod, "remote_parameters"):
        for name, param in mod._parameters.items():
            if param is not None and torch.nn.parameter.is_lazy(param):
                raise RuntimeError(
                    f"'{torch.typename(type(mod))}' has uninitialized parameters {name}. Did you forget to run a forward pass?"
                )
        for name, buf in mod._buffers.items():
            if buf is not None and torch.nn.parameter.is_lazy(buf):
                raise RuntimeError(
                    f"'{torch.typename(type(mod))}' has uninitialized buffers {name}. Did you forget to run a forward pass?"
                )

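# A minimal sketch of the failure this guards against (hypothetical module):
#
#     class Broken(torch.nn.Module):
#         def __init__(self):
#             pass  # missing super().__init__(), so _parameters never exists
#
#     # torch.jit.script(Broken()) reaches check_module_initialized, which
#     # raises: "'Broken' has not been initialized, did you forget to call
#     # 'super()'?"
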
def infer_methods_to_compile(nn_module):
    """Implement the default rules for which methods should act as starting points for compilation.

    (TODO add a link when the rules are published).
    """
    check_module_initialized(nn_module)
    user_annotated_ignored_attributes = getattr(
        nn_module, "__jit_ignored_attributes__", list()
    )
    ignored_properties = jit_ignored_properties(nn_module)

    methods: List[str] = []
    if hasattr(nn_module, "forward") and not _jit_internal.is_ignored_fn(
        nn_module.forward
    ):
        forward_func = getattr(nn_module.forward, "__func__", None)
        module_forward = getattr(torch.nn.Module, "forward", None)
        if forward_func != module_forward:
            methods = ["forward"]

    exported = []
    for name in dir(nn_module):
        if name in ignored_properties:
            continue
        item = getattr(nn_module, name, None)
        if (
            _jit_internal.get_torchscript_modifier(item)
            is _jit_internal.FunctionModifiers.EXPORT
        ):
            exported.append(name)

    methods = methods + exported

    overload_name_mappings = dict(getattr(nn_module, "__overloads__", {}))
    overload_info = get_overload_annotations(nn_module, ignored_properties)
    overload_name_mappings.update(get_overload_name_mapping(overload_info))
    overload_stubs = make_stubs_for_overloads(overload_info)

    nn_module.__overloads__ = overload_name_mappings

    # we shouldn't directly compile overloaded methods, just their overloads
    def ignore_overloaded(method_name):
        return method_name not in overload_name_mappings

    filtered_methods = filter(ignore_overloaded, methods)

    # De-duplicate the methods. We don't want to use a set to store the methods because it
    # introduces non-determinism to compile order.
    uniquer: Set[str] = set()
    uniqued_methods = []
    for name in filtered_methods:
        if name in uniquer:
            continue
        uniqued_methods.append(name)
        uniquer.add(name)

    stubs = []
    for method in uniqued_methods:
        stubs.append(make_stub_from_method(nn_module, method))
    return overload_stubs + stubs

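# A minimal sketch of the default rule above (hypothetical module): an
# overridden `forward` is always a compilation root, `@torch.jit.export` adds
# more roots, and everything else is only compiled if reachable from a root.
#
#     class M(torch.nn.Module):
#         def forward(self, x):
#             return x + 1
#
#         @torch.jit.export
#         def double(self, x):
#             return x * 2
#
#         def helper(self, x):  # compiled only if called from a root
#             return x - 1
#
#     # infer_methods_to_compile(M()) returns stubs for `forward` and `double`.
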
def get_hook_stubs(nn_module):
    """Return forward hook and pre_hook ScriptMethodStubs."""
    check_module_initialized(nn_module)
    hook_map: Dict = {}

    hook_stubs = []
    for hook in nn_module._forward_hooks.values():
        if hook.__name__ in hook_map:
            if id(hook) != id(hook_map[hook.__name__]):
                raise RuntimeError(
                    f"Hook '{hook.__name__}' on {type(nn_module).__name__} "
                    "has at least two different Python definitions."
                    " Please use unique names for all hooks."
                )
        else:
            hook_map[hook.__name__] = hook
        hook_stubs.append(make_stub(hook, hook.__name__))

    pre_hook_stubs = []
    for pre_hook in nn_module._forward_pre_hooks.values():
        if pre_hook.__name__ in hook_map:
            if id(pre_hook) != id(hook_map[pre_hook.__name__]):
                raise RuntimeError(
                    f"Pre-hook '{pre_hook.__name__}' on {type(nn_module).__name__} "
                    "has at least two different Python definitions."
                    " Please use unique names for all hooks."
                )
        else:
            hook_map[pre_hook.__name__] = pre_hook
        pre_hook_stubs.append(make_stub(pre_hook, pre_hook.__name__))

    return hook_stubs, pre_hook_stubs

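# A minimal sketch of the hooks this collects (hypothetical hook and module;
# scripted hooks must follow the typed nn.Module hook signature convention):
#
#     def bump_output(self, input: Tuple[torch.Tensor], output: torch.Tensor):
#         return output + 1
#
#     m = MyModule()
#     m.register_forward_hook(bump_output)
#     # get_hook_stubs(m) returns a stub for `bump_output`, so the scripted
#     # module runs it as a compiled ScriptFunction.
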
def get_property_stubs(nn_module):
    """Create property stubs for the properties of the module by creating method stubs for the getter and setter."""
    module_ty = type(nn_module)
    properties_asts = get_class_properties(module_ty, self_name="RecursiveScriptModule")
    rcbs = {}

    for name in dir(module_ty):
        item = getattr(module_ty, name, None)
        if isinstance(item, property):
            if not item.fget:
                raise RuntimeError(
                    f"Property {name} of {module_ty.__name__} must have a getter"
                )

            rcbs[name] = _jit_internal.createResolutionCallbackFromClosure(item.fget)

    stubs = [PropertyStub(rcbs[ast.name().name], ast) for ast in properties_asts]
    return stubs

def interface_script(mod_interface, nn_module):
    """
    Make a ScriptModule from an nn.Module, using the interface methods rule for determining which methods to compile.

    Args:
        mod_interface: the interface type that the module has
        nn_module: The original Python nn.Module that we are creating a ScriptModule for.
    """
    if isinstance(nn_module, torch.jit.ScriptModule):
        return nn_module

    check_module_initialized(nn_module)

    def infer_interface_methods_to_compile(nn_module):
        """Rule to infer the methods from the interface type.

        It is used to know which methods need to act as starting points for compilation.
        """
        stubs = []
        for method in mod_interface.getMethodNames():
            stubs.append(make_stub_from_method(nn_module, method))
        return stubs

    return create_script_module(nn_module, infer_interface_methods_to_compile)

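# A minimal sketch of the interface rule (interface and modules hypothetical):
# typing a submodule with a `torch.jit.interface` makes the interface methods
# the compilation roots for that submodule.
#
#     @torch.jit.interface
#     class Activation(torch.nn.Module):
#         def forward(self, x: torch.Tensor) -> torch.Tensor:
#             pass
#
#     class Wrapper(torch.nn.Module):
#         act: Activation  # submodule typed as a module interface
#
#         def __init__(self, act):
#             super().__init__()
#             self.act = act
#
#         def forward(self, x):
#             return self.act(x)
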
def try_compile_fn(fn, loc):
    if _jit_internal.is_ignored_fn(fn):
        # Don't do anything for @ignore'd functions
        return None

    if isinstance(fn, torch.nn.Module):
        # Since modules are callable, pybind recognizes them as functions, but
        # don't do anything for them
        return None

    if not inspect.isfunction(fn) and not inspect.ismethod(fn):
        raise RuntimeError(
            f"`{fn}` is not a function. Recursive scripting only supports "
            "Python functions or methods currently.\n"
            f"Consider manually annotating `{fn}` with @torch.jit.script."
        )

    # The object returned by __prepare_scriptable__ might have a different closure.
    # Resolve it here to get the right resolution callback.
    fn = fn.__prepare_scriptable__() if hasattr(fn, "__prepare_scriptable__") else fn  # type: ignore[operator]

    # We don't have the actual scope where the function was defined, but we can
    # extract the necessary info from the closed over variables on the function
    # object
    rcb = _jit_internal.createResolutionCallbackFromClosure(fn)
    return torch.jit.script(fn, _rcb=rcb)

def wrap_cpp_class(cpp_class):
    """Wrap this torch._C.Object in a Python RecursiveScriptClass."""
    return torch.jit.RecursiveScriptClass(cpp_class)


def wrap_cpp_module(cpp_module):
    """Wrap this torch._C.ScriptModule in a Python ScriptModule, recursively for all submodules."""

    def init_fn(script_module):
        for name, cpp_module in torch._C.ModuleDict(script_module._c).items():
            setattr(script_module, name, wrap_cpp_module(cpp_module))
        script_module._concrete_type = torch._C.ConcreteModuleType.from_jit_type(
            script_module._c._type()
        )
        for idx, fn in enumerate(script_module._c._get_forward_pre_hooks()):
            script_module._forward_pre_hooks[idx] = fn
        for idx, fn in enumerate(script_module._c._get_forward_hooks()):
            script_module._forward_hooks[idx] = fn

    return torch.jit.RecursiveScriptModule._construct(cpp_module, init_fn)

def compile_unbound_method(concrete_type, fn):
    if _jit_internal.is_ignored_fn(fn):
        return None
    stub = make_stub(fn, fn.__name__)
    with torch._jit_internal._disable_emit_hooks():
        # We don't want to call the hooks here since the graph that is calling
        # this function is not yet complete
        create_methods_and_properties_from_stubs(concrete_type, (stub,), ())
    return stub

def lazy_bind(concrete_type, unbound_method):
    """
    Return a function that lazily binds `unbound_method` to a provided Module IValue, then invokes the method.

    We do this so that any Python shenanigans that
    will poison type sharing are impossible at compile time.
    """

    def lazy_binding_method(cpp_module, *args):
        def init_fn(script_module):
            orig_class = concrete_type.py_class

            # Copy @ignored/@unused methods from the original module to the new one.
            # This ensures they are available during execution.
            for name in dir(orig_class):
                item = getattr(orig_class, name, None)
                if _jit_internal.is_ignored_fn(item):
                    setattr(script_module, name, item)

            # Copy constants over so they are available during execution.
            for name, value in concrete_type.get_constants().items():
                setattr(script_module, name, value)

        script_module = torch.jit.RecursiveScriptModule._construct(cpp_module, init_fn)
        method = types.MethodType(unbound_method, script_module)
        return method(*args)

    # make the lazy binding method "look like" the original method
    lazy_binding_method.original_fn = unbound_method  # type: ignore[attr-defined]
    lazy_binding_method.__name__ = unbound_method.__name__
    torch._jit_internal.copy_torchscript_modifier(unbound_method, lazy_binding_method)

    return lazy_binding_method