# bundled_inputs.py
  1. #!/usr/bin/env python3
  2. # mypy: allow-untyped-defs
  3. from typing import Any, TypeVar, Optional, Tuple, List, NamedTuple, Union, Sequence, Dict, Callable
  4. import textwrap
  5. import torch
  6. from torch._C import TupleType, ListType
  7. from torch.jit._recursive import wrap_cpp_module
# Generic placeholder so _inflate_expr can declare "returns the same type it was given".
T = TypeVar("T")
# Tensors whose storage holds more than this many elements are not bundled raw;
# they must be compressible (constant-fill, InflatableArg) or pass skip_size_check.
MAX_RAW_TENSOR_SIZE = 16
  10. class InflatableArg(NamedTuple):
  11. """Helper type for bundled inputs.
  12. 'value' is the compressed/deflated input that is stored in the model. Value
  13. must be of the same type as the argument to the function that it is a deflated
  14. input for.
  15. 'fmt' is a formatable code string that is executed to inflate the compressed data into
  16. the appropriate input. It can use 'value' as an input to the format str. It must result
  17. in a value of the same type as 'value'.
  18. 'fmt_fn' is a formatable function code string that is executed to inflate the compressed
  19. data into the appropriate input. It must result in a value of the same type as 'value'.
  20. The function name should be the formatable part of the string.
  21. Note: Only top level InflatableArgs can be inflated. i.e. you cannot place
  22. an inflatable arg inside of some other structure. You should instead create
  23. an inflatable arg such that the fmt code string returns the full structure
  24. of your input.
  25. """
  26. value: Any
  27. fmt: str = "{}"
  28. fmt_fn: str = ""
  29. def bundle_inputs(
  30. model: torch.jit.ScriptModule,
  31. inputs: Union[Optional[Sequence[Tuple[Any, ...]]], Dict[Callable, Optional[Sequence[Tuple[Any, ...]]]]],
  32. info: Optional[Union[List[str], Dict[Callable, List[str]]]] = None,
  33. *,
  34. _receive_inflate_expr: Optional[List[str]] = None,
  35. ) -> torch.jit.ScriptModule:
  36. """Create and return a copy of the specified model with inputs attached.
  37. The original model is not mutated or changed in any way.
  38. Models with bundled inputs can be invoked in a uniform manner by
  39. benchmarking and code coverage tools.
  40. If inputs is passed in as a list then the inputs will be bundled for 'forward'.
  41. If inputs is instead passed in as a map then all the methods specified in the map
  42. will have their corresponding inputs bundled. Info should match watchever type is
  43. chosen for the inputs.
  44. The returned model will support the following methods:
  45. `get_all_bundled_inputs_for_<function_name>() -> List[Tuple[Any, ...]]`
  46. Returns a list of tuples suitable for passing to the model like
  47. `for inp in model.get_all_bundled_inputs_for_foo(): model.foo(*inp)`
  48. `get_bundled_inputs_functions_and_info() -> Dict[str, Dict[str: List[str]]]`
  49. Returns a dictionary mapping function names to a metadata dictionary.
  50. This nested dictionary maps preset strings like:
  51. 'get_inputs_function_name' -> the name of a function attribute in this model that can be
  52. run to get back a list of inputs corresponding to that function.
  53. 'info' -> the user provided extra information about the bundled inputs
  54. If forward has bundled inputs then these following functions will also be defined on the returned module:
  55. `get_all_bundled_inputs() -> List[Tuple[Any, ...]]`
  56. Returns a list of tuples suitable for passing to the model like
  57. `for inp in model.get_all_bundled_inputs(): model(*inp)`
  58. `get_num_bundled_inputs() -> int`
  59. Equivalent to `len(model.get_all_bundled_inputs())`,
  60. but slightly easier to call from C++.
  61. Inputs can be specified in one of two ways:
  62. - The model can define `_generate_bundled_inputs_for_<function_name>`.
  63. If the user chooses this method inputs[<function>] should map to None
  64. - The `inputs` argument to this function can be a dictionary mapping functions to a
  65. list of inputs, of the same form that will be returned by get_all_bundled_inputs_for_<function_name>.
  66. Alternatively if only bundling inputs for forward the map can be omitted and a singular list of inputs
  67. can be provided instead.
  68. The type of the inputs is List[Tuple[Any, ...]]. The outer list corresponds with a
  69. list of inputs, the inner tuple is the list of args that together make up one input.
  70. For inputs of functions that take one arg, this will be a tuple of length one. The Any, ...
  71. is the actual data that makes up the args, e.g. a tensor.
  72. Info is an optional parameter that maps functions to a list of strings providing extra information about that
  73. function's bundled inputs. Alternatively if only bundling inputs for forward the map can be omitted and
  74. a singular list of information can be provided instead. This could be descriptions, expected outputs, etc.
  75. - Ex: info={model.forward : ['man eating icecream', 'an airplane', 'a dog']}
  76. This function will attempt to optimize arguments so that (e.g.)
  77. arguments like `torch.zeros(1000)` will be represented compactly.
  78. Only top-level arguments will be optimized.
  79. Tensors in lists or tuples will not.
  80. """
  81. if not isinstance(model, torch.jit.ScriptModule):
  82. raise Exception("Only ScriptModule is supported.") # noqa: TRY002
  83. ignored_methods, ignored_attrs = _get_bundled_inputs_attributes_and_methods(model)
  84. clone = torch._C._hack_do_not_use_clone_module_with_class( # type: ignore[attr-defined]
  85. model._c,
  86. ignored_methods,
  87. ignored_attrs,
  88. )
  89. # The above cloning function returns a torch._C.scriptmodule and we need a torch.jit.scriptmodule.
  90. # Fortunately theres a function in _recursive that does exactly that conversion.
  91. cloned_module = wrap_cpp_module(clone)
  92. if isinstance(inputs, dict):
  93. assert isinstance(info, dict) or info is None
  94. augment_many_model_functions_with_bundled_inputs(cloned_module, inputs, _receive_inflate_expr, info)
  95. else:
  96. assert isinstance(info, list) or info is None
  97. augment_model_with_bundled_inputs(cloned_module, inputs, _receive_inflate_expr, info)
  98. return cloned_module
  99. def augment_model_with_bundled_inputs(
  100. model: torch.jit.ScriptModule,
  101. inputs: Optional[Sequence[Tuple[Any, ...]]] = None,
  102. _receive_inflate_expr: Optional[List[str]] = None, # For debugging.
  103. info: Optional[List[str]] = None, # Optional argument to provide info about forward or its inputs
  104. skip_size_check=False,
  105. ) -> None:
  106. """Add bundled sample inputs to a model for the forward function.
  107. Models with bundled inputs can be invoked in a uniform manner by
  108. benchmarking and code coverage tools.
  109. Augmented models will support the following methods:
  110. `get_all_bundled_inputs() -> List[Tuple[Any, ...]]`
  111. Returns a list of tuples suitable for passing to the model like
  112. `for inp in model.get_all_bundled_inputs(): model(*inp)`
  113. `get_num_bundled_inputs() -> int`
  114. Equivalent to `len(model.get_all_bundled_inputs())`,
  115. but slightly easier to call from C++.
  116. `get_bundled_inputs_functions_and_info() -> Dict[str, Dict[str: List[str]]]`
  117. Returns a dictionary mapping function names to a metadata dictionary.
  118. This nested dictionary maps preset strings like:
  119. 'get_inputs_function_name' -> the name of a function attribute in this model that can be
  120. run to get back a list of inputs corresponding to that function.
  121. 'info' -> the user provided extra information about the bundled inputs
  122. Inputs can be specified in one of two ways:
  123. - The model can define `_generate_bundled_inputs_for_forward`.
  124. If the user chooses this method inputs should be None
  125. - `inputs` is a list of inputs of form List[Tuple[Any, ...]]. A list of tuples where the elements
  126. of each tuple are the args that make up one input.
  127. """
  128. if not isinstance(model, torch.jit.ScriptModule):
  129. raise Exception("Only ScriptModule is supported.") # noqa: TRY002
  130. forward: Callable = model.forward
  131. # Sometimes forward won't have a name attached so just in case
  132. if not hasattr(forward, "__name__"):
  133. forward.__name__ = 'forward'
  134. augment_many_model_functions_with_bundled_inputs(
  135. model,
  136. inputs={forward : inputs},
  137. _receive_inflate_expr=_receive_inflate_expr,
  138. info={forward : info} if info else None,
  139. skip_size_check=skip_size_check,
  140. )
def augment_many_model_functions_with_bundled_inputs(
    model: torch.jit.ScriptModule,
    inputs: Dict[Callable, Optional[Sequence[Tuple[Any, ...]]]],
    _receive_inflate_expr: Optional[List[str]] = None,  # For debugging.
    info: Optional[Dict[Callable, List[str]]] = None,  # Optional argument to provide info about the function or its inputs
    skip_size_check=False,
) -> None:
    """Add bundled sample inputs to a model for an arbitrary list of public functions.

    Models with bundled inputs can be invoked in a uniform manner by
    benchmarking and code coverage tools.

    Augmented models will support the following methods:

        `get_all_bundled_inputs_for_<function_name>() -> List[Tuple[Any, ...]]`
            Returns a list of tuples suitable for passing to the model like
            `for inp in model.get_all_bundled_inputs_for_foo(): model.foo(*inp)`

        `get_bundled_inputs_functions_and_info() -> Dict[str, Dict[str: List[str]]]`
            Returns a dictionary mapping function names to a metadata dictionary.
            This nested dictionary maps preset strings like:
                'get_inputs_function_name' -> the name of a function attribute in this model that can be
                    run to get back a list of inputs corresponding to that function.
                'info' -> the user provided extra information about the bundled inputs

    If forward has bundled inputs then these following functions are also defined:

        `get_all_bundled_inputs() -> List[Tuple[Any, ...]]`
            Returns a list of tuples suitable for passing to the model like
            `for inp in model.get_all_bundled_inputs(): model(*inp)`

        `get_num_bundled_inputs() -> int`
            Equivalent to `len(model.get_all_bundled_inputs())`,
            but slightly easier to call from C++.

    Inputs can be specified in one of two ways:

      - The model can define `_generate_bundled_inputs_for_<function_name>`.
        If the user chooses this method inputs[<function>] should map to None

      - The `inputs` argument to this function can be a dictionary mapping functions to a
        list of inputs, of the same form that will be returned by get_all_bundled_inputs_for_<function_name>.
        The type of the inputs is List[Tuple[Any, ...]]. The outer list corresponds with a
        list of inputs, the inner tuple is the list of args that together make up one input.
        For inputs of functions that take one arg, this will be a tuple of length one. The Any, ...
        is the actual data that makes up the args, e.g. a tensor.

    Info is an optional parameter that maps functions to a list of strings providing extra information about that
    function's bundled inputs. This could be descriptions, expected outputs, etc.
        - Ex: info={model.forward : ['man eating icecream', 'an airplane', 'a dog']}

    This function will attempt to optimize arguments so that (e.g.)
    arguments like `torch.zeros(1000)` will be represented compactly.
    Only top-level arguments will be optimized.
    Tensors in lists or tuples will not.
    """
    if not isinstance(model, torch.jit.ScriptModule):
        raise Exception("Only ScriptModule is supported.")  # noqa: TRY002

    if not inputs:
        raise Exception("Please provide inputs for at least 1 function")  # noqa: TRY002

    # Augmenting twice would redefine methods / re-register attributes; refuse early.
    if hasattr(model, "get_all_bundled_inputs") or hasattr(model, "get_bundled_inputs_functions_and_info"):
        raise Exception(  # noqa: TRY002
            "Models can only be augmented with bundled inputs once. "
            "This Model seems to have already been augmented with "
            "bundled inputs. Please start afresh with one that "
            "doesn't have bundled inputs.",
        )

    # Accumulates one TorchScript snippet per function; spliced into
    # get_bundled_inputs_functions_and_info at the end.
    get_bundled_inputs_functions_and_info_template = ""

    for function, input_list in inputs.items():
        # Derive the function's name; ScriptMethods may expose `.name` instead of `__name__`.
        if hasattr(function, "__name__"):
            function_name = function.__name__
        else:
            if hasattr(function, "name"):
                function_name = function.name  # type: ignore[attr-defined]
            else:
                raise Exception(  # noqa: TRY002
                    'At least one of your functions has no attribute name please ensure all have one. m.foo.name = "foo"')

        if input_list is not None and not isinstance(input_list, Sequence):
            raise TypeError(f"Error inputs for function {function_name} is not a Sequence")

        # Register a typed attribute to hold the deflated inputs:
        # List[Tuple[<function's arg types, excluding self>]].
        function_arg_types = [arg.type for arg in function.schema.arguments[1:]]  # type: ignore[attr-defined]
        deflated_inputs_type: ListType = ListType(TupleType(function_arg_types))
        model._c._register_attribute(f"_bundled_inputs_deflated_{function_name}", deflated_inputs_type, [])

        if hasattr(model, "_generate_bundled_inputs_for_" + function_name):
            if input_list is not None:
                raise Exception(  # noqa: TRY002
                    f"inputs[{function_name}] is not None, but _generate_bundled_inputs_for_{function_name} is already defined"
                )
            # Model author already defined _generate_bundled_inputs_for_<function_name>.
        elif input_list is None or len(input_list) == 0:
            raise Exception(  # noqa: TRY002
                f"inputs for {function_name} must be specified if "
                f"_generate_bundled_inputs_for_{function_name} is not already defined"
            )
        else:
            # Iterate over the inputs and args in each input.
            # Accumulate `deflated_inputs` as (possibly) compressed values
            # and `parts` to be joined into the expression that unpacks them.
            deflated_inputs = []
            parts = []
            for inp_idx, args in enumerate(input_list):
                if not isinstance(args, Tuple) and not isinstance(args, List):  # type: ignore[arg-type]
                    raise TypeError(
                        f"Error bundled input for function {function_name} idx: {inp_idx} is not a Tuple or a List"
                    )
                deflated_args = []
                parts.append("(")
                for arg_idx, arg in enumerate(args):
                    # Deflate each arg; InflatableArg.fmt_fn may also yield a
                    # helper method definition that must be installed on the model.
                    inflate_helper_fn_name = _get_inflate_helper_fn_name(arg_idx, inp_idx, function_name)
                    deflated, inflater, helper_definition = _inflate_expr(
                        arg,
                        f"deflated[{inp_idx}][{arg_idx}]",
                        inflate_helper_fn_name,
                        skip_size_check=skip_size_check,
                    )
                    deflated_args.append(deflated)
                    parts.append(f"    {inflater},")
                    if helper_definition:
                        model.define(textwrap.dedent(helper_definition))
                deflated_inputs.append(tuple(deflated_args))
                parts.append("),")
            parts.append("")
            expr = "\n".join(parts)

            # Back-channel return this expr for debugging.
            if _receive_inflate_expr is not None:
                _receive_inflate_expr.append(expr)
            setattr(model, f"_bundled_inputs_deflated_{function_name}", deflated_inputs)
            definition = textwrap.dedent("""
                def _generate_bundled_inputs_for_{name}(self):
                    deflated = self._bundled_inputs_deflated_{name}
                    return [
                {expr}
                    ]
                """).format(expr=expr, name=function_name)
            model.define(definition)

        # Define get_all_bundled_inputs_for_<function_name> that caches the generated inputs.
        model.define(textwrap.dedent("""
            def get_all_bundled_inputs_for_{name}(self):
                all_inputs = self._generate_bundled_inputs_for_{name}()
                assert all_inputs is not None
                return all_inputs
            """).format(name=function_name))

        # Add to the high level helper methods
        inputs_info = repr(info[function]) if info and function in info else '[]'
        get_bundled_inputs_functions_and_info_template += f"""
            temp_dict : Dict[str,List[str]] = {{}}
            info: List[str] = {inputs_info}

            temp_dict['info'] = info
            temp_dict['get_inputs_function_name'] = ['get_all_bundled_inputs_for_{function_name}']
            all_inputs['{function_name}'] = temp_dict
            """

        # To ensure backwards compatibility and a streamlined api for forward these wrappers are provided
        if function_name == 'forward':
            model.define(textwrap.dedent("""
                def get_all_bundled_inputs(self):
                    return self.get_all_bundled_inputs_for_forward()
                """))
            model.define(textwrap.dedent("""
                def get_num_bundled_inputs(self):
                    return len(self.get_all_bundled_inputs_for_forward())
                """))

    # Define some high level helper methods that act on all bundled inputs
    model.define(textwrap.dedent(f"""
        def get_bundled_inputs_functions_and_info(self):
            all_inputs : Dict[str, Dict[str,List[str]]] = {{}}
            {get_bundled_inputs_functions_and_info_template}
            return all_inputs
        """))
  296. def _inflate_expr(
  297. arg: T, ref: str, inflate_helper_fn_name: str, skip_size_check: bool = False
  298. ) -> Tuple[Union[T, torch.Tensor], str, Optional[str]]:
  299. # Allow custom inflation expressions any object.
  300. # For example, calling custom image-decoding ops.
  301. # Or just use "{}" as the format string to ignore size limits.
  302. if isinstance(arg, InflatableArg):
  303. if arg.fmt_fn:
  304. if arg.fmt not in ["{}", ""]:
  305. raise Exception( # noqa: TRY002
  306. f"Bundled input argument at position '{ref}' has "
  307. f"both arg.fmt_fn => \n{arg.fmt_fn} "
  308. f"\n and arg.fmt => {arg.fmt}. "
  309. "Please choose `arg.fmt` if the deflater is straightforward or "
  310. "`arg.fmt_fn` if you need a function."
  311. )
  312. helper_definition = arg.fmt_fn.format(inflate_helper_fn_name)
  313. expr = f"self.{inflate_helper_fn_name}({ref})"
  314. return arg.value, expr, helper_definition
  315. else:
  316. return arg.value, arg.fmt.format(ref), None
  317. if isinstance(arg, torch.Tensor):
  318. # Small-storage tensors can just be saved directly.
  319. if arg._typed_storage().size() <= MAX_RAW_TENSOR_SIZE or skip_size_check:
  320. return arg, ref, None
  321. # Small contiguous tensors can be cloned to have small storage.
  322. # TODO: Should we do this even for non-contiguous tensors?
  323. if arg.is_contiguous() and arg.numel() <= MAX_RAW_TENSOR_SIZE:
  324. return arg.clone(), ref, None
  325. # Example inputs commonly come from torch.zeros, torch.ones, or torch.full.
  326. # These can be represented compactly.
  327. for fmt in [torch.contiguous_format, torch.channels_last]:
  328. if arg.is_contiguous(memory_format=fmt) and (arg == arg.flatten()[0]).all().item():
  329. return (arg.flatten()[0].clone().expand(*arg.size()),
  330. f"{ref}.contiguous(memory_format={fmt})", None)
  331. # Prevent big tensors from being bundled by default.
  332. # TODO: Provide more useful diagnostics.
  333. raise Exception( # noqa: TRY002
  334. f"Bundled input argument at position '{ref}' is "
  335. f"a tensor with storage size {arg._typed_storage().size()}. "
  336. f"You probably don't want to bundle this as an input. "
  337. )
  338. else:
  339. return arg, ref, None
  340. def _get_bundled_inputs_attributes_and_methods(script_module: torch.jit.ScriptModule) -> Tuple[List[str], List[str]]:
  341. methods: List[str] = []
  342. attributes: List[str] = []
  343. # Has bundled inputs for forward
  344. if hasattr(script_module, 'get_all_bundled_inputs'):
  345. methods.append('get_all_bundled_inputs')
  346. methods.append('get_num_bundled_inputs')
  347. methods.append('run_on_bundled_input')
  348. if hasattr(script_module, 'get_bundled_inputs_functions_and_info'):
  349. methods.append('get_bundled_inputs_functions_and_info')
  350. all_info = script_module.get_bundled_inputs_functions_and_info()
  351. for function_name in all_info:
  352. methods.append("get_all_bundled_inputs_for_" + function_name)
  353. methods.append("_generate_bundled_inputs_for_" + function_name)
  354. attributes.append("_bundled_inputs_deflated_" + function_name)
  355. bundled_inputs_fn = getattr(
  356. script_module,
  357. f"get_all_bundled_inputs_for_{function_name}"
  358. )
  359. num_bundled_inputs: int = len(bundled_inputs_fn())
  360. # Check inflate helper functions for each function, argument and bundled input
  361. func = getattr(script_module, function_name)
  362. for arg_idx in range(len(func.schema.arguments) - 1):
  363. for input_idx in range(num_bundled_inputs):
  364. helper_fn_name = _get_inflate_helper_fn_name(
  365. arg_idx=arg_idx,
  366. input_idx=input_idx,
  367. function_name=function_name
  368. )
  369. # if the arg has an InflatableArg with fmt_fn, add the helper function name
  370. if hasattr(script_module, helper_fn_name):
  371. methods.append(helper_fn_name)
  372. return (methods, attributes)
  373. def _get_inflate_helper_fn_name(
  374. arg_idx: int,
  375. input_idx: int,
  376. function_name: str,
  377. ) -> str:
  378. return f"_inflate_helper_for_{function_name}_input_{input_idx}_arg_{arg_idx}"
  379. def bundle_randn(*size, dtype=None):
  380. """Generate a tensor that will be inflated with torch.randn."""
  381. stub = torch.zeros(1, dtype=dtype).expand(*size)
  382. return InflatableArg(value=stub, fmt="torch.randn_like({})")
  383. def bundle_large_tensor(t):
  384. """Wrap a tensor to allow bundling regardless of size."""
  385. return InflatableArg(value=t, fmt="{}")