import logging
import weakref
from typing import List, Set

import torch
from torch.autograd.graph import register_multi_grad_hook
from torch.nn.modules.module import (
    register_module_forward_hook,
    register_module_forward_pre_hook,
)
from torch.utils._pytree import tree_flatten
from torch.utils.hooks import RemovableHandle

logger = logging.getLogger(__name__)


class ModuleTracker:
    """
    ``ModuleTracker`` is a context manager that tracks the nn.Module hierarchy during execution
    so that other systems can query which Module is currently being executed (or its backward is
    being executed).

    You can access the ``parents`` attribute on this context manager to get the set of all the
    Modules currently being executed via their fqn (fully qualified name, also used as the key
    within the state_dict).
    You can access the ``is_bw`` attribute to know if you are currently running in backward or not.

    Note that ``parents`` is never empty and always contains the "Global" key. The ``is_bw`` flag
    will remain ``True`` after the forward until another Module is executed. If you need it to be
    more accurate, please submit an issue requesting this. Adding a map from fqn to the module
    instance is possible but not done yet, please submit an issue requesting this if you need it.

    Example usage

    .. code-block:: python

        mod = torch.nn.Linear(2, 2)

        with ModuleTracker() as tracker:
            # Access anything during the forward pass
            def my_linear(m1, m2, bias):
                print(f"Current modules: {tracker.parents}")
                return torch.mm(m1, m2.t()) + bias

            torch.nn.functional.linear = my_linear

            mod(torch.rand(2, 2))
    """

    parents: Set[str]
    """
    A Set containing the fqn for each module currently running their forward
    """

    def __init__(self) -> None:
        self.parents = {"Global"}
        self._known_modules: weakref.WeakKeyDictionary = weakref.WeakKeyDictionary()
        self._seen_modules: weakref.WeakSet = weakref.WeakSet()
        self._has_callback = False
        self._hooks: List[RemovableHandle] = []

    def _maybe_set_engine_callback(self):
        # This assumes no concurrent calls to backward
        if self._has_callback:
            return

        def callback():
            self.parents = {"Global"}
            self._has_callback = False

        torch.autograd.Variable._execution_engine.queue_callback(callback)
        self._has_callback = True

    @property
    def is_bw(self):
        """
        A boolean marking if this is currently running during the backward pass or not
        """
        return torch._C._current_graph_task_id() != -1

    def _get_mod_name(self, mod):
        if mod not in self._known_modules:
            self._known_modules[mod] = type(mod).__name__
        mod_name = self._known_modules[mod]
        # On first visit, assign fully qualified names to all direct children
        if mod not in self._seen_modules:
            for name, submod in mod.named_children():
                self._known_modules[submod] = f"{mod_name}.{name}"
                self._get_mod_name(submod)
            self._seen_modules.add(mod)
        return mod_name

    def _get_append_fn(self, name, is_bw):
        def fn(*args):
            if is_bw:
                self._maybe_set_engine_callback()
            if name in self.parents:
                logger.info(
                    "The module hierarchy tracking seems to be broken as this Module was already entered. %s during %s",
                    name,
                    "backward" if is_bw else "forward",
                )
            self.parents.add(name)

        return fn

    def _get_pop_fn(self, name, is_bw):
        def fn(*args):
            if name in self.parents:
                self.parents.remove(name)
            else:
                logger.info(
                    "The Module hierarchy tracking is confused as we're exiting a Module that was never entered. %s during %s",
                    name,
                    "backward" if is_bw else "forward",
                )

        return fn

    def _fw_pre_hook(self, mod, input):
        name = self._get_mod_name(mod)
        self._get_append_fn(name, False)()

        args, _ = tree_flatten(input)
        tensors = [a for a in args if isinstance(a, torch.Tensor) and a.requires_grad]
        if tensors:
            # During backward, pop the module once gradients for all its input tensors are ready
            self._hooks.append(
                register_multi_grad_hook(tensors, self._get_pop_fn(name, True))
            )

    def _fw_post_hook(self, mod, input, output):
        name = self._get_mod_name(mod)
        self._get_pop_fn(name, False)()

        args, _ = tree_flatten(output)
        tensors = [a for a in args if isinstance(a, torch.Tensor) and a.requires_grad]
        if tensors:
            # During backward, re-add the module once gradients for its output tensors are ready
            self._hooks.append(
                register_multi_grad_hook(tensors, self._get_append_fn(name, True))
            )

    def __enter__(self):
        self._fw_pre_handle = register_module_forward_pre_hook(self._fw_pre_hook)
        self._fw_post_handle = register_module_forward_hook(self._fw_post_hook)
        return self

    def __exit__(self, *args):
        self._fw_pre_handle.remove()
        self._fw_post_handle.remove()
        for hook in self._hooks:
            hook.remove()
        self._hooks.clear()