import sys
from contextlib import contextmanager
from functools import lru_cache as _lru_cache
from typing import Any

from torch.backends import __allow_nonbracketed_mutation, ContextProp, PropModule

# opt_einsum is an optional dependency; fall back to None if it is not installed.
try:
    import opt_einsum as _opt_einsum  # type: ignore[import]
except ImportError:
    _opt_einsum = None


@_lru_cache
def is_available() -> bool:
    r"""Return a bool indicating if opt_einsum is currently available.

    You must install opt-einsum in order for torch to automatically optimize einsum. To
    make opt-einsum available, you can install it along with torch:
    ``pip install torch[opt-einsum]`` or by itself: ``pip install opt-einsum``. If the
    package is installed, torch will import it automatically and use it accordingly.
    Use this function to check whether opt-einsum was installed and properly imported
    by torch.
    """
    return _opt_einsum is not None


def get_opt_einsum() -> Any:
    r"""Return the opt_einsum package if opt_einsum is currently available, else None."""
    return _opt_einsum


def _set_enabled(_enabled: bool) -> None:
    if not is_available() and _enabled:
        raise ValueError(
            f"opt_einsum is not available, so setting `enabled` to {_enabled} will not reap "
            "the benefits of calculating an optimal path for einsum. torch.einsum will "
            "fall back to contracting from left to right. To enable this optimal path "
            "calculation, please install opt-einsum."
        )
    global enabled
    enabled = _enabled


def _get_enabled() -> bool:
    return enabled


def _set_strategy(_strategy: str) -> None:
    if not is_available():
        raise ValueError(
            f"opt_einsum is not available, so setting `strategy` to {_strategy} will not be meaningful. "
            "torch.einsum will bypass path calculation and simply contract from left to right. "
            "Please install opt_einsum or unset `strategy`."
        )
    if not enabled:
        raise ValueError(
            f"opt_einsum is not enabled, so setting a `strategy` to {_strategy} will not be meaningful. "
            "torch.einsum will bypass path calculation and simply contract from left to right. "
            "Please set `enabled` to `True` as well or unset `strategy`."
        )
    if _strategy not in ["auto", "greedy", "optimal"]:
        raise ValueError(
            f"`strategy` must be one of the following: [auto, greedy, optimal] but is {_strategy}"
        )
    global strategy
    strategy = _strategy


def _get_strategy() -> str:
    return strategy


def set_flags(_enabled=None, _strategy=None):
    # Apply the requested flags and return the previous values so callers can restore them.
    orig_flags = (enabled, None if not is_available() else strategy)
    if _enabled is not None:
        _set_enabled(_enabled)
    if _strategy is not None:
        _set_strategy(_strategy)
    return orig_flags


@contextmanager
def flags(enabled=None, strategy=None):
    with __allow_nonbracketed_mutation():
        orig_flags = set_flags(enabled, strategy)
    try:
        yield
    finally:
        # recover the previous values
        with __allow_nonbracketed_mutation():
            set_flags(*orig_flags)


# The magic here is to allow us to intercept code like this:
#
#   torch.backends.opt_einsum.enabled = True


class OptEinsumModule(PropModule):
    def __init__(self, m, name):
        super().__init__(m, name)

    global enabled
    enabled = ContextProp(_get_enabled, _set_enabled)
    global strategy
    strategy = None
    if is_available():
        strategy = ContextProp(_get_strategy, _set_strategy)


# This is the sys.modules replacement trick, see
# https://stackoverflow.com/questions/2447353/getattr-on-a-module/7668273#7668273
sys.modules[__name__] = OptEinsumModule(sys.modules[__name__], __name__)

enabled = True if is_available() else False
strategy = "auto" if is_available() else None
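For context, here is a minimal usage sketch of the flags defined above. It is not part of this module; it assumes a torch build recent enough to expose torch.backends.opt_einsum, and it only exercises the public attributes shown in the source (enabled, strategy, is_available, flags).

    import torch
    import torch.backends.opt_einsum as opt_einsum_backend

    a = torch.randn(8, 16)
    b = torch.randn(16, 32)
    c = torch.randn(32, 4)

    if opt_einsum_backend.is_available():
        # Persistent settings: attribute writes are intercepted by OptEinsumModule
        # through ContextProp, which routes them to _set_enabled / _set_strategy.
        opt_einsum_backend.enabled = True
        opt_einsum_backend.strategy = "greedy"  # one of "auto", "greedy", "optimal"

        # Scoped settings: flags() restores the previous values on exit.
        with opt_einsum_backend.flags(enabled=True, strategy="optimal"):
            out = torch.einsum("ij,jk,kl->il", a, b, c)
    else:
        # Without opt-einsum installed, torch.einsum contracts from left to right.
        out = torch.einsum("ij,jk,kl->il", a, b, c)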