# pyre-strictr"""This package adds support for device memory management implemented in MTIA."""fromtypingimportAny,Optionalimporttorchfrom.import_device_t,is_initializedfrom._utilsimport_get_device_index
def memory_stats(device: Optional[_device_t] = None) -> dict[str, Any]:
    r"""Return a dictionary of MTIA memory allocator statistics for a given device.

    Args:
        device (torch.device, str, or int, optional): selected device. Returns
            statistics for the current device, given by current_device(),
            if device is None (default).
    """
    if not is_initialized():
        return {}
    return torch._C._mtia_memoryStats(_get_device_index(device, optional=True))
def max_memory_allocated(device: Optional[_device_t] = None) -> int:
    r"""Return the maximum memory allocated in bytes for a given device.

    Args:
        device (torch.device, str, or int, optional): selected device. Returns
            statistics for the current device, given by current_device(),
            if device is None (default).
    """
    if not is_initialized():
        return 0
    # Default to an empty dict so a missing "dram" entry cannot raise AttributeError.
    return memory_stats(device).get("dram", {}).get("peak_bytes", 0)


def reset_peak_memory_stats(device: Optional[_device_t] = None) -> None:
    r"""Reset the peak memory stats for a given device.

    Args:
        device (torch.device, str, or int, optional): selected device. Returns
            statistics for the current device, given by current_device(),
            if device is None (default).
    """
    if not is_initialized():
        return
    torch._C._mtia_resetPeakMemoryStats(_get_device_index(device, optional=True))


__all__ = [
    "memory_stats",
    "max_memory_allocated",
    "reset_peak_memory_stats",
]
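Below is a minimal usage sketch, not part of the module source above. It assumes an MTIA build of PyTorch with a device available, and the nested "dram"/"peak_bytes" layout of the stats dict is inferred from max_memory_allocated; actual keys may differ across builds.

# Usage sketch: exercise the helpers above on an available MTIA device.
import torch

if torch.mtia.is_available():
    device = torch.device("mtia:0")
    x = torch.empty(1024, 1024, device=device)  # allocate some device memory

    stats = torch.mtia.memory.memory_stats(device)         # full allocator stats dict
    peak = torch.mtia.memory.max_memory_allocated(device)  # peak bytes so far
    print(f"peak allocated: {peak} bytes")
    print(f"dram stats: {stats.get('dram', {})}")          # assumed key layout

    # Start a fresh peak-tracking window before the next workload.
    torch.mtia.memory.reset_peak_memory_stats(device)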