[Image: deepdream of a sidewalk]
PyTorch's Module class (torch.nn.Module)

Which of its methods should you know?


class Module:
    def __init__(self, *args, **kwargs) -> None:
        pass

    forward: Callable[..., Any] = _forward_unimplemented

    def register_buffer(self, name: str, tensor: Optional[Tensor], persistent: bool = True) -> None:
        r"""Add a buffer to the module."""
        pass
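    # e.g., in a subclass's __init__ (names are illustrative):
    #     self.register_buffer("running_mean", torch.zeros(num_features))
    # Buffers move with .to()/.cuda() and are saved in state_dict(), but are
    # not returned by parameters(), so the optimizer ignores them.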

    def register_parameter(self, name: str, param: Optional[Parameter]) -> None:
        r"""Add a parameter to the module."""
        pass

    def add_module(self, name: str, module: Optional['Module']) -> None:
        r"""Add a child module to the current module."""
        pass

    def register_module(self, name: str, module: Optional['Module']) -> None:
        r"""Alias for :func:`add_module`."""
        pass

    def get_submodule(self, target: str) -> "Module":
        """Return the submodule given by ``target`` if it exists, otherwise throw an error."""
        pass
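    # Takes a dot-separated path, e.g. model.get_submodule("layer1.0.conv1")
    # resolves model.layer1[0].conv1; raises AttributeError if the path is
    # invalid or does not resolve to a Module.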

    def get_parameter(self, target: str) -> "Parameter":
        """Return the parameter given by ``target`` if it exists, otherwise throw an error."""
        pass

    def get_buffer(self, target: str) -> "Tensor":
        """Return the buffer given by ``target`` if it exists, otherwise throw an error."""
        pass

    def get_extra_state(self) -> Any:
        """Return any extra state to include in the module's state_dict."""
        pass

    def set_extra_state(self, state: Any) -> None:
        """Set extra state contained in the loaded `state_dict`."""
        pass

    def _apply(self, fn, recurse=True):
        pass

    def apply(self: T, fn: Callable[['Module'], None]) -> T:
        r"""Apply ``fn`` recursively to every submodule (as returned by ``.children()``) as well as self."""
        pass
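    # Typical use is parameter initialization (init_weights is a user-defined name):
    #     def init_weights(m):
    #         if isinstance(m, nn.Linear):
    #             nn.init.xavier_uniform_(m.weight)
    #     model.apply(init_weights)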

    def cuda(self: T, device: Optional[Union[int, device]] = None) -> T:
        r"""Move all model parameters and buffers to the GPU."""
        pass

    def ipu(self: T, device: Optional[Union[int, device]] = None) -> T:
        r"""Move all model parameters and buffers to the IPU."""
        pass

    def xpu(self: T, device: Optional[Union[int, device]] = None) -> T:
        r"""Move all model parameters and buffers to the XPU."""
        pass

    def cpu(self: T) -> T:
        r"""Move all model parameters and buffers to the CPU."""
        pass

    def type(self: T, dst_type: Union[dtype, str]) -> T:
        r"""Casts all parameters and buffers to :attr:`dst_type`."""
        pass

    def float(self: T) -> T:
        r"""Casts all floating point parameters and buffers to ``float`` datatype."""
        pass

    def double(self: T) -> T:
        r"""Casts all floating point parameters and buffers to ``double`` datatype."""
        return self._apply(lambda t: t.double() if t.is_floating_point() else t)

    def half(self: T) -> T:
        r"""Casts all floating point parameters and buffers to ``half`` datatype."""
        pass

    def bfloat16(self: T) -> T:
        r"""Casts all floating point parameters and buffers to ``bfloat16`` datatype."""
        pass
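    # The casting helpers (float/double/half/bfloat16) are thin wrappers around
    # _apply, as the body of double() above shows; half() and bfloat16() are
    # mainly useful for reduced-precision inference.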

    def to_empty(self: T, *, device: Optional[DeviceLikeType], recurse: bool = True) -> T:
        r"""Move the parameters and buffers to the specified device without copying storage."""
        pass

    @overload
    def to(self, device: Optional[DeviceLikeType] = ..., dtype: Optional[dtype] = ..., non_blocking: bool = ...) -> Self:
        ...

    @overload
    def to(self, dtype: dtype, non_blocking: bool = ...) -> Self:
        ...

    @overload
    def to(self, tensor: Tensor, non_blocking: bool = ...) -> Self:
        ...

    def to(self, *args, **kwargs):
        pass
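    # Unlike Tensor.to, Module.to modifies the module in place (and returns self).
    # Accepted forms include:
    #     model.to(torch.device("cuda:0"))         # device only
    #     model.to(torch.float64)                  # dtype only (floating-point params/buffers)
    #     model.to("cuda", dtype=torch.half, non_blocking=True)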

    def register_full_backward_pre_hook(
        self,
        hook: Callable[["Module", _grad_t], Union[None, _grad_t]],
        prepend: bool = False,
    ) -> RemovableHandle:
        r"""Register a backward pre-hook on the module."""
        pass

    def register_backward_hook(
        self, hook: Callable[['Module', _grad_t, _grad_t], Union[None, _grad_t]]
    ) -> RemovableHandle:
        r"""Register a backward hook on the module."""
        pass

    def register_full_backward_hook(
        self,
        hook: Callable[["Module", _grad_t, _grad_t], Union[None, _grad_t]],
        prepend: bool = False,
    ) -> RemovableHandle:
        r"""Register a backward hook on the module."""
        pass

    def _get_backward_hooks(self):
        r"""Return the backward hooks for use in the call function."""
        pass

    def _get_backward_pre_hooks(self):
        pass

    def _maybe_warn_non_full_backward_hook(self, inputs, result, grad_fn):
        pass

    def register_forward_pre_hook(
        self,
        hook: Union[
            Callable[[T, Tuple[Any, ...]], Optional[Any]],
            Callable[[T, Tuple[Any, ...], Dict[str, Any]], Optional[Tuple[Any, Dict[str, Any]]]],
        ],
        *,
        prepend: bool = False,
        with_kwargs: bool = False,
    ) -> RemovableHandle:
        r"""Register a forward pre-hook on the module."""
        pass
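    # The hook runs before forward() and may rewrite the inputs, e.g.
    # (names are illustrative):
    #     def pre_hook(module, args):
    #         return (args[0] * 2,)                # returning a tuple replaces the args
    #     handle = model.register_forward_pre_hook(pre_hook)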

    def register_forward_hook(
        self,
        hook: Union[
            Callable[[T, Tuple[Any, ...], Any], Optional[Any]],
            Callable[[T, Tuple[Any, ...], Dict[str, Any], Any], Optional[Any]],
        ],
        *,
        prepend: bool = False,
        with_kwargs: bool = False,
        always_call: bool = False,
    ) -> RemovableHandle:
        r"""Register a forward hook on the module."""
        pass
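    # The hook runs after forward(); returning a value replaces the output.
    # A common debugging pattern is logging activation shapes:
    #     handle = model.register_forward_hook(
    #         lambda mod, inp, out: print(type(mod).__name__, out.shape))
    #     ...
    #     handle.remove()                          # hooks are removed via the handle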

    def _slow_forward(self, *input, **kwargs):
        pass

    def _wrapped_call_impl(self, *args, **kwargs):
        pass

    def _call_impl(self, *args, **kwargs):
        pass

    __call__: Callable[..., Any] = _wrapped_call_impl

    def __getstate__(self):
        pass

    def __setstate__(self, state):
        pass

    def __getattr__(self, name: str) -> Any:
        pass

    def __setattr__(self, name: str, value: Union[Tensor, 'Module']) -> None:
        pass

    def __delattr__(self, name):
        pass

    def _register_state_dict_hook(self, hook):
        r"""Register a state-dict hook."""
        pass

    def register_state_dict_pre_hook(self, hook):
        r"""Register a pre-hook for the :meth:`~torch.nn.Module.state_dict` method."""
        pass

    def _save_to_state_dict(self, destination, prefix, keep_vars):
        r"""Save module state to the `destination` dictionary."""
        pass

    # The user can pass an optional arbitrary mappable object to `state_dict`, in which case `state_dict` returns
    # back that same object. But if they pass nothing, an `OrderedDict` is created and returned.
    T_destination = TypeVar('T_destination', bound=Dict[str, Any])

    @overload
    def state_dict(self, *, destination: T_destination, prefix: str = ..., keep_vars: bool = ...) -> T_destination:
        ...

    @overload
    def state_dict(self, *, prefix: str = ..., keep_vars: bool = ...) -> Dict[str, Any]:
        ...

    def state_dict(self, *args, destination=None, prefix='', keep_vars=False):
        r"""Return a dictionary containing references to the whole state of the module."""
        pass
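    # The returned dict maps dotted names to tensors and is the standard
    # checkpoint format:
    #     torch.save(model.state_dict(), "checkpoint.pt")   # path is illustrative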

    def _register_load_state_dict_pre_hook(self, hook, with_module=False):
        r"""Register a pre-hook for the :meth:`~torch.nn.Module.load_state_dict` method."""
        pass

    def register_load_state_dict_post_hook(self, hook):
        r"""Register a post hook to be run after module's ``load_state_dict`` is called."""
        pass

    def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict,
                              missing_keys, unexpected_keys, error_msgs):
        r"""Copy parameters and buffers from :attr:`state_dict` into only this module, but not its descendants."""
        pass

    def load_state_dict(self, state_dict: Mapping[str, Any],
                        strict: bool = True, assign: bool = False):
        r"""Copy parameters and buffers from :attr:`state_dict` into this module and its descendants."""
        pass
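    # Counterpart to state_dict(); with strict=True (the default) the keys must
    # match exactly, and a NamedTuple of missing/unexpected keys is returned:
    #     model.load_state_dict(torch.load("checkpoint.pt"))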

    def _named_members(self, get_members_fn, prefix='', recurse=True, remove_duplicate: bool = True):
        r"""Help yield various names + members of modules."""
        pass

    def parameters(self, recurse: bool = True) -> Iterator[Parameter]:
        r"""Return an iterator over module parameters."""
        pass
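    # Usually consumed by the optimizer:
    #     optim = torch.optim.SGD(model.parameters(), lr=1e-2)

    def named_parameters(self, prefix: str = '', recurse: bool = True, remove_duplicate: bool = True) -> Iterator[Tuple[str, Parameter]]:
        r"""Return an iterator over module parameters, yielding both the name of the parameter as well as the parameter itself."""
        pass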

    def buffers(self, recurse: bool = True) -> Iterator[Tensor]:
        r"""Return an iterator over module buffers."""
        pass

    def named_buffers(self, prefix: str = '', recurse: bool = True, remove_duplicate: bool = True) -> Iterator[Tuple[str, Tensor]]:
        r"""Return an iterator over module buffers, yielding both the name of the buffer as well as the buffer itself."""
        pass

    def children(self) -> Iterator['Module']:
        r"""Return an iterator over immediate children modules."""
        pass

    def named_children(self) -> Iterator[Tuple[str, 'Module']]:
        r"""Return an iterator over immediate children modules, yielding both the name of the module as well as the module itself."""
        pass

    def modules(self) -> Iterator['Module']:
        r"""Return an iterator over all modules in the network."""
        pass

    def named_modules(self, memo: Optional[Set['Module']] = None, prefix: str = '', remove_duplicate: bool = True):
        r"""Return an iterator over all modules in the network, yielding both the name of the module as well as the module itself."""
        pass

    def train(self: T, mode: bool = True) -> T:
        r"""Set the module in training mode."""
        pass

    def eval(self: T) -> T:
        r"""Set the module in evaluation mode."""
        pass
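    # train()/eval() only toggle the self.training flag (recursively); they
    # affect layers such as Dropout and BatchNorm, not gradient tracking.
    # For inference, combine with torch.no_grad():
    #     model.eval()
    #     with torch.no_grad():
    #         out = model(x)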

    def requires_grad_(self: T, requires_grad: bool = True) -> T:
        r"""Change if autograd should record operations on parameters in this module."""
        pass
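    # Convenient for freezing a model during fine-tuning:
    #     backbone.requires_grad_(False)           # 'backbone' is illustrative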

    def zero_grad(self, set_to_none: bool = True) -> None:
        r"""Reset gradients of all model parameters."""
        pass
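    # With set_to_none=True (the default) .grad becomes None instead of a zero
    # tensor, which saves memory; in training loops this is usually called on
    # the optimizer rather than on the module.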

    def share_memory(self: T) -> T:
        r"""See :meth:`torch.Tensor.share_memory_`."""
        pass

    def _get_name(self):
        return self.__class__.__name__

    def extra_repr(self) -> str:
        r"""Set the extra representation of the module."""
        return ''

    def __repr__(self):
        pass

    def __dir__(self):
        pass

    def _replicate_for_data_parallel(self):
        pass

    def compile(self, *args, **kwargs):
        pass
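
To see how these methods interact, here is a minimal sketch of a subclass in use; TinyNet and its attribute names are made up for illustration, and only torch / torch.nn are assumed:

import torch
import torch.nn as nn

class TinyNet(nn.Module):
    def __init__(self):
        super().__init__()                                 # sets up the parameter/buffer/module registries
        self.fc = nn.Linear(4, 2)                          # __setattr__ registers this as a child module
        self.register_buffer("calls", torch.zeros(1))      # non-trainable state, still saved in state_dict

    def forward(self, x):
        self.calls += 1
        return self.fc(x)

net = TinyNet().eval()                                     # eval() returns self, so it chains
out = net(torch.randn(3, 4))                               # goes through __call__, which runs hooks around forward
print([name for name, _ in net.named_parameters()])        # ['fc.weight', 'fc.bias']
print(net.state_dict().keys())                             # includes 'calls' alongside the parameters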