@@ -55,13 +55,13 @@ def __init__(self, alpha=None, beta=None, gamma=None, fixed=None):
5555
5656 :param alpha: The output scaling parameter of the adaptive function.
5757 If ``None``, it is initialized to ``1``. Default is ``None``.
58- :type alpha: int | float | complex
58+ :type alpha: int | float
5959 :param beta: The input scaling parameter of the adaptive function.
6060 If ``None``, it is initialized to ``1``. Default is ``None``.
61- :type beta: int | float | complex
61+ :type beta: int | float
6262 :param gamma: The input shifting parameter of the adaptive function.
6363 If ``None``, it is initialized to ``0``. Default is ``None``.
64- :type gamma: int | float | complex
64+ :type gamma: int | float
6565 :param fixed: The names of parameters to keep fixed during training.
6666 These parameters will not be optimized and will have
6767 ``requires_grad=False``. Available options are ``"alpha"``,
@@ -76,4 +76,4 @@ def __init__(self, alpha=None, beta=None, gamma=None, fixed=None):
7676 :raises ValueError: If fixed contains invalid parameter names.
7777 """
7878 super().__init__(alpha, beta, gamma, fixed)
79- self.func = torch.nn.Sigmoid()
79+ self._func = torch.nn.Sigmoid()
0 commit comments