@@ -2875,7 +2875,7 @@ def call(self, x):
     ]
 )
 def layer_normalization(
-    x, gamma=None, beta=None, axis=-1, epsilon=None, rms_scaling=False
+    x, gamma=None, beta=None, axis=-1, epsilon=None, **kwargs
 ):
     """Layer normalization layer (Ba et al., 2016).
 
@@ -2889,9 +2889,6 @@ def layer_normalization(
             Default to -1.
         gamma: Optional scaling factor for the normalization.
         beta: Optional add offset for the normalized tensor.
-        rms_scaling: This is an approximate and faster
-            approach that avoids ever computing the mean of the input. Note that
-            this *isn't* equivalent to the computation that rms_normalization
         epsilon: A lower bound value for the norm.
             Defaults to `backend.epsilon()`.
 
@@ -2902,6 +2899,16 @@ def layer_normalization(
     >>> print(x_norm)
     array([-1.4142135 , -0.70710677, 0., 0.7071067 , 1.4142135 ])
     """
+    rms_scaling = kwargs.pop("rms_scaling", False)
+    if rms_scaling:
+        warnings.warn(
+            "You passed `rms_scaling=True`, which is deprecated. This argument "
+            "incorrectly scales the input by the variance, not the root mean "
+            "square. To correctly use RMS Normalization, please use "
+            "`keras.ops.rms_normalization` / `keras.ops.nn.rms_normalization` "
+            "instead."
+        )
+
     if any_symbolic_tensors((x,)):
         return LayerNorm(
             gamma=gamma,
@@ -2953,7 +2960,6 @@ def _broadcast(v):
         # Calculate the variance along self.axis (layer activations).
         variance = backend.numpy.var(x, axis=axis, keepdims=True)
         inv = backend.math.rsqrt(variance + epsilon)
-
         outputs = x * inv * backend.cast(_broadcast(gamma), x.dtype)
     elif backend.config.backend() == "torch" and is_continuous_axis(axis):
         # when using torch backend,use kernel to improve performance
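For callers migrating off the deprecated flag, a minimal sketch of the switch to the op named in the new warning (the exact keyword arguments accepted by keras.ops.rms_normalization, such as scale, axis, or epsilon, are assumptions and are not shown in this diff):

import numpy as np
import keras

x = np.array([1.0, 2.0, 3.0, 4.0, 5.0], dtype="float32")

# Before: deprecated flag, now routed through **kwargs and warned about.
y_old = keras.ops.layer_normalization(x, rms_scaling=True)

# After: the replacement op named in the warning message; called with
# defaults only, since its full signature is not part of this diff.
y_new = keras.ops.rms_normalization(x)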