自定义层是深度学习模型中灵活扩展的核心工具,常用于实现特定功能模块。以下是常见应用场景与实现技巧:
1. 自定义激活函数
import tensorflow as tf
class CustomReLU(tf.keras.layers.Layer):
    """ReLU variant with a configurable activation threshold.

    Values strictly greater than ``threshold`` pass through unchanged;
    everything else is zeroed out (standard ReLU is the special case
    ``threshold=0``).

    Args:
        threshold: Cutoff below which activations are set to zero.
    """

    def __init__(self, threshold=0.5, **kwargs):
        super().__init__(**kwargs)
        self.threshold = threshold

    def call(self, inputs):
        # Use tf.zeros_like so the "else" branch matches the input dtype;
        # a bare Python `0` (int32) can raise a dtype mismatch in tf.where
        # when `inputs` is float.
        return tf.where(inputs > self.threshold, inputs, tf.zeros_like(inputs))

    def get_config(self):
        # Required so the layer round-trips through model save/load.
        config = super().get_config()
        config["threshold"] = self.threshold
        return config
2. 自定义损失函数
def custom_loss(y_true, y_pred):
    """Scaled mean-squared-error loss: ``0.1 * mean((y_true - y_pred)^2)``.

    Args:
        y_true: Ground-truth tensor.
        y_pred: Model prediction tensor, broadcastable against ``y_true``.

    Returns:
        A scalar tensor with the down-weighted MSE.
    """
    squared_error = tf.square(y_true - y_pred)
    return tf.reduce_mean(squared_error * 0.1)
3. 自定义数据增强层
class CustomAugmentation(tf.keras.layers.Layer):
    """Data-augmentation layer that adds Gaussian noise to its inputs.

    Noise is applied only while training (matching the behavior of
    ``tf.keras.layers.GaussianNoise``); at inference the inputs pass
    through unchanged.

    Args:
        scale: Standard deviation of the additive zero-mean noise.
    """

    def __init__(self, scale=0.2, **kwargs):
        super().__init__(**kwargs)
        self.scale = scale

    def call(self, inputs, training=None):
        # The original applied noise unconditionally, which corrupts
        # inference-time predictions; augmentation must respect the
        # Keras `training` flag.
        if training:
            noise = tf.random.normal(tf.shape(inputs), mean=0.0, stddev=self.scale)
            return inputs + noise
        return inputs

    def get_config(self):
        # Required so the layer round-trips through model save/load.
        config = super().get_config()
        config["scale"] = self.scale
        return config
4. 自定义注意力机制
class CustomAttention(tf.keras.layers.Layer):
    """Single-head scaled dot-product self-attention.

    Computes ``softmax(Q K^T / sqrt(units)) V`` where Q, K and V are
    learned dense projections of the input.

    Args:
        units: Dimensionality of the query/key/value projections.
    """

    def __init__(self, units=64, **kwargs):
        super().__init__(**kwargs)
        self.units = units
        self.Wq = tf.keras.layers.Dense(units)
        self.Wk = tf.keras.layers.Dense(units)
        self.Wv = tf.keras.layers.Dense(units)

    def call(self, inputs):
        q = self.Wq(inputs)
        k = self.Wk(inputs)
        v = self.Wv(inputs)
        # Scaled attention scores, shape (batch, seq, seq).
        scale = tf.sqrt(tf.cast(self.units, tf.float32))
        scores = tf.matmul(q, k, transpose_b=True) / scale
        # The original returned `scores + v`, which omits the softmax and
        # adds V to the score matrix (a shape mismatch unless seq == units).
        # Correct attention normalizes the scores and uses them to weight V.
        weights = tf.nn.softmax(scores, axis=-1)
        return tf.matmul(weights, v)

    def get_config(self):
        # Required so the layer round-trips through model save/load.
        config = super().get_config()
        config["units"] = self.units
        return config