# neuro-lab8/model.py

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers as kl
from tensorflow.keras import optimizers as ko
from loss import CTCLoss
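
# The local `loss` module is not included in this listing. As a rough
# sketch of what it might contain (an assumption, not necessarily the
# lab's actual implementation), a Keras-compatible CTC loss can be
# built on keras.backend.ctc_batch_cost:
#
# def CTCLoss(y_true, y_pred):
#     batch = tf.cast(tf.shape(y_true)[0], dtype='int64')
#     input_len = tf.cast(tf.shape(y_pred)[1], dtype='int64') * tf.ones((batch, 1), dtype='int64')
#     label_len = tf.cast(tf.shape(y_true)[1], dtype='int64') * tf.ones((batch, 1), dtype='int64')
#     return keras.backend.ctc_batch_cost(y_true, y_pred, input_len, label_len)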
# Optionally enable GPU memory growth so TensorFlow does not reserve
# all device memory up front:
# g = tf.config.experimental.list_physical_devices('GPU')
# for i in g:
#     tf.config.experimental.set_memory_growth(i, True)


def model(input_dim, output_dim, rnn_layers=5, rnn_units=128):
    """DeepSpeech2-style acoustic model: a 2D convolutional frontend,
    a stack of bidirectional GRUs, and a softmax over output_dim + 1
    classes (the extra class is the CTC blank token).
    """
    # Input: a batch of spectrograms of shape (time, input_dim),
    # with a variable-length time axis.
    li = kl.Input((None, input_dim))
    # Add a channel axis so the 2D convolutions can operate on it.
    l1 = kl.Reshape((-1, input_dim, 1))(li)
    # First conv block halves both the time and frequency resolution.
    l2 = kl.Conv2D(32,
                   kernel_size=[11, 41],
                   strides=[2, 2],
                   padding='same',
                   use_bias=False)(l1)
    l3 = kl.BatchNormalization()(l2)
    l4 = kl.ReLU()(l3)
    # Second conv block halves the frequency resolution again.
    l5 = kl.Conv2D(32,
                   kernel_size=[11, 21],
                   strides=[1, 2],
                   padding='same',
                   use_bias=False)(l4)
    l6 = kl.BatchNormalization()(l5)
    l7 = kl.ReLU()(l6)
    # Collapse the frequency and channel axes into a single feature
    # axis, giving a (time, features) sequence for the RNN stack.
    lb = kl.Reshape((-1, l7.shape[-2] * l7.shape[-1]))(l7)
    # Stack of bidirectional GRUs with dropout between (but not after) them.
    for i in range(1, rnn_layers + 1):
        r = kl.GRU(rnn_units,
                   activation='tanh',
                   recurrent_activation='sigmoid',
                   use_bias=True,
                   return_sequences=True,
                   reset_after=True)
        lb = kl.Bidirectional(r, merge_mode='concat')(lb)
        if i < rnn_layers:
            lb = kl.Dropout(rate=0.5)(lb)
    # Classification head: the softmax spans the vocabulary plus one
    # extra class for the CTC blank token.
    lc1 = kl.Dense(rnn_units * 2, activation='relu')(lb)
    lc2 = kl.Dropout(0.5)(lc1)
    lo = kl.Dense(output_dim + 1, activation='softmax')(lc2)
    m = keras.Model(li, lo)
    m.compile(optimizer=ko.Adam(0.0001),
              loss=CTCLoss)
    return m
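

# Usage sketch: build the model for illustrative dimensions. The values
# input_dim=193 (spectrogram bins) and output_dim=31 (character
# vocabulary size) are assumptions for demonstration, not taken from
# the lab itself.
if __name__ == '__main__':
    m = model(input_dim=193, output_dim=31)
    m.summary()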