Fix regularizers not imported
chatbot_constructor.py (+9 -8)
@@ -3,6 +3,7 @@ import numpy as np
 from keras.models import Model
 from keras.saving import load_model
 from keras.layers import *
+from keras.regularizers import L1
 from tensorflow.keras.optimizers import RMSprop
 from keras.preprocessing.text import Tokenizer
 import os
@@ -57,9 +58,9 @@ def train(message: str = "", regularization: float = 0.0001, dropout: float = 0.
     dropout1_layer = Dropout(dropout)(emb_layer)
     attn_layer = MultiHeadAttention(num_heads=4, key_dim=128)(dropout1_layer, dropout1_layer, dropout1_layer)
     noise_layer = GaussianNoise(0.1)(attn_layer)
-    conv1_layer = Conv1D(kernels_count, kernel_size, padding='same', activation='relu', strides=1, input_shape=(64, 128), kernel_regularizer=
-    conv2_layer = Conv1D(16, 4, padding='same', activation='relu', strides=1, kernel_regularizer=
-    conv3_layer = Conv1D(8, 2, padding='same', activation='relu', strides=1, kernel_regularizer=
+    conv1_layer = Conv1D(kernels_count, kernel_size, padding='same', activation='relu', strides=1, input_shape=(64, 128), kernel_regularizer=L1(regularization))(noise_layer)
+    conv2_layer = Conv1D(16, 4, padding='same', activation='relu', strides=1, kernel_regularizer=L1(regularization))(conv1_layer)
+    conv3_layer = Conv1D(8, 2, padding='same', activation='relu', strides=1, kernel_regularizer=L1(regularization))(conv2_layer)
     flatten_layer = Flatten()(conv3_layer)
     attn_flatten_layer = Flatten()(attn_layer)
     conv1_flatten_layer = Flatten()(conv1_layer)
@@ -67,16 +68,16 @@ def train(message: str = "", regularization: float = 0.0001, dropout: float = 0.
     conv3_flatten_layer = Flatten()(conv3_layer)
     concat1_layer = Concatenate()([flatten_layer, attn_flatten_layer, conv1_flatten_layer, conv2_flatten_layer, conv3_flatten_layer])
     dropout2_layer = Dropout(dropout)(concat1_layer)
-    dense1_layer = Dense(512, activation="linear", kernel_regularizer=
+    dense1_layer = Dense(512, activation="linear", kernel_regularizer=L1(regularization))(dropout2_layer)
     prelu1_layer = PReLU()(dense1_layer)
     dropout3_layer = Dropout(dropout)(prelu1_layer)
-    dense2_layer = Dense(256, activation="tanh", kernel_regularizer=
+    dense2_layer = Dense(256, activation="tanh", kernel_regularizer=L1(regularization))(dropout3_layer)
     dropout4_layer = Dropout(dropout)(dense2_layer)
-    dense3_layer = Dense(256, activation="relu", kernel_regularizer=
+    dense3_layer = Dense(256, activation="relu", kernel_regularizer=L1(regularization))(dropout4_layer)
     dropout5_layer = Dropout(dropout)(dense3_layer)
-    dense4_layer = Dense(100, activation="tanh", kernel_regularizer=
+    dense4_layer = Dense(100, activation="tanh", kernel_regularizer=L1(regularization))(dropout5_layer)
     concat2_layer = Concatenate()([dense4_layer, prelu1_layer, attn_flatten_layer, conv1_flatten_layer])
-    dense4_layer = Dense(resps_len, activation="softmax", kernel_regularizer=
+    dense4_layer = Dense(resps_len, activation="softmax", kernel_regularizer=L1(regularization))(concat2_layer)
     model = Model(inputs=input_layer, outputs=dense4_layer)
 
     X = []
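For context, a minimal standalone sketch of the pattern this commit relies on: import L1 from keras.regularizers and pass an instance as kernel_regularizer so the layer's weights incur an L1 penalty in the loss. The layer sizes and shapes below are illustrative only and not taken from chatbot_constructor.py; the regularization value just mirrors the default in the train() signature shown above.

# Illustrative sketch, not the Space's code: applying keras.regularizers.L1
from keras.models import Model
from keras.layers import Input, Dense
from keras.regularizers import L1

regularization = 0.0001  # assumed value, matching the train() default above

inputs = Input(shape=(64,))
# kernel_regularizer=L1(...) adds an L1 penalty on this layer's kernel weights
hidden = Dense(32, activation="relu", kernel_regularizer=L1(regularization))(inputs)
outputs = Dense(10, activation="softmax", kernel_regularizer=L1(regularization))(hidden)

model = Model(inputs=inputs, outputs=outputs)
model.compile(optimizer="rmsprop", loss="categorical_crossentropy")
model.summary()

Without the import, a bare name like L1 raises NameError at model-construction time, which is the failure the commit title refers to.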