早上前2堂: RNN
# Training data: short phrases labelled positive (1.0) or negative (0.0).
X_Data = ['good', 'bad', 'worse', 'so good']
y = [1.0, 0.0, 0.0, 1.0]

# Build a character-level vocabulary in order of first appearance.
# Codes start at 1 so that 0 stays free as the padding value used later.
Encoding = dict()
for words in X_Data:
    for char in words:
        if char not in Encoding:
            # len(Encoding) + 1 replaces the original manual counter:
            # it assigns the exact same codes 1..N in the same order.
            Encoding[char] = len(Encoding) + 1

# Encode every phrase as the list of its character codes.
X_squence = [[Encoding[char] for char in words] for words in X_Data]
# Pad every code sequence to a common length of 8: shorter phrases get 0s
# appended at the end ('post'), so all samples share one fixed shape.
import numpy as np
from tensorflow.keras.preprocessing.sequence import pad_sequences

X = pad_sequences(X_squence, maxlen=8, padding='post', value=0)
# Labels as a NumPy array so Keras can consume them directly.
y = np.array(y)
# Build the network: embed each character code, run the sequence through a
# SimpleRNN, and classify into 2 classes.
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Embedding, SimpleRNN, Dense

RNN_Model = Sequential()
# input_dim=11: character codes 1-10 plus the padding value 0.
RNN_Model.add(Embedding(input_dim=11, output_dim=11, input_length=8))
RNN_Model.add(SimpleRNN(30))
# Bug fix: the model is compiled with SparseCategoricalCrossentropy()
# (default from_logits=False), which expects probabilities — the original
# Dense(2) emitted raw logits, so the loss was computed on unnormalized
# values. A softmax output makes the loss correct; argmax at predict
# time is unchanged.
RNN_Model.add(Dense(2, activation='softmax'))
# Compile and train: Adam at lr=0.001, sparse categorical cross-entropy
# (labels are class indices, not one-hot), tracking accuracy.
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.losses import SparseCategoricalCrossentropy

optimizer = Adam(learning_rate=0.001)
loss_fn = SparseCategoricalCrossentropy()
RNN_Model.compile(optimizer=optimizer, loss=loss_fn, metrics=['acc'])
RNN_Model.fit(X, y, epochs=30, batch_size=2)
# Inference: per-sample class = index of the larger of the two outputs.
probs = RNN_Model.predict(X)
predicted = np.argmax(probs, axis=-1)
predicted
# True labels, to eyeball against the predictions above.
y
早上后2堂:
讲解一些演算法,ex:爬山演算法,运用程式让同学了解运作原理。