@artiya4u
Last active March 21, 2022 07:56
Write a program that takes a list of numbers and returns the number that occurs second most frequently. [1, 2, 2] => 1 [1, 2, 2, 3, 3, 3] => 2 [4, 4, 4, 4, 1, 2, 2, 3, 3, 3] => 3
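A minimal plain-Python sketch of the intended behaviour (this reference version uses collections.Counter and, like the script below, ignores 0 as padding):

from collections import Counter

def second_most_common(numbers):
    # Count non-zero values only; 0 is treated as padding.
    counts = Counter(n for n in numbers if n != 0)
    # most_common() returns (value, count) pairs sorted by count, descending.
    return counts.most_common(2)[1][0]

print(second_most_common([1, 2, 2]))                      # => 1
print(second_most_common([1, 2, 2, 3, 3, 3]))             # => 2
print(second_most_common([4, 4, 4, 4, 1, 2, 2, 3, 3, 3])) # => 3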
import random
import numpy as np
from collections import defaultdict
import matplotlib.pyplot as plt
from tensorflow import keras
list_size = 10  # fixed input length; shorter lists are zero-padded
class_num = 9   # values range from 1 to 9 (0 is reserved as padding)
def find_second_best(items):
    """Return the value with the second-highest count."""
    counts = defaultdict(int)
    for x in items:
        if x == 0:  # ignore zero, which is used as filler/padding
            continue
        counts[x] += 1
    # Sort by count, descending, and take the second entry.
    (label, _) = sorted(counts.items(), reverse=True, key=lambda tup: tup[1])[1]
    return label
def gen_data(size):
    """Generate `size` random lists and one-hot encoded second-best labels."""
    lists = []
    labels = []
    for n in range(size):
        items = []
        items_arr = []
        for i in range(list_size):
            rand = random.randint(0, class_num)
            items.append(rand)
            items_arr.append([np.array(rand)])  # shape each value as a 1-element channel
        lists.append(np.array(items_arr))  # shape (list_size, 1)
        label = find_second_best(items)
        labels.append(np.array([label]))
    # Labels are 1..class_num, so shift to 0-based before one-hot encoding.
    return np.array(lists), keras.utils.to_categorical(np.array(labels) - 1)
random.seed(12345)  # for reproducibility
train_lists, train_labels = gen_data(80000)
test_lists, test_labels = gen_data(20000)
model = keras.models.Sequential()
model.add(keras.layers.Conv1D(32, 3, input_shape=(list_size, 1), activation='relu'))
model.add(keras.layers.Conv1D(64, 3, activation='relu'))
model.add(keras.layers.BatchNormalization())
model.add(keras.layers.Conv1D(64, 3, activation='relu'))
model.add(keras.layers.BatchNormalization())
model.add(keras.layers.Conv1D(64, 3, activation='relu'))
model.add(keras.layers.MaxPooling1D(pool_size=2))
model.add(keras.layers.BatchNormalization())
model.add(keras.layers.Flatten())
model.add(keras.layers.Dense(128, activation='relu'))
model.add(keras.layers.Dense(64, activation='relu'))
model.add(keras.layers.Dense(32, activation='relu'))
model.add(keras.layers.BatchNormalization())
model.add(keras.layers.Dense(9, activation='softmax'))
model.summary()
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
history = model.fit(train_lists, train_labels, epochs=200,
                    validation_data=(test_lists, test_labels))
test_loss, test_acc = model.evaluate(test_lists, test_labels, verbose=2)
print(f'Eval Accuracy: {test_acc * 100:.2f} %')
testcases = [
    [1, 2, 2, 0, 0, 0, 0, 0, 0, 0],
    [1, 2, 2, 3, 3, 3, 0, 0, 0, 0],
    [4, 4, 4, 4, 1, 2, 2, 3, 3, 3],
    [4, 2, 4, 2, 7, 4, 1, 7, 7, 4],
]
print('result using code:')
for t in testcases:
    second_best = find_second_best(t)
    print(t, '=>', second_best)
# The model expects input of shape (n, list_size, 1), so add the channel axis.
list_predict = np.asarray(testcases)[..., np.newaxis]
pred = model.predict(list_predict)
print('result using model:')
for i in range(len(pred)):
    sorted_idx = np.argsort(pred[i])
    # Class index i corresponds to the value i + 1 (labels were shifted before one-hot encoding).
    print(testcases[i], '=>', sorted_idx[-1] + 1)
plt.plot(history.history['accuracy'], label='accuracy')
plt.plot(history.history['val_accuracy'], label='val_accuracy')
plt.xlabel('Epoch')
plt.ylabel(f'Accuracy (test acc: {test_acc:.4f})')
plt.ylim([0, 1])
plt.legend(loc='lower right')
plt.show()
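To query the trained model with a new list, the input has to be zero-padded to list_size and given a channel axis before calling model.predict. A minimal sketch, assuming the variables above are in scope (the helper name predict_second_best is chosen here only for illustration):

def predict_second_best(model, numbers):
    # Zero-pad to the fixed input length and add the channel axis.
    padded = numbers + [0] * (list_size - len(numbers))
    x = np.asarray(padded, dtype=float).reshape(1, list_size, 1)
    probs = model.predict(x)[0]
    return int(np.argmax(probs)) + 1  # shift back from class index to value

print(predict_second_best(model, [1, 2, 2, 3, 3, 3]))  # expected: 2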