인공지능

Keras functional

쿠와와 2020. 12. 10. 22:30
# Day_27_01_functional.py
import tensorflow as tf
import numpy as np


# So far we have used the Sequential API; from here on, the functional API.
# First, build a model that computes accuracy on the AND dataset.

def and_sequential():
    """Fit a single sigmoid unit on the AND truth table and print its accuracy.

    Baseline example using the Sequential API before introducing the
    functional API below.
    """
    data = [[0, 0, 0],
            [0, 1, 0],
            [1, 0, 0],
            [1, 1, 1]]

    data = np.int32(data)

    x = data[:, :-1]    # first two columns: the AND inputs
    y = data[:, -1:]    # last column: the AND label
    print(x.shape, y.shape)  # (4, 2) (4, 1)

    model = tf.keras.Sequential()
    model.add(tf.keras.layers.Input(shape=[2]))
    model.add(tf.keras.layers.Dense(1, activation='sigmoid'))

    # 'lr' is a deprecated alias removed in recent Keras; use 'learning_rate'.
    model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.01),
                  loss=tf.keras.losses.binary_crossentropy,
                  metrics=['acc'])

    model.fit(x, y, epochs=100, verbose=2)
    print('acc :', model.evaluate(x, y))  # evaluate returns [loss, acc]


def xor_sequential():
    """Fit a small MLP (one hidden ReLU layer) on the XOR truth table.

    XOR is not linearly separable, so unlike AND it needs a hidden layer.
    """
    data = [[0, 0, 0],
            [0, 1, 1],
            [1, 0, 1],
            [1, 1, 0]]

    data = np.int32(data)

    x = data[:, :-1]    # first two columns: the XOR inputs
    y = data[:, -1:]    # last column: the XOR label
    print(x.shape, y.shape)  # (4, 2) (4, 1)

    model = tf.keras.Sequential()
    model.add(tf.keras.layers.Input(shape=[2]))
    model.add(tf.keras.layers.Dense(9, activation='relu'))
    model.add(tf.keras.layers.Dense(1, activation='sigmoid'))

    # 'lr' is a deprecated alias removed in recent Keras; use 'learning_rate'.
    model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.01),
                  loss=tf.keras.losses.binary_crossentropy,
                  metrics=['acc'])

    model.fit(x, y, epochs=100, verbose=2)
    print('acc :', model.evaluate(x, y))  # evaluate returns [loss, acc]


def xor_functional_basic():
    """Solve XOR with the Keras functional API and inspect an inner layer.

    After training, builds a second model that exposes the hidden layer's
    output, then prints that layer's activations and weights.
    """
    data = [[0, 0, 0],
            [0, 1, 1],
            [1, 0, 1],
            [1, 1, 0]]

    data = np.int32(data)

    x = data[:, :-1]
    y = data[:, -1:]
    # print(x.shape, y.shape)  # (4, 2) (4, 1)

    # Functional style: each layer object is *called* on the previous tensor.
    # dense1(inputs) is shorthand for dense1.__call__(inputs), and most code
    # chains it in one line: out = Dense(...)(out).  The layer objects are
    # kept in named variables here so they can be inspected after training.
    inputs = tf.keras.layers.Input(shape=[2])
    dense1 = tf.keras.layers.Dense(9, activation='relu')
    output1 = dense1(inputs)
    dense2 = tf.keras.layers.Dense(1, activation='sigmoid')
    output2 = dense2(output1)

    # A Model ties the original input tensor to the final output tensor.
    model = tf.keras.models.Model(inputs, output2)

    # 'lr' is a deprecated alias removed in recent Keras; use 'learning_rate'.
    model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.01),
                  loss=tf.keras.losses.binary_crossentropy,
                  metrics=['acc'])

    model.fit(x, y, epochs=10, verbose=2)
    print('acc :', model.evaluate(x, y))

    # ----------------------- #
    # A new model can expose any intermediate tensor of the trained model;
    # it reuses the trained weights, no retraining needed.
    new_model = tf.keras.Model(model.input, dense1.output)
    preds = new_model.predict(x)    # hidden-layer activations for each input
    print(preds)

    # ----------------------- #
    print(dense1.weights)   # the variables (kernel and bias) the layer trained

    w, b = dense1.weights
    print(w.numpy())
    print(b.numpy())


# input 이 여러개일 경우
# Functional API with multiple inputs (not possible with Sequential).
def xor_functional_multi_input():
    """XOR with two separate one-column inputs merged via concatenate.

    Each input column gets its own Input/Dense branch; the branches are
    concatenated before the final sigmoid unit.
    """
    data = [[0, 0, 0],
            [0, 1, 1],
            [1, 0, 1],
            [1, 1, 0]]

    data = np.int32(data)

    x1 = data[:, 0:1]   # first input column
    x2 = data[:, 1:2]   # second input column
    y = data[:, 2:3]    # XOR label

    # One branch per input; concatenate joins them along the feature axis.
    input1 = tf.keras.layers.Input(shape=[1])
    output1 = tf.keras.layers.Dense(9, activation='relu')(input1)

    input2 = tf.keras.layers.Input(shape=[1])
    output2 = tf.keras.layers.Dense(9, activation='relu')(input2)

    concat = tf.keras.layers.concatenate([output1, output2], axis=1)

    output3 = tf.keras.layers.Dense(1, activation='sigmoid')(concat)

    # The model takes a *list* of inputs and produces the final output.
    model = tf.keras.models.Model([input1, input2], output3)

    # 'lr' is a deprecated alias removed in recent Keras; use 'learning_rate'.
    model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.01),
                  loss=tf.keras.losses.binary_crossentropy,
                  metrics=['acc'])

    model.fit([x1, x2], y, epochs=10, verbose=2)
    print('acc :', model.evaluate([x1, x2], y))


def xor_functional_multi_inout():
    """Functional model with two inputs AND two outputs (XOR and AND heads).

    Both output heads branch off the same concatenated hidden features;
    each gets its own loss/metric during training.
    """
    data = [[0, 0, 0, 0],
            [0, 1, 1, 0],
            [1, 0, 1, 0],
            [1, 1, 0, 1]]
    data = np.int32(data)

    x1 = data[:, 0:1]
    x2 = data[:, 1:2]
    y1 = data[:, 2:3]   # XOR of the inputs
    y2 = data[:, 3:4]   # AND of the inputs

    input1 = tf.keras.layers.Input(shape=[1])
    output1 = tf.keras.layers.Dense(9, activation='relu')(input1)

    input2 = tf.keras.layers.Input(shape=[1])
    output2 = tf.keras.layers.Dense(9, activation='relu')(input2)

    concat = tf.keras.layers.concatenate([output1, output2], axis=1)

    # First output head.  Explicit names make history/metric keys readable.
    output3 = tf.keras.layers.Dense(3, activation='relu', name='output3')(concat)
    output4 = tf.keras.layers.Dense(1, activation='sigmoid', name='output4')(output3)

    # Second output head, branching off the same concat tensor.
    output5 = tf.keras.layers.Dense(3, activation='relu', name='output5')(concat)
    output6 = tf.keras.layers.Dense(1, activation='sigmoid', name='output6')(output5)

    # Lists of input and output tensors define the multi-in/multi-out model.
    model = tf.keras.models.Model([input1, input2], [output4, output6])

    # 'lr' is a deprecated alias removed in recent Keras; use 'learning_rate'.
    model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.01),
                  loss=tf.keras.losses.binary_crossentropy,
                  metrics=['acc'])

    history = model.fit([x1, x2], [y1, y2], epochs=10, verbose=2)
    print('acc :', model.evaluate([x1, x2], [y1, y2]))
    # Sample output:
    # acc : [1.324426531791687, 0.6935293078422546, 0.6308972239494324, 0.5, 0.75]
    # Without layer names the keys are auto-generated:
    # dict_keys(['loss', 'dense_2_loss', 'dense_3_loss', 'dense_2_acc', 'dense_3_acc'])
    # With named layers they become:
    # dict_keys(['loss', 'output3_loss', 'output4_loss', 'output3_acc', 'output4_acc'])
    print(history.history.keys())


# Entry point: guard so importing this module does not trigger training.
if __name__ == '__main__':
    # and_sequential()
    # xor_sequential()
    xor_functional_basic()
    # xor_functional_multi_input()
    # xor_functional_multi_inout()