티스토리 뷰
fashion MNIST의 기본 이미지 분류하기입니다.
fashion MNIST Dataset은 텐서플로우 케라스에서 기본으로 제공되는 Dataset입니다.
설명은 markdown형태로 주피터노트북 셀 위에 첨부하였기에 참조하시면 좋을 것 같습니다.
오류가 발생한 것도 지우지 않았습니다.
코딩은 오류를 수정하는 과정이 중요하기에 오류를 찾는 과정까지 그대로 넣었습니다.
01. import¶
- tensorflow : tf 명칭으로 사용
- keras
- numpy : np 명칭으로 사용
- matplotlib : plt 명칭으로 사용 -> 시각화
In [49]:
import tensorflow as tf
from tensorflow import keras
import numpy as np
import matplotlib.pyplot as plt
In [50]:
# TensorFlow version check (this notebook was run on TF 2.5.0).
print(tf.__version__)
2.5.0
In [ ]:
In [51]:
# Fashion-MNIST ships with Keras: load_data() returns 60,000 28x28
# grayscale training images and 10,000 test images, labels 0-9.
fashion_mnist = keras.datasets.fashion_mnist
(train_images, train_labels), (test_images, test_labels) = fashion_mnist.load_data()
class¶
label | class |
---|---|
0 | T-shirt/top |
1 | Trouser |
2 | Pullover |
3 | Dress |
4 | Coat |
5 | Sandal |
6 | Shirt |
7 | Sneaker |
8 | Bag |
9 | Ankle boot |
In [52]:
# Human-readable names for the ten Fashion-MNIST label indices (0-9),
# in label order so class_names[label] gives the class name.
class_names = [
    'T-shirt/top',  # 0
    'Trouser',      # 1
    'Pullover',     # 2
    'Dress',        # 3
    'Coat',         # 4
    'Sandal',       # 5
    'Shirt',        # 6
    'Sneaker',      # 7
    'Bag',          # 8
    'Ankle boot',   # 9
]
In [ ]:
data 살펴보기¶
In [53]:
train_images.shape
Out[53]:
(60000, 28, 28)
In [54]:
len(train_labels)
Out[54]:
60000
In [55]:
train_labels
Out[55]:
array([9, 0, 0, ..., 3, 0, 5], dtype=uint8)
In [56]:
test_images.shape
Out[56]:
(10000, 28, 28)
In [57]:
len(test_labels)
Out[57]:
10000
In [58]:
test_labels
Out[58]:
array([9, 2, 1, ..., 8, 1, 5], dtype=uint8)
In [ ]:
데이터 전처리¶
In [59]:
# Show the first training image with a colorbar — at this point the
# pixel values are still in the raw 0-255 range (see colorbar scale).
plt.figure()
plt.imshow(train_images[0])
plt.colorbar()
plt.grid(False)
plt.show()
In [60]:
train_images[0]
Out[60]:
array([[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 13, 73, 0, 0, 1, 4, 0, 0, 0, 0, 1, 1, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 36, 136, 127, 62, 54, 0, 0, 0, 1, 3, 4, 0, 0, 3], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 102, 204, 176, 134, 144, 123, 23, 0, 0, 0, 0, 12, 10, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 155, 236, 207, 178, 107, 156, 161, 109, 64, 23, 77, 130, 72, 15], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 69, 207, 223, 218, 216, 216, 163, 127, 121, 122, 146, 141, 88, 172, 66], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 200, 232, 232, 233, 229, 223, 223, 215, 213, 164, 127, 123, 196, 229, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 183, 225, 216, 223, 228, 235, 227, 224, 222, 224, 221, 223, 245, 173, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 193, 228, 218, 213, 198, 180, 212, 210, 211, 213, 223, 220, 243, 202, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 0, 12, 219, 220, 212, 218, 192, 169, 227, 208, 218, 224, 212, 226, 197, 209, 52], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 99, 244, 222, 220, 218, 203, 198, 221, 215, 213, 222, 220, 245, 119, 167, 56], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 55, 236, 228, 230, 228, 240, 232, 213, 218, 223, 234, 217, 217, 209, 92, 0], [ 0, 0, 1, 4, 6, 7, 2, 0, 0, 0, 0, 0, 237, 226, 217, 223, 222, 219, 222, 221, 216, 223, 229, 215, 218, 255, 77, 0], [ 0, 3, 0, 0, 0, 0, 0, 0, 0, 62, 145, 204, 228, 207, 213, 221, 218, 208, 211, 218, 224, 223, 219, 215, 224, 244, 159, 0], [ 0, 0, 0, 0, 18, 44, 82, 107, 189, 228, 220, 222, 217, 226, 200, 205, 211, 230, 224, 234, 176, 188, 250, 248, 233, 238, 215, 0], [ 0, 57, 187, 208, 224, 221, 224, 208, 204, 214, 208, 209, 200, 159, 245, 193, 206, 223, 255, 255, 221, 234, 221, 211, 220, 232, 246, 0], [ 3, 202, 228, 224, 
221, 211, 211, 214, 205, 205, 205, 220, 240, 80, 150, 255, 229, 221, 188, 154, 191, 210, 204, 209, 222, 228, 225, 0], [ 98, 233, 198, 210, 222, 229, 229, 234, 249, 220, 194, 215, 217, 241, 65, 73, 106, 117, 168, 219, 221, 215, 217, 223, 223, 224, 229, 29], [ 75, 204, 212, 204, 193, 205, 211, 225, 216, 185, 197, 206, 198, 213, 240, 195, 227, 245, 239, 223, 218, 212, 209, 222, 220, 221, 230, 67], [ 48, 203, 183, 194, 213, 197, 185, 190, 194, 192, 202, 214, 219, 221, 220, 236, 225, 216, 199, 206, 186, 181, 177, 172, 181, 205, 206, 115], [ 0, 122, 219, 193, 179, 171, 183, 196, 204, 210, 213, 207, 211, 210, 200, 196, 194, 191, 195, 191, 198, 192, 176, 156, 167, 177, 210, 92], [ 0, 0, 74, 189, 212, 191, 175, 172, 175, 181, 185, 188, 189, 188, 193, 198, 204, 209, 210, 210, 211, 188, 188, 194, 192, 216, 170, 0], [ 2, 0, 0, 0, 66, 200, 222, 237, 239, 242, 246, 243, 244, 221, 220, 193, 191, 179, 182, 182, 181, 176, 166, 168, 99, 58, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 40, 61, 44, 72, 41, 35, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=uint8)
In [61]:
# Scale pixel values from 0-255 down to the 0-1 range.
train_images = train_images / 255.0
# NOTE: the commented line below is the original bug, deliberately kept
# for the write-up — it overwrote test_images with the 60k TRAINING
# images, which later caused the "Data cardinality is ambiguous" error
# (x sizes: 60000 vs y sizes: 10000) at evaluate() time.
#test_images = train_images / 255.0
test_images = test_images / 255.0
In [62]:
train_images[0]
Out[62]:
array([[0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.00392157, 0. , 0. , 0.05098039, 0.28627451, 0. , 0. , 0.00392157, 0.01568627, 0. , 0. , 0. , 0. , 0.00392157, 0.00392157, 0. ], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.01176471, 0. , 0.14117647, 0.53333333, 0.49803922, 0.24313725, 0.21176471, 0. , 0. , 0. , 0.00392157, 0.01176471, 0.01568627, 0. , 0. , 0.01176471], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.02352941, 0. , 0.4 , 0.8 , 0.69019608, 0.5254902 , 0.56470588, 0.48235294, 0.09019608, 0. , 0. , 0. , 0. , 0.04705882, 0.03921569, 0. ], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.60784314, 0.9254902 , 0.81176471, 0.69803922, 0.41960784, 0.61176471, 0.63137255, 0.42745098, 0.25098039, 0.09019608, 0.30196078, 0.50980392, 0.28235294, 0.05882353], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.00392157, 0. , 0.27058824, 0.81176471, 0.8745098 , 0.85490196, 0.84705882, 0.84705882, 0.63921569, 0.49803922, 0.4745098 , 0.47843137, 0.57254902, 0.55294118, 0.34509804, 0.6745098 , 0.25882353], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.00392157, 0.00392157, 0.00392157, 0. , 0.78431373, 0.90980392, 0.90980392, 0.91372549, 0.89803922, 0.8745098 , 0.8745098 , 0.84313725, 0.83529412, 0.64313725, 0.49803922, 0.48235294, 0.76862745, 0.89803922, 0. ], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. 
, 0.71764706, 0.88235294, 0.84705882, 0.8745098 , 0.89411765, 0.92156863, 0.89019608, 0.87843137, 0.87058824, 0.87843137, 0.86666667, 0.8745098 , 0.96078431, 0.67843137, 0. ], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.75686275, 0.89411765, 0.85490196, 0.83529412, 0.77647059, 0.70588235, 0.83137255, 0.82352941, 0.82745098, 0.83529412, 0.8745098 , 0.8627451 , 0.95294118, 0.79215686, 0. ], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.00392157, 0.01176471, 0. , 0.04705882, 0.85882353, 0.8627451 , 0.83137255, 0.85490196, 0.75294118, 0.6627451 , 0.89019608, 0.81568627, 0.85490196, 0.87843137, 0.83137255, 0.88627451, 0.77254902, 0.81960784, 0.20392157], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.02352941, 0. , 0.38823529, 0.95686275, 0.87058824, 0.8627451 , 0.85490196, 0.79607843, 0.77647059, 0.86666667, 0.84313725, 0.83529412, 0.87058824, 0.8627451 , 0.96078431, 0.46666667, 0.65490196, 0.21960784], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.01568627, 0. , 0. , 0.21568627, 0.9254902 , 0.89411765, 0.90196078, 0.89411765, 0.94117647, 0.90980392, 0.83529412, 0.85490196, 0.8745098 , 0.91764706, 0.85098039, 0.85098039, 0.81960784, 0.36078431, 0. ], [0. , 0. , 0.00392157, 0.01568627, 0.02352941, 0.02745098, 0.00784314, 0. , 0. , 0. , 0. , 0. , 0.92941176, 0.88627451, 0.85098039, 0.8745098 , 0.87058824, 0.85882353, 0.87058824, 0.86666667, 0.84705882, 0.8745098 , 0.89803922, 0.84313725, 0.85490196, 1. , 0.30196078, 0. ], [0. , 0.01176471, 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.24313725, 0.56862745, 0.8 , 0.89411765, 0.81176471, 0.83529412, 0.86666667, 0.85490196, 0.81568627, 0.82745098, 0.85490196, 0.87843137, 0.8745098 , 0.85882353, 0.84313725, 0.87843137, 0.95686275, 0.62352941, 0. ], [0. , 0. , 0. , 0. 
, 0.07058824, 0.17254902, 0.32156863, 0.41960784, 0.74117647, 0.89411765, 0.8627451 , 0.87058824, 0.85098039, 0.88627451, 0.78431373, 0.80392157, 0.82745098, 0.90196078, 0.87843137, 0.91764706, 0.69019608, 0.7372549 , 0.98039216, 0.97254902, 0.91372549, 0.93333333, 0.84313725, 0. ], [0. , 0.22352941, 0.73333333, 0.81568627, 0.87843137, 0.86666667, 0.87843137, 0.81568627, 0.8 , 0.83921569, 0.81568627, 0.81960784, 0.78431373, 0.62352941, 0.96078431, 0.75686275, 0.80784314, 0.8745098 , 1. , 1. , 0.86666667, 0.91764706, 0.86666667, 0.82745098, 0.8627451 , 0.90980392, 0.96470588, 0. ], [0.01176471, 0.79215686, 0.89411765, 0.87843137, 0.86666667, 0.82745098, 0.82745098, 0.83921569, 0.80392157, 0.80392157, 0.80392157, 0.8627451 , 0.94117647, 0.31372549, 0.58823529, 1. , 0.89803922, 0.86666667, 0.7372549 , 0.60392157, 0.74901961, 0.82352941, 0.8 , 0.81960784, 0.87058824, 0.89411765, 0.88235294, 0. ], [0.38431373, 0.91372549, 0.77647059, 0.82352941, 0.87058824, 0.89803922, 0.89803922, 0.91764706, 0.97647059, 0.8627451 , 0.76078431, 0.84313725, 0.85098039, 0.94509804, 0.25490196, 0.28627451, 0.41568627, 0.45882353, 0.65882353, 0.85882353, 0.86666667, 0.84313725, 0.85098039, 0.8745098 , 0.8745098 , 0.87843137, 0.89803922, 0.11372549], [0.29411765, 0.8 , 0.83137255, 0.8 , 0.75686275, 0.80392157, 0.82745098, 0.88235294, 0.84705882, 0.7254902 , 0.77254902, 0.80784314, 0.77647059, 0.83529412, 0.94117647, 0.76470588, 0.89019608, 0.96078431, 0.9372549 , 0.8745098 , 0.85490196, 0.83137255, 0.81960784, 0.87058824, 0.8627451 , 0.86666667, 0.90196078, 0.2627451 ], [0.18823529, 0.79607843, 0.71764706, 0.76078431, 0.83529412, 0.77254902, 0.7254902 , 0.74509804, 0.76078431, 0.75294118, 0.79215686, 0.83921569, 0.85882353, 0.86666667, 0.8627451 , 0.9254902 , 0.88235294, 0.84705882, 0.78039216, 0.80784314, 0.72941176, 0.70980392, 0.69411765, 0.6745098 , 0.70980392, 0.80392157, 0.80784314, 0.45098039], [0. 
, 0.47843137, 0.85882353, 0.75686275, 0.70196078, 0.67058824, 0.71764706, 0.76862745, 0.8 , 0.82352941, 0.83529412, 0.81176471, 0.82745098, 0.82352941, 0.78431373, 0.76862745, 0.76078431, 0.74901961, 0.76470588, 0.74901961, 0.77647059, 0.75294118, 0.69019608, 0.61176471, 0.65490196, 0.69411765, 0.82352941, 0.36078431], [0. , 0. , 0.29019608, 0.74117647, 0.83137255, 0.74901961, 0.68627451, 0.6745098 , 0.68627451, 0.70980392, 0.7254902 , 0.7372549 , 0.74117647, 0.7372549 , 0.75686275, 0.77647059, 0.8 , 0.81960784, 0.82352941, 0.82352941, 0.82745098, 0.7372549 , 0.7372549 , 0.76078431, 0.75294118, 0.84705882, 0.66666667, 0. ], [0.00784314, 0. , 0. , 0. , 0.25882353, 0.78431373, 0.87058824, 0.92941176, 0.9372549 , 0.94901961, 0.96470588, 0.95294118, 0.95686275, 0.86666667, 0.8627451 , 0.75686275, 0.74901961, 0.70196078, 0.71372549, 0.71372549, 0.70980392, 0.69019608, 0.65098039, 0.65882353, 0.38823529, 0.22745098, 0. , 0. ], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0.15686275, 0.23921569, 0.17254902, 0.28235294, 0.16078431, 0.1372549 , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ], [0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. , 0. ]])
In [63]:
# Preview the first 25 training images in a 5x5 grid, each labelled
# with its class name (ticks and grid suppressed for a clean look).
plt.figure(figsize=(10, 10))
for idx in range(25):
    plt.subplot(5, 5, idx + 1)
    plt.xticks([])
    plt.yticks([])
    plt.grid(False)
    plt.imshow(train_images[idx], cmap=plt.cm.binary)
    plt.xlabel(class_names[train_labels[idx]])
plt.show()
In [ ]:
03. 모델 구성¶
- 층(layer) 설정 : 간단한 층을 연결하여 구성
- tf.keras.layers.Dense
- Flatten : 28 x 28 2차원 배열 -> 784의 1차원 배열로 변환
- 학습되는 가중치는 없고 데이터를 변환만 함
- Dense : densely-connected / fully-connected layer
- 첫번째 : 128개 노드(뉴런)
- 활성화 함수 : relu
- 두번째 : 10개 노드
- 활성화 함수 : softmax
- 10개 노드 : 분류항목
- 첫번째 : 128개 노드(뉴런)
In [64]:
# Simple feed-forward classifier:
#  - Flatten: 28x28 2-D image -> 784-element 1-D vector (no learned weights)
#  - Dense(128, relu): fully-connected hidden layer
#  - Dense(10, softmax): one probability per clothing class
model = keras.Sequential([
    keras.layers.Flatten(input_shape=(28, 28)),
    keras.layers.Dense(128, activation='relu'),
    keras.layers.Dense(10, activation='softmax')
])
In [ ]:
04. 모델 컴파일¶
- 손실함수(Loss Function) : 훈련하는 동안 label과의 오차를 측정
- 학습 : 손실값을 최소화
- 옵티마이저(Optimizer) : 손실함수를 바탕으로 모델의 업데이트 방법을 결정
- 지표(Metrics) : 훈련과 테스트 단계를 모니터링하기 위해 사용
In [65]:
# sparse_categorical_crossentropy: labels are plain integer class ids
# (0-9), not one-hot vectors — hence the "sparse" variant.
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
In [ ]:
05. 모델 훈련¶
- 훈련데이터를 모델에 대입
- 모델의 이미지와 레이블을 매핑
- 테스트 세트에 대한 모델의 예측 / 레이블과 비교
In [66]:
# Train for 5 epochs on the 60k normalized training images.
model.fit(train_images, train_labels, epochs=5)
Epoch 1/5 1875/1875 [==============================] - 20s 5ms/step - loss: 0.4962 - accuracy: 0.8249 Epoch 2/5 1875/1875 [==============================] - 9s 5ms/step - loss: 0.3761 - accuracy: 0.8645 Epoch 3/5 1875/1875 [==============================] - 9s 5ms/step - loss: 0.3375 - accuracy: 0.8762 Epoch 4/5 1875/1875 [==============================] - 9s 5ms/step - loss: 0.3134 - accuracy: 0.8858 Epoch 5/5 1875/1875 [==============================] - 9s 5ms/step - loss: 0.2932 - accuracy: 0.8921
Out[66]:
<tensorflow.python.keras.callbacks.History at 0xc5f75dfd60>
In [ ]:
06. 정확도 평가¶
<오류발생 : 찾아서 수정 후 진행>¶
test_loss, test_acc = model.evaluate(test_images, test_labels, verbose=2)
print('\ntest accuracy: ', test_acc)
ValueError: Data cardinality is ambiguous:
x sizes: 60000
y sizes: 10000
Make sure all arrays contain the same number of samples.
오류발생 : 데이터에서 x, y의 개수의 차이가 있음.¶
코드를 다시 살펴보면서 개수가 변경되는 부분을 찾을 수 있었음 -> 수정¶
#값의 범위를 0~1사이로 조정
train_images = train_images / 255.0
test_images = train_images / 255.0 # 여기에 오류 있습니다.
In [67]:
# Evaluate on the (now correctly normalized) 10k test images.
test_loss, test_acc = model.evaluate(test_images, test_labels, verbose=2)
print('\ntest accuracy: ', test_acc)
313/313 - 9s - loss: 0.3482 - accuracy: 0.8744 test accuracy: 0.8744000196456909
In [ ]:
07. 예측 만들기¶
In [68]:
# Class-probability predictions for every test image -> shape (10000, 10).
predictions = model.predict(test_images)
In [69]:
predictions[0]
Out[69]:
array([2.4537260e-06, 5.3777743e-08, 9.4284992e-08, 2.8809595e-08, 6.8595035e-08, 5.5387085e-03, 4.8385988e-07, 1.4015856e-01, 2.4620858e-05, 8.5427493e-01], dtype=float32)
In [70]:
np.argmax(predictions[0])
Out[70]:
9
In [71]:
test_labels[0]
Out[71]:
9
In [ ]:
In [74]:
def plot_image(i, predictions_array, true_label, img):
    """Draw test image *i* with its predicted class, confidence and true class.

    The x-label is blue when the prediction matches the true label,
    red otherwise.
    """
    probs = predictions_array[i]
    label = true_label[i]
    picture = img[i]

    plt.grid(False)
    plt.xticks([])
    plt.yticks([])
    plt.imshow(picture, cmap=plt.cm.binary)

    guess = np.argmax(probs)
    color = 'blue' if guess == label else 'red'
    plt.xlabel("{} {:2.0f}% ({})".format(class_names[guess],
                                         100*np.max(probs),
                                         class_names[label]),
               color=color)
In [80]:
def plot_value_array(i, predictions_array, true_label):
    """Bar-plot the 10 class probabilities for sample *i*.

    The predicted bar is colored red, then the true-label bar blue —
    so a correct prediction shows a single blue bar.
    """
    probs = predictions_array[i]
    label = true_label[i]

    plt.grid(False)
    plt.xticks([])
    plt.yticks([])
    bars = plt.bar(range(10), probs, color="#777777")
    plt.ylim([0, 1])

    bars[np.argmax(probs)].set_color('red')
    bars[label].set_color('blue')
In [81]:
# Plot the first test sample: image (left) and probability bars (right).
i = 0
plt.figure(figsize=(6, 3))
plt.subplot(1, 2, 1)
plot_image(i, predictions, test_labels, test_images)
plt.subplot(1, 2, 2)
plot_value_array(i, predictions, test_labels)
plt.show()
In [84]:
import random

# Show prediction plots for 5 randomly chosen test samples.
# BUG FIX: random.randint(a, b) is inclusive of BOTH endpoints, so
# randint(0, 10000) could return 10000 — an IndexError for the
# 10,000-element test set (valid indices are 0..9999). randrange's
# upper bound is exclusive, which is what we want here.
for _ in range(5):
    i = random.randrange(10000)
    plt.figure(figsize=(6, 3))
    plt.subplot(1, 2, 1)
    plot_image(i, predictions, test_labels, test_images)
    plt.subplot(1, 2, 2)
    plot_value_array(i, predictions, test_labels)
    plt.show()
In [ ]:
In [85]:
# Plot the first 15 test samples in a 5x3 grid: each sample occupies
# two adjacent subplots — the image (odd slot) and its probability
# bar chart (even slot).
num_rows = 5
num_cols = 3
num_images = num_rows * num_cols
plt.figure(figsize=(2*2*num_cols, 2*num_rows))
for i in range(num_images):
    plt.subplot(num_rows, 2*num_cols, 2*i+1)
    plot_image(i, predictions, test_labels, test_images)
    plt.subplot(num_rows, 2*num_cols, 2*i+2)
    plot_value_array(i, predictions, test_labels)
plt.show()
In [ ]:
In [90]:
# Grab one test image; its shape is (28, 28).
img = test_images[0]
print(img.shape)
(28, 28)
In [91]:
# Keras models predict on batches: add a leading batch axis -> (1, 28, 28).
img = (np.expand_dims(img, 0))
print(img.shape)
(1, 28, 28)
In [ ]:
In [92]:
# Predict on the single-image batch; returns a (1, 10) probability array.
predictions_single = model.predict(img)
print(predictions_single)
[[2.4537217e-06 5.3777651e-08 9.4284651e-08 2.8809600e-08 6.8595050e-08 5.5387015e-03 4.8385954e-07 1.4015843e-01 2.4620816e-05 8.5427511e-01]]
In [94]:
# Bar chart for the single prediction, with class names on the x-axis.
plot_value_array(0, predictions_single, test_labels)
_ = plt.xticks(range(10), class_names, rotation=45)
In [ ]:
'machineLearning > tensorflow' 카테고리의 다른 글
tf_OverUnderFitting (0) | 2021.07.03 |
---|---|
tf_carFuelEconomy (1) | 2021.07.02 |
tf_tfHubTextClassification (1) | 2021.07.02 |
tf_basicTextClassification (1) | 2021.07.01 |
tf_mnist (1) | 2021.06.30 |
공지사항
최근에 올라온 글
최근에 달린 댓글
- Total
- Today
- Yesterday
링크
TAG
- 로피탈정리
- ChatGPT
- checkpoint
- programmers.co.kr
- 도함수
- LLM
- 미분
- Python
- FewShot
- 고등학교 수학
- LangChain
- 텐서플로우
- 미분법
- streamlit
- 약수
- 미분계수
- 랭체인
- AI_고교수학
- RAG
- Chatbot
- prompt
- programmers
- TensorFlow
- GPT
- 프로그래머스
- 챗봇
- 변화율
- 파이썬
- image depict
- multi modal
일 | 월 | 화 | 수 | 목 | 금 | 토 |
---|---|---|---|---|---|---|
1 | 2 | 3 | ||||
4 | 5 | 6 | 7 | 8 | 9 | 10 |
11 | 12 | 13 | 14 | 15 | 16 | 17 |
18 | 19 | 20 | 21 | 22 | 23 | 24 |
25 | 26 | 27 | 28 | 29 | 30 | 31 |
글 보관함