Saving the Model    Created: 2021-04-23
Updated: 2021-04-23


    The model is intended (given its meaning) as a "neural network".
    It was built with the following four-layer structure:
      >>> model = tf.keras.Sequential([
      ...   preprocessing_layer,
      ...   tf.keras.layers.Dense(128, activation='relu'),
      ...   tf.keras.layers.Dense(128, activation='relu'),
      ...   tf.keras.layers.Dense(1),
      ... ])
      >>> model.compile(
      ...     loss=tf.keras.losses.BinaryCrossentropy(from_logits=True),
      ...     optimizer='adam',
      ...     metrics=['accuracy'])
      >>>
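
    Because the last Dense(1) layer outputs a raw logit (the loss is built with
    from_logits=True), a prediction only becomes a survival probability after an
    explicit sigmoid. A minimal sketch, assuming the model has already been trained
    and the preprocessed test dataset from the earlier data-loading steps is
    available as test_data (both are assumptions, not part of the session shown):

      >>> # Hypothetical example: convert the model's logits into probabilities.
      >>> predictions = model.predict(test_data)
      >>> probabilities = tf.sigmoid(predictions)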

    After training, the model now looks like this:
      >>> model.summary()
      Model: "sequential"
      _________________________________________________________________
      Layer (type)                 Output Shape              Param #
      =================================================================
      dense_features_2 (DenseFeatu multiple                  0
      _________________________________________________________________
      dense (Dense)                multiple                  3200
      _________________________________________________________________
      dense_1 (Dense)              multiple                  16512
      _________________________________________________________________
      dense_2 (Dense)              multiple                  129
      =================================================================
      Total params: 19,841
      Trainable params: 19,841
      Non-trainable params: 0
      _________________________________________________________________
      >>>
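
    As a sanity check on the Param # column above: the DenseFeatures layer emits
    24 values per example (4 normalized numeric features plus 2+3+10+3+2 = 20
    one-hot categorical indicators), and each Dense layer adds weights plus
    biases. A quick recalculation (not part of the original session):

      >>> 24 * 128 + 128      # dense
      3200
      >>> 128 * 128 + 128     # dense_1
      16512
      >>> 128 * 1 + 1         # dense_2
      129
      >>> 3200 + 16512 + 129  # Total params
      19841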

    Save this model to a file whose name has the ".h5" extension:
      >>> model.save('[path]/tf_data.h5')
      Traceback (most recent call last):
        File "<stdin>", line 1, in <module>
        File "/home/pi/venv/lib/python3.7/site-packages/tensorflow_core/python/keras/engine/network.py", line 986, in save
          signatures, options)
        File "/home/pi/venv/lib/python3.7/site-packages/tensorflow_core/python/keras/saving/save.py", line 112, in save_model
          model, filepath, overwrite, include_optimizer)
        File "/home/pi/venv/lib/python3.7/site-packages/tensorflow_core/python/keras/saving/hdf5_format.py", line 99, in save_model_to_hdf5
          model_metadata = saving_utils.model_metadata(model, include_optimizer)
        File "/home/pi/venv/lib/python3.7/site-packages/tensorflow_core/python/keras/saving/saving_utils.py", line 162, in model_metadata
          model_config['config'] = model.get_config()
        File "/home/pi/venv/lib/python3.7/site-packages/tensorflow_core/python/keras/engine/sequential.py", line 347, in get_config
          layer_configs.append(generic_utils.serialize_keras_object(layer))
        File "/home/pi/venv/lib/python3.7/site-packages/tensorflow_core/python/keras/utils/generic_utils.py", line 197, in serialize_keras_object
          config = instance.get_config()
        File "/home/pi/venv/lib/python3.7/site-packages/tensorflow_core/python/feature_column/feature_column_v2.py", line 439, in get_config
          self._feature_columns)
        File "/home/pi/venv/lib/python3.7/site-packages/tensorflow_core/python/feature_column/serialization.py", line 166, in serialize_feature_columns
          return [serialize_feature_column(fc) for fc in feature_columns]
        File "/home/pi/venv/lib/python3.7/site-packages/tensorflow_core/python/feature_column/serialization.py", line 166, in <listcomp>
          return [serialize_feature_column(fc) for fc in feature_columns]
        File "/home/pi/venv/lib/python3.7/site-packages/tensorflow_core/python/feature_column/serialization.py", line 88, in serialize_feature_column
          fc.__class__.__name__, fc.get_config())  # pylint: disable=protected-access
        File "/home/pi/venv/lib/python3.7/site-packages/tensorflow_core/python/feature_column/feature_column_v2.py", line 2870, in get_config
          self.normalizer_fn)
        File "/home/pi/venv/lib/python3.7/site-packages/tensorflow_core/python/keras/utils/generic_utils.py", line 212, in serialize_keras_object
          raise ValueError('Cannot serialize', instance)
      ValueError: ('Cannot serialize', functools.partial(<function normalize_numeric_data at 0x64b42420>, mean=array([29.631,  0.545,  0.38 , 34.385]), std=array([12.512,  1.151,  0.793, 54.598])))

    From another shell in the terminal, confirm that the file exists:
      (venv) $ ls -la tf_data.h5
      -rw-r--r-- 1 pi pi 1248728 Apr 19 13:19 tf_data.h5
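
    The ValueError above is raised because the numeric feature column carries a
    functools.partial normalizer (normalize_numeric_data with the mean/std baked
    in), which Keras cannot serialize into the HDF5 layer config. One possible
    workaround, shown here only as a sketch, is to save the weights alone; the
    file name tf_data_weights.h5 is just an example, and the architecture then
    has to be rebuilt in code before the weights can be restored:

      >>> # Hypothetical alternative: weights-only save, which skips the
      >>> # layer-config serialization that failed above.
      >>> model.save_weights('./tf_data_weights.h5')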


    If you want to pause the work done so far in the Python interactive shell, this is the place to break off:
      >>> quit()
      (venv) $ deactivate
      $

    Resuming the work starts with loading the model back in:
      $ source [path to venv]/venv/bin/activate
      (venv) $ python
      >>> from tensorflow.python.keras.models import load_model
      >>> model = load_model('./tf_data.h5')
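
    As a quick check that the reloaded model matches the trained one, its summary
    should show the same layers and the same 19,841 parameters. (If the
    weights-only route sketched earlier was taken instead, the same model-building
    code has to be run again, and the model typically needs to see one batch of
    data so that its weights exist, before model.load_weights('./tf_data_weights.h5')
    can restore them.)

      >>> model.summary()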


    Putting everything up to saving the model into a single program:

    $ vi tf_data.py
    #!/usr/bin/env python
    import tensorflow as tf
    tf.enable_eager_execution()

    # data-load
    train_file_path = "/home/pi/.keras/datasets/train.csv"
    test_file_path = "/home/pi/.keras/datasets/eval.csv"

    LABEL_COLUMN = 'survived'

    def get_dataset(file_path, **kwargs):
        dataset = tf.data.experimental.make_csv_dataset(
            file_path,
            batch_size=5,
            label_name=LABEL_COLUMN,
            na_value="?",
            num_epochs=1,
            ignore_errors=True,
            **kwargs)
        return dataset

    raw_train_data = get_dataset(train_file_path)
    raw_test_data = get_dataset(test_file_path)

    # data-preprocessing
    import numpy as np
    np.set_printoptions(precision=3, suppress=True)

    # Pack the numeric columns into a single 'numeric' feature vector.
    class PackNumericFeatures(object):
        def __init__(self, names):
            self.names = names

        def __call__(self, features, labels):
            numeric_features = [features.pop(name) for name in self.names]
            numeric_features = [tf.cast(feat, tf.float32) for feat in numeric_features]
            numeric_features = tf.stack(numeric_features, axis=-1)
            features['numeric'] = numeric_features
            return features, labels

    NUMERIC_FEATURES = ['age', 'n_siblings_spouses', 'parch', 'fare']

    packed_train_data = raw_train_data.map(PackNumericFeatures(NUMERIC_FEATURES))
    packed_test_data = raw_test_data.map(PackNumericFeatures(NUMERIC_FEATURES))

    # Normalize the numeric features with the training set's mean/std.
    import pandas as pd
    desc = pd.read_csv(train_file_path)[NUMERIC_FEATURES].describe()
    MEAN = np.array(desc.T['mean'])
    STD = np.array(desc.T['std'])

    def normalize_numeric_data(data, mean, std):
        return (data - mean) / std

    import functools
    normalizer = functools.partial(normalize_numeric_data, mean=MEAN, std=STD)
    numeric_column = tf.feature_column.numeric_column(
        'numeric', normalizer_fn=normalizer, shape=[len(NUMERIC_FEATURES)])
    numeric_columns = [numeric_column]

    # One-hot encode the categorical features.
    CATEGORIES = {
        'sex': ['male', 'female'],
        'class': ['First', 'Second', 'Third'],
        'deck': ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J'],
        'embark_town': ['Cherbourg', 'Southhampton', 'Queenstown'],
        'alone': ['y', 'n']
    }

    categorical_columns = []
    for feature, vocab in CATEGORIES.items():
        cat_col = tf.feature_column.categorical_column_with_vocabulary_list(
            key=feature, vocabulary_list=vocab)
        categorical_columns.append(tf.feature_column.indicator_column(cat_col))

    preprocessing_layer = tf.keras.layers.DenseFeatures(categorical_columns + numeric_columns)

    # model setup
    model = tf.keras.Sequential([
        preprocessing_layer,
        tf.keras.layers.Dense(128, activation='relu'),
        tf.keras.layers.Dense(128, activation='relu'),
        tf.keras.layers.Dense(1),
    ])
    model.compile(
        loss=tf.keras.losses.BinaryCrossentropy(from_logits=True),
        optimizer='adam',
        metrics=['accuracy'])

    # training
    train_data = packed_train_data.shuffle(500)
    test_data = packed_test_data
    model.fit(train_data, epochs=20)

    # save
    model.save('./tf_data.h5')

    $ chmod +x tf_data.py

    $ ./tf_data.py
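
    To see how well the trained model does on the held-out data, an evaluation
    step could be appended to the script. A minimal sketch (an addition, not part
    of the original program; test_data is the packed test dataset the script
    already builds):

      # evaluate (hypothetical addition)
      test_loss, test_accuracy = model.evaluate(test_data)
      print('Test loss: {:.4f}, accuracy: {:.4f}'.format(test_loss, test_accuracy))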