The API has changed quite a bit, so now you can do the following (the official example is shown here):
import tensorflow as tf
from sklearn import datasets, metrics
def train_input_fn(features, labels, batch_size):
    # build a tf.data pipeline: shuffle, repeat indefinitely, and batch
    dataset = tf.data.Dataset.from_tensor_slices((dict(features), labels))
    return dataset.shuffle(1000).repeat().batch(batch_size)
iris = datasets.load_iris()
train_x = {
    '0': iris.data[:, 0],
    '1': iris.data[:, 1],
    '2': iris.data[:, 2],
    '3': iris.data[:, 3],
}
# one numeric feature column per key in train_x
my_feature_columns = []
for key in train_x.keys():
    my_feature_columns.append(tf.feature_column.numeric_column(key=key))
clf = tf.estimator.DNNClassifier(hidden_units=[10, 20, 10], feature_columns=my_feature_columns, n_classes=3)
clf.train(input_fn=lambda: train_input_fn(train_x, iris.target, 32), steps=10000)
# for prediction, do not shuffle or repeat, otherwise the outputs no longer line up with iris.target
def predict_input_fn(features, batch_size):
    return tf.data.Dataset.from_tensor_slices(dict(features)).batch(batch_size)

preds = []
for p in clf.predict(input_fn=lambda: predict_input_fn(train_x, 32)):
    preds.append(p['class_ids'][0])
print(metrics.accuracy_score(iris.target, preds))
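For completeness, the Estimator API can also report accuracy directly via evaluate(), which avoids the manual prediction loop. Here is a minimal sketch (the eval_input_fn helper is just an illustrative name), reusing clf, train_x and iris from above:

def eval_input_fn(features, labels, batch_size):
    # no shuffle/repeat here, so evaluate() makes exactly one pass over the data
    dataset = tf.data.Dataset.from_tensor_slices((dict(features), labels))
    return dataset.batch(batch_size)

# DNNClassifier's evaluate() returns a dict of metrics, including 'accuracy'
eval_result = clf.evaluate(input_fn=lambda: eval_input_fn(train_x, iris.target, 32))
print(eval_result['accuracy'])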
But nowadays it is better to use the TF Keras API, like this:
import tensorflow as tf
from sklearn import datasets, metrics
iris = datasets.load_iris()
clf = tf.keras.models.Sequential([
    tf.keras.layers.Dense(10, activation='sigmoid'),
    tf.keras.layers.Dense(20, activation='sigmoid'),
    tf.keras.layers.Dense(10, activation='sigmoid'),
    tf.keras.layers.Dense(3, activation='softmax'),
])
# iris.target holds integer class ids, so use the sparse loss rather than categorical_crossentropy
clf.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
clf.fit(iris.data, iris.target, batch_size=32, epochs=100)  # the default is a single epoch, which barely trains
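To get the same kind of accuracy figure as in the Estimator example, the Keras model can be evaluated or used for prediction directly on the NumPy arrays. A minimal sketch, reusing clf, iris and the sklearn metrics import from above (the numpy import is the only extra dependency):

import numpy as np

# evaluate() returns the loss followed by the compiled metrics, here just accuracy
loss, acc = clf.evaluate(iris.data, iris.target, batch_size=32)
print(acc)

# or mirror the Estimator example: predict class probabilities and take the argmax
preds = np.argmax(clf.predict(iris.data), axis=1)
print(metrics.accuracy_score(iris.target, preds))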