forked from puke3615/MachineCoding
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmodel.py
More file actions
52 lines (38 loc) · 1.67 KB
/
model.py
File metadata and controls
52 lines (38 loc) · 1.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
# coding=utf-8
import tensorflow as tf
import numpy as np
import config
def weight_variables(shape):
    """Create a trainable weight variable of the given shape.

    Initialized from a truncated normal distribution with stddev 0.01.
    """
    initial_value = tf.truncated_normal(shape, stddev=0.01)
    return tf.Variable(initial_value)
def bias_variables(shape):
    """Create a trainable bias variable of the given shape, filled with 0.01."""
    initial_value = tf.constant(0.01, shape=shape)
    return tf.Variable(initial_value)
def inference(inputs, depth, batch_size):
    """Build the character-RNN inference graph (TF 1.x style).

    Args:
        inputs: int tensor of token ids, flattened to
            (batch_size * n_steps,).  # assumes ids in [0, depth) -- TODO confirm
        depth: vocabulary size (rows of the embedding table and width of
            the output logits).
        batch_size: number of sequences per batch.

    Returns:
        Tuple (logits, initial_state, last_state) where logits has shape
        (batch_size * n_steps, depth).
    """
    n_hidden = config.N_HIDDEN
    n_layers = config.N_LAYERS
    with tf.device("/cpu:0"):
        # Keep the embedding lookup on CPU (older TF versions lack GPU
        # kernels for some embedding ops).
        embedding = tf.get_variable('embedding', initializer=tf.random_uniform(
            [depth, n_hidden], -1.0, 1.0))
        x = tf.nn.embedding_lookup(embedding, inputs)
    # Unfold: (batch_size * n_steps, n_hidden) -> (batch_size, n_steps, n_hidden)
    x = tf.reshape(x, [batch_size, -1, n_hidden])
    # BUG FIX: the original built the stack as MultiRNNCell([cell] * n_layers),
    # which reuses the SAME BasicLSTMCell object (and therefore the same
    # weights) for every layer, and raises a variable-scope error on newer
    # TF 1.x releases.  Each layer needs its own cell instance.
    cells = [tf.contrib.rnn.BasicLSTMCell(n_hidden, state_is_tuple=True)
             for _ in range(n_layers)]
    cell = tf.contrib.rnn.MultiRNNCell(cells, state_is_tuple=True)
    initial_state = cell.zero_state(batch_size, tf.float32)
    # Run the stacked LSTM over the whole sequence.
    outputs, last_state = tf.nn.dynamic_rnn(cell, x, initial_state=initial_state)
    # Flatten time back out: (batch_size * n_steps, n_hidden)
    x = tf.reshape(outputs, [-1, n_hidden])
    # Output projection: (batch_size * n_steps, n_hidden) -> (..., depth)
    W = weight_variables([n_hidden, depth])
    b = bias_variables([depth])
    x = tf.matmul(x, W) + b
    return x, initial_state, last_state
def get_train_info(logits, labels, learning_rate):
    """Build the training objective.

    Computes mean softmax cross-entropy between `logits` and `labels`
    and minimizes it with Adam.

    Returns:
        Tuple (train_op, loss).
    """
    per_example_loss = tf.nn.softmax_cross_entropy_with_logits(
        labels=labels, logits=logits)
    loss = tf.reduce_mean(per_example_loss)
    optimizer = tf.train.AdamOptimizer(learning_rate)
    train_op = optimizer.minimize(loss)
    return train_op, loss