👨💻 TensorFlow Code Notes
👨🔧 Preparing the Data
# Flatten each image into a column: (m, num_px, num_px, 3) -> (num_px * num_px * 3, m)
X_train_flatten = X_train_orig.reshape(X_train_orig.shape[0], -1).T
X_test_flatten = X_test_orig.reshape(X_test_orig.shape[0], -1).T
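As a quick hedged sketch of what this reshape/transpose produces (the shapes and the /255. scaling step below are assumptions based on the usual 64x64 RGB image setup, not part of the snippet above):

import numpy as np

X_train_orig = np.random.rand(1080, 64, 64, 3)      # hypothetical: 1080 images of 64x64 pixels, 3 channels
X_train_flatten = X_train_orig.reshape(X_train_orig.shape[0], -1).T
print(X_train_flatten.shape)                         # -> (12288, 1080): one flattened image per column
X_train = X_train_flatten / 255.                     # customary next step: scale pixel values to [0, 1]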
✨ Defining the TensorFlow Environment
# GRADED FUNCTION: create_placeholders
def create_placeholders(n_x, n_y):
    """
    Creates the placeholders for the TensorFlow session.

    Arguments:
    n_x -- scalar, size of an image vector (num_px * num_px * 3 = 64 * 64 * 3 = 12288)
    n_y -- scalar, number of classes (from 0 to 5, so -> 6)

    Returns:
    X -- placeholder for the data input, of shape [n_x, None] and dtype "float"
    Y -- placeholder for the input labels, of shape [n_y, None] and dtype "float"

    Tips:
    - Use None because it lets us stay flexible about the number of examples fed to the placeholders.
      In fact, the number of examples during train/test is different.
    """
    ### START CODE HERE ### (approx. 2 lines)
    X = tf.placeholder(dtype=tf.float32, shape=[n_x, None])
    Y = tf.placeholder(dtype=tf.float32, shape=[n_y, None])
    ### END CODE HERE ###
    return X, Y
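A small usage sketch for these placeholders, assuming TensorFlow 1.x (under TensorFlow 2.x the same calls are available as tf.compat.v1); the feed values are made-up random data:

import numpy as np
import tensorflow as tf

X, Y = create_placeholders(12288, 6)
with tf.Session() as sess:
    # Placeholders hold no data until something is fed in at run time via feed_dict
    x_batch = np.random.randn(12288, 32)             # 32 hypothetical examples
    print(sess.run(tf.shape(X), feed_dict={X: x_batch}))   # -> [12288    32]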
🧱 Basic Definitions
# Defining variables (values that TensorFlow can update)
w = tf.Variable(0, dtype=tf.float32)
loss = tf.Variable((y - y_hat)**2, name='loss')
# Defining constants (fixed values)
a = tf.constant(2)
b = tf.constant(10)
X = tf.constant(np.random.randn(3, 1), name="X")
W = tf.constant(np.random.randn(4, 3), name="W")
# A placeholder declares the dtype and shape of a value that will be fed in later
x = tf.placeholder(tf.float32, (3, 1))
# One gradient descent step that minimizes the cost (learning rate 0.01)
train = tf.train.GradientDescentOptimizer(0.01).minimize(cost)
init = tf.global_variables_initializer()  # when init is run later (session.run(init)), the variables are initialized
# Two ways of defining a cost
cost = x[0][0] * w**2 + x[1][0] * w + x[2][0]                         # simple quadratic cost in w
cost = tf.nn.sigmoid_cross_entropy_with_logits(logits=z, labels=y)    # sigmoid cross-entropy from logits z and labels y
# reduce_mean sums the values and takes their average
cost = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits)
)
# Optimizers that aim to minimize the cost
optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cost)
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
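Putting the variable, placeholder, cost and optimizer pieces together, here is a complete runnable sketch (written against the TensorFlow 1.x API used throughout these notes; under TensorFlow 2.x use tf.compat.v1 with eager execution disabled) that minimizes the simple quadratic cost from above:

import numpy as np
import tensorflow as tf

coefficients = np.array([[1.], [-10.], [25.]])       # hypothetical data: cost = w^2 - 10w + 25 = (w - 5)^2
w = tf.Variable(0, dtype=tf.float32)
x = tf.placeholder(tf.float32, (3, 1))
cost = x[0][0] * w**2 + x[1][0] * w + x[2][0]
train = tf.train.GradientDescentOptimizer(0.01).minimize(cost)
init = tf.global_variables_initializer()

with tf.Session() as session:
    session.run(init)                                 # actually creates and initializes the variables
    for _ in range(1000):
        session.run(train, feed_dict={x: coefficients})
    print(session.run(w))                             # -> roughly 5.0, the minimizer of (w - 5)^2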
# One-hot encoding: y = [1, 2, 3, 0] -> columns [0,1,0,0], [0,0,1,0], ... (axis=0 puts one label per column)
one_hot_matrix = tf.one_hot(indices=labels, depth=C, axis=0)
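A short usage sketch for tf.one_hot (the labels, C and session boilerplate here are made-up for illustration); with axis=0 the result has shape (C, number of labels), one label per column:

import numpy as np
import tensorflow as tf

labels = np.array([1, 2, 3, 0, 2, 1])
C = 4                                                 # hypothetical number of classes
one_hot_matrix = tf.one_hot(indices=labels, depth=C, axis=0)
with tf.Session() as sess:
    print(sess.run(one_hot_matrix))
# [[0. 0. 0. 1. 0. 0.]
#  [1. 0. 0. 0. 0. 1.]
#  [0. 1. 0. 0. 1. 0.]
#  [0. 0. 1. 0. 0. 0.]]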