
model.py 2.7KB

import tensorflow as tf

import params

print_layer = True

# Input placeholders: image batch, scalar regression target, and dropout rate.
x = tf.placeholder(tf.float32, shape=[None, params.network_height, params.img_width, params.img_channels])
y_ = tf.placeholder(tf.float32, shape=[None, 1])
keep_prob = tf.placeholder(tf.float32)

x_image = tf.reshape(x, [-1, params.network_height, params.img_width, params.img_channels])

# Conv layer #1: 7x7/2 conv -> 3x3/2 max pool -> batch norm
network = tf.layers.conv2d(x_image, filters=64, kernel_size=(7, 7), strides=(2, 2),
                           padding='same', activation=tf.nn.relu, use_bias=False, name="conv_1")
if print_layer:
    print(network)
network = tf.layers.max_pooling2d(network, pool_size=(3, 3), strides=2, padding='same',
                                  name="m_pooling_1")
if print_layer:
    print(network)
network = tf.layers.batch_normalization(network, name="norm_1")
if print_layer:
    print(network)

# Conv layer #2: 5x5/1 conv -> 3x3/2 max pool -> batch norm
network = tf.layers.conv2d(network, filters=128, kernel_size=(5, 5), strides=(1, 1),
                           padding='same', activation=tf.nn.relu, use_bias=False, name="conv_2")
if print_layer:
    print(network)
network = tf.layers.max_pooling2d(network, pool_size=(3, 3), strides=2, padding='same',
                                  name="m_pooling_2")
if print_layer:
    print(network)
network = tf.layers.batch_normalization(network, name="norm_2")
if print_layer:
    print(network)

# Conv layer #3: three 3x3/1 convs -> 3x3/2 max pool -> batch norm
network = tf.layers.conv2d(network, filters=256, kernel_size=(3, 3), strides=(1, 1),
                           padding='same', activation=tf.nn.relu, use_bias=True, name="conv_3_1")
if print_layer:
    print(network)
network = tf.layers.conv2d(network, filters=256, kernel_size=(3, 3), strides=(1, 1),
                           padding='same', activation=tf.nn.relu, use_bias=True, name="conv_3_2")
if print_layer:
    print(network)
network = tf.layers.conv2d(network, filters=16, kernel_size=(3, 3), strides=(1, 1),
                           padding='same', activation=tf.nn.relu, use_bias=False, name="conv_3_3")
if print_layer:
    print(network)
network = tf.layers.max_pooling2d(network, pool_size=(3, 3), strides=2, padding='same',
                                  name="m_pooling_3")
if print_layer:
    print(network)
network = tf.layers.batch_normalization(network, name="norm_3")
if print_layer:
    print(network)

# Flatten: 1280 = 16 output channels times the spatial cells left after the
# stride-2 conv and three stride-2 pools, so it only holds for the input
# dimensions defined in params.
network = tf.reshape(network, [-1, 1280], name="flatten")
if print_layer:
    print(network)

# Dense layer 1
network = tf.layers.dense(network, 4096, activation=tf.nn.relu, name="dense_1")
if print_layer:
    print(network)
# Note: tf.layers.dropout treats `rate` as the fraction of units to *drop*
# (despite the placeholder's name), and with the default training=False these
# dropout layers act as the identity.
network = tf.layers.dropout(network, rate=keep_prob, name="dropout_1")
if print_layer:
    print(network)

# Dense layer 2
network = tf.layers.dense(network, 4096, activation=tf.nn.relu, name="dense_2")
if print_layer:
    print(network)
network = tf.layers.dropout(network, rate=keep_prob, name="dropout_2")
if print_layer:
    print(network)

# Output layer: single unit, activation=None => linear
y = tf.layers.dense(network, 1, activation=None, name="output")
if print_layer:
    print(y)
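
The file only builds the graph; it defines no loss, optimizer, or session. A minimal TensorFlow 1.x training sketch along the following lines could drive it. The mean-squared-error loss, the Adam optimizer, the batch size, and the 0.5 dropout value are illustrative assumptions, not part of this repository; it relies on the same params module that model.py imports.

import numpy as np
import tensorflow as tf

import model   # builds x, y_, keep_prob and the output tensor y at import time
import params

# Regression loss on the single linear output unit (assumed, not from the repo).
loss = tf.reduce_mean(tf.squared_difference(model.y, model.y_))
train_op = tf.train.AdamOptimizer(1e-4).minimize(loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Dummy batch, only to show the feed shapes the placeholders expect.
    batch_x = np.zeros((8, params.network_height, params.img_width, params.img_channels),
                       dtype=np.float32)
    batch_y = np.zeros((8, 1), dtype=np.float32)
    _, batch_loss = sess.run(
        [train_op, loss],
        feed_dict={model.x: batch_x,
                   model.y_: batch_y,
                   model.keep_prob: 0.5})  # fraction of units to drop
    print(batch_loss)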