Training a model to recognize annotations in text.

train.py

# Convolutional Neural Network

# Installing Theano
# pip install --upgrade --no-deps git+git://github.com/Theano/Theano.git
# Installing Tensorflow
# pip install tensorflow
# Installing Keras
# pip install --upgrade keras

# Part 1 - Building the CNN

# Importing the Keras libraries and packages
from keras.models import Sequential
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
from keras.layers import Flatten
from keras.layers import Dense

# Initialising the CNN
classifier = Sequential()

# Step 1 - Convolution
classifier.add(Conv2D(32, (3, 3), input_shape=(64, 64, 3), activation='relu'))

# Step 2 - Pooling
classifier.add(MaxPooling2D(pool_size=(2, 2)))

# Adding a second convolutional layer
classifier.add(Conv2D(32, (3, 3), activation='relu'))
classifier.add(MaxPooling2D(pool_size=(2, 2)))

# Step 3 - Flattening
classifier.add(Flatten())

# Step 4 - Full connection
classifier.add(Dense(units=128, activation='relu'))
classifier.add(Dense(units=1, activation='sigmoid'))

# Compiling the CNN
classifier.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

# Part 2 - Fitting the CNN to the images
from keras.preprocessing.image import ImageDataGenerator

train_datagen = ImageDataGenerator(rescale=1./255,
                                   shear_range=0.2,
                                   zoom_range=0.2,
                                   horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1./255)

training_set = train_datagen.flow_from_directory('dataset/training_set',
                                                 target_size=(64, 64),
                                                 batch_size=32,
                                                 class_mode='binary')
test_set = test_datagen.flow_from_directory('dataset/test_set',
                                            target_size=(64, 64),
                                            batch_size=32,
                                            class_mode='binary')

classifier.fit_generator(training_set,
                         steps_per_epoch=500,
                         epochs=1,
                         validation_data=test_set,
                         validation_steps=100)

# Serialize the model architecture to YAML
model_yaml = classifier.to_yaml()
with open("model.yaml", "w") as yaml_file:
    yaml_file.write(model_yaml)

# Serialize the trained weights to HDF5
classifier.save_weights("model.h5")
print("Saved model to disk")