Changeset 9483b964f560 (branch: default, not reviewed)
Laman, 2019-05-05 13:03:27

saving and loading prepared data
3 files changed with 33 insertions and 14 deletions:
exp/kerokero/prepare_data.py
 
@@ -113,6 +113,11 @@ def show(img,filename="x"):
 
 
 if __name__=="__main__":
-	root=sys.argv[1]
-	for d in traverseDirs(root):
-		harvestDir(d)
+	((trainImages,trainLabels),(testImages,testLabels))=loadDataset(sys.argv[1])
+	np.savez_compressed(
+		sys.argv[2],
+		trainImages=trainImages,
+		trainLabels=trainLabels,
+		testImages=testImages,
+		testLabels=testLabels
+	)
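
The hunk above writes the prepared arrays once into a single compressed .npz archive, and the two scripts below read them back with np.load instead of re-parsing the raw data directory. A minimal, self-contained sketch of that round-trip (the /tmp/dataset.npz path and the stand-in array shapes are illustrative only; the key names are the ones used in the diff):

import numpy as np

# stand-in arrays; in the changeset they come from loadDataset()
trainImages=np.zeros((10,224,224),dtype=np.float32)
trainLabels=np.zeros((10,8),dtype=np.float32)

# save side (prepare_data.py): each keyword argument becomes one named array in the archive
np.savez_compressed("/tmp/dataset.npz",trainImages=trainImages,trainLabels=trainLabels)

# load side (train.py, test.py): np.load returns a lazy NpzFile,
# so the with-block keeps the file open only while the arrays are read
with np.load("/tmp/dataset.npz") as data:
	trainImages=data["trainImages"]
	trainLabels=data["trainLabels"]

This trades a one-time export step for faster start-up of train.py and test.py, which no longer depend on the raw data directory at all.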
exp/kerokero/test.py
 
@@ -10,13 +10,17 @@ from analyzer.corners import Corners
 
 parser=argparse.ArgumentParser()
 parser.add_argument("model")
-parser.add_argument("data_dir")
+parser.add_argument("data")
 args=parser.parse_args()
 
 model=load_model(args.model)
 
 print("loading data...")
-((trainImages,trainLabels),(testImages,testLabels))=loadDataset(args.data_dir)
+with np.load(args.data) as data:
+	trainImages=data["trainImages"]
+	trainLabels=data["trainLabels"]
+	testImages=data["testImages"]
+	testLabels=data["testLabels"]
 print("done")
 
 for img in testImages:
exp/kerokero/train.py
 
 import argparse
+import logging as log
 
+import numpy as np
 from keras.layers import Conv2D,Dropout,Dense,Flatten,MaxPooling2D,BatchNormalization
 from keras.models import Sequential,load_model
 
-from prepare_data import loadDataset
+import ftp
 
+log.basicConfig(level=log.INFO,format="%(asctime)s %(levelname)s: %(message)s")
 
 parser=argparse.ArgumentParser()
-parser.add_argument("data_dir")
+parser.add_argument("data")
 parser.add_argument("--load_model")
 parser.add_argument("--save_model",default="/tmp/gogo-{0:03}.h5")
 parser.add_argument("--epochs",type=int,default=100)
 
@@ -55,10 +58,10 @@ def createCNN():
 	
 	model.add(Flatten())
 	
-	model.add(Dense(500, activation="relu"))
+	model.add(Dense(500,activation="relu"))
 	model.add(Dropout(0.1))
 	
-	model.add(Dense(128, activation="relu"))
+	model.add(Dense(128,activation="relu"))
 	model.add(Dropout(0.1))
 	
 	model.add(Dense(8))
 
@@ -71,11 +74,18 @@ model=createCNN()
 if args.load_model:
 	model=load_model(args.load_model)
 
-print("loading data...")
-((trainImages,trainLabels),(testImages,testLabels))=loadDataset(args.data_dir)
-print("done")
+log.info("loading data...")
+with np.load(args.data) as data:
+	trainImages=data["trainImages"]
+	trainLabels=data["trainLabels"]
+	testImages=data["testImages"]
+	testLabels=data["testLabels"]
+log.info("done")
 
 for i in range(args.initial_epoch,args.epochs//10):
 	model.fit(trainImages.reshape((-1,224,224,1)),trainLabels,epochs=(i+1)*10,initial_epoch=i*10,batch_size=128,validation_split=0.2)
-	model.save(args.save_model.format(i+1))
-print(model.evaluate(testImages,testLabels))
+	path=args.save_model.format(i+1)
+	log.info("saving model...")
+	model.save(path)
+	ftp.push(path)
+log.info(model.evaluate(testImages,testLabels))