Changeset - 247811dfb9be
[Not reviewed] | branch: default
Laman - 2019-05-14 16:12:12 (6 years ago)

data augmentation
3 files changed with 68 insertions and 23 deletions:
0 comments (0 inline, 0 general)
exp/kerokero/k_util.py
 
 import math
 
-import keras.backend as K
+import random
 
-def singleUnorderedLoss(yTrue,yPred):
-	d1=sum(math.sqrt(min((yTrue[i*2]-yPred[j*2])**2+(yTrue[i*2+1]-yPred[j*2+1])**2 for j in range(4))) for i in range(4))
-	d2=sum(math.sqrt(min((yTrue[i*2]-yPred[j*2])**2+(yTrue[i*2+1]-yPred[j*2+1])**2 for i in range(4))) for j in range(4))
-	return (d1+d2)/2
+import numpy as np
+import keras.backend as K
 
 
 def averageDistance(yTrue,yPred):
 	squares=K.square(yTrue-yPred)
 	distances=K.sqrt(K.sum(squares,-1))
 	return K.mean(distances,-1)
 
+
+def transform(image,label):
+	if random.choice((True,False)):
+		image=image[::-1] # reflect vertically
+		label[...,1]*=-1 # reflect y values
+		label=label[::-1,...] # switch back to counter-clockwise order
+
+	rot=random.randint(0,3)
+	image=np.rot90(image,rot)
+	if rot==1:
+		label=label[...,::-1]
+		label[...,1]*=-1
+	elif rot==2:
+		label*=-1
+	elif rot==3:
+		label=label[...,::-1]
+		label[...,0]*=-1
+
+	k=0
+	val=2
+	for (i,point) in enumerate(label): # rotate the upper-leftmost point to the first position
+		v=sum(point)
+		if v<val:
+			k=i
+			val=v
+	label=np.concatenate((label[k:],label[:k]))
+
+	return (image,label)
+
+
+def generateData(images,labels,batch_size=32):
+	n=len(images)
+	keys=list(range(n))
+	while True:
+		random.shuffle(keys)
+		for i in range(0,n,batch_size):
+			ks=keys[i:i+batch_size]
+			imgs=images[ks]
+			labs=labels[ks]
+			for j in range(len(ks)):
+				(imgs[j],labs[j])=transform(imgs[j],labs[j])
+			yield (imgs,labs)
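
For orientation, a minimal smoke test for the two new helpers. It assumes the (4,2) labels hold corner coordinates normalized to roughly [-1,1] and centred on the image, which is what the rot==2 branch (label*=-1) and the initial val=2 suggest; the array sizes and corner values below are illustrative, not taken from the repository.

import numpy as np
from k_util import transform,generateData

# eight dummy grayscale images, four corner points per label in [-1,1] (ordering convention assumed)
images=np.zeros((8,224,224,1),dtype=np.float32)
corners=np.float32([[-0.5,-0.5],[0.5,-0.5],[0.5,0.5],[-0.5,0.5]])
labels=np.tile(corners,(8,1,1))

(img,lab)=transform(images[0].copy(),labels[0].copy())
assert img.shape==(224,224,1) # flips and rot90 keep the square shape
assert lab.shape==(4,2) and np.all(np.abs(lab)<=1) # coordinates stay in range
assert np.argmin(lab.sum(axis=1))==0 # upper-leftmost corner rotated to the front

(batchImages,batchLabels)=next(generateData(images,labels,batch_size=4))
assert batchImages.shape==(4,224,224,1) and batchLabels.shape==(4,4,2)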
exp/kerokero/test.py
 
@@ -3,13 +3,12 @@ import logging as log
 
 import numpy as np
 from keras.models import load_model
 import keras.losses
 import keras.metrics
 
 from prepare_data import loadDataset,Sample
 from analyzer.epoint import EPoint
 from analyzer.corners import Corners
-from k_util import averageDistance
+from k_util import averageDistance,generateData
 import config as cfg
 
 keras.losses.averageDistance=averageDistance
@@ -25,11 +24,11 @@ model.summary()
 
 log.info("loading data...")
 with np.load(args.data) as data:
-	testImages=data["testImages"]
-	testLabels=data["testLabels"]
+	testImages=data["testImages"].reshape((-1,224,224,1))
+	testLabels=data["testLabels"].reshape((-1,4,2))
 log.info("done")
 
-log.info(model.evaluate(testImages.reshape((-1,224,224,1)),testLabels.reshape((-1,4,2))))
+log.info(model.evaluate(testImages,testLabels))
 
 for img in testImages:
 	label=model.predict(np.reshape(img,(1,224,224,1)))
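
The reshape now happens once when the test set is loaded, so model.evaluate can take the arrays as-is. A small sketch of what the two reshapes produce; the sample count and the flattened storage layout are assumptions, only the target shapes and key names come from the diff.

import numpy as np

N=10 # illustrative sample count
flatImages=np.zeros((N,224*224),dtype=np.float32) # hypothetical stored layout
flatLabels=np.zeros((N,8),dtype=np.float32)

testImages=flatImages.reshape((-1,224,224,1)) # N single-channel 224x224 images
testLabels=flatLabels.reshape((-1,4,2)) # N sets of four (x,y) corner points
assert testImages.shape==(N,224,224,1) and testLabels.shape==(N,4,2)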
exp/kerokero/train.py
 
 import os
+import math
 from time import time
 import argparse
 import logging as log
@@ -11,7 +12,7 @@ import keras.losses
 import keras.metrics
 
 import config as cfg
-from k_util import averageDistance
+from k_util import averageDistance,generateData
 
 keras.losses.averageDistance=averageDistance
 keras.metrics.averageDistance=averageDistance
@@ -78,23 +79,29 @@ if args.load_model:
 
 log.info("loading data...")
 with np.load(args.data) as data:
-	trainImages=data["trainImages"]
-	trainLabels=data["trainLabels"]
-	testImages=data["testImages"]
-	testLabels=data["testLabels"]
+	trainImages=data["trainImages"].reshape((-1,224,224,1))
+	trainLabels=data["trainLabels"].reshape((-1,4,2))
+	testImages=data["testImages"].reshape((-1,224,224,1))
+	testLabels=data["testLabels"].reshape((-1,4,2))
 log.info("done")
 
+n=len(trainImages)
+k=round(n*0.9)
+n_=n-k
+(trainImages,valImages)=(np.float32(trainImages[:k]),np.float32(trainImages[k:]))
+(trainLabels,valLabels)=(np.float32(trainLabels[:k]),np.float32(trainLabels[k:]))
+
 tensorboard=TensorBoard(log_dir=os.path.join(args.log_dir,"{}".format(time())))
 checkpoint=ModelCheckpoint(args.save_model,monitor="val_loss",period=10)
 
-model.fit(
-	trainImages.reshape((-1,224,224,1)),
-	trainLabels.reshape((-1,4,2)),
+model.fit_generator(
+	generateData(trainImages,trainLabels,batch_size=20),
 	epochs=args.epochs,
 	initial_epoch=args.initial_epoch,
-	batch_size=20,
-	validation_split=0.2,
+	steps_per_epoch=math.ceil(k/20),
+	validation_data=generateData(valImages,valLabels,batch_size=20),
+	validation_steps=math.ceil(n_/20),
 	callbacks=[tensorboard,checkpoint]
 )
 
-log.info(model.evaluate(testImages.reshape((-1,224,224,1)),testLabels.reshape((-1,4,2))))
+log.info(model.evaluate(testImages,testLabels))
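
Since fit_generator never sees the dataset size, the step counts have to encode it. A worked example of the arithmetic, assuming the 90/10 split and batch size 20 from the diff; the total sample count is made up.

import math

n=1000 # hypothetical number of training samples
k=round(n*0.9) # 900 samples feed generateData(trainImages,...)
n_=n-k # 100 samples feed generateData(valImages,...)

steps_per_epoch=math.ceil(k/20) # 45 training batches cover the split once per epoch
validation_steps=math.ceil(n_/20) # 5 validation batches per epoch
assert (steps_per_epoch,validation_steps)==(45,5)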