Location: OneEye/exp/dochi.py

"""Hakugen detection module.

Locates the four corners of a grid in a photo and warps the image to a square,
axis-aligned crop.

Usage: dochi.py -i IMAGE [IMAGE ...] -o OUTPUT_DIR
"""
import os
import time
import argparse
import logging as log

import cv2 as cv
import numpy as np
import keras
from keras.models import load_model
from PIL import Image

import exp_config as cfg
from kerokero.k_util import averageDistance

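# register the custom metric under the name it was serialized with, so that
# load_model can resolve "averageDistance" when deserializing the model;
# passing custom_objects={"averageDistance": averageDistance} to load_model
# would work as well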
keras.losses.averageDistance=averageDistance
keras.metrics.averageDistance=averageDistance

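# load the trained grid-corner model once at import time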
model=load_model(cfg.dochiModel)
SIDE=224


def locateGrid(img):
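	"""Locate the four corners of the grid in a PIL image.

	Returns a list of four [x,y] points in pixel coordinates of the original image.
	"""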
	t1=time.time()
	(width,height)=img.size
	normedImg=img.convert("L").resize((224,224),resample=Image.BILINEAR)
	npImg=np.array(normedImg.getdata()).reshape((224,224,1)).astype(np.float32)
	npImg=npImg/128-1

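	# the model outputs four corner points with coordinates normalized to [-1,1];
	# scale them back to the original image's pixel range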
	label=model.predict(np.reshape(npImg,(1,224,224,1)))
	points=[]
	for i in range(4):
		points.append([(label[0][i][0]+1)*(width/2),(label[0][i][1]+1)*(height/2)])
	t=time.time()-t1
	log.info("grid located in {0:.3}s".format(t))
	return points


if __name__=="__main__":
	parser=argparse.ArgumentParser()
	parser.add_argument("-i","--input",nargs="+")
	parser.add_argument("-o","--output_dir",required=True)
	args=parser.parse_args()
	os.makedirs(args.output_dir,exist_ok=True)  # cv.imwrite fails silently when the directory is missing

	for image_path in args.input:
		image=Image.open(image_path).convert("RGB")  # ensure 3 RGB channels for the BGR conversion below
		points=locateGrid(image)
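		# map the detected corners onto a SIDE x SIDE square, keeping a 10% margin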
		x1=SIDE*0.1
		x2=SIDE*0.9
		destPoints=[(x1,x1),(x1,x2),(x2,x2),(x2,x1)]
		m=cv.getPerspectiveTransform(np.float32(points),np.float32(destPoints))
		img=cv.warpPerspective(np.uint8(image),m,(SIDE,SIDE))
		img=cv.cvtColor(img,cv.COLOR_RGB2BGR)  # PIL arrays are RGB, cv.imwrite expects BGR
		cv.imwrite(os.path.join(args.output_dir,os.path.basename(image_path)),img)