OneEye/exp/quantization.py

import logging as log
import numpy as np
import scipy.cluster.vq
import cv2 as cv


def kmeans(img):
    # Cluster the image's colors into three groups: black stones, white stones and the empty board.
    arr=np.reshape(img,(-1,3)).astype(float)
    wood=np.array([193,165,116],dtype=float)  # reference color of the bare board wood
    (centers,distortion)=scipy.cluster.vq.kmeans(arr,3)
    log.debug("k-means centers: %s",centers)
    (black,empty,white)=sorted(centers,key=sum)  # order the centers from darkest to brightest
    # If the darkest center is closer to the wood color than to pure black,
    # the image probably contains no black stones; likewise for white.
    if np.linalg.norm(black)>np.linalg.norm(black-wood):
        black=None
    if np.linalg.norm(white-[255,255,255])>np.linalg.norm(white-wood):
        white=None
    log.debug("black, white: %s, %s",black,white)
    return (black,white,centers)


class QuantizedImage:
    # Pixel classes returned by get().
    BLACK=0
    WHITE=1
    EMPTY=2

    def __init__(self,img):
        self.img=self._quantize(img)
        self._mask()

    def transform(self,matrix):
        # Apply a perspective transform to the quantized image and to both stone masks.
        (h,w)=self.img.shape[:2]
        self.img=cv.warpPerspective(self.img,matrix,(w,h))
        self.maskB=cv.warpPerspective(self.maskB,matrix,(w,h))
        self.maskW=cv.warpPerspective(self.maskW,matrix,(w,h))

    def get(self,x,y):
        # Classify the pixel at (x,y) as a black stone, a white stone or an empty point.
        if self.maskB[y,x]: return self.BLACK
        elif self.maskW[y,x]: return self.WHITE
        else: return self.EMPTY

    def _quantize(self,img):
        # Replace every pixel with the nearest of the sampled cluster centers.
        (self._black,self._white,colors)=self._sampleColors(img)
        origShape=img.shape
        data=np.reshape(img,(-1,3))
        (keys,dists)=scipy.cluster.vq.vq(data,colors)
        pixels=np.array([colors[k] for k in keys],dtype=np.uint8).reshape(origShape)
        return pixels

    def _sampleColors(self,rect):
        # Run k-means on the central quarter of the image, which is least likely
        # to contain anything but the board and the stones.
        (h,w)=rect.shape[:2]
        minirect=rect[h//4:3*h//4, w//4:3*w//4]
        return kmeans(minirect)

    def _mask(self):
        # Binary masks of the pixels quantized to the black and white centers,
        # with a +-1 tolerance to absorb the rounding of the centers to uint8.
        unit=np.array([1,1,1],dtype=np.uint8)
        if self._black is not None:
            self.maskB=cv.inRange(self.img,self._black-unit,self._black+unit)
        else:
            self.maskB=np.zeros(self.img.shape[:2],dtype=np.uint8)
        if self._white is not None:
            self.maskW=cv.inRange(self.img,self._white-unit,self._white+unit)
        else:
            self.maskW=np.zeros(self.img.shape[:2],dtype=np.uint8)
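

# A minimal usage sketch, not part of the original module: it assumes a BGR board
# photo saved as "board.jpg" and, optionally, a 3x3 homography computed elsewhere.
if __name__=="__main__":
    log.basicConfig(level=log.DEBUG)
    img=cv.imread("board.jpg")  # hypothetical input file
    q=QuantizedImage(img)
    # q.transform(homography)  # rectify the board with a precomputed 3x3 matrix
    (h,w)=q.img.shape[:2]
    print(q.get(w//2,h//2))  # 0=BLACK, 1=WHITE, 2=EMPTY at the image center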