
Commit

Resolving some path issues
philkr committed May 10, 2015
1 parent 69560da commit 73af215
Showing 4 changed files with 41 additions and 29 deletions.
17 changes: 11 additions & 6 deletions src/eval_all.sh
@@ -1,7 +1,12 @@
 #!/bin/bash
+# You can change the python interpreter used by setting the environment variable PYTHON=...
+# Call this script as follows to use python 2.7: PYTHON=python bash eval_all.sh
+P=${PYTHON:=python3}
+cd $( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
 # This script reproduces table 3 in the paper
-python3 train_lpo.py -f0 0.2 ../models/lpo_VOC_0.2.dat
-python3 train_lpo.py -f0 0.1 ../models/lpo_VOC_0.1.dat
-python3 train_lpo.py -f0 0.05 ../models/lpo_VOC_0.05.dat
-python3 train_lpo.py -f0 0.03 ../models/lpo_VOC_0.03.dat
-python3 train_lpo.py -f0 0.02 ../models/lpo_VOC_0.02.dat
-python3 train_lpo.py -f0 0.01 ../models/lpo_VOC_0.01.dat -iou 0.925 # Increase the IoU a bit to make sure the number of proposals match
+$P train_lpo.py -f0 0.2 ../models/lpo_VOC_0.2.dat
+$P train_lpo.py -f0 0.1 ../models/lpo_VOC_0.1.dat
+$P train_lpo.py -f0 0.05 ../models/lpo_VOC_0.05.dat
+$P train_lpo.py -f0 0.03 ../models/lpo_VOC_0.03.dat
+$P train_lpo.py -f0 0.02 ../models/lpo_VOC_0.02.dat
+$P train_lpo.py -f0 0.01 ../models/lpo_VOC_0.01.dat -iou 0.925 # Increase the IoU a bit to make sure the number of proposals match
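Two things change in this header: P=${PYTHON:=python3} lets the caller pick the interpreter (defaulting to python3), and the cd line re-anchors the working directory at src/ so the relative ../models paths resolve no matter where the script is invoked from. For readers more at home in Python, a rough standalone equivalent of the interpreter-selection idiom (illustrative only, not part of this commit):

# Illustrative only -- not part of this commit. Rough Python equivalent
# of P=${PYTHON:=python3} followed by one of the training calls above.
import os
import subprocess

python = os.environ.get('PYTHON', 'python3')  # PYTHON=... overrides the default
subprocess.check_call([python, 'train_lpo.py', '-f0', '0.2', '../models/lpo_VOC_0.2.dat'])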
19 changes: 10 additions & 9 deletions src/propose_hf5.py
@@ -50,8 +50,9 @@
 def generate( prop, over_segs, save_names, segments=True, boxes=False, max_iou=0.9, box_overlap=False ):
     BS = 100
     for i in range(0,len(over_segs),BS):
+        ii = min(N,i+BS)
         props = prop.propose( over_segs[i:i+BS], max_iou, box_overlap )
-        stdout.write('%3.1f%%\r'%(100*(i+BS)/len(over_segs)))
+        stdout.write('%3.1f%%\r'%(100*ii/len(over_segs)))
         for p,fn in zip( props, save_names[i:i+BS] ):
             saveProposalsHDF5( p, fn, segments, boxes )
     print( "done"+" "*10 )
@@ -76,8 +77,7 @@ def generate( prop, over_segs, save_names, segments=True, boxes=False, max_iou=0

 if args.images:
     from time import time
-    detector = contour.MultiScaleStructuredForest()
-    detector.load( "../data/sf.dat" )
+    detector = getDetector('mssf')

     if len(args.images)==1 and not path.exists(args.images[0]):
         from glob import glob
@@ -86,29 +86,30 @@ def generate( prop, over_segs, save_names, segments=True, boxes=False, max_iou=0
     BS,N = 100,len(args.images)
     tl,ts,tp,to = 0,0,0,0
     for i in range(0,N,BS):
+        ii = min(N,i+BS)
         # Load the images
         imgs,names = [],[]
-        stdout.write('Loading %3.1f%% [%0.3fs %0.3fs %0.3fs %0.3fs / im]\r'%(100*(i+BS)/N,tl/(i+1e-3),to/(i+1e-3),tp/(i+1e-3),ts/(i+1e-3)))
+        stdout.write('Loading %3.1f%% [%0.3fs %0.3fs %0.3fs %0.3fs / im]\r'%(100*(ii)/N,tl/(i+1e-3),to/(i+1e-3),tp/(i+1e-3),ts/(i+1e-3)))
         tl -= time()
-        for nm in args.images[i:i+BS]:
+        for nm in args.images[i:ii]:
             names.append( path.splitext(path.basename(nm))[0] )
             imgs.append( imgproc.imread(nm) )
         tl += time()

-        stdout.write('Overseg %3.1f%% [%0.3fs %0.3fs %0.3fs %0.3fs / im]\r'%(100*(i+BS)/N,tl/(i + BS),to/(i+1e-3),tp/(i+1e-3),ts/(i+1e-3)))
+        stdout.write('Overseg %3.1f%% [%0.3fs %0.3fs %0.3fs %0.3fs / im]\r'%(100*(ii)/N,tl/ii,to/(i+1e-3),tp/(i+1e-3),ts/(i+1e-3)))
         to -= time()
         over_segs = segmentation.generateGeodesicKMeans( detector, imgs, 1000 )
         to += time()

-        stdout.write('Propose %3.1f%% [%0.3fs %0.3fs %0.3fs %0.3fs / im]\r'%(100*(i+BS)/N,tl/(i + BS),to/(i + BS),tp/(i+1e-3),ts/(i+1e-3)))
+        stdout.write('Propose %3.1f%% [%0.3fs %0.3fs %0.3fs %0.3fs / im]\r'%(100*(ii)/N,tl/ii,to/ii,tp/(i+1e-3),ts/(i+1e-3)))
         tp -= time()
         props = prop.propose( over_segs )
         tp += time()

-        stdout.write('Saving %3.1f%% [%0.3fs %0.3fs %0.3fs %0.3fs / im]\r'%(100*(i+BS)/N,tl/(i + BS),to/(i + BS),tp/(i + BS),ts/(i+1e-3)))
+        stdout.write('Saving %3.1f%% [%0.3fs %0.3fs %0.3fs %0.3fs / im]\r'%(100*(ii)/N,tl/ii,to/ii,tp/ii,ts/(i+1e-3)))
         ts -= time()
         for p,n in zip( props, names ):
             saveProposalsHDF5( p, args.save_path+str(n)+'.hf5', not args.box or args.bb, args.box or args.bb )
         ts += time()
-    stdout.write('Done %3.1f%% [%0.3fs %0.3fs %0.3fs %0.3fs / im]\n'%(100*(i+BS)/N,tl/(i + BS),to/(i + BS),tp/(i + BS),ts/(i+1e-3)))
+    stdout.write('Done %3.1f%% [%0.3fs %0.3fs %0.3fs %0.3fs / im]\n'%(100*(ii)/N,tl/ii,to/ii,tp/ii,ts/ii))
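The same ii also replaces the i + BS denominators in the per-phase timing lines: dividing each accumulated timer by the number of images actually processed keeps the reported seconds-per-image honest on the last partial batch, while the i+1e-3 terms still guard phases that have not run yet against division by zero. A toy sketch of the running-average pattern (not from the repo):

# Toy sketch (not from the repo): a running per-item average for one phase.
from time import time

N, BS, tl = 250, 100, 0.0
for i in range(0, N, BS):
    ii = min(N, i + BS)
    tl -= time()
    _ = [x * x for x in range(i, ii)]   # stand-in for loading images i..ii-1
    tl += time()
    print('%d/%d done, %.4fs per item' % (ii, N, tl / ii))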

4 changes: 3 additions & 1 deletion src/train_lpo.py
@@ -63,7 +63,9 @@ def evaluate( prop, over_segs, segmentations, name='', bos=None, pool_ss=None, m
         pool_ss.append( pool_s )
         bo,pool_s = np.vstack( bos ),np.hstack( pool_ss )
         stdout.write('#prop = %0.3f ABO = %0.3f\r'%(np.mean(pool_s),np.mean(bo[:,0])))
-    print( "LPO %05s & %d & %0.3f & %0.3f & %0.3f & %0.3f & \\\\"%(name,np.mean(pool_s),np.mean(bo[:,0]),np.sum(bo[:,0]*bo[:,1])/np.sum(bo[:,1]), np.mean(bo[:,0]>=0.5), np.mean(bo[:,0]>=0.7) ) )
+    if len(pool_ss):
+        bo,pool_s = np.vstack( bos ),np.hstack( pool_ss )
+        print( "LPO %05s & %d & %0.3f & %0.3f & %0.3f & %0.3f & \\\\"%(name,np.mean(pool_s),np.mean(bo[:,0]),np.sum(bo[:,0]*bo[:,1])/np.sum(bo[:,1]), np.mean(bo[:,0]>=0.5), np.mean(bo[:,0]>=0.7) ) )
     return bos, pool_ss

 def evaluateBox( prop, over_segs, boxes, name='', bos=None, pool_ss=None, max_iou=0.9 ):
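np.vstack and np.hstack raise a ValueError when given an empty list, so the LaTeX summary row is now built and printed only when at least one batch produced results. A minimal sketch of the guard, assuming numpy:

# Minimal sketch: guard the summary against an empty result list, since
# np.vstack([]) raises "need at least one array to concatenate".
import numpy as np

bos, pool_ss = [], []          # stay empty if nothing was evaluated
if len(pool_ss):
    bo, pool_s = np.vstack(bos), np.hstack(pool_ss)
    print('#prop = %0.3f ABO = %0.3f' % (np.mean(pool_s), np.mean(bo[:, 0])))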
30 changes: 17 additions & 13 deletions src/util.py
@@ -35,10 +35,24 @@
     compress = lambda x: x
     decompress = lambda x: x

+def getDetector( detector="sf" ):
+    from lpo import contour
+    from os import path
+    basedir = path.dirname( path.dirname( path.abspath(__file__) ) )
+    if detector=='sf':
+        r = contour.StructuredForest()
+        r.load( path.join(basedir,'data','sf.dat') )
+    elif detector == "mssf":
+        r = contour.MultiScaleStructuredForest()
+        r.load( path.join(basedir,'data','sf.dat') )
+    else:
+        r = contour.DirectedSobel()
+    return r
+
 def loadAndOverSegDataset( loader, name, detector="sf", N_SPIX=1000 ):
     import numpy as np
     from pickle import dumps,loads
-    from lpo import contour,segmentation
+    from lpo import segmentation
     from tempfile import gettempdir
     FILE_NAME = '/%s/%s_%s_%d.dat'%(gettempdir(),name,detector,N_SPIX)
     try:
@@ -63,18 +77,8 @@ def loadAndOverSegDataset( loader, name, detector="sf", N_SPIX=1000 ):
         boxes = [e['boxes'] for e in data if 'boxes' in e]

     # Do the over-segmentation
-    if detector=='sf':
-        detector = contour.StructuredForest()
-        detector.load( '../data/sf.dat' )
-    elif detector=='ssf':
-        detector = contour.SharpStructuredForest()
-        detector.load( '../data/sf_sharp.dat' )
-    elif detector == "mssf":
-        detector = contour.MultiScaleStructuredForest()
-        detector.load( "../data/sf.dat" )
-    else:
-        detector = contour.DirectedSobel()
-
+    detector = getDetector()

     if detector != None:
         over_segs = segmentation.generateGeodesicKMeans( detector, images, N_SPIX )
