Skip to content
Snippets Groups Projects
Commit 0c48e25d authored by Spencer Delcore's avatar Spencer Delcore
Browse files

Changes so multi-obj works in image 77; still need to test on other images.

parent 21a8f561
No related branches found
No related tags found
No related merge requests found
......@@ -306,17 +306,17 @@ if __name__ == "__main__":
elif args.visualize == "seed_expansion":
for i, x in enumerate(zip(preds, seeds)):
pred, seed = x
image = dataset.load_image(im_name)
# Before expansion
pred_seed, _ = detect_box(
A[seed, :],
seed,
[w_featmap, h_featmap],
scales=scales,
initial_im_size=init_image_size[1:],
)
visualize_seed_expansion(image, pred, seed, pred_seed, scales, [w_featmap, h_featmap], vis_folder, im_name+'_'+str(i))
image = dataset.load_image(im_name)
# Before expansion
pred_seed, _ = detect_box(
A[seed, :],
seed,
[w_featmap, h_featmap],
scales=scales,
initial_im_size=init_image_size[1:],
)
visualize_seed_expansion(image, pred, seed, pred_seed, scales, [w_featmap, h_featmap], vis_folder, im_name+'_'+str(i))
elif args.visualize == "pred":
image = dataset.load_image(im_name)
......@@ -340,9 +340,8 @@ if __name__ == "__main__":
continue
if len(gt_bbxs) == 0:
continue # TODO: should do something else, should skip iou but count towards FP if pred exists
break # TODO: should do something else, should skip iou but count towards FP if pred exists
# TODO: preds needs to be
ious = bbox_iou(torch.from_numpy(pred), torch.from_numpy(np.asarray(gt_bbxs)))
# TODO: This calculates the corloc
......
......@@ -56,41 +56,58 @@ def lost(feats, dims, scales, init_image_size, k_patches=100, num_init_seeds=1,
"""
# Compute the similarity
A = (feats @ feats.transpose(1, 2)).squeeze()
if num_init_seeds== 0:
num_init_seeds = len(A)
# Compute the inverse degree centrality measure per patch
sorted_patches, scores = patch_scoring(A)
# Select the initial seed
seeds = sorted_patches[0:num_init_seeds]
if num_init_seeds <= 0:
num_init_seeds = len(sorted_patches)
preds = []
filtered_seeds= []
# if only one seed, then do default LOST and return
if num_init_seeds == 1:
seed = sorted_patches[0]
potentials = sorted_patches[:k_patches]
similars = potentials[A[seed, potentials] > 0.0]
M = torch.sum(A[similars, :], dim=0)
pred, _ = detect_box(M, seed, dims, scales=scales, initial_im_size=init_image_size[1:])
pred = np.asarray(pred)
filtered_seeds.append(seed)
preds.append(pred)
return np.asarray(preds), A, scores, filtered_seeds
#print('feats.shape',feats.shape)
# Seed expansion
for i, seed in enumerate(seeds):
for i in range(num_init_seeds):
add_pred = False
init_seed = sorted_patches[i]
#print('init_seed', init_seed)
potentials = sorted_patches[i:k_patches+i]
#potentials = torch.cat((sorted_patches[:i], sorted_patches[i:k_patches+1]),0)
#potentials = sorted_patches
#print('potentials:',potentials)
# TODO: potentials should take into consideration distance from init seed
similars = potentials[A[seed, potentials] > 0.0]
#print("sim=A[seed, potentials]:", A[init_seed, potentials])
similars = potentials[A[init_seed, potentials] > 0.0]
#print("similars:",similars)
#print("A[similars, :]",A[similars, :])
M = torch.sum(A[similars, :], dim=0)
# Box extraction
pred, _ = detect_box(
M, seed, dims, scales=scales, initial_im_size=init_image_size[1:]
M, init_seed, dims, scales=scales, initial_im_size=init_image_size[1:]
)
pred = np.asarray(pred)
add_pred = aspect_ratio(pred) > 1.0 # TODO Remove assumption
#add_pred = aspect_ratio(pred) > 1.0 # TODO Remove assumption
ious = 0
## TODO, if pick good iou
## if one bbox is completely inside another than pick the smaller one
if len(preds) > 0 and add_pred:
if len(preds) > 0:
idx_to_remove = -1
ious, inter, union = _bbox_iou(torch.from_numpy(pred), torch.from_numpy(np.asarray(preds)))
......@@ -109,12 +126,16 @@ def lost(feats, dims, scales, init_image_size, k_patches=100, num_init_seeds=1,
ious = ious[ious!=ious[idx_to_remove]]
add_pred = add_pred and not any(ious >= iou_threshold)
else:
add_pred = True
if add_pred:
#print(ious, pred)
filtered_seeds.append(seed)
filtered_seeds.append(init_seed)
preds.append(pred)
#raise Exception("ENDIT")
#print("Generated", len(preds), "predictions")
return np.asarray(preds), A, scores, filtered_seeds
......
declare -a images=(
"COCO_train2014_000000000081" # SOME IMAGE NAME
"COCO_train2014_000000000077" # SOME IMAGE NAME
)
DATASET_PATH=$COCO_ROOT/images/train2014
......@@ -9,8 +9,9 @@ OUTPUT_PATH=$LOST_PATH/outputs/samples
DINO_ARCH=vit_base
LOST_FEATURES=k
K_PATCHES=100
K_PATCHES=10
PATCH_SIZE=16
NUM_INIT_SEEDS=100
rm -rf $OUTPUT_PATH
......@@ -27,7 +28,7 @@ do
--which_feature $LOST_FEATURES \
--k_patches $K_PATCHES \
--visualize pred \
--num_init_seeds 1
--num_init_seeds $NUM_INIT_SEEDS
python main_lost.py \
--image_path $DATASET_PATH/$i.jpg \
......@@ -36,7 +37,7 @@ do
--which_feature $LOST_FEATURES \
--k_patches $K_PATCHES \
--visualize fms \
--num_init_seeds 1
--num_init_seeds $NUM_INIT_SEEDS
python main_lost.py \
--image_path $DATASET_PATH/$i.jpg \
......@@ -45,6 +46,6 @@ do
--which_feature $LOST_FEATURES \
--k_patches $K_PATCHES \
--visualize seed_expansion \
--num_init_seeds 1
--num_init_seeds $NUM_INIT_SEEDS
done
\ No newline at end of file
......@@ -140,6 +140,6 @@ def visualize_seed_expansion(image, pred, seed, pred_seed, scales, dims, vis_fol
image[start_1:end_1, start_2:end_2, 2] = 41
pltname = f"{vis_folder}/LOST_seed_expansion_{im_name}.png"
os.system('mkdir -p' + os.path.dirname(pltname))
os.system('mkdir -p ' + os.path.dirname(pltname))
Image.fromarray(image).save(pltname)
print(f"Image saved at {pltname}.")
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment