-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathpython_test
More file actions
executable file
·2473 lines (2314 loc) · 109 KB
/
python_test
File metadata and controls
executable file
·2473 lines (2314 loc) · 109 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/env python
#
# Angular-separation thresholds, expressed two equivalent ways: as a minimum
# quaternion distance (min_q_dists) and as a minimum cosine of separation
# (min_cos_seps). Correspondence table (q-dist : cosine => angle):
#   0.04     : 0.99     => 8.1 degrees
#   0.02     : 0.995    => 5.76 degrees
#   0.004    : 0.999    => 2.56 degrees
#   0.002    : 0.9995   => 1.8 degrees
#   0.0004   : 0.9999   => 0.81 degrees
#   0.0002   : 0.99995  => 0.576 degrees
#   0.00004  : 0.99999  => 0.256 degrees
#   0.00002  : 0.999995 => 0.18 degrees
#   0.000004 : 0.999999 => 0.081 degrees
# The FOV of a 35mm lens is roughly 45 degrees, so 5k pixels across 45
# degrees is about 0.01 degrees per pixel; the "Npix35" keys below are
# separations expressed as N pixels at that scale.
#
# Minimum cosine of separation, keyed by angular / pixel separation
min_cos_seps = {
    "1.8deg":   0.9995,
    "200pix35": 0.9995,
    "80pix35":  0.9999,
    "0.18deg":  0.999995,
    "20pix35":  0.999995,
    "0.081deg": 0.999999,
    "8pix35":   0.999999,
}
# Minimum quaternion distance, keyed by angular / pixel separation
min_q_dists = {
    "5.76deg":  0.02,
    "1.8deg":   0.002,
    "200pix35": 0.002,
    "80pix35":  0.0004,
    "0.18deg":  0.00002,
    "20pix35":  0.00002,
    "0.081deg": 0.000004,
    "8pix35":   0.000004,
}
#a Imports
from OpenGL.GLUT import *
from OpenGL.GL import *
import gjslib_c
import math
# Counter intended for numbering saved PNG snapshots (module-level state)
img_png_n=0
# Unit basis vectors used throughout: q.rotate_vector(vector_z) is taken as
# the "direction" of orientation q; vector_x is used as the second ('up')
# axis when averaging orientations via lookat()
vector_z = gjslib_c.vector(vector=(0,0,1))
vector_x = gjslib_c.vector(vector=(1,0,0))
from filters import *
#a Support functions
#f init_opengl
def init_opengl():
    """Create a minimal GLUT window so the GL-based filters have a context.

    A 64x64 double-buffered RGB+depth window with a 3.2 core profile is all
    that is needed for the gjslib_c filters to compile and execute.
    """
    # 'sys' is not imported at file scope (it only leaked in via a star
    # import, if at all) - import it locally so glutInit always works
    import sys
    glutInit(sys.argv)
    glutInitDisplayMode(GLUT_3_2_CORE_PROFILE |GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH)
    glutInitWindowSize(64,64)
    # NOTE(review): the literal string "self.window_title" looks like a pasted
    # attribute reference; harmless since the title is cosmetic
    glutCreateWindow("self.window_title")
    pass
#f save_as_png
def save_as_png(texture, filename):
    """Run a one-shot 'save' filter that writes *texture* out as *filename*."""
    saver = gjslib_c.filter(filter="save:%s(0)"%filename)
    saver.textures([texture])
    saver.compile()
    saver.execute()
    pass
#f alu_test
def alu_test(ops):
    """Execute a sequence of ALU filter operations over the global textures.

    Each entry of *ops* is (texture_indices, op, save_filename): an
    alu_buffers GLSL filter is compiled with OP=op, run over the global
    textures tb[i] for each index, and the last texture in the list is then
    saved out as a PNG.
    """
    for (indices, op, out_filename) in ops:
        alu = gjslib_c.filter(filter="glsl:alu_buffers(0,0,0)")
        alu.define("OP",op)
        alu.compile()
        alu.textures( [tb[n] for n in indices] )
        alu.execute()
        save_as_png(tb[indices[-1]],out_filename)
        pass
    pass
#f v_of_q
def v_of_q(q):
    """Return the direction of quaternion *q*: unit z rotated by normalized q."""
    return q.copy().normalize().rotate_vector(vector_z)
#f quaternion_average
def quaternion_average(qs):
    """Average a list of orientation quaternions.

    Sums the rotated 'forward' (z) and 'up' (x) axes across all quaternions
    and rebuilds a single orientation with lookat(), which averages nearby
    orientations without tripping over quaternion sign ambiguity.
    """
    forward_sum = gjslib_c.vector(length=3)
    up_sum = gjslib_c.vector(length=3)
    for quat in qs:
        forward_sum += quat.rotate_vector(vector_z)
        up_sum += quat.rotate_vector(vector_x)
        pass
    return gjslib_c.quaternion().lookat(forward_sum,up_sum)
#a Toplevel 1
# Module-level setup: create the GL context, then a pool of working textures
# (tb) used by the filter helpers and match classes below.
print "init opengl"
init_opengl()
# a = gjslib_c.texture(filename='images/IMG_2162.JPG')
print "Create textures"
tb = []
size = 1024
for i in range(12):
    tb.append(gjslib_c.texture(width=size, height=size))
    pass
print tb[0].width, tb[0].height
# b = gjslib_c.filter(filter="glsl:yuv_from_rgb(1,3)&-DINTENSITY_YSCALE=(3456.0/5184.0)&-DINTENSITY_XOFS=0.0&-DINTENSITY_XSCALE=1.0&-DINTENSITY_YOFS=0.0")
# b.define("INTENSITY_YSCALE","3.0")
# b.define("INTENSITY_YSCALE",remove=1)
# b.compile()
#a Match classes
#c c_image_match
class c_image_match(object):
radius=4
windowed_equalization = False
max_corners=3
max_matches_per_corner=3
min_corner_distance = 5.0
min_corner_distance = 60.0
min_match_distance = 2.5
def __init__(self, radius=4):
# radius must be 4 at the moment as circle_dft_diff_combine_filter is fixed
self.copy_img = c_alu_filter(extra_defines={"OP":"src_a"})
self.equalize = c_windowed_equalization_filter()
self.harris = c_harris_filter()
self.find_corners = c_find_filter(extra_parameters={"min_distance":self.min_corner_distance, "minimum":0.05, "max_elements":2500})
self.find_matches = c_find_filter(extra_parameters={"min_distance":self.min_match_distance, "minimum":0.04, "max_elements":2500})
self.circle_dft = c_circle_dft_filter(extra_defines={"DFT_CIRCLE_RADIUS":self.radius,
"CIRCLE_COMPONENT":"r",
})
self.circle_dft_diff = c_circle_dft_diff_filter()
self.circle_dft_diff_combine = c_circle_dft_diff_combine_filter()
pass
def get_matches(self, tb):
"""tb must be at least 10 textures, and the first is the source image, second is the target image"""
self.harris.execute( (tb[0],tb[4]) )
if self.windowed_equalization: # do windowed equalization to remove brightness and contrastiness dependence - with loss of information
self.copy_img.execute((tb[0],tb[0],tb[2]))
self.equalize.execute((tb[2],tb[0]))
self.copy_img.execute((tb[1],tb[1],tb[2]))
self.equalize.execute((tb[2],tb[1]))
pass
self.circle_dft.execute((tb[0],tb[2]))
self.circle_dft.execute((tb[1],tb[3]))
self.find_corners.execute( (tb[4],) )
print "Found %d corners (will restrict to max %d)"%(self.find_corners.f.num_points, self.max_corners)
corners = self.find_corners.f.points[:self.max_corners]
matches = {}
for i in range(len(corners)):
pt = corners[i]
xy = (pt[0],pt[1])
for i, dxy in [(0,(1,0)), (1,(0,1)), (2,(-1,0)), (3,(0,-1))]:
self.circle_dft_diff.set_parameters( {"uv_base_x":xy[0]+dxy[0]*self.radius,
"uv_base_y":xy[1]+dxy[1]*self.radius} )
self.circle_dft_diff.execute( (tb[2], tb[3], tb[5+i]) )
pass
self.circle_dft_diff_combine.execute( (tb[5], tb[6], tb[7], tb[8], tb[9]) )
self.find_matches.execute( (tb[9],) )
matches[xy] = self.find_matches.f.points[0:self.max_matches_per_corner]
pass
return matches
def times(self):
return {"copy_img":self.copy_img.times(),
"equalize":self.equalize.times(),
"harris":self.harris.times(),
"circle_dft":self.circle_dft.times(),
"find_corners":self.find_corners.times(),
"circle_dft_diff":self.circle_dft_diff.times(),
"circle_dft_diff_combine":self.circle_dft_diff_combine.times(),
"find_matches":self.find_matches.times(),
}
pass
#c c_quaternion_match
class c_quaternion_match(object):
    """
    A match is a mapping from a pair of source points to a pair of target points
    The match has a status
    """
    #f test_add
    @classmethod
    def test_add(cls, src_qs, tgt_qs, max_angle_diff):
        # Angle/axis of the great-circle rotation taking the first point's
        # z-direction to the second's, for both the source and target pairs
        (src_angle, src_axis) = src_qs[0].angle_axis(src_qs[1], vector_z).to_rotation()
        (tgt_angle, tgt_axis) = tgt_qs[0].angle_axis(tgt_qs[1], vector_z).to_rotation()
        # Reject the pairing if source and target separations differ by more
        # than the allowed relative fraction
        if abs((src_angle-tgt_angle))>=abs(src_angle*max_angle_diff): return None
        if False:
            print " Src/tgt axis/angle",src_axis,src_angle*180/math.pi,tgt_axis,tgt_angle*180/math.pi
            pass
        #dq = (src_qs[0]/tgt_qs[0]).rotate_vector(vector_z) - (src_qs[1]/tgt_qs[1]).rotate_vector(vector_z)
        #print "Abs:",dq.modulus()," ",180.0/math.pi*abs(src_angle-tgt_angle)," ",abs(src_angle-tgt_angle)/src_angle," ",dq
        #dq = dq.modulus()
        #if abs(dq)>0.2: return None
        return cls(src_qs, tgt_qs, src_angle_axis=(src_axis, src_angle), tgt_angle_axis=(tgt_axis,tgt_angle))
    #f __init__
    def __init__(self, src_qs, tgt_qs, src_angle_axis=None, tgt_angle_axis=None ):
        # src_angle_axis/tgt_angle_axis are (axis, angle) tuples as supplied by test_add
        self.src_qs = src_qs
        self.tgt_qs = tgt_qs
        self.src_angle_axis = src_angle_axis
        self.tgt_angle_axis = tgt_angle_axis
        self.src_from_tgt_orient = gjslib_c.quaternion()
        self.diff_q = gjslib_c.quaternion()
        self.calculate()
        pass
    #f calculate
    def calculate(self):
        """
        There is a rotation (angle, axis) that has src_q0.vector_z/src_q1.vector_z on a great circle,
        and similarly for tgt_q0.vector_z/tgt_q1.vector_z
        (The spherical coordinates of a point corresponding to src_q/tgt_q is q.vector_z in our definition)
        Given this we have two great circles; these are either the same, or they meet at two points.
        If they are the same we have src_axis==tgt_axis
        We can find the transformation that rotates src axis to tgt axis - this we call diff_q
        Hence diff_q . src_axis = tgt axis
        Now, diff_q is a rotation about an axis which goes through the great circles for src and tgt
        each in two places. diff_q has no effect on these positions (da0 and -da0).
        So, we can consider a first transformation that moves src_q0.vector_z to da0; this is moving
        the point around the great circle with the axis src_axis. Next apply the diff_q transformation;
        this has no effect on the point. Finally apply a last transformation that moves da0 to tgt_q0.vector_z,
        moving it along its great circle hence as a rotation around tgt_axis.
        This combination is a transformation for src_q0.vector_z to tgt_q0.vector_z
        Consider the effect on a point src_q1.vector_z - also on the
        great circle, perhaps 10 degrees further clockwise round.
        Our first rotation will place the point at da0 + 10 degrees clockwise on the src great circle
        The diff_q transformation moves this to da0 + 10 degrees clockwise on the tgt great circle
        The last rotation moves it to tgt_q1.vector_z - i.e. tgt_q0.vector_z + 10 degrees clockwise
        (assuming the src/tgt points correspond).
        The first rotation is a rotation around src_axis to make src_q0.vector_z move to diff_axis.
        This is simply the transformation to move diff_axis to src_q0.vector_z on a great circle
        The last is is also just the transformation to move diff_axis to tgt_q0.vector_z on a great circle
        Now if src_axis == +-tgt_axis then all that is needed is a rotation around src_axis
        """
        if self.src_angle_axis is None:
            # NOTE(review): to_rotation() returns (angle, axis) (see test_add's
            # unpacking), but the unpack below expects (axis, angle) - this
            # fallback path looks swapped relative to test_add; confirm
            self.src_angle_axis = self.src_qs[0].angle_axis(self.src_qs[1], vector_z).to_rotation()
            pass
        if self.tgt_angle_axis is None:
            self.tgt_angle_axis = self.tgt_qs[0].angle_axis(self.tgt_qs[1], vector_z).to_rotation()
            pass
        (src_axis, src_angle) = self.src_angle_axis
        (tgt_axis, tgt_angle) = self.tgt_angle_axis
        sp0 = self.src_qs[0].rotate_vector(vector_z)
        tp0 = self.tgt_qs[0].rotate_vector(vector_z)
        # diff_q rotates the source great-circle axis onto the target's
        self.diff_q = src_axis.angle_axis_to_v(tgt_axis)
        if True:
            print sp0, tp0, self.diff_q
            pass
        if self.diff_q.r>0.99999999:
            # Axes (near) identical: a single great-circle rotation suffices
            self.src_from_tgt_orient = tp0.angle_axis_to_v(sp0)
            return
        (diff_angle, diff_axis) = self.diff_q.to_rotation()
        print diff_angle, diff_axis
        # Compose: move tgt point to the circles' intersection axis, apply the
        # inverse axis-aligning rotation, then move on to the src point
        src_sp0_orient_to_diff_axis = diff_axis.angle_axis_to_v(sp0)
        dst_tp0_orient_to_diff_axis = tp0.angle_axis_to_v(diff_axis)
        self.src_from_tgt_orient = src_sp0_orient_to_diff_axis * ~self.diff_q * dst_tp0_orient_to_diff_axis
        if True:
            print src_sp0_orient_to_diff_axis
            print dst_tp0_orient_to_diff_axis
            print "These two should be equal... (or near as ...)"
            print (self.src_from_tgt_orient * self.tgt_qs[1]).angle_axis(self.src_qs[1],vector_z)
            print (self.src_from_tgt_orient * self.tgt_qs[0]).angle_axis(self.src_qs[0],vector_z)
            pass
        pass
# Disabled self-test for c_quaternion_match / quaternion_image_correlator;
# flip the condition to True to run it (the script then halts at 'die')
if False:
    qic=gjslib_c.quaternion_image_correlator()
    src_q0 = gjslib_c.quaternion().from_euler(roll=5.0,yaw=20,pitch=10,degrees=1)
    src_q1 = gjslib_c.quaternion().from_euler(pitch=5.0,yaw=10.0,degrees=1) * src_q0
    tgt_q0 = gjslib_c.quaternion().from_euler(roll=20.0,degrees=1) * src_q0
    tgt_q1 = gjslib_c.quaternion().from_euler(roll=20.2,degrees=1) * src_q1
    qm = c_quaternion_match.test_add(src_qs=(src_q0,src_q1),
                                     tgt_qs=(tgt_q0,tgt_q1),
                                     max_angle_diff=0.02) # good matches for 1900/1901 are all <0.
    qic.add_match(src_q0,tgt_q0,0,0,0)
    qic.add_match(src_q1,tgt_q1,0,0,0)
    qic.create_mappings()
    print qm.src_from_tgt_orient
    # 'die' is an undefined name: a deliberate NameError to stop the script here
    die
#c c_quaternion_cluster
class c_quaternion_cluster(object):
    """Cluster result quaternions into 'agreed' orientations.

    Repeatedly seeds a cluster from the first unassigned quaternion, then
    iteratively shrinks a radius (in z-direction space) around the running
    average of the members inside it until it can shrink no further.
    """
    #f __init__
    def __init__(self, results):
        # results: mapping whose values are the quaternions to cluster
        self.results = results
        pass
    #f find_centers
    def find_centers(self, qs, cxy, min_dist, min_min_dist, min_number, scale, max_iter):
        """Recursively tighten a cluster around the center direction *cxy*.

        Returns (member_qs, center_direction, radius, count, iterations_left).
        Recursion terminates when the radius drops below min_min_dist,
        membership would fall below min_number, or max_iter reaches zero.
        """
        def q_dist_from_v(q,v):
            # Euclidean distance between q's z-direction and direction v.
            # NOTE(review): c_vector is not defined in the visible portion of
            # this file - presumably supplied by 'from filters import *'; confirm
            return c_vector.vector_length(c_vector.vector_diff(v_of_q(q),v))
        if max_iter==0:
            return (qs[:], cxy, min_dist, len(qs), max_iter)
        if min_dist < min_min_dist:
            return (qs[:], cxy, min_dist, len(qs), max_iter)
        max_min = 0
        allowed_qs = []
        # Keep only quaternions within min_dist of the center; track the
        # farthest kept distance so the radius can shrink to fit
        for q in qs:
            d = q_dist_from_v(q,cxy)
            if (d<min_dist):
                if (d>max_min): max_min=d
                allowed_qs.append(q)
                pass
            pass
        if len(allowed_qs)<min_number:
            return (qs[:], cxy, min_dist, len(allowed_qs), max_iter)
        # Re-center on the average of the members and recurse with a tighter radius
        tq = quaternion_average(allowed_qs)
        return self.find_centers(allowed_qs, v_of_q(tq), max_min*scale, min_min_dist, min_number, scale, max_iter-1)
    #f find_clusters
    def find_clusters(self):
        """Partition all result quaternions into clusters and return a list of
        {"quaternion": average, "error": radius/num_members} dicts."""
        #b Produce list of results from valid results
        qs = []
        for m in self.results:
            qs.append(self.results[m])
            pass
        #b Find best centers from results
        handled_results = []
        remaining_qs = qs[:]
        fcs = []
        while len(remaining_qs)>0:
            q = remaining_qs[0]
            v = v_of_q(q)
            # python 2 integer division: clusters must hold >=10% of all results
            fc = self.find_centers(remaining_qs, v, 3.0, 0.005, len(qs)/10, 0.9, 40)
            fcs.append(fc)
            for q in fc[0]:
                handled_results.append(q)
                remaining_qs.remove(q)
                pass
            pass
        #b Sort list of centers - smallest radius-per-member first
        def fc_cmp(x,y):
            if x[2]/len(x[0])<y[2]/len(y[0]): return -1
            return 1
        fcs.sort(cmp=fc_cmp)
        if True:
            # Debug dump of each cluster (first 30 members)
            for fc in fcs:
                print fc[2], fc[1], fc[3], fc[4]
                n = 0
                for q in fc[0]:
                    print " ",q
                    n+=1
                    if n>30:
                        print " ...."
                        break
                    pass
                pass
            pass
        #b Display list of centers
        agreed_orientations = []
        for fc in fcs:
            tq = quaternion_average(fc[0])
            print "distance",fc[2],"num_in_range",fc[3],"/",len(fc[0]),"iterations left",fc[4], "center (r=",tq.r,",i=",tq.i,",j=",tq.j,",k=",tq.k,")"
            agreed_orientations.append( {"quaternion":tq, "error":fc[2]/len(fc[0])} )
            pass
        return agreed_orientations
    pass
#c c_camera_image
class c_camera_image(object):
    """A camera image: its texture plus the lens projection mapping pixels to orientations."""
    def __init__(self, image_filename, frame_width=22.3, focal_length=35.0, lens_type="rectilinear", orientation=None):
        self.texture = gjslib_c.texture(filename=image_filename)
        # The lens projection's width/height are chosen so that a uv of 0 to 1
        # maps left-to-right and bottom-to-top, i.e. the (-1,1) range covers
        # the image: -1=left/bottom, +1=right/top. For an image wider than it
        # is high the vertical extent must be scaled up by the aspect ratio,
        # so width=2.0 and height=2.0*pixel_width/pixel_height (for example a
        # 300x200 image gets width=2.0, height=3.0).
        self.lp = gjslib_c.lens_projection(width=2.0,
                                           height=2.0*self.texture.width/self.texture.height,
                                           frame_width=frame_width,
                                           focal_length=focal_length,
                                           lens_type=lens_type)
        # Default to the identity orientation when none is supplied
        self.lp.orient(gjslib_c.quaternion(r=1) if orientation is None else orientation)
        pass
    def orientation(self):
        """Return the current orientation quaternion of the lens projection."""
        return self.lp.orientation
    def orientation_of_xy(self,xy):
        """Map an image xy position to an orientation quaternion."""
        return self.lp.orientation_of_xy(xy)
    def orientation_of_xy_list(self, xy_list):
        """Map a list of image xy positions to orientation quaternions."""
        return [self.lp.orientation_of_xy(xy) for xy in xy_list]
    def xy_of_orientation_list(self, q_list):
        """Map a list of orientation quaternions back to image xy positions."""
        return [self.lp.xy_of_orientation(q) for q in q_list]
#c c_mappings
class c_mappings(object):
    """
    The set tgt_q_mappings[tgt_q] for a particular src_q is the list
    of other src_q/tgt_q pairs that have approximately the same angular distance
    between srcs as between tgts, stored with the quaternion transformation that would achieve tgt->src
    for these pairs.
    Note that a source can only map to one target point (!) at any one time,
    therefore only one tgt_q_mappings is potentially viable for any one source
    and then only ones that are 'similar' to a particular transformation
    Hence a 'score' can be calculated for each src/tgt pair with a count of the other src/tgt pairs
    that have a similar angular distance src->src, tgt->tgt and which have a similar transformation tgt->src
    """
    #f save_mappings
    @staticmethod
    def save_mappings(mappings_by_src_q, f, src_from_tgt_q=None, best_matches=None, qic=None):
        """Write the mappings to file *f* as an importable python module.

        The generated module defines base_mapping, src_quaternions,
        tgt_quaternions, best_matches and mappings. Two data sources are
        supported: when *qic* is given, ids come from
        c_image_pair_quaternion_match.c_best_match's qic_qs table; otherwise
        ids are taken from the quaternions' .id attributes in
        mappings_by_src_q.
        """
        cbm = c_image_pair_quaternion_match.c_best_match
        def quaternion_as_string(q):
            # Render a quaternion as a python constructor expression
            return "quaternion(r=%f,i=%f,j=%f,k=%f)"%(q.r, q.i, q.j, q.k)
        print >>f, "import gjslib_c"
        print >>f, "quaternion = gjslib_c.quaternion"
        print >>f, "base_mapping = None"
        if src_from_tgt_q:
            print >>f, "base_mapping = %s"%quaternion_as_string(src_from_tgt_q)
        print >>f, "src_quaternions = {"
        if qic:
            # qic path: qic_qs maps key -> (quaternion, id, "src"|"tgt")
            for src_q in cbm.qic_qs:
                src_q = cbm.qic_qs[src_q]
                if src_q[2]=="src":
                    print >>f, " %d:%s,"%(src_q[1], quaternion_as_string(src_q[0]))
                    pass
                pass
            pass
        else:
            for src_q in mappings_by_src_q:
                print >>f, " %d:%s,"%(src_q.id, quaternion_as_string(src_q))
                pass
            pass
        print >>f, " }"
        print >>f, "tgt_quaternions = {"
        tgt_qs = []
        if qic:
            for tgt_q in cbm.qic_qs:
                tgt_q = cbm.qic_qs[tgt_q]
                if tgt_q[2]=="tgt":
                    print >>f, " %d:%s,"%(tgt_q[1], quaternion_as_string(tgt_q[0]))
                    pass
                pass
            pass
        else:
            # Emit each distinct target quaternion exactly once across all sources
            for src_q in mappings_by_src_q:
                for tgt_q in mappings_by_src_q[src_q].tgt_qs:
                    if tgt_q not in tgt_qs:
                        tgt_qs.append(tgt_q)
                        print >>f, " %d:%s,"%(tgt_q.id, quaternion_as_string(tgt_q))
                        pass
                    pass
                pass
            pass
        print >>f, " }"
        print >>f, "best_matches = ["
        if best_matches:
            for bm in best_matches:
                #bm.mappings is list of src/tgt tuples
                print >>f, " (%f, %f, %s, ["%(bm.max_distance, bm.min_cos_sep, quaternion_as_string(bm.src_from_tgt_q))
                qs = []
                for (src_q, tgt_q, num_uses) in bm.best_mappings:
                    # In the qic case src_q/tgt_q are already ids, otherwise quaternions
                    if qic:
                        print >>f, " ((%d, %d),["%(src_q,tgt_q),
                        pass
                    else:
                        print >>f, " ((%d, %d),["%(src_q.id,tgt_q.id),
                        pass
                    for src_tgts in bm.qs:
                        (src_q0, tgt_q0, src_q1, tgt_q1) = src_tgts
                        (src_from_tgt_orient, d) = bm.qs[src_tgts]
                        # Collect the orientations that involve this src/tgt pair
                        if ((src_q0==src_q and tgt_q0==tgt_q) or
                            (src_q1==src_q and tgt_q1==tgt_q) ):
                            qs.append(src_from_tgt_orient)
                            pass
                        if src_q0==src_q and tgt_q0==tgt_q:
                            if qic:
                                print >>f, "(%d,%d,%f),"%(src_q1, tgt_q1, d),
                                pass
                            else:
                                print >>f, "(%d,%d,%f),"%(src_q1.id, tgt_q1.id, d),
                                pass
                            pass
                        pass
                    print >>f, "] ),"
                    pass
                if len(qs)==0: qs.append(gjslib_c.quaternion(1))
                print >>f, " ], {"
                for (src_q, tgt_q, num_uses) in bm.best_mappings:
                    if qic:
                        print >>f, " %d:(%d,),"%(src_q, tgt_q)
                        pass
                    else:
                        print >>f, " %d:(%d,),"%(src_q.id, tgt_q.id)
                        pass
                    pass
                print >>f, " }, %s,"%quaternion_as_string(bm.optimized_src_from_tgt_q)#quaternion_average(qs))
                print >>f, " ),"
                pass
        print >>f, " ]"
        print >>f, "mappings={}"
        if qic:
            for src_qx in qic.src_qs():
                src_q = cbm.qid_from_q(src_qx)
                print >>f, "mappings[%d] = {"%src_q
                print >>f, " 'tgts':{"
                for tgt_qx in qic.tgt_qs_of_src_q(src_qx):
                    tgt_q = cbm.qid_from_q(tgt_qx)
                    print >>f, " %d:{"%tgt_q
                    #(tgt_xy, tgt_qx, fft_rot, fft_power) = mapping.matches[tgt_q]
                    #print >>f, " 'data':(%s,%f,%f),"%(str(tgt_xy),fft_rot,fft_power)
                    print >>f, " 'mappings':("
                    # NOTE(review): the loop target 'src_from_tgt_q' below shadows
                    # the method parameter of the same name
                    src_tgt_mappings = qic.src_tgt_mappings(src_qx, tgt_qx)
                    for (src_q0, src_q1, tgt_q0, tgt_q1, src_from_tgt_q) in src_tgt_mappings:
                        print >>f, " (%d, %d, %s, %d)," %(cbm.qid_from_q(src_q1),
                                                          cbm.qid_from_q(tgt_q1),
                                                          quaternion_as_string(src_from_tgt_q),
                                                          0)
                        pass
                    print >>f, " ),"
                    print >>f, " },"
                    pass
                print >>f, " },"
                print >>f, " }"
                pass
            pass
        else:
            for src_q in mappings_by_src_q:
                mapping = mappings_by_src_q[src_q]
                print >>f, "mappings[%d] = {"%src_q.id
                print >>f, " 'tgts':{"
                for tgt_q in mapping.tgt_qs:
                    print >>f, " %d:{"%tgt_q.id
                    (tgt_xy, tgt_qx, fft_rot, fft_power) = mapping.matches[tgt_q]
                    print >>f, " 'data':(%s,%f,%f),"%(str(tgt_xy),fft_rot,fft_power)
                    print >>f, " 'mappings':("
                    for (src_q1, tgt_q1, src_from_tgt_orient) in mapping.tgt_q_mappings[tgt_q]:
                        print >>f, " (%d, %d, %s, %d)," %(src_q1.id,tgt_q1.id,quaternion_as_string(src_from_tgt_orient),src_from_tgt_orient.id)
                        pass
                    print >>f, " ),"
                    print >>f, " },"
                    pass
                print >>f, " },"
                print >>f, " }"
                pass
            pass
        pass
    #f show_mappings
    @staticmethod
    def show_mappings(mappings_by_src_q):
        """Debug-print every mapping in the dictionary."""
        for src_q in mappings_by_src_q:
            mappings_by_src_q[src_q].show_mapping()
            pass
        pass
    #f show_mapping
    def show_mapping(self, skip_unmapped_tgt_qs=True):
        """Debug-print this source point's targets, mappings and selections.

        Closeness is reported against a hard-coded 'known good' reference
        transformation (model_src_from_tgt_q) - diagnostic only.
        """
        model_src_from_tgt_q = gjslib_c.quaternion(r=0.997425,i=-0.007999,j=-0.070465,k=0.010689)
        # Shadows the module-level vector_z with the same value
        vector_z = gjslib_c.vector(vector=(0,0,1))
        print "%s:%s:%d"%(str(self.src_q),
                          str(self.src_q.rotate_vector(vector_z)),
                          len(self.tgt_qs))
        min_error = self.find_min_error(model_src_from_tgt_q, self.src_q)
        if min_error is None: print " No min error for ",model_src_from_tgt_q.to_rotation_str(1)
        if min_error is not None:print " min_e for %s:"%model_src_from_tgt_q.to_rotation_str(1),min_error[0],min_error[1]
        for tgt_qx in self.tgt_qs:
            if skip_unmapped_tgt_qs and (len(self.tgt_q_mappings[tgt_qx])==0): continue
            print " map v,l:%s:%s:%d"%(str(tgt_qx),
                                       str(tgt_qx.rotate_vector(vector_z)),
                                       len(self.tgt_q_mappings[tgt_qx]))
            for (src_q1, tgt_q1, src_from_tgt_orient) in self.tgt_q_mappings[tgt_qx]:
                print " ", src_from_tgt_orient, src_from_tgt_orient.to_rotation_str(1), src_q1.rotate_vector(vector_z), tgt_q1.rotate_vector(vector_z)
            if tgt_qx in self.selected_mappings:
                print " sm:",len(self.selected_mappings[tgt_qx])
                for (src_from_tgt_qx, n, e) in self.selected_mappings[tgt_qx]:
                    print " sel,n,e:",src_from_tgt_qx.to_rotation_str(1),n,e
                    pass
                pass
            if tgt_qx in self.too_far_from:
                print " tff:",len(self.too_far_from[tgt_qx])
                for (src_from_tgt_qx, max_maa) in self.too_far_from[tgt_qx]:
                    # Only show rejected transforms that are in fact close to the model
                    closeness_to_model = src_from_tgt_qx.angle_axis(model_src_from_tgt_q,vector_z).r
                    if closeness_to_model>0.9999:
                        print " sft:",src_from_tgt_qx.to_rotation_str(1), max_maa, closeness_to_model
                        pass
                    pass
                pass
            pass
        pass
    #f score_mapping
    def score_mapping(self, src_from_tgt_q, min_cos_sep=0.9995, max_q_dist=0.02):
        """
        For a given tgt->source transformation, generate a score
        The score is the highest count of matching source/target pairs which
        have a transform that is similar, from the possible targets for this source
        One can also track the 'sum of vectors' for 'z' and 'x' transformed by the
        similar transforms;

        Returns (score, tgt_qx) for the best-scoring target, or None.
        """
        max_score = None
        verbose = False
        for tgt_qx in self.tgt_qs:
            # Cosine of the angle between (transform applied to target) and the source
            dst = (src_from_tgt_q*tgt_qx).angle_axis(self.src_q,vector_z).r
            if verbose: print "checking",tgt_qx.id,dst
            if dst<min_cos_sep: continue
            if verbose: print "passed src_q/tgt_qx mapping"
            # Does it matter if the source point does not match up with any other target points?
            if len(self.tgt_q_mappings[tgt_qx])==0: continue
            # The more mappings that match the better - indeed, all of 'min_used' should map
            if len(self.tgt_q_mappings[tgt_qx])<4: continue
            if verbose: print "passed mapping length check"
            score = 0
            for m in self.tgt_q_mappings[tgt_qx]:
                (src_q1, tgt_q1, src_from_tgt_orient) = m
                if verbose: print "checking mapping",src_q1.id, tgt_q1.id, src_from_tgt_orient.id
                # Only need to one of compare src_from_tgt or src_q1
                # if ((src_from_tgt_q*tgt_q1).angle_axis(src_q1,vector_z).r)<min_cos_sep: continue
                dq = src_from_tgt_q.distance_to(src_from_tgt_orient)
                if dq>max_q_dist: continue
                score += 1
                pass
            #print "not sure how some of the matches are getting through - there are perfect matches, and "
            if (max_score is None) or (max_score[0]<score):
                max_score = (score, tgt_qx)
                pass
            pass
        return max_score
    #f find_min_error
    def find_min_error(self, src_from_tgt_q, src_qx):
        """Return (error, tgt_qx) for the target best fitting src_from_tgt_q, or None.

        Side effect: targets whose mappings are all too far from
        src_from_tgt_q are recorded in self.too_far_from.
        """
        min_d = None
        for tgt_qx in self.tgt_qs:
            # Does it matter if the source point does not match up with any other target points?
            if len(self.tgt_q_mappings[tgt_qx])==0: continue
            # The more mappings that match the better - indeed, all of 'min_used' should map
            if len(self.tgt_q_mappings[tgt_qx])<4: continue
            # Does it matter if the source point has no approximately similar target mappings?
            can_map = False
            max_maa = 0
            for m in self.tgt_q_mappings[tgt_qx]:
                (src_q1, tgt_q1, src_from_tgt_orient) = m
                maa = src_from_tgt_q.angle_axis(src_from_tgt_orient,vector_z)
                max_maa = max(max_maa,maa.r)
                pass
            if max_maa>0.999: can_map = True
            if not can_map:
                self.too_far_from[tgt_qx].append((src_from_tgt_q, max_maa))
                continue
            # Use the cos(great circle rotation between tgt mapped back to src and src)
            # Not good to use ~src_qx * src_from_tgt_q*tgt_qx directly
            # Can use (~src_qx * src_from_tgt_q*tgt_qx) applied to vector_z and use 1-z
            # Although that is basically identical (really really close)
            rv0 = (~src_qx*src_from_tgt_q*tgt_qx).rotate_vector(vector_z)
            d = 1-rv0.coords[2]*rv0.coords[2]
            if False:
                rq0 = src_qx.angle_axis(src_from_tgt_q*tgt_qx,vector_z)
                d = (1-rq0.r) # Not much effect if we change this to rq0.r^2
                pass
            if (min_d is None) or (min_d[0]>d):
                min_d = (d, tgt_qx)
                pass
            pass
        return min_d
    #f select_for_mapping
    def select_for_mapping(self, src_from_tgt_q, tgt_qx, i, e):
        """Record (transform, index, error) as a selected mapping for tgt_qx."""
        self.selected_mappings[tgt_qx].append( (src_from_tgt_q, i, e) )
        pass
    #f __init__
    def __init__(self, src_xy, src_q):
        self.src_xy = src_xy       # source image xy of this point
        self.src_q = src_q         # source orientation quaternion
        self.matches = {}          # tgt_q -> (tgt_xy, tgt_q, fft_rot, fft_power)
        self.tgt_qs = []           # distinct candidate target quaternions
        self.tgt_q_mappings = {}   # tgt_q -> list of (src_q1, tgt_q1, src_from_tgt_orient)
        self.selected_mappings={}  # tgt_q -> list of (src_from_tgt_q, i, e)
        self.too_far_from = {}     # tgt_q -> list of (src_from_tgt_q, max_maa)
        pass
    #f add_match
    def add_match(self, tgt_xy, tgt_q, fft_rot, fft_power, min_cos_sep=min_cos_seps["8pix35"]):
        """Add a candidate target, merging with an existing target closer than min_cos_sep."""
        found_tgt_q = None
        for tgt_qx in self.tgt_qs:
            cos_angle = tgt_qx.angle_axis(tgt_q,vector_z).r
            if cos_angle>min_cos_sep:
                found_tgt_q = tgt_qx
                break
            pass
        if found_tgt_q is None:
            self.tgt_qs.append(tgt_q)
            self.tgt_q_mappings[tgt_q] = []
            pass
        else:
            #print "Found tgt q",found_tgt_q,tgt_q,cos_angle
            tgt_q = found_tgt_q
            pass
        self.matches[tgt_q] = (tgt_xy, tgt_q, fft_rot, fft_power)
        pass
    #f power_of_match
    def power_of_match(self, tgt_q):
        """Return the fft_power recorded for tgt_q, or 0 if it is not a known target."""
        if tgt_q not in self.tgt_qs:
            return 0
        return self.matches[tgt_q][3]
    #f add_tgt_mapping
    def add_tgt_mapping(self, mappings_by_src_q, tgt_q, src_q1, tgt_q1, qm, max_rot_diff=60.0):
        """Record that (src_q1, tgt_q1) supports mapping this source to tgt_q.

        NOTE(review): each call resets selected_mappings/too_far_from for
        tgt_q, discarding anything recorded there since the previous add.
        """
        if tgt_q not in self.tgt_qs:
            raise Exception, "Unexpected add_tgt_mapping for tgt quaternion that is not part of the source"
        # Disabled FFT-rotation consistency filter (flip to True to enable)
        if False:#True:
            (src0_xy, tgt0_xy, fft0_rot, fft0_power) = self.matches[tgt_q]
            (src1_xy, tgt1_xy, fft1_rot, fft1_power) = mappings_by_src_q[src_q1].matches[tgt_q1]
            rot_diff = abs(fft1_rot-fft0_rot)
            if rot_diff>180: rot_diff=360-rot_diff
            if rot_diff>max_rot_diff: return
            if False:
                print src0_xy, tgt0_xy, src1_xy,tgt1_xy, rot_diff
                rq = qm.src_from_tgt_orient
                print fft0_power, fft0_rot, fft1_power, fft1_rot, "(r=%f,i=%f,j=%f,k=%f)"%(rq.r,rq.i,rq.j,rq.k)
                pass
            pass
        self.tgt_q_mappings[tgt_q].append( (src_q1, tgt_q1, qm.src_from_tgt_orient) )
        self.selected_mappings[tgt_q] = []
        self.too_far_from[tgt_q] = []
        pass
    pass
#c c_image_pair_quaternion_match
class c_image_pair_quaternion_match(object):
    """Match feature points between a pair of camera images via orientations."""
    #f __init__
    def __init__(self, filenames=[]):
        # NOTE(review): mutable default argument - safe only as long as
        # filenames is read, never mutated
        self.camera_images = {}
        for f in filenames:
            self.add_image(f)
            pass
        self.im = c_image_match()
        # Tuning: the later assignments deliberately override the earlier ones
        self.im.max_corners=40
        self.im.max_matches_per_corner=10
        self.im.max_corners=20
        self.im.max_matches_per_corner=10 # Not too many matches, as a good match is a good match
        self.to_yuv = c_yuv_from_rgb(extra_parameters={"xsc":2.0,"ysc":2.0,"xc":1.0,"yc":1.0},
                                     extra_defines={"EXTRA_VERTEX_UNIFORMS":"uniform float xsc, ysc, xc, yc;",
                                                    "GL_POSITION":"vec4(xsc*x+xc,ysc*y+yc,0,1)",
                                                    })
        self.mappings_by_src_q = None
        self.src_qs = []
        self.qic = None
        pass
#f xy_from_texture
def xy_from_texture(self,texture,texture_xy):
return ( (2*(float(texture_xy[0])/texture.width)-1.0),
(2*(float(texture_xy[1])/texture.height)-1.0) )
    #f orient
    def orient(self, image, orientation):
        """Set the orientation of the named camera image's lens projection."""
        self.camera_images[image].lp.orient(orientation)
        pass
    #f xy_of_orientation
    def xy_of_orientation(self, image, q):
        """Return the xy in the named image that orientation *q* projects to."""
        return self.camera_images[image].lp.xy_of_orientation(q)
#f add_image
def add_image(self, image_filename="", **kwargs):
if image_filename not in self.camera_images:
self.camera_images[image_filename] = c_camera_image(image_filename=image_filename, **kwargs)
pass
pass
#f find_overflows
def find_overflows(self, images, cxy0, size):
ci0 = self.camera_images[images[0]]
ci1 = self.camera_images[images[1]]
width = ci0.texture.width
height = ci0.texture.height
ci0_xy_list = []
for i in range(4):
for j in range(21):
xy = j/10.0-1.0
xy = [ (xy,-1.0), (1.0,xy), (-xy,1.0), (1.0,-xy) ][i]
xy = ((cxy0[0] + xy[0]*size/2)*width/2, (cxy0[1] + xy[1]*size/2)*height/2)
ci0_xy_list.append(xy)
pass
pass
q_list = ci0.orientation_of_xy_list(ci0_xy_list)
ci1_xy_list = ci1.xy_of_orientation_list(q_list)
off_edge = [0]*4
in_bounds = True
for i in range(4):
for j in range(21):
err = None
for xy in (ci0_xy_list[i*21+j], ci1_xy_list[i*21+j]):
xy = (xy[0]/(width/2), xy[1]/(height/2))
for k in range(2):
if (xy[k]<-1.0):err = -1 - xy[k]
if (xy[k]>1.0): err = xy[k] - 1
pass
if err is None: continue
err = abs(err)
if err > off_edge[i]:
off_edge[i] = err
in_bounds = False
pass
pass
pass
pass
if in_bounds: return None
return off_edge
    #f find_overlap
    def find_overlap(self, images, cxy0, size, scale=0.9, max_iter=40):
        """Shrink/slide a square until it fits inside both images.

        Starting from center *cxy0* and side *size*: when opposite edges both
        overflow, the square is shrunk by *scale*; otherwise the center is
        nudged away from whichever single edge overflows. Returns
        (cxy0, size) once it fits, or None if max_iter was exhausted.
        """
        # NOTE(review): ci0/ci1 are looked up but unused in this method
        ci0 = self.camera_images[images[0]]
        ci1 = self.camera_images[images[1]]
        for i in range(max_iter):
            overflow = self.find_overflows(images, cxy0, size)
            #print i, cxy0, size, overflow
            if overflow is None: return (cxy0, size)
            if overflow[0]>0 and overflow[2]>0: size *= scale
            elif overflow[1]>0 and overflow[3]>0: size *= scale
            else:
                if overflow[0]>0: cxy0=(cxy0[0] + size*(1-scale), cxy0[1])
                if overflow[2]>0: cxy0=(cxy0[0] - size*(1-scale), cxy0[1])
                if overflow[1]>0: cxy0=(cxy0[0], cxy0[1] + size*(1-scale))
                if overflow[3]>0: cxy0=(cxy0[0], cxy0[1] - size*(1-scale))
                pass
            pass
        return None
    #f find_overflows_for_projections
    def find_overflows_for_projections(self, images, projections):
        """Check that two trial projections land inside both camera images.

        The perimeter of each projection's (-1,1) uv square is sampled
        (21 points per edge), converted to orientations, and mapped back into
        both camera images. Returns None if everything is in bounds,
        otherwise the worst per-edge overflow distances.
        """
        def check_bounds(in_bounds, off_edge, edge, image, xy):
            # An image that is 300 wide and 200 high will have width=2.0, height=3.0 for its scaling
            # This means that the xy positions returned are truly just within the image
            # independent of the aspect ratio
            # NOTE(review): 'err' and the 'image' parameter are unused here
            err = None
            for e in (abs(xy[0])-1, abs(xy[1])-1 ):
                if e<=0: continue
                if e > off_edge[edge]:
                    off_edge[edge] = e
                    in_bounds = False
                    pass
                pass
            return (in_bounds, off_edge)
        ci0 = self.camera_images[images[0]]
        ci1 = self.camera_images[images[1]]
        ci0_tp = projections[0]
        ci1_tp = projections[1]
        q0_list = []
        q1_list = []
        for i in range(4):
            for j in range(21):
                xy = j/10.0-1.0 # in range -1<=xy<1
                xy = [ (xy,-1.0), (1.0,xy), (-xy,1.0), (-1.0,-xy) ][i] # (-1,-1)<=xy<=(1,1)
                q0_list.append(ci0_tp.orientation_of_xy(xy))
                q1_list.append(ci1_tp.orientation_of_xy(xy))
                pass
            pass
        # Map every perimeter orientation of both projections into both images
        ci0_q0_xy_list = ci0.xy_of_orientation_list(q0_list)
        ci1_q0_xy_list = ci1.xy_of_orientation_list(q0_list)
        ci0_q1_xy_list = ci0.xy_of_orientation_list(q1_list)
        ci1_q1_xy_list = ci1.xy_of_orientation_list(q1_list)
        off_edge = [0]*4
        in_bounds = True
        for i in range(4):
            for j in range(21):
                (in_bounds, off_edge) = check_bounds(in_bounds, off_edge, i, ci0, ci0_q0_xy_list[i*21+j])
                (in_bounds, off_edge) = check_bounds(in_bounds, off_edge, i, ci1, ci1_q0_xy_list[i*21+j])
                (in_bounds, off_edge) = check_bounds(in_bounds, off_edge, i, ci0, ci0_q1_xy_list[i*21+j])
                (in_bounds, off_edge) = check_bounds(in_bounds, off_edge, i, ci1, ci1_q1_xy_list[i*21+j])
                pass
            pass
        if in_bounds: return None
        return off_edge
#f overlap_projections
def overlap_projections(self, images, projections, scale=0.9, max_iter=80):
    """Adjust a pair of texture projections in place until both fit the images.

    projections is a pair of lens-projection objects whose orientation
    and focal_length are modified in place.  Each iteration asks
    find_overflows_for_projections which frame edges overflow: opposite
    edges overflowing forces a zoom-in (focal_length / scale), while
    one-sided overflows rotate both projections towards the clear side.

    Returns the (modified) projections tuple on success, or None if the
    overflow was not resolved within max_iter iterations.

    Cleanup: the unused ci0/ci1 image lookups and a dead `if False:`
    Python-2 debug print have been removed; the dispatch logic is
    unchanged.
    """
    (ci0_tp, ci1_tp) = projections
    for i in range(max_iter):
        overflow = self.find_overflows_for_projections(images, projections)
        size = None
        dxy = [0,0]
        if overflow is None: return projections
        # One bit per overflowing edge; `(cond) and 1` yields 1 or False,
        # and False is a valid 0 operand to the bitwise or
        overflow_bits = ( ((overflow[0]>0) and 1) |
                          ((overflow[1]>0) and 2) |
                          ((overflow[2]>0) and 4) |
                          ((overflow[3]>0) and 8) )
        # Edges are 0 => y=-1, 1=> x=1, 2=> y=1, 3=>x=-1
        if overflow_bits in [1, 3, 9, 11]: dxy[1]=1-scale
        if overflow_bits in [4, 6, 12, 14]: dxy[1]=scale-1
        if overflow_bits in [2, 3, 6, 7]: dxy[0]=scale-1
        if overflow_bits in [8, 9, 12, 13]: dxy[0]=1-scale
        # Any combination containing a pair of opposite edges requires a zoom
        if overflow_bits in [5, 7, 13, 10, 11, 14, 15]: size = scale
        if (dxy[0]!=0) or (dxy[1]!=0):
            # Rotate each projection so the point that was at dxy in frame
            # coordinates becomes the new frame centre
            dxy = tuple(dxy)
            q0 = ci0_tp.orientation_of_xy(dxy)
            q1 = ci0_tp.orientation_of_xy((0,0))
            dq = q1.angle_axis(q0,vector_z)
            ci0_tp.orient(dq*ci0_tp.orientation)
            # now ci0_tp.orientation_of_xy((0,0)).rotate_vector(vector_z) == q0.rotate_vector(vector_z)
            q0 = ci1_tp.orientation_of_xy(dxy)
            q1 = ci1_tp.orientation_of_xy((0,0))
            dq = q1.angle_axis(q0,vector_z)
            ci1_tp.orient(dq*ci1_tp.orientation)
            pass
        if size is not None:
            # Longer focal length -> narrower field of view -> zoomed in
            ci0_tp.focal_length = ci0_tp.focal_length / scale
            ci1_tp.focal_length = ci1_tp.focal_length / scale
            pass
        pass
    return None
#f find_matches
def find_matches(self, images, cxy0=None, size=None, min_cos_sep=min_cos_seps["8pix35"],
projections = None):
#f globals
global img_png_n
#b Set to/from projections
ci0 = self.camera_images[images[0]]
ci1 = self.camera_images[images[1]]
if projections is None:
# 'size' maps to frame_width;
# fov = 2atan2(frame_width/2focal_length) = 2atan2(size/2)
# hence size = frame_width/focal_length
# hence focal_length = frame_width/size
focal_length = 36.0 / size
#if img_png_n>0:focal_length=200.0 # GJS T
src_img_lp_to = gjslib_c.lens_projection(width=2.0, height=2.0, frame_width=36.0, focal_length=focal_length, lens_type="rectilinear")
dst_img_lp_to = gjslib_c.lens_projection(width=2.0, height=2.0, frame_width=36.0, focal_length=focal_length, lens_type="rectilinear")
# Currently we can change ci0 orientation and src_to_orientation works great with world_cxy0_q
# (but presumably not with ci0.orientation() * world_cxy0_q...)
# The dest projection camera needs to point at world_cxy0_q relative to the destination image
# as the destination image is accessed 'destination image local'
# Not sure I buy this logic actually...
print "Centering on image 0",cxy0, (cxy0[0]+0.5)*5184, (cxy0[1]+0.5)*3456
world_cxy0_q = ci0.orientation_of_xy(cxy0)
src_to_orientation = world_cxy0_q
dst_to_orientation = world_cxy0_q
dst_img_lp_to.orient(dst_to_orientation)
src_img_lp_to.orient(src_to_orientation)
pass
if projections is not None:
(src_img_lp_to, dst_img_lp_to) = projections
pass
#b Find matches
self.to_yuv.set_parameters( {"ysc":2.0, "yc":-1.0, "xsc":2.0, "xc":-1.0} )
self.to_yuv.set_projections(projections=(ci0.lp,src_img_lp_to), num_x_divisions=20, num_y_divisions=20)
self.to_yuv.execute((ci0.texture,tb[0]))
self.to_yuv.set_projections(projections=(ci1.lp,dst_img_lp_to), num_x_divisions=20, num_y_divisions=20)
self.to_yuv.execute((ci1.texture,tb[1]))
matches = self.im.get_matches(tb)
save_as_png(tb[0],"a%d.png"%img_png_n)
save_as_png(tb[1],"b%d.png"%img_png_n)
img_png_n+=1
#b Add source -> target matches for qic
if self.qic is not None:
for m in matches:
src_xy = self.xy_from_texture(tb[0],m)
src_q = src_img_lp_to.orientation_of_xy(src_xy)
for mm in matches[m]:
tgt_xy = self.xy_from_texture(tb[1],mm)
tgt_q = dst_img_lp_to.orientation_of_xy(tgt_xy)
self.qic.add_match(src_q, tgt_q, mm[2], mm[3], mm[4])
pass
return
#b Add source -> target matches for non-qic
if self.mappings_by_src_q is None:
self.mappings_by_src_q = {}
self.src_qs = []
pass
for m in matches:
src_xy = self.xy_from_texture(tb[0],m)
src_q = src_img_lp_to.orientation_of_xy(src_xy)
found_src_q = None
for src_qx in self.src_qs:
cos_angle = src_qx.angle_axis(src_q,vector_z).r
if cos_angle>min_cos_sep:
found_src_q = src_qx
break
pass
if found_src_q is None:
self.mappings_by_src_q[src_q] = c_mappings(src_xy, src_q)
self.src_qs.append(src_q)