1
+ import numpy as np
2
+ import torch
3
+ import pandas as pd
4
+ import os
5
+ from utils import geometry
6
+
7
+
8
def qmul(q, r):
    """
    Multiply quaternion(s) q with quaternion(s) r.

    Both arguments are tensors of shape (*, 4) in (w, x, y, z) order,
    where * denotes any number of leading dimensions. Returns q*r as a
    tensor with the same shape as the inputs.
    """
    assert q.shape[-1] == 4
    assert r.shape[-1] == 4

    out_shape = q.shape

    # Outer product of the two quaternions: prod[n, a, b] = r[n, a] * q[n, b].
    prod = torch.bmm(r.view(-1, 4, 1), q.view(-1, 1, 4))

    # Assemble the Hamilton product component by component (w, x, y, z).
    components = (
        prod[:, 0, 0] - prod[:, 1, 1] - prod[:, 2, 2] - prod[:, 3, 3],  # w
        prod[:, 0, 1] + prod[:, 1, 0] - prod[:, 2, 3] + prod[:, 3, 2],  # x
        prod[:, 0, 2] + prod[:, 1, 3] + prod[:, 2, 0] - prod[:, 3, 1],  # y
        prod[:, 0, 3] - prod[:, 1, 2] + prod[:, 2, 1] + prod[:, 3, 0],  # z
    )
    return torch.stack(components, dim=1).view(out_shape)
27
+
28
+
29
def load_extrinsics(filename, data_type=torch.float32):
    """
    Load a camera extrinsics matrix from a plain-text file.

    The file is expected to hold a 4x3 array: rows 0-2 form the 3x3
    rotation block and row 3 is the translation vector. Returns the
    tuple (extrinsics, extrinsics^-1) as 4x4 torch tensors of dtype
    `data_type`.
    """
    raw = np.loadtxt(filename)

    # Start from the identity so the homogeneous [3, 3] entry is 1; the
    # upper-left 3x3 and the translation column are then overwritten.
    mat = np.eye(4)
    mat[:3, :3] = raw[:3, :3]  # rotation
    mat[:3, 3] = raw[3, :]     # translation
    # Off-diagonal entries of row 3 stay 0, matching a rigid transform.
    mat[3, :3] = 0.0

    extrinsics = torch.tensor(mat, dtype=data_type)
    return extrinsics, extrinsics.inverse()
37
+
38
def load_pose(key, trajectories_dir, frame):
    """
    Load the drone-to-camera relative pose for one trajectory sample.

    Parameters
    ----------
    key : str
        "<building>_<row>_<date>" — identifies the trajectory folder and
        the row index inside its CSV.
    trajectories_dir : str
        Root directory containing <building>/<date>/ subfolders with the
        exported "blender" CSV files.
    frame : int
        0 selects the *_t1 pose columns; any other value selects *_t2.

    Returns
    -------
    (PdroneToCamera, PcameraToDrone)
        The drone-to-camera relative transform and its inverse, as
        produced by utils.geometry.

    Raises
    ------
    FileNotFoundError
        If no CSV file with "blender" in its name exists for this key.
        (Previously this surfaced as an UnboundLocalError.)
    """
    building, row, date = key.split("_")
    i = int(row)

    # Locate the exported blender CSV for this building/date. If several
    # match, the last one wins (preserves the original lookup behavior).
    day_dir = os.path.join(trajectories_dir, building, date)
    csv_name = None
    for file in os.listdir(day_dir):
        if "blender" in file:
            csv_name = file
    if csv_name is None:
        raise FileNotFoundError(f"no 'blender' CSV found in {day_dir}")

    # NOTE(review): separator is tab+space, as the original code used;
    # confirm against the CSV exporter before changing it.
    csv = pd.read_csv(os.path.join(day_dir, csv_name), sep='\t ', decimal='.')

    # Drone pose for the requested frame: quaternion stored as (x, y, z, w)
    # plus a position vector.
    t = 't1' if frame == 0 else 't2'
    Rd = np.array([csv[f'Orientation(x)_{t}'][i], csv[f'Orientation(y)_{t}'][i],
                   csv[f'Orientation(z)_{t}'][i], csv[f'Orientation(w)_{t}'][i]])
    Td = np.array([csv[f'Position(x)_{t}'][i], csv[f'Position(y)_{t}'][i],
                   csv[f'Position(z)_{t}'][i]])

    # Camera ("pilot") pose, same layout.
    Rc = np.array([csv['PilotRot(x)'][i], csv['PilotRot(y)'][i],
                   csv['PilotRot(z)'][i], csv['PilotRot(w)'][i]])
    Tc = np.array([csv['Pilot(x)'][i], csv['Pilot(y)'][i], csv['Pilot(z)'][i]])

    # World-space matrices for drone and camera, then the relative
    # transform between the two frames.
    Pdw = geometry.objectMatrix(Rd, Td)
    Pcw = geometry.objectMatrix(Rc, Tc)
    PdroneToCamera = geometry.getRelativeMatrix(Pdw[:3, :3], Pcw[:3, :3],
                                                Pdw[:3, 3], Pcw[:3, 3])

    return PdroneToCamera, PdroneToCamera.inverse()
67
+
68
def load_egoPose(key, trajectories_dir, target_, source_):
    """
    Load the relative pose mapping the source frame into the target frame.

    Parameters
    ----------
    key : str
        "<building>_<row>_<date>" — identifies the trajectory folder and
        the row index inside its CSV.
    trajectories_dir : str
        Root directory containing <building>/<date>/ subfolders with the
        exported "blender" CSV files.
    target_ : int
        1 means t1 columns are the source and t2 columns are the target;
        any other value swaps the two roles.
    source_ :
        Unused; kept only for backward compatibility with existing callers.

    Returns
    -------
    source_to_target_pose
        target_extrinsics^-1 @ source_extrinsics, built from the matrices
        returned by utils.geometry.objectMatrix.

    Raises
    ------
    FileNotFoundError
        If no CSV file with "blender" in its name exists for this key.
        (Previously this surfaced as an UnboundLocalError.)
    """
    building, row, date = key.split("_")
    i = int(row)

    # Locate the exported blender CSV; last match wins (preserves the
    # original lookup behavior).
    day_dir = os.path.join(trajectories_dir, building, date)
    csv_name = None
    for file in os.listdir(day_dir):
        if "blender" in file:
            csv_name = file
    if csv_name is None:
        raise FileNotFoundError(f"no 'blender' CSV found in {day_dir}")

    # NOTE(review): separator is tab+space, as the original code used;
    # confirm against the CSV exporter before changing it.
    csv = pd.read_csv(os.path.join(day_dir, csv_name), sep='\t ', decimal='.')

    # Column-suffix selection replaces the original duplicated branches:
    # target_ == 1 -> source is *_t1 and target is *_t2, otherwise swapped.
    src, tgt = ('t1', 't2') if target_ == 1 else ('t2', 't1')
    Rsource = np.array([csv[f'Orientation(x)_{src}'][i], csv[f'Orientation(y)_{src}'][i],
                        csv[f'Orientation(z)_{src}'][i], csv[f'Orientation(w)_{src}'][i]])
    Tsource = np.array([csv[f'Position(x)_{src}'][i], csv[f'Position(y)_{src}'][i],
                        csv[f'Position(z)_{src}'][i]])
    Rtarget = np.array([csv[f'Orientation(x)_{tgt}'][i], csv[f'Orientation(y)_{tgt}'][i],
                        csv[f'Orientation(z)_{tgt}'][i], csv[f'Orientation(w)_{tgt}'][i]])
    Ttarget = np.array([csv[f'Position(x)_{tgt}'][i], csv[f'Position(y)_{tgt}'][i],
                        csv[f'Position(z)_{tgt}'][i]])

    # World-space pose matrices, then express the source frame in the
    # target frame.
    source_extrinsics = geometry.objectMatrix(Rsource, Tsource)
    target_extrinsics = geometry.objectMatrix(Rtarget, Ttarget)

    source_to_target_pose = target_extrinsics.inverse() @ source_extrinsics
    return source_to_target_pose
0 commit comments