Skip to content

Commit 7bb8248

Browse files
committed
fix(openmvs): export the undistorted reconstruction
Fixes #192
1 parent 41cea9e commit 7bb8248

14 files changed

Lines changed: 484 additions & 2 deletions

Dockerfile_opencv3

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
# Build OpenSfM on top of the prebuilt OpenCV 3 base image.
FROM paulinus/opensfm-docker-base:opencv3

# Copy the source tree into the image and build in place.
COPY . /source/OpenSfM

WORKDIR /source/OpenSfM

# Install Python dependencies, then compile the native extensions.
RUN pip install -r requirements.txt && \
    python setup.py build

bundle.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
2+
# Debug script: re-run bundle adjustment on a saved reconstruction
# and save the rebundled result next to the original.
from opensfm import dataset
from opensfm import reconstruction

# NOTE(review): hard-coded dataset path — adjust to the dataset under test.
data = dataset.DataSet('data/data/zanzibar_geo1')
graph = data.load_tracks_graph()
rs = data.load_reconstruction()
for r in rs:
    # Bundle each partial reconstruction; third argument (GCPs) is None.
    reconstruction.bundle(graph, r, None, data.config)

data.save_reconstruction(rs, 'reconstruction.rebundled.json')
print data.images()  # Python 2 print statement

gather_submodels.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
"""Merge per-submodel reconstructions into one pair of top-level files.

Reads ``reconstruction.unaligned.json`` and ``reconstruction.aligned.json``
from every submodel of the meta-dataset given as the first command-line
argument, concatenates them, and writes the merged lists at the root of
the meta-dataset.
"""
import sys

from opensfm import dataset
from opensfm.large import metadataset

unaligned = []
aligned = []
path = sys.argv[1]
metadata = metadataset.MetaDataSet(path)

for submodel in metadata.get_submodel_paths():
    submodel_data = dataset.DataSet(submodel)
    unaligned.extend(submodel_data.load_reconstruction('reconstruction.unaligned.json'))
    aligned.extend(submodel_data.load_reconstruction('reconstruction.aligned.json'))

data = dataset.DataSet(path)
data.save_reconstruction(unaligned, 'reconstruction.unaligned.json')
data.save_reconstruction(aligned, 'reconstruction.aligned.json')

generate_gcp.py

Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
1+
#!/usr/bin/env python
"""Generate ground control points by sampling points from a reconstruction."""
import argparse

import matplotlib.pyplot as plt
import numpy as np

from opensfm import dataset
from opensfm import features
from opensfm import geo

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='Generate GCP by sampling points from the reconstruction')
    parser.add_argument(
        'dataset',
        help='path to the dataset to be processed')
    parser.add_argument(
        '--num_points',
        default=3,
        type=int,
        help='number of points to generate')
    args = parser.parse_args()

    data = dataset.DataSet(args.dataset)
    reference = data.load_reference_lla()
    # Only the first reconstruction is sampled.
    reconstruction = data.load_reconstruction()[0]

    print 'WGS84'  # Python 2 print statement; header of the GCP file format
    for i in range(args.num_points):
        # Pick a random 3D point from the reconstruction.
        point = np.random.choice(reconstruction.points.values())

        for shot in reconstruction.shots.values():
            pixel = shot.project(point.coordinates)
            # Keep only shots where the projection lands inside the image
            # (normalized coordinates are in [-0.5, 0.5]).
            if np.fabs(pixel).max() < 0.5:

                # Convert the point from topocentric to WGS84 lat/lon/alt.
                lla = geo.lla_from_topocentric(
                    point.coordinates[0],
                    point.coordinates[1],
                    point.coordinates[2],
                    reference['latitude'],
                    reference['longitude'],
                    reference['altitude'])

                # Convert the normalized projection to pixel coordinates.
                x, y = features.denormalized_image_coordinates(
                    pixel.reshape(1, 2), shot.camera.width, shot.camera.height)[0]

                # One GCP line: lat lon alt pixel_x pixel_y image_id
                print "{} {} {} {} {} {}".format(
                    lla[0], lla[1], lla[2],
                    x, y, shot.id)

killer.py

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
from multiprocessing import Pool
2+
import time
3+
import threading
4+
from subprocess import check_output
5+
6+
7+
def get_pid(name):
    """Return the PIDs of every running process named *name*.

    Raises CalledProcessError if ``pidof`` finds no matching process.
    """
    return [int(pid) for pid in check_output(["pidof", name]).split()]
9+
10+
11+
class Killer(threading.Thread):
    """Background thread that SIGKILLs the middle ``python`` process.

    Used to test how a multiprocessing pool reacts when one of its
    workers dies: waits 3 seconds, lists all running ``python``
    processes, and kills the middle one.
    """

    def __init__(self):
        threading.Thread.__init__(self)

    def run(self):
        print("Wait 3 seconds...")
        time.sleep(3)
        pids = get_pid('python')
        print(pids)
        print("Found " + str(len(pids)) + " python processes")
        print("Killing middle process")

        # Kill with a plain argv list. The previous version wrapped the
        # command passed to `bash -c` in literal double quotes, which made
        # bash look for a single command named "kill -9 <pid>" and fail.
        check_output(["kill", "-9", str(pids[int(len(pids) / 2)])])

        print("Processes killed, now waiting for termination...")
27+
28+
29+
def f(x):
    """Return x squared after a 6-second pause (simulates slow work)."""
    time.sleep(6)
    return x * x
32+
33+
34+
if __name__ == '__main__':
    # Start three workers, then let the Killer thread shoot one of them
    # mid-computation to observe how Pool.map behaves.
    pool = Pool(3)
    print("Started")
    killer = Killer()
    killer.start()
    print(pool.map(f, [1, 2, 3]))
    print("Ended")

notes_on_thresholds.txt

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
2+
3+
Thresholds
4+
==========
5+
6+
7+
For a triangulated point X and projection P
8+
9+
|x - P X| < t_pixels
10+
11+
12+
For a homography between images
13+
14+
|x1 - H x2| < 2 * t_pixels
15+
16+
17+
For a triangulated point and bearing
18+
19+
|b - R X - t| < t_angle
20+
21+
22+
For a rotation between bearings
23+
24+
|b1 - R b2| < 2 * t_angle
25+
26+
27+
28+
29+
The threshold for image to image error has to be twice as big as the error for 3d to image.
30+
This is because the triangulated 3D point could be in the middle of the two image points
31+
32+
x1 <-------> P X <-------> x2
33+
t t
34+
35+
x1 <---------------------> x2
36+
2 * t
37+
38+
39+
40+
To convert between t_pixel and t_angle, we need the focal length of the camera.
41+
We have
42+
43+
tan(t_angle) = t_pixels / focal_length
44+
45+
Here we arbitrarily assume that the threshold is given for a camera of focal length 1
46+
Also tan(t) \approx t for small t. So we simply have
47+
48+
t_angle \approx t_pixels
49+
50+
51+
See also opengv doc on thresholds here: http://laurentkneip.github.io/opengv/page_how_to_use.html
52+
53+

opensfm/commands/export_openmvs.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,8 +19,8 @@ def add_arguments(self, parser):
1919

2020
def run(self, args):
    """Export the undistorted reconstruction to OpenMVS.

    Loads the undistorted reconstruction and tracks graph rather than
    the raw ones, so the exported scene matches the undistorted images
    that OpenMVS consumes (fixes #192).
    """
    data = dataset.DataSet(args.dataset)
    reconstructions = data.load_undistorted_reconstruction()
    graph = data.load_undistorted_tracks_graph()

    # Only the first (largest) reconstruction is exported, if any exists.
    if reconstructions:
        self.export(reconstructions[0], graph, data)

remove_images_without_gps.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
# List images in the current dataset whose EXIF has no GPS latitude.
from opensfm import dataset

data = dataset.DataSet('.')

for image in data.images():
    e = data.load_exif(image)
    # An image is unusable for geo-alignment without a latitude reading.
    if 'gps' not in e or 'latitude' not in e['gps']:
        print image  # Python 2 print statement

run_lund.sh

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
DATA=data/data/lund_large
2+
3+
./setup_odm_metadataset.py $DATA
4+
5+
bin/opensfm extract_metadata $DATA/opensfm
6+
bin/opensfm detect_features $DATA/opensfm
7+
bin/opensfm match_features $DATA/opensfm
8+
bin/opensfm create_submodels $DATA/opensfm --size 10 --dist 20
9+
10+
python run_submodels.py $DATA/opensfm
11+
12+
bin/opensfm align_submodels $DATA/opensfm
13+
14+
python gather_submodels.py $DATA/opensfm

run_submodels.py

Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,69 @@
1+
#!/usr/bin/env python
2+
3+
import argparse
4+
import logging
5+
import multiprocessing
6+
import os
7+
import subprocess
8+
import sys
9+
10+
from opensfm.large import metadataset
11+
12+
logger = logging.getLogger(__name__)
13+
14+
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
15+
16+
17+
class Reconstructor:
    """Callable that reconstructs one submodel with the opensfm binary.

    Instances are plain callables so they can be mapped over a
    multiprocessing pool as well as invoked sequentially.
    """

    def __init__(self, command, complete):
        # Path to the opensfm executable.
        self._command = command
        # When True, also run metadata/feature/matching per submodel.
        self._complete = complete

    def __call__(self, submodel_path):
        logger.info("===========================================================")
        logger.info("Reconstructing submodel {}".format(submodel_path))
        logger.info("===========================================================")

        if self._complete:
            self._run_command([self._command, 'extract_metadata', submodel_path])
            self._run_command([self._command, 'detect_features', submodel_path])
            self._run_command([self._command, 'match_features', submodel_path])

        self._run_command([self._command, 'create_tracks', submodel_path])
        self._run_command([self._command, 'reconstruct', submodel_path])

        logger.info("===========================================================")
        logger.info("Submodel {} reconstructed".format(submodel_path))
        logger.info("===========================================================")

    def _run_command(self, args):
        """Run *args* as a subprocess; raise RuntimeError on nonzero exit.

        The error message names the failing command, not just the bare
        exit code, so failures inside a worker pool are attributable.
        """
        result = subprocess.Popen(args).wait()
        if result != 0:
            raise RuntimeError(
                "Command '{}' failed with exit code {}".format(
                    ' '.join(args), result))
43+
44+
45+
if __name__ == "__main__":
46+
parser = argparse.ArgumentParser(description='Reconstruct all submodels')
47+
parser.add_argument('dataset',
48+
help='path to the dataset to be processed')
49+
parser.add_argument('-c', '--complete',
50+
help='Run the complete pipeline on each subset',
51+
action='store_true')
52+
parser.add_argument('-p', '--processes',
53+
help='Number of parallel processes to run',
54+
type=int, default=1)
55+
args = parser.parse_args()
56+
57+
meta_data = metadataset.MetaDataSet(args.dataset)
58+
exec_dir = os.path.join(os.getcwd(), os.path.dirname(sys.argv[0]))
59+
command = os.path.join(exec_dir, "bin/opensfm")
60+
61+
submodel_paths = meta_data.get_submodel_paths()
62+
reconstructor = Reconstructor(command, args.complete)
63+
64+
if args.processes == 1:
65+
for submodel_path in submodel_paths:
66+
reconstructor(submodel_path)
67+
else:
68+
p = multiprocessing.Pool(args.processes)
69+
p.map(reconstructor, submodel_paths)

0 commit comments

Comments
 (0)