-
Notifications
You must be signed in to change notification settings - Fork 142
/
Copy path: dataMerge
executable file
·198 lines (150 loc) · 6.27 KB
/
dataMerge
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This program is part of pygimli
See https://www.pygimli.org for further information.
"""
import sys
import numpy as np
try:
import pygimli as pg
from pygimli.utils import trimDocString
except ImportError:
sys.stderr.write('ERROR: cannot import the library pygimli.' +
' Ensure that pygimli is in your PYTHONPATH')
sys.exit(1)
def merge(data1, data2, ContainerType=pg.DataContainer, snap=0.001):
    """Merge two data containers into a single one.

    The sensor positions and data points of data2 are copied into a
    fresh copy of data1. Duplicate sensor positions are unified and
    snapped to a grid of size ``snap``.
    """
    result = ContainerType(data1)
    result.add(data2, snap)
    return result
def loadProjectFile(
        projectfile, ContainerType=pg.DataContainer, verbose=False):
    """
    A project file defines how multiple data files can be imported and
    merged.
    It holds one row entry per data file to merge. The data will not be
    sorted, i.e., the sensor positions will be in the same order as the
    row entries. However, sensors on the same positions (with
    tolerance snapping) will be merged.
    You can also give just one data file if you want just a spatial
    transformation of the data.
    The currently supported formats are:
    dataFilename
    dataFilename interpolationFilename
    dataFilename startx endx
    dataFilename startx starty endx endy
    dataFilename x1 y1 x2 y2 x3 y3 ...
    You can comment out a row by adding a leading '#'
    interpolationFilename is a 3-column-ascii-file (dx x y)
    """
    dataList = []
    # 'with' guarantees the project file is closed even on a parse error.
    with open(projectfile, "r") as fi:
        content = fi.readlines()

    for c in content:
        # Strip the trailing newline and anything after a '#' comment
        # marker, then split the remainder into whitespace-separated fields.
        row = c.split('\n')[0].split('#')[0].split()
        if not row:
            continue

        d = None
        if len(row) == 1:  # dataFilename only: load as-is
            d = ContainerType(row[0])
        elif len(row) == 2:  # dataFilename interpolationFilename
            # Re-map sensor x positions along a tape given by a
            # 3-column ascii file (tape x y); the z coordinate is kept.
            d = ContainerType(row[0])
            xn = pg.x(d.sensorPositions())
            zn = pg.z(d.sensorPositions())
            tape, xt, yt = np.loadtxt(row[1], unpack=True)
            x3n = np.interp(xn, tape, xt)
            y3n = np.interp(xn, tape, yt)
            for i in range(d.sensorCount()):
                d.setSensorPosition(i, pg.RVector3(x3n[i], y3n[i], zn[i]))
        elif len(row) == 3:  # dataFilename xstart xend
            d = ContainerType(row[0])
            start = pg.RVector3(float(row[1]), 0.0)
            end = pg.RVector3(float(row[2]), 0.0)
            _spreadSensorsLinearly(d, start, end)
        elif len(row) == 5:  # dataFilename xstart ystart xend yend
            d = ContainerType(row[0])
            start = pg.RVector3(float(row[1]), float(row[2]))
            end = pg.RVector3(float(row[3]), float(row[4]))
            _spreadSensorsLinearly(d, start, end)
        elif len(row) % 2 != 0:  # dataFilename x1 y1 x2 y2 x3 y3 ...
            # BUG fix: the original condition was "not len(row) % 2 is 0"
            # (identity comparison on an int) and then "raise ('toimpl')",
            # which raises a TypeError instead of signalling the missing
            # feature; the statements after the raise were dead code.
            raise NotImplementedError(
                "piecewise coordinate lists (x1 y1 x2 y2 ...) are not "
                "implemented yet: " + str(row))
        else:
            print(("cannot interpret the project format: len(row) = ",
                   len(row)))
            return dataList

        dataList.append(d)
        if verbose:
            print(("append: ", d))
            print(("from:", d.sensorPositions()[0],
                   "to:", d.sensorPositions()[-1]))
    return dataList


def _spreadSensorsLinearly(d, start, end):
    """Place all sensors of d equidistantly on the line from start to end."""
    for i in range(d.sensorCount()):
        pos = start + float(i) * (end - start) / (d.sensorCount() - 1.)
        d.setSensorPosition(i, pos)
def main(argv):
    """Parse command-line options, load the given project file and merge
    all referenced data files into a single output data file.

    argv is the argument list WITHOUT the program name (sys.argv[1:]).
    """
    import argparse

    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=trimDocString(loadProjectFile.__doc__))
    parser.add_argument("-v", "--verbose", dest="verbose",
                        action="store_true",
                        help="Be verbose.")
    parser.add_argument("-o", "--output", dest="outFileName", metavar="File",
                        help="Filename for the resulting data file.")
    parser.add_argument("-s", "--snap", dest="snap", type=float,
                        default=0.1,
                        help="Snap coordinates to gridsize")
    parser.add_argument("-B", "--bert", dest="bert", action="store_true",
                        help="Use BERT sensor indices (a b m n)")
    parser.add_argument('project')

    # BUG fix: the original called parse_args() without arguments and
    # thereby silently ignored the argv parameter passed by the caller.
    args = parser.parse_args(argv)

    projectFileName = args.project
    if args.outFileName is None:
        # BUG fix: the original used projectFileName.find('.pro'), which
        # returns -1 for names without that extension and silently chopped
        # the last character off the output name.
        if projectFileName.endswith('.pro'):
            args.outFileName = projectFileName[:-len('.pro')] + '.dat'
        else:
            args.outFileName = projectFileName + '.dat'

    if args.verbose:
        print(args)
        print("verbose =", args.verbose)
        print("project =", projectFileName)
        print("output =", args.outFileName)
        print("snap =", args.snap)

    ContainerTyp = pg.DataContainer
    if args.bert:
        # Optional dependency: only needed when BERT indices are requested.
        import pybert as b
        ContainerTyp = b.DataContainerERT

    dataList = loadProjectFile(projectFileName,
                               ContainerTyp,
                               verbose=args.verbose)

    # Robustness: a fully commented-out or empty project file previously
    # crashed with a bare IndexError on dataList[0].
    if not dataList:
        sys.stderr.write("No data files found in project file: " +
                         projectFileName + "\n")
        sys.exit(1)

    outdata = dataList[0]
    if args.verbose:
        print("start merging ...")
        print(outdata)

    # Fold the remaining containers into the first one, pairwise.
    for d in dataList[1:]:
        outdata = merge(outdata, d, ContainerTyp, args.snap)
        if args.verbose:
            print(outdata)

    if args.verbose:
        print("Write file to: ", args.outFileName)
    outdata.save(args.outFileName)
# Script entry point: forward the command-line arguments (without the
# program name) to main().
if __name__ == "__main__":
    main(sys.argv[1:])