# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import os
import re
import traceback
import codecs
import numpy as np
from datetime import datetime
import pygimli as pg
from pygimli.utils.utils import uniqueRows
try:
import pybert as pb
except ImportError:
    sys.stderr.write("ERROR: cannot import the library 'pybert'. "
                     "Ensure that pybert is in your PYTHONPATH.\n")
sys.exit(1)
# self.registerOpenFileSuffix(suffix, plugin.MainOpenWildcard[i],
# plugin.PluginApplication, plugin.MainOpenFileSlot)
# self.fileSuffixes[suffix] = [wildcard, cls, callback]
# MainOpenFileSuffix = ['.dat', '.ohm', '.shm', '.edt', '.data']
# MainOpenFileSlot = BertApp.openDataFile
# MainOpenWildcard = ["BERT unified data file (*.dat)"
bertImportDataFileSuffixesDict = dict()
bertImportDataFileSuffixesDict['.dat'] = ["Unified data file (*.dat)", 'Gimli']
bertImportDataFileSuffixesDict['.ohm'] = ["Unified data file (*.ohm)", 'Gimli']
bertImportDataFileSuffixesDict['.shm'] = ["Unified data file (*.shm)", 'Gimli']
bertImportDataFileSuffixesDict['.udf'] = ["Unified data file (*.udf)", 'Gimli']
bertImportDataFileSuffixesDict['.bin'] = ["Syscal Pro (*.bin)", 'SyscalPro']
bertImportDataFileSuffixesDict['.tx0'] = ["LGM 4-point light (*.tx0)",
'4PointLight']
# bertImportDataFileSuffixesDict['.txt'] = ["GeoSys (*.txt)", 'GeoSys']
bertImportDataFileSuffixesDict['.txt'] = ["ASCII (*.txt)", 'AsciiColumns']
bertImportDataFileSuffixesDict['.tx2'] = ["WB ASCII (*.tx2)", 'AsciiColumns']
# bertImportDataFileSuffixesDict['.txt'] = ["ASCII (*.txt)", 'ABEMAscii']
bertImportDataFileSuffixesDict['.abem'] = ["ABEM-ASCII (*.txt)",
'ABEMAscii']
bertImportDataFileSuffixesDict['.resecs'] = ["Resecs-ASCII (*.txt)",
'ResecsAscii']
bertImportDataFileSuffixesDict['.flw'] = ["Geotom FLW (*.flw)", 'FLW']
bertImportDataFileSuffixesDict['.slm'] = ["Geotom Schlumberger (*.slm)",
'Geotom']
bertImportDataFileSuffixesDict['.wen'] = ["Geotom Wenner (*.wen)", 'Geotom']
bertImportDataFileSuffixesDict['.stg'] = ["SuperSting (*.stg)", 'SuperSting']
bertImportDataFileSuffixesDict['.amp'] = ["ABEM AMP (*.AMP)", 'ABEM']
bertImportDataFileSuffixesDict['.res2dinv'] = ["Res2dInv (*.res2dinv)",
'Res2dInv']
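# A minimal lookup sketch (file name hypothetical): importData() resolves the
# import function for a file suffix as
# 'import' + bertImportDataFileSuffixesDict[ext][1],
# e.g. ext='.bin' yields 'importSyscalPro'.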
def importData(filename, format='auto', verbose=False, debug=False):
"""
Import datafile into GIMLi unified data format
Parameters
----------
format : string, optional [auto]
*Gimli - gimli-unified data format
*Res2dInv - res2dinv (partly)
*PolePoleGeotom - Geotom Pole-Pole
*SyscalPro - IRIS SYSCAL pro
*SuperSting - AGI SuperSting
*Lippmann - Lippmann 4-point light
*Iris - IRIS test unknown
*ABEM - ABEM test unknown
*ABEMTerrameterSAS - ABEM test unknown
*GeoSys - GeoSYS test unknown
verbose : boolean, optional [False]
Be verbose during import.
debug : boolean, optional [False]
Gives some more debug information.
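
    Examples
    --------
    A minimal usage sketch (file names are hypothetical)::

        data = importData('profile1.dat')  # format guessed from the suffix
        data = importData('raw.bin', format='SyscalPro', verbose=True)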
"""
def tryImport(filename, funct, debug=False):
if debug:
print("Try to import ", filename, " by ", funct)
d = None
try:
d = funct(filename, verbose)
if verbose:
print("success: ", str(funct))
except Exception as e:
d = None
if debug:
traceback.print_exc()
print(type(e))
print(e)
return d
# def tryImport(...)
d = None
if not os.path.exists(filename):
        raise Exception('File does not exist: ' + filename)
if format.lower() == 'auto':
# first try the one associated with the file extension
ext = filename[filename.rfind('.'):].lower()
if ext in bertImportDataFileSuffixesDict:
fun = bertImportDataFileSuffixesDict[ext][1]
importFunction = 'import' + fun
if importFunction in dir(sys.modules[__name__]):
if verbose:
print("Try to import data by using: ", importFunction)
d = tryImport(filename, eval(importFunction), debug=debug)
else:
print("Error: Import function does not exist: "+importFunction)
# raise Exception("Import function does not exist: " +
# importFunction)
if d is None:
d = tryImport(filename, importGimli, debug=debug)
if d is None:
d = tryImport(filename, importRes2dInv, debug=debug)
if d is None:
d = tryImport(filename, import4PointLight, debug=debug)
if d is None:
d = tryImport(filename, importPolePoleGeotom, debug=debug)
if d is None:
d = tryImport(filename, importSyscalPro, debug=debug)
if d is None:
d = tryImport(filename, importIris, debug=debug)
if d is None:
d = tryImport(filename, importABEMAscii, debug=debug)
if d is None:
d = tryImport(filename, importABEM, debug=debug)
if d is None:
d = tryImport(filename, importABEMTerrameterSAS, debug=debug)
if d is None:
d = tryImport(filename, importSuperSting, debug=debug)
if d is None:
d = tryImport(filename, importResecsAscii, debug=debug)
if d is None:
d = tryImport(filename, importGeoSys, debug=debug)
if d is None:
d = tryImport(filename, importFLW, debug=debug)
else:
d = tryImport(filename,
getattr(sys.modules[__name__], 'import' + format),
debug=debug)
if d is None:
raise Exception('Cannot determine data format for file.', filename)
if verbose:
print("imported: ", d)
return d
# def importData(...)
def importGimli(filename, verbose=False):
    """Import a file that is already in the GIMLi unified data format."""
    data = pb.DataContainerERT(filename)
    return data
# def importGimli(...)
def importPolePoleGeotom(filename, verbose=False):
    """Import pole-pole data from Geotom(?), e.g. EA-LOS_A1.dat.

    Expected format::

        "B_X" "B_Y" "N_X" "N_Y" "Rhos"
        13 0 13 0.5 110.17
        ...
        13 0 13 1 90.71
    """
data = pb.DataContainerERT()
with open(filename, 'r') as fi:
content = fi.readlines()
header = content[0].split('\r\n')[0].split()
if len(header) == 5:
if header[0] == '"B_X"' and header[1] == '"B_Y"' and \
header[2] == '"N_X"' and header[3] == '"N_Y"' and \
header[4] == '"Rhos"':
data.resize(len(content) - 1)
for i, row in enumerate(content[1:]):
vals = row.split()
eaID = -1
ebID = data.createSensor(pg.RVector3(float(vals[0]),
float(vals[1]),
0.0)).id()
emID = -1
enID = data.createSensor(pg.RVector3(float(vals[2]),
float(vals[3]),
0.0)).id()
data.createFourPointData(i, eaID, ebID, emID, enID)
data('rhoa')[i] = float(vals[4])
data.sortSensorsX()
else:
raise Exception("Probably no Geotom-Pole-Pole file: " +
filename + " header: " + str(header))
else:
raise Exception("Probably no Geotom-Pole-Pole file: " + filename +
" 5 != " + str("len(header)"))
return data
# def importPolePoleGeotom(...)
def importRes2dInv(filename, verbose=False, return_header=False):
"""Read res2dinv format
Parameters
----------
filename : str
verbose : bool [False]
return_header : bool [False]
Returns
-------
pg.DataContainerERT and (in case of return_header=True)
header dictionary
Format
------
str - title
float - unit spacing [m]
int - Array Number (1-Wenner, 3-Dipole-dipole atm only)
int - Number of Datapoints
float - x-location given in terms of first electrode
use 1 if mid-point location is given
int - 0 for no IP, use 1 if IP present
str - Phase Angle if IP present
str - mrad if IP present
0,90.0 - if IP present
dataBody
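
    Examples
    --------
    A usage sketch (file name is hypothetical)::

        data, header = importRes2dInv('line1.dat', return_header=True)
        print(header['spacing'], data.size())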
"""
def getNonEmptyRow(i, comment='#'):
s = next(i)
        while s[0] == comment:
s = next(i)
return s.split('\r\n')[0]
# def getNonEmptyRow(...)
with open(filename, 'r') as fi:
content = fi.readlines()
it = iter(content)
header = {}
header['name'] = getNonEmptyRow(it, comment=';')
header['spacing'] = float(getNonEmptyRow(it, comment=';'))
typ = int(getNonEmptyRow(it, comment=';'))
if typ == 11:
# independent electrode positions
header['subtype'] = int(getNonEmptyRow(it, comment=';'))
header['dummy'] = getNonEmptyRow(it, comment=';')
isR = int(getNonEmptyRow(it, comment=';'))
nData = int(getNonEmptyRow(it, comment=';'))
xLoc = float(getNonEmptyRow(it, comment=';'))
hasIP = int(getNonEmptyRow(it, comment=';'))
if hasIP:
header['ipQuantity'] = getNonEmptyRow(it, comment=';')
header['ipUnit'] = getNonEmptyRow(it, comment=';')
header['ipData'] = getNonEmptyRow(it, comment=';')
ipline = header['ipData'].rstrip('\n').rstrip('\r').split(' ')
if len(ipline) > 2: # obviously spectral data?
header['ipNumGates'] = int(ipline[0])
header['ipDelay'] = float(ipline[1])
header['onTime'] = float(ipline[-2])
header['offTime'] = float(ipline[-1])
header['ipDT'] = np.array(ipline[2:-2], dtype=float)
header['ipGateT'] = np.cumsum(np.hstack((header['ipDelay'],
header['ipDT'])))
data = pb.DataContainerERT()
data.resize(nData)
if typ == 9 or typ == 10:
raise Exception("Don't know how to read:" + str(typ))
if typ == 11 or typ == 12 or typ == 13: # mixed array
res = pg.RVector(nData, 0.0)
ip = pg.RVector(nData, 0.0)
specIP = []
for i in range(nData):
vals = getNonEmptyRow(it, comment=';').split()
# row starts with 4
if int(vals[0]) == 4:
eaID = data.createSensor(pg.RVector3(float(vals[1]),
float(vals[2])))
ebID = data.createSensor(pg.RVector3(float(vals[3]),
float(vals[4])))
emID = data.createSensor(pg.RVector3(float(vals[5]),
float(vals[6])))
enID = data.createSensor(pg.RVector3(float(vals[7]),
float(vals[8])))
elif int(vals[0]) == 3:
eaID = data.createSensor(pg.RVector3(float(vals[1]),
float(vals[2])))
ebID = -1
emID = data.createSensor(pg.RVector3(float(vals[3]),
float(vals[4])))
enID = data.createSensor(pg.RVector3(float(vals[5]),
float(vals[6])))
elif int(vals[0]) == 2:
eaID = data.createSensor(pg.RVector3(float(vals[1]),
float(vals[2])))
ebID = -1
emID = data.createSensor(pg.RVector3(float(vals[3]),
float(vals[4])))
enID = -1
else:
                raise Exception("Don't know how to handle row: " + str(vals[0]))
res[i] = float(vals[int(vals[0])*2+1])
if hasIP:
# ip[i] = float(vals[int(vals[0])*2+2])
ipCol = int(vals[0])*2+2
ip[i] = float(vals[ipCol])
if 'ipNumGates' in header:
specIP.append(vals[ipCol:])
data.createFourPointData(i, eaID, ebID, emID, enID)
if isR:
data.set('r', res)
else:
data.set('rhoa', res)
if hasIP:
data.set('ip', ip)
if 'ipNumGates' in header:
A = np.array(specIP, dtype=float)
A[A > 1000] = -999
A[A < -1000] = -999
for i in range(header['ipNumGates']):
data.set('ip'+str(i+1), A[:, i])
data.sortSensorsX()
data.sortSensorsIndex()
if return_header:
return data, header
else:
return data
    # number of columns per data row for each type
nntyp = [0, 3, 3, 4, 3, 3, 4, 4, 3, 0, 0, 8, 10]
nn = nntyp[typ] + hasIP
dataBody = pg.RMatrix(nn, nData)
for i in range(nData):
vals = getNonEmptyRow(it, comment=';').split()
for j in range(nn):
dataBody[j][i] = float(vals[j])
XX = dataBody[0]
EL = dataBody[1]
SP = pg.RVector(nData, 1.0)
if nn - hasIP == 4:
SP = dataBody[2]
AA = None
BB = None
NN = None
MM = None
if typ == 1: # Wenner
AA = XX - xLoc * EL * 1.5
MM = AA + EL
NN = MM + EL
BB = NN + EL
elif typ == 2: # Pole-Pole
AA = XX - xLoc * EL * 0.5
MM = AA + EL
    elif typ == 3:  # Dipole-Dipole
        AA = XX - xLoc * EL * (SP / 2. + 1.)
        BB = AA + EL
        MM = BB + SP * EL
        NN = MM + EL
elif typ == 4: # WENNER-BETA
AA = XX - xLoc * EL * 1.5
BB = AA + EL
MM = BB + EL
NN = MM + EL
elif typ == 5: # WENNER-GAMMA
AA = XX - xLoc * EL * 1.5
MM = AA + EL
BB = MM + EL
NN = BB + EL
elif typ == 6: # POLE-DIPOLE
AA = XX - xLoc * SP * EL - (SP - 1.) * (SP < 0.) * EL
MM = AA + SP * EL
NN = MM + pg.sign(SP) * EL
elif typ == 7: # SCHLUMBERGER
AA = XX - xLoc * EL * (SP + 0.5)
MM = AA + SP * EL
NN = MM + EL
BB = NN + SP * EL
else:
        raise Exception('Datatype ' + str(typ) + ' not yet supported')
for i in range(len(AA)):
if AA is not None:
eaID = data.createSensor(pg.RVector3(AA[i], 0.0))
else:
eaID = -1
if BB is not None:
ebID = data.createSensor(pg.RVector3(BB[i], 0.0))
else:
ebID = -1
if MM is not None:
emID = data.createSensor(pg.RVector3(MM[i], 0.0))
else:
emID = -1
if NN is not None:
enID = data.createSensor(pg.RVector3(NN[i], 0.0))
else:
enID = -1
data.createFourPointData(i, eaID, ebID, emID, enID)
data.set('rhoa', dataBody[nn - hasIP - 1])
if hasIP:
data.set('ip', dataBody[nn - 1])
data.sortSensorsX()
if return_header:
return data, header
else:
return data
# def importRes2dInv(...)
def importGeotom(filename, verbose=False):
    """Read data from a Geotom instrument file (*.flw, *.wen, *.slm etc.).

str - title
float - unit spacing [m]
int - Array Number (1-Wenner, 3-Dipole-dipole atm only)
int - Number of Datapoints
float - x-location given in terms of first electrode
use 1 if mid-point location is given
int - 0 for no IP, use 1 if IP present
str - Phase Angle if IP present
str - mrad if IP present
0,90.0 - if IP present
dataBody
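
    Examples
    --------
    A usage sketch (file name is hypothetical)::

        data = importGeotom('profile1.wen', verbose=True)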
"""
def getNonEmptyRow(i, comment='#'):
s = next(i)
        while s[0] == comment:
s = next(i)
return s.split('\r\n')[0]
# def getNonEmptyRow(...)
with open(filename, 'r') as fi:
content = fi.readlines()
it = iter(content)
s = next(it)
while s[0] == '/':
s = next(it)
header = {}
header['name'] = getNonEmptyRow(it, comment='/')
header['spacing'] = float(getNonEmptyRow(it, comment=';'))
typ = int(getNonEmptyRow(it, comment=';'))
if typ == 11:
# independent electrode positions
header['subtype'] = int(getNonEmptyRow(it, comment=';'))
header['dummy'] = getNonEmptyRow(it, comment=';')
isR = int(getNonEmptyRow(it, comment=';'))
nData = int(getNonEmptyRow(it, comment=';'))
xLoc = float(getNonEmptyRow(it, comment=';'))
hasIP = int(getNonEmptyRow(it, comment=';'))
if hasIP:
header['ipStr1'] = getNonEmptyRow(it, comment=';')
header['ipStr2'] = getNonEmptyRow(it, comment=';')
header['ipStr3'] = getNonEmptyRow(it, comment=';')
data = pb.DataContainerERT()
data.resize(nData)
if typ == 9 or typ == 10:
raise Exception('Cannot yet handle datatype:' + str(typ))
if typ == 11 or typ == 12 or typ == 13: # mixed array
res = pg.RVector(nData, 0.0)
for i in range(nData):
vals = getNonEmptyRow(it, comment=';').split()
# row starts with 4
if int(vals[0]) == 4:
eaID = data.createSensor(pg.RVector3(float(vals[1]), 0.0))
ebID = data.createSensor(pg.RVector3(float(vals[3]), 0.0))
emID = data.createSensor(pg.RVector3(float(vals[5]), 0.0))
enID = data.createSensor(pg.RVector3(float(vals[7]), 0.0))
res[i] = float(vals[9])
else:
                raise Exception("Don't know how to handle row: " + str(vals[0]))
data.createFourPointData(i, eaID, ebID, emID, enID)
if isR:
data.set('r', res)
else:
data.set('rhoa', res)
data.sortSensorsX()
data.sortSensorsIndex()
return data
    # number of columns per data row for each type
nntyp = [0, 3, 3, 4, 3, 3, 4, 4, 3, 0, 0, 8, 10]
nn = nntyp[typ] + hasIP + 3 # current, voltage, error
dataBody = pg.RMatrix(nn, nData)
for i in range(nData):
vals = getNonEmptyRow(it, comment=';').replace(';', '').split()
if vals[-1] != '!':
for j in range(min(nn, len(vals))):
dataBody[j][i] = float(vals[j])
XX = dataBody[0]
EL = dataBody[1]
SP = pg.RVector(nData, 1.0)
# if nn - hasIP == 4+3:
if nntyp[typ] == 4:
SP = dataBody[2]
AA = None
BB = None
NN = None
MM = None
if typ == 1: # Wenner
AA = XX - xLoc * EL * 1.5
MM = AA + EL
NN = MM + EL
BB = NN + EL
elif typ == 2: # Pole-Pole
AA = XX - xLoc * EL * 0.5
MM = AA + EL
    elif typ == 3:  # Dipole-Dipole
        AA = XX - xLoc * EL * (SP / 2. + 1.)
        BB = AA + EL
        MM = BB + SP * EL
        NN = MM + EL
elif typ == 4: # WENNER-BETA
AA = XX - xLoc * EL * 1.5
BB = AA + EL
MM = BB + EL
NN = MM + EL
elif typ == 5: # WENNER-GAMMA
AA = XX - xLoc * EL * 1.5
MM = AA + EL
BB = MM + EL
NN = BB + EL
elif typ == 6: # POLE-DIPOLE
AA = XX - xLoc * SP * EL - (SP - 1.) * (SP < 0.) * EL
MM = AA + SP * EL
NN = MM + pg.sign(SP) * EL
elif typ == 7: # SCHLUMBERGER
AA = XX - xLoc * EL * (SP + 0.5)
MM = AA + SP * EL
NN = MM + EL
BB = NN + SP * EL
else:
        raise Exception('Datatype ' + str(typ) + ' not yet supported')
for i in range(len(AA)):
if AA is not None:
eaID = data.createSensor(pg.RVector3(AA[i], 0.0))
else:
eaID = -1
if BB is not None:
ebID = data.createSensor(pg.RVector3(BB[i], 0.0))
else:
ebID = -1
if MM is not None:
emID = data.createSensor(pg.RVector3(MM[i], 0.0))
else:
emID = -1
if NN is not None:
enID = data.createSensor(pg.RVector3(NN[i], 0.0))
else:
enID = -1
if eaID != ebID:
data.createFourPointData(i, eaID, ebID, emID, enID)
data.set('rhoa', dataBody[nntyp[typ]-1])
data.set('i', dataBody[nntyp[typ]+hasIP]*1e-3)
data.set('u', dataBody[nntyp[typ]+hasIP+1]*1e-3)
data.set('err', dataBody[nntyp[typ]+hasIP+2]*1e-2)
if hasIP:
        data.set('ip', dataBody[nntyp[typ]])  # the IP column directly follows rhoa
data.sortSensorsX()
return data
# def importGeotom(...)
def importSyscalPro(filename, verbose=False):
    """Read IRIS Syscal Pro or Elrec Pro binary file.

    Ported from a Matlab version by Tobias Pfaff (Uni Heidelberg).
    """
import struct
data = pb.DataContainerERT()
with open(filename, 'rb') as fi:
readData = fi.read()
# Filesize: 1029 (bytes header) + nBlocks * 304 (bytes per block)
nBlocks = (len(readData) - 1029) / 304.0
if nBlocks > 0 and (nBlocks == round(nBlocks)):
headerIdentification = str(readData[0:20])
        # note: this plausibility check is currently disabled ('and False')
        if 'Pro' not in headerIdentification and False:
raise Exception('This is probably no SYSCAL Pro data file: ' +
filename + " : " + headerIdentification)
        measuringTime = readData[20:40]
        if verbose:
            print(measuringTime)
startBlock = 1029 # hex 404
else:
raise Exception('Size of the SYSCAL Pro data file is not valid: ' +
filename + " : " + str(len(readData)) + " ; " +
str(nBlocks))
# the file size and header seems to be ok. start parsing.
nBlocks = int(nBlocks)
data.resize(nBlocks)
# Main data
sp = pg.RVector(nBlocks) # self potential
vp = pg.RVector(nBlocks) # voltage difference
curr = pg.RVector(nBlocks) # injected current
gm = pg.RVector(nBlocks) # global chargeability
dev = pg.RVector(nBlocks) # std. deviation
# Auxiliary data
stacks = pg.RVector(nBlocks) # number of stacks measured
rs_check = pg.RVector(nBlocks) # rs_check reception dipole
vab = pg.RVector(nBlocks) # absolute injected voltage
bat_tx = pg.RVector(nBlocks) # tx battery voltage
bat_rx = pg.RVector(nBlocks) # rx battery voltage
temp = pg.RVector(nBlocks) # temperature
for i in range(nBlocks):
block = readData[startBlock:startBlock+304]
# short(max cycles) , short(min cycles), float32(measurement time),
# float32(delay for measurement)
[maxCyles, minCycles, measTime, delayTime, unKnownInt] = \
struct.unpack_from('hhffi', block, offset=0) # 16 byte
# Read electrode positions for each data
# (C1_x C2_x P1_x P2_x C1_y C2_y P1_y P2_y C1_z C2_z P1_z P2_z)
ePos = struct.unpack_from('ffff ffff ffff', block, offset=16)
eaID, ebID, emID, enID = -1, -1, -1, -1
if (ePos[0] < 99999.99):
eaID = data.createSensor(pg.RVector3(ePos[0], ePos[4],
ePos[8]))
if (ePos[1] < 99999.99):
ebID = data.createSensor(pg.RVector3(ePos[1], ePos[5],
ePos[9]))
if (ePos[2] < 99999.99):
emID = data.createSensor(pg.RVector3(ePos[2], ePos[6],
ePos[10]))
if (ePos[3] < 99999.99):
enID = data.createSensor(pg.RVector3(ePos[3], ePos[7],
ePos[11]))
data.createFourPointData(i, eaID, ebID, emID, enID)
# Read data float32 (sp, vp, in, rho, gm, dev)
[sp[i], vp[i], curr[i], rhoDummy, gm[i], dev[i]] = \
struct.unpack_from('fff fff', block, offset=64)
# This may be interesting for time domain IP
# chargeability time windows
# print struct.unpack_from('20f', block, offset = 88)
# partial chargeability
# print struct.unpack_from('20f', block, offset = 168)
# status bits (int16)
# (80:multichannel(lower bits=channel #) 16:single)
# number of measurements (int16) (starting with 0)
# [status, nMeas] = struct.unpack_from('hh', block, offset = 248)
# The name for the sequence used by Syscal-Pro
# name = block[252 : 272]
# Read auxiliary information
[stacks[i], rs_check[i], vab[i], bat_tx[i], bat_rx[i], temp[i]] = \
struct.unpack_from('ffffff', block, offset=272)
# date & time in some strange format
# gtime = (dtime(2)*(2^32/fact)+dtime(1)/fact - d0)/24/3600 -
# datenum(2004,0,0); % date in day-since-04 format
# dtime = struct.unpack_from('II', block, offset=296)
# print (dtime[1] * (2**32/fact) + dtime[0] / fact - d0) /24./3600.
# print dtime
# 296 + 8 = 304
startBlock += 304
# END for each data block
data.add('sp', sp, 'Self potential (Please check this).|V')
data.add('u', vp * 1e-3, 'Measured voltage difference|V')
data.add('i', curr * 1e-3, 'Injected current|A')
data.add('gm', gm, 'Chargeability|mV/V')
data.add('ip', gm, 'Induced polarisation|mV/V')
data.add('err', dev, 'Standard deviation')
data.add('stacks', stacks)
data.add('rs_check', rs_check)
data.add('vab', vab, 'Injected Voltage|V')
data.add('bat_tx', bat_tx, 'Battery Voltage|V')
data.add('bat_rx', bat_rx, 'Battery Voltage|V')
data.add('temp', temp, 'Temperature|C')
data.sortSensorsX()
data.removeInvalid()
return data
# def importSyscalPro(filename):
def importIris(filename, verbose=False):
    """Import IRIS Instruments ASCII output file.

    Example::

        El-array Spa.1 Spa.2 Spa.3 Spa.4 Rho Dev. M Sp Vp In
        Schlum. VES 0.00 43.00 21.00 22.00 25.08 0.0 0.00 -56.0 17.713 1025.036
    """
with open(filename, 'r') as fi:
content = fi.readlines()
    # inconsistent data format: split the 'El-array' header token in two so
    # that header and data columns line up
    content[0] = content[0].replace('-', ' ')
d = readAsDictionary(content)
if pg.debug():
print(d.keys())
nData = len(d['array'])
data = pb.DataContainerERT()
data.resize(nData)
for i in range(nData):
eaID = data.createSensor([d['Spa.1'][i], 0.0, 0.0])
ebID = data.createSensor([d['Spa.2'][i], 0.0, 0.0])
emID = data.createSensor([d['Spa.3'][i], 0.0, 0.0])
enID = data.createSensor([d['Spa.4'][i], 0.0, 0.0])
data.createFourPointData(i, eaID, ebID, emID, enID)
data.set('i', np.array(d['In']) / 1000.0)
data.set('u', np.array(d['Vp']) / 1000.0)
data.set('sp', d['Sp'])
data.set('err', d['Dev.'])
data.sortSensorsX()
return data
# def importIris(...)
def importABEMAscii(filename, verbose=False, return_header=False):
    """Import DataContainer from ABEM or Resecs ASCII (txt) export."""
    return importABEMTerrameterSAS(filename, verbose=verbose)
    # NOTE: delegated above; the legacy parser below (which also fills the
    # header dict for return_header) is currently unreachable
    with open(filename, 'r', encoding='iso-8859-15') as fid:
lines = fid.readlines()
header = {}
indata = False
nstop = 0
for n, line in enumerate(lines):
li = line.split('\t')
if len(li) > 8 and not indata:
tokenline = line.rstrip().replace('#', '_')
nheader = n
if verbose:
print('header', nheader)
indata = True
fdp = line.find(': ')
if not indata and fdp >= 0 and nstop == 0:
tok = line[line[:fdp].rfind(' ')+1:fdp]
val = line[fdp+2:].rstrip()
if val.isnumeric():
val = float(val)
if val.is_integer():
val = int(val)
header[tok] = val
if indata and len(li) < 8 and nstop == 0: # no data anymore
nstop = n
if verbose:
print('stop', nstop)
indata = False
if nstop == 0:
nstop = len(lines)-3
    if verbose:
        print(nheader, len(lines), nstop, len(lines)-nstop-3)
        print(len(tokenline.split('\t')))
str2date = lambda x: datetime.strptime(x.decode("utf-8"),
'%Y-%m-%d %H:%M:%S').timestamp()
Data = np.genfromtxt(filename, names=tokenline.split('\t'),
delimiter='\t', converters={"Time": str2date},
skip_header=nheader+1,
skip_footer=len(lines)-nstop-3)
fields = Data.dtype.names
if verbose:
print("Fields", fields)
print(header)
elpos = []
for el in ['A', 'B', 'M', 'N']: # the ABEM variant
alle = [Data[el+i] for i in ['x', 'y', 'z'] if el+i in fields]
if len(alle) > 0:
elpos.append(np.column_stack(alle))
if len(elpos) == 0: # try the Resecs variant
for el in ['C1', 'C2', 'P1', 'P2']:
alle = [Data[el+i] for i in ['x', 'y', 'z'] if el+i in fields]
if len(alle) > 0:
elpos.append(np.column_stack(alle))
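    # pack each (x, y, z) position into a single scalar id so that identical
    # electrode positions collapse under np.unique; decoded back into
    # xe/ye/ze coordinates below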
eln = [(elp[:, 0]*9990+elp[:, 1])*9990+elp[:, 2]*10 for elp in elpos]
nall = np.unique(eln)
data = pg.DataContainerERT()
for ni in nall:
if np.isfinite(ni):
ze = np.mod(ni, 999) / 10
ye = np.mod((ni-ze) / 9990., 999) / 10
xe = ((ni-ze)/999.-ye)/9990.0 / 10
data.createSensor([xe, ye, ze])
data.resize(len(Data))
for i in range(len(Data)):
abmn = np.array([np.nonzero(nall == eli[i])[0] for eli in eln])
abmn[abmn >= data.sensorCount()] = -1 # infinite electrodes
data.createFourPointData(i, *[int(a) for a in abmn])
# %% translate data tokens with possible unit defaults
tokenmap = {'ImA': 'i', 'VoltageV': 'u', 'ROhm': 'r', 'Rho': 'rhoa',
'AppROhmm': 'rhoa', 'Var': 'err', 'I': 'i', 'U': 'u',
'M': 'ma', 'P': 'ip', 'D': 'err', 'UV': 'u', 'RO': 'r',
'RhoaOm': 'rhoa', 'IP_sum_window_11': 'ip', 'Time': 't'}
unitmap = {'ImA': 1e-3, 'Var': 0.01, 'U': 1e-3, 'I': 1e-3, 'D': 0.01}
for fi in fields:
if fi in tokenmap:
data.set(tokenmap[fi], Data[fi] * unitmap.get(fi, 1.0))
if return_header:
return data, header
return data
def importResecsAscii(filename, verbose=False):
"""Import Geoserve Resecs Ascii Export file."""
nhead = 0
fid = open(filename)
for i, line in enumerate(fid):
if line[0:4] == 'Type':
nhead = i
tokens = line.split("\t")
if line[0:3] == 'GND':
nhead += 1
fid.close()
nhead += 1
ndata = i - nhead + 1
if verbose:
print(nhead, " header lines, ", ndata, " data")
# Resecs ASCII format output tokens
data_tokens = ['U', 'I', 'Rho', 'P', 'D']
mult = [1e-3, 1e-3, 1., 1., 1e-2]
datnr, toknr = [], []
for nr, tok in enumerate(tokens):
if (data_tokens.count(tok)):
datnr.append(nr)
toknr.append(data_tokens.index(tok))
if verbose:
print("found ", [data_tokens[nr] for nr in toknr], " at ", datnr)
# load all data into big matrix
DATA = np.loadtxt(filename, usecols=datnr, skiprows=nhead).T
# position tokens set by Resecs instrument
pos_tokens = ['C1(x)', 'C1(y)', 'C1(z)', 'C2(x)', 'C2(y)', 'C2(z)',
'P1(x)', 'P1(y)', 'P1(z)', 'P2(x)', 'P2(y)', 'P2(z)']
posnr = np.zeros((len(pos_tokens),), dtype=int)
for nr, tok in enumerate(pos_tokens):
if (tokens.count(tok)):
posnr[nr] = tokens.index(tok)
# read positions from file
POS = np.loadtxt(filename, usecols=posnr, skiprows=nhead)
# extract positions for A/B/M/N and generate ids from it
c1, c2, p1, p2 = POS[:, 0:3], POS[:, 3:6], POS[:, 6:9], POS[:, 9:12]
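    # pack each (x, y, z) triple into one scalar id so np.unique can collapse
    # identical electrode positions; decoded back into xe/ye/ze below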
nc1 = (c1[:, 0] * 9990 + c1[:, 1])*9990 + c1[:, 2] * 10
nc2 = (c2[:, 0] * 9990 + c2[:, 1])*9990 + c2[:, 2] * 10
np1 = (p1[:, 0] * 9990 + p1[:, 1])*9990 + p1[:, 2] * 10
np2 = (p2[:, 0] * 9990 + p2[:, 1])*9990 + p2[:, 2] * 10
# generate unique id
nall = np.unique(np.hstack((nc1, nc2, np1, np2)))
data = pb.DataContainerERT()
for i in range(len(nall)):
ze = np.mod(nall[i], 999) / 10
ye = np.mod((nall[i]-ze) / 9990., 999) / 10
xe = ((nall[i]-ze)/999.-ye)/9990.0 / 10
pos = pg.RVector3(xe, ye, ze)
data.createSensor(pos)
# rename tokens to match gimli tokens
data_tokens[2:5] = ['rhoa', 'ip', 'err']
data.resize(ndata)
for i in range(ndata):
data.createFourPointData(i, int(np.nonzero(nall == nc1[i])[0]),
int(np.nonzero(nall == nc2[i])[0]),
int(np.nonzero(nall == np1[i])[0]),
int(np.nonzero(nall == np2[i])[0]))
for j in range(len(toknr)):
itok = data_tokens[toknr[j]].lower()
data(itok)[i] = DATA[j, i] * mult[toknr[j]]
if verbose:
print(data)
savestr = 'a b m n valid '
for i in range(len(toknr)):
savestr += data_tokens[toknr[i]].lower() + ' '
data.checkDataValidity(False)
data.setInputFormatString(savestr)
if data.size() == 0:
raise Exception('No Data found in importResecsAscii')
return data
def import4PointLight(filename, verbose=False):
"""Import Lippmann 4-point light instrument data (*.tx0)"""
known_tokens = ['A', 'B', 'M', 'N', 'U', 'I', 'rho', 'phi', 'dU', 'dU90']
data = pb.DataContainerERT()
nel = 0
datnr, toknr = [], []
# with open(filename, 'r') as fi:
with codecs.open(filename, 'r', encoding='utf8', errors='replace') as fi:
content = fi.readlines()
# fi.close() # not needed using with
dataSect = -3
# tok_units = {'err': 0.01, } # to be later used
for i, line in enumerate(content):
        # print(i, line)  # (debug)
line = line.rstrip('\n').replace(',', '.')
if (line.find('* Electrode last num') >= 0):
nel = int(line.split()[-1]) # number of electrodes
if verbose:
print(nel, "electrodes")
for n in range(nel):
# create dummy sensors in case of Roll-along files where first
# sensors are missing
data.createSensor(pg.RVector3(-1000.+n, 0., 0.))
if line.find('* Count') == 0:
nData = int(line.split()[-1]) # number of data
if (line.find('* Electrode [') == 0):
sxyz = line.split()[-3:]
if sxyz[-1] == 'X':
sxyz = line.split()[-4:-1]
pos = pg.RVector3(float(sxyz[0]), float(sxyz[1]), float(sxyz[2]))
data.setSensorPosition(int(line[13:16])-1, pos)
if (line.find('* num') == 0):
            if 1:  # dictionary-based reader; legacy parser in the else branch is disabled
d = readAsDictionary(content[i+1:], content[i].split()[1:])
if verbose:
print("Token:", d.keys())
# usual token are
# ['num', 'A', 'B', 'M', 'N', 'I', 'U', 'dU', 'U90', 'dU90', 'rho', 'phi', 'f',
# 'n', 'nAB', 'Profile', 'Spread', 'PseudoZ', 'X', 'Y', 'Z', 'Date', 'Time',
# 'U(Tx)']
if d['num'][-1] != len(d['num']):
print(d['num'][-1], len(d['num']))
raise Exception('Insufficient data found!')
data.resize(len(d['num']))
for i in range(data.size()):
data.createFourPointData(
i, int(d['A'][i]-1), int(d['B'][i]-1),
int(d['M'][i]-1), int(d['N'][i]-1))
data.set('i', np.array(d['I'])/1000.0)
data.set('u', np.array(d['U'])/1000.0)
data.set('ip', np.array(d['phi']))
data.set('rhoa', np.array(d['rho']))
data.set('err', np.array(d['dU']))
else:
# evaluate tokens (physical fields) in file
for nr, tok in enumerate(line[1:].split()):
if (known_tokens.count(tok)):
datnr.append(nr)
toknr.append(known_tokens.index(tok))
known_tokens[6:10] = ['rhoa', 'ip', 'err', 'iperr'] # to BERT
if verbose:
print(datnr, toknr, [known_tokens[t] for t in toknr])
dataSect = -2 # prepare to be in data section 2 lines later
# unit line should be evaluated to gain multiplicator
data.resize(nData) # initialize vectors with appropriate size
if dataSect >= 0: # read actual data if in data section
sabmn = line.split()
if dataSect >= nData:
data.resize(dataSect+1)
# create electrode array
data.createFourPointData(dataSect,
int(sabmn[1])-1, int(sabmn[2])-1,
int(sabmn[3])-1, int(sabmn[4])-1)
for i in range(4, len(toknr)): # all tokens except ABMN
itok = known_tokens[toknr[i]].lower()
try:
ff = float(sabmn[datnr[i]])
if itok.lower() == 'err':
ff = ff / 100
if itok.lower() == 'i':
ff = ff / 1000.
if itok.lower() == 'u':
ff = ff / 1000.
data(itok)[dataSect] = ff
except IndexError:
pass
if dataSect >= -2:
dataSect += 1 # so that unit line will not be read
# savestr = ''
# for i in range(len(toknr)):
# savestr += known_tokens[toknr[i]].lower() + ' '
# data.setInputFormatString(savestr)
if data.size() == 0:
        raise Exception('No data found in import4PointLight')
# data.set('rhoa', pg.abs(data('rhoa')))
# data.checkDataValidity()
return data
def importRollAlong4PointLight(basename, style='1', corI=0, start=1,
                               verbose=False):
"""Import several 4-point light roll along data files (*.tx0)
Parameters
----------
basename : str
the base file name (.tx0 extension will be stripped)
style : naming style ['1']
the roll-along files are called
'1': basename+'_1.tx0', basename+'_2.tx0' etc.
'A': basename+'_A.tx0', basename+'_B' etc.
'a': basename+'_a.tx0', basename+'_b.tx0'
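
    Examples
    --------
    A usage sketch (base name is hypothetical)::

        data = importRollAlong4PointLight('line', style='1', verbose=True)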
"""
    if basename.endswith('.tx0'):
        basename = basename[:-4]
data = import4PointLight(basename + '.tx0')
if verbose:
print(data)
for n in range(start, 100):
if style == 'A':
fname1 = basename + '_'+chr(64+n)+'.tx0'
elif style == 'a':
fname1 = basename + '_'+chr(96+n)+'.tx0'
else:
fname1 = basename + '_'+str(n)+'.tx0'
if not os.path.exists(fname1):
break
if verbose:
print(fname1)
data1 = import4PointLight(fname1)
        for i in range(n*20):  # re-use known positions for the overlapping sensors
data1.setSensorPosition(i, data.sensorPosition(i))
data.add(data1)
if verbose:
print(data)
return data
def import4PointLightOld(filename, verbose=False):
    """Import Lippmann 4-point light instrument data (*.tx0).

    DEPRECATED, will be removed soon.
    """
known_tokens = ['A', 'B', 'M', 'N', 'U', 'I', 'rho', 'phi', 'dU', 'dU90']
data = pb.DataContainerERT()
nel, ndata = 0, 0
datnr, toknr = [], []
fid = open(filename)
for i, line in enumerate(fid):
line = line.rstrip('\n')
if (line.find('* Electrode last num') == 0):
nel = int(line.split()[-1])
if (line.find('* Count') == 0):
ndata = int(line.split()[-1])
if verbose:
print("{} electrodes, {} data".format(nel, ndata))
if (line.find('* Electrode [') == 0):
sxyz = line.split()[-3:]
pos = pg.RVector3(float(sxyz[0]), float(sxyz[1]), float(sxyz[2]))
data.createSensor(pos)
if (line.find('* num') == 0):
for nr, tok in enumerate(line[1:].split()):
if (known_tokens.count(tok)):
datnr.append(nr)
toknr.append(known_tokens.index(tok))
if verbose:
print(datnr, toknr, [known_tokens[t] for t in toknr])
break
fid.close()
# load complete data matrix according to detected columns
datmat = np.loadtxt(filename, skiprows=i+2, usecols=datnr).T
# redefine tokens to match BERT definitions
known_tokens[6:10] = ['rhoa', 'ip', 'err', 'iperr']
savestr = ''
data.resize(len(datmat[0]))
# set data for known tokens
for i in range(len(datmat)):
itok = known_tokens[toknr[i]].lower()
offset, mult = 0., 1.
if (toknr[i] < 4):
offset = -1. # A B M N
if ((4, 5).count(toknr[i])):
mult = 1e-3 # U/mV I/mA
if ((8, 9).count(toknr[i])):
mult = 1e-2 # dU/% dU90/%
data.set(itok, pg.asvector(datmat[i] * mult + offset))
savestr += itok + ' '
data.setInputFormatString(savestr)
for n in range(len(datmat[0])):
data.markValid(n)
if verbose:
print(data, data.tokenList())
return data
def importABEM(filename, verbose=False):
    """Import old ABEM (AMP) format.
Filename:
Instrument ID:
Date & Time:
Base station: 0.00 0.00 0.00 0.00 0.00
Rows header/data/topography: 27 5949 0
Acquisition mode: 2
Measurement method: Section
Electrode layout: 11 Freeform GN4
Co-ordinate type: XYZ:1
Smallest electrode spacing: 1.00
Marine survey (R,h,a,b): - - - -
Protocol #1: GRAD2XA
Protocol #2: -
Protocol #3: -
Protocol #4: -
Protocol #5: -
Protocol #6: -
Protocol #7: -
Protocol #8: -
Operator:
Client:
Comment #1:
Comment #2:
Comment #3:
Comment #4:
No. Time A(x) B(x) M(x) N(x) I(mA) Voltage(V) App.R.(ohmm) Error(%)
*
"""
with open(filename, 'r') as fi:
content = fi.readlines()
nData = 0
nHeader = 0
if len(content) > 3:
sizes = content[4].split('\r\n')[0].split()
if sizes[0] == "Rows":
# Rows header/data/topography: 27 5949 0
nHeader = int(sizes[2])
nData = int(sizes[3])
else:
raise Exception("Read ABEM .. size format unknows" + sizes)
else:
raise Exception("Read ABEM .. file content too small " +
str(len(content)))
if verbose:
print("ABEM file format assuming data", nData)
count = 0
if len(content) >= nHeader + nData:
data = pb.DataContainerERT()
data.resize(nData)
for i, row in enumerate(content[nHeader:nHeader + nData]):
vals = row.split()
if len(vals) == 10:
eaID = data.createSensor(pg.RVector3(float(vals[2]), 0.0, 0.0))
ebID = data.createSensor(pg.RVector3(float(vals[3]), 0.0, 0.0))
emID = data.createSensor(pg.RVector3(float(vals[4]), 0.0, 0.0))
enID = data.createSensor(pg.RVector3(float(vals[5]), 0.0, 0.0))
data.createFourPointData(count, eaID, ebID, emID, enID)
data('i')[count] = float(vals[6]) * 1e-3
data('u')[count] = float(vals[7])
data('err')[count] = float(vals[9]) / 100.
count += 1
else:
raise Exception("Read ABEM .. cannot interpret data tokens " +
str(len(vals)), row)
data.sortSensorsX()
else:
raise Exception("Read ABEM .. file content to small " +
str(len(content)) + " expected: " + str(nHeader+nData))
if verbose:
print(data, data.tokenList())
return data
def importAsciiColumns(filename, verbose=False, return_header=False):
    """Import any ERT data file organized in columns with column header.

    Input can be:
* Terrameter LS or SAS Ascii Export format, e.g.
Time MeasID DPID Channel A(x) A(y) A(z) B(x) B(y) B(z) M(x) M(y) M(z) \
N(x) N(y) N(z) F(x) F(y) F(z) Note I(mA) Uout(V) U(V) SP(V) R(O) \
Var(%) Rhoa Cycles Pint Pext(V) T(°C) Lat Long
2016-09-14 07:01:56 73 7 1 8 1 1 20 1 1 12 1 1 \
16 1 1 14 1 2.076 99.8757 107.892 0.0920761 0 0.921907 \
0.196302 23.17 1 12.1679 12.425 42.1962 0 0
* Resecs Output format
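
    Examples
    --------
    A usage sketch (file name is hypothetical)::

        data = importAsciiColumns('terrameter_export.txt', verbose=True)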
"""
data = pb.DataContainerERT()
header = {}
with open(filename, 'r', encoding='iso-8859-15') as fi:
content = fi.readlines()
if content[0].startswith('Injection'): # Resecs lead-in
n = 0
for n in range(20):
if len(content[n]) < 2:
break
content = content[n+1:]
d = readAsDictionary(content, sep='\t')
if len(d) < 2:
d = readAsDictionary(content)
nData = len(next(iter(d.values())))
data.resize(nData)
if 'Spa.1' in d: # Syscal Pro
abmn = ['Spa.1', 'Spa.2', 'Spa.3', 'Spa.4']
elif 'A(x)' in d: # ABEM Terrameter
abmn = ['A', 'B', 'M', 'N']
elif 'xA' in d: # Workbench TX2 processed data
abmn = ['xA', 'xB', 'xM', 'xN']
elif 'C1(x)' in d or 'C1(xm)' in d: # Resecs
abmn = ['C1', 'C2', 'P1', 'P2']
    else:
        print("No electrode positions found! Keys are:", d.keys())
        raise Exception("No electrode positions found in data file.")
for i in range(nData):
if abmn[0]+'(z)' in d:
eID = [data.createSensor([d[se+'(x)'][i], d[se+'(y)'][i],
d[se+'(z)'][i]]) for se in abmn]
elif abmn[0]+'(zm)' in d:
eID = [data.createSensor([d[se+'(xm)'][i], d[se+'(ym)'][i],
d[se+'(zm)'][i]]) for se in abmn]
elif abmn[0]+'(y)' in d:
eID = [data.createSensor([d[se+'(x)'][i], d[se+'(y)'][i],
0.]) for se in abmn]
elif abmn[0]+'(ym)' in d:
eID = [data.createSensor([d[se+'(xm)'][i], d[se+'(ym)'][i],
0.]) for se in abmn]
elif abmn[0]+'(x)' in d:
eID = [data.createSensor([d[se+'(x)'][i], 0.,
0.]) for se in abmn]
elif abmn[0]+'(xm)' in d:
eID = [data.createSensor([d[se+'(xm)'][i], 0.,
0.]) for se in abmn]
else:
eID = [data.createSensor([d[se][i], 0., 0.]) for se in abmn]
data.createFourPointData(i, *eID)
    # data.save('bla.shm', 'a b m n')  # debugging output, disabled
tokenmap = {'I(mA)': 'i', 'I': 'i', 'In': 'i', 'Vp': 'u',
'VoltageV': 'u', 'U': 'u', 'U(V)': 'u', 'UV': 'u',
'R(Ohm)': 'r', 'RO': 'r', 'R(O)': 'r', 'Res': 'r',
'Rho': 'rhoa', 'AppROhmm': 'rhoa', 'Rho-a(Ohm-m)': 'rhoa',
'Rho-a(Om)': 'rhoa',
'Var(%)': 'err', 'D': 'err', 'Dev.': 'err',
'M': 'ma', 'P': 'ip', 'IP sum window': 'ip',
'Time': 't'}
# Unit conversions (mA,mV,%), partly automatically assumed
unitmap = {'I(mA)': 1e-3, 'Var(%)': 0.01, # ABEM
'U': 1e-3, 'I': 1e-3, 'D': 0.01, # Resecs
'Dev.': 0.01, 'In': 1e-3, 'Vp': 1e-3} # Syscal
for key in d.keys():
vals = np.asarray(d[key])
if key.startswith('IP sum window'):
key = 'IP sum window'
        if np.issubdtype(vals.dtype, np.floating) or \
                np.issubdtype(vals.dtype, np.integer):
if key in tokenmap: # use the standard (i, u, rhoa) key
data.set(tokenmap[key], vals * unitmap.get(key, 1.0))
else: # use the original key if not XX(x) etc.
if not re.search('([x-z])', key):
data.set(key.replace(' ', '_'), d[key])
r = data('u') / data('i')
    if 'R(O)' in d:
        if np.linalg.norm(r - d['R(O)']) < 1e4:  # plausibility check against the R column
data.set('r', r)
else:
print("Warning! File inconsistent")
data.sortSensorsX()
if return_header:
return data, header
else:
return data
def importABEMTerrameterSAS(filename, verbose=False):
"""Import Terrameter SAS Ascii Export format.
Time MeasID DPID Channel A(x) A(y) A(z) B(x) B(y) B(z) M(x) M(y) M(z) \
N(x) N(y) N(z) F(x) F(y) F(z) Note I(mA) Uout(V) U(V) SP(V) R(O) \
Var(%) Rhoa Cycles Pint Pext(V) T(°C) Lat Long
2016-09-14 07:01:56 73 7 1 8 1 1 20 1 1 12 1 1 \
16 1 1 14 1 2.076 99.8757 107.892 0.0920761 0 0.921907 \
0.196302 23.17 1 12.1679 12.425 42.1962 0 0
"""
with open(filename, 'r', encoding='iso-8859-15') as fi:
content = fi.readlines()
d = readAsDictionary(content, sep='\t')
nData = len(d['I(mA)'])
data = pb.DataContainerERT()
data.resize(nData)
for i in range(nData):
eaID = data.createSensor([d['A(x)'][i], d['A(y)'][i], d['A(z)'][i]])
ebID = data.createSensor([d['B(x)'][i], d['B(y)'][i], d['B(z)'][i]])
emID = data.createSensor([d['M(x)'][i], d['M(y)'][i], d['M(z)'][i]])
enID = data.createSensor([d['N(x)'][i], d['N(y)'][i], d['N(z)'][i]])
data.createFourPointData(i, eaID, ebID, emID, enID)
data.set('i', np.array(d['I(mA)'])/1000.0)
data.set('u', d['U(V)'])
if 'Uout(V)' in d:
data.set('uout', d['Uout(V)'])
if 'SP(V)' in d:
data.set('sp', d['SP(V)'])
    if 'Var(%)' in d:
data.set('var', d['Var(%)'])
if 'R(Ohm)' in d:
data.set('r', d['R(Ohm)'])
else:
r = data('u') / data('i')
        if 'R(O)' in d:
if np.linalg.norm(r-d['R(O)']) < 1e4:
data.set('r', r)
else:
print("Warning! File inconsistent")
data.sortSensorsX()
return data
def importGeoSys(filename, verbose=False):
    """Import GeoSys format.
Example format
--------------
Messgebiet :Test 1b
Profilname :Kopf
Datum :11.11.2014
Registrierer :FG
Geraet :GMS150
Richtung :
Wetter :gut
Anordnung :S
Konfiguration:1
Kabelbaum :G050-050
Steuerdatei :A26W0508.ESD
A M N B zeit f mn/2 ab/2 KF I/mA U/mV R/OhmM Q
*
"""
with open(filename, 'r') as fi:
content = fi.readlines()
nHeader = 13
count = 0
tokenLineStr = content[12]
tokenLine = tokenLineStr.split()
if len(content) >= nHeader:
data = pb.DataContainerERT()
data.resize(len(content))
for i, row in enumerate(content[nHeader:-1]):
vals = row.split()
if len(vals) == 0:
continue
if len(vals) > 11:
if tokenLine[0] == 'A' and tokenLine[1] == 'M' and \
tokenLine[2] == 'N' and tokenLine[3] == 'B' and \
tokenLine[9] == 'I/mA' and tokenLine[10] == 'U/mV':
vv = [float(vali) for vali in vals]
eaID = data.createSensor(pg.RVector3(vv[0], 0.0, 0.0))
emID = data.createSensor(pg.RVector3(vv[1], 0.0, 0.0))
enID = data.createSensor(pg.RVector3(vv[2], 0.0, 0.0))
ebID = data.createSensor(pg.RVector3(vv[3], 0.0, 0.0))
data.createFourPointData(count, eaID, ebID, emID, enID)
data('i')[count] = float(vals[9]) * 1e-3
data('u')[count] = float(vals[10]) * 1e-3
count += 1
else:
raise Exception("Cannot interpret tokenLine. " +
tokenLineStr)
else:
raise Exception("Read GeoSYS - cannot interpret data tokens " +
str(len(vals)), row)
data.sortSensorsX()
data.resize(count)
else:
raise Exception("Read ABEM .. file content to small " +
str(len(content)) + " expected: " + str(nHeader))
if verbose:
print(data, data.tokenList())
return data
def importFLW(filename, verbose=False):
"""Import Geotom free FLW format.
//FILENAME
//- Kabelrichtung:
// Kabel I: revers
// Kabel II: normal
// Kabel III: normal
// Kabel IV: revers
//- Kabelanordnung: parallel (2-2)
//- Optimierung: letzte Messung
//- Sortierung: nach Level
//- Stromstufen: 5.0 mA .. 50.0 mA
Type: Flow
Name: Name
Comment:
Comment:
Comment:
Spacing: x
First El: x
Nr of points: Nr
IP present: 0
1 4 2 3 5.000 51.0139 128.21 0.6
"""
with open(filename, 'r') as fi:
content = fi.readlines()
dataStart = 0
x0 = 0
spacing = 1.0
for i, c in enumerate(content):
vals = c.split()
if c[0] != '/' and len(vals) > 0:
if len(vals) > 6 and vals[0] != 'Comment:':
dataStart = i-1
break
else:
if vals[0] == 'Spacing:':
v = vals[1].split(',')
if len(v) > 1:
spacing = float(v[1])
else:
spacing = float(v[0])
                elif vals[0] == 'First' and vals[1] == 'El:':
                    v = vals[2].split(',')
if len(v) > 1:
x0 = float(v[1])
else:
x0 = float(v[0])
d = readAsDictionary(content[dataStart:], token=[])
data = pb.DataContainerERT()
nData = len(d['col0'])
# A B M N ?? I/mA U/mV ?? ?? ?? ??
# date time
# 1 2 3 4 $4 0.100 -24.4087 920.18 0.0 -28.59 0.9
# 27.05.2015 14:06:10
#
# A B M N ?? I/mA U/mV ?? ?? date time
# 1 2 3 4 $8 1.000 -41.5541 15.67 0.0 24.06.2015 14:52:54
for i in range(nData):
eaID = data.createSensor([(d['col0'][i]-1)*spacing + x0, 0.0, 0.0])
ebID = data.createSensor([(d['col1'][i]-1)*spacing + x0, 0.0, 0.0])
emID = data.createSensor([(d['col2'][i]-1)*spacing + x0, 0.0, 0.0])
enID = data.createSensor([(d['col3'][i]-1)*spacing + x0, 0.0, 0.0])
data.createFourPointData(i, eaID, ebID, emID, enID)
    if isinstance(d['col4'][0], str) and '$' in d['col4'][0]:
data.set('i', np.array(d['col5'])*1e-3)
data.set('u', np.array(d['col6'])*1e-3)
else:
data.set('i', np.array(d['col4'])*1e-3)
data.set('u', np.array(d['col5'])*1e-3)
data.sortSensorsX()
return data
def importSuperSting(datafile, verbose=True):
"""Import ERT data from AGI SuperSting instrument (*.stg file)."""
ALL = np.genfromtxt(datafile, delimiter=',', skip_header=3)
Apos = ALL[:, 9:12]
Bpos = ALL[:, 12:15]
Mpos = ALL[:, 15:18]
Npos = ALL[:, 18:21]
# what about infinite electrodes?
pos = np.vstack((Apos, Bpos, Mpos, Npos))
upos, ifwd, irev = uniqueRows(pos)
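    # uniqueRows returns the unique positions and an inverse index (irev) that
    # maps every stacked A/B/M/N row back to its unique sensor; reshaping irev
    # to (4, nData) and transposing yields one (a, b, m, n) index row per datum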
data = pb.DataContainerERT()
for ipos in upos:
data.createSensor(ipos)
data.resize(len(ALL))
ABMN = irev.reshape(4, -1).T
for i, abmn in enumerate(ABMN):
ind = [int(ii) for ii in abmn]
data.createFourPointData(i, *ind) # ind[1], ind[0], ind[2], ind[3])
data.set('i', ALL[:, 6] * 1e-3)
data.set('u', ALL[:, 4] * data('i')) # U=R*I
data.set('err', ALL[:, 5] * 0.001)
data.set('rhoa', ALL[:, 7])
if ALL.shape[1] > 30:
data.set('ip', ALL[:, 30]*1000) # M integrated in msec
for i in range(6):
data.set('ip'+str(i+1), ALL[:, 24+i])
data.markValid(data('rhoa') > 0)
data.checkDataValidity()
data.sortSensorsX()
return data
def readAsDictionary(content, token=None, sep=None):
"""Read list of strings from a file as column separated dictionary.
e.g.
token1 token2 token3 token4
va1 va2 val3 val4
va1 va2 val3 val4
va1 va2 val3 val4
Parameters
----------
content: [string]
List of strings read from file:
e.g.
with open(filename, 'r') as fi:
content = fi.readlines()
    token : [string]
        If given, the tokens will be the keys of the resulting dictionary.
        When token is None, tokens are taken from the first row values.
        When token is an empty list, the tokens will be auto-named
        'col' + str(columnNumber).

    Returns
    -------
    data : dict
        Dictionary of all data.
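
    Examples
    --------
    >>> content = ['a b c', '1 2 3', '4 5 x']
    >>> d = readAsDictionary(content)
    >>> d['a']
    [1.0, 4.0]
    >>> d['c']
    [3.0, 'x']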
"""
data = dict()
if token is None:
header = content[0].splitlines()[0].split(sep)
token = []
for i, tok in enumerate(header):
tok = tok.lstrip()
token.append(tok)
for i, row in enumerate(content[1:]):
vals = row.splitlines()[0].split(sep)
for j, v in enumerate(vals):
v = v.replace(',', '.')
if len(token) < j+1:
token.append('col' + str(j))
if token[j] not in data:
data[token[j]] = [None] * (len(content)-1)
try:
data[token[j]][i] = float(v)
            except ValueError:
if len(v) == 1 and v[0] == '-':
v = 0.0
data[token[j]][i] = v
return data
def readABEMProtocolFile(xmlfile, verbose=False):
"""Read ABEM protocol file (*.xml) as DataContainerERT."""
# import xml.etree.ElementTree as ET
ET = pg.optImport("xml.etree.ElementTree",
"import ABEM protocol files (*.xml)")
tree = ET.parse(xmlfile)
root = tree.getroot()
if verbose:
for child in root:
print(child.tag, child.text)
seq = root.find('Sequence')
A, B, M, N, C = [], [], [], [], []
for mea in seq.findall('Measure'):
tx = mea.find('Tx')
a, b = [int(k) for k in tx.text.split()]
recs = mea.findall('Rx')
C.append(len(recs))
for rx in recs:
m, n = [int(k) for k in rx.text.split()]
A.append(a)
B.append(b)
M.append(m)
N.append(n)
ABMN = np.column_stack((A, B, M, N))
nel = np.max(ABMN)
dx = 1
data = pg.DataContainerERT()
for i in range(nel):
data.createSensor([i*dx, 0])
data.resize(ABMN.shape[0])
for i, abmn in enumerate(ABMN):
data.createFourPointData(i, *[int(el)-1 for el in abmn])
if verbose:
print(data)
print("{:d} injections (mean c={:.1f})".format(len(C),
data.size()/len(C)))
return data
if __name__ == "__main__":
if len(sys.argv) == 2:
filename = sys.argv[1]
print(filename)
ext = filename[filename.rfind('.'):].lower()
fun = bertImportDataFileSuffixesDict[ext][1]
importFunction = 'import' + fun
data = eval(importFunction)(filename)
# data = importData(datafile)
print(data)
pb.show(data)