repository_name | func_path_in_repository | func_name | whole_func_string | language | func_code_url |
---|---|---|---|---|---|
PmagPy/PmagPy | programs/mk_redo.py | main | def main():
"""
NAME
mk_redo.py
DESCRIPTION
Makes thellier_redo and zeq_redo files from existing pmag_specimens format file
SYNTAX
mk_redo.py [-h] [command line options]
INPUT
takes specimens.txt formatted input file
OPTIONS
-h: prints help message and quits
-f FILE: specify input file, default is 'specimens.txt'
-F REDO: specify output file suffix, default is redo so that
output filenames are 'thellier_redo' for thellier data and 'zeq_redo' for direction only data
OUTPUT
makes a thellier_redo or a zeq_redo format file
"""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
zfile, tfile = 'zeq_redo', 'thellier_redo'
zredo, tredo = "", ""
dir_path = pmag.get_named_arg('-WD', '.')
inspec = pmag.get_named_arg('-f', 'specimens.txt')
if '-F' in sys.argv:
ind = sys.argv.index('-F')
redo = sys.argv[ind + 1]
tfile = redo
zfile = redo
inspec = pmag.resolve_file_name(inspec, dir_path)
zfile = pmag.resolve_file_name(zfile, dir_path)
tfile = pmag.resolve_file_name(tfile, dir_path)
#
# read in data
#
specs = []
prior_spec_data, file_type = pmag.magic_read(inspec)
if file_type != 'specimens':
print(file_type, " this is not a valid pmag_specimens file")
sys.exit()
outstrings = []
for spec in prior_spec_data:
tmp = spec["method_codes"].split(":")
meths = []
for meth in tmp:
methods = meth.strip().split('-')
for m in methods:
if m not in meths:
meths.append(m)
if 'DIR' in meths: # DE-BFL, DE-BFP or DE-FM
specs.append(spec['specimen'])
if 'dir_comp' in list(spec.keys()) and spec['dir_comp'] != "" and spec['dir_comp'] != " ":
comp_name = spec['dir_comp']
else:
comp_name = string.ascii_uppercase[specs.count(
spec['specimen']) - 1]
calculation_type = "DE-BFL" # assume default calculation type is best-fit line
if "BFP" in meths:
calculation_type = 'DE-BFP'
elif "FM" in meths:
calculation_type = 'DE-FM'
if zredo == "":
zredo = open(zfile, "w")
outstring = '%s %s %s %s %s \n' % (
spec["specimen"], calculation_type, spec["meas_step_min"], spec["meas_step_max"], comp_name)
if outstring not in outstrings:
zredo.write(outstring)
                outstrings.append(outstring) # only writes unique interpretations
elif "PI" in meths and "TRM" in meths: # thellier record
if tredo == "":
tredo = open(tfile, "w")
outstring = '%s %i %i \n' % (spec["specimen"], float(
spec["meas_step_min"]), float(spec["meas_step_max"]))
if outstring not in outstrings:
tredo.write(outstring)
                outstrings.append(outstring) # only writes unique interpretations
    print('Redo files saved to: ', zfile, tfile) | python | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/mk_redo.py#L8-L90 |
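The row above documents mk_redo.py, which distills prior interpretations into redo files. Below is a minimal, self-contained sketch (not part of PmagPy) of how one zeq_redo line is assembled from a specimen record; the record and all of its values are invented for illustration.

```python
# Hypothetical specimen record using the MagIC column names seen in main() above.
spec = {"specimen": "sr01a1", "method_codes": "LP-DIR-AF : DIR : DE-BFL",
        "dir_comp": "A", "meas_step_min": "0.005", "meas_step_max": "0.08"}

# Split composite method codes into single tokens, as the loop in main() does.
meths = []
for meth in spec["method_codes"].split(":"):
    for m in meth.strip().split("-"):
        if m not in meths:
            meths.append(m)

calculation_type = "DE-BFL"          # default: best-fit line
if "BFP" in meths:
    calculation_type = "DE-BFP"      # best-fit plane
elif "FM" in meths:
    calculation_type = "DE-FM"       # Fisher mean

# One zeq_redo line: specimen, fit type, treatment bounds, component name.
print("%s %s %s %s %s" % (spec["specimen"], calculation_type,
                          spec["meas_step_min"], spec["meas_step_max"],
                          spec["dir_comp"]))
```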
PmagPy/PmagPy | pmagpy/lcc_ticks.py | find_side | def find_side(ls, side):
"""
Given a shapely LineString which is assumed to be rectangular, return the
line corresponding to a given side of the rectangle.
"""
minx, miny, maxx, maxy = ls.bounds
points = {'left': [(minx, miny), (minx, maxy)],
'right': [(maxx, miny), (maxx, maxy)],
'bottom': [(minx, miny), (maxx, miny)],
'top': [(minx, maxy), (maxx, maxy)],}
    return sgeom.LineString(points[side]) | python | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/lcc_ticks.py#L5-L16 |
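find_side() only depends on shapely. A short usage sketch, assuming PmagPy (and its shapely/cartopy dependencies) is importable; the rectangle coordinates are arbitrary:

```python
import shapely.geometry as sgeom
from pmagpy.lcc_ticks import find_side   # assumes a PmagPy install

# A 10 x 5 rectangle traced as a closed LineString.
rect = sgeom.LineString([(0, 0), (10, 0), (10, 5), (0, 5), (0, 0)])
print(find_side(rect, 'bottom'))   # LINESTRING (0 0, 10 0)
print(find_side(rect, 'left'))     # LINESTRING (0 0, 0 5)
```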
PmagPy/PmagPy | pmagpy/lcc_ticks.py | lambert_xticks | def lambert_xticks(ax, ticks):
"""Draw ticks on the bottom x-axis of a Lambert Conformal projection."""
te = lambda xy: xy[0]
lc = lambda t, n, b: np.vstack((np.zeros(n) + t, np.linspace(b[2], b[3], n))).T
xticks, xticklabels = _lambert_ticks(ax, ticks, 'bottom', lc, te)
ax.xaxis.tick_bottom()
ax.set_xticks(xticks)
    ax.set_xticklabels([ax.xaxis.get_major_formatter()(xtick) for xtick in xticklabels]) | python | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/lcc_ticks.py#L19-L26 |
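A hedged usage sketch for lambert_xticks(): it assumes cartopy, matplotlib, and PmagPy are installed, and a cartopy version old enough to still expose ax.outline_patch (which _lambert_ticks below relies on). The map extent and tick values are arbitrary.

```python
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
from pmagpy.lcc_ticks import lambert_xticks

proj = ccrs.LambertConformal(central_longitude=-96, central_latitude=39)
ax = plt.axes(projection=proj)
ax.set_extent([-120, -70, 25, 50], crs=ccrs.PlateCarree())
ax.coastlines()
lambert_xticks(ax, [-120, -110, -100, -90, -80, -70])  # longitudes to label
plt.show()
```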
PmagPy/PmagPy | pmagpy/lcc_ticks.py | lambert_yticks | def lambert_yticks(ax, ticks):
    """Draw ticks on the left y-axis of a Lambert Conformal projection."""
te = lambda xy: xy[1]
lc = lambda t, n, b: np.vstack((np.linspace(b[0], b[1], n), np.zeros(n) + t)).T
yticks, yticklabels = _lambert_ticks(ax, ticks, 'left', lc, te)
ax.yaxis.tick_left()
ax.set_yticks(yticks)
    ax.set_yticklabels([ax.yaxis.get_major_formatter()(ytick) for ytick in yticklabels]) | python | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/lcc_ticks.py#L29-L36 |
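lambert_yticks() is the left-axis counterpart and is normally used together with lambert_xticks(). Another brief sketch under the same assumptions (cartopy, matplotlib, and PmagPy installed; all values arbitrary):

```python
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
from pmagpy.lcc_ticks import lambert_xticks, lambert_yticks

ax = plt.axes(projection=ccrs.LambertConformal(central_longitude=-96,
                                               central_latitude=39))
ax.set_extent([-120, -70, 25, 50], crs=ccrs.PlateCarree())
lambert_xticks(ax, [-120, -100, -80])   # bottom axis: longitudes
lambert_yticks(ax, [30, 40, 50])        # left axis: latitudes
plt.show()
```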
PmagPy/PmagPy | pmagpy/lcc_ticks.py | _lambert_ticks | def _lambert_ticks(ax, ticks, tick_location, line_constructor, tick_extractor):
"""Get the tick locations and labels for an axis of a Lambert Conformal projection."""
outline_patch = sgeom.LineString(ax.outline_patch.get_path().vertices.tolist())
axis = find_side(outline_patch, tick_location)
n_steps = 30
extent = ax.get_extent(ccrs.PlateCarree())
_ticks = []
for t in ticks:
xy = line_constructor(t, n_steps, extent)
proj_xyz = ax.projection.transform_points(ccrs.Geodetic(), xy[:, 0], xy[:, 1])
xyt = proj_xyz[..., :2]
ls = sgeom.LineString(xyt.tolist())
locs = axis.intersection(ls)
if not locs:
tick = [None]
else:
tick = tick_extractor(locs.xy)
_ticks.append(tick[0])
# Remove ticks that aren't visible:
ticklabels = copy(ticks)
while True:
try:
index = _ticks.index(None)
except ValueError:
break
_ticks.pop(index)
ticklabels.pop(index)
    return _ticks, ticklabels | python | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/lcc_ticks.py#L38-L65 |
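_lambert_ticks() is the internal workhorse: the public helpers hand it a line_constructor that builds a meridian or parallel in lon/lat space and a tick_extractor that pulls one coordinate from its intersection with the map outline. A numpy-only illustration of those two callables (values arbitrary, no cartopy needed):

```python
import numpy as np

tick_extractor = lambda xy: xy[0]      # keep the x coordinate of an intersection
line_constructor = lambda t, n, b: np.vstack(
    (np.zeros(n) + t, np.linspace(b[2], b[3], n))).T   # meridian at longitude t

extent = [-120, -70, 25, 50]           # lon_min, lon_max, lat_min, lat_max
print(line_constructor(-100.0, 5, extent))   # five lon/lat points along longitude -100
```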
PmagPy/PmagPy | programs/deprecated/umich_magic.py | main | def main():
"""
NAME
umich_magic.py
DESCRIPTION
converts UMICH .mag format files to magic_measurements format files
SYNTAX
umich_magic.py [command line options]
OPTIONS
-h: prints the help message and quits.
-usr USER: identify user, default is ""
-f FILE: specify .mag format input file, required
        -fsa SAMPFILE : specify er_samples.txt file relating samples, sites and location names, default is none
-F FILE: specify output file, default is magic_measurements.txt
-spc NUM : specify number of characters to designate a specimen, default = 0
-loc LOCNAME : specify location/study name, must have either LOCNAME or SAMPFILE or be a synthetic
-ncn NCON: specify naming convention: default is #1 below
-A: don't average replicate measurements
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
        [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitrary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name same as sample
[6] site is entered under a separate column -- NOT CURRENTLY SUPPORTED
[7-Z] [XXXX]YYY: XXXX is site designation with Z characters with sample name XXXXYYYY
        NB: all others you will have to customize yourself
or e-mail [email protected] for help.
Format of UMICH .mag files:
Spec Treat CSD Intensity Declination Inclination metadata string
Spec: specimen name
Treat: treatment step
XXX T in Centigrade
XXX AF in mT
        Intensity assumed to be total moment in 10^-3 Am^2 (emu)
Declination: Declination in specimen coordinate system
        Inclination: Inclination in specimen coordinate system
        metadata string: mm/dd/yy;hh:mm;[dC,mT];xx.xx;UNITS;USER;INST;NMEAS
hh in 24 hours.
dC or mT units of treatment XXX (see Treat above) for thermal or AF respectively
xx.xxx DC field
UNITS of DC field (microT, mT)
INST: instrument code, number of axes, number of positions (e.g., G34 is 2G, three axes,
measured in four positions)
NMEAS: number of measurements in a single position (1,3,200...)
"""
# initialize some stuff
dir_path='.'
infile_type="mag"
noave=0
methcode,inst="",""
phi,theta,peakfield,labfield=0,0,0,0
pTRM,MD,samp_con,Z=0,0,'1',1
missing=1
demag="N"
er_location_name=""
citation='This study'
args=sys.argv
methcode="LP-NO"
samp_file,ErSamps='',[]
specnum=0
#
# get command line arguments
#
meas_file="magic_measurements.txt"
user=""
if '-WD' in args:
ind=args.index("-WD")
dir_path=args[ind+1]
if "-h" in args:
print(main.__doc__)
sys.exit()
if "-usr" in args:
ind=args.index("-usr")
user=args[ind+1]
if '-F' in args:
ind=args.index("-F")
meas_file=dir_path+'/'+args[ind+1]
if '-f' in args:
ind=args.index("-f")
magfile=dir_path+'/'+args[ind+1]
try:
input=open(magfile,'r')
except:
print("bad mag file name")
sys.exit()
else:
print("mag_file field is required option")
print(main.__doc__)
sys.exit()
if "-spc" in args:
ind=args.index("-spc")
specnum=int(args[ind+1])
if specnum!=0:specnum=-specnum
if "-loc" in args:
ind=args.index("-loc")
er_location_name=args[ind+1]
if "-fsa" in args:
ind=args.index("-fsa")
samp_file=dir_path+'/'+args[ind+1]
Samps,file_type=pmag.magic_read(samp_file)
if "-A" in args: noave=1
if "-ncn" in args:
ind=args.index("-ncn")
samp_con=sys.argv[ind+1]
if "4" in samp_con:
if "-" not in samp_con:
print("option [4] must be in form 4-Z where Z is an integer")
sys.exit()
else:
Z=samp_con.split("-")[1]
samp_con="4"
samp_con=sys.argv[ind+1]
if "7" in samp_con:
if "-" not in samp_con:
print("option [7] must be in form 7-Z where Z is an integer")
sys.exit()
else:
Z=samp_con.split("-")[1]
samp_con="7"
MagRecs,specs=[],[]
version_num=pmag.get_version()
if infile_type=="mag":
for line in input.readlines():
instcode=""
if len(line)>2:
MagRec={}
MagRec['er_location_name']=er_location_name
MagRec['magic_software_packages']=version_num
MagRec["treatment_temp"]='%8.3e' % (273) # room temp in kelvin
MagRec["measurement_temp"]='%8.3e' % (273) # room temp in kelvin
MagRec["treatment_ac_field"]='0'
MagRec["treatment_dc_field"]='0'
MagRec["treatment_dc_field_phi"]='0'
MagRec["treatment_dc_field_theta"]='0'
meas_type="LT-NO"
rec=line.split()
labfield=0
code1=rec[6].split(';')
date=code1[0].split('/') # break date into mon/day/year
yy=int(date[2])
if yy <90:
yyyy=str(2000+yy)
else: yyyy=str(1900+yy)
mm=int(date[0])
if mm<10:
mm="0"+str(mm)
else: mm=str(mm)
dd=int(date[1])
if dd<10:
dd="0"+str(dd)
else: dd=str(dd)
time=code1[1].split(':')
hh=int(time[0])
if hh<10:
hh="0"+str(hh)
else: hh=str(hh)
min=int(time[1])
if min<10:
min= "0"+str(min)
else: min=str(min)
MagRec["measurement_date"]=yyyy+":"+mm+":"+dd+":"+hh+":"+min+":00.00"
MagRec["measurement_time_zone"]=''
instcode=''
if len(code1)>1:
MagRec["measurement_positions"]=code1[6][2]
else:
MagRec["measurement_positions"]=code1[7] # takes care of awkward format with bubba and flo being different
if user=="":user=code1[5]
if code1[2][-1]=='C': demag="T"
if code1[2]=='mT': demag="AF"
treat=rec[1].split('.')
if len(treat)==1:treat.append('0')
if demag=='T' and treat!=0:
meas_type="LT-T-Z"
MagRec["treatment_temp"]='%8.3e' % (float(treat[0])+273.) # temp in kelvin
if demag=="AF":
meas_type="LT-AF-Z"
MagRec["treatment_ac_field"]='%8.3e' % (float(treat[0])*1e-3) # Af field in T
MagRec["treatment_dc_field"]='0'
MagRec["er_specimen_name"]=rec[0]
if rec[0] not in specs:specs.append(rec[0]) # get a list of specimen names
experiment=rec[0]+":"
MagRec["er_site_name"]=""
if specnum!=0:
MagRec["er_sample_name"]=rec[0][:specnum]
else:
MagRec["er_sample_name"]=rec[0]
if "-fsa" in args:
for samp in Samps:
if samp["er_sample_name"] == MagRec["er_sample_name"]:
MagRec["er_location_name"]=samp["er_location_name"]
MagRec["er_site_name"]=samp["er_site_name"]
break
elif int(samp_con)!=6:
site=pmag.parse_site(MagRec['er_sample_name'],samp_con,Z)
MagRec["er_site_name"]=site
if MagRec['er_site_name']=="":
print('No site name found for: ',MagRec['er_specimen_name'],MagRec['er_sample_name'])
if MagRec["er_location_name"]=="":
print('no location name for: ',MagRec["er_specimen_name"])
if rec[1]==".00":rec[1]="0.00"
MagRec["measurement_csd"]=rec[2]
MagRec["measurement_magn_moment"]='%10.3e'% (float(rec[3])*1e-3) # moment in Am^2 (from emu)
MagRec["measurement_dec"]=rec[4]
MagRec["measurement_inc"]=rec[5]
MagRec["magic_instrument_codes"]=instcode
MagRec["er_analyst_mail_names"]=user
MagRec["er_citation_names"]=citation
MagRec["magic_method_codes"]=meas_type
MagRec["measurement_flag"]='g'
MagRec["er_specimen_name"]=rec[0]
MagRec["measurement_number"]='1'
MagRecs.append(MagRec)
MagOuts=[]
for spec in specs: # gather all demag types for this specimen
SpecRecs,meths,measnum=[],[],1
for rec in MagRecs:
if rec['er_specimen_name']==spec:
rec['measurement_number']=str(measnum)
measnum+=1
if rec['magic_method_codes'] not in meths:meths.append(rec['magic_method_codes'])
SpecRecs.append(rec)
expname=spec
if "LT-AF-Z" in meths:expname=expname+ ':LP-DIR-AF'
if "LT-T-Z" in meths:expname=expname+ ':LP-DIR-T'
for rec in SpecRecs:
rec['magic_experiment_name']=expname
MagOuts.append(rec)
pmag.magic_write(meas_file,MagOuts,'magic_measurements')
    print("results put in ",meas_file) | python | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/deprecated/umich_magic.py#L7-L244 |
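For illustration only, here is how one invented UMICH .mag line would be unpacked following the parsing in main() above; the field layout matches the format description in the docstring, and every value is made up.

```python
# specimen  treat  CSD  intensity  dec   inc   mm/dd/yy;hh:mm;[dC,mT];DC;UNITS;USER;INST;NMEAS
line = "tg001a1 500 1.2 3.45e-3 312.5 45.6 06/21/04;14:05;dC;0.0;microT;lisa;G34;3"
rec = line.split()
code1 = rec[6].split(';')                       # the metadata string
print("specimen:", rec[0])
print("date (mm/dd/yy), time:", code1[0], code1[1])
print("step:", rec[1], "thermal (C)" if code1[2][-1] == 'C' else "AF (mT)")
print("moment (Am^2):", float(rec[3]) * 1e-3)   # emu -> Am^2, as in the converter
print("dec, inc:", rec[4], rec[5])
```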
PmagPy/PmagPy | programs/dmag_magic.py | dmag_magic | def dmag_magic(in_file="measurements.txt", dir_path=".", input_dir_path="",
spec_file="specimens.txt", samp_file="samples.txt",
site_file="sites.txt", loc_file="locations.txt",
plot_by="loc", LT="AF", norm=True, XLP="",
save_plots=True, fmt="svg"):
"""
plots intensity decay curves for demagnetization experiments
Parameters
----------
in_file : str, default "measurements.txt"
dir_path : str
output directory, default "."
input_dir_path : str
input file directory (if different from dir_path), default ""
spec_file : str
input specimen file name, default "specimens.txt"
samp_file: str
input sample file name, default "samples.txt"
site_file : str
input site file name, default "sites.txt"
loc_file : str
input location file name, default "locations.txt"
plot_by : str
[spc, sam, sit, loc] (specimen, sample, site, location), default "loc"
LT : str
lab treatment [T, AF, M], default AF
norm : bool
normalize by NRM magnetization, default True
XLP : str
exclude specific lab protocols, (for example, method codes like LP-PI)
default ""
save_plots : bool
plot and save non-interactively, default True
fmt : str
["png", "svg", "pdf", "jpg"], default "svg"
Returns
---------
        type - Tuple : (True or False indicating if conversion was successful, file name(s) written)
"""
dir_path = os.path.realpath(dir_path)
if not input_dir_path:
input_dir_path = dir_path
input_dir_path = os.path.realpath(input_dir_path)
# format plot_key
name_dict = {'loc': 'location', 'sit': 'site',
'sam': 'sample', 'spc': 'specimen'}
if plot_by not in name_dict.values():
try:
plot_key = name_dict[plot_by]
except KeyError:
print('Unrecognized plot_by {}, falling back to plot by location'.format(plot_by))
            plot_key = "location"  # fall back to the location column, matching the message above
else:
plot_key = plot_by
# figure out what kind of experiment
LT = "LT-" + LT + "-Z"
print('LT', LT)
if LT == "LT-T-Z":
units, dmag_key = 'K', 'treat_temp'
elif LT == "LT-AF-Z":
units, dmag_key = 'T', 'treat_ac_field'
elif LT == 'LT-M-Z':
units, dmag_key = 'J', 'treat_mw_energy'
else:
units = 'U'
# init
FIG = {} # plot dictionary
FIG['demag'] = 1 # demag is figure 1
# create contribution and add required headers
fnames = {"specimens": spec_file, "samples": samp_file,
'sites': site_file, 'locations': loc_file}
if not os.path.exists(pmag.resolve_file_name(in_file, input_dir_path)):
print('-E- Could not find {}'.format(in_file))
return False, []
contribution = cb.Contribution(input_dir_path, single_file=in_file,
custom_filenames=fnames)
file_type = list(contribution.tables.keys())[0]
print(len(contribution.tables['measurements'].df), ' records read from ', in_file)
# add plot_key into measurements table
if plot_key not in contribution.tables['measurements'].df.columns:
#contribution.propagate_name_down(plot_key, 'measurements')
contribution.propagate_location_to_measurements()
data_container = contribution.tables[file_type]
# pare down to only records with useful data
# grab records that have the requested code
data_slice = data_container.get_records_for_code(LT)
# and don't have the offending code
data = data_container.get_records_for_code(XLP, incl=False, use_slice=True,
sli=data_slice, strict_match=False)
# make sure quality is in the dataframe
if 'quality' not in data.columns:
data['quality'] = 'g'
# get intensity key and make sure intensity data is not blank
intlist = ['magn_moment', 'magn_volume', 'magn_mass']
IntMeths = [col_name for col_name in data.columns if col_name in intlist]
# get rid of any entirely blank intensity columns
for col_name in IntMeths:
if not data[col_name].any():
data.drop(col_name, axis=1, inplace=True)
IntMeths = [col_name for col_name in data.columns if col_name in intlist]
if len(IntMeths) == 0:
print('-E- No intensity headers found')
return False, []
int_key = IntMeths[0] # plot first intensity method found - normalized to initial value anyway - doesn't matter which used
data = data[data[int_key].notnull()]
# make list of individual plots
# by default, will be by location_name
plotlist = data[plot_key].unique()
plotlist.sort()
pmagplotlib.plot_init(FIG['demag'], 5, 5)
last_plot = False
# iterate through and plot the data
for plot in plotlist:
if plot == plotlist[-1]:
last_plot = True
plot_data = data[data[plot_key] == plot].copy()
if not save_plots:
print(plot, 'plotting by: ', plot_key)
if len(plot_data) > 2:
title = plot
spcs = []
spcs = plot_data['specimen'].unique()
for spc in spcs:
INTblock = []
spec_data = plot_data[plot_data['specimen'] == spc]
for ind, rec in spec_data.iterrows():
INTblock.append([float(rec[dmag_key]), 0, 0, float(rec[int_key]), 1, rec['quality']])
if len(INTblock) > 2:
pmagplotlib.plot_mag(FIG['demag'], INTblock,
title, 0, units, norm)
if save_plots:
files = {}
for key in list(FIG.keys()):
if pmagplotlib.isServer:
files[key] = title + '_' + LT + '.' + fmt
incl_dir = False
else: # if not server, include directory in output path
files[key] = os.path.join(dir_path, title + '_' + LT + '.' + fmt)
incl_dir = True
pmagplotlib.save_plots(FIG, files, incl_directory=incl_dir)
else:
pmagplotlib.draw_figs(FIG)
prompt = " S[a]ve to save plot, [q]uit, Return to continue: "
ans = input(prompt)
if ans == 'q':
return True, []
if ans == "a":
files = {}
for key in list(FIG.keys()):
if pmagplotlib.isServer:
files[key] = title + '_' + LT + '.' + fmt
incl_dir = False
else: # if not server, include directory in output path
files[key] = os.path.join(dir_path, title + '_' + LT + '.' + fmt)
incl_dir = True
pmagplotlib.save_plots(FIG, files, incl_directory=incl_dir)
pmagplotlib.clearFIG(FIG['demag'])
if last_plot:
            return True, [] | python | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/dmag_magic.py#L16-L187 |
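dmag_magic() can also be called directly from Python. A hedged sketch: the import path mirrors the file location given in this row and may need adjusting for your install; 'my_magic_dir' is a hypothetical directory holding measurements.txt and the other MagIC tables.

```python
from programs.dmag_magic import dmag_magic   # adjust to your PmagPy install

ok, saved = dmag_magic(in_file="measurements.txt",
                       dir_path="my_magic_dir",   # hypothetical working directory
                       plot_by="spc",             # one decay curve per specimen
                       LT="AF",                   # alternating-field steps
                       norm=True,                 # normalize to the NRM step
                       save_plots=True,
                       fmt="png")
print(ok, saved)
```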
PmagPy/PmagPy | programs/dmag_magic.py | main | def main():
"""
NAME
dmag_magic.py
DESCRIPTION
plots intensity decay curves for demagnetization experiments
SYNTAX
dmag_magic -h [command line options]
INPUT
takes magic formatted measurements.txt files
OPTIONS
-h prints help message and quits
-f FILE: specify input file, default is: measurements.txt
-obj OBJ: specify object [loc, sit, sam, spc] for plot,
default is by location
-LT [AF,T,M]: specify lab treatment type, default AF
-XLP [PI]: exclude specific lab protocols,
(for example, method codes like LP-PI)
-N do not normalize by NRM magnetization
-sav save plots silently and quit
-fmt [svg,jpg,png,pdf] set figure format [default is svg]
NOTE
loc: location (study); sit: site; sam: sample; spc: specimen
"""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
# initialize variables from command line + defaults
dir_path = pmag.get_named_arg("-WD", default_val=".")
input_dir_path = pmag.get_named_arg('-ID', '')
if not input_dir_path:
input_dir_path = dir_path
in_file = pmag.get_named_arg("-f", default_val="measurements.txt")
in_file = pmag.resolve_file_name(in_file, input_dir_path)
if "-ID" not in sys.argv:
input_dir_path = os.path.split(in_file)[0]
plot_by = pmag.get_named_arg("-obj", default_val="loc")
LT = pmag.get_named_arg("-LT", "AF")
no_norm = pmag.get_flag_arg_from_sys("-N")
norm = False if no_norm else True
save_plots = pmag.get_flag_arg_from_sys("-sav")
fmt = pmag.get_named_arg("-fmt", "svg")
XLP = pmag.get_named_arg("-XLP", "")
spec_file = pmag.get_named_arg("-fsp", default_val="specimens.txt")
samp_file = pmag.get_named_arg("-fsa", default_val="samples.txt")
site_file = pmag.get_named_arg("-fsi", default_val="sites.txt")
loc_file = pmag.get_named_arg("-flo", default_val="locations.txt")
dmag_magic(in_file, dir_path, input_dir_path, spec_file, samp_file,
site_file, loc_file, plot_by, LT, norm, XLP,
               save_plots, fmt) | python | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/dmag_magic.py#L191-L244 |
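The main() wrapper only parses the flags above and forwards them to dmag_magic(). One way to drive it non-interactively from Python; this is a sketch that assumes the dmag_magic.py script is on your PATH after installing PmagPy and that the current directory holds a measurements.txt.

```python
import subprocess

# Flags are the ones documented in the docstring above.
subprocess.run(["dmag_magic.py", "-f", "measurements.txt",
                "-obj", "sit",           # one plot per site
                "-LT", "T",              # thermal demagnetization steps
                "-fmt", "png",
                "-sav"],                 # save silently, no interactive prompt
               check=True)
```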
PmagPy/PmagPy | programs/watsons_v.py | main | def main():
"""
NAME
watsons_v.py
DESCRIPTION
calculates Watson's V statistic from input files
INPUT FORMAT
takes dec/inc as first two columns in two space delimited files
SYNTAX
watsons_v.py [command line options]
OPTIONS
-h prints help message and quits
-f FILE (with optional second)
-f2 FILE (second file)
-ant, flip antipodal directions to opposite direction
in first file if only one file or flip all in second, if two files
-P (don't save or show plot)
-sav save figure and quit silently
-fmt [png,svg,eps,pdf,jpg] format for saved figure
OUTPUT
Watson's V and the Monte Carlo Critical Value Vc.
in plot, V is solid and Vc is dashed.
"""
Flip=0
show,plot=1,0
fmt='svg'
file2=""
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-ant' in sys.argv: Flip=1
if '-sav' in sys.argv: show,plot=0,1 # don't display, but do save plot
if '-fmt' in sys.argv:
ind=sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
if '-P' in sys.argv: show=0 # don't display or save plot
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file1=sys.argv[ind+1]
data=numpy.loadtxt(file1).transpose()
D1=numpy.array([data[0],data[1]]).transpose()
file1_name=os.path.split(file1)[1].split('.')[0]
else:
print("-f is required")
print(main.__doc__)
sys.exit()
if '-f2' in sys.argv:
ind=sys.argv.index('-f2')
file2=sys.argv[ind+1]
data2=numpy.loadtxt(file2).transpose()
D2=numpy.array([data2[0],data2[1]]).transpose()
file2_name=os.path.split(file2)[1].split('.')[0]
if Flip==1:
D2,D=pmag.flip(D2) # D2 are now flipped
if len(D2)!=0:
if len(D)!=0:
                    D2=numpy.concatenate((D,D2)) # put all in D2
elif len(D)!=0:
D2=D
else:
print('length of second file is zero')
sys.exit()
elif Flip==1:D2,D1=pmag.flip(D1) # peel out antipodal directions, put in D2
#
counter,NumSims=0,5000
#
# first calculate the fisher means and cartesian coordinates of each set of Directions
#
pars_1=pmag.fisher_mean(D1)
pars_2=pmag.fisher_mean(D2)
#
# get V statistic for these
#
V=pmag.vfunc(pars_1,pars_2)
#
# do monte carlo simulation of datasets with same kappas, but common mean
#
Vp=[] # set of Vs from simulations
if show==1:print("Doing ",NumSims," simulations")
for k in range(NumSims):
counter+=1
if counter==50:
if show==1:print(k+1)
counter=0
Dirp=[]
# get a set of N1 fisher distributed vectors with k1, calculate fisher stats
for i in range(pars_1["n"]):
Dirp.append(pmag.fshdev(pars_1["k"]))
pars_p1=pmag.fisher_mean(Dirp)
# get a set of N2 fisher distributed vectors with k2, calculate fisher stats
Dirp=[]
for i in range(pars_2["n"]):
Dirp.append(pmag.fshdev(pars_2["k"]))
pars_p2=pmag.fisher_mean(Dirp)
# get the V for these
Vk=pmag.vfunc(pars_p1,pars_p2)
Vp.append(Vk)
#
# sort the Vs, get Vcrit (95th one)
#
Vp.sort()
k=int(.95*NumSims)
if show==1:
print("Watson's V, Vcrit: ")
print(' %10.1f %10.1f'%(V,Vp[k]))
if show==1 or plot==1:
print("Watson's V, Vcrit: ")
print(' %10.1f %10.1f'%(V,Vp[k]))
CDF={'cdf':1}
pmagplotlib.plot_init(CDF['cdf'],5,5)
pmagplotlib.plot_cdf(CDF['cdf'],Vp,"Watson's V",'r',"")
pmagplotlib.plot_vs(CDF['cdf'],[V],'g','-')
pmagplotlib.plot_vs(CDF['cdf'],[Vp[k]],'b','--')
if plot==0:pmagplotlib.draw_figs(CDF)
files={}
if pmagplotlib.isServer: # use server plot naming convention
if file2!="":
files['cdf']='watsons_v_'+file1+'_'+file2+'.'+fmt
else:
files['cdf']='watsons_v_'+file1+'.'+fmt
else: # use more readable plot naming convention
if file2!="":
files['cdf']='watsons_v_'+file1_name+'_'+file2_name+'.'+fmt
else:
files['cdf']='watsons_v_'+file1_name+'.'+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
titles={}
titles['cdf']='Cumulative Distribution'
CDF = pmagplotlib.add_borders(CDF,titles,black,purple)
pmagplotlib.save_plots(CDF,files)
elif plot==0:
ans=input(" S[a]ve to save plot, [q]uit without saving: ")
if ans=="a": pmagplotlib.save_plots(CDF,files)
if plot==1: # save and quit silently
        pmagplotlib.save_plots(CDF,files) | python | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/watsons_v.py#L15-L158 |
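The heart of watsons_v.py is a Monte Carlo estimate of the critical value Vc. A condensed, hedged sketch of that test using the same pmagpy.pmag helpers the program calls (assumes PmagPy is installed; the dec/inc values are invented and fewer simulations are run than the program's 5000):

```python
import numpy as np
import pmagpy.pmag as pmag

D1 = np.array([[350.5, 40.2], [352.1, 38.7], [348.9, 41.5], [351.0, 39.9]])
D2 = np.array([[10.2, 42.1], [8.7, 44.0], [11.5, 43.2], [9.9, 41.8]])

pars_1, pars_2 = pmag.fisher_mean(D1), pmag.fisher_mean(D2)
V = pmag.vfunc(pars_1, pars_2)                    # Watson's V for the two means

Vp = []
for _ in range(1000):                             # Monte Carlo under a common mean
    sim1 = [pmag.fshdev(pars_1["k"]) for _ in range(pars_1["n"])]
    sim2 = [pmag.fshdev(pars_2["k"]) for _ in range(pars_2["n"])]
    Vp.append(pmag.vfunc(pmag.fisher_mean(sim1), pmag.fisher_mean(sim2)))
Vp.sort()
print("V = %.1f, Vcrit (95%%) = %.1f" % (V, Vp[int(0.95 * len(Vp))]))
```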
PmagPy/PmagPy | programs/deprecated/biplot_magic.py | main | def main():
"""
NAME
biplot_magic.py
DESCRIPTION
makes a biplot of specified variables from magic_measurements.txt format file
SYNTAX
biplot_magic.py [-h] [-i] [command line options]
INPUT
        takes magic formatted magic_measurements file
OPTIONS
-h prints help message and quits
-i interactively set filename and axes for plotting
-f FILE: specifies file name, default: magic_measurements.txt
-fmt [svg,png,jpg], format for images - default is svg
        -sav save figure and quit
        -x XMETH:key:step, specify method code for X axis (optional key and treatment values)
        -y YMETH:key:step, specify method code for Y axis
-obj OBJ: specify object [loc, sit, sam, spc] for plot, default is whole file
-n [V,M] plot volume or mass normalized data only
NOTES
if nothing is specified for x and y, the user will be presented with options
        key = ['treatment_ac_field','treatment_dc_field','treatment_temp']
step in mT for fields, K for temperatures
"""
#
file='magic_measurements.txt'
methx,methy,fmt="","",'.svg'
plot_key=''
norm_by=""
#plot=0
no_plot = pmag.get_flag_arg_from_sys('-sav')
if not no_plot:
do_plot = True
else:
do_plot = False
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
if '-fmt' in sys.argv:
ind=sys.argv.index('-fmt')
fmt='.'+sys.argv[ind+1]
if '-n' in sys.argv:
ind=sys.argv.index('-n')
norm_by=sys.argv[ind+1]
xtreat_key,ytreat_key,xstep,ystep="","","",""
if '-x' in sys.argv:
ind=sys.argv.index('-x')
meths=sys.argv[ind+1].split(':')
methx=meths[0]
if len(meths)>1:
xtreat_key=meths[1]
xstep=float(meths[2])
if '-y' in sys.argv:
ind=sys.argv.index('-y')
meths=sys.argv[ind+1].split(':')
methy=meths[0]
if len(meths)>1:
ytreat_key=meths[1]
ystep=float(meths[2])
if '-obj' in sys.argv:
ind=sys.argv.index('-obj')
plot_by=sys.argv[ind+1]
if plot_by=='loc':plot_key='er_location_name'
if plot_by=='sit':plot_key='er_site_name'
if plot_by=='sam':plot_key='er_sample_name'
if plot_by=='spc':plot_key='er_specimen_name'
if '-h' in sys.argv:
do_plot = False
if '-i' in sys.argv:
#
# get name of file from command line
#
        file=input("Input magic_measurements file name? [magic_measurements.txt] ")
if file=="":file="magic_measurements.txt"
#
#
FIG={'fig':1}
pmagplotlib.plot_init(FIG['fig'],5,5)
data,file_type=pmag.magic_read(file)
if file_type!="magic_measurements":
        print(file_type,' not correct format for magic_measurements file')
sys.exit()
#
# collect method codes
methods,plotlist=[],[]
for rec in data:
if plot_key!="":
if rec[plot_key] not in plotlist:plotlist.append(rec[plot_key])
elif len(plotlist)==0:
plotlist.append('All')
meths=rec['magic_method_codes'].split(':')
for meth in meths:
if meth.strip() not in methods and meth.strip()!="LP-":
methods.append(meth.strip())
#
if '-i' in sys.argv:
print(methods)
elif methx =="" or methy=="":
print(methods)
sys.exit()
GoOn=1
while GoOn==1:
if '-i' in sys.argv:methx=input('Select method for x axis: ')
if methx not in methods:
if '-i' in sys.argv:
print('try again! method not available')
else:
print(main.__doc__)
print('\n must specify X axis method\n')
sys.exit()
else:
if pmagplotlib.verbose: print(methx, ' selected for X axis')
GoOn=0
GoOn=1
while GoOn==1:
if '-i' in sys.argv:methy=input('Select method for y axis: ')
if methy not in methods:
if '-i' in sys.argv:
print('try again! method not available')
else:
print(main.__doc__)
print('\n must specify Y axis method\n')
sys.exit()
else:
if pmagplotlib.verbose: print(methy, ' selected for Y axis')
GoOn=0
if norm_by=="":
measkeys=['measurement_magn_mass','measurement_magn_volume','measurement_magn_moment','measurement_magnitude','measurement_chi_volume','measurement_chi_mass','measurement_chi']
elif norm_by=="V":
measkeys=['measurement_magn_volume','measurement_chi_volume']
elif norm_by=="M":
measkeys=['measurement_magn_mass','measurement_chi_mass']
xmeaskey,ymeaskey="",""
plotlist.sort()
for plot in plotlist: # go through objects
if pmagplotlib.verbose:
print(plot)
X,Y=[],[]
x,y='',''
for rec in data:
if plot_key!="" and rec[plot_key]!=plot:
pass
else:
meths=rec['magic_method_codes'].split(':')
for meth in meths:
if meth.strip()==methx:
if xmeaskey=="":
for key in measkeys:
if key in list(rec.keys()) and rec[key]!="":
xmeaskey=key
if pmagplotlib.verbose:
print(xmeaskey,' being used for plotting X.')
break
if meth.strip()==methy:
if ymeaskey=="":
for key in measkeys:
if key in list(rec.keys()) and rec[key]!="":
ymeaskey=key
if pmagplotlib.verbose:
print(ymeaskey,' being used for plotting Y')
break
if ymeaskey!="" and xmeaskey!="":
for rec in data:
x,y='',''
spec=rec['er_specimen_name'] # get the ydata for this specimen
if rec[ymeaskey]!="" and methy in rec['magic_method_codes'].split(':'):
if ytreat_key=="" or (ytreat_key in list(rec.keys()) and float(rec[ytreat_key])==ystep):
y=float(rec[ymeaskey])
for rec in data: # now find the xdata
if rec['er_specimen_name']==spec and rec[xmeaskey]!="" and methx in rec['magic_method_codes'].split(':'):
if xtreat_key=="" or (xtreat_key in list(rec.keys()) and float(rec[xtreat_key])==xstep):
x=float(rec[xmeaskey])
if x != '' and y!= '':
X.append(x)
Y.append(y)
if len(X)>0:
pmagplotlib.clearFIG(FIG['fig'])
pmagplotlib.plot_xy(FIG['fig'],X,Y,sym='ro',xlab=methx,ylab=methy,title=plot+':Biplot')
if not pmagplotlib.isServer and do_plot:
pmagplotlib.draw_figs(FIG)
ans=input('S[a]ve plots, [q]uit, Return for next plot ' )
if ans=='a':
files={}
for key in list(FIG.keys()): files[key]=plot+'_'+key+fmt
pmagplotlib.save_plots(FIG,files)
if ans=='q':
print("Good-bye\n")
sys.exit()
else:
files={}
for key in list(FIG.keys()): files[key]=plot+'_'+key+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
titles={}
titles['fig']='X Y Plot'
FIG = pmagplotlib.add_borders(FIG,titles,black,purple)
pmagplotlib.save_plots(FIG,files)
else:
print('nothing to plot for ',plot) | python | def main():
"""
NAME
biplot_magic.py
DESCRIPTION
makes a biplot of specified variables from magic_measurements.txt format file
SYNTAX
biplot_magic.py [-h] [-i] [command line options]
INPUT
        takes a MagIC formatted magic_measurements file
OPTIONS
-h prints help message and quits
-i interactively set filename and axes for plotting
-f FILE: specifies file name, default: magic_measurements.txt
-fmt [svg,png,jpg], format for images - default is svg
        -sav save figure and quit
        -x XMETH:key:step, specify method code for X axis (optional key and treatment values)
        -y YMETH:key:step, specify method code for Y axis
-obj OBJ: specify object [loc, sit, sam, spc] for plot, default is whole file
-n [V,M] plot volume or mass normalized data only
NOTES
if nothing is specified for x and y, the user will be presented with options
        key = ['treatment_ac_field','treatment_dc_field','treatment_temp']
step in mT for fields, K for temperatures
"""
#
file='magic_measurements.txt'
methx,methy,fmt="","",'.svg'
plot_key=''
norm_by=""
#plot=0
no_plot = pmag.get_flag_arg_from_sys('-sav')
if not no_plot:
do_plot = True
else:
do_plot = False
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
if '-fmt' in sys.argv:
ind=sys.argv.index('-fmt')
fmt='.'+sys.argv[ind+1]
if '-n' in sys.argv:
ind=sys.argv.index('-n')
norm_by=sys.argv[ind+1]
xtreat_key,ytreat_key,xstep,ystep="","","",""
if '-x' in sys.argv:
ind=sys.argv.index('-x')
meths=sys.argv[ind+1].split(':')
methx=meths[0]
if len(meths)>1:
xtreat_key=meths[1]
xstep=float(meths[2])
if '-y' in sys.argv:
ind=sys.argv.index('-y')
meths=sys.argv[ind+1].split(':')
methy=meths[0]
if len(meths)>1:
ytreat_key=meths[1]
ystep=float(meths[2])
if '-obj' in sys.argv:
ind=sys.argv.index('-obj')
plot_by=sys.argv[ind+1]
if plot_by=='loc':plot_key='er_location_name'
if plot_by=='sit':plot_key='er_site_name'
if plot_by=='sam':plot_key='er_sample_name'
if plot_by=='spc':plot_key='er_specimen_name'
if '-h' in sys.argv:
do_plot = False
if '-i' in sys.argv:
#
# get name of file from command line
#
        file=input("Input magic_measurements file name? [magic_measurements.txt] ")
if file=="":file="magic_measurements.txt"
#
#
FIG={'fig':1}
pmagplotlib.plot_init(FIG['fig'],5,5)
data,file_type=pmag.magic_read(file)
if file_type!="magic_measurements":
        print(file_type,' not correct format for magic_measurements file')
sys.exit()
#
# collect method codes
methods,plotlist=[],[]
for rec in data:
if plot_key!="":
if rec[plot_key] not in plotlist:plotlist.append(rec[plot_key])
elif len(plotlist)==0:
plotlist.append('All')
meths=rec['magic_method_codes'].split(':')
for meth in meths:
if meth.strip() not in methods and meth.strip()!="LP-":
methods.append(meth.strip())
#
if '-i' in sys.argv:
print(methods)
elif methx =="" or methy=="":
print(methods)
sys.exit()
GoOn=1
while GoOn==1:
if '-i' in sys.argv:methx=input('Select method for x axis: ')
if methx not in methods:
if '-i' in sys.argv:
print('try again! method not available')
else:
print(main.__doc__)
print('\n must specify X axis method\n')
sys.exit()
else:
if pmagplotlib.verbose: print(methx, ' selected for X axis')
GoOn=0
GoOn=1
while GoOn==1:
if '-i' in sys.argv:methy=input('Select method for y axis: ')
if methy not in methods:
if '-i' in sys.argv:
print('try again! method not available')
else:
print(main.__doc__)
print('\n must specify Y axis method\n')
sys.exit()
else:
if pmagplotlib.verbose: print(methy, ' selected for Y axis')
GoOn=0
if norm_by=="":
measkeys=['measurement_magn_mass','measurement_magn_volume','measurement_magn_moment','measurement_magnitude','measurement_chi_volume','measurement_chi_mass','measurement_chi']
elif norm_by=="V":
measkeys=['measurement_magn_volume','measurement_chi_volume']
elif norm_by=="M":
measkeys=['measurement_magn_mass','measurement_chi_mass']
xmeaskey,ymeaskey="",""
plotlist.sort()
for plot in plotlist: # go through objects
if pmagplotlib.verbose:
print(plot)
X,Y=[],[]
x,y='',''
for rec in data:
if plot_key!="" and rec[plot_key]!=plot:
pass
else:
meths=rec['magic_method_codes'].split(':')
for meth in meths:
if meth.strip()==methx:
if xmeaskey=="":
for key in measkeys:
if key in list(rec.keys()) and rec[key]!="":
xmeaskey=key
if pmagplotlib.verbose:
print(xmeaskey,' being used for plotting X.')
break
if meth.strip()==methy:
if ymeaskey=="":
for key in measkeys:
if key in list(rec.keys()) and rec[key]!="":
ymeaskey=key
if pmagplotlib.verbose:
print(ymeaskey,' being used for plotting Y')
break
if ymeaskey!="" and xmeaskey!="":
for rec in data:
x,y='',''
spec=rec['er_specimen_name'] # get the ydata for this specimen
if rec[ymeaskey]!="" and methy in rec['magic_method_codes'].split(':'):
if ytreat_key=="" or (ytreat_key in list(rec.keys()) and float(rec[ytreat_key])==ystep):
y=float(rec[ymeaskey])
for rec in data: # now find the xdata
if rec['er_specimen_name']==spec and rec[xmeaskey]!="" and methx in rec['magic_method_codes'].split(':'):
if xtreat_key=="" or (xtreat_key in list(rec.keys()) and float(rec[xtreat_key])==xstep):
x=float(rec[xmeaskey])
if x != '' and y!= '':
X.append(x)
Y.append(y)
if len(X)>0:
pmagplotlib.clearFIG(FIG['fig'])
pmagplotlib.plot_xy(FIG['fig'],X,Y,sym='ro',xlab=methx,ylab=methy,title=plot+':Biplot')
if not pmagplotlib.isServer and do_plot:
pmagplotlib.draw_figs(FIG)
ans=input('S[a]ve plots, [q]uit, Return for next plot ' )
if ans=='a':
files={}
for key in list(FIG.keys()): files[key]=plot+'_'+key+fmt
pmagplotlib.save_plots(FIG,files)
if ans=='q':
print("Good-bye\n")
sys.exit()
else:
files={}
for key in list(FIG.keys()): files[key]=plot+'_'+key+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
titles={}
titles['fig']='X Y Plot'
FIG = pmagplotlib.add_borders(FIG,titles,black,purple)
pmagplotlib.save_plots(FIG,files)
else:
print('nothing to plot for ',plot) | NAME
biplot_magic.py
DESCRIPTION
makes a biplot of specified variables from magic_measurements.txt format file
SYNTAX
biplot_magic.py [-h] [-i] [command line options]
INPUT
        takes a MagIC formatted magic_measurements file
OPTIONS
-h prints help message and quits
-i interactively set filename and axes for plotting
-f FILE: specifies file name, default: magic_measurements.txt
-fmt [svg,png,jpg], format for images - default is svg
        -sav save figure and quit
        -x XMETH:key:step, specify method code for X axis (optional key and treatment values)
        -y YMETH:key:step, specify method code for Y axis
-obj OBJ: specify object [loc, sit, sam, spc] for plot, default is whole file
-n [V,M] plot volume or mass normalized data only
NOTES
if nothing is specified for x and y, the user will be presented with options
        key = ['treatment_ac_field','treatment_dc_field','treatment_temp']
step in mT for fields, K for temperatures | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/deprecated/biplot_magic.py#L10-L217 |
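
A minimal sketch (an assumption, not code from the repository) of the specimen-matching step performed above: for each record carrying the Y-axis method code, find the record for the same er_specimen_name that carries the X-axis method code and collect the (x, y) pair.

def pair_by_specimen(data, methx, methy, xmeaskey, ymeaskey):
    # data: list of measurement dicts, e.g. as returned by pmag.magic_read above
    pairs = []
    for rec in data:
        if rec.get(ymeaskey, "") != "" and methy in rec['magic_method_codes'].split(':'):
            spec = rec['er_specimen_name']
            for xrec in data:
                if (xrec['er_specimen_name'] == spec
                        and xrec.get(xmeaskey, "") != ""
                        and methx in xrec['magic_method_codes'].split(':')):
                    pairs.append((float(xrec[xmeaskey]), float(rec[ymeaskey])))
    return pairs
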
PmagPy/PmagPy | programs/strip_magic.py | main | def main():
"""
NAME
strip_magic.py
DESCRIPTION
plots various parameters versus depth or age
SYNTAX
        strip_magic.py [command line options]
OPTIONS
-h prints help message and quits
-DM NUM: specify data model num, options 2 (legacy) or 3 (default)
-f FILE: specify input magic format file from magic,default='pmag_results.txt'
supported types=[pmag_specimens, pmag_samples, pmag_sites, pmag_results, magic_web]
        -obj [sit,sam,all]: specify object to plot (site, sample, or all) from the pmag_results table, default is all
-fmt [svg,png,jpg], format for images - default is svg
-x [age,pos]: specify whether age or stratigraphic position
-y [dec,inc,int,chi,lat,lon,vdm,vadm]
(lat and lon are VGP lat and lon)
-Iex: plot the expected inc at lat - only available for results with lat info in file
-ts TS amin amax: plot the GPTS for the time interval between amin and amax (numbers in Ma)
TS: [ck95, gts04]
-mcd method_code, specify method code, default is first one encountered
-sav save plot and quit
NOTES
when x and/or y are not specified, a list of possibilities will be presented to the user for choosing
"""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
xaxis, xplotind, yplotind = "", 0, 0 # (0 for strat pos)
yaxis, Xinc = "", ""
plot = 0
obj = 'all'
data_model_num = int(pmag.get_named_arg("-DM", 3))
# 2.5 keys
if data_model_num == 2:
supported = ['pmag_specimens', 'pmag_samples',
'pmag_sites', 'pmag_results', 'magic_web'] # available file types
Depth_keys = ['specimen_core_depth', 'specimen_height', 'specimen_elevation',
'specimen_composite_depth', 'sample_core_depth', 'sample_height',
'sample_elevation', 'sample_composite_depth', 'site_core_depth',
'site_height', 'site_elevation', 'site_composite_depth', 'average_height']
Age_keys = ['specimen_inferred_age', 'sample_inferred_age',
'site_inferred_age', 'average_age']
Unit_keys = {'specimen_inferred_age': 'specimen_inferred_age_unit',
'sample_inferred_age': 'sample_inferred_age_unit',
'site_inferred_age': 'site_inferred_age_unit', 'average_age': 'average_age_unit'}
Dec_keys = ['measurement_dec', 'specimen_dec',
'sample_dec', 'site_dec', 'average_dec']
Inc_keys = ['measurement_inc', 'specimen_inc',
'sample_inc', 'site_inc', 'average_inc']
Int_keys = ['measurement_magnitude', 'measurement_magn_moment', 'measurement_magn_volume',
'measurement_magn_mass', 'specimen_int', 'specimen_int_rel', 'sample_int',
'sample_int_rel', 'site_int', 'site_int_rel', 'average_int', 'average_int_rel']
Chi_keys = ['measurement_chi_volume', 'measurement_chi_mass']
Lat_keys = ['sample_lat', 'site_lat', 'average_lat']
VLat_keys = ['vgp_lat']
VLon_keys = ['vgp_lon']
Vdm_keys = ['vdm']
Vadm_keys = ['vadm']
method_col_name = "magic_method_codes"
else:
# 3.0 keys
supported = ["specimens", "samples", "sites", "locations"] # available file types
Depth_keys = [ "height", "core_depth", "elevation", "composite_depth" ]
Age_keys = [ "age" ]
Unit_keys = { "age": "age" }
Chi_keys = [ "susc_chi_volume", "susc_chi_mass" ]
Int_keys = [ "magn_moment", "magn_volume", "magn_mass", "int_abs", "int_rel" ]
Inc_keys = [ "dir_inc" ]
Dec_keys = [ "dir_dec" ]
        Lat_keys = [ "lat" ]
VLat_keys = [ "vgp_lat", "pole_lat" ]
VLon_keys = [ "vgp_lon", "pole_lon" ]
Vdm_keys = [ "vdm", "pdm" ]
Vadm_keys = [ "vadm", "padm" ]
method_col_name = "method_codes"
#
X_keys = [Age_keys, Depth_keys]
Y_keys = [Dec_keys, Inc_keys, Int_keys, Chi_keys,
VLat_keys, VLon_keys, Vdm_keys, Vadm_keys]
method, fmt = "", 'svg'
FIG = {'strat': 1}
plotexp, pTS = 0, 0
dir_path = pmag.get_named_arg("-WD", ".")
# default files
if data_model_num == 3:
res_file = pmag.get_named_arg("-f", "sites.txt")
else:
res_file = pmag.get_named_arg("-f", "pmag_results.txt")
res_file = pmag.resolve_file_name(res_file, dir_path)
if '-fmt' in sys.argv:
ind = sys.argv.index('-fmt')
fmt = sys.argv[ind+1]
if '-obj' in sys.argv:
ind = sys.argv.index('-obj')
obj = sys.argv[ind+1]
if '-x' in sys.argv:
ind = sys.argv.index('-x')
xaxis = sys.argv[ind+1]
if '-y' in sys.argv:
ind = sys.argv.index('-y')
yaxis = sys.argv[ind+1]
if yaxis == 'dec':
ykeys = Dec_keys
if yaxis == 'inc':
ykeys = Inc_keys
if yaxis == 'int':
ykeys = Int_keys
if yaxis == 'chi':
ykeys = Chi_keys
if yaxis == 'lat':
ykeys = VLat_keys
if yaxis == 'lon':
ykeys = VLon_keys
if yaxis == 'vdm':
ykeys = Vdm_keys
if yaxis == 'vadm':
ykeys = Vadm_keys
if '-mcd' in sys.argv:
ind = sys.argv.index('-mcd')
method = sys.argv[ind+1]
if '-ts' in sys.argv:
ind = sys.argv.index('-ts')
ts = sys.argv[ind+1]
amin = float(sys.argv[ind+2])
amax = float(sys.argv[ind+3])
pTS = 1
if '-Iex' in sys.argv:
plotexp = 1
if '-sav' in sys.argv:
plot = 1
#
#
# get data read in
Results, file_type = pmag.magic_read(res_file)
if file_type not in supported:
print("Unsupported file type ({}), try again".format(file_type))
sys.exit()
PltObjs = ['all']
if data_model_num == 2:
if file_type == 'pmag_results': # find out what to plot
for rec in Results:
resname = rec['pmag_result_name'].split()
if 'Sample' in resname and 'sam' not in PltObjs:
PltObjs.append('sam')
if 'Site' in resname and 'sit' not in PltObjs:
PltObjs.append('sit')
methcodes = []
# need to know all the measurement types from method_codes
if "magic_method_codes" in list(Results[0].keys()):
for rec in Results:
meths = rec["magic_method_codes"].split(":")
for meth in meths:
if meth.strip() not in methcodes and 'LP' in meth:
# look for the lab treatments
methcodes.append(meth.strip())
#
# initialize some variables
X_unit = "" # Unit for age or depth plotting (meters if depth)
Xplots, Yplots = [], []
Xunits = []
yplotind, xplotind = 0, 0
#
# step through possible plottable keys
#
if xaxis == "" or yaxis == "":
for key in list(Results[0].keys()):
for keys in X_keys:
for xkeys in keys:
if key in xkeys:
for ResRec in Results:
if ResRec[key] != "":
# only plot something if there is something to plot!
Xplots.append(key)
break
for keys in Y_keys:
for pkeys in keys:
if key in pkeys:
for ResRec in Results:
if ResRec[key] != "":
Yplots.append(key)
break
X, Y = [], []
for plt in Xplots:
if plt in Age_keys and 'age' not in X:
X.append('age')
if plt in Depth_keys and 'pos' not in X:
X.append('pos')
for plt in Yplots:
if plt in Dec_keys and 'dec' not in Y:
Y.append('dec')
if plt in Inc_keys and 'inc' not in Y:
Y.append('inc')
if plt in Int_keys and 'int' not in Y:
Y.append('int')
if plt in Chi_keys and 'chi' not in Y:
Y.append('chi')
if plt in VLat_keys and 'lat' not in Y:
Y.append('lat')
if plt in VLon_keys and 'lon' not in Y:
Y.append('lon')
if plt in Vadm_keys and 'vadm' not in Y:
Y.append('vadm')
if plt in Vdm_keys and 'vdm' not in Y:
Y.append('vdm')
if file_type == 'pmag_results':
print('available objects for plotting: ', PltObjs)
print('available X plots: ', X)
print('available Y plots: ', Y)
print('available method codes: ', methcodes)
f = open(dir_path+'/.striprc', 'w')
for x in X:
f.write('x:'+x+'\n')
for y in Y:
f.write('y:'+y+'\n')
for m in methcodes:
f.write('m:'+m+'\n')
for obj in PltObjs:
f.write('obj:'+obj+'\n')
sys.exit()
if plotexp == 1:
for lkey in Lat_keys:
for key in list(Results[0].keys()):
if key == lkey:
lat = float(Results[0][lkey])
Xinc = [pmag.pinc(lat), -pmag.pinc(lat)]
break
if Xinc == "":
print('can not plot expected inc for site - lat unknown')
if method != "" and method not in methcodes:
print('your method not available, but these are: ')
print(methcodes)
print('use ', methcodes[0], '? ^D to quit')
if xaxis == 'age':
for akey in Age_keys:
for key in list(Results[0].keys()):
if key == akey:
Xplots.append(key)
Xunits.append(Unit_keys[key])
if xaxis == 'pos':
for dkey in Depth_keys:
for key in list(Results[0].keys()):
if key == dkey:
Xplots.append(key)
if len(Xplots) == 0:
print('desired X axis information not found')
sys.exit()
if xaxis == 'age':
age_unit = Results[0][Xunits[0]]
if len(Xplots) > 1:
print('multiple X axis keys found, using: ', Xplots[xplotind])
for ykey in ykeys:
for key in list(Results[0].keys()):
if key == ykey:
Yplots.append(key)
if len(Yplots) == 0:
print('desired Y axis information not found')
sys.exit()
if len(Yplots) > 1:
print('multiple Y axis keys found, using: ', Yplots[yplotind])
# check if age or depth info
if len(Xplots) == 0:
print("Must have either age or height info to plot ")
sys.exit()
#
# check for variable to plot
#
#
# determine X axis (age or depth)
#
if xaxis == "age":
plotind = "1"
if method == "":
try:
method = methcodes[0]
except IndexError:
method = ""
if xaxis == 'pos':
xlab = "Stratigraphic Height (meters)"
else:
xlab = "Age ("+age_unit+")"
Xkey = Xplots[xplotind]
Ykey = Yplots[yplotind]
ylab = Ykey
#
# collect the data for plotting
XY = []
isign = 1.
# if float(Results[0][Xkey])/float(Results[-1][Xkey])>0 and float(Results[0][Xkey])<0:
# isign=-1. # x axis all same sign and negative, take positive (e.g.,for depth in core)
# xlab="Stratigraphic Position (meters)"
# else:
# isign=1.
for rec in Results:
if "magic_method_codes" in list(rec.keys()):
meths = rec["magic_method_codes"].split(":")
if method in meths: # make sure it is desired lab treatment step
if obj == 'all' and rec[Xkey].strip() != "":
XY.append([isign*float(rec[Xkey]), float(rec[Ykey])])
elif rec[Xkey].strip() != "":
name = rec['pmag_result_name'].split()
if obj == 'sit' and "Site" in name:
XY.append([isign*float(rec[Xkey]), float(rec[Ykey])])
if obj == 'sam' and "Sample" in name:
XY.append([isign*float(rec[Xkey]), float(rec[Ykey])])
elif method == "":
if obj == 'all' and rec[Xkey].strip() != "":
XY.append([isign*float(rec[Xkey]), float(rec[Ykey])])
elif rec[Xkey].strip() != "":
name = rec['pmag_result_name'].split()
if obj == 'sit' and "Site" in name:
XY.append([isign*float(rec[Xkey]), float(rec[Ykey])])
if obj == 'sam' and "Sample" in name:
XY.append([isign*float(rec[Xkey]), float(rec[Ykey])])
else:
print("Something wrong with your plotting choices")
break
    XY.sort()
    title = ""
    if "er_location_names" in list(Results[0].keys()):
        title = Results[0]["er_location_names"]
    if "er_location_name" in list(Results[0].keys()):
        title = Results[0]["er_location_name"]
labels = [xlab, ylab, title]
pmagplotlib.plot_init(FIG['strat'], 10, 5)
pmagplotlib.plot_strat(FIG['strat'], XY, labels) # plot them
if plotexp == 1:
pmagplotlib.plot_hs(FIG['strat'], Xinc, 'b', '--')
if yaxis == 'inc' or yaxis == 'lat':
pmagplotlib.plot_hs(FIG['strat'], [0], 'b', '-')
pmagplotlib.plot_hs(FIG['strat'], [-90, 90], 'g', '-')
if pTS == 1:
FIG['ts'] = 2
pmagplotlib.plot_init(FIG['ts'], 10, 5)
pmagplotlib.plot_ts(FIG['ts'], [amin, amax], ts)
files = {}
for key in list(FIG.keys()):
files[key] = key+'.'+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
files = {}
files['strat'] = xaxis+'_'+yaxis+'_.'+fmt
files['ts'] = 'ts.'+fmt
titles = {}
titles['strat'] = 'Depth/Time Series Plot'
titles['ts'] = 'Time Series Plot'
FIG = pmagplotlib.add_borders(FIG, titles, black, purple)
pmagplotlib.save_plots(FIG, files)
elif plot == 1:
pmagplotlib.save_plots(FIG, files)
else:
pmagplotlib.draw_figs(FIG)
ans = input(" S[a]ve to save plot, [q]uit without saving: ")
if ans == "a":
pmagplotlib.save_plots(FIG, files) | python | def main():
"""
NAME
strip_magic.py
DESCRIPTION
plots various parameters versus depth or age
SYNTAX
        strip_magic.py [command line options]
OPTIONS
-h prints help message and quits
-DM NUM: specify data model num, options 2 (legacy) or 3 (default)
-f FILE: specify input magic format file from magic,default='pmag_results.txt'
supported types=[pmag_specimens, pmag_samples, pmag_sites, pmag_results, magic_web]
        -obj [sit,sam,all]: specify object to plot (site, sample, or all) from the pmag_results table, default is all
-fmt [svg,png,jpg], format for images - default is svg
-x [age,pos]: specify whether age or stratigraphic position
-y [dec,inc,int,chi,lat,lon,vdm,vadm]
(lat and lon are VGP lat and lon)
-Iex: plot the expected inc at lat - only available for results with lat info in file
-ts TS amin amax: plot the GPTS for the time interval between amin and amax (numbers in Ma)
TS: [ck95, gts04]
-mcd method_code, specify method code, default is first one encountered
-sav save plot and quit
NOTES
when x and/or y are not specified, a list of possibilities will be presented to the user for choosing
"""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
xaxis, xplotind, yplotind = "", 0, 0 # (0 for strat pos)
yaxis, Xinc = "", ""
plot = 0
obj = 'all'
data_model_num = int(pmag.get_named_arg("-DM", 3))
# 2.5 keys
if data_model_num == 2:
supported = ['pmag_specimens', 'pmag_samples',
'pmag_sites', 'pmag_results', 'magic_web'] # available file types
Depth_keys = ['specimen_core_depth', 'specimen_height', 'specimen_elevation',
'specimen_composite_depth', 'sample_core_depth', 'sample_height',
'sample_elevation', 'sample_composite_depth', 'site_core_depth',
'site_height', 'site_elevation', 'site_composite_depth', 'average_height']
Age_keys = ['specimen_inferred_age', 'sample_inferred_age',
'site_inferred_age', 'average_age']
Unit_keys = {'specimen_inferred_age': 'specimen_inferred_age_unit',
'sample_inferred_age': 'sample_inferred_age_unit',
'site_inferred_age': 'site_inferred_age_unit', 'average_age': 'average_age_unit'}
Dec_keys = ['measurement_dec', 'specimen_dec',
'sample_dec', 'site_dec', 'average_dec']
Inc_keys = ['measurement_inc', 'specimen_inc',
'sample_inc', 'site_inc', 'average_inc']
Int_keys = ['measurement_magnitude', 'measurement_magn_moment', 'measurement_magn_volume',
'measurement_magn_mass', 'specimen_int', 'specimen_int_rel', 'sample_int',
'sample_int_rel', 'site_int', 'site_int_rel', 'average_int', 'average_int_rel']
Chi_keys = ['measurement_chi_volume', 'measurement_chi_mass']
Lat_keys = ['sample_lat', 'site_lat', 'average_lat']
VLat_keys = ['vgp_lat']
VLon_keys = ['vgp_lon']
Vdm_keys = ['vdm']
Vadm_keys = ['vadm']
method_col_name = "magic_method_codes"
else:
# 3.0 keys
supported = ["specimens", "samples", "sites", "locations"] # available file types
Depth_keys = [ "height", "core_depth", "elevation", "composite_depth" ]
Age_keys = [ "age" ]
Unit_keys = { "age": "age" }
Chi_keys = [ "susc_chi_volume", "susc_chi_mass" ]
Int_keys = [ "magn_moment", "magn_volume", "magn_mass", "int_abs", "int_rel" ]
Inc_keys = [ "dir_inc" ]
Dec_keys = [ "dir_dec" ]
        Lat_keys = [ "lat" ]
VLat_keys = [ "vgp_lat", "pole_lat" ]
VLon_keys = [ "vgp_lon", "pole_lon" ]
Vdm_keys = [ "vdm", "pdm" ]
Vadm_keys = [ "vadm", "padm" ]
method_col_name = "method_codes"
#
X_keys = [Age_keys, Depth_keys]
Y_keys = [Dec_keys, Inc_keys, Int_keys, Chi_keys,
VLat_keys, VLon_keys, Vdm_keys, Vadm_keys]
method, fmt = "", 'svg'
FIG = {'strat': 1}
plotexp, pTS = 0, 0
dir_path = pmag.get_named_arg("-WD", ".")
# default files
if data_model_num == 3:
res_file = pmag.get_named_arg("-f", "sites.txt")
else:
res_file = pmag.get_named_arg("-f", "pmag_results.txt")
res_file = pmag.resolve_file_name(res_file, dir_path)
if '-fmt' in sys.argv:
ind = sys.argv.index('-fmt')
fmt = sys.argv[ind+1]
if '-obj' in sys.argv:
ind = sys.argv.index('-obj')
obj = sys.argv[ind+1]
if '-x' in sys.argv:
ind = sys.argv.index('-x')
xaxis = sys.argv[ind+1]
if '-y' in sys.argv:
ind = sys.argv.index('-y')
yaxis = sys.argv[ind+1]
if yaxis == 'dec':
ykeys = Dec_keys
if yaxis == 'inc':
ykeys = Inc_keys
if yaxis == 'int':
ykeys = Int_keys
if yaxis == 'chi':
ykeys = Chi_keys
if yaxis == 'lat':
ykeys = VLat_keys
if yaxis == 'lon':
ykeys = VLon_keys
if yaxis == 'vdm':
ykeys = Vdm_keys
if yaxis == 'vadm':
ykeys = Vadm_keys
if '-mcd' in sys.argv:
ind = sys.argv.index('-mcd')
method = sys.argv[ind+1]
if '-ts' in sys.argv:
ind = sys.argv.index('-ts')
ts = sys.argv[ind+1]
amin = float(sys.argv[ind+2])
amax = float(sys.argv[ind+3])
pTS = 1
if '-Iex' in sys.argv:
plotexp = 1
if '-sav' in sys.argv:
plot = 1
#
#
# get data read in
Results, file_type = pmag.magic_read(res_file)
if file_type not in supported:
print("Unsupported file type ({}), try again".format(file_type))
sys.exit()
PltObjs = ['all']
if data_model_num == 2:
if file_type == 'pmag_results': # find out what to plot
for rec in Results:
resname = rec['pmag_result_name'].split()
if 'Sample' in resname and 'sam' not in PltObjs:
PltObjs.append('sam')
if 'Site' in resname and 'sit' not in PltObjs:
PltObjs.append('sit')
methcodes = []
# need to know all the measurement types from method_codes
if "magic_method_codes" in list(Results[0].keys()):
for rec in Results:
meths = rec["magic_method_codes"].split(":")
for meth in meths:
if meth.strip() not in methcodes and 'LP' in meth:
# look for the lab treatments
methcodes.append(meth.strip())
#
# initialize some variables
X_unit = "" # Unit for age or depth plotting (meters if depth)
Xplots, Yplots = [], []
Xunits = []
yplotind, xplotind = 0, 0
#
# step through possible plottable keys
#
if xaxis == "" or yaxis == "":
for key in list(Results[0].keys()):
for keys in X_keys:
for xkeys in keys:
if key in xkeys:
for ResRec in Results:
if ResRec[key] != "":
# only plot something if there is something to plot!
Xplots.append(key)
break
for keys in Y_keys:
for pkeys in keys:
if key in pkeys:
for ResRec in Results:
if ResRec[key] != "":
Yplots.append(key)
break
X, Y = [], []
for plt in Xplots:
if plt in Age_keys and 'age' not in X:
X.append('age')
if plt in Depth_keys and 'pos' not in X:
X.append('pos')
for plt in Yplots:
if plt in Dec_keys and 'dec' not in Y:
Y.append('dec')
if plt in Inc_keys and 'inc' not in Y:
Y.append('inc')
if plt in Int_keys and 'int' not in Y:
Y.append('int')
if plt in Chi_keys and 'chi' not in Y:
Y.append('chi')
if plt in VLat_keys and 'lat' not in Y:
Y.append('lat')
if plt in VLon_keys and 'lon' not in Y:
Y.append('lon')
if plt in Vadm_keys and 'vadm' not in Y:
Y.append('vadm')
if plt in Vdm_keys and 'vdm' not in Y:
Y.append('vdm')
if file_type == 'pmag_results':
print('available objects for plotting: ', PltObjs)
print('available X plots: ', X)
print('available Y plots: ', Y)
print('available method codes: ', methcodes)
f = open(dir_path+'/.striprc', 'w')
for x in X:
f.write('x:'+x+'\n')
for y in Y:
f.write('y:'+y+'\n')
for m in methcodes:
f.write('m:'+m+'\n')
for obj in PltObjs:
f.write('obj:'+obj+'\n')
sys.exit()
if plotexp == 1:
for lkey in Lat_keys:
for key in list(Results[0].keys()):
if key == lkey:
lat = float(Results[0][lkey])
Xinc = [pmag.pinc(lat), -pmag.pinc(lat)]
break
if Xinc == "":
print('can not plot expected inc for site - lat unknown')
if method != "" and method not in methcodes:
print('your method not available, but these are: ')
print(methcodes)
print('use ', methcodes[0], '? ^D to quit')
if xaxis == 'age':
for akey in Age_keys:
for key in list(Results[0].keys()):
if key == akey:
Xplots.append(key)
Xunits.append(Unit_keys[key])
if xaxis == 'pos':
for dkey in Depth_keys:
for key in list(Results[0].keys()):
if key == dkey:
Xplots.append(key)
if len(Xplots) == 0:
print('desired X axis information not found')
sys.exit()
if xaxis == 'age':
age_unit = Results[0][Xunits[0]]
if len(Xplots) > 1:
print('multiple X axis keys found, using: ', Xplots[xplotind])
for ykey in ykeys:
for key in list(Results[0].keys()):
if key == ykey:
Yplots.append(key)
if len(Yplots) == 0:
print('desired Y axis information not found')
sys.exit()
if len(Yplots) > 1:
print('multiple Y axis keys found, using: ', Yplots[yplotind])
# check if age or depth info
if len(Xplots) == 0:
print("Must have either age or height info to plot ")
sys.exit()
#
# check for variable to plot
#
#
# determine X axis (age or depth)
#
if xaxis == "age":
plotind = "1"
if method == "":
try:
method = methcodes[0]
except IndexError:
method = ""
if xaxis == 'pos':
xlab = "Stratigraphic Height (meters)"
else:
xlab = "Age ("+age_unit+")"
Xkey = Xplots[xplotind]
Ykey = Yplots[yplotind]
ylab = Ykey
#
# collect the data for plotting
XY = []
isign = 1.
# if float(Results[0][Xkey])/float(Results[-1][Xkey])>0 and float(Results[0][Xkey])<0:
# isign=-1. # x axis all same sign and negative, take positive (e.g.,for depth in core)
# xlab="Stratigraphic Position (meters)"
# else:
# isign=1.
for rec in Results:
if "magic_method_codes" in list(rec.keys()):
meths = rec["magic_method_codes"].split(":")
if method in meths: # make sure it is desired lab treatment step
if obj == 'all' and rec[Xkey].strip() != "":
XY.append([isign*float(rec[Xkey]), float(rec[Ykey])])
elif rec[Xkey].strip() != "":
name = rec['pmag_result_name'].split()
if obj == 'sit' and "Site" in name:
XY.append([isign*float(rec[Xkey]), float(rec[Ykey])])
if obj == 'sam' and "Sample" in name:
XY.append([isign*float(rec[Xkey]), float(rec[Ykey])])
elif method == "":
if obj == 'all' and rec[Xkey].strip() != "":
XY.append([isign*float(rec[Xkey]), float(rec[Ykey])])
elif rec[Xkey].strip() != "":
name = rec['pmag_result_name'].split()
if obj == 'sit' and "Site" in name:
XY.append([isign*float(rec[Xkey]), float(rec[Ykey])])
if obj == 'sam' and "Sample" in name:
XY.append([isign*float(rec[Xkey]), float(rec[Ykey])])
else:
print("Something wrong with your plotting choices")
break
    XY.sort()
    title = ""
    if "er_location_names" in list(Results[0].keys()):
        title = Results[0]["er_location_names"]
    if "er_location_name" in list(Results[0].keys()):
        title = Results[0]["er_location_name"]
labels = [xlab, ylab, title]
pmagplotlib.plot_init(FIG['strat'], 10, 5)
pmagplotlib.plot_strat(FIG['strat'], XY, labels) # plot them
if plotexp == 1:
pmagplotlib.plot_hs(FIG['strat'], Xinc, 'b', '--')
if yaxis == 'inc' or yaxis == 'lat':
pmagplotlib.plot_hs(FIG['strat'], [0], 'b', '-')
pmagplotlib.plot_hs(FIG['strat'], [-90, 90], 'g', '-')
if pTS == 1:
FIG['ts'] = 2
pmagplotlib.plot_init(FIG['ts'], 10, 5)
pmagplotlib.plot_ts(FIG['ts'], [amin, amax], ts)
files = {}
for key in list(FIG.keys()):
files[key] = key+'.'+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
files = {}
files['strat'] = xaxis+'_'+yaxis+'_.'+fmt
files['ts'] = 'ts.'+fmt
titles = {}
titles['strat'] = 'Depth/Time Series Plot'
titles['ts'] = 'Time Series Plot'
FIG = pmagplotlib.add_borders(FIG, titles, black, purple)
pmagplotlib.save_plots(FIG, files)
elif plot == 1:
pmagplotlib.save_plots(FIG, files)
else:
pmagplotlib.draw_figs(FIG)
ans = input(" S[a]ve to save plot, [q]uit without saving: ")
if ans == "a":
pmagplotlib.save_plots(FIG, files) | NAME
strip_magic.py
DESCRIPTION
plots various parameters versus depth or age
SYNTAX
        strip_magic.py [command line options]
OPTIONS
-h prints help message and quits
-DM NUM: specify data model num, options 2 (legacy) or 3 (default)
-f FILE: specify input magic format file from magic,default='pmag_results.txt'
supported types=[pmag_specimens, pmag_samples, pmag_sites, pmag_results, magic_web]
        -obj [sit,sam,all]: specify object to plot (site, sample, or all) from the pmag_results table, default is all
-fmt [svg,png,jpg], format for images - default is svg
-x [age,pos]: specify whether age or stratigraphic position
-y [dec,inc,int,chi,lat,lon,vdm,vadm]
(lat and lon are VGP lat and lon)
-Iex: plot the expected inc at lat - only available for results with lat info in file
-ts TS amin amax: plot the GPTS for the time interval between amin and amax (numbers in Ma)
TS: [ck95, gts04]
-mcd method_code, specify method code, default is first one encountered
-sav save plot and quit
NOTES
when x and/or y are not specified, a list of possibilities will be presented to the user for choosing | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/strip_magic.py#L11-L375 |
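
A hedged sketch (assumed helper, not in the repository) of the column-selection idea used above: scan a list of candidate column names and return the first one that is actually present and populated in the records, which is how the X and Y plot keys end up chosen.

def first_populated_key(records, candidate_keys):
    # return the first candidate column name that is present and non-empty in any record
    for key in candidate_keys:
        for rec in records:
            if rec.get(key, "") != "":
                return key
    return None
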
PmagPy/PmagPy | programs/site_edit_magic.py | main | def main():
"""
NAME
site_edit_magic.py
DESCRIPTION
makes equal area projections site by site
from pmag_specimens.txt file with
Fisher confidence ellipse using McFadden and McElhinny (1988)
technique for combining lines and planes
        allows testing and rejecting specimens with bad orientations
SYNTAX
site_edit_magic.py [command line options]
OPTIONS
-h: prints help and quits
-f: specify pmag_specimen format file, default is pmag_specimens.txt
-fsa: specify er_samples.txt file
-exc: use existing pmag_criteria.txt file
-N: reset all sample flags to good
    OUTPUT
edited er_samples.txt file
"""
dir_path='.'
FIG={} # plot dictionary
FIG['eqarea']=1 # eqarea is figure 1
in_file='pmag_specimens.txt'
sampfile='er_samples.txt'
out_file=""
fmt,plot='svg',1
Crits=""
M,N=180.,1
repeat=''
renew=0
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-WD' in sys.argv:
ind=sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
if '-f' in sys.argv:
ind=sys.argv.index("-f")
in_file=sys.argv[ind+1]
if '-fsa' in sys.argv:
ind=sys.argv.index("-fsa")
sampfile=sys.argv[ind+1]
if '-exc' in sys.argv:
Crits,file_type=pmag.magic_read(dir_path+'/pmag_criteria.txt')
for crit in Crits:
if crit['pmag_criteria_code']=='DE-SPEC':
M=float(crit['specimen_mad'])
N=float(crit['specimen_n'])
if '-fmt' in sys.argv:
ind=sys.argv.index("-fmt")
fmt=sys.argv[ind+1]
if '-N' in sys.argv: renew=1
#
if in_file[0]!="/":in_file=dir_path+'/'+in_file
if sampfile[0]!="/":sampfile=dir_path+'/'+sampfile
crd='s'
Specs,file_type=pmag.magic_read(in_file)
if file_type!='pmag_specimens':
print(' bad pmag_specimen input file')
sys.exit()
Samps,file_type=pmag.magic_read(sampfile)
if file_type!='er_samples':
print(' bad er_samples input file')
sys.exit()
SO_methods=[]
for rec in Samps:
if 'sample_orientation_flag' not in list(rec.keys()): rec['sample_orientation_flag']='g'
if 'sample_description' not in list(rec.keys()): rec['sample_description']=''
if renew==1:
rec['sample_orientation_flag']='g'
description=rec['sample_description']
if '#' in description:
newdesc=""
c=0
while description[c]!='#' and c<len(description)-1: # look for first pound sign
newdesc=newdesc+description[c]
c+=1
while description[c]=='#':
c+=1# skip first set of pound signs
while description[c]!='#':c+=1 # find second set of pound signs
while description[c]=='#' and c<len(description)-1:c+=1 # skip second set of pound signs
while c<len(description)-1: # look for first pound sign
newdesc=newdesc+description[c]
c+=1
rec['sample_description']=newdesc # edit out old comment about orientations
if "magic_method_codes" in rec:
methlist=rec["magic_method_codes"]
for meth in methlist.split(":"):
if "SO" in meth.strip() and "SO-POM" not in meth.strip():
if meth.strip() not in SO_methods: SO_methods.append(meth.strip())
pmag.magic_write(sampfile,Samps,'er_samples')
SO_priorities=pmag.set_priorities(SO_methods,0)
sitelist=[]
for rec in Specs:
if rec['er_site_name'] not in sitelist: sitelist.append(rec['er_site_name'])
sitelist.sort()
EQ={}
EQ['eqarea']=1
pmagplotlib.plot_init(EQ['eqarea'],5,5)
k=0
while k<len(sitelist):
site=sitelist[k]
print(site)
data=[]
ThisSiteSpecs=pmag.get_dictitem(Specs,'er_site_name',site,'T')
ThisSiteSpecs=pmag.get_dictitem(ThisSiteSpecs,'specimen_tilt_correction','-1','T') # get all the unoriented data
for spec in ThisSiteSpecs:
if spec['specimen_mad']!="" and spec['specimen_n']!="" and float(spec['specimen_mad'])<=M and float(spec['specimen_n'])>=N:
# good spec, now get orientation....
redo,p=1,0
if len(SO_methods)<=1:
az_type=SO_methods[0]
orient=pmag.find_samp_rec(spec["er_sample_name"],Samps,az_type)
redo=0
while redo==1:
if p>=len(SO_priorities):
print("no orientation data for ",spec['er_sample_name'])
orient["sample_azimuth"]=""
orient["sample_dip"]=""
redo=0
else:
az_type=SO_methods[SO_methods.index(SO_priorities[p])]
orient=pmag.find_samp_rec(spec["er_sample_name"],Samps,az_type)
if orient["sample_azimuth"] !="":
redo=0
p+=1
if orient['sample_azimuth']!="":
rec={}
for key in list(spec.keys()):rec[key]=spec[key]
rec['dec'],rec['inc']=pmag.dogeo(float(spec['specimen_dec']),float(spec['specimen_inc']),float(orient['sample_azimuth']),float(orient['sample_dip']))
rec["tilt_correction"]='1'
crd='g'
rec['sample_azimuth']=orient['sample_azimuth']
rec['sample_dip']=orient['sample_dip']
data.append(rec)
if len(data)>2:
print('specimen, dec, inc, n_meas/MAD,| method codes ')
for i in range(len(data)):
print('%s: %7.1f %7.1f %s / %s | %s' % (data[i]['er_specimen_name'], data[i]['dec'], data[i]['inc'], data[i]['specimen_n'], data[i]['specimen_mad'], data[i]['magic_method_codes']))
fpars=pmag.dolnp(data,'specimen_direction_type')
print("\n Site lines planes kappa a95 dec inc")
print(site, fpars["n_lines"], fpars["n_planes"], fpars["K"], fpars["alpha95"], fpars["dec"], fpars["inc"], fpars["R"])
if out_file!="":
if float(fpars["alpha95"])<=acutoff and float(fpars["K"])>=kcutoff:
out.write('%s %s %s\n'%(fpars["dec"],fpars['inc'],fpars['alpha95']))
pmagplotlib.plot_lnp(EQ['eqarea'],site,data,fpars,'specimen_direction_type')
pmagplotlib.draw_figs(EQ)
if k!=0 and repeat!='y':
ans=input("s[a]ve plot, [q]uit, [e]dit specimens, [p]revious site, <return> to continue:\n ")
elif k==0 and repeat!='y':
ans=input("s[a]ve plot, [q]uit, [e]dit specimens, <return> to continue:\n ")
if ans=="p": k-=2
if ans=="a":
files={}
files['eqarea']=site+'_'+crd+'_eqarea'+'.'+fmt
pmagplotlib.save_plots(EQ,files)
if ans=="q": sys.exit()
if ans=="e" and Samps==[]:
print("can't edit samples without orientation file, sorry")
elif ans=="e":
# k-=1
testspec=input("Enter name of specimen to check: ")
for spec in data:
if spec['er_specimen_name']==testspec:
# first test wrong direction of drill arrows (flip drill direction in opposite direction and re-calculate d,i
d,i=pmag.dogeo(float(spec['specimen_dec']),float(spec['specimen_inc']),float(spec['sample_azimuth'])-180.,-float(spec['sample_dip']))
XY=pmag.dimap(d,i)
pmagplotlib.plot_xy(EQ['eqarea'],[XY[0]],[XY[1]],sym='g^')
# first test wrong end of compass (take az-180.)
d,i=pmag.dogeo(float(spec['specimen_dec']),float(spec['specimen_inc']),float(spec['sample_azimuth'])-180.,float(spec['sample_dip']))
XY=pmag.dimap(d,i)
pmagplotlib.plot_xy(EQ['eqarea'],[XY[0]],[XY[1]],sym='kv')
# did the sample spin in the hole?
# now spin around specimen's z
X_up,Y_up,X_d,Y_d=[],[],[],[]
for incr in range(0,360,5):
d,i=pmag.dogeo(float(spec['specimen_dec'])+incr,float(spec['specimen_inc']),float(spec['sample_azimuth']),float(spec['sample_dip']))
XY=pmag.dimap(d,i)
if i>=0:
X_d.append(XY[0])
Y_d.append(XY[1])
else:
X_up.append(XY[0])
Y_up.append(XY[1])
pmagplotlib.plot_xy(EQ['eqarea'],X_d,Y_d,sym='b.')
pmagplotlib.plot_xy(EQ['eqarea'],X_up,Y_up,sym='c.')
pmagplotlib.draw_figs(EQ)
break
print("Triangle: wrong arrow for drill direction.")
print("Delta: wrong end of compass.")
print("Small circle: wrong mark on sample. [cyan upper hemisphere]")
deleteme=input("Mark this sample as bad? y/[n] ")
if deleteme=='y':
reason=input("Reason: [1] broke, [2] wrong drill direction, [3] wrong compass direction, [4] bad mark, [5] displaced block [6] other ")
if reason=='1':
description=' sample broke while drilling'
if reason=='2':
description=' wrong drill direction '
if reason=='3':
description=' wrong compass direction '
if reason=='4':
description=' bad mark in field'
if reason=='5':
description=' displaced block'
if reason=='6':
description=input('Enter brief reason for deletion: ')
for samp in Samps:
if samp['er_sample_name']==spec['er_sample_name']:
samp['sample_orientation_flag']='b'
samp['sample_description']=samp['sample_description']+' ## direction deleted because: '+description+'##' # mark description
pmag.magic_write(sampfile,Samps,'er_samples')
repeat=input("Mark another sample, this site? y/[n] ")
if repeat=='y': k-=1
else:
print('skipping site - not enough data with specified coordinate system')
k+=1
print("sample flags stored in ",sampfile) | python | def main():
"""
NAME
site_edit_magic.py
DESCRIPTION
makes equal area projections site by site
from pmag_specimens.txt file with
Fisher confidence ellipse using McFadden and McElhinny (1988)
technique for combining lines and planes
        allows testing and rejecting specimens with bad orientations
SYNTAX
site_edit_magic.py [command line options]
OPTIONS
-h: prints help and quits
-f: specify pmag_specimen format file, default is pmag_specimens.txt
-fsa: specify er_samples.txt file
-exc: use existing pmag_criteria.txt file
-N: reset all sample flags to good
    OUTPUT
edited er_samples.txt file
"""
dir_path='.'
FIG={} # plot dictionary
FIG['eqarea']=1 # eqarea is figure 1
in_file='pmag_specimens.txt'
sampfile='er_samples.txt'
out_file=""
fmt,plot='svg',1
Crits=""
M,N=180.,1
repeat=''
renew=0
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-WD' in sys.argv:
ind=sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
if '-f' in sys.argv:
ind=sys.argv.index("-f")
in_file=sys.argv[ind+1]
if '-fsa' in sys.argv:
ind=sys.argv.index("-fsa")
sampfile=sys.argv[ind+1]
if '-exc' in sys.argv:
Crits,file_type=pmag.magic_read(dir_path+'/pmag_criteria.txt')
for crit in Crits:
if crit['pmag_criteria_code']=='DE-SPEC':
M=float(crit['specimen_mad'])
N=float(crit['specimen_n'])
if '-fmt' in sys.argv:
ind=sys.argv.index("-fmt")
fmt=sys.argv[ind+1]
if '-N' in sys.argv: renew=1
#
if in_file[0]!="/":in_file=dir_path+'/'+in_file
if sampfile[0]!="/":sampfile=dir_path+'/'+sampfile
crd='s'
Specs,file_type=pmag.magic_read(in_file)
if file_type!='pmag_specimens':
print(' bad pmag_specimen input file')
sys.exit()
Samps,file_type=pmag.magic_read(sampfile)
if file_type!='er_samples':
print(' bad er_samples input file')
sys.exit()
SO_methods=[]
for rec in Samps:
if 'sample_orientation_flag' not in list(rec.keys()): rec['sample_orientation_flag']='g'
if 'sample_description' not in list(rec.keys()): rec['sample_description']=''
if renew==1:
rec['sample_orientation_flag']='g'
description=rec['sample_description']
if '#' in description:
newdesc=""
c=0
while description[c]!='#' and c<len(description)-1: # look for first pound sign
newdesc=newdesc+description[c]
c+=1
while description[c]=='#':
c+=1# skip first set of pound signs
while description[c]!='#':c+=1 # find second set of pound signs
while description[c]=='#' and c<len(description)-1:c+=1 # skip second set of pound signs
while c<len(description)-1: # look for first pound sign
newdesc=newdesc+description[c]
c+=1
rec['sample_description']=newdesc # edit out old comment about orientations
if "magic_method_codes" in rec:
methlist=rec["magic_method_codes"]
for meth in methlist.split(":"):
if "SO" in meth.strip() and "SO-POM" not in meth.strip():
if meth.strip() not in SO_methods: SO_methods.append(meth.strip())
pmag.magic_write(sampfile,Samps,'er_samples')
SO_priorities=pmag.set_priorities(SO_methods,0)
sitelist=[]
for rec in Specs:
if rec['er_site_name'] not in sitelist: sitelist.append(rec['er_site_name'])
sitelist.sort()
EQ={}
EQ['eqarea']=1
pmagplotlib.plot_init(EQ['eqarea'],5,5)
k=0
while k<len(sitelist):
site=sitelist[k]
print(site)
data=[]
ThisSiteSpecs=pmag.get_dictitem(Specs,'er_site_name',site,'T')
ThisSiteSpecs=pmag.get_dictitem(ThisSiteSpecs,'specimen_tilt_correction','-1','T') # get all the unoriented data
for spec in ThisSiteSpecs:
if spec['specimen_mad']!="" and spec['specimen_n']!="" and float(spec['specimen_mad'])<=M and float(spec['specimen_n'])>=N:
# good spec, now get orientation....
redo,p=1,0
if len(SO_methods)<=1:
az_type=SO_methods[0]
orient=pmag.find_samp_rec(spec["er_sample_name"],Samps,az_type)
redo=0
while redo==1:
if p>=len(SO_priorities):
print("no orientation data for ",spec['er_sample_name'])
orient["sample_azimuth"]=""
orient["sample_dip"]=""
redo=0
else:
az_type=SO_methods[SO_methods.index(SO_priorities[p])]
orient=pmag.find_samp_rec(spec["er_sample_name"],Samps,az_type)
if orient["sample_azimuth"] !="":
redo=0
p+=1
if orient['sample_azimuth']!="":
rec={}
for key in list(spec.keys()):rec[key]=spec[key]
rec['dec'],rec['inc']=pmag.dogeo(float(spec['specimen_dec']),float(spec['specimen_inc']),float(orient['sample_azimuth']),float(orient['sample_dip']))
rec["tilt_correction"]='1'
crd='g'
rec['sample_azimuth']=orient['sample_azimuth']
rec['sample_dip']=orient['sample_dip']
data.append(rec)
if len(data)>2:
print('specimen, dec, inc, n_meas/MAD,| method codes ')
for i in range(len(data)):
print('%s: %7.1f %7.1f %s / %s | %s' % (data[i]['er_specimen_name'], data[i]['dec'], data[i]['inc'], data[i]['specimen_n'], data[i]['specimen_mad'], data[i]['magic_method_codes']))
fpars=pmag.dolnp(data,'specimen_direction_type')
print("\n Site lines planes kappa a95 dec inc")
print(site, fpars["n_lines"], fpars["n_planes"], fpars["K"], fpars["alpha95"], fpars["dec"], fpars["inc"], fpars["R"])
if out_file!="":
if float(fpars["alpha95"])<=acutoff and float(fpars["K"])>=kcutoff:
out.write('%s %s %s\n'%(fpars["dec"],fpars['inc'],fpars['alpha95']))
pmagplotlib.plot_lnp(EQ['eqarea'],site,data,fpars,'specimen_direction_type')
pmagplotlib.draw_figs(EQ)
if k!=0 and repeat!='y':
ans=input("s[a]ve plot, [q]uit, [e]dit specimens, [p]revious site, <return> to continue:\n ")
elif k==0 and repeat!='y':
ans=input("s[a]ve plot, [q]uit, [e]dit specimens, <return> to continue:\n ")
if ans=="p": k-=2
if ans=="a":
files={}
files['eqarea']=site+'_'+crd+'_eqarea'+'.'+fmt
pmagplotlib.save_plots(EQ,files)
if ans=="q": sys.exit()
if ans=="e" and Samps==[]:
print("can't edit samples without orientation file, sorry")
elif ans=="e":
# k-=1
testspec=input("Enter name of specimen to check: ")
for spec in data:
if spec['er_specimen_name']==testspec:
# first test wrong direction of drill arrows (flip drill direction in opposite direction and re-calculate d,i
d,i=pmag.dogeo(float(spec['specimen_dec']),float(spec['specimen_inc']),float(spec['sample_azimuth'])-180.,-float(spec['sample_dip']))
XY=pmag.dimap(d,i)
pmagplotlib.plot_xy(EQ['eqarea'],[XY[0]],[XY[1]],sym='g^')
# first test wrong end of compass (take az-180.)
d,i=pmag.dogeo(float(spec['specimen_dec']),float(spec['specimen_inc']),float(spec['sample_azimuth'])-180.,float(spec['sample_dip']))
XY=pmag.dimap(d,i)
pmagplotlib.plot_xy(EQ['eqarea'],[XY[0]],[XY[1]],sym='kv')
# did the sample spin in the hole?
# now spin around specimen's z
X_up,Y_up,X_d,Y_d=[],[],[],[]
for incr in range(0,360,5):
d,i=pmag.dogeo(float(spec['specimen_dec'])+incr,float(spec['specimen_inc']),float(spec['sample_azimuth']),float(spec['sample_dip']))
XY=pmag.dimap(d,i)
if i>=0:
X_d.append(XY[0])
Y_d.append(XY[1])
else:
X_up.append(XY[0])
Y_up.append(XY[1])
pmagplotlib.plot_xy(EQ['eqarea'],X_d,Y_d,sym='b.')
pmagplotlib.plot_xy(EQ['eqarea'],X_up,Y_up,sym='c.')
pmagplotlib.draw_figs(EQ)
break
print("Triangle: wrong arrow for drill direction.")
print("Delta: wrong end of compass.")
print("Small circle: wrong mark on sample. [cyan upper hemisphere]")
deleteme=input("Mark this sample as bad? y/[n] ")
if deleteme=='y':
reason=input("Reason: [1] broke, [2] wrong drill direction, [3] wrong compass direction, [4] bad mark, [5] displaced block [6] other ")
if reason=='1':
description=' sample broke while drilling'
if reason=='2':
description=' wrong drill direction '
if reason=='3':
description=' wrong compass direction '
if reason=='4':
description=' bad mark in field'
if reason=='5':
description=' displaced block'
if reason=='6':
description=input('Enter brief reason for deletion: ')
for samp in Samps:
if samp['er_sample_name']==spec['er_sample_name']:
samp['sample_orientation_flag']='b'
samp['sample_description']=samp['sample_description']+' ## direction deleted because: '+description+'##' # mark description
pmag.magic_write(sampfile,Samps,'er_samples')
repeat=input("Mark another sample, this site? y/[n] ")
if repeat=='y': k-=1
else:
print('skipping site - not enough data with specified coordinate system')
k+=1
print("sample flags stored in ",sampfile) | NAME
site_edit_magic.py
DESCRIPTION
makes equal area projections site by site
from pmag_specimens.txt file with
Fisher confidence ellipse using McFadden and McElhinny (1988)
technique for combining lines and planes
        allows testing and rejecting specimens with bad orientations
SYNTAX
site_edit_magic.py [command line options]
OPTIONS
-h: prints help and quits
-f: specify pmag_specimen format file, default is pmag_specimens.txt
-fsa: specify er_samples.txt file
-exc: use existing pmag_criteria.txt file
-N: reset all sample flags to good
    OUTPUT
edited er_samples.txt file | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/site_edit_magic.py#L11-L235 |
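
A minimal sketch (assumption, not repository code) of the orientation-method priority loop above: walk the prioritized SO method codes and return the first sample orientation record with a non-empty azimuth.

def pick_orientation(sample_name, samples, so_priorities, find_samp_rec):
    # find_samp_rec is assumed to behave like pmag.find_samp_rec(sample, samples, method)
    for az_type in so_priorities:
        orient = find_samp_rec(sample_name, samples, az_type)
        if orient.get("sample_azimuth", "") != "":
            return az_type, orient
    return None, {"sample_azimuth": "", "sample_dip": ""}
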
PmagPy/PmagPy | programs/lowes.py | main | def main():
"""
NAME
lowes.py
DESCRIPTION
Plots Lowes spectrum for input IGRF-like file
SYNTAX
lowes.py [options]
OPTIONS:
-h prints help message and quits
-f FILE specify file name with input data
-d date specify desired date
-r read desired dates from file
-n normalize to dipole term
INPUT FORMAT:
l m g h
"""
norm=0
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
data=np.loadtxt(file)
dates=[2000]
elif '-d' in sys.argv:
ind=sys.argv.index('-d')
dates=[float(sys.argv[ind+1])]
elif '-r' in sys.argv:
ind=sys.argv.index('-r')
dates=np.loadtxt(sys.argv[ind+1])
if '-n' in sys.argv: norm=1
if len(sys.argv)!=0 and '-h' in sys.argv:
print(main.__doc__)
sys.exit()
plt.semilogy()
plt.xlabel('Degree (l)')
plt.ylabel('Power ($\mu$T$^2$)')
labels=[]
for date in dates:
if date!=2000:
gh=pmag.doigrf(0,0,0,date,coeffs=1)
data=pmag.unpack(gh)
Ls,Rs=pmag.lowes(data)
labels.append(str(date))
print(date,Rs[0])
if norm==1:
Rs=old_div(np.array(Rs),Rs[0])
#plt.plot(Ls,Rs,'ro')
plt.plot(Ls,Rs,linewidth=2)
    plt.legend(labels, loc='upper right')
plt.draw()
input() | python | def main():
"""
NAME
lowes.py
DESCRIPTION
Plots Lowes spectrum for input IGRF-like file
SYNTAX
lowes.py [options]
OPTIONS:
-h prints help message and quits
-f FILE specify file name with input data
-d date specify desired date
-r read desired dates from file
-n normalize to dipole term
INPUT FORMAT:
l m g h
"""
norm=0
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
data=np.loadtxt(file)
dates=[2000]
elif '-d' in sys.argv:
ind=sys.argv.index('-d')
dates=[float(sys.argv[ind+1])]
elif '-r' in sys.argv:
ind=sys.argv.index('-r')
dates=np.loadtxt(sys.argv[ind+1])
if '-n' in sys.argv: norm=1
if len(sys.argv)!=0 and '-h' in sys.argv:
print(main.__doc__)
sys.exit()
plt.semilogy()
plt.xlabel('Degree (l)')
plt.ylabel('Power ($\mu$T$^2$)')
labels=[]
for date in dates:
if date!=2000:
gh=pmag.doigrf(0,0,0,date,coeffs=1)
data=pmag.unpack(gh)
Ls,Rs=pmag.lowes(data)
labels.append(str(date))
print(date,Rs[0])
if norm==1:
Rs=old_div(np.array(Rs),Rs[0])
#plt.plot(Ls,Rs,'ro')
plt.plot(Ls,Rs,linewidth=2)
    plt.legend(labels, loc='upper right')
plt.draw()
input() | NAME
lowes.py
DESCRIPTION
Plots Lowes spectrum for input IGRF-like file
SYNTAX
lowes.py [options]
OPTIONS:
-h prints help message and quits
-f FILE specify file name with input data
-d date specify desired date
-r read desired dates from file
-n normalize to dipole term
INPUT FORMAT:
l m g h | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/lowes.py#L15-L66 |
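
The docstring above expects rows of l m g h; below is a hedged sketch of the Lowes power spectrum, R_l = (l + 1) * sum over m of (g_lm^2 + h_lm^2), which is what pmag.lowes(data) is presumed to return as (Ls, Rs). The units of the result follow whatever units the input coefficients carry.

import numpy as np

def lowes_spectrum(rows):
    # rows: iterable of (l, m, g, h); returns ([l values], [R_l values])
    arr = np.atleast_2d(np.asarray(list(rows), dtype=float))
    degrees = sorted({int(l) for l in arr[:, 0]})
    powers = [(l + 1) * float(np.sum(arr[arr[:, 0] == l][:, 2:4] ** 2)) for l in degrees]
    return degrees, powers
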
PmagPy/PmagPy | data_files/LearningPython/main.py | main | def main():
"""This program prints doubled values!"""
import numpy
X=arange(.1,10.1,.2) #make a list of numbers
Y=myfunc(X) # calls myfunc with argument X
for i in range(len(X)):
print(X[i],Y[i]) | python | def main():
"""This program prints doubled values!"""
import numpy
X=arange(.1,10.1,.2) #make a list of numbers
Y=myfunc(X) # calls myfunc with argument X
for i in range(len(X)):
print(X[i],Y[i]) | This program prints doubled values! | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/data_files/LearningPython/main.py#L11-L17 |
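
As written, the snippet above relies on names (arange, myfunc) presumably defined elsewhere in main.py; a self-contained, hedged version, where the doubling myfunc is an assumption based only on the docstring, would be:

import numpy as np

def myfunc(x):
    return 2 * x  # assumed: "doubled values", per the docstring only

def main():
    """This program prints doubled values!"""
    X = np.arange(.1, 10.1, .2)  # make a list of numbers
    Y = myfunc(X)
    for x, y in zip(X, Y):
        print(x, y)
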
PmagPy/PmagPy | programs/common_mean.py | main | def main():
"""
NAME
common_mean.py
DESCRIPTION
calculates bootstrap statistics to test for common mean
INPUT FORMAT
takes dec/inc as first two columns in two space delimited files
SYNTAX
common_mean.py [command line options]
OPTIONS
-h prints help message and quits
-f FILE, input file
-f2 FILE, optional second file to compare with first file
-dir D I, optional direction to compare with input file
        -fmt [svg,jpg,png,pdf] set figure format [default is svg]
        -sav save figures and quit without screen review
    NOTES
        must have either -f2 OR -dir but not both
"""
d,i,file2="","",""
fmt,plot='svg',0
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-sav' in sys.argv: plot=1
if '-fmt' in sys.argv:
ind=sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file1=sys.argv[ind+1]
if '-f2' in sys.argv:
ind=sys.argv.index('-f2')
file2=sys.argv[ind+1]
if '-dir' in sys.argv:
ind=sys.argv.index('-dir')
d=float(sys.argv[ind+1])
i=float(sys.argv[ind+2])
    D1=numpy.loadtxt(file1,dtype=float)
    if file2!="": D2=numpy.loadtxt(file2,dtype=float)
#
counter,NumSims=0,1000
#
# get bootstrapped means for first data set
#
print("Doing first set of directions, please be patient..")
BDI1=pmag.di_boot(D1)
#
# convert to cartesian coordinates X1,X2, Y1,Y2 and Z1, Z2
#
if d=="": # repeat for second data set
print("Doing second set of directions, please be patient..")
BDI2=pmag.di_boot(D2)
else:
BDI2=[]
# set up plots
CDF={'X':1,'Y':2,'Z':3}
pmagplotlib.plot_init(CDF['X'],4,4)
pmagplotlib.plot_init(CDF['Y'],4,4)
pmagplotlib.plot_init(CDF['Z'],4,4)
# draw the cdfs
pmagplotlib.plot_com(CDF,BDI1,BDI2,[d,i])
files={}
files['X']='CD_X.'+fmt
files['Y']='CD_Y.'+fmt
files['Z']='CD_Z.'+fmt
if plot==0:
pmagplotlib.draw_figs(CDF)
ans=input("S[a]ve plots, <Return> to quit ")
if ans=="a":
pmagplotlib.save_plots(CDF,files)
else:
sys.exit()
else:
pmagplotlib.save_plots(CDF,files)
sys.exit() | python | def main():
"""
NAME
common_mean.py
DESCRIPTION
calculates bootstrap statistics to test for common mean
INPUT FORMAT
takes dec/inc as first two columns in two space delimited files
SYNTAX
common_mean.py [command line options]
OPTIONS
-h prints help message and quits
-f FILE, input file
-f2 FILE, optional second file to compare with first file
-dir D I, optional direction to compare with input file
        -fmt [svg,jpg,png,pdf] set figure format [default is svg]
        -sav save figures and quit without screen review
    NOTES
        must have either -f2 OR -dir but not both
"""
d,i,file2="","",""
fmt,plot='svg',0
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-sav' in sys.argv: plot=1
if '-fmt' in sys.argv:
ind=sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file1=sys.argv[ind+1]
if '-f2' in sys.argv:
ind=sys.argv.index('-f2')
file2=sys.argv[ind+1]
if '-dir' in sys.argv:
ind=sys.argv.index('-dir')
d=float(sys.argv[ind+1])
i=float(sys.argv[ind+2])
    D1=numpy.loadtxt(file1,dtype=float)
    if file2!="": D2=numpy.loadtxt(file2,dtype=float)
#
counter,NumSims=0,1000
#
# get bootstrapped means for first data set
#
print("Doing first set of directions, please be patient..")
BDI1=pmag.di_boot(D1)
#
# convert to cartesian coordinates X1,X2, Y1,Y2 and Z1, Z2
#
if d=="": # repeat for second data set
print("Doing second set of directions, please be patient..")
BDI2=pmag.di_boot(D2)
else:
BDI2=[]
# set up plots
CDF={'X':1,'Y':2,'Z':3}
pmagplotlib.plot_init(CDF['X'],4,4)
pmagplotlib.plot_init(CDF['Y'],4,4)
pmagplotlib.plot_init(CDF['Z'],4,4)
# draw the cdfs
pmagplotlib.plot_com(CDF,BDI1,BDI2,[d,i])
files={}
files['X']='CD_X.'+fmt
files['Y']='CD_Y.'+fmt
files['Z']='CD_Z.'+fmt
if plot==0:
pmagplotlib.draw_figs(CDF)
ans=input("S[a]ve plots, <Return> to quit ")
if ans=="a":
pmagplotlib.save_plots(CDF,files)
else:
sys.exit()
else:
pmagplotlib.save_plots(CDF,files)
sys.exit() | NAME
common_mean.py
DESCRIPTION
calculates bootstrap statistics to test for common mean
INPUT FORMAT
takes dec/inc as first two columns in two space delimited files
SYNTAX
common_mean.py [command line options]
OPTIONS
-h prints help message and quits
-f FILE, input file
-f2 FILE, optional second file to compare with first file
-dir D I, optional direction to compare with input file
        -fmt [svg,jpg,png,pdf] set figure format [default is svg]
        -sav save figures and quit without screen review
    NOTES
        must have either -f2 OR -dir but not both | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/common_mean.py#L12-L94
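
A hedged sketch (not the program's exact internals) of the direction bootstrap behind the X/Y/Z CDF comparison above: resample dec/inc pairs with replacement and keep the Cartesian components of each pseudo-sample's unit mean vector.

import numpy as np

def boot_mean_xyz(decinc, nboot=1000, seed=0):
    # decinc: array-like of (declination, inclination) pairs in degrees
    rng = np.random.default_rng(seed)
    di = np.radians(np.asarray(decinc, dtype=float))
    xyz = np.column_stack((np.cos(di[:, 1]) * np.cos(di[:, 0]),
                           np.cos(di[:, 1]) * np.sin(di[:, 0]),
                           np.sin(di[:, 1])))
    means = []
    for _ in range(nboot):
        sample = xyz[rng.integers(0, len(xyz), len(xyz))]
        v = sample.sum(axis=0)
        means.append(v / np.linalg.norm(v))
    return np.array(means)  # one (x, y, z) unit mean vector per pseudo-sample
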
PmagPy/PmagPy | SPD/lib/lib_curvature.py | AraiCurvature | def AraiCurvature(x=x,y=y):
"""
input: list of x points, list of y points
output: k, a, b, SSE. curvature, circle center, and SSE
Function for calculating the radius of the best fit circle to a set of
x-y coordinates.
Paterson, G. A., (2011), A simple test for the presence of multidomain
behaviour during paleointensity experiments, J. Geophys. Res., in press,
doi: 10.1029/2011JB008369
"""
# makes sure all values are floats, then norms them by largest value
X = numpy.array(list(map(float, x)))
X = old_div(X, max(X))
Y = numpy.array(list(map(float, y)))
Y = old_div(Y, max(Y))
XY = numpy.array(list(zip(X, Y)))
    # Provide the initial estimate
E1=TaubinSVD(XY);
#Determine the iterative solution
E2=LMA(XY, E1);
estimates=[E2[2], E2[0], E2[1]];
best_a = E2[0]
best_b = E2[1]
best_r = E2[2]
if best_a <= numpy.mean(X) and best_b <= numpy.mean(Y):
k = old_div(-1.,best_r)
else:
k = old_div(1.,best_r)
SSE = get_SSE(best_a, best_b, best_r, X, Y)
    return k, best_a, best_b, SSE | python | def AraiCurvature(x, y):
"""
input: list of x points, list of y points
output: k, a, b, SSE. curvature, circle center, and SSE
Function for calculating the radius of the best fit circle to a set of
x-y coordinates.
Paterson, G. A., (2011), A simple test for the presence of multidomain
behaviour during paleointensity experiments, J. Geophys. Res., in press,
doi: 10.1029/2011JB008369
"""
# makes sure all values are floats, then norms them by largest value
X = numpy.array(list(map(float, x)))
X = old_div(X, max(X))
Y = numpy.array(list(map(float, y)))
Y = old_div(Y, max(Y))
XY = numpy.array(list(zip(X, Y)))
    # Provide the initial estimate
E1=TaubinSVD(XY);
#Determine the iterative solution
E2=LMA(XY, E1);
estimates=[E2[2], E2[0], E2[1]];
best_a = E2[0]
best_b = E2[1]
best_r = E2[2]
if best_a <= numpy.mean(X) and best_b <= numpy.mean(Y):
k = old_div(-1.,best_r)
else:
k = old_div(1.,best_r)
SSE = get_SSE(best_a, best_b, best_r, X, Y)
return k, best_a, best_b, SSE | input: list of x points, list of y points
output: k, a, b, SSE. curvature, circle center, and SSE
Function for calculating the radius of the best fit circle to a set of
x-y coordinates.
Paterson, G. A., (2011), A simple test for the presence of multidomain
behaviour during paleointensity experiments, J. Geophys. Res., in press,
doi: 10.1029/2011JB008369 | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_curvature.py#L19-L55 |
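A hedged usage sketch for AraiCurvature on synthetic Arai-type data that sag toward the origin; the import path and the data are assumptions made only for illustration.
import numpy
from SPD.lib.lib_curvature import AraiCurvature   # assumed import path

x = numpy.linspace(0.0, 1.0, 12)      # synthetic pTRM gained (already normalized)
y = 1.0 - x**0.7                      # synthetic NRM remaining, curved below the ideal line
k, a, b, SSE = AraiCurvature(x, y)
print(k, SSE)                         # curvature and circle misfit for these points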
PmagPy/PmagPy | SPD/lib/lib_curvature.py | TaubinSVD | def TaubinSVD(XY):
"""
algebraic circle fit
input: list [[x_1, y_1], [x_2, y_2], ....]
output: a, b, r. a and b are the center of the fitting circle, and r is the radius
Algebraic circle fit by Taubin
G. Taubin, "Estimation Of Planar Curves, Surfaces And Nonplanar
Space Curves Defined By Implicit Equations, With
Applications To Edge And Range Image Segmentation",
IEEE Trans. PAMI, Vol. 13, pages 1115-1138, (1991)
"""
XY = numpy.array(XY)
X = XY[:,0] - numpy.mean(XY[:,0]) # norming points by x avg
Y = XY[:,1] - numpy.mean(XY[:,1]) # norming points by y avg
centroid = [numpy.mean(XY[:,0]), numpy.mean(XY[:,1])]
Z = X * X + Y * Y
Zmean = numpy.mean(Z)
Z0 = old_div((Z - Zmean), (2. * numpy.sqrt(Zmean)))
ZXY = numpy.array([Z0, X, Y]).T
U, S, V = numpy.linalg.svd(ZXY, full_matrices=False) #
V = V.transpose()
A = V[:,2]
A[0] = old_div(A[0], (2. * numpy.sqrt(Zmean)))
A = numpy.concatenate([A, [(-1. * Zmean * A[0])]], axis=0)
a, b = (-1 * A[1:3]) / A[0] / 2 + centroid
r = numpy.sqrt(A[1]*A[1]+A[2]*A[2]-4*A[0]*A[3])/abs(A[0])/2;
return a,b,r | python | def TaubinSVD(XY):
"""
algebraic circle fit
input: list [[x_1, y_1], [x_2, y_2], ....]
output: a, b, r. a and b are the center of the fitting circle, and r is the radius
Algebraic circle fit by Taubin
G. Taubin, "Estimation Of Planar Curves, Surfaces And Nonplanar
Space Curves Defined By Implicit Equations, With
Applications To Edge And Range Image Segmentation",
IEEE Trans. PAMI, Vol. 13, pages 1115-1138, (1991)
"""
XY = numpy.array(XY)
X = XY[:,0] - numpy.mean(XY[:,0]) # norming points by x avg
Y = XY[:,1] - numpy.mean(XY[:,1]) # norming points by y avg
centroid = [numpy.mean(XY[:,0]), numpy.mean(XY[:,1])]
Z = X * X + Y * Y
Zmean = numpy.mean(Z)
Z0 = old_div((Z - Zmean), (2. * numpy.sqrt(Zmean)))
ZXY = numpy.array([Z0, X, Y]).T
U, S, V = numpy.linalg.svd(ZXY, full_matrices=False) #
V = V.transpose()
A = V[:,2]
A[0] = old_div(A[0], (2. * numpy.sqrt(Zmean)))
A = numpy.concatenate([A, [(-1. * Zmean * A[0])]], axis=0)
a, b = (-1 * A[1:3]) / A[0] / 2 + centroid
r = numpy.sqrt(A[1]*A[1]+A[2]*A[2]-4*A[0]*A[3])/abs(A[0])/2;
return a,b,r | algebraic circle fit
input: list [[x_1, y_1], [x_2, y_2], ....]
output: a, b, r. a and b are the center of the fitting circle, and r is the radius
Algebraic circle fit by Taubin
G. Taubin, "Estimation Of Planar Curves, Surfaces And Nonplanar
Space Curves Defined By Implicit Equations, With
Applications To Edge And Range Image Segmentation",
IEEE Trans. PAMI, Vol. 13, pages 1115-1138, (1991) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_curvature.py#L58-L85 |
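A quick sanity-check sketch for TaubinSVD (import path assumed): noiseless points on a known circle should return its centre and radius almost exactly.
import numpy
from SPD.lib.lib_curvature import TaubinSVD       # assumed import path

theta = numpy.linspace(0.0, 1.5 * numpy.pi, 30)
pts = numpy.column_stack([3.0 + 2.0 * numpy.cos(theta),
                          -1.0 + 2.0 * numpy.sin(theta)])
a, b, r = TaubinSVD(pts)
print(a, b, r)                        # expect values close to (3.0, -1.0, 2.0)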
PmagPy/PmagPy | SPD/lib/lib_curvature.py | VarCircle | def VarCircle(XY, Par): # must have at least 4 sets of xy points or else division by zero occurs
"""
computing the sample variance of distances from data points (XY) to the circle Par = [a b R]
"""
if type(XY) != numpy.ndarray:
XY = numpy.array(XY)
n = len(XY)
if n < 4:
raise Warning("Circle cannot be calculated with less than 4 data points. Please include more data")
Dx = XY[:,0] - Par[0]
Dy = XY[:,1] - Par[1]
D = numpy.sqrt(Dx * Dx + Dy * Dy) - Par[2]
result = old_div(numpy.dot(D, D),(n-3))
return result | python | def VarCircle(XY, Par): # must have at least 4 sets of xy points or else division by zero occurs
"""
computing the sample variance of distances from data points (XY) to the circle Par = [a b R]
"""
if type(XY) != numpy.ndarray:
XY = numpy.array(XY)
n = len(XY)
if n < 4:
raise Warning("Circle cannot be calculated with less than 4 data points. Please include more data")
Dx = XY[:,0] - Par[0]
Dy = XY[:,1] - Par[1]
D = numpy.sqrt(Dx * Dx + Dy * Dy) - Par[2]
result = old_div(numpy.dot(D, D),(n-3))
return result | computing the sample variance of distances from data points (XY) to the circle Par = [a b R] | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_curvature.py#L88-L101 |
PmagPy/PmagPy | SPD/lib/lib_curvature.py | LMA | def LMA(XY,ParIni):
"""
input: list of x and y values [[x_1, y_1], [x_2, y_2], ....], and a tuple containing an initial guess (a, b, r)
which is acquired by using an algebraic circle fit (TaubinSVD)
output: a, b, r. a and b are the center of the fitting circle, and r is the radius
% Geometric circle fit (minimizing orthogonal distances)
% based on the Levenberg-Marquardt scheme in the
% "algebraic parameters" A,B,C,D with constraint B*B+C*C-4*A*D=1
% N. Chernov and C. Lesort, "Least squares fitting of circles",
% J. Math. Imag. Vision, Vol. 23, 239-251 (2005)
"""
factorUp=10
factorDown=0.04
lambda0=0.01
epsilon=0.000001
IterMAX = 50
AdjustMax = 20
Xshift=0
Yshift=0
dX=1
dY=0;
n = len(XY); # number of data points
anew = ParIni[0] + Xshift
bnew = ParIni[1] + Yshift
Anew = old_div(1.,(2.*ParIni[2]))
aabb = anew*anew + bnew*bnew
Fnew = (aabb - ParIni[2]*ParIni[2])*Anew
Tnew = numpy.arccos(old_div(-anew,numpy.sqrt(aabb)))
if bnew > 0:
Tnew = 2*numpy.pi - Tnew
VarNew = VarCircle(XY,ParIni)
VarLambda = lambda0;
finish = 0;
for it in range(0,IterMAX):
Aold = Anew
Fold = Fnew
Told = Tnew
VarOld = VarNew
H = numpy.sqrt(1+4*Aold*Fold);
aold = -H*numpy.cos(Told)/(Aold+Aold) - Xshift;
bold = -H*numpy.sin(Told)/(Aold+Aold) - Yshift;
Rold = old_div(1,abs(Aold+Aold));
DD = 1 + 4*Aold*Fold;
D = numpy.sqrt(DD);
CT = numpy.cos(Told);
ST = numpy.sin(Told);
H11=0;
H12=0;
H13=0;
H22=0;
H23=0;
H33=0;
F1=0;
F2=0;
F3=0;
for i in range(0,n):
Xi = XY[i,0] + Xshift;
Yi = XY[i,1] + Yshift;
Zi = Xi*Xi + Yi*Yi;
Ui = Xi*CT + Yi*ST;
Vi =-Xi*ST + Yi*CT;
ADF = Aold*Zi + D*Ui + Fold;
SQ = numpy.sqrt(4*Aold*ADF + 1);
DEN = SQ + 1;
Gi = 2*ADF/DEN;
FACT = 2/DEN*(1 - Aold*Gi/SQ);
DGDAi = FACT*(Zi + 2*Fold*Ui/D) - Gi*Gi/SQ;
DGDFi = FACT*(2*Aold*Ui/D + 1);
DGDTi = FACT*D*Vi;
H11 = H11 + DGDAi*DGDAi;
H12 = H12 + DGDAi*DGDFi;
H13 = H13 + DGDAi*DGDTi;
H22 = H22 + DGDFi*DGDFi;
H23 = H23 + DGDFi*DGDTi;
H33 = H33 + DGDTi*DGDTi;
F1 = F1 + Gi*DGDAi;
F2 = F2 + Gi*DGDFi;
F3 = F3 + Gi*DGDTi;
for adjust in range(1,AdjustMax):
            # Cholesky decomposition
G11 = numpy.sqrt(H11 + VarLambda);
G12 = old_div(H12,G11)
G13 = old_div(H13,G11)
G22 = numpy.sqrt(H22 + VarLambda - G12*G12);
G23 = old_div((H23 - G12*G13),G22);
G33 = numpy.sqrt(H33 + VarLambda - G13*G13 - G23*G23);
D1 = old_div(F1,G11);
D2 = old_div((F2 - G12*D1),G22);
D3 = old_div((F3 - G13*D1 - G23*D2),G33);
dT = old_div(D3,G33);
dF = old_div((D2 - G23*dT),G22)
dA = old_div((D1 - G12*dF - G13*dT),G11)
# updating the parameters
Anew = Aold - dA;
Fnew = Fold - dF;
Tnew = Told - dT;
if 1+4*Anew*Fnew < epsilon and VarLambda>1:
Xshift = Xshift + dX;
Yshift = Yshift + dY;
H = numpy.sqrt(1+4*Aold*Fold);
aTemp = -H*numpy.cos(Told)/(Aold+Aold) + dX;
bTemp = -H*numpy.sin(Told)/(Aold+Aold) + dY;
rTemp = old_div(1,abs(Aold+Aold));
Anew = old_div(1,(rTemp + rTemp));
aabb = aTemp*aTemp + bTemp*bTemp;
Fnew = (aabb - rTemp*rTemp)*Anew;
Tnew = numpy.arccos(old_div(-aTemp,numpy.sqrt(aabb)));
if bTemp > 0:
Tnew = 2*numpy.pi - Tnew;
VarNew = VarOld;
break;
if 1+4*Anew*Fnew < epsilon:
VarLambda = VarLambda * factorUp;
continue;
DD = 1 + 4*Anew*Fnew;
D = numpy.sqrt(DD);
CT = numpy.cos(Tnew);
ST = numpy.sin(Tnew);
GG = 0;
for i in range(0, n):
Xi = XY[i,0] + Xshift;
Yi = XY[i,1] + Yshift;
Zi = Xi*Xi + Yi*Yi;
Ui = Xi*CT + Yi*ST;
ADF = Anew*Zi + D*Ui + Fnew;
SQ = numpy.sqrt(4*Anew*ADF + 1);
DEN = SQ + 1;
Gi = 2*ADF/DEN;
GG = GG + Gi*Gi;
VarNew = old_div(GG,(n-3));
H = numpy.sqrt(1+4*Anew*Fnew);
anew = -H*numpy.cos(Tnew)/(Anew+Anew) - Xshift;
bnew = -H*numpy.sin(Tnew)/(Anew+Anew) - Yshift;
Rnew = old_div(1,abs(Anew+Anew));
if VarNew <= VarOld:
progress = old_div((abs(anew-aold) + abs(bnew-bold) + abs(Rnew-Rold)),(Rnew+Rold));
if progress < epsilon:
Aold = Anew;
Fold = Fnew;
Told = Tnew;
VarOld = VarNew # %#ok<NASGU>
finish = 1;
break;
VarLambda = VarLambda * factorDown
break
else: # % no improvement
VarLambda = VarLambda * factorUp;
continue;
if finish == 1:
break
H = numpy.sqrt(1+4*Aold*Fold);
result_a = -H*numpy.cos(Told)/(Aold+Aold) - Xshift;
result_b = -H*numpy.sin(Told)/(Aold+Aold) - Yshift;
result_r = old_div(1,abs(Aold+Aold));
return result_a, result_b, result_r | python | def LMA(XY,ParIni):
"""
input: list of x and y values [[x_1, y_1], [x_2, y_2], ....], and a tuple containing an initial guess (a, b, r)
which is acquired by using an algebraic circle fit (TaubinSVD)
output: a, b, r. a and b are the center of the fitting circle, and r is the radius
% Geometric circle fit (minimizing orthogonal distances)
% based on the Levenberg-Marquardt scheme in the
% "algebraic parameters" A,B,C,D with constraint B*B+C*C-4*A*D=1
% N. Chernov and C. Lesort, "Least squares fitting of circles",
% J. Math. Imag. Vision, Vol. 23, 239-251 (2005)
"""
factorUp=10
factorDown=0.04
lambda0=0.01
epsilon=0.000001
IterMAX = 50
AdjustMax = 20
Xshift=0
Yshift=0
dX=1
dY=0;
n = len(XY); # number of data points
anew = ParIni[0] + Xshift
bnew = ParIni[1] + Yshift
Anew = old_div(1.,(2.*ParIni[2]))
aabb = anew*anew + bnew*bnew
Fnew = (aabb - ParIni[2]*ParIni[2])*Anew
Tnew = numpy.arccos(old_div(-anew,numpy.sqrt(aabb)))
if bnew > 0:
Tnew = 2*numpy.pi - Tnew
VarNew = VarCircle(XY,ParIni)
VarLambda = lambda0;
finish = 0;
for it in range(0,IterMAX):
Aold = Anew
Fold = Fnew
Told = Tnew
VarOld = VarNew
H = numpy.sqrt(1+4*Aold*Fold);
aold = -H*numpy.cos(Told)/(Aold+Aold) - Xshift;
bold = -H*numpy.sin(Told)/(Aold+Aold) - Yshift;
Rold = old_div(1,abs(Aold+Aold));
DD = 1 + 4*Aold*Fold;
D = numpy.sqrt(DD);
CT = numpy.cos(Told);
ST = numpy.sin(Told);
H11=0;
H12=0;
H13=0;
H22=0;
H23=0;
H33=0;
F1=0;
F2=0;
F3=0;
for i in range(0,n):
Xi = XY[i,0] + Xshift;
Yi = XY[i,1] + Yshift;
Zi = Xi*Xi + Yi*Yi;
Ui = Xi*CT + Yi*ST;
Vi =-Xi*ST + Yi*CT;
ADF = Aold*Zi + D*Ui + Fold;
SQ = numpy.sqrt(4*Aold*ADF + 1);
DEN = SQ + 1;
Gi = 2*ADF/DEN;
FACT = 2/DEN*(1 - Aold*Gi/SQ);
DGDAi = FACT*(Zi + 2*Fold*Ui/D) - Gi*Gi/SQ;
DGDFi = FACT*(2*Aold*Ui/D + 1);
DGDTi = FACT*D*Vi;
H11 = H11 + DGDAi*DGDAi;
H12 = H12 + DGDAi*DGDFi;
H13 = H13 + DGDAi*DGDTi;
H22 = H22 + DGDFi*DGDFi;
H23 = H23 + DGDFi*DGDTi;
H33 = H33 + DGDTi*DGDTi;
F1 = F1 + Gi*DGDAi;
F2 = F2 + Gi*DGDFi;
F3 = F3 + Gi*DGDTi;
for adjust in range(1,AdjustMax):
            # Cholesky decomposition
G11 = numpy.sqrt(H11 + VarLambda);
G12 = old_div(H12,G11)
G13 = old_div(H13,G11)
G22 = numpy.sqrt(H22 + VarLambda - G12*G12);
G23 = old_div((H23 - G12*G13),G22);
G33 = numpy.sqrt(H33 + VarLambda - G13*G13 - G23*G23);
D1 = old_div(F1,G11);
D2 = old_div((F2 - G12*D1),G22);
D3 = old_div((F3 - G13*D1 - G23*D2),G33);
dT = old_div(D3,G33);
dF = old_div((D2 - G23*dT),G22)
dA = old_div((D1 - G12*dF - G13*dT),G11)
# updating the parameters
Anew = Aold - dA;
Fnew = Fold - dF;
Tnew = Told - dT;
if 1+4*Anew*Fnew < epsilon and VarLambda>1:
Xshift = Xshift + dX;
Yshift = Yshift + dY;
H = numpy.sqrt(1+4*Aold*Fold);
aTemp = -H*numpy.cos(Told)/(Aold+Aold) + dX;
bTemp = -H*numpy.sin(Told)/(Aold+Aold) + dY;
rTemp = old_div(1,abs(Aold+Aold));
Anew = old_div(1,(rTemp + rTemp));
aabb = aTemp*aTemp + bTemp*bTemp;
Fnew = (aabb - rTemp*rTemp)*Anew;
Tnew = numpy.arccos(old_div(-aTemp,numpy.sqrt(aabb)));
if bTemp > 0:
Tnew = 2*numpy.pi - Tnew;
VarNew = VarOld;
break;
if 1+4*Anew*Fnew < epsilon:
VarLambda = VarLambda * factorUp;
continue;
DD = 1 + 4*Anew*Fnew;
D = numpy.sqrt(DD);
CT = numpy.cos(Tnew);
ST = numpy.sin(Tnew);
GG = 0;
for i in range(0, n):
Xi = XY[i,0] + Xshift;
Yi = XY[i,1] + Yshift;
Zi = Xi*Xi + Yi*Yi;
Ui = Xi*CT + Yi*ST;
ADF = Anew*Zi + D*Ui + Fnew;
SQ = numpy.sqrt(4*Anew*ADF + 1);
DEN = SQ + 1;
Gi = 2*ADF/DEN;
GG = GG + Gi*Gi;
VarNew = old_div(GG,(n-3));
H = numpy.sqrt(1+4*Anew*Fnew);
anew = -H*numpy.cos(Tnew)/(Anew+Anew) - Xshift;
bnew = -H*numpy.sin(Tnew)/(Anew+Anew) - Yshift;
Rnew = old_div(1,abs(Anew+Anew));
if VarNew <= VarOld:
progress = old_div((abs(anew-aold) + abs(bnew-bold) + abs(Rnew-Rold)),(Rnew+Rold));
if progress < epsilon:
Aold = Anew;
Fold = Fnew;
Told = Tnew;
VarOld = VarNew # %#ok<NASGU>
finish = 1;
break;
VarLambda = VarLambda * factorDown
break
else: # % no improvement
VarLambda = VarLambda * factorUp;
continue;
if finish == 1:
break
H = numpy.sqrt(1+4*Aold*Fold);
result_a = -H*numpy.cos(Told)/(Aold+Aold) - Xshift;
result_b = -H*numpy.sin(Told)/(Aold+Aold) - Yshift;
result_r = old_div(1,abs(Aold+Aold));
return result_a, result_b, result_r | input: list of x and y values [[x_1, y_1], [x_2, y_2], ....], and a tuple containing an initial guess (a, b, r)
which is acquired by using an algebraic circle fit (TaubinSVD)
output: a, b, r. a and b are the center of the fitting circle, and r is the radius
% Geometric circle fit (minimizing orthogonal distances)
% based on the Levenberg-Marquardt scheme in the
% "algebraic parameters" A,B,C,D with constraint B*B+C*C-4*A*D=1
% N. Chernov and C. Lesort, "Least squares fitting of circles",
% J. Math. Imag. Vision, Vol. 23, 239-251 (2005) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_curvature.py#L104-L293 |
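A hedged end-to-end sketch chaining the two fitters the same way AraiCurvature does: TaubinSVD supplies the algebraic seed and LMA refines it by minimizing orthogonal distances. The import path and the noisy synthetic circle are assumptions for illustration only.
import numpy
from SPD.lib.lib_curvature import TaubinSVD, LMA  # assumed import path

numpy.random.seed(0)                  # reproducible synthetic noise
t = numpy.linspace(0.0, 2.0 * numpy.pi, 50)
xy = numpy.column_stack([2.0 + 1.5 * numpy.cos(t) + numpy.random.normal(0.0, 0.02, t.size),
                         -0.5 + 1.5 * numpy.sin(t) + numpy.random.normal(0.0, 0.02, t.size)])
seed = TaubinSVD(xy)                  # rough algebraic estimate (a, b, r)
a, b, r = LMA(xy, seed)               # geometric (orthogonal-distance) refinement
print(a, b, r)                        # expect roughly (2.0, -0.5, 1.5)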
PmagPy/PmagPy | SPD/lib/lib_curvature.py | get_SSE | def get_SSE(a,b,r,x,y):
"""
input: a, b, r, x, y. circle center, radius, xpts, ypts
output: SSE
"""
SSE = 0
X = numpy.array(x)
Y = numpy.array(y)
for i in range(len(X)):
x = X[i]
y = Y[i]
v = (numpy.sqrt( (x -a)**2 + (y - b)**2 ) - r )**2
SSE += v
return SSE | python | def get_SSE(a,b,r,x,y):
"""
input: a, b, r, x, y. circle center, radius, xpts, ypts
output: SSE
"""
SSE = 0
X = numpy.array(x)
Y = numpy.array(y)
for i in range(len(X)):
x = X[i]
y = Y[i]
v = (numpy.sqrt( (x -a)**2 + (y - b)**2 ) - r )**2
SSE += v
return SSE | input: a, b, r, x, y. circle center, radius, xpts, ypts
output: SSE | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_curvature.py#L297-L310 |
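A tiny check for get_SSE, assuming it is importable from the same module: points lying exactly on the candidate circle give zero misfit.
from SPD.lib.lib_curvature import get_SSE         # assumed import path

xs = [1.0, 0.0, -1.0, 0.0]
ys = [0.0, 1.0, 0.0, -1.0]
print(get_SSE(0.0, 0.0, 1.0, xs, ys))   # 0.0: every point sits on the unit circle
print(get_SSE(0.1, 0.0, 1.0, xs, ys))   # > 0 once the centre is shifted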
PmagPy/PmagPy | programs/sundec.py | main | def main():
"""
NAME
sundec.py
DESCRIPTION
       calculates declination from sun compass measurements
INPUT FORMAT
GMT_offset, lat,long,year,month,day,hours,minutes,shadow_angle
where GMT_offset is the hours to subtract from local time for GMT.
SYNTAX
sundec.py [-i][-f FILE] [< filename ]
OPTIONS
-i for interactive data entry
-f FILE to set file name on command line
otherwise put data in input format in space delimited file
OUTPUT:
declination
"""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
data=f.readlines() # read in data from standard input
for line in data: # step through line by line
dec=spitout(line)
sys.exit()
if '-i' in sys.argv:
while 1: # repeat this block until program killed
sundata={} # dictionary with sundata in it
print ("Time difference between Greenwich Mean Time (hrs to subtract from local time to get GMT): ")
try:
sundata["delta_u"]=input("<cntl-D> to quit ")
except:
print("\n Good-bye\n")
sys.exit()
date=""
date=date+input("Year: <cntl-D to quit> ")
date=date+":"+input("Month: ")
date=date+":"+input("Day: ")
date=date+":"+input("hour: ")
date=date+":"+input("minute: ")
sundata["date"]=date
sundata["lat"]=input("Latitude of sampling site (negative in southern hemisphere): ")
sundata["lon"]=input("Longitude of sampling site (negative for western hemisphere): ")
sundata["shadow_angle"]=input("Shadow angle: ")
print('%7.1f'%(pmag.dosundec(sundata))) # call sundec function from pmag module and print
else:
data=sys.stdin.readlines() # read in data from standard input
for line in data: # step through line by line
dec=spitout(line) | python | def main():
"""
NAME
sundec.py
DESCRIPTION
       calculates declination from sun compass measurements
INPUT FORMAT
GMT_offset, lat,long,year,month,day,hours,minutes,shadow_angle
where GMT_offset is the hours to subtract from local time for GMT.
SYNTAX
sundec.py [-i][-f FILE] [< filename ]
OPTIONS
-i for interactive data entry
-f FILE to set file name on command line
otherwise put data in input format in space delimited file
OUTPUT:
declination
"""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
data=f.readlines() # read in data from standard input
for line in data: # step through line by line
dec=spitout(line)
sys.exit()
if '-i' in sys.argv:
while 1: # repeat this block until program killed
sundata={} # dictionary with sundata in it
print ("Time difference between Greenwich Mean Time (hrs to subtract from local time to get GMT): ")
try:
sundata["delta_u"]=input("<cntl-D> to quit ")
except:
print("\n Good-bye\n")
sys.exit()
date=""
date=date+input("Year: <cntl-D to quit> ")
date=date+":"+input("Month: ")
date=date+":"+input("Day: ")
date=date+":"+input("hour: ")
date=date+":"+input("minute: ")
sundata["date"]=date
sundata["lat"]=input("Latitude of sampling site (negative in southern hemisphere): ")
sundata["lon"]=input("Longitude of sampling site (negative for western hemisphere): ")
sundata["shadow_angle"]=input("Shadow angle: ")
print('%7.1f'%(pmag.dosundec(sundata))) # call sundec function from pmag module and print
else:
data=sys.stdin.readlines() # read in data from standard input
for line in data: # step through line by line
dec=spitout(line) | NAME
sundec.py
DESCRIPTION
       calculates declination from sun compass measurements
INPUT FORMAT
GMT_offset, lat,long,year,month,day,hours,minutes,shadow_angle
where GMT_offset is the hours to subtract from local time for GMT.
SYNTAX
sundec.py [-i][-f FILE] [< filename ]
OPTIONS
-i for interactive data entry
-f FILE to set file name on command line
otherwise put data in input format in space delimited file
OUTPUT:
declination | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/sundec.py#L23-L79 |
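A hedged sketch of the dictionary that pmag.dosundec() consumes, mirroring the interactive branch above; the import path and all field values are illustrative assumptions.
import pmagpy.pmag as pmag            # assumed import path for the pmag module used above

sundata = {"delta_u": "3",                     # hours to subtract from local time for GMT
           "lat": "35.0", "lon": "33.0",       # site latitude / longitude in degrees
           "date": "1994:5:23:16:9",           # year:month:day:hour:minute, local time
           "shadow_angle": "68.0"}             # measured shadow angle
print('%7.1f' % pmag.dosundec(sundata))        # prints the sun-compass declination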
PmagPy/PmagPy | programs/deprecated/sites_locations.py | main | def main():
"""
NAME
sites_locations.py
DESCRIPTION
reads in er_sites.txt file and finds all locations and bounds of locations
outputs er_locations.txt file
SYNTAX
sites_locations.py [command line options]
OPTIONS
-h prints help message and quits
        -f: input er_sites format file, default is "er_sites.txt"
-F: locations table: default is "er_locations.txt"
"""
# set defaults
site_file="er_sites.txt"
loc_file="er_locations.txt"
Names,user=[],"unknown"
Done=[]
version_num=pmag.get_version()
args=sys.argv
dir_path='.'
# get command line stuff
if '-WD' in args:
ind=args.index("-WD")
dir_path=args[ind+1]
if "-h" in args:
print(main.__doc__)
sys.exit()
if '-f' in args:
ind=args.index("-f")
site_file=args[ind+1]
if '-F' in args:
ind=args.index("-F")
loc_file=args[ind+1]
#
site_file=dir_path+'/'+site_file
loc_file=dir_path+'/'+loc_file
Sites,file_type=pmag.magic_read(site_file)
if file_type != 'er_sites':
print(file_type)
print(file_type,"This is not a valid er_sites file ")
sys.exit()
# read in site data
#
LocNames,Locations=[],[]
for site in Sites:
if site['er_location_name'] not in LocNames: # new location name
LocNames.append(site['er_location_name'])
sites_locs=pmag.get_dictitem(Sites,'er_location_name',site['er_location_name'],'T') # get all sites for this loc
lats=pmag.get_dictkey(sites_locs,'site_lat','f') # get all the latitudes as floats
lons=pmag.get_dictkey(sites_locs,'site_lon','f') # get all the longitudes as floats
LocRec={'er_citation_names':'This study','er_location_name':site['er_location_name'],'location_type':''}
LocRec['location_begin_lat']=str(min(lats))
LocRec['location_end_lat']=str(max(lats))
LocRec['location_begin_lon']=str(min(lons))
LocRec['location_end_lon']=str(max(lons))
Locations.append(LocRec)
if len(Locations)>0:
pmag.magic_write(loc_file,Locations,"er_locations")
print("Locations written to: ",loc_file) | python | def main():
"""
NAME
sites_locations.py
DESCRIPTION
reads in er_sites.txt file and finds all locations and bounds of locations
outputs er_locations.txt file
SYNTAX
sites_locations.py [command line options]
OPTIONS
-h prints help message and quits
        -f: input er_sites format file, default is "er_sites.txt"
-F: locations table: default is "er_locations.txt"
"""
# set defaults
site_file="er_sites.txt"
loc_file="er_locations.txt"
Names,user=[],"unknown"
Done=[]
version_num=pmag.get_version()
args=sys.argv
dir_path='.'
# get command line stuff
if '-WD' in args:
ind=args.index("-WD")
dir_path=args[ind+1]
if "-h" in args:
print(main.__doc__)
sys.exit()
if '-f' in args:
ind=args.index("-f")
site_file=args[ind+1]
if '-F' in args:
ind=args.index("-F")
loc_file=args[ind+1]
#
site_file=dir_path+'/'+site_file
loc_file=dir_path+'/'+loc_file
Sites,file_type=pmag.magic_read(site_file)
if file_type != 'er_sites':
print(file_type)
print(file_type,"This is not a valid er_sites file ")
sys.exit()
# read in site data
#
LocNames,Locations=[],[]
for site in Sites:
if site['er_location_name'] not in LocNames: # new location name
LocNames.append(site['er_location_name'])
sites_locs=pmag.get_dictitem(Sites,'er_location_name',site['er_location_name'],'T') # get all sites for this loc
lats=pmag.get_dictkey(sites_locs,'site_lat','f') # get all the latitudes as floats
lons=pmag.get_dictkey(sites_locs,'site_lon','f') # get all the longitudes as floats
LocRec={'er_citation_names':'This study','er_location_name':site['er_location_name'],'location_type':''}
LocRec['location_begin_lat']=str(min(lats))
LocRec['location_end_lat']=str(max(lats))
LocRec['location_begin_lon']=str(min(lons))
LocRec['location_end_lon']=str(max(lons))
Locations.append(LocRec)
if len(Locations)>0:
pmag.magic_write(loc_file,Locations,"er_locations")
print("Locations written to: ",loc_file) | NAME
sites_locations.py
DESCRIPTION
reads in er_sites.txt file and finds all locations and bounds of locations
outputs er_locations.txt file
SYNTAX
sites_locations.py [command line options]
OPTIONS
-h prints help message and quits
        -f: input er_sites format file, default is "er_sites.txt"
-F: locations table: default is "er_locations.txt" | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/deprecated/sites_locations.py#L7-L70 |
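A hedged sketch of the core grouping step used above, on a made-up er_sites-style list of dictionaries; pmag.get_dictitem and pmag.get_dictkey are the same helpers the script calls, and the import path is an assumption.
import pmagpy.pmag as pmag            # assumed import path

Sites = [{'er_location_name': 'Snake River', 'site_lat': '42.6', 'site_lon': '-113.4'},
         {'er_location_name': 'Snake River', 'site_lat': '42.9', 'site_lon': '-113.1'}]
one_loc = pmag.get_dictitem(Sites, 'er_location_name', 'Snake River', 'T')
lats = pmag.get_dictkey(one_loc, 'site_lat', 'f')    # latitudes as floats
lons = pmag.get_dictkey(one_loc, 'site_lon', 'f')    # longitudes as floats
print(min(lats), max(lats), min(lons), max(lons))    # the location's bounding box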
PmagPy/PmagPy | programs/zeq_magic2.py | main | def main():
"""
NAME
zeq_magic.py
DESCRIPTION
reads in magic_measurements formatted file, makes plots of remanence decay
during demagnetization experiments. Reads in prior interpretations saved in
a pmag_specimens formatted file and allows re-interpretations of best-fit lines
and planes and saves (revised or new) interpretations in a pmag_specimens file.
        Interpretations are saved in the coordinate system used. Also allows judicious editing of
measurements to eliminate "bad" measurements. These are marked as such in the magic_measurements
        input file. They are NOT deleted, just ignored.
SYNTAX
zeq_magic.py [command line options]
OPTIONS
-h prints help message and quits
-f MEASFILE: sets magic_measurements format input file, default: magic_measurements.txt
        -fsp SPECFILE: sets pmag_specimens format file with prior interpretations, default: zeq_specimens.txt
-Fp PLTFILE: sets filename for saved plot, default is name_type.fmt (where type is zijd, eqarea or decay curve)
-crd [s,g,t]: sets coordinate system, g=geographic, t=tilt adjusted, default: specimen coordinate system
-fsa SAMPFILE: sets er_samples format file with orientation information, default: er_samples.txt
-spc SPEC plots single specimen SPEC, saves plot with specified format
with optional -dir settings and quits
-dir [L,P,F][beg][end]: sets calculation type for principal component analysis, default is none
beg: starting step for PCA calculation
end: ending step for PCA calculation
[L,P,F]: calculation type for line, plane or fisher mean
must be used with -spc option
-fmt FMT: set format of saved plot [png,svg,jpg]
-A: suppresses averaging of replicate measurements, default is to average
-sav: saves all plots without review
SCREEN OUTPUT:
Specimen, N, a95, StepMin, StepMax, Dec, Inc, calculation type
"""
# initialize some variables
doave,e,b=1,0,0 # average replicates, initial end and beginning step
plots,coord=0,'s'
noorient=0
version_num=pmag.get_version()
verbose=pmagplotlib.verbose
beg_pca,end_pca,direction_type="","",'l'
calculation_type,fmt="","svg"
user,spec_keys,locname="",[],''
plot_file=""
sfile=""
plot_file=""
PriorRecs=[] # empty list for prior interpretations
backup=0
specimen="" # can skip everything and just plot one specimen with bounds e,b
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-WD' in sys.argv:
ind=sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
else:
dir_path='.'
inspec=dir_path+'/'+'zeq_specimens.txt'
meas_file,geo,tilt,ask,samp_file=dir_path+'/magic_measurements.txt',0,0,0,dir_path+'/er_samples.txt'
if '-f' in sys.argv:
ind=sys.argv.index('-f')
meas_file=dir_path+'/'+sys.argv[ind+1]
if '-fsp' in sys.argv:
ind=sys.argv.index('-fsp')
inspec=dir_path+'/'+sys.argv[ind+1]
if '-fsa' in sys.argv:
ind=sys.argv.index('-fsa')
samp_file=dir_path+'/'+sys.argv[ind+1]
sfile='ok'
if '-crd' in sys.argv:
ind=sys.argv.index('-crd')
coord=sys.argv[ind+1]
if coord=='g' or coord=='t':
samp_data,file_type=pmag.magic_read(samp_file)
if file_type=='er_samples':sfile='ok'
geo=1
if coord=='t':tilt=1
if '-spc' in sys.argv:
ind=sys.argv.index('-spc')
specimen=sys.argv[ind+1]
if '-dir' in sys.argv:
ind=sys.argv.index('-dir')
direction_type=sys.argv[ind+1]
beg_pca=int(sys.argv[ind+2])
end_pca=int(sys.argv[ind+3])
if direction_type=='L':calculation_type='DE-BFL'
if direction_type=='P':calculation_type='DE-BFP'
if direction_type=='F':calculation_type='DE-FM'
if '-Fp' in sys.argv:
ind=sys.argv.index('-Fp')
plot_file=dir_path+'/'+sys.argv[ind+1]
if '-A' in sys.argv: doave=0
if '-sav' in sys.argv:
plots=1
verbose=0
if '-fmt' in sys.argv:
ind=sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
#
first_save=1
meas_data,file_type=pmag.magic_read(meas_file)
changeM,changeS=0,0 # check if data or interpretations have changed
if file_type != 'magic_measurements':
print(file_type)
print(file_type,"This is not a valid magic_measurements file ")
sys.exit()
for rec in meas_data:
if "magic_method_codes" not in rec.keys(): rec["magic_method_codes"]=""
methods=""
tmp=rec["magic_method_codes"].replace(" ","").split(":")
for meth in tmp:
methods=methods+meth+":"
rec["magic_method_codes"]=methods[:-1] # get rid of annoying spaces in Anthony's export files
if "magic_instrument_codes" not in rec.keys() :rec["magic_instrument_codes"]=""
PriorSpecs=[]
PriorRecs,file_type=pmag.magic_read(inspec)
if len(PriorRecs)==0:
if verbose:print("starting new file ",inspec)
for Rec in PriorRecs:
if 'magic_software_packages' not in Rec.keys():Rec['magic_software_packages']=""
if Rec['er_specimen_name'] not in PriorSpecs:
if 'specimen_comp_name' not in Rec.keys():Rec['specimen_comp_name']="A"
PriorSpecs.append(Rec['er_specimen_name'])
else:
if 'specimen_comp_name' not in Rec.keys():Rec['specimen_comp_name']="A"
if "magic_method_codes" in Rec.keys():
methods=[]
tmp=Rec["magic_method_codes"].replace(" ","").split(":")
for meth in tmp:
methods.append(meth)
if 'DE-FM' in methods:
Rec['calculation_type']='DE-FM' # this won't be imported but helps
if 'DE-BFL' in methods:
Rec['calculation_type']='DE-BFL'
if 'DE-BFL-A' in methods:
Rec['calculation_type']='DE-BFL-A'
if 'DE-BFL-O' in methods:
Rec['calculation_type']='DE-BFL-O'
if 'DE-BFP' in methods:
Rec['calculation_type']='DE-BFP'
else:
Rec['calculation_type']='DE-BFL' # default is to assume a best-fit line
#
# get list of unique specimen names
#
sids=pmag.get_specs(meas_data)
#
# set up plots, angle sets X axis to horizontal, direction_type 'l' is best-fit line
# direction_type='p' is great circle
#
#
# draw plots for sample s - default is just to step through zijderveld diagrams
#
#
# define figure numbers for equal area, zijderveld,
    # and intensity vs. demagnetization step respectively
ZED={}
ZED['eqarea'],ZED['zijd'], ZED['demag']=1,2,3
pmagplotlib.plot_init(ZED['eqarea'],5,5)
pmagplotlib.plot_init(ZED['zijd'],6,5)
pmagplotlib.plot_init(ZED['demag'],5,5)
save_pca=0
if specimen=="":
k = 0
else:
k=sids.index(specimen)
angle,direction_type="",""
setangle=0
CurrRecs=[]
while k < len(sids):
CurrRecs=[]
if setangle==0:angle=""
method_codes,inst_code=[],""
s=sids[k]
PmagSpecRec={}
PmagSpecRec["er_analyst_mail_names"]=user
PmagSpecRec['magic_software_packages']=version_num
PmagSpecRec['specimen_description']=""
PmagSpecRec['magic_method_codes']=""
if verbose and s!="":print(s, k , 'out of ',len(sids))
#
# collect info for the PmagSpecRec dictionary
#
s_meas=pmag.get_dictitem(meas_data,'er_specimen_name',s,'T') # fish out this specimen
s_meas=pmag.get_dictitem(s_meas,'magic_method_codes','Z','has') # fish out zero field steps
if len(s_meas)>0:
for rec in s_meas: # fix up a few things for the output record
PmagSpecRec["magic_instrument_codes"]=rec["magic_instrument_codes"] # copy over instruments
PmagSpecRec["er_citation_names"]="This study"
PmagSpecRec["er_specimen_name"]=s
PmagSpecRec["er_sample_name"]=rec["er_sample_name"]
PmagSpecRec["er_site_name"]=rec["er_site_name"]
PmagSpecRec["er_location_name"]=rec["er_location_name"]
locname=rec['er_location_name']
if 'er_expedition_name' in rec.keys(): PmagSpecRec["er_expedition_name"]=rec["er_expedition_name"]
PmagSpecRec["magic_method_codes"]=rec["magic_method_codes"]
if "magic_experiment_name" not in rec.keys():
PmagSpecRec["magic_experiment_names"]=""
else:
PmagSpecRec["magic_experiment_names"]=rec["magic_experiment_name"]
break
#
# find the data from the meas_data file for this specimen
#
data,units=pmag.find_dmag_rec(s,meas_data)
PmagSpecRec["measurement_step_unit"]= units
u=units.split(":")
if "T" in units:PmagSpecRec["magic_method_codes"]=PmagSpecRec["magic_method_codes"]+":LP-DIR-AF"
if "K" in units:PmagSpecRec["magic_method_codes"]=PmagSpecRec["magic_method_codes"]+":LP-DIR-T"
if "J" in units:PmagSpecRec["magic_method_codes"]=PmagSpecRec["magic_method_codes"]+":LP-DIR-M"
#
# find prior interpretation
#
if len(CurrRecs)==0: # check if already in
beg_pca,end_pca="",""
calculation_type=""
if inspec !="":
if verbose: print(" looking up previous interpretations...")
precs=pmag.get_dictitem(PriorRecs,'er_specimen_name',s,'T') # get all the prior recs with this specimen name
precs=pmag.get_dictitem(precs,'magic_method_codes','LP-DIR','has') # get the directional data
PriorRecs=pmag.get_dictitem(PriorRecs,'er_specimen_name',s,'F') # take them all out of prior recs
# get the ones that meet the current coordinate system
for prec in precs:
if 'specimen_tilt_correction' not in prec.keys() or prec['specimen_tilt_correction']=='-1':
crd='s'
elif prec['specimen_tilt_correction']=='0':
crd='g'
elif prec['specimen_tilt_correction']=='100':
crd='t'
else:
crd='?'
CurrRec={}
for key in prec.keys():CurrRec[key]=prec[key]
CurrRecs.append(CurrRec) # put in CurrRecs
method_codes= CurrRec["magic_method_codes"].replace(" ","").split(':')
calculation_type='DE-BFL'
if 'DE-FM' in method_codes: calculation_type='DE-FM'
if 'DE-BFP' in method_codes: calculation_type='DE-BFP'
if 'DE-BFL-A' in method_codes: calculation_type='DE-BFL-A'
if 'specimen_dang' not in CurrRec.keys():
if verbose:print('Run mk_redo.py and zeq_magic_redo.py to get the specimen_dang values')
CurrRec['specimen_dang']=-1
if calculation_type!='DE-FM' and crd==coord: # not a fisher mean
if verbose:print("Specimen N MAD DANG start end dec inc type component coordinates")
if units=='K':
if verbose:print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %s %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_mad"]),float(CurrRec["specimen_dang"]),float(CurrRec["measurement_step_min"])-273,float(CurrRec["measurement_step_max"])-273,float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,CurrRec['specimen_comp_name'],crd))
elif units=='T':
if verbose:print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %s %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_mad"]),float(CurrRec["specimen_dang"]),float(CurrRec["measurement_step_min"])*1e3,float(CurrRec["measurement_step_max"])*1e3,float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,CurrRec['specimen_comp_name'],crd))
elif 'T' in units and 'K' in units:
if float(CurrRec['measurement_step_min'])<1.0 :
min=float(CurrRec['measurement_step_min'])*1e3
else:
min=float(CurrRec['measurement_step_min'])-273
if float(CurrRec['measurement_step_max'])<1.0 :
max=float(CurrRec['measurement_step_max'])*1e3
else:
max=float(CurrRec['measurement_step_max'])-273
if verbose:print('%s %i %7.1f %i %i %7.1f %7.1f %7.1f, %s %s\n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_mad"]),float(CurrRec['specimen_dang']),min,max,float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,crd))
elif 'J' in units:
if verbose:print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %s %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_mad"]),float(CurrRec['specimen_dang']),float(CurrRec["measurement_step_min"]),float(CurrRec["measurement_step_max"]),float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,CurrRec['specimen_comp_name'],crd))
elif calculation_type=='DE-FM' and crd==coord: # fisher mean
if verbose:print("Specimen a95 DANG start end dec inc type component coordinates")
if units=='K':
if verbose:print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %s %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_alpha95"]),float(CurrRec["measurement_step_min"])-273,float(CurrRec["measurement_step_max"])-273,float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,CurrRec['specimen_comp_name'],crd))
elif units=='T':
if verbose:print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %s %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_alpha95"]),float(CurrRec["measurement_step_min"])*1e3,float(CurrRec["measurement_step_max"])*1e3,float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,CurrRec['specimen_comp_name'],crd))
elif 'T' in units and 'K' in units:
if float(CurrRec['measurement_step_min'])<1.0 :
min=float(CurrRec['measurement_step_min'])*1e3
else:
min=float(CurrRec['measurement_step_min'])-273
if float(CurrRec['measurement_step_max'])<1.0 :
max=float(CurrRec['measurement_step_max'])*1e3
else:
max=float(CurrRec['measurement_step_max'])-273
if verbose:print('%s %i %7.1f %i %i %7.1f %7.1f %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_alpha95"]),min,max,float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,crd))
elif 'J' in units:
if verbose:print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %s %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_mad"]),float(CurrRec["measurement_step_min"]),float(CurrRec["measurement_step_max"]),float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,CurrRec['specimen_comp_name'],crd))
if len(CurrRecs)==0:beg_pca,end_pca="",""
datablock=data
noskip=1
if len(datablock) <3:
noskip=0
if backup==0:
k+=1
else:
k-=1
if len(CurrRecs)>0:
for rec in CurrRecs:
PriorRecs.append(rec)
CurrRecs=[]
else:
backup=0
if noskip:
#
# find replicate measurements at given treatment step and average them
#
# step_meth,avedata=pmag.vspec(data)
# if len(avedata) != len(datablock):
# if doave==1:
# method_codes.append("DE-VM")
# datablock=avedata
# #
# do geo or stratigraphic correction now
#
if geo==1:
#
# find top priority orientation method
orient,az_type=pmag.get_orient(samp_data,PmagSpecRec["er_sample_name"])
if az_type=='SO-NO':
if verbose: print("no orientation data for ",s)
orient["sample_azimuth"]=0
orient["sample_dip"]=0
noorient=1
method_codes.append("SO-NO")
orient["sample_azimuth"]=0
orient["sample_dip"]=0
orient["sample_bed_dip_azimuth"]=0
orient["sample_bed_dip"]=0
noorient=1
method_codes.append("SO-NO")
else:
noorient=0
#
# if stratigraphic selected, get stratigraphic correction
#
tiltblock,geoblock=[],[]
for rec in datablock:
d_geo,i_geo=pmag.dogeo(rec[1],rec[2],float(orient["sample_azimuth"]),float(orient["sample_dip"]))
geoblock.append([rec[0],d_geo,i_geo,rec[3],rec[4],rec[5],rec[6]])
if tilt==1 and "sample_bed_dip" in orient.keys() and float(orient['sample_bed_dip'])!=0:
d_tilt,i_tilt=pmag.dotilt(d_geo,i_geo,float(orient["sample_bed_dip_direction"]),float(orient["sample_bed_dip"]))
tiltblock.append([rec[0],d_tilt,i_tilt,rec[3],rec[4],rec[5],rec[6]])
if tilt==1: plotblock=tiltblock
if geo==1 and tilt==0:plotblock=geoblock
if geo==0 and tilt==0: plotblock=datablock
#
# set the end pca point to last point if not set
if e==0 or e>len(plotblock)-1: e=len(plotblock)-1
if angle=="": angle=plotblock[0][1] # rotate to NRM declination
title=s+'_s'
if geo==1 and tilt==0 and noorient!=1:title=s+'_g'
if tilt==1 and noorient!=1:title=s+'_t'
pmagplotlib.plot_zed(ZED,plotblock,angle,title,units)
if verbose:pmagplotlib.draw_figs(ZED)
if len(CurrRecs)!=0:
for prec in CurrRecs:
if 'calculation_type' not in prec.keys():
calculation_type=''
else:
calculation_type=prec["calculation_type"]
direction_type=prec["specimen_direction_type"]
if calculation_type !="":
beg_pca,end_pca="",""
for j in range(len(datablock)):
if data[j][0]==float(prec["measurement_step_min"]):beg_pca=j
if data[j][0]==float(prec["measurement_step_max"]):end_pca=j
if beg_pca=="" or end_pca=="":
if verbose:
print("something wrong with prior interpretation ")
break
if calculation_type!="":
if beg_pca=="":beg_pca=0
if end_pca=="":end_pca=len(plotblock)-1
if geo==1 and tilt==0:
mpars=pmag.domean(geoblock,beg_pca,end_pca,calculation_type)
if mpars["specimen_direction_type"]!="Error":
pmagplotlib.plot_dir(ZED,mpars,geoblock,angle)
if verbose:pmagplotlib.draw_figs(ZED)
if geo==1 and tilt==1:
mpars=pmag.domean(tiltblock,beg_pca,end_pca,calculation_type)
if mpars["specimen_direction_type"]!="Error":
pmagplotlib.plot_dir(ZED,mpars,tiltblock,angle)
if verbose:pmagplotlib.draw_figs(ZED)
if geo==0 and tilt==0:
mpars=pmag.domean(datablock,beg_pca,end_pca,calculation_type)
if mpars["specimen_direction_type"]!="Error":
pmagplotlib.plot_dir(ZED,mpars,plotblock,angle)
if verbose:pmagplotlib.draw_figs(ZED)
#
# print out data for this sample to screen
#
recnum=0
for plotrec in plotblock:
if units=='T' and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f %s' % (plotrec[5], recnum,plotrec[0]*1e3," mT",plotrec[3],plotrec[1],plotrec[2],plotrec[6]))
if units=="K" and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f %s' % (plotrec[5], recnum,plotrec[0]-273,' C',plotrec[3],plotrec[1],plotrec[2],plotrec[6]))
if units=="J" and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f %s' % (plotrec[5], recnum,plotrec[0],' J',plotrec[3],plotrec[1],plotrec[2],plotrec[6]))
if 'K' in units and 'T' in units:
if plotrec[0]>=1. and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f %s' % (plotrec[5], recnum,plotrec[0]-273,' C',plotrec[3],plotrec[1],plotrec[2],plotrec[6]))
if plotrec[0]<1. and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f %s' % (plotrec[5], recnum,plotrec[0]*1e3," mT",plotrec[3],plotrec[1],plotrec[2],plotrec[6]))
recnum += 1
if specimen!="":
if plot_file=="":
basename=locname+'_'+s
else:
basename=plot_file
files={}
for key in ZED.keys():
files[key]=basename+'_'+key+'.'+fmt
pmagplotlib.save_plots(ZED,files)
sys.exit()
else: # interactive
if plots==0:
ans='b'
k+=1
changeS=0
while ans != "":
if len(CurrRecs)==0:
print("""
g/b: indicates good/bad measurement. "bad" measurements excluded from calculation
set s[a]ve plot, [b]ounds for pca and calculate, [p]revious, [s]pecimen,
change [h]orizontal projection angle, change [c]oordinate systems,
[e]dit data, [q]uit:
""")
else:
print("""
g/b: indicates good/bad measurement. "bad" measurements excluded from calculation
set s[a]ve plot, [b]ounds for pca and calculate, [p]revious, [s]pecimen,
change [h]orizontal projection angle, change [c]oordinate systems,
[d]elete current interpretation(s), [e]dit data, [q]uit:
""")
ans=input('<Return> for next specimen \n')
setangle=0
if ans=='d': # delete this interpretation
CurrRecs=[]
k-=1 # replot same specimen
ans=""
changeS=1
if ans=='q':
if changeM==1:
ans=input('Save changes to magic_measurements.txt? y/[n] ')
if ans=='y':
pmag.magic_write(meas_file,meas_data,'magic_measurements')
print("Good bye")
sys.exit()
if ans=='a':
if plot_file=="":
basename=locname+'_'+s+'_'
else:
basename=plot_file
files={}
for key in ZED.keys():
files[key]=basename+'_'+coord+'_'+key+'.'+fmt
pmagplotlib.save_plots(ZED,files)
ans=""
if ans=='p':
k-=2
ans=""
backup=1
if ans=='c':
k-=1 # replot same block
if tilt==0 and geo ==1:print("You are currently viewing geographic coordinates ")
if tilt==1 and geo ==1:print("You are currently viewing stratigraphic coordinates ")
if tilt==0 and geo ==0: print("You are currently viewing sample coordinates ")
print("\n Which coordinate system do you wish to view? ")
coord=input(" <Return> specimen, [g] geographic, [t] tilt corrected ")
if coord=="g":geo,tilt=1,0
if coord=="t":
geo=1
tilt=1
if coord=="":
coord='s'
geo=0
tilt=0
if geo==1 and sfile=="":
samp_file=input(" Input er_samples file for sample orientations [er_samples.txt] " )
if samp_file=="":samp_file="er_samples.txt"
samp_data,file_type=pmag.magic_read(samp_file)
if file_type != 'er_samples':
print(file_type)
print("This is not a valid er_samples file - coordinate system not changed")
else:
sfile="ok"
ans=""
if ans=='s':
keepon=1
sample=input('Enter desired specimen name (or first part there of): ')
while keepon==1:
try:
k =sids.index(sample)
keepon=0
except:
tmplist=[]
for qq in range(len(sids)):
if sample in sids[qq]:tmplist.append(sids[qq])
print(sample," not found, but this was: ")
print(tmplist)
sample=input('Select one or try again\n ')
angle,direction_type="",""
setangle=0
ans=""
if ans=='h':
k-=1
angle=input("Enter desired declination for X axis 0-360 ")
angle=float(angle)
if angle==0:angle=0.001
s=sids[k]
setangle=1
ans=""
if ans=='e':
k-=1
ans=""
recnum=0
for plotrec in plotblock:
if plotrec[0]<=200 and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f ' % (plotrec[5], recnum,plotrec[0]*1e3," mT",plotrec[3],plotrec[1],plotrec[2]))
if plotrec[0]>200 and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f ' % (plotrec[5], recnum,plotrec[0]-273,' C',plotrec[3],plotrec[1],plotrec[2]))
recnum += 1
answer=input('Enter index of point to change from bad to good or vice versa: ')
try:
ind=int(answer)
meas_data=pmag.mark_dmag_rec(s,ind,meas_data)
changeM=1
except:
                            print('bad entry, try again')
if ans=='b':
if end_pca=="":end_pca=len(plotblock)-1
if beg_pca=="":beg_pca=0
k-=1 # stay on same sample until through
GoOn=0
while GoOn==0:
print('Enter index of first point for pca: ','[',beg_pca,']')
answer=input('return to keep default ')
if answer != "":
beg_pca=int(answer)
print('Enter index of last point for pca: ','[',end_pca,']')
answer=input('return to keep default ')
try:
end_pca=int(answer)
if plotblock[beg_pca][5]=='b' or plotblock[end_pca][5]=='b':
print("Can't select 'bad' measurement for PCA bounds -try again")
end_pca=len(plotblock)-1
beg_pca=0
elif beg_pca >=0 and beg_pca<=len(plotblock)-2 and end_pca>0 and end_pca<len(plotblock):
GoOn=1
else:
print(beg_pca,end_pca, " are bad entry of indices - try again")
end_pca=len(plotblock)-1
beg_pca=0
except:
print(beg_pca,end_pca, " are bad entry of indices - try again")
end_pca=len(plotblock)-1
beg_pca=0
GoOn=0
while GoOn==0:
if calculation_type!="":
print("Prior calculation type = ",calculation_type)
ct=input('Enter new Calculation Type: best-fit line, plane or fisher mean [l]/p/f : ' )
if ct=="" or ct=="l":
direction_type="l"
calculation_type="DE-BFL"
GoOn=1
elif ct=='p':
direction_type="p"
calculation_type="DE-BFP"
GoOn=1
elif ct=='f':
direction_type="l"
calculation_type="DE-FM"
GoOn=1
else:
print("bad entry of calculation type: try again. ")
pmagplotlib.plot_zed(ZED,plotblock,angle,s,units)
if verbose:pmagplotlib.draw_figs(ZED)
if geo==1 and tilt==0:
mpars=pmag.domean(geoblock,beg_pca,end_pca,calculation_type)
if mpars['specimen_direction_type']=='Error':break
PmagSpecRec["specimen_dec"]='%7.1f ' %(mpars["specimen_dec"])
PmagSpecRec["specimen_inc"]='%7.1f ' %(mpars["specimen_inc"])
if "SO-NO" not in method_codes:
PmagSpecRec["specimen_tilt_correction"]='0'
method_codes.append("DA-DIR-GEO")
else:
PmagSpecRec["specimen_tilt_correction"]='-1'
pmagplotlib.plot_dir(ZED,mpars,geoblock,angle)
if verbose:pmagplotlib.draw_figs(ZED)
if geo==1 and tilt==1:
mpars=pmag.domean(tiltblock,beg_pca,end_pca,calculation_type)
if mpars['specimen_direction_type']=='Error':break
PmagSpecRec["specimen_dec"]='%7.1f ' %(mpars["specimen_dec"])
PmagSpecRec["specimen_inc"]='%7.1f ' %(mpars["specimen_inc"])
if "SO-NO" not in method_codes:
PmagSpecRec["specimen_tilt_correction"]='100'
method_codes.append("DA-DIR-TILT")
else:
PmagSpecRec["specimen_tilt_correction"]='-1'
pmagplotlib.plot_dir(ZED,mpars,tiltblock,angle)
if verbose:pmagplotlib.draw_figs(ZED)
if geo==0 and tilt==0:
mpars=pmag.domean(datablock,beg_pca,end_pca,calculation_type)
if mpars['specimen_direction_type']=='Error':break
PmagSpecRec["specimen_dec"]='%7.1f ' %(mpars["specimen_dec"])
PmagSpecRec["specimen_inc"]='%7.1f ' %(mpars["specimen_inc"])
PmagSpecRec["specimen_tilt_correction"]='-1'
pmagplotlib.plot_dir(ZED,mpars,plotblock,angle)
if verbose:pmagplotlib.draw_figs(ZED)
PmagSpecRec["measurement_step_min"]='%8.3e ' %(mpars["measurement_step_min"])
PmagSpecRec["measurement_step_max"]='%8.3e ' %(mpars["measurement_step_max"])
PmagSpecRec["specimen_correction"]='u'
PmagSpecRec["specimen_dang"]='%7.1f ' %(mpars['specimen_dang'])
print('DANG: ',PmagSpecRec["specimen_dang"])
if calculation_type!='DE-FM':
PmagSpecRec["specimen_mad"]='%7.1f ' %(mpars["specimen_mad"])
PmagSpecRec["specimen_alpha95"]=""
else:
PmagSpecRec["specimen_alpha95"]='%7.1f ' %(mpars["specimen_alpha95"])
PmagSpecRec["specimen_mad"]=""
PmagSpecRec["specimen_n"]='%i ' %(mpars["specimen_n"])
PmagSpecRec["specimen_direction_type"]=direction_type
PmagSpecRec["calculation_type"]=calculation_type # redundant and won't be imported - just for convenience
method_codes=PmagSpecRec["magic_method_codes"].split(':')
if len(method_codes) != 0:
methstring=""
for meth in method_codes:
ctype=meth.split('-')
if 'DE' not in ctype:methstring=methstring+ ":" +meth # don't include old direction estimation methods
methstring=methstring+':'+calculation_type
PmagSpecRec["magic_method_codes"]= methstring.strip(':')
print('Method codes: ',PmagSpecRec['magic_method_codes'])
if calculation_type!='DE-FM':
if units=='K':
print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_mad"]),float(PmagSpecRec["specimen_dang"]),float(PmagSpecRec["measurement_step_min"])-273,float(PmagSpecRec["measurement_step_max"])-273,float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
elif units== 'T':
print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_mad"]),float(PmagSpecRec["specimen_dang"]),float(PmagSpecRec["measurement_step_min"])*1e3,float(PmagSpecRec["measurement_step_max"])*1e3,float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
elif 'T' in units and 'K' in units:
if float(PmagSpecRec['measurement_step_min'])<1.0 :
min=float(PmagSpecRec['measurement_step_min'])*1e3
else:
min=float(PmagSpecRec['measurement_step_min'])-273
if float(PmagSpecRec['measurement_step_max'])<1.0 :
max=float(PmagSpecRec['measurement_step_max'])*1e3
else:
max=float(PmagSpecRec['measurement_step_max'])-273
print('%s %i %7.1f %i %i %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_mad"]),float(PmagSpecRec["specimen_dang"]),min,max,float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
else:
print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_mad"]),float(PmagSpecRec["specimen_dang"]),float(PmagSpecRec["measurement_step_min"]),float(PmagSpecRec["measurement_step_max"]),float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
else:
if 'K' in units:
print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_alpha95"]),float(PmagSpecRec["specimen_dang"]),float(PmagSpecRec["measurement_step_min"])-273,float(PmagSpecRec["measurement_step_max"])-273,float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
elif 'T' in units:
print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_alpha95"]),float(PmagSpecRec["specimen_dang"]),float(PmagSpecRec["measurement_step_min"])*1e3,float(PmagSpecRec["measurement_step_max"])*1e3,float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
elif 'T' in units and 'K' in units:
if float(PmagSpecRec['measurement_step_min'])<1.0 :
min=float(PmagSpecRec['measurement_step_min'])*1e3
else:
min=float(PmagSpecRec['measurement_step_min'])-273
if float(PmagSpecRec['measurement_step_max'])<1.0 :
max=float(PmagSpecRec['measurement_step_max'])*1e3
else:
max=float(PmagSpecRec['measurement_step_max'])-273
print('%s %i %7.1f %i %i %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_alpha95"]),min,max,float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
else:
print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_alpha95"]),float(PmagSpecRec["measurement_step_min"]),float(PmagSpecRec["measurement_step_max"]),float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
saveit=input("Save this interpretation? [y]/n \n")
if saveit!="n":
changeS=1
#
# put in details
#
angle,direction_type,setangle="","",0
if len(CurrRecs)>0:
replace=input(" [0] add new component, or [1] replace existing interpretation(s) [default is replace] ")
if replace=="1" or replace=="":
CurrRecs=[]
PmagSpecRec['specimen_comp_name']='A'
CurrRecs.append(PmagSpecRec)
else:
print('These are the current component names for this specimen: ')
for trec in CurrRecs:print(trec['specimen_comp_name'])
compnum=input("Enter new component name: ")
PmagSpecRec['specimen_comp_name']=compnum
print("Adding new component: ",PmagSpecRec['specimen_comp_name'])
CurrRecs.append(PmagSpecRec)
else:
PmagSpecRec['specimen_comp_name']='A'
CurrRecs.append(PmagSpecRec)
k+=1
ans=""
else:
ans=""
else: # plots=1
k+=1
files={}
                locname=locname.replace('/','-')
print(PmagSpecRec)
for key in ZED.keys():
files[key]="LO:_"+locname+'_SI:_'+PmagSpecRec['er_site_name']+'_SA:_'+PmagSpecRec['er_sample_name']+'_SP:_'+s+'_CO:_'+coord+'_TY:_'+key+'_.'+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
titles={}
titles['demag']='DeMag Plot'
titles['zijd']='Zijderveld Plot'
titles['eqarea']='Equal Area Plot'
ZED = pmagplotlib.add_borders(ZED,titles,black,purple)
pmagplotlib.save_plots(ZED,files)
if len(CurrRecs)>0:
for rec in CurrRecs: PriorRecs.append(rec)
if changeS==1:
if len(PriorRecs)>0:
save_redo(PriorRecs,inspec)
else:
os.system('rm '+inspec)
CurrRecs,beg_pca,end_pca=[],"","" # next up
changeS=0
else: k+=1 # skip record - not enough data
if changeM==1:
pmag.magic_write(meas_file,meas_data,'magic_measurements') | python | def main():
"""
NAME
zeq_magic.py
DESCRIPTION
reads in magic_measurements formatted file, makes plots of remanence decay
during demagnetization experiments. Reads in prior interpretations saved in
a pmag_specimens formatted file and allows re-interpretations of best-fit lines
and planes and saves (revised or new) interpretations in a pmag_specimens file.
        Interpretations are saved in the coordinate system used. Also allows judicious editing of
measurements to eliminate "bad" measurements. These are marked as such in the magic_measurements
        input file. They are NOT deleted, just ignored.
SYNTAX
zeq_magic.py [command line options]
OPTIONS
-h prints help message and quits
-f MEASFILE: sets magic_measurements format input file, default: magic_measurements.txt
        -fsp SPECFILE: sets pmag_specimens format file with prior interpretations, default: zeq_specimens.txt
-Fp PLTFILE: sets filename for saved plot, default is name_type.fmt (where type is zijd, eqarea or decay curve)
-crd [s,g,t]: sets coordinate system, g=geographic, t=tilt adjusted, default: specimen coordinate system
-fsa SAMPFILE: sets er_samples format file with orientation information, default: er_samples.txt
-spc SPEC plots single specimen SPEC, saves plot with specified format
with optional -dir settings and quits
-dir [L,P,F][beg][end]: sets calculation type for principal component analysis, default is none
beg: starting step for PCA calculation
end: ending step for PCA calculation
[L,P,F]: calculation type for line, plane or fisher mean
must be used with -spc option
-fmt FMT: set format of saved plot [png,svg,jpg]
-A: suppresses averaging of replicate measurements, default is to average
-sav: saves all plots without review
SCREEN OUTPUT:
Specimen, N, a95, StepMin, StepMax, Dec, Inc, calculation type
"""
# initialize some variables
doave,e,b=1,0,0 # average replicates, initial end and beginning step
plots,coord=0,'s'
noorient=0
version_num=pmag.get_version()
verbose=pmagplotlib.verbose
beg_pca,end_pca,direction_type="","",'l'
calculation_type,fmt="","svg"
user,spec_keys,locname="",[],''
plot_file=""
sfile=""
plot_file=""
PriorRecs=[] # empty list for prior interpretations
backup=0
specimen="" # can skip everything and just plot one specimen with bounds e,b
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-WD' in sys.argv:
ind=sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
else:
dir_path='.'
inspec=dir_path+'/'+'zeq_specimens.txt'
meas_file,geo,tilt,ask,samp_file=dir_path+'/magic_measurements.txt',0,0,0,dir_path+'/er_samples.txt'
if '-f' in sys.argv:
ind=sys.argv.index('-f')
meas_file=dir_path+'/'+sys.argv[ind+1]
if '-fsp' in sys.argv:
ind=sys.argv.index('-fsp')
inspec=dir_path+'/'+sys.argv[ind+1]
if '-fsa' in sys.argv:
ind=sys.argv.index('-fsa')
samp_file=dir_path+'/'+sys.argv[ind+1]
sfile='ok'
if '-crd' in sys.argv:
ind=sys.argv.index('-crd')
coord=sys.argv[ind+1]
if coord=='g' or coord=='t':
samp_data,file_type=pmag.magic_read(samp_file)
if file_type=='er_samples':sfile='ok'
geo=1
if coord=='t':tilt=1
if '-spc' in sys.argv:
ind=sys.argv.index('-spc')
specimen=sys.argv[ind+1]
if '-dir' in sys.argv:
ind=sys.argv.index('-dir')
direction_type=sys.argv[ind+1]
beg_pca=int(sys.argv[ind+2])
end_pca=int(sys.argv[ind+3])
if direction_type=='L':calculation_type='DE-BFL'
if direction_type=='P':calculation_type='DE-BFP'
if direction_type=='F':calculation_type='DE-FM'
if '-Fp' in sys.argv:
ind=sys.argv.index('-Fp')
plot_file=dir_path+'/'+sys.argv[ind+1]
if '-A' in sys.argv: doave=0
if '-sav' in sys.argv:
plots=1
verbose=0
if '-fmt' in sys.argv:
ind=sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
#
first_save=1
meas_data,file_type=pmag.magic_read(meas_file)
changeM,changeS=0,0 # check if data or interpretations have changed
if file_type != 'magic_measurements':
        print(file_type,"This is not a valid magic_measurements file ")
sys.exit()
for rec in meas_data:
if "magic_method_codes" not in rec.keys(): rec["magic_method_codes"]=""
methods=""
tmp=rec["magic_method_codes"].replace(" ","").split(":")
for meth in tmp:
methods=methods+meth+":"
rec["magic_method_codes"]=methods[:-1] # get rid of annoying spaces in Anthony's export files
if "magic_instrument_codes" not in rec.keys() :rec["magic_instrument_codes"]=""
PriorSpecs=[]
PriorRecs,file_type=pmag.magic_read(inspec)
if len(PriorRecs)==0:
if verbose:print("starting new file ",inspec)
for Rec in PriorRecs:
if 'magic_software_packages' not in Rec.keys():Rec['magic_software_packages']=""
if Rec['er_specimen_name'] not in PriorSpecs:
if 'specimen_comp_name' not in Rec.keys():Rec['specimen_comp_name']="A"
PriorSpecs.append(Rec['er_specimen_name'])
else:
if 'specimen_comp_name' not in Rec.keys():Rec['specimen_comp_name']="A"
if "magic_method_codes" in Rec.keys():
methods=[]
tmp=Rec["magic_method_codes"].replace(" ","").split(":")
for meth in tmp:
methods.append(meth)
if 'DE-FM' in methods:
Rec['calculation_type']='DE-FM' # this won't be imported but helps
if 'DE-BFL' in methods:
Rec['calculation_type']='DE-BFL'
if 'DE-BFL-A' in methods:
Rec['calculation_type']='DE-BFL-A'
if 'DE-BFL-O' in methods:
Rec['calculation_type']='DE-BFL-O'
if 'DE-BFP' in methods:
Rec['calculation_type']='DE-BFP'
else:
Rec['calculation_type']='DE-BFL' # default is to assume a best-fit line
#
# get list of unique specimen names
#
sids=pmag.get_specs(meas_data)
#
# set up plots, angle sets X axis to horizontal, direction_type 'l' is best-fit line
# direction_type='p' is great circle
#
#
# draw plots for sample s - default is just to step through zijderveld diagrams
#
#
# define figure numbers for equal area, zijderveld,
    #  and intensity vs. demagnetization step respectively
ZED={}
ZED['eqarea'],ZED['zijd'], ZED['demag']=1,2,3
pmagplotlib.plot_init(ZED['eqarea'],5,5)
pmagplotlib.plot_init(ZED['zijd'],6,5)
pmagplotlib.plot_init(ZED['demag'],5,5)
save_pca=0
if specimen=="":
k = 0
else:
k=sids.index(specimen)
angle,direction_type="",""
setangle=0
CurrRecs=[]
while k < len(sids):
CurrRecs=[]
if setangle==0:angle=""
method_codes,inst_code=[],""
s=sids[k]
PmagSpecRec={}
PmagSpecRec["er_analyst_mail_names"]=user
PmagSpecRec['magic_software_packages']=version_num
PmagSpecRec['specimen_description']=""
PmagSpecRec['magic_method_codes']=""
if verbose and s!="":print(s, k , 'out of ',len(sids))
#
# collect info for the PmagSpecRec dictionary
#
s_meas=pmag.get_dictitem(meas_data,'er_specimen_name',s,'T') # fish out this specimen
s_meas=pmag.get_dictitem(s_meas,'magic_method_codes','Z','has') # fish out zero field steps
if len(s_meas)>0:
for rec in s_meas: # fix up a few things for the output record
PmagSpecRec["magic_instrument_codes"]=rec["magic_instrument_codes"] # copy over instruments
PmagSpecRec["er_citation_names"]="This study"
PmagSpecRec["er_specimen_name"]=s
PmagSpecRec["er_sample_name"]=rec["er_sample_name"]
PmagSpecRec["er_site_name"]=rec["er_site_name"]
PmagSpecRec["er_location_name"]=rec["er_location_name"]
locname=rec['er_location_name']
if 'er_expedition_name' in rec.keys(): PmagSpecRec["er_expedition_name"]=rec["er_expedition_name"]
PmagSpecRec["magic_method_codes"]=rec["magic_method_codes"]
if "magic_experiment_name" not in rec.keys():
PmagSpecRec["magic_experiment_names"]=""
else:
PmagSpecRec["magic_experiment_names"]=rec["magic_experiment_name"]
break
#
# find the data from the meas_data file for this specimen
#
data,units=pmag.find_dmag_rec(s,meas_data)
PmagSpecRec["measurement_step_unit"]= units
u=units.split(":")
if "T" in units:PmagSpecRec["magic_method_codes"]=PmagSpecRec["magic_method_codes"]+":LP-DIR-AF"
if "K" in units:PmagSpecRec["magic_method_codes"]=PmagSpecRec["magic_method_codes"]+":LP-DIR-T"
if "J" in units:PmagSpecRec["magic_method_codes"]=PmagSpecRec["magic_method_codes"]+":LP-DIR-M"
#
# find prior interpretation
#
if len(CurrRecs)==0: # check if already in
beg_pca,end_pca="",""
calculation_type=""
if inspec !="":
if verbose: print(" looking up previous interpretations...")
precs=pmag.get_dictitem(PriorRecs,'er_specimen_name',s,'T') # get all the prior recs with this specimen name
precs=pmag.get_dictitem(precs,'magic_method_codes','LP-DIR','has') # get the directional data
PriorRecs=pmag.get_dictitem(PriorRecs,'er_specimen_name',s,'F') # take them all out of prior recs
# get the ones that meet the current coordinate system
for prec in precs:
if 'specimen_tilt_correction' not in prec.keys() or prec['specimen_tilt_correction']=='-1':
crd='s'
elif prec['specimen_tilt_correction']=='0':
crd='g'
elif prec['specimen_tilt_correction']=='100':
crd='t'
else:
crd='?'
CurrRec={}
for key in prec.keys():CurrRec[key]=prec[key]
CurrRecs.append(CurrRec) # put in CurrRecs
method_codes= CurrRec["magic_method_codes"].replace(" ","").split(':')
calculation_type='DE-BFL'
if 'DE-FM' in method_codes: calculation_type='DE-FM'
if 'DE-BFP' in method_codes: calculation_type='DE-BFP'
if 'DE-BFL-A' in method_codes: calculation_type='DE-BFL-A'
if 'specimen_dang' not in CurrRec.keys():
if verbose:print('Run mk_redo.py and zeq_magic_redo.py to get the specimen_dang values')
CurrRec['specimen_dang']=-1
if calculation_type!='DE-FM' and crd==coord: # not a fisher mean
if verbose:print("Specimen N MAD DANG start end dec inc type component coordinates")
if units=='K':
if verbose:print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %s %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_mad"]),float(CurrRec["specimen_dang"]),float(CurrRec["measurement_step_min"])-273,float(CurrRec["measurement_step_max"])-273,float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,CurrRec['specimen_comp_name'],crd))
elif units=='T':
if verbose:print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %s %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_mad"]),float(CurrRec["specimen_dang"]),float(CurrRec["measurement_step_min"])*1e3,float(CurrRec["measurement_step_max"])*1e3,float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,CurrRec['specimen_comp_name'],crd))
elif 'T' in units and 'K' in units:
if float(CurrRec['measurement_step_min'])<1.0 :
min=float(CurrRec['measurement_step_min'])*1e3
else:
min=float(CurrRec['measurement_step_min'])-273
if float(CurrRec['measurement_step_max'])<1.0 :
max=float(CurrRec['measurement_step_max'])*1e3
else:
max=float(CurrRec['measurement_step_max'])-273
if verbose:print('%s %i %7.1f %i %i %7.1f %7.1f %7.1f, %s %s\n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_mad"]),float(CurrRec['specimen_dang']),min,max,float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,crd))
elif 'J' in units:
if verbose:print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %s %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_mad"]),float(CurrRec['specimen_dang']),float(CurrRec["measurement_step_min"]),float(CurrRec["measurement_step_max"]),float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,CurrRec['specimen_comp_name'],crd))
elif calculation_type=='DE-FM' and crd==coord: # fisher mean
if verbose:print("Specimen a95 DANG start end dec inc type component coordinates")
if units=='K':
if verbose:print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %s %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_alpha95"]),float(CurrRec["measurement_step_min"])-273,float(CurrRec["measurement_step_max"])-273,float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,CurrRec['specimen_comp_name'],crd))
elif units=='T':
if verbose:print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %s %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_alpha95"]),float(CurrRec["measurement_step_min"])*1e3,float(CurrRec["measurement_step_max"])*1e3,float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,CurrRec['specimen_comp_name'],crd))
elif 'T' in units and 'K' in units:
if float(CurrRec['measurement_step_min'])<1.0 :
min=float(CurrRec['measurement_step_min'])*1e3
else:
min=float(CurrRec['measurement_step_min'])-273
if float(CurrRec['measurement_step_max'])<1.0 :
max=float(CurrRec['measurement_step_max'])*1e3
else:
max=float(CurrRec['measurement_step_max'])-273
if verbose:print('%s %i %7.1f %i %i %7.1f %7.1f %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_alpha95"]),min,max,float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,crd))
elif 'J' in units:
if verbose:print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %s %s %s \n' % (CurrRec["er_specimen_name"],int(CurrRec["specimen_n"]),float(CurrRec["specimen_mad"]),float(CurrRec["measurement_step_min"]),float(CurrRec["measurement_step_max"]),float(CurrRec["specimen_dec"]),float(CurrRec["specimen_inc"]),calculation_type,CurrRec['specimen_comp_name'],crd))
if len(CurrRecs)==0:beg_pca,end_pca="",""
datablock=data
noskip=1
if len(datablock) <3:
noskip=0
if backup==0:
k+=1
else:
k-=1
if len(CurrRecs)>0:
for rec in CurrRecs:
PriorRecs.append(rec)
CurrRecs=[]
else:
backup=0
if noskip:
#
# find replicate measurements at given treatment step and average them
#
# step_meth,avedata=pmag.vspec(data)
# if len(avedata) != len(datablock):
# if doave==1:
# method_codes.append("DE-VM")
# datablock=avedata
# #
# do geo or stratigraphic correction now
#
if geo==1:
#
# find top priority orientation method
orient,az_type=pmag.get_orient(samp_data,PmagSpecRec["er_sample_name"])
if az_type=='SO-NO':
if verbose: print("no orientation data for ",s)
orient["sample_azimuth"]=0
orient["sample_dip"]=0
noorient=1
method_codes.append("SO-NO")
orient["sample_azimuth"]=0
orient["sample_dip"]=0
orient["sample_bed_dip_azimuth"]=0
orient["sample_bed_dip"]=0
noorient=1
method_codes.append("SO-NO")
else:
noorient=0
#
# if stratigraphic selected, get stratigraphic correction
#
tiltblock,geoblock=[],[]
for rec in datablock:
d_geo,i_geo=pmag.dogeo(rec[1],rec[2],float(orient["sample_azimuth"]),float(orient["sample_dip"]))
geoblock.append([rec[0],d_geo,i_geo,rec[3],rec[4],rec[5],rec[6]])
if tilt==1 and "sample_bed_dip" in orient.keys() and float(orient['sample_bed_dip'])!=0:
d_tilt,i_tilt=pmag.dotilt(d_geo,i_geo,float(orient["sample_bed_dip_direction"]),float(orient["sample_bed_dip"]))
tiltblock.append([rec[0],d_tilt,i_tilt,rec[3],rec[4],rec[5],rec[6]])
if tilt==1: plotblock=tiltblock
if geo==1 and tilt==0:plotblock=geoblock
if geo==0 and tilt==0: plotblock=datablock
#
# set the end pca point to last point if not set
if e==0 or e>len(plotblock)-1: e=len(plotblock)-1
if angle=="": angle=plotblock[0][1] # rotate to NRM declination
title=s+'_s'
if geo==1 and tilt==0 and noorient!=1:title=s+'_g'
if tilt==1 and noorient!=1:title=s+'_t'
pmagplotlib.plot_zed(ZED,plotblock,angle,title,units)
if verbose:pmagplotlib.draw_figs(ZED)
if len(CurrRecs)!=0:
for prec in CurrRecs:
if 'calculation_type' not in prec.keys():
calculation_type=''
else:
calculation_type=prec["calculation_type"]
direction_type=prec["specimen_direction_type"]
if calculation_type !="":
beg_pca,end_pca="",""
for j in range(len(datablock)):
if data[j][0]==float(prec["measurement_step_min"]):beg_pca=j
if data[j][0]==float(prec["measurement_step_max"]):end_pca=j
if beg_pca=="" or end_pca=="":
if verbose:
print("something wrong with prior interpretation ")
break
if calculation_type!="":
if beg_pca=="":beg_pca=0
if end_pca=="":end_pca=len(plotblock)-1
if geo==1 and tilt==0:
mpars=pmag.domean(geoblock,beg_pca,end_pca,calculation_type)
if mpars["specimen_direction_type"]!="Error":
pmagplotlib.plot_dir(ZED,mpars,geoblock,angle)
if verbose:pmagplotlib.draw_figs(ZED)
if geo==1 and tilt==1:
mpars=pmag.domean(tiltblock,beg_pca,end_pca,calculation_type)
if mpars["specimen_direction_type"]!="Error":
pmagplotlib.plot_dir(ZED,mpars,tiltblock,angle)
if verbose:pmagplotlib.draw_figs(ZED)
if geo==0 and tilt==0:
mpars=pmag.domean(datablock,beg_pca,end_pca,calculation_type)
if mpars["specimen_direction_type"]!="Error":
pmagplotlib.plot_dir(ZED,mpars,plotblock,angle)
if verbose:pmagplotlib.draw_figs(ZED)
#
# print out data for this sample to screen
#
recnum=0
for plotrec in plotblock:
if units=='T' and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f %s' % (plotrec[5], recnum,plotrec[0]*1e3," mT",plotrec[3],plotrec[1],plotrec[2],plotrec[6]))
if units=="K" and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f %s' % (plotrec[5], recnum,plotrec[0]-273,' C',plotrec[3],plotrec[1],plotrec[2],plotrec[6]))
if units=="J" and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f %s' % (plotrec[5], recnum,plotrec[0],' J',plotrec[3],plotrec[1],plotrec[2],plotrec[6]))
if 'K' in units and 'T' in units:
if plotrec[0]>=1. and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f %s' % (plotrec[5], recnum,plotrec[0]-273,' C',plotrec[3],plotrec[1],plotrec[2],plotrec[6]))
if plotrec[0]<1. and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f %s' % (plotrec[5], recnum,plotrec[0]*1e3," mT",plotrec[3],plotrec[1],plotrec[2],plotrec[6]))
recnum += 1
if specimen!="":
if plot_file=="":
basename=locname+'_'+s
else:
basename=plot_file
files={}
for key in ZED.keys():
files[key]=basename+'_'+key+'.'+fmt
pmagplotlib.save_plots(ZED,files)
sys.exit()
else: # interactive
if plots==0:
ans='b'
k+=1
changeS=0
while ans != "":
if len(CurrRecs)==0:
print("""
g/b: indicates good/bad measurement. "bad" measurements excluded from calculation
set s[a]ve plot, [b]ounds for pca and calculate, [p]revious, [s]pecimen,
change [h]orizontal projection angle, change [c]oordinate systems,
[e]dit data, [q]uit:
""")
else:
print("""
g/b: indicates good/bad measurement. "bad" measurements excluded from calculation
set s[a]ve plot, [b]ounds for pca and calculate, [p]revious, [s]pecimen,
change [h]orizontal projection angle, change [c]oordinate systems,
[d]elete current interpretation(s), [e]dit data, [q]uit:
""")
ans=input('<Return> for next specimen \n')
setangle=0
if ans=='d': # delete this interpretation
CurrRecs=[]
k-=1 # replot same specimen
ans=""
changeS=1
if ans=='q':
if changeM==1:
ans=input('Save changes to magic_measurements.txt? y/[n] ')
if ans=='y':
pmag.magic_write(meas_file,meas_data,'magic_measurements')
print("Good bye")
sys.exit()
if ans=='a':
if plot_file=="":
basename=locname+'_'+s+'_'
else:
basename=plot_file
files={}
for key in ZED.keys():
files[key]=basename+'_'+coord+'_'+key+'.'+fmt
pmagplotlib.save_plots(ZED,files)
ans=""
if ans=='p':
k-=2
ans=""
backup=1
if ans=='c':
k-=1 # replot same block
if tilt==0 and geo ==1:print("You are currently viewing geographic coordinates ")
if tilt==1 and geo ==1:print("You are currently viewing stratigraphic coordinates ")
if tilt==0 and geo ==0: print("You are currently viewing sample coordinates ")
print("\n Which coordinate system do you wish to view? ")
coord=input(" <Return> specimen, [g] geographic, [t] tilt corrected ")
if coord=="g":geo,tilt=1,0
if coord=="t":
geo=1
tilt=1
if coord=="":
coord='s'
geo=0
tilt=0
if geo==1 and sfile=="":
samp_file=input(" Input er_samples file for sample orientations [er_samples.txt] " )
if samp_file=="":samp_file="er_samples.txt"
samp_data,file_type=pmag.magic_read(samp_file)
if file_type != 'er_samples':
print(file_type)
print("This is not a valid er_samples file - coordinate system not changed")
else:
sfile="ok"
ans=""
if ans=='s':
keepon=1
sample=input('Enter desired specimen name (or first part there of): ')
while keepon==1:
try:
k =sids.index(sample)
keepon=0
except:
tmplist=[]
for qq in range(len(sids)):
if sample in sids[qq]:tmplist.append(sids[qq])
print(sample," not found, but this was: ")
print(tmplist)
sample=input('Select one or try again\n ')
angle,direction_type="",""
setangle=0
ans=""
if ans=='h':
k-=1
angle=input("Enter desired declination for X axis 0-360 ")
angle=float(angle)
if angle==0:angle=0.001
s=sids[k]
setangle=1
ans=""
if ans=='e':
k-=1
ans=""
recnum=0
for plotrec in plotblock:
if plotrec[0]<=200 and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f ' % (plotrec[5], recnum,plotrec[0]*1e3," mT",plotrec[3],plotrec[1],plotrec[2]))
if plotrec[0]>200 and verbose: print('%s: %i %7.1f %s %8.3e %7.1f %7.1f ' % (plotrec[5], recnum,plotrec[0]-273,' C',plotrec[3],plotrec[1],plotrec[2]))
recnum += 1
answer=input('Enter index of point to change from bad to good or vice versa: ')
try:
ind=int(answer)
meas_data=pmag.mark_dmag_rec(s,ind,meas_data)
changeM=1
except:
                        print('bad entry, try again')
if ans=='b':
if end_pca=="":end_pca=len(plotblock)-1
if beg_pca=="":beg_pca=0
k-=1 # stay on same sample until through
GoOn=0
while GoOn==0:
print('Enter index of first point for pca: ','[',beg_pca,']')
answer=input('return to keep default ')
if answer != "":
beg_pca=int(answer)
print('Enter index of last point for pca: ','[',end_pca,']')
answer=input('return to keep default ')
try:
end_pca=int(answer)
if plotblock[beg_pca][5]=='b' or plotblock[end_pca][5]=='b':
print("Can't select 'bad' measurement for PCA bounds -try again")
end_pca=len(plotblock)-1
beg_pca=0
elif beg_pca >=0 and beg_pca<=len(plotblock)-2 and end_pca>0 and end_pca<len(plotblock):
GoOn=1
else:
                                print(beg_pca,end_pca, " are not valid indices - try again")
end_pca=len(plotblock)-1
beg_pca=0
except:
                                print(beg_pca,end_pca, " are not valid indices - try again")
end_pca=len(plotblock)-1
beg_pca=0
GoOn=0
while GoOn==0:
if calculation_type!="":
print("Prior calculation type = ",calculation_type)
ct=input('Enter new Calculation Type: best-fit line, plane or fisher mean [l]/p/f : ' )
if ct=="" or ct=="l":
direction_type="l"
calculation_type="DE-BFL"
GoOn=1
elif ct=='p':
direction_type="p"
calculation_type="DE-BFP"
GoOn=1
elif ct=='f':
direction_type="l"
calculation_type="DE-FM"
GoOn=1
else:
print("bad entry of calculation type: try again. ")
pmagplotlib.plot_zed(ZED,plotblock,angle,s,units)
if verbose:pmagplotlib.draw_figs(ZED)
if geo==1 and tilt==0:
mpars=pmag.domean(geoblock,beg_pca,end_pca,calculation_type)
if mpars['specimen_direction_type']=='Error':break
PmagSpecRec["specimen_dec"]='%7.1f ' %(mpars["specimen_dec"])
PmagSpecRec["specimen_inc"]='%7.1f ' %(mpars["specimen_inc"])
if "SO-NO" not in method_codes:
PmagSpecRec["specimen_tilt_correction"]='0'
method_codes.append("DA-DIR-GEO")
else:
PmagSpecRec["specimen_tilt_correction"]='-1'
pmagplotlib.plot_dir(ZED,mpars,geoblock,angle)
if verbose:pmagplotlib.draw_figs(ZED)
if geo==1 and tilt==1:
mpars=pmag.domean(tiltblock,beg_pca,end_pca,calculation_type)
if mpars['specimen_direction_type']=='Error':break
PmagSpecRec["specimen_dec"]='%7.1f ' %(mpars["specimen_dec"])
PmagSpecRec["specimen_inc"]='%7.1f ' %(mpars["specimen_inc"])
if "SO-NO" not in method_codes:
PmagSpecRec["specimen_tilt_correction"]='100'
method_codes.append("DA-DIR-TILT")
else:
PmagSpecRec["specimen_tilt_correction"]='-1'
pmagplotlib.plot_dir(ZED,mpars,tiltblock,angle)
if verbose:pmagplotlib.draw_figs(ZED)
if geo==0 and tilt==0:
mpars=pmag.domean(datablock,beg_pca,end_pca,calculation_type)
if mpars['specimen_direction_type']=='Error':break
PmagSpecRec["specimen_dec"]='%7.1f ' %(mpars["specimen_dec"])
PmagSpecRec["specimen_inc"]='%7.1f ' %(mpars["specimen_inc"])
PmagSpecRec["specimen_tilt_correction"]='-1'
pmagplotlib.plot_dir(ZED,mpars,plotblock,angle)
if verbose:pmagplotlib.draw_figs(ZED)
PmagSpecRec["measurement_step_min"]='%8.3e ' %(mpars["measurement_step_min"])
PmagSpecRec["measurement_step_max"]='%8.3e ' %(mpars["measurement_step_max"])
PmagSpecRec["specimen_correction"]='u'
PmagSpecRec["specimen_dang"]='%7.1f ' %(mpars['specimen_dang'])
print('DANG: ',PmagSpecRec["specimen_dang"])
if calculation_type!='DE-FM':
PmagSpecRec["specimen_mad"]='%7.1f ' %(mpars["specimen_mad"])
PmagSpecRec["specimen_alpha95"]=""
else:
PmagSpecRec["specimen_alpha95"]='%7.1f ' %(mpars["specimen_alpha95"])
PmagSpecRec["specimen_mad"]=""
PmagSpecRec["specimen_n"]='%i ' %(mpars["specimen_n"])
PmagSpecRec["specimen_direction_type"]=direction_type
PmagSpecRec["calculation_type"]=calculation_type # redundant and won't be imported - just for convenience
method_codes=PmagSpecRec["magic_method_codes"].split(':')
if len(method_codes) != 0:
methstring=""
for meth in method_codes:
ctype=meth.split('-')
if 'DE' not in ctype:methstring=methstring+ ":" +meth # don't include old direction estimation methods
methstring=methstring+':'+calculation_type
PmagSpecRec["magic_method_codes"]= methstring.strip(':')
print('Method codes: ',PmagSpecRec['magic_method_codes'])
if calculation_type!='DE-FM':
if units=='K':
print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_mad"]),float(PmagSpecRec["specimen_dang"]),float(PmagSpecRec["measurement_step_min"])-273,float(PmagSpecRec["measurement_step_max"])-273,float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
elif units== 'T':
print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_mad"]),float(PmagSpecRec["specimen_dang"]),float(PmagSpecRec["measurement_step_min"])*1e3,float(PmagSpecRec["measurement_step_max"])*1e3,float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
elif 'T' in units and 'K' in units:
if float(PmagSpecRec['measurement_step_min'])<1.0 :
min=float(PmagSpecRec['measurement_step_min'])*1e3
else:
min=float(PmagSpecRec['measurement_step_min'])-273
if float(PmagSpecRec['measurement_step_max'])<1.0 :
max=float(PmagSpecRec['measurement_step_max'])*1e3
else:
max=float(PmagSpecRec['measurement_step_max'])-273
print('%s %i %7.1f %i %i %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_mad"]),float(PmagSpecRec["specimen_dang"]),min,max,float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
else:
print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_mad"]),float(PmagSpecRec["specimen_dang"]),float(PmagSpecRec["measurement_step_min"]),float(PmagSpecRec["measurement_step_max"]),float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
else:
if 'K' in units:
print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_alpha95"]),float(PmagSpecRec["specimen_dang"]),float(PmagSpecRec["measurement_step_min"])-273,float(PmagSpecRec["measurement_step_max"])-273,float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
elif 'T' in units:
print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_alpha95"]),float(PmagSpecRec["specimen_dang"]),float(PmagSpecRec["measurement_step_min"])*1e3,float(PmagSpecRec["measurement_step_max"])*1e3,float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
elif 'T' in units and 'K' in units:
if float(PmagSpecRec['measurement_step_min'])<1.0 :
min=float(PmagSpecRec['measurement_step_min'])*1e3
else:
min=float(PmagSpecRec['measurement_step_min'])-273
if float(PmagSpecRec['measurement_step_max'])<1.0 :
max=float(PmagSpecRec['measurement_step_max'])*1e3
else:
max=float(PmagSpecRec['measurement_step_max'])-273
print('%s %i %7.1f %i %i %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_alpha95"]),min,max,float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
else:
print('%s %i %7.1f %7.1f %7.1f %7.1f %7.1f, %s \n' % (PmagSpecRec["er_specimen_name"],int(PmagSpecRec["specimen_n"]),float(PmagSpecRec["specimen_alpha95"]),float(PmagSpecRec["measurement_step_min"]),float(PmagSpecRec["measurement_step_max"]),float(PmagSpecRec["specimen_dec"]),float(PmagSpecRec["specimen_inc"]),calculation_type))
saveit=input("Save this interpretation? [y]/n \n")
if saveit!="n":
changeS=1
#
# put in details
#
angle,direction_type,setangle="","",0
if len(CurrRecs)>0:
replace=input(" [0] add new component, or [1] replace existing interpretation(s) [default is replace] ")
if replace=="1" or replace=="":
CurrRecs=[]
PmagSpecRec['specimen_comp_name']='A'
CurrRecs.append(PmagSpecRec)
else:
print('These are the current component names for this specimen: ')
for trec in CurrRecs:print(trec['specimen_comp_name'])
compnum=input("Enter new component name: ")
PmagSpecRec['specimen_comp_name']=compnum
print("Adding new component: ",PmagSpecRec['specimen_comp_name'])
CurrRecs.append(PmagSpecRec)
else:
PmagSpecRec['specimen_comp_name']='A'
CurrRecs.append(PmagSpecRec)
k+=1
ans=""
else:
ans=""
else: # plots=1
k+=1
files={}
                locname=locname.replace('/','-')
print(PmagSpecRec)
for key in ZED.keys():
files[key]="LO:_"+locname+'_SI:_'+PmagSpecRec['er_site_name']+'_SA:_'+PmagSpecRec['er_sample_name']+'_SP:_'+s+'_CO:_'+coord+'_TY:_'+key+'_.'+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
titles={}
titles['demag']='DeMag Plot'
titles['zijd']='Zijderveld Plot'
titles['eqarea']='Equal Area Plot'
ZED = pmagplotlib.add_borders(ZED,titles,black,purple)
pmagplotlib.save_plots(ZED,files)
if len(CurrRecs)>0:
for rec in CurrRecs: PriorRecs.append(rec)
if changeS==1:
if len(PriorRecs)>0:
save_redo(PriorRecs,inspec)
else:
os.system('rm '+inspec)
CurrRecs,beg_pca,end_pca=[],"","" # next up
changeS=0
else: k+=1 # skip record - not enough data
if changeM==1:
pmag.magic_write(meas_file,meas_data,'magic_measurements') | NAME
zeq_magic.py
DESCRIPTION
reads in magic_measurements formatted file, makes plots of remanence decay
during demagnetization experiments. Reads in prior interpretations saved in
a pmag_specimens formatted file and allows re-interpretations of best-fit lines
and planes and saves (revised or new) interpretations in a pmag_specimens file.
    Interpretations are saved in the coordinate system used. Also allows judicious editing of
    measurements to eliminate "bad" measurements.  These are marked as such in the magic_measurements
    input file.  They are NOT deleted, just ignored.
SYNTAX
zeq_magic.py [command line options]
OPTIONS
-h prints help message and quits
-f MEASFILE: sets magic_measurements format input file, default: magic_measurements.txt
        -fsp SPECFILE: sets pmag_specimens format file with prior interpretations, default: zeq_specimens.txt
-Fp PLTFILE: sets filename for saved plot, default is name_type.fmt (where type is zijd, eqarea or decay curve)
-crd [s,g,t]: sets coordinate system, g=geographic, t=tilt adjusted, default: specimen coordinate system
-fsa SAMPFILE: sets er_samples format file with orientation information, default: er_samples.txt
-spc SPEC plots single specimen SPEC, saves plot with specified format
with optional -dir settings and quits
-dir [L,P,F][beg][end]: sets calculation type for principal component analysis, default is none
beg: starting step for PCA calculation
end: ending step for PCA calculation
[L,P,F]: calculation type for line, plane or fisher mean
must be used with -spc option
-fmt FMT: set format of saved plot [png,svg,jpg]
-A: suppresses averaging of replicate measurements, default is to average
-sav: saves all plots without review
SCREEN OUTPUT:
Specimen, N, a95, StepMin, StepMax, Dec, Inc, calculation type | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/zeq_magic2.py#L17-L729 |
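The geoblock/tiltblock loops in main() above rotate each demagnetization step from specimen to geographic and then to tilt-corrected coordinates with pmag.dogeo and pmag.dotilt. The short sketch below reproduces that transformation for a single direction; the direction, orientation, and bedding values are made up for illustration, and pmag is assumed to be importable as pmagpy.pmag.

import pmagpy.pmag as pmag

dec_s, inc_s = 41.0, 22.5                   # hypothetical direction in specimen coordinates
sample_azimuth, sample_dip = 127.0, -34.0   # hypothetical sample orientation
bed_dip_direction, bed_dip = 90.0, 20.0     # hypothetical bedding attitude

# rotate to geographic coordinates, as in the geoblock loop
d_geo, i_geo = pmag.dogeo(dec_s, inc_s, sample_azimuth, sample_dip)
# apply the structural (tilt) correction, as in the tiltblock loop
d_tilt, i_tilt = pmag.dotilt(d_geo, i_geo, bed_dip_direction, bed_dip)
print('geographic: %7.1f %7.1f   tilt corrected: %7.1f %7.1f' % (d_geo, i_geo, d_tilt, i_tilt))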
PmagPy/PmagPy | pmagpy/controlled_vocabularies2.py | Vocabulary.get_one_meth_type | def get_one_meth_type(self, mtype, method_list):
"""
Get all codes of one type (i.e., 'anisotropy_estimation')
"""
cond = method_list['dtype'] == mtype
codes = method_list[cond]
return codes | python | def get_one_meth_type(self, mtype, method_list):
"""
Get all codes of one type (i.e., 'anisotropy_estimation')
"""
cond = method_list['dtype'] == mtype
codes = method_list[cond]
return codes | Get all codes of one type (i.e., 'anisotropy_estimation') | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/controlled_vocabularies2.py#L29-L35 |
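get_one_meth_type is a plain pandas boolean-mask filter; the sketch below reproduces the same pattern on a small made-up method_list DataFrame (the codes and dtypes are hypothetical stand-ins for the real vocabulary tables).

import pandas as pd

method_list = pd.DataFrame({'dtype': ['anisotropy_estimation', 'lab_protocol', 'field_sampling']},
                           index=['LP-AN-MS', 'LP-DIR-AF', 'FS-FD'])   # toy method codes

cond = method_list['dtype'] == 'anisotropy_estimation'   # boolean mask, as in get_one_meth_type
codes = method_list[cond]                                 # only the anisotropy_estimation rows
print(codes)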
PmagPy/PmagPy | pmagpy/controlled_vocabularies2.py | Vocabulary.get_one_meth_category | def get_one_meth_category(self, category, all_codes, code_types):
"""
Get all codes in one category (i.e., all pmag codes).
        This can include multiple method types (i.e., 'anisotropy_estimation', 'sample_preparation', etc.)
"""
categories = Series(code_types[code_types[category] == True].index)
cond = all_codes['dtype'].isin(categories)
codes = all_codes[cond]
return codes | python | def get_one_meth_category(self, category, all_codes, code_types):
"""
Get all codes in one category (i.e., all pmag codes).
        This can include multiple method types (i.e., 'anisotropy_estimation', 'sample_preparation', etc.)
"""
categories = Series(code_types[code_types[category] == True].index)
cond = all_codes['dtype'].isin(categories)
codes = all_codes[cond]
return codes | Get all codes in one category (i.e., all pmag codes).
This can include multiple method types (i.e., 'anisotropy_estimation', 'sample_preparation', etc.) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/controlled_vocabularies2.py#L37-L45
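get_one_meth_category combines a boolean column lookup on code_types with Series.isin on all_codes; the sketch below mirrors that on toy tables (the category name 'pmag' and the codes are hypothetical).

import pandas as pd
from pandas import Series

# one row per method type, one boolean column per category (toy data)
code_types = pd.DataFrame({'pmag': [True, False, True]},
                          index=['anisotropy_estimation', 'geochronology', 'lab_protocol'])
# one row per method code (toy data)
all_codes = pd.DataFrame({'dtype': ['anisotropy_estimation', 'geochronology', 'lab_protocol']},
                         index=['LP-AN-MS', 'GM-ARAR', 'LP-DIR-AF'])

categories = Series(code_types[code_types['pmag'] == True].index)
cond = all_codes['dtype'].isin(categories)   # keep only codes whose dtype falls in the category
print(all_codes[cond])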
PmagPy/PmagPy | programs/azdip_magic.py | main | def main():
"""
NAME
azdip_magic.py
DESCRIPTION
takes space delimited AzDip file and converts to MagIC formatted tables
SYNTAX
azdip_magic.py [command line options]
OPTIONS
-f FILE: specify input file
-Fsa FILE: specify output file, default is: er_samples.txt/samples.txt
-ncn NCON: specify naming convention: default is #1 below
-mcd: specify sampling method codes as a colon delimited string: [default is: FS-FD]
FS-FD field sampling done with a drill
FS-H field sampling done with hand samples
FS-LOC-GPS field location done with GPS
FS-LOC-MAP field location done with map
SO-POM a Pomeroy orientation device was used
SO-ASC an ASC orientation device was used
SO-MAG orientation with magnetic compass
-loc: location name, default="unknown"
-app appends to existing samples file, default is to overwrite
INPUT FORMAT
Input files must be space delimited:
Samp Az Dip Strike Dip
Orientation convention:
Lab arrow azimuth = mag_azimuth; Lab arrow dip = 90-field_dip
e.g. field_dip is degrees from horizontal of drill direction
Magnetic declination convention:
Az is already corrected in file
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
        [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitrary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name same as sample
[6] site is entered under a separate column -- NOT CURRENTLY SUPPORTED
[7-Z] [XXXX]YYY: XXXX is site designation with Z characters with sample name XXXXYYYY
        NB: all others you will have to customize yourself
or e-mail [email protected] for help.
OUTPUT
output saved in samples file will overwrite any existing files
"""
args = sys.argv
if "-h" in args:
print(main.__doc__)
sys.exit()
dataframe = extractor.command_line_dataframe([['f', False, 'orient.txt'], ['Fsa', False, 'samples.txt'], ['ncn', False, "1"], ['mcd', False, 'FS-FD'], ['loc', False, 'unknown'], ['app', False, False], ['WD', False, '.'], ['ID', False, '.'], ['DM', False, 3]])
checked_args = extractor.extract_and_check_args(args, dataframe)
#print('checked_args:', checked_args)
orient_file, samp_file, samp_con, method_codes, location_name, append, output_dir, input_dir, data_model = extractor.get_vars(['f', 'Fsa', 'ncn', 'mcd', 'loc', 'app', 'WD', 'ID', 'DM'], checked_args)
if len(str(samp_con)) > 1:
samp_con, Z = samp_con.split('-')
Z = float(Z)
else:
Z = 1
ipmag.azdip_magic(orient_file, samp_file, samp_con, Z, method_codes, location_name, append, output_dir, input_dir, data_model) | python | def main():
"""
NAME
azdip_magic.py
DESCRIPTION
takes space delimited AzDip file and converts to MagIC formatted tables
SYNTAX
azdip_magic.py [command line options]
OPTIONS
-f FILE: specify input file
-Fsa FILE: specify output file, default is: er_samples.txt/samples.txt
-ncn NCON: specify naming convention: default is #1 below
-mcd: specify sampling method codes as a colon delimited string: [default is: FS-FD]
FS-FD field sampling done with a drill
FS-H field sampling done with hand samples
FS-LOC-GPS field location done with GPS
FS-LOC-MAP field location done with map
SO-POM a Pomeroy orientation device was used
SO-ASC an ASC orientation device was used
SO-MAG orientation with magnetic compass
-loc: location name, default="unknown"
-app appends to existing samples file, default is to overwrite
INPUT FORMAT
Input files must be space delimited:
Samp Az Dip Strike Dip
Orientation convention:
Lab arrow azimuth = mag_azimuth; Lab arrow dip = 90-field_dip
e.g. field_dip is degrees from horizontal of drill direction
Magnetic declination convention:
Az is already corrected in file
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
        [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitrary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name same as sample
[6] site is entered under a separate column -- NOT CURRENTLY SUPPORTED
[7-Z] [XXXX]YYY: XXXX is site designation with Z characters with sample name XXXXYYYY
        NB: all others you will have to customize yourself
or e-mail [email protected] for help.
OUTPUT
output saved in samples file will overwrite any existing files
"""
args = sys.argv
if "-h" in args:
print(main.__doc__)
sys.exit()
dataframe = extractor.command_line_dataframe([['f', False, 'orient.txt'], ['Fsa', False, 'samples.txt'], ['ncn', False, "1"], ['mcd', False, 'FS-FD'], ['loc', False, 'unknown'], ['app', False, False], ['WD', False, '.'], ['ID', False, '.'], ['DM', False, 3]])
checked_args = extractor.extract_and_check_args(args, dataframe)
#print('checked_args:', checked_args)
orient_file, samp_file, samp_con, method_codes, location_name, append, output_dir, input_dir, data_model = extractor.get_vars(['f', 'Fsa', 'ncn', 'mcd', 'loc', 'app', 'WD', 'ID', 'DM'], checked_args)
if len(str(samp_con)) > 1:
samp_con, Z = samp_con.split('-')
Z = float(Z)
else:
Z = 1
ipmag.azdip_magic(orient_file, samp_file, samp_con, Z, method_codes, location_name, append, output_dir, input_dir, data_model) | NAME
azdip_magic.py
DESCRIPTION
takes space delimited AzDip file and converts to MagIC formatted tables
SYNTAX
azdip_magic.py [command line options]
OPTIONS
-f FILE: specify input file
-Fsa FILE: specify output file, default is: er_samples.txt/samples.txt
-ncn NCON: specify naming convention: default is #1 below
-mcd: specify sampling method codes as a colon delimited string: [default is: FS-FD]
FS-FD field sampling done with a drill
FS-H field sampling done with hand samples
FS-LOC-GPS field location done with GPS
FS-LOC-MAP field location done with map
SO-POM a Pomeroy orientation device was used
SO-ASC an ASC orientation device was used
SO-MAG orientation with magnetic compass
-loc: location name, default="unknown"
-app appends to existing samples file, default is to overwrite
INPUT FORMAT
Input files must be space delimited:
Samp Az Dip Strike Dip
Orientation convention:
Lab arrow azimuth = mag_azimuth; Lab arrow dip = 90-field_dip
e.g. field_dip is degrees from horizontal of drill direction
Magnetic declination convention:
Az is already corrected in file
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
        [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitrary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name same as sample
[6] site is entered under a separate column -- NOT CURRENTLY SUPPORTED
[7-Z] [XXXX]YYY: XXXX is site designation with Z characters with sample name XXXXYYYY
        NB: all others you will have to customize yourself
or e-mail [email protected] for help.
OUTPUT
output saved in samples file will overwrite any existing files | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/azdip_magic.py#L10-L79 |
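The -ncn handling in main() above splits a hyphenated naming convention such as 4-Z or 7-Z into the convention number and the character count Z before passing both to ipmag.azdip_magic. A minimal sketch of that parsing, with a made-up -ncn value:

samp_con = "7-3"                 # hypothetical -ncn value: convention 7 with a 3-character site designation
if len(str(samp_con)) > 1:
    samp_con, Z = samp_con.split('-')
    Z = float(Z)
else:
    Z = 1
print(samp_con, Z)               # -> 7 3.0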
PmagPy/PmagPy | programs/plot_geomagia.py | main | def main():
"""
NAME
plot_geomagia.py
DESCRIPTION
makes a map and VADM plot of geomagia download file
SYNTAX
plot_geomagia.py [command line options]
OPTIONS
-h prints help message and quits
-f FILE, specify geomagia download file
-res [c,l,i,h] specify resolution (crude,low,intermediate,high)
-etp plot the etopo20 topographic mesh
-pad [LAT LON] pad bounding box by LAT/LON (default is [.5 .5] degrees)
-grd SPACE specify grid spacing
        -prj [lcc], specify projection (lcc=Lambert conformal conic), default is Mercator
-o color ocean blue/land green (default is not)
-d plot details of rivers, boundaries, etc.
-sav save plot and quit quietly
-fmt [png,svg,eps,jpg,pdf] specify format for output, default is pdf
DEFAULTS
resolution: intermediate
saved images are in pdf
"""
dir_path='.'
names,res,proj,locs,padlon,padlat,fancy,gridspace,details=[],'l','lcc','',0,0,0,15,1
Age_bounds=[-5000,2000]
Lat_bounds=[20,45]
Lon_bounds=[15,55]
fmt='pdf'
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind = sys.argv.index('-f')
sites_file=sys.argv[ind+1]
if '-res' in sys.argv:
ind = sys.argv.index('-res')
res=sys.argv[ind+1]
if '-etp' in sys.argv:fancy=1
if '-o' in sys.argv:ocean=1
if '-d' in sys.argv:details=1
if '-prj' in sys.argv:
ind = sys.argv.index('-prj')
proj=sys.argv[ind+1]
if '-fmt' in sys.argv:
ind = sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
verbose=pmagplotlib.verbose
if '-sav' in sys.argv:
verbose=0
if '-pad' in sys.argv:
ind = sys.argv.index('-pad')
padlat=float(sys.argv[ind+1])
padlon=float(sys.argv[ind+2])
if '-grd' in sys.argv:
ind = sys.argv.index('-grd')
gridspace=float(sys.argv[ind+1])
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
sites_file=dir_path+'/'+sites_file
geo_in=open(sites_file,'r').readlines()
Age,AgeErr,Vadm,VadmErr,slats,slons=[],[],[],[],[],[]
    for line in geo_in[2:]: # skip top two rows
rec=line.split()
if float(rec[0])>Age_bounds[0] and float(rec[0])<Age_bounds[1] \
and float(rec[12])>Lat_bounds[0] and float(rec[12]) < Lat_bounds[1]\
and float(rec[13])>Lon_bounds[0] and float(rec[13])<Lon_bounds[1]:
Age.append(float(rec[0]))
AgeErr.append(float(rec[1]))
Vadm.append(10.*float(rec[6]))
VadmErr.append(10.*float(rec[7]))
slats.append(float(rec[12]))
slons.append(float(rec[13]))
FIGS={'map':1,'vadms':2}
pmagplotlib.plot_init(FIGS['map'],6,6)
pmagplotlib.plot_init(FIGS['vadms'],6,6)
Opts={'res':res,'proj':proj,'loc_name':locs,'padlon':padlon,'padlat':padlat,'latmin':numpy.min(slats)-padlat,'latmax':numpy.max(slats)+padlat,'lonmin':numpy.min(slons)-padlon,'lonmax':numpy.max(slons)+padlon,'sym':'ro','boundinglat':0.,'pltgrid':1}
Opts['lon_0']=int(0.5*(numpy.min(slons)+numpy.max(slons)))
Opts['lat_0']=int(0.5*(numpy.min(slats)+numpy.max(slats)))
Opts['gridspace']=gridspace
if details==1:
Opts['details']={'coasts':1,'rivers':0,'states':1,'countries':1,'ocean':1}
else:
Opts['details']={'coasts':1,'rivers':0,'states':0,'countries':0,'ocean':1}
Opts['details']['fancy']=fancy
pmagplotlib.plot_map(FIGS['map'],slats,slons,Opts)
pmagplotlib.plot_xy(FIGS['vadms'],Age,Vadm,sym='bo',xlab='Age (Years CE)',ylab=r'VADM (ZAm$^2$)')
if verbose:pmagplotlib.draw_figs(FIGS)
files={}
for key in list(FIGS.keys()):
files[key]=key+'.'+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
titles={}
titles['map']='Map'
titles['vadms']='VADMs'
FIG = pmagplotlib.add_borders(FIGS,titles,black,purple)
pmagplotlib.save_plots(FIGS,files)
elif verbose:
ans=input(" S[a]ve to save plot, Return to quit: ")
if ans=="a":
pmagplotlib.save_plots(FIGS,files)
else:
pmagplotlib.save_plots(FIGS,files) | python | def main():
"""
NAME
plot_geomagia.py
DESCRIPTION
makes a map and VADM plot of geomagia download file
SYNTAX
plot_geomagia.py [command line options]
OPTIONS
-h prints help message and quits
-f FILE, specify geomagia download file
-res [c,l,i,h] specify resolution (crude,low,intermediate,high)
-etp plot the etopo20 topographic mesh
-pad [LAT LON] pad bounding box by LAT/LON (default is [.5 .5] degrees)
-grd SPACE specify grid spacing
        -prj [lcc], specify projection (lcc=Lambert conformal conic), default is Mercator
-o color ocean blue/land green (default is not)
-d plot details of rivers, boundaries, etc.
-sav save plot and quit quietly
-fmt [png,svg,eps,jpg,pdf] specify format for output, default is pdf
DEFAULTS
resolution: intermediate
saved images are in pdf
"""
dir_path='.'
names,res,proj,locs,padlon,padlat,fancy,gridspace,details=[],'l','lcc','',0,0,0,15,1
Age_bounds=[-5000,2000]
Lat_bounds=[20,45]
Lon_bounds=[15,55]
fmt='pdf'
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind = sys.argv.index('-f')
sites_file=sys.argv[ind+1]
if '-res' in sys.argv:
ind = sys.argv.index('-res')
res=sys.argv[ind+1]
if '-etp' in sys.argv:fancy=1
if '-o' in sys.argv:ocean=1
if '-d' in sys.argv:details=1
if '-prj' in sys.argv:
ind = sys.argv.index('-prj')
proj=sys.argv[ind+1]
if '-fmt' in sys.argv:
ind = sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
verbose=pmagplotlib.verbose
if '-sav' in sys.argv:
verbose=0
if '-pad' in sys.argv:
ind = sys.argv.index('-pad')
padlat=float(sys.argv[ind+1])
padlon=float(sys.argv[ind+2])
if '-grd' in sys.argv:
ind = sys.argv.index('-grd')
gridspace=float(sys.argv[ind+1])
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
sites_file=dir_path+'/'+sites_file
geo_in=open(sites_file,'r').readlines()
Age,AgeErr,Vadm,VadmErr,slats,slons=[],[],[],[],[],[]
    for line in geo_in[2:]: # skip top two rows
rec=line.split()
if float(rec[0])>Age_bounds[0] and float(rec[0])<Age_bounds[1] \
and float(rec[12])>Lat_bounds[0] and float(rec[12]) < Lat_bounds[1]\
and float(rec[13])>Lon_bounds[0] and float(rec[13])<Lon_bounds[1]:
Age.append(float(rec[0]))
AgeErr.append(float(rec[1]))
Vadm.append(10.*float(rec[6]))
VadmErr.append(10.*float(rec[7]))
slats.append(float(rec[12]))
slons.append(float(rec[13]))
FIGS={'map':1,'vadms':2}
pmagplotlib.plot_init(FIGS['map'],6,6)
pmagplotlib.plot_init(FIGS['vadms'],6,6)
Opts={'res':res,'proj':proj,'loc_name':locs,'padlon':padlon,'padlat':padlat,'latmin':numpy.min(slats)-padlat,'latmax':numpy.max(slats)+padlat,'lonmin':numpy.min(slons)-padlon,'lonmax':numpy.max(slons)+padlon,'sym':'ro','boundinglat':0.,'pltgrid':1}
Opts['lon_0']=int(0.5*(numpy.min(slons)+numpy.max(slons)))
Opts['lat_0']=int(0.5*(numpy.min(slats)+numpy.max(slats)))
Opts['gridspace']=gridspace
if details==1:
Opts['details']={'coasts':1,'rivers':0,'states':1,'countries':1,'ocean':1}
else:
Opts['details']={'coasts':1,'rivers':0,'states':0,'countries':0,'ocean':1}
Opts['details']['fancy']=fancy
pmagplotlib.plot_map(FIGS['map'],slats,slons,Opts)
pmagplotlib.plot_xy(FIGS['vadms'],Age,Vadm,sym='bo',xlab='Age (Years CE)',ylab=r'VADM (ZAm$^2$)')
if verbose:pmagplotlib.draw_figs(FIGS)
files={}
for key in list(FIGS.keys()):
files[key]=key+'.'+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
titles={}
titles['map']='Map'
titles['vadms']='VADMs'
FIG = pmagplotlib.add_borders(FIGS,titles,black,purple)
pmagplotlib.save_plots(FIGS,files)
elif verbose:
ans=input(" S[a]ve to save plot, Return to quit: ")
if ans=="a":
pmagplotlib.save_plots(FIGS,files)
else:
pmagplotlib.save_plots(FIGS,files) | NAME
plot_geomagia.py
DESCRIPTION
makes a map and VADM plot of geomagia download file
SYNTAX
plot_geomagia.py [command line options]
OPTIONS
-h prints help message and quits
-f FILE, specify geomagia download file
-res [c,l,i,h] specify resolution (crude,low,intermediate,high)
-etp plot the etopo20 topographic mesh
-pad [LAT LON] pad bounding box by LAT/LON (default is [.5 .5] degrees)
-grd SPACE specify grid spacing
    -prj [lcc], specify projection (lcc=Lambert conformal conic), default is Mercator
-o color ocean blue/land green (default is not)
-d plot details of rivers, boundaries, etc.
-sav save plot and quit quietly
-fmt [png,svg,eps,jpg,pdf] specify format for output, default is pdf
DEFAULTS
resolution: intermediate
saved images are in pdf | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/plot_geomagia.py#L15-L124 |
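Each geomagia record above is filtered line by line against hard-coded age, latitude, and longitude windows before the VADM column is scaled by 10 and collected for plotting. The sketch below applies the same test to a single made-up data line (the column indexing follows the loop above; the values are invented).

Age_bounds, Lat_bounds, Lon_bounds = [-5000, 2000], [20, 45], [15, 55]

line = "1500 50 0 0 0 0 8.2 0.4 0 0 0 0 35.1 33.0"   # hypothetical geomagia row
rec = line.split()
if Age_bounds[0] < float(rec[0]) < Age_bounds[1] \
        and Lat_bounds[0] < float(rec[12]) < Lat_bounds[1] \
        and Lon_bounds[0] < float(rec[13]) < Lon_bounds[1]:
    age, vadm = float(rec[0]), 10. * float(rec[6])   # scale column 6 as in the loop above
    print(age, vadm, float(rec[12]), float(rec[13]))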
PmagPy/PmagPy | programs/conversion_scripts/k15_magic.py | main | def main():
"""
NAME
k15_magic.py
DESCRIPTION
converts .k15 format data to magic_measurements format.
    assumes a Jelinek Kappabridge measurement scheme
SYNTAX
k15_magic.py [-h] [command line options]
OPTIONS
-h prints help message and quits
-DM DATA_MODEL: specify data model 2 or 3 (default 3)
-f KFILE: specify .k15 format input file
-F MFILE: specify measurement output file
-Fsa SFILE, specify sample file for output
-Fa AFILE, specify specimen file for output [rmag_anisotropy for data model 2 only]
#-ins INST: specify instrument that measurements were made on # not implemented
-spc NUM: specify number of digits for specimen ID, default is 0
-ncn NCOM: specify naming convention (default is #1)
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
        [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitrary length)
[4-Z] XXXXYYY: YYY is sample designation with Z characters from site XXX
[5] site name = sample name
[6] site name entered in site_name column in the orient.txt format input file -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
        NB: all others you will have to either customize yourself
            or e-mail [email protected] for help.
DEFAULTS
MFILE: measurements.txt
SFILE: samples.txt
AFILE: specimens.txt
INPUT
name [az,pl,strike,dip], followed by
3 rows of 5 measurements for each specimen
"""
args = sys.argv
if '-h' in args:
print(do_help())
sys.exit()
# def k15_magic(k15file, specnum=0, sample_naming_con='1', er_location_name="unknown", measfile='magic_measurements.txt', sampfile="er_samples.txt", aniso_outfile='rmag_anisotropy.txt', result_file="rmag_results.txt", input_dir_path='.', output_dir_path='.'):
dataframe = extractor.command_line_dataframe([['f', True, ''], ['F', False, 'measurements.txt'], ['Fsa', False, 'samples.txt'], ['Fa', False, 'specimens.txt'], [
'Fr', False, 'rmag_results.txt'], ['spc', False, 0], ['ncn', False, '1'], ['loc', False, 'unknown'], ['WD', False, '.'], ['ID', False, '.'], ['DM', False, 3]])
checked_args = extractor.extract_and_check_args(args, dataframe)
k15file, measfile, sampfile, aniso_outfile, result_file, specnum, sample_naming_con, location_name, output_dir_path, input_dir_path, data_model_num = extractor.get_vars(
['f', 'F', 'Fsa', 'Fa', 'Fr', 'spc', 'ncn', 'loc', 'WD', 'ID', 'DM'], checked_args)
program_ran, error_message = convert.k15(k15file, specnum=specnum, sample_naming_con=sample_naming_con, location=location_name, meas_file=measfile,
samp_file=sampfile, aniso_outfile=aniso_outfile, result_file=result_file, input_dir_path=input_dir_path, dir_path=output_dir_path, data_model_num=data_model_num) | python | def main():
"""
NAME
k15_magic.py
DESCRIPTION
converts .k15 format data to magic_measurements format.
    assumes a Jelinek Kappabridge measurement scheme
SYNTAX
k15_magic.py [-h] [command line options]
OPTIONS
-h prints help message and quits
-DM DATA_MODEL: specify data model 2 or 3 (default 3)
-f KFILE: specify .k15 format input file
-F MFILE: specify measurement output file
-Fsa SFILE, specify sample file for output
-Fa AFILE, specify specimen file for output [rmag_anisotropy for data model 2 only]
#-ins INST: specify instrument that measurements were made on # not implemented
-spc NUM: specify number of digits for specimen ID, default is 0
-ncn NCOM: specify naming convention (default is #1)
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
        [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitrary length)
[4-Z] XXXXYYY: YYY is sample designation with Z characters from site XXX
[5] site name = sample name
[6] site name entered in site_name column in the orient.txt format input file -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
        NB: all others you will have to either customize yourself
            or e-mail [email protected] for help.
DEFAULTS
MFILE: measurements.txt
SFILE: samples.txt
AFILE: specimens.txt
INPUT
name [az,pl,strike,dip], followed by
3 rows of 5 measurements for each specimen
"""
args = sys.argv
if '-h' in args:
print(do_help())
sys.exit()
# def k15_magic(k15file, specnum=0, sample_naming_con='1', er_location_name="unknown", measfile='magic_measurements.txt', sampfile="er_samples.txt", aniso_outfile='rmag_anisotropy.txt', result_file="rmag_results.txt", input_dir_path='.', output_dir_path='.'):
dataframe = extractor.command_line_dataframe([['f', True, ''], ['F', False, 'measurements.txt'], ['Fsa', False, 'samples.txt'], ['Fa', False, 'specimens.txt'], [
'Fr', False, 'rmag_results.txt'], ['spc', False, 0], ['ncn', False, '1'], ['loc', False, 'unknown'], ['WD', False, '.'], ['ID', False, '.'], ['DM', False, 3]])
checked_args = extractor.extract_and_check_args(args, dataframe)
k15file, measfile, sampfile, aniso_outfile, result_file, specnum, sample_naming_con, location_name, output_dir_path, input_dir_path, data_model_num = extractor.get_vars(
['f', 'F', 'Fsa', 'Fa', 'Fr', 'spc', 'ncn', 'loc', 'WD', 'ID', 'DM'], checked_args)
program_ran, error_message = convert.k15(k15file, specnum=specnum, sample_naming_con=sample_naming_con, location=location_name, meas_file=measfile,
samp_file=sampfile, aniso_outfile=aniso_outfile, result_file=result_file, input_dir_path=input_dir_path, dir_path=output_dir_path, data_model_num=data_model_num) | NAME
k15_magic.py
DESCRIPTION
converts .k15 format data to magic_measurements format.
    assumes a Jelinek Kappabridge measurement scheme
SYNTAX
k15_magic.py [-h] [command line options]
OPTIONS
-h prints help message and quits
-DM DATA_MODEL: specify data model 2 or 3 (default 3)
-f KFILE: specify .k15 format input file
-F MFILE: specify measurement output file
-Fsa SFILE, specify sample file for output
-Fa AFILE, specify specimen file for output [rmag_anisotropy for data model 2 only]
#-ins INST: specify instrument that measurements were made on # not implemented
-spc NUM: specify number of digits for specimen ID, default is 0
-ncn NCOM: specify naming convention (default is #1)
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
        [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitrary length)
[4-Z] XXXXYYY: YYY is sample designation with Z characters from site XXX
[5] site name = sample name
[6] site name entered in site_name column in the orient.txt format input file -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
        NB: all others you will have to either customize yourself
            or e-mail [email protected] for help.
DEFAULTS
MFILE: measurements.txt
SFILE: samples.txt
AFILE: specimens.txt
INPUT
name [az,pl,strike,dip], followed by
3 rows of 5 measurements for each specimen | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/conversion_scripts/k15_magic.py#L8-L67 |
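main() above delegates the conversion to convert.k15; a minimal sketch of calling the same routine directly from Python, using only the keyword arguments visible in that call, is shown below. The input file name is hypothetical, and the import line is an assumption about how the conversion module is exposed in pmagpy.

from pmagpy import convert_2_magic as convert   # assumed import path for the conversion module

program_ran, error_message = convert.k15('my_data.k15',        # hypothetical .k15 input file
                                         specnum=0,
                                         sample_naming_con='1',
                                         location='unknown',
                                         meas_file='measurements.txt',
                                         samp_file='samples.txt',
                                         aniso_outfile='specimens.txt',
                                         result_file='rmag_results.txt',
                                         input_dir_path='.',
                                         dir_path='.',
                                         data_model_num=3)
if not program_ran:
    print(error_message)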
PmagPy/PmagPy | programs/conversion_scripts2/cit_magic2.py | main | def main(command_line=True, **kwargs):
"""
NAME
cit_magic.py
DESCRIPTION
converts CIT and .sam format files to magic_measurements format files
SYNTAX
cit_magic.py [command line options]
OPTIONS
-h: prints the help message and quits.
-usr USER: identify user, default is ""
-f FILE: specify .sam format input file, required
-WD Working Directory: output directory where files are going to go
-fsi SITEFILE : specify file with site names and locations [tab delimited magic file]
-F FILE: specify output measurements file, default is magic_measurements.txt
-Fsp FILE: specify output er_specimens.txt file, default is er_specimens.txt
-Fsi FILE: specify output er_sites.txt file, default is er_sites.txt
-Fsa FILE: specify output er_samples.txt file, default is er_samples.txt # LORI
-n [gm,kg,cc,m3]: specify normalization
-A: don't average replicate measurements
-spc NUM : specify number of characters to designate a specimen, default = 0
-ncn NCON: specify naming convention
-loc LOCNAME : specify location/study name, must have either LOCNAME or SITEFILE or be a synthetic
-mcd [FS-FD:SO-MAG,.....] colon delimited list for method codes applied to all specimens in .sam file
        -dc B PHI THETA: dc lab field (in microTesla), phi, and theta must be input as a tuple "(DC,PHI,THETA)". If not input, the user will be asked for values; this is advantageous if there are differing dc fields between steps or specimens. Note: this currently only works with the decimal IZZI naming convention (XXX.0,1,2 where XXX is the treatment temperature and 0 is a zero field step, 1 is in field, and 2 is a pTRM check). All other steps are hardcoded to dc_field = 0.
INPUT
        Best to put separate experiments in separate files (all AF, thermal, thellier, trm acquisition, Shaw, etc.)
NOTES:
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
        [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [3: default] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitrary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name = sample name
[6] site name entered in site_name column in the orient.txt format input file -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
        NB: all others you will have to either customize yourself
            or e-mail [email protected] or [email protected] for help.
"""
#
# NB: use PHI, THETA = -1 -1 to signal that it changes, i.e. in anisotropy experiment
# -ac B : peak AF field (in mT) for ARM acquisition, default is none
#
#initialize variables
norm='cc'
samp_con,Z='3',1
meas_file='magic_measurements.txt'
spec_file='er_specimens.txt'
samp_file='er_samples.txt'
site_file='er_sites.txt'
ErSpecs,ErSamps,ErSites,ErLocs,ErCits=[],[],[],[],[]
MeasRecs=[]
specnum,units,locname=0,"1","unknown"
citation="This study"
dir_path='.'
args=sys.argv
if command_line:
if '-WD' in args:
ind=args.index("-WD")
dir_path=args[ind+1]
if "-h" in args:
print(main.__doc__)
return False
if "-usr" in args:
ind=args.index("-usr")
user=args[ind+1]
if '-F' in args:
ind=args.index("-F")
meas_file=args[ind+1]
if '-Fsp' in args:
ind=args.index("-Fsp")
spec_file=args[ind+1]
if '-Fsa' in args:
ind=args.index("-Fsa")
samp_file=args[ind+1]
if '-Fsi' in args: # LORI addition
ind=args.index("-Fsi")
site_file=args[ind+1]
if '-loc' in args:
ind=args.index("-loc")
locname=args[ind+1]
if '-mcd' in args:
ind=args.index("-mcd")
methods=args[ind+1]
else:
methods='SO-MAG'
if '-spc' in args:
ind=args.index("-spc")
specnum=-int(args[ind+1])
if '-n' in args:
ind=args.index("-n")
norm=args[ind+1]
if "-A" in args:
avg=1
else:
avg=0
if '-dc' in args:
ind=args.index('-dc')
DC_FIELD,DC_PHI,DC_THETA=list(map(float,args[ind+1].strip('( ) [ ]').split(',')))
DC_FIELD *= 1e-6
yn=''
GET_DC_PARAMS=False
else: GET_DC_PARAMS,DC_FIELD,DC_PHI,DC_THETA,yn=True,0,0,-90,''
if "-ncn" in args:
ind=args.index("-ncn")
samp_con=sys.argv[ind+1]
if "4" in samp_con:
if "-" not in samp_con:
print("option [4] must be in form 4-Z where Z is an integer")
return False, "naming convention option [4] must be in form 4-Z where Z is an integer"
else:
Z=samp_con.split("-")[1]
samp_con="4"
elif "7" in samp_con:
if "-" not in samp_con:
print("option [7] must be in form 7-Z where Z is an integer")
return False, "naming convention option [7] must be in form 7-Z where Z is an integer"
else:
Z=samp_con.split("-")[1]
samp_con="7"
if '-f' in args:
ind=args.index("-f")
magfile=args[ind+1]
if '-ID' in args:
ind = args.index('-ID')
input_dir_path = args[ind+1]
else:
input_dir_path = os.path.split(magfile)[0]
output_dir_path = dir_path
# LJ
# if you are running as a module:
elif not command_line:
dir_path = kwargs.get('dir_path', '.')
user = kwargs.get('user', '')
meas_file = kwargs.get('meas_file', 'magic_measurements.txt') # outfile
spec_file = kwargs.get('spec_file', 'er_specimens.txt') # specimen outfile
samp_file = kwargs.get('samp_file', 'er_samples.txt') # sample outfile
site_file = kwargs.get('site_file', 'er_sites.txt') # site outfile
locname = kwargs.get('locname', '')
methods = kwargs.get('methods', ['SO-MAG'])
specnum = -int(kwargs.get('specnum', 0))
norm = kwargs.get('norm', 'cc')
avg = kwargs.get('avg', 0) # 0 means do average, 1 means don't
samp_con = kwargs.get('samp_con', '3')
magfile = kwargs.get('magfile', '')
input_dir_path = kwargs.get('input_dir_path', os.path.split(magfile)[0])
output_dir_path = dir_path
DC_FIELD,DC_PHI,DC_THETA = list(map(float, kwargs.get('dc_params', (0,0,-90))))
DC_FIELD *= 1e-6
yn = ''
if DC_FIELD==0 and DC_PHI==0 and DC_THETA==-90: GET_DC_PARAMS=True
else: GET_DC_PARAMS=False
# done with module-specific stuff
# formatting and checking variables
if "4" in samp_con:
if "-" not in samp_con:
print("option [4] must be in form 4-Z where Z is an integer")
return False, "naming convention option [4] must be in form 4-Z where Z is an integer"
else:
Z=samp_con.split("-")[1]
samp_con="4"
elif "7" in samp_con:
if "-" not in samp_con:
print("option [7] must be in form 7-Z where Z is an integer")
return False, "naming convention option [7] must be in form 7-Z where Z is an integer"
else:
Z=samp_con.split("-")[1]
samp_con="7"
magfile = os.path.join(input_dir_path, magfile)
spec_file = os.path.join(output_dir_path, spec_file)
samp_file = os.path.join(output_dir_path, samp_file)
site_file = os.path.join(output_dir_path, site_file)
meas_file= os.path.join(output_dir_path, meas_file)
FIRST_GET_DC=True
try:
with open(magfile,'r') as file_input:
File = file_input.readlines()
except Exception as ex:
print("bad sam file name: ", magfile)
return False, "bad sam file name"
if len(File) == 1: File = File[0].split('\r'); File = [x+"\r\n" for x in File]
sids,ln,format=[],0,'CIT'
formats=['CIT','2G','APP','JRA']
if File[ln].strip()=='CIT': ln+=1
ErLocRec={}
ErLocRec["er_location_name"]=locname
ErLocRec["er_citation_names"]=citation
comment=File[ln]
if comment=='CIT':
format=comment
ln+=1
comment=File[ln]
print(comment)
ln+=1
specimens,samples,sites=[],[],[]
if format=='CIT':
line=File[ln].split()
site_lat=line[0]
site_lon=line[1]
ErLocRec["location_begin_lat"]=site_lat
ErLocRec["location_begin_lon"]=site_lon
ErLocRec["location_end_lat"]=site_lat
ErLocRec["location_end_lon"]=site_lon
ErLocs.append(ErLocRec)
try: Cdec=float(line[2])
except ValueError: pdb.set_trace()
for k in range(ln+1,len(File)):
line=File[k]
rec=line.split()
if rec == []: continue
specimen=rec[0]
specimens.append(specimen)
for specimen in specimens:
ErSpecRec,ErSampRec,ErSiteRec={},{},{}
if specnum!=0:
sample=specimen[:specnum]
else: sample=specimen
site=pmag.parse_site(sample,samp_con,Z)
ErSpecRec['er_specimen_name']=specimen
ErSpecRec['er_sample_name']=sample
ErSpecRec['er_site_name']=site
ErSpecRec['er_location_name']=locname
ErSpecRec['er_citation_names']=citation
ErSampRec['er_sample_name']=sample
ErSampRec['er_site_name']=site
ErSampRec['er_location_name']=locname
ErSampRec['er_citation_names']=citation
ErSampRec['magic_method_codes']=methods
ErSampRec['sample_declination_correction']='%7.1f'%(Cdec)
ErSiteRec['er_site_name']=site
ErSiteRec['er_location_name']=locname
ErSiteRec['er_citation_names']=citation
ErSiteRec['site_lat']=site_lat
ErSiteRec['site_lon']=site_lon
with open(os.path.join(input_dir_path,specimen),'r') as finput:
Lines = list(finput.readlines())
comment = ""
line=Lines[0].split()
if len(line)>2:
comment=line[2]
info=Lines[1].split()
vol=float(info[-1])
if vol!=1.0:
if norm=='cc':units="1"
if norm=='m3':units="2"
ErSpecRec['specimen_weight']=""
if units=="1" or "":
ErSpecRec['specimen_volume']='%10.3e'%(vol*1e-6)
else:
ErSpecRec['specimen_volume']='%10.3e'%(vol)
else:
if norm=='cc':units="1"
if norm=='m3':units="2"
ErSpecRec['specimen_volume']=""
if units=="1" or "":
ErSpecRec['specimen_weight']='%10.3e'%(vol*1e-3)
else:
ErSpecRec['specimen_weight']='%10.3e'%(vol)
dip=float(info[-2])
dip_direction=float(info[-3])+Cdec+90.
sample_dip=-float(info[-4])
sample_azimuth=float(info[-5])+Cdec-90.
if len(info)>5:
ErSampRec['sample_height']=info[-6]
else:
ErSampRec['sample_height']='0'
ErSampRec['sample_azimuth']='%7.1f'%(sample_azimuth)
ErSampRec['sample_dip']='%7.1f'%(sample_dip)
ErSampRec['sample_bed_dip']='%7.1f'%(dip)
ErSampRec['sample_bed_dip_direction']='%7.1f'%(dip_direction)
ErSampRec['sample_class']=''
ErSampRec['sample_type']=''
ErSampRec['sample_lithology']=''
if Cdec!=0 or Cdec!="":
ErSampRec['magic_method_codes']='SO-CMD-NORTH'
else:
ErSampRec['magic_method_codes']='SO-MAG'
for line in Lines[2:len(Lines)]:
if line == '\n': continue
MeasRec=ErSpecRec.copy()
            # Remove specimen_volume and specimen_weight as they do not exist in the magic_measurements table
del MeasRec["specimen_volume"]
del MeasRec["specimen_weight"]
treat_type=line[0:3]
if treat_type[1] == '.':
treat_type = 'NRM'
treat=line[2:6]
try: float(treat)
except ValueError: treat = line[3:6]
if treat_type.startswith('NRM'):
MeasRec['magic_method_codes']='LT-NO'
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']='273'
MeasRec['treatment_dc_field']='0'
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
MeasRec['treatment_ac_field']='0'
elif treat_type.startswith('AF'):
MeasRec['magic_method_codes']='LT-AF-Z'
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']='273'
MeasRec['treatment_dc_field']='0'
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
if treat.strip() == '':
MeasRec['treatment_ac_field']='0'
else:
MeasRec['treatment_ac_field']='%10.3e'%(float(treat)*1e-3)
elif treat_type.startswith('ARM'):
MeasRec['magic_method_codes']="LP-ARM"
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']='273'
MeasRec['treatment_dc_field']='0'
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
if treat.strip() == '':
MeasRec['treatment_ac_field']='0'
else:
MeasRec['magic_method_codes']="LP-ARM-AFD"
MeasRec['treatment_ac_field']='%10.3e'%(float(treat)*1e-3)
elif treat_type.startswith('TT'):
MeasRec['magic_method_codes']='LT-T-Z'
MeasRec['measurement_temp']='273'
if treat.strip() == '':
MeasRec['treatment_temp']='273'
else:
MeasRec['treatment_temp']='%7.1f'%(float(treat)+273)
MeasRec['treatment_dc_field']='0'
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
MeasRec['treatment_ac_field']='0'
elif treat_type.startswith('LT') or treat_type.startswith('LN2'):
MeasRec['magic_method_codes']='LT-LT-Z'
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']='77'
MeasRec['treatment_dc_field']='0'
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
MeasRec['treatment_ac_field']='0'
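            # Numeric treatment codes follow the decimal IZZI convention described in the
            # -dc help above: XXX.0 = zero field, XXX.1 = in field, XXX.2 = pTRM check at XXX C.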
elif line[4] == '0': #assume decimal IZZI format 0 field thus can hardcode the dc fields
MeasRec['magic_method_codes']='LT-T-Z'
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']=str(int(treat_type) + 273)
MeasRec['treatment_dc_field']='0'
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
MeasRec['treatment_ac_field']='0'
elif line[4] == '1': #assume decimal IZZI format in constant field
if GET_DC_PARAMS: GET_DC_PARAMS, FIRST_GET_DC, yn, DC_FIELD, DC_PHI, DC_THETA = get_dc_params(FIRST_GET_DC,specimen,treat_type,yn)
MeasRec['magic_method_codes']='LT-T-I'
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']=str(int(treat_type) + 273)
MeasRec['treatment_dc_field']='%1.2e'%DC_FIELD
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
MeasRec['treatment_ac_field']='0'
elif line[4] == '2': #assume decimal IZZI format PTRM step
if GET_DC_PARAMS: GET_DC_PARAMS, FIRST_GET_DC, yn, DC_FIELD, DC_PHI, DC_THETA = get_dc_params(FIRST_GET_DC,specimen,treat_type,yn)
MeasRec['magic_method_codes']='LT-PTRM-I'
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']=str(int(treat_type) + 273)
MeasRec['treatment_dc_field']='%1.2e'%DC_FIELD
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
MeasRec['treatment_ac_field']='0'
else:
print("trouble with your treatment steps")
MeasRec['measurement_dec']=line[46:51]
MeasRec['measurement_inc']=line[52:58]
M='%8.2e'%(float(line[31:39])*vol*1e-3) # convert to Am2
MeasRec['measurement_magn_moment']=M
MeasRec['measurement_csd']='%7.1f'%(eval(line[41:46]))
MeasRec["measurement_positions"]='1'
MeasRec['measurement_standard']='u'
if len(line)>60:
MeasRec['magic_instrument_codes']=line[85:]
MeasRec['measurement_sd_x']='%8.2e'%(float(line[58:67])*1e-8) #(convert e-5emu to Am2)
MeasRec['measurement_sd_y']='%8.2e'%(float(line[67:76])*1e-8)
MeasRec['measurement_sd_z']='%8.2e'%(float(line[76:85])*1e-8)
MeasRecs.append(MeasRec)
ErSpecs.append(ErSpecRec)
if sample not in samples:
samples.append(sample)
ErSamps.append(ErSampRec)
site=pmag.parse_site(sample,samp_con,Z)
if site not in sites:
sites.append(site)
ErSites.append(ErSiteRec)
pmag.magic_write(spec_file,ErSpecs,'er_specimens')
print('specimens stored in ',spec_file)
pmag.magic_write(samp_file,ErSamps,'er_samples')
print('samples stored in ',samp_file)
pmag.magic_write(site_file,ErSites,'er_sites')
print('sites stored in ', site_file)
Fixed=pmag.measurements_methods(MeasRecs,avg)
pmag.magic_write(meas_file,Fixed,'magic_measurements')
print('data stored in ',meas_file)
return True, meas_file | python | def main(command_line=True, **kwargs):
"""
NAME
cit_magic.py
DESCRIPTION
converts CIT and .sam format files to magic_measurements format files
SYNTAX
cit_magic.py [command line options]
OPTIONS
-h: prints the help message and quits.
-usr USER: identify user, default is ""
-f FILE: specify .sam format input file, required
-WD Working Directory: output directory where files are going to go
-fsi SITEFILE : specify file with site names and locations [tab delimited magic file]
-F FILE: specify output measurements file, default is magic_measurements.txt
-Fsp FILE: specify output er_specimens.txt file, default is er_specimens.txt
-Fsi FILE: specify output er_sites.txt file, default is er_sites.txt
-Fsa FILE: specify output er_samples.txt file, default is er_samples.txt # LORI
-n [gm,kg,cc,m3]: specify normalization
-A: don't average replicate measurements
-spc NUM : specify number of characters to designate a specimen, default = 0
-ncn NCON: specify naming convention
-loc LOCNAME : specify location/study name, must have either LOCNAME or SITEFILE or be a synthetic
-mcd [FS-FD:SO-MAG,.....] colon delimited list for method codes applied to all specimens in .sam file
        -dc B PHI THETA: DC lab field (in microtesla); B, phi, and theta must be entered as a tuple "(DC,PHI,THETA)". If not given, the user will be prompted for values, which is advantageous if DC fields differ between steps or specimens. Note: this currently only works with the decimal IZZI naming convention (XXX.0,1,2, where XXX is the treatment temperature, 0 is a zero-field step, 1 is an in-field step, and 2 is a pTRM check). All other steps are hardcoded to dc_field = 0.
INPUT
        Best to put separate experiments in separate files (all AF, thermal, Thellier, TRM acquisition, Shaw, etc.)
NOTES:
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
            first sample from site TG001.
        [2] XXXX-YY: YY sample from site XXXX (XXXX, YY of arbitrary length)
        [3: default] XXXX.YY: YY sample from site XXXX (XXXX, YY of arbitrary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name = sample name
[6] site name entered in site_name column in the orient.txt format input file -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
        NB: for all other conventions you will have to either customize the code yourself
            or e-mail ltauxe@ucsd.edu or rshaar@ucsd.edu for help.
"""
#
# NB: use PHI, THETA = -1 -1 to signal that it changes, i.e. in anisotropy experiment
# -ac B : peak AF field (in mT) for ARM acquisition, default is none
#
#initialize variables
norm='cc'
samp_con,Z='3',1
meas_file='magic_measurements.txt'
spec_file='er_specimens.txt'
samp_file='er_samples.txt'
site_file='er_sites.txt'
ErSpecs,ErSamps,ErSites,ErLocs,ErCits=[],[],[],[],[]
MeasRecs=[]
specnum,units,locname=0,"1","unknown"
citation="This study"
dir_path='.'
args=sys.argv
if command_line:
if '-WD' in args:
ind=args.index("-WD")
dir_path=args[ind+1]
if "-h" in args:
print(main.__doc__)
return False
if "-usr" in args:
ind=args.index("-usr")
user=args[ind+1]
if '-F' in args:
ind=args.index("-F")
meas_file=args[ind+1]
if '-Fsp' in args:
ind=args.index("-Fsp")
spec_file=args[ind+1]
if '-Fsa' in args:
ind=args.index("-Fsa")
samp_file=args[ind+1]
if '-Fsi' in args: # LORI addition
ind=args.index("-Fsi")
site_file=args[ind+1]
if '-loc' in args:
ind=args.index("-loc")
locname=args[ind+1]
if '-mcd' in args:
ind=args.index("-mcd")
methods=args[ind+1]
else:
methods='SO-MAG'
if '-spc' in args:
ind=args.index("-spc")
specnum=-int(args[ind+1])
if '-n' in args:
ind=args.index("-n")
norm=args[ind+1]
if "-A" in args:
avg=1
else:
avg=0
if '-dc' in args:
ind=args.index('-dc')
DC_FIELD,DC_PHI,DC_THETA=list(map(float,args[ind+1].strip('( ) [ ]').split(',')))
DC_FIELD *= 1e-6
yn=''
GET_DC_PARAMS=False
else: GET_DC_PARAMS,DC_FIELD,DC_PHI,DC_THETA,yn=True,0,0,-90,''
if "-ncn" in args:
ind=args.index("-ncn")
samp_con=sys.argv[ind+1]
if "4" in samp_con:
if "-" not in samp_con:
print("option [4] must be in form 4-Z where Z is an integer")
return False, "naming convention option [4] must be in form 4-Z where Z is an integer"
else:
Z=samp_con.split("-")[1]
samp_con="4"
elif "7" in samp_con:
if "-" not in samp_con:
print("option [7] must be in form 7-Z where Z is an integer")
return False, "naming convention option [7] must be in form 7-Z where Z is an integer"
else:
Z=samp_con.split("-")[1]
samp_con="7"
if '-f' in args:
ind=args.index("-f")
magfile=args[ind+1]
if '-ID' in args:
ind = args.index('-ID')
input_dir_path = args[ind+1]
else:
input_dir_path = os.path.split(magfile)[0]
output_dir_path = dir_path
# LJ
# if you are running as a module:
elif not command_line:
dir_path = kwargs.get('dir_path', '.')
user = kwargs.get('user', '')
meas_file = kwargs.get('meas_file', 'magic_measurements.txt') # outfile
spec_file = kwargs.get('spec_file', 'er_specimens.txt') # specimen outfile
samp_file = kwargs.get('samp_file', 'er_samples.txt') # sample outfile
site_file = kwargs.get('site_file', 'er_sites.txt') # site outfile
locname = kwargs.get('locname', '')
methods = kwargs.get('methods', ['SO-MAG'])
specnum = -int(kwargs.get('specnum', 0))
norm = kwargs.get('norm', 'cc')
avg = kwargs.get('avg', 0) # 0 means do average, 1 means don't
samp_con = kwargs.get('samp_con', '3')
magfile = kwargs.get('magfile', '')
input_dir_path = kwargs.get('input_dir_path', os.path.split(magfile)[0])
output_dir_path = dir_path
DC_FIELD,DC_PHI,DC_THETA = list(map(float, kwargs.get('dc_params', (0,0,-90))))
DC_FIELD *= 1e-6
yn = ''
if DC_FIELD==0 and DC_PHI==0 and DC_THETA==-90: GET_DC_PARAMS=True
else: GET_DC_PARAMS=False
# done with module-specific stuff
# formatting and checking variables
if "4" in samp_con:
if "-" not in samp_con:
print("option [4] must be in form 4-Z where Z is an integer")
return False, "naming convention option [4] must be in form 4-Z where Z is an integer"
else:
Z=samp_con.split("-")[1]
samp_con="4"
elif "7" in samp_con:
if "-" not in samp_con:
print("option [7] must be in form 7-Z where Z is an integer")
return False, "naming convention option [7] must be in form 7-Z where Z is an integer"
else:
Z=samp_con.split("-")[1]
samp_con="7"
magfile = os.path.join(input_dir_path, magfile)
spec_file = os.path.join(output_dir_path, spec_file)
samp_file = os.path.join(output_dir_path, samp_file)
site_file = os.path.join(output_dir_path, site_file)
meas_file= os.path.join(output_dir_path, meas_file)
FIRST_GET_DC=True
try:
with open(magfile,'r') as file_input:
File = file_input.readlines()
except Exception as ex:
print("bad sam file name: ", magfile)
return False, "bad sam file name"
if len(File) == 1: File = File[0].split('\r'); File = [x+"\r\n" for x in File]
sids,ln,format=[],0,'CIT'
formats=['CIT','2G','APP','JRA']
if File[ln].strip()=='CIT': ln+=1
ErLocRec={}
ErLocRec["er_location_name"]=locname
ErLocRec["er_citation_names"]=citation
comment=File[ln]
if comment=='CIT':
format=comment
ln+=1
comment=File[ln]
print(comment)
ln+=1
specimens,samples,sites=[],[],[]
if format=='CIT':
line=File[ln].split()
site_lat=line[0]
site_lon=line[1]
ErLocRec["location_begin_lat"]=site_lat
ErLocRec["location_begin_lon"]=site_lon
ErLocRec["location_end_lat"]=site_lat
ErLocRec["location_end_lon"]=site_lon
ErLocs.append(ErLocRec)
try: Cdec=float(line[2])
except ValueError: pdb.set_trace()
for k in range(ln+1,len(File)):
line=File[k]
rec=line.split()
if rec == []: continue
specimen=rec[0]
specimens.append(specimen)
for specimen in specimens:
ErSpecRec,ErSampRec,ErSiteRec={},{},{}
if specnum!=0:
sample=specimen[:specnum]
else: sample=specimen
site=pmag.parse_site(sample,samp_con,Z)
ErSpecRec['er_specimen_name']=specimen
ErSpecRec['er_sample_name']=sample
ErSpecRec['er_site_name']=site
ErSpecRec['er_location_name']=locname
ErSpecRec['er_citation_names']=citation
ErSampRec['er_sample_name']=sample
ErSampRec['er_site_name']=site
ErSampRec['er_location_name']=locname
ErSampRec['er_citation_names']=citation
ErSampRec['magic_method_codes']=methods
ErSampRec['sample_declination_correction']='%7.1f'%(Cdec)
ErSiteRec['er_site_name']=site
ErSiteRec['er_location_name']=locname
ErSiteRec['er_citation_names']=citation
ErSiteRec['site_lat']=site_lat
ErSiteRec['site_lon']=site_lon
with open(os.path.join(input_dir_path,specimen),'r') as finput:
Lines = list(finput.readlines())
comment = ""
line=Lines[0].split()
if len(line)>2:
comment=line[2]
info=Lines[1].split()
vol=float(info[-1])
if vol!=1.0:
if norm=='cc':units="1"
if norm=='m3':units="2"
ErSpecRec['specimen_weight']=""
if units=="1" or "":
ErSpecRec['specimen_volume']='%10.3e'%(vol*1e-6)
else:
ErSpecRec['specimen_volume']='%10.3e'%(vol)
else:
if norm=='cc':units="1"
if norm=='m3':units="2"
ErSpecRec['specimen_volume']=""
if units=="1" or "":
ErSpecRec['specimen_weight']='%10.3e'%(vol*1e-3)
else:
ErSpecRec['specimen_weight']='%10.3e'%(vol)
dip=float(info[-2])
dip_direction=float(info[-3])+Cdec+90.
sample_dip=-float(info[-4])
sample_azimuth=float(info[-5])+Cdec-90.
if len(info)>5:
ErSampRec['sample_height']=info[-6]
else:
ErSampRec['sample_height']='0'
ErSampRec['sample_azimuth']='%7.1f'%(sample_azimuth)
ErSampRec['sample_dip']='%7.1f'%(sample_dip)
ErSampRec['sample_bed_dip']='%7.1f'%(dip)
ErSampRec['sample_bed_dip_direction']='%7.1f'%(dip_direction)
ErSampRec['sample_class']=''
ErSampRec['sample_type']=''
ErSampRec['sample_lithology']=''
if Cdec!=0 or Cdec!="":
ErSampRec['magic_method_codes']='SO-CMD-NORTH'
else:
ErSampRec['magic_method_codes']='SO-MAG'
for line in Lines[2:len(Lines)]:
if line == '\n': continue
MeasRec=ErSpecRec.copy()
            # Remove specimen_volume and specimen_weight as they do not exist in the magic_measurements table
del MeasRec["specimen_volume"]
del MeasRec["specimen_weight"]
treat_type=line[0:3]
if treat_type[1] == '.':
treat_type = 'NRM'
treat=line[2:6]
try: float(treat)
except ValueError: treat = line[3:6]
if treat_type.startswith('NRM'):
MeasRec['magic_method_codes']='LT-NO'
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']='273'
MeasRec['treatment_dc_field']='0'
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
MeasRec['treatment_ac_field']='0'
elif treat_type.startswith('AF'):
MeasRec['magic_method_codes']='LT-AF-Z'
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']='273'
MeasRec['treatment_dc_field']='0'
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
if treat.strip() == '':
MeasRec['treatment_ac_field']='0'
else:
MeasRec['treatment_ac_field']='%10.3e'%(float(treat)*1e-3)
elif treat_type.startswith('ARM'):
MeasRec['magic_method_codes']="LP-ARM"
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']='273'
MeasRec['treatment_dc_field']='0'
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
if treat.strip() == '':
MeasRec['treatment_ac_field']='0'
else:
MeasRec['magic_method_codes']="LP-ARM-AFD"
MeasRec['treatment_ac_field']='%10.3e'%(float(treat)*1e-3)
elif treat_type.startswith('TT'):
MeasRec['magic_method_codes']='LT-T-Z'
MeasRec['measurement_temp']='273'
if treat.strip() == '':
MeasRec['treatment_temp']='273'
else:
MeasRec['treatment_temp']='%7.1f'%(float(treat)+273)
MeasRec['treatment_dc_field']='0'
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
MeasRec['treatment_ac_field']='0'
elif treat_type.startswith('LT') or treat_type.startswith('LN2'):
MeasRec['magic_method_codes']='LT-LT-Z'
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']='77'
MeasRec['treatment_dc_field']='0'
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
MeasRec['treatment_ac_field']='0'
elif line[4] == '0': #assume decimal IZZI format 0 field thus can hardcode the dc fields
MeasRec['magic_method_codes']='LT-T-Z'
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']=str(int(treat_type) + 273)
MeasRec['treatment_dc_field']='0'
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
MeasRec['treatment_ac_field']='0'
elif line[4] == '1': #assume decimal IZZI format in constant field
if GET_DC_PARAMS: GET_DC_PARAMS, FIRST_GET_DC, yn, DC_FIELD, DC_PHI, DC_THETA = get_dc_params(FIRST_GET_DC,specimen,treat_type,yn)
MeasRec['magic_method_codes']='LT-T-I'
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']=str(int(treat_type) + 273)
MeasRec['treatment_dc_field']='%1.2e'%DC_FIELD
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
MeasRec['treatment_ac_field']='0'
elif line[4] == '2': #assume decimal IZZI format PTRM step
if GET_DC_PARAMS: GET_DC_PARAMS, FIRST_GET_DC, yn, DC_FIELD, DC_PHI, DC_THETA = get_dc_params(FIRST_GET_DC,specimen,treat_type,yn)
MeasRec['magic_method_codes']='LT-PTRM-I'
MeasRec['measurement_temp']='273'
MeasRec['treatment_temp']=str(int(treat_type) + 273)
MeasRec['treatment_dc_field']='%1.2e'%DC_FIELD
MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
MeasRec['treatment_ac_field']='0'
else:
print("trouble with your treatment steps")
MeasRec['measurement_dec']=line[46:51]
MeasRec['measurement_inc']=line[52:58]
M='%8.2e'%(float(line[31:39])*vol*1e-3) # convert to Am2
MeasRec['measurement_magn_moment']=M
MeasRec['measurement_csd']='%7.1f'%(eval(line[41:46]))
MeasRec["measurement_positions"]='1'
MeasRec['measurement_standard']='u'
if len(line)>60:
MeasRec['magic_instrument_codes']=line[85:]
MeasRec['measurement_sd_x']='%8.2e'%(float(line[58:67])*1e-8) #(convert e-5emu to Am2)
MeasRec['measurement_sd_y']='%8.2e'%(float(line[67:76])*1e-8)
MeasRec['measurement_sd_z']='%8.2e'%(float(line[76:85])*1e-8)
MeasRecs.append(MeasRec)
ErSpecs.append(ErSpecRec)
if sample not in samples:
samples.append(sample)
ErSamps.append(ErSampRec)
site=pmag.parse_site(sample,samp_con,Z)
if site not in sites:
sites.append(site)
ErSites.append(ErSiteRec)
pmag.magic_write(spec_file,ErSpecs,'er_specimens')
print('specimens stored in ',spec_file)
pmag.magic_write(samp_file,ErSamps,'er_samples')
print('samples stored in ',samp_file)
pmag.magic_write(site_file,ErSites,'er_sites')
print('sites stored in ', site_file)
Fixed=pmag.measurements_methods(MeasRecs,avg)
pmag.magic_write(meas_file,Fixed,'magic_measurements')
print('data stored in ',meas_file)
return True, meas_file | NAME
cit_magic.py
DESCRIPTION
converts CIT and .sam format files to magic_measurements format files
SYNTAX
cit_magic.py [command line options]
OPTIONS
-h: prints the help message and quits.
-usr USER: identify user, default is ""
-f FILE: specify .sam format input file, required
-WD Working Directory: output directory where files are going to go
-fsi SITEFILE : specify file with site names and locations [tab delimited magic file]
-F FILE: specify output measurements file, default is magic_measurements.txt
-Fsp FILE: specify output er_specimens.txt file, default is er_specimens.txt
-Fsi FILE: specify output er_sites.txt file, default is er_sites.txt
-Fsa FILE: specify output er_samples.txt file, default is er_samples.txt # LORI
-n [gm,kg,cc,m3]: specify normalization
-A: don't average replicate measurements
-spc NUM : specify number of characters to designate a specimen, default = 0
-ncn NCON: specify naming convention
-loc LOCNAME : specify location/study name, must have either LOCNAME or SITEFILE or be a synthetic
-mcd [FS-FD:SO-MAG,.....] colon delimited list for method codes applied to all specimens in .sam file
        -dc B PHI THETA: DC lab field (in microtesla); B, phi, and theta must be entered as a tuple "(DC,PHI,THETA)". If not given, the user will be prompted for values, which is advantageous if DC fields differ between steps or specimens. Note: this currently only works with the decimal IZZI naming convention (XXX.0,1,2, where XXX is the treatment temperature, 0 is a zero-field step, 1 is an in-field step, and 2 is a pTRM check). All other steps are hardcoded to dc_field = 0.
INPUT
        Best to put separate experiments in separate files (all AF, thermal, Thellier, TRM acquisition, Shaw, etc.)
NOTES:
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
            first sample from site TG001.
        [2] XXXX-YY: YY sample from site XXXX (XXXX, YY of arbitrary length)
        [3: default] XXXX.YY: YY sample from site XXXX (XXXX, YY of arbitrary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name = sample name
[6] site name entered in site_name column in the orient.txt format input file -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
        NB: for all other conventions you will have to either customize the code yourself
            or e-mail ltauxe@ucsd.edu or rshaar@ucsd.edu for help. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/conversion_scripts2/cit_magic2.py#L11-L419
PmagPy/PmagPy | programs/zeq.py | main | def main():
"""
NAME
zeq.py
DESCRIPTION
plots demagnetization data. The equal area projection has the X direction (usually North in geographic coordinates)
to the top. The red line is the X axis of the Zijderveld diagram. Solid symbols are lower hemisphere.
The solid (open) symbols in the Zijderveld diagram are X,Y (X,Z) pairs. The demagnetization diagram plots the
        fractional remanence remaining after each step. The green line is the fraction of the total remanence removed
between each step.
INPUT FORMAT
takes specimen_name treatment intensity declination inclination in space
delimited file
SYNTAX
        zeq.py [command line options]
OPTIONS
-f FILE for reading from command line
-u [mT,C] specify units of mT OR C, default is unscaled
-sav save figure and quit
-fmt [svg,jpg,png,pdf] set figure format [default is svg]
-beg [step number] treatment step for beginning of PCA calculation, 0 is default
-end [step number] treatment step for end of PCA calculation, last step is default
-ct [l,p,f] Calculation Type: best-fit line, plane or fisher mean; line is default
"""
files,fmt,plot={},'svg',0
end_pca,beg_pca="",""
calculation_type='DE-BFL'
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
else:
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
else:
print(main.__doc__)
sys.exit()
if '-u' in sys.argv:
ind=sys.argv.index('-u')
units=sys.argv[ind+1]
if units=="C":SIunits="K"
if units=="mT":SIunits="T"
else:
units="U"
SIunits="U"
if '-sav' in sys.argv:plot=1
if '-ct' in sys.argv:
ind=sys.argv.index('-ct')
ct=sys.argv[ind+1]
if ct=='f':calculation_type='DE-FM'
if ct=='p':calculation_type='DE-BFP'
if '-fmt' in sys.argv:
ind=sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
if '-beg' in sys.argv:
ind=sys.argv.index('-beg')
beg_pca=int(sys.argv[ind+1])
if '-end' in sys.argv:
ind=sys.argv.index('-end')
end_pca=int(sys.argv[ind+1])
f=open(file,'r')
data=f.readlines()
#
datablock= [] # set up list for data
s="" # initialize specimen name
angle=0.
for line in data: # read in the data from standard input
rec=line.split() # split each line on space to get records
if angle=="":angle=float(rec[3])
if s=="":s=rec[0]
if units=='mT':datablock.append([float(rec[1])*1e-3,float(rec[3]),float(rec[4]),1e-3*float(rec[2]),'','g']) # treatment, dec, inc, int # convert to T and Am^2 (assume emu)
if units=='C':datablock.append([float(rec[1])+273.,float(rec[3]),float(rec[4]),1e-3*float(rec[2]),'','g']) # treatment, dec, inc, int, convert to K and Am^2, assume emu
if units=='U':datablock.append([float(rec[1]),float(rec[3]),float(rec[4]),float(rec[2]),'','g']) # treatment, dec, inc, int, using unscaled units
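    # each datablock entry is [treatment, dec, inc, intensity, '', 'g']; treatments and
    # intensities are converted to SI (T or K, Am^2) when -u mT or -u C is given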
# define figure numbers in a dictionary for equal area, zijderveld,
    # and intensity vs. demagnetization step, respectively
ZED={}
ZED['eqarea'],ZED['zijd'], ZED['demag']=1,2,3
pmagplotlib.plot_init(ZED['eqarea'],5,5) # initialize plots
pmagplotlib.plot_init(ZED['zijd'],5,5)
pmagplotlib.plot_init(ZED['demag'],5,5)
#
#
pmagplotlib.plot_zed(ZED,datablock,angle,s,SIunits) # plot the data
if plot==0:pmagplotlib.draw_figs(ZED)
#
# print out data for this sample to screen
#
recnum=0
for plotrec in datablock:
if units=='mT':print('%i %7.1f %8.3e %7.1f %7.1f ' % (recnum,plotrec[0]*1e3,plotrec[3],plotrec[1],plotrec[2]))
if units=='C':print('%i %7.1f %8.3e %7.1f %7.1f ' % (recnum,plotrec[0]-273.,plotrec[3],plotrec[1],plotrec[2]))
if units=='U':print('%i %7.1f %8.3e %7.1f %7.1f ' % (recnum,plotrec[0],plotrec[3],plotrec[1],plotrec[2]))
recnum += 1
if plot==0:
while 1:
if beg_pca!="" and end_pca!="" and calculation_type!="":
pmagplotlib.plot_zed(ZED,datablock,angle,s,SIunits) # plot the data
mpars=pmag.domean(datablock,beg_pca,end_pca,calculation_type) # get best-fit direction/great circle
pmagplotlib.plot_dir(ZED,mpars,datablock,angle) # plot the best-fit direction/great circle
print('Specimen, calc_type, N, min, max, MAD, dec, inc')
if units=='mT':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"]*1e3,mpars["measurement_step_max"]*1e3,mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if units=='C':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"]-273,mpars["measurement_step_max"]-273,mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if units=='U':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"],mpars["measurement_step_max"],mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if end_pca=="":end_pca=len(datablock)-1 # initialize end_pca, beg_pca to first and last measurement
if beg_pca=="":beg_pca=0
ans=input(" s[a]ve plot, [b]ounds for pca and calculate, change [h]orizontal projection angle, [q]uit: ")
if ans =='q':
sys.exit()
if ans=='a':
files={}
for key in list(ZED.keys()):
files[key]=s+'_'+key+'.'+fmt
pmagplotlib.save_plots(ZED,files)
if ans=='h':
angle=float(input(" Declination to project onto horizontal axis? "))
pmagplotlib.plot_zed(ZED,datablock,angle,s,SIunits) # plot the data
if ans=='b':
GoOn=0
while GoOn==0: # keep going until reasonable bounds are set
print('Enter index of first point for pca: ','[',beg_pca,']')
answer=input('return to keep default ')
if answer != "":beg_pca=int(answer)
print('Enter index of last point for pca: ','[',end_pca,']')
answer=input('return to keep default ')
if answer != "":
end_pca=int(answer)
if beg_pca >=0 and beg_pca<=len(datablock)-2 and end_pca>0 and end_pca<len(datablock):
GoOn=1
else:
print("Bad entry of indices - try again")
end_pca=len(datablock)-1
beg_pca=0
GoOn=0
while GoOn==0:
ct=input('Enter Calculation Type: best-fit line, plane or fisher mean [l]/p/f : ' )
if ct=="" or ct=="l":
calculation_type="DE-BFL"
GoOn=1 # all good
elif ct=='p':
calculation_type="DE-BFP"
GoOn=1 # all good
elif ct=='f':
calculation_type="DE-FM"
GoOn=1 # all good
else:
print("bad entry of calculation type: try again. ") # keep going
pmagplotlib.plot_zed(ZED,datablock,angle,s,SIunits) # plot the data
mpars=pmag.domean(datablock,beg_pca,end_pca,calculation_type) # get best-fit direction/great circle
pmagplotlib.plot_dir(ZED,mpars,datablock,angle) # plot the best-fit direction/great circle
print('Specimen, calc_type, N, min, max, MAD, dec, inc')
if units=='mT':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"]*1e3,mpars["measurement_step_max"]*1e3,mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if units=='C':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"]-273,mpars["measurement_step_max"]-273,mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if units=='U':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"],mpars["measurement_step_max"],mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
pmagplotlib.draw_figs(ZED)
else:
print(beg_pca,end_pca)
if beg_pca!="" and end_pca!="":
pmagplotlib.plot_zed(ZED,datablock,angle,s,SIunits) # plot the data
mpars=pmag.domean(datablock,beg_pca,end_pca,calculation_type) # get best-fit direction/great circle
pmagplotlib.plot_dir(ZED,mpars,datablock,angle) # plot the best-fit direction/great circle
print('Specimen, calc_type, N, min, max, MAD, dec, inc')
if units=='mT':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"]*1e3,mpars["measurement_step_max"]*1e3,mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if units=='C':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"]-273,mpars["measurement_step_max"]-273,mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if units=='U':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"],mpars["measurement_step_max"],mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
files={}
for key in list(ZED.keys()):
files[key]=s+'_'+key+'.'+fmt
pmagplotlib.save_plots(ZED,files) | python | def main():
"""
NAME
zeq.py
DESCRIPTION
plots demagnetization data. The equal area projection has the X direction (usually North in geographic coordinates)
to the top. The red line is the X axis of the Zijderveld diagram. Solid symbols are lower hemisphere.
The solid (open) symbols in the Zijderveld diagram are X,Y (X,Z) pairs. The demagnetization diagram plots the
        fractional remanence remaining after each step. The green line is the fraction of the total remanence removed
between each step.
INPUT FORMAT
takes specimen_name treatment intensity declination inclination in space
delimited file
SYNTAX
        zeq.py [command line options]
OPTIONS
-f FILE for reading from command line
-u [mT,C] specify units of mT OR C, default is unscaled
-sav save figure and quit
-fmt [svg,jpg,png,pdf] set figure format [default is svg]
-beg [step number] treatment step for beginning of PCA calculation, 0 is default
-end [step number] treatment step for end of PCA calculation, last step is default
-ct [l,p,f] Calculation Type: best-fit line, plane or fisher mean; line is default
"""
files,fmt,plot={},'svg',0
end_pca,beg_pca="",""
calculation_type='DE-BFL'
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
else:
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
else:
print(main.__doc__)
sys.exit()
if '-u' in sys.argv:
ind=sys.argv.index('-u')
units=sys.argv[ind+1]
if units=="C":SIunits="K"
if units=="mT":SIunits="T"
else:
units="U"
SIunits="U"
if '-sav' in sys.argv:plot=1
if '-ct' in sys.argv:
ind=sys.argv.index('-ct')
ct=sys.argv[ind+1]
if ct=='f':calculation_type='DE-FM'
if ct=='p':calculation_type='DE-BFP'
if '-fmt' in sys.argv:
ind=sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
if '-beg' in sys.argv:
ind=sys.argv.index('-beg')
beg_pca=int(sys.argv[ind+1])
if '-end' in sys.argv:
ind=sys.argv.index('-end')
end_pca=int(sys.argv[ind+1])
f=open(file,'r')
data=f.readlines()
#
datablock= [] # set up list for data
s="" # initialize specimen name
angle=0.
for line in data: # read in the data from standard input
rec=line.split() # split each line on space to get records
if angle=="":angle=float(rec[3])
if s=="":s=rec[0]
if units=='mT':datablock.append([float(rec[1])*1e-3,float(rec[3]),float(rec[4]),1e-3*float(rec[2]),'','g']) # treatment, dec, inc, int # convert to T and Am^2 (assume emu)
if units=='C':datablock.append([float(rec[1])+273.,float(rec[3]),float(rec[4]),1e-3*float(rec[2]),'','g']) # treatment, dec, inc, int, convert to K and Am^2, assume emu
if units=='U':datablock.append([float(rec[1]),float(rec[3]),float(rec[4]),float(rec[2]),'','g']) # treatment, dec, inc, int, using unscaled units
# define figure numbers in a dictionary for equal area, zijderveld,
    # and intensity vs. demagnetization step, respectively
ZED={}
ZED['eqarea'],ZED['zijd'], ZED['demag']=1,2,3
pmagplotlib.plot_init(ZED['eqarea'],5,5) # initialize plots
pmagplotlib.plot_init(ZED['zijd'],5,5)
pmagplotlib.plot_init(ZED['demag'],5,5)
#
#
pmagplotlib.plot_zed(ZED,datablock,angle,s,SIunits) # plot the data
if plot==0:pmagplotlib.draw_figs(ZED)
#
# print out data for this sample to screen
#
recnum=0
for plotrec in datablock:
if units=='mT':print('%i %7.1f %8.3e %7.1f %7.1f ' % (recnum,plotrec[0]*1e3,plotrec[3],plotrec[1],plotrec[2]))
if units=='C':print('%i %7.1f %8.3e %7.1f %7.1f ' % (recnum,plotrec[0]-273.,plotrec[3],plotrec[1],plotrec[2]))
if units=='U':print('%i %7.1f %8.3e %7.1f %7.1f ' % (recnum,plotrec[0],plotrec[3],plotrec[1],plotrec[2]))
recnum += 1
if plot==0:
while 1:
if beg_pca!="" and end_pca!="" and calculation_type!="":
pmagplotlib.plot_zed(ZED,datablock,angle,s,SIunits) # plot the data
mpars=pmag.domean(datablock,beg_pca,end_pca,calculation_type) # get best-fit direction/great circle
pmagplotlib.plot_dir(ZED,mpars,datablock,angle) # plot the best-fit direction/great circle
print('Specimen, calc_type, N, min, max, MAD, dec, inc')
if units=='mT':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"]*1e3,mpars["measurement_step_max"]*1e3,mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if units=='C':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"]-273,mpars["measurement_step_max"]-273,mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if units=='U':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"],mpars["measurement_step_max"],mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if end_pca=="":end_pca=len(datablock)-1 # initialize end_pca, beg_pca to first and last measurement
if beg_pca=="":beg_pca=0
ans=input(" s[a]ve plot, [b]ounds for pca and calculate, change [h]orizontal projection angle, [q]uit: ")
if ans =='q':
sys.exit()
if ans=='a':
files={}
for key in list(ZED.keys()):
files[key]=s+'_'+key+'.'+fmt
pmagplotlib.save_plots(ZED,files)
if ans=='h':
angle=float(input(" Declination to project onto horizontal axis? "))
pmagplotlib.plot_zed(ZED,datablock,angle,s,SIunits) # plot the data
if ans=='b':
GoOn=0
while GoOn==0: # keep going until reasonable bounds are set
print('Enter index of first point for pca: ','[',beg_pca,']')
answer=input('return to keep default ')
if answer != "":beg_pca=int(answer)
print('Enter index of last point for pca: ','[',end_pca,']')
answer=input('return to keep default ')
if answer != "":
end_pca=int(answer)
if beg_pca >=0 and beg_pca<=len(datablock)-2 and end_pca>0 and end_pca<len(datablock):
GoOn=1
else:
print("Bad entry of indices - try again")
end_pca=len(datablock)-1
beg_pca=0
GoOn=0
while GoOn==0:
ct=input('Enter Calculation Type: best-fit line, plane or fisher mean [l]/p/f : ' )
if ct=="" or ct=="l":
calculation_type="DE-BFL"
GoOn=1 # all good
elif ct=='p':
calculation_type="DE-BFP"
GoOn=1 # all good
elif ct=='f':
calculation_type="DE-FM"
GoOn=1 # all good
else:
print("bad entry of calculation type: try again. ") # keep going
pmagplotlib.plot_zed(ZED,datablock,angle,s,SIunits) # plot the data
mpars=pmag.domean(datablock,beg_pca,end_pca,calculation_type) # get best-fit direction/great circle
pmagplotlib.plot_dir(ZED,mpars,datablock,angle) # plot the best-fit direction/great circle
print('Specimen, calc_type, N, min, max, MAD, dec, inc')
if units=='mT':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"]*1e3,mpars["measurement_step_max"]*1e3,mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if units=='C':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"]-273,mpars["measurement_step_max"]-273,mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if units=='U':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"],mpars["measurement_step_max"],mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
pmagplotlib.draw_figs(ZED)
else:
print(beg_pca,end_pca)
if beg_pca!="" and end_pca!="":
pmagplotlib.plot_zed(ZED,datablock,angle,s,SIunits) # plot the data
mpars=pmag.domean(datablock,beg_pca,end_pca,calculation_type) # get best-fit direction/great circle
pmagplotlib.plot_dir(ZED,mpars,datablock,angle) # plot the best-fit direction/great circle
print('Specimen, calc_type, N, min, max, MAD, dec, inc')
if units=='mT':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"]*1e3,mpars["measurement_step_max"]*1e3,mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if units=='C':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"]-273,mpars["measurement_step_max"]-273,mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
if units=='U':print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"],mpars["measurement_step_max"],mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"]))
files={}
for key in list(ZED.keys()):
files[key]=s+'_'+key+'.'+fmt
pmagplotlib.save_plots(ZED,files) | NAME
zeq.py
DESCRIPTION
plots demagnetization data. The equal area projection has the X direction (usually North in geographic coordinates)
to the top. The red line is the X axis of the Zijderveld diagram. Solid symbols are lower hemisphere.
The solid (open) symbols in the Zijderveld diagram are X,Y (X,Z) pairs. The demagnetization diagram plots the
        fractional remanence remaining after each step. The green line is the fraction of the total remanence removed
between each step.
INPUT FORMAT
takes specimen_name treatment intensity declination inclination in space
delimited file
SYNTAX
        zeq.py [command line options]
OPTIONS
-f FILE for reading from command line
-u [mT,C] specify units of mT OR C, default is unscaled
-sav save figure and quit
-fmt [svg,jpg,png,pdf] set figure format [default is svg]
-beg [step number] treatment step for beginning of PCA calculation, 0 is default
-end [step number] treatment step for end of PCA calculation, last step is default
-ct [l,p,f] Calculation Type: best-fit line, plane or fisher mean; line is default | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/zeq.py#L13-L186 |
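A minimal input sketch for zeq.py (specimen name, step values and file name are hypothetical): each line holds specimen, treatment, intensity, declination and inclination separated by spaces, with the intensity assumed to be in emu when -u mT or -u C is used.

# build a small AF demagnetization file and plot it with zeq.py
steps = [(0, 2.5e-3, 12.1, 45.6),
         (5, 2.1e-3, 11.8, 44.9),
         (10, 1.6e-3, 11.5, 44.2),
         (20, 1.0e-3, 11.2, 43.8)]
with open('mgf1a.zeq', 'w') as out:
    for treat, intensity, dec, inc in steps:
        out.write('mgf1a %g %g %g %g\n' % (treat, intensity, dec, inc))
# then, from the shell:  zeq.py -f mgf1a.zeq -u mT -ct l -beg 1 -end 3 -fmt svg -sav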
PmagPy/PmagPy | programs/make_magic_plots.py | check_for_reqd_cols | def check_for_reqd_cols(data, reqd_cols):
"""
Check data (PmagPy list of dicts) for required columns
"""
missing = []
for col in reqd_cols:
if col not in data[0]:
missing.append(col)
return missing | python | def check_for_reqd_cols(data, reqd_cols):
"""
Check data (PmagPy list of dicts) for required columns
"""
missing = []
for col in reqd_cols:
if col not in data[0]:
missing.append(col)
return missing | Check data (PmagPy list of dicts) for required columns | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/make_magic_plots.py#L26-L34 |
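A short usage example (hypothetical records): the helper only inspects the first record and returns whichever required column names are missing from it.

data = [{'treat_temp': '300', 'dir_dec': '10.5'}]
missing = check_for_reqd_cols(data, ['treat_temp', 'dir_dec', 'dir_inc'])
print(missing)   # ['dir_inc']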
PmagPy/PmagPy | programs/make_magic_plots.py | main | def main():
"""
NAME
make_magic_plots.py
DESCRIPTION
inspects magic directory for available data and makes plots
SYNTAX
make_magic_plots.py [command line options]
INPUT
magic files
OPTIONS
-h prints help message and quits
-f FILE specifies input file name
-fmt [png,eps,svg,jpg,pdf] specify format, default is png
"""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
# reset log files
for fname in ['log.txt', 'errors.txt']:
f = os.path.join(os.getcwd(), fname)
if os.path.exists(f):
os.remove(f)
dirlist = ['./']
dir_path = os.getcwd()
#
if '-fmt' in sys.argv:
ind = sys.argv.index("-fmt")
fmt = sys.argv[ind + 1]
else:
fmt = 'png'
if '-f' in sys.argv:
ind = sys.argv.index("-f")
filelist = [sys.argv[ind + 1]]
else:
filelist = os.listdir(dir_path)
## initialize some variables
samp_file = 'samples.txt'
azimuth_key = 'azimuth'
meas_file = 'measurements.txt'
loc_key = 'location'
loc_file = 'locations.txt'
method_key = 'method_codes'
dec_key = 'dir_dec'
inc_key = 'dir_inc'
tilt_corr_key = "dir_tilt_correction"
aniso_tilt_corr_key = "aniso_tilt_correction"
hyst_bcr_key = "hyst_bcr"
hyst_mr_key = "hyst_mr_moment"
hyst_ms_key = "hyst_ms_moment"
hyst_bc_key = "hyst_bc"
Mkeys = ['magnitude', 'magn_moment', 'magn_volume', 'magn_mass']
results_file = 'sites.txt'
hyst_file = 'specimens.txt'
aniso_file = 'specimens.txt'
# create contribution and propagate data throughout
con = cb.Contribution()
con.propagate_location_to_measurements()
con.propagate_location_to_specimens()
con.propagate_location_to_samples()
if not con.tables:
print('-E- No MagIC tables could be found in this directory')
error_log("No MagIC tables found")
return
# try to get the contribution id for error logging
con_id = ""
if 'contribution' in con.tables:
if 'id' in con.tables['contribution'].df.columns:
con_id = con.tables['contribution'].df.iloc[0]['id']
# check to see if propagation worked, otherwise you can't plot by location
lowest_table = None
for table in con.ancestry:
if table in con.tables:
lowest_table = table
break
do_full_directory = False
# check that locations propagated down to the lowest table in the contribution
if 'location' in con.tables[lowest_table].df.columns:
if 'locations' not in con.tables:
info_log('location names propagated to {}, but could not be validated'.format(lowest_table))
# are there any locations in the lowest table?
elif not all(con.tables[lowest_table].df['location'].isnull()):
locs = con.tables['locations'].df.index.unique()
lowest_locs = con.tables[lowest_table].df['location'].unique()
incorrect_locs = set(lowest_locs).difference(set(locs))
# are they actual locations?
if not incorrect_locs:
info_log('location names propagated to {}'.format(lowest_table))
else:
do_full_directory = True
error_log('location names did not propagate fully to {} table (looks like there are some naming inconsistencies between tables)'.format(lowest_table), con_id=con_id)
else:
do_full_directory = True
error_log('could not propagate location names down to {} table'.format(lowest_table), con_id=con_id)
else:
do_full_directory = True
error_log('could not propagate location names down to {} table'.format(lowest_table), con_id=con_id)
all_data = {}
all_data['measurements'] = con.tables.get('measurements', None)
all_data['specimens'] = con.tables.get('specimens', None)
all_data['samples'] = con.tables.get('samples', None)
all_data['sites'] = con.tables.get('sites', None)
all_data['locations'] = con.tables.get('locations', None)
if 'locations' in con.tables:
locations = con.tables['locations'].df.index.unique()
else:
locations = ['']
dirlist = [loc for loc in locations if cb.not_null(loc, False) and loc != 'nan']
if not dirlist:
dirlist = ["./"]
if do_full_directory:
dirlist = ["./"]
# plot the whole contribution as one location
if dirlist == ["./"]:
error_log('plotting the entire contribution as one location', con_id=con_id)
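        # work from tmp_*.txt copies so the per-location subsetting and the plotting
        # scripts called below never touch the original MagIC files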
for fname in os.listdir("."):
if fname.endswith(".txt"):
shutil.copy(fname, "tmp_" + fname)
# if possible, go through all data by location
# use tmp_*.txt files to separate out by location
for loc in dirlist:
print('\nworking on: ', loc)
def get_data(dtype, loc_name):
"""
Extract data of type dtype for location loc_name.
Write tmp_dtype.txt files if possible.
"""
if cb.not_null(all_data[dtype], False):
data_container = all_data[dtype]
if loc_name == "./":
data_df = data_container.df
else:
# awkward workaround for chars like "(" and "?" that break in regex
try:
data_df = data_container.df[data_container.df['location'].astype(str).str.contains(loc_name, na=False)]
except: #sre_constants.error:
data_df = data_container.df[data_container.df['location'] == loc_name]
data = data_container.convert_to_pmag_data_list(df=data_df)
res = data_container.write_magic_file('tmp_{}.txt'.format(dtype), df=data_df)
if not res:
return []
return data
meas_data = get_data('measurements', loc)
spec_data = get_data('specimens', loc)
samp_data = get_data('samples', loc)
site_data = get_data('sites', loc)
loc_data = get_data('locations', loc)
if loc == "./": # if you can't sort by location, do everything together
try:
meas_data = con.tables['measurements'].convert_to_pmag_data_list()
except KeyError:
meas_data = None
try:
spec_data = con.tables['specimens'].convert_to_pmag_data_list()
except KeyError:
spec_data = None
try:
samp_data = con.tables['samples'].convert_to_pmag_data_list()
except KeyError:
samp_data = None
try:
site_data = con.tables['sites'].convert_to_pmag_data_list()
except KeyError:
site_data = None
crd = 's'
if samp_file in filelist and samp_data: # find coordinate systems
samps = samp_data
file_type = "samples"
# get all non blank sample orientations
Srecs = pmag.get_dictitem(samps, azimuth_key, '', 'F')
if len(Srecs) > 0:
crd = 'g'
print('using geographic coordinates')
else:
print('using specimen coordinates')
else:
if VERBOSE:
print('-I- No sample data found')
if meas_file in filelist and meas_data: # start with measurement data
print('working on measurements data')
data = meas_data
file_type = 'measurements'
# looking for zeq_magic possibilities
# get all non blank method codes
AFZrecs = pmag.get_dictitem(data, method_key, 'LT-AF-Z', 'has')
# get all non blank method codes
TZrecs = pmag.get_dictitem(data, method_key, 'LT-T-Z', 'has')
# get all non blank method codes
MZrecs = pmag.get_dictitem(data, method_key, 'LT-M-Z', 'has')
# get all dec measurements
Drecs = pmag.get_dictitem(data, dec_key, '', 'F')
# get all inc measurements
Irecs = pmag.get_dictitem(data, inc_key, '', 'F')
for key in Mkeys:
Mrecs = pmag.get_dictitem(
data, key, '', 'F') # get intensity data
if len(Mrecs) > 0:
break
# potential for stepwise demag curves
        if (len(AFZrecs) > 0 or len(TZrecs) > 0 or len(MZrecs) > 0) and len(Drecs) > 0 and len(Irecs) > 0 and len(Mrecs) > 0:
CMD = 'zeq_magic.py -f tmp_measurements.txt -fsp tmp_specimens.txt -fsa tmp_samples.txt -fsi tmp_sites.txt -sav -fmt ' + fmt + ' -crd ' + crd + " -new"
print(CMD)
info_log(CMD, loc)
os.system(CMD)
# looking for thellier_magic possibilities
if len(pmag.get_dictitem(data, method_key, 'LP-PI-TRM', 'has')) > 0:
CMD = 'thellier_magic.py -f tmp_measurements.txt -fsp tmp_specimens.txt -sav -fmt ' + fmt
print(CMD)
info_log(CMD, loc)
os.system(CMD)
# looking for hysteresis possibilities
if len(pmag.get_dictitem(data, method_key, 'LP-HYS', 'has')) > 0: # find hyst experiments
# check for reqd columns
missing = check_for_reqd_cols(data, ['treat_temp'])
if missing:
error_log('LP-HYS method code present, but required column(s) [{}] missing'.format(", ".join(missing)), loc, "quick_hyst.py", con_id=con_id)
else:
CMD = 'quick_hyst.py -f tmp_measurements.txt -sav -fmt ' + fmt
print(CMD)
info_log(CMD, loc)
os.system(CMD)
# equal area plots of directional data
        # at measurement level (by specimen)
if data:
missing = check_for_reqd_cols(data, ['dir_dec', 'dir_inc'])
if not missing:
CMD = "eqarea_magic.py -f tmp_measurements.txt -obj spc -sav -no-tilt -fmt " + fmt
print(CMD)
os.system(CMD)
info_log(CMD, loc, "eqarea_magic.py")
else:
if VERBOSE:
print('-I- No measurement data found')
# site data
if results_file in filelist and site_data:
print('-I- result file found', results_file)
data = site_data
file_type = 'sites'
print('-I- working on site directions')
print('number of datapoints: ', len(data), loc)
dec_key = 'dir_dec'
inc_key = 'dir_inc'
int_key = 'int_abs'
SiteDIs = pmag.get_dictitem(data, dec_key, "", 'F') # find decs
SiteDIs = pmag.get_dictitem(
SiteDIs, inc_key, "", 'F') # find decs and incs
dir_data_found = len(SiteDIs)
print('{} Dec/inc pairs found'.format(dir_data_found))
if SiteDIs:
# then convert tilt_corr_key to correct format
old_SiteDIs = SiteDIs
SiteDIs = []
for rec in old_SiteDIs:
if tilt_corr_key not in rec:
rec[tilt_corr_key] = "0"
# make sure tilt_corr_key is a correct format
try:
rec[tilt_corr_key] = str(int(float(rec[tilt_corr_key])))
except ValueError:
rec[tilt_corr_key] = "0"
SiteDIs.append(rec)
print('number of individual directions: ', len(SiteDIs))
# tilt corrected coordinates
SiteDIs_t = pmag.get_dictitem(SiteDIs, tilt_corr_key, '100',
'T', float_to_int=True)
print('number of tilt corrected directions: ', len(SiteDIs_t))
SiteDIs_g = pmag.get_dictitem(
SiteDIs, tilt_corr_key, '0', 'T', float_to_int=True) # geographic coordinates
print('number of geographic directions: ', len(SiteDIs_g))
SiteDIs_s = pmag.get_dictitem(
SiteDIs, tilt_corr_key, '-1', 'T', float_to_int=True) # sample coordinates
print('number of sample directions: ', len(SiteDIs_s))
SiteDIs_x = pmag.get_dictitem(
SiteDIs, tilt_corr_key, '', 'T') # no coordinates
print('number of no coordinates directions: ', len(SiteDIs_x))
if len(SiteDIs_t) > 0 or len(SiteDIs_g) > 0 or len(SiteDIs_s) > 0 or len(SiteDIs_x) > 0:
CRD = ""
if len(SiteDIs_t) > 0:
CRD = ' -crd t'
elif len(SiteDIs_g) > 0:
CRD = ' -crd g'
elif len(SiteDIs_s) > 0:
CRD = ' -crd s'
CMD = 'eqarea_magic.py -f tmp_sites.txt -fsp tmp_specimens.txt -fsa tmp_samples.txt -flo tmp_locations.txt -sav -fmt ' + fmt + CRD
print(CMD)
info_log(CMD, loc)
os.system(CMD)
else:
if dir_data_found:
                    error_log('{} dec/inc pairs found, but no equal area plots were made'.format(dir_data_found), loc, "eqarea_magic.py", con_id=con_id)
#
print('-I- working on VGP map')
VGPs = pmag.get_dictitem(
SiteDIs, 'vgp_lat', "", 'F') # are there any VGPs?
if len(VGPs) > 0: # YES!
CMD = 'vgpmap_magic.py -f tmp_sites.txt -prj moll -res c -sym ro 5 -sav -fmt png'
print(CMD)
info_log(CMD, loc, 'vgpmap_magic.py')
os.system(CMD)
else:
print('-I- No vgps found')
print('-I- Look for intensities')
# is there any intensity data?
if site_data:
if int_key in site_data[0].keys():
# old way, wasn't working right:
#CMD = 'magic_select.py -key ' + int_key + ' 0. has -F tmp1.txt -f tmp_sites.txt'
Selection = pmag.get_dictkey(site_data, int_key, dtype="f")
with open('intensities.txt', 'w') as out:
for rec in Selection:
if rec != 0:
out.write(str(rec * 1e6) + "\n")
loc = loc.replace(" ", "_")
if loc == "./":
loc_name = ""
else:
loc_name = loc
histfile = 'LO:_' + loc_name + \
'_TY:_intensities_histogram:_.' + fmt
# maybe run histplot.main here instead, so you can return an error message
CMD = "histplot.py -twin -b 1 -xlab 'Intensity (uT)' -sav -f intensities.txt -F " + histfile
os.system(CMD)
info_log(CMD, loc)
print(CMD)
else:
print('-I- No intensities found')
else:
print('-I- No intensities found')
##
if hyst_file in filelist and spec_data:
print('working on hysteresis', hyst_file)
data = spec_data
file_type = 'specimens'
hdata = pmag.get_dictitem(data, hyst_bcr_key, '', 'F')
hdata = pmag.get_dictitem(hdata, hyst_mr_key, '', 'F')
hdata = pmag.get_dictitem(hdata, hyst_ms_key, '', 'F')
# there are data for a dayplot
hdata = pmag.get_dictitem(hdata, hyst_bc_key, '', 'F')
if len(hdata) > 0:
CMD = 'dayplot_magic.py -f tmp_specimens.txt -sav -fmt ' + fmt
info_log(CMD, loc)
print(CMD)
else:
print('no hysteresis data found')
if aniso_file in filelist and spec_data: # do anisotropy plots if possible
print('working on anisotropy', aniso_file)
data = spec_data
file_type = 'specimens'
# make sure there is some anisotropy data
if not data:
print('No anisotropy data found')
elif 'aniso_s' not in data[0]:
print('No anisotropy data found')
else:
# get specimen coordinates
if aniso_tilt_corr_key not in data[0]:
sdata = data
else:
sdata = pmag.get_dictitem(
data, aniso_tilt_corr_key, '-1', 'T', float_to_int=True)
# get specimen coordinates
gdata = pmag.get_dictitem(
data, aniso_tilt_corr_key, '0', 'T', float_to_int=True)
# get specimen coordinates
tdata = pmag.get_dictitem(
data, aniso_tilt_corr_key, '100', 'T', float_to_int=True)
CRD = ""
CMD = 'aniso_magic.py -x -B -sav -fmt ' + fmt + " -new"
if len(sdata) > 3:
CMD = CMD + ' -crd s'
print(CMD)
info_log(CMD, loc)
os.system(CMD)
if len(gdata) > 3:
CMD = CMD + ' -crd g'
print(CMD)
info_log(CMD, loc)
os.system(CMD)
if len(tdata) > 3:
CMD = CMD + ' -crd t'
print(CMD)
info_log(CMD, loc)
os.system(CMD)
# remove temporary files
for fname in glob.glob('tmp*.txt'):
os.remove(fname)
try:
os.remove('intensities.txt')
except FileNotFoundError:
pass
if loc_file in filelist and loc_data:
#data, file_type = pmag.magic_read(loc_file) # read in location data
data = loc_data
print('-I- working on pole map')
poles = pmag.get_dictitem(
data, 'pole_lat', "", 'F') # are there any poles?
poles = pmag.get_dictitem(
poles, 'pole_lon', "", 'F') # are there any poles?
if len(poles) > 0: # YES!
CMD = 'polemap_magic.py -sav -fmt png -rev gv 40'
print(CMD)
info_log(CMD, "all locations", "polemap_magic.py")
os.system(CMD)
else:
print('-I- No poles found')
thumbnails.make_thumbnails(dir_path) | python | def main():
"""
NAME
make_magic_plots.py
DESCRIPTION
inspects magic directory for available data and makes plots
SYNTAX
make_magic_plots.py [command line options]
INPUT
magic files
OPTIONS
-h prints help message and quits
-f FILE specifies input file name
-fmt [png,eps,svg,jpg,pdf] specify format, default is png
"""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
# reset log files
for fname in ['log.txt', 'errors.txt']:
f = os.path.join(os.getcwd(), fname)
if os.path.exists(f):
os.remove(f)
dirlist = ['./']
dir_path = os.getcwd()
#
if '-fmt' in sys.argv:
ind = sys.argv.index("-fmt")
fmt = sys.argv[ind + 1]
else:
fmt = 'png'
if '-f' in sys.argv:
ind = sys.argv.index("-f")
filelist = [sys.argv[ind + 1]]
else:
filelist = os.listdir(dir_path)
## initialize some variables
samp_file = 'samples.txt'
azimuth_key = 'azimuth'
meas_file = 'measurements.txt'
loc_key = 'location'
loc_file = 'locations.txt'
method_key = 'method_codes'
dec_key = 'dir_dec'
inc_key = 'dir_inc'
tilt_corr_key = "dir_tilt_correction"
aniso_tilt_corr_key = "aniso_tilt_correction"
hyst_bcr_key = "hyst_bcr"
hyst_mr_key = "hyst_mr_moment"
hyst_ms_key = "hyst_ms_moment"
hyst_bc_key = "hyst_bc"
Mkeys = ['magnitude', 'magn_moment', 'magn_volume', 'magn_mass']
results_file = 'sites.txt'
hyst_file = 'specimens.txt'
aniso_file = 'specimens.txt'
# create contribution and propagate data throughout
con = cb.Contribution()
con.propagate_location_to_measurements()
con.propagate_location_to_specimens()
con.propagate_location_to_samples()
if not con.tables:
print('-E- No MagIC tables could be found in this directory')
error_log("No MagIC tables found")
return
# try to get the contribution id for error logging
con_id = ""
if 'contribution' in con.tables:
if 'id' in con.tables['contribution'].df.columns:
con_id = con.tables['contribution'].df.iloc[0]['id']
# check to see if propagation worked, otherwise you can't plot by location
lowest_table = None
for table in con.ancestry:
if table in con.tables:
lowest_table = table
break
do_full_directory = False
# check that locations propagated down to the lowest table in the contribution
if 'location' in con.tables[lowest_table].df.columns:
if 'locations' not in con.tables:
info_log('location names propagated to {}, but could not be validated'.format(lowest_table))
# are there any locations in the lowest table?
elif not all(con.tables[lowest_table].df['location'].isnull()):
locs = con.tables['locations'].df.index.unique()
lowest_locs = con.tables[lowest_table].df['location'].unique()
incorrect_locs = set(lowest_locs).difference(set(locs))
# are they actual locations?
if not incorrect_locs:
info_log('location names propagated to {}'.format(lowest_table))
else:
do_full_directory = True
error_log('location names did not propagate fully to {} table (looks like there are some naming inconsistencies between tables)'.format(lowest_table), con_id=con_id)
else:
do_full_directory = True
error_log('could not propagate location names down to {} table'.format(lowest_table), con_id=con_id)
else:
do_full_directory = True
error_log('could not propagate location names down to {} table'.format(lowest_table), con_id=con_id)
all_data = {}
all_data['measurements'] = con.tables.get('measurements', None)
all_data['specimens'] = con.tables.get('specimens', None)
all_data['samples'] = con.tables.get('samples', None)
all_data['sites'] = con.tables.get('sites', None)
all_data['locations'] = con.tables.get('locations', None)
if 'locations' in con.tables:
locations = con.tables['locations'].df.index.unique()
else:
locations = ['']
dirlist = [loc for loc in locations if cb.not_null(loc, False) and loc != 'nan']
if not dirlist:
dirlist = ["./"]
if do_full_directory:
dirlist = ["./"]
# plot the whole contribution as one location
if dirlist == ["./"]:
error_log('plotting the entire contribution as one location', con_id=con_id)
for fname in os.listdir("."):
if fname.endswith(".txt"):
shutil.copy(fname, "tmp_" + fname)
# if possible, go through all data by location
# use tmp_*.txt files to separate out by location
for loc in dirlist:
print('\nworking on: ', loc)
def get_data(dtype, loc_name):
"""
Extract data of type dtype for location loc_name.
Write tmp_dtype.txt files if possible.
"""
if cb.not_null(all_data[dtype], False):
data_container = all_data[dtype]
if loc_name == "./":
data_df = data_container.df
else:
# awkward workaround for chars like "(" and "?" that break in regex
try:
data_df = data_container.df[data_container.df['location'].astype(str).str.contains(loc_name, na=False)]
except: #sre_constants.error:
data_df = data_container.df[data_container.df['location'] == loc_name]
data = data_container.convert_to_pmag_data_list(df=data_df)
res = data_container.write_magic_file('tmp_{}.txt'.format(dtype), df=data_df)
if not res:
return []
return data
meas_data = get_data('measurements', loc)
spec_data = get_data('specimens', loc)
samp_data = get_data('samples', loc)
site_data = get_data('sites', loc)
loc_data = get_data('locations', loc)
if loc == "./": # if you can't sort by location, do everything together
try:
meas_data = con.tables['measurements'].convert_to_pmag_data_list()
except KeyError:
meas_data = None
try:
spec_data = con.tables['specimens'].convert_to_pmag_data_list()
except KeyError:
spec_data = None
try:
samp_data = con.tables['samples'].convert_to_pmag_data_list()
except KeyError:
samp_data = None
try:
site_data = con.tables['sites'].convert_to_pmag_data_list()
except KeyError:
site_data = None
crd = 's'
if samp_file in filelist and samp_data: # find coordinate systems
samps = samp_data
file_type = "samples"
# get all non blank sample orientations
Srecs = pmag.get_dictitem(samps, azimuth_key, '', 'F')
if len(Srecs) > 0:
crd = 'g'
print('using geographic coordinates')
else:
print('using specimen coordinates')
else:
if VERBOSE:
print('-I- No sample data found')
if meas_file in filelist and meas_data: # start with measurement data
print('working on measurements data')
data = meas_data
file_type = 'measurements'
# looking for zeq_magic possibilities
# get all non blank method codes
AFZrecs = pmag.get_dictitem(data, method_key, 'LT-AF-Z', 'has')
# get all non blank method codes
TZrecs = pmag.get_dictitem(data, method_key, 'LT-T-Z', 'has')
# get all non blank method codes
MZrecs = pmag.get_dictitem(data, method_key, 'LT-M-Z', 'has')
# get all dec measurements
Drecs = pmag.get_dictitem(data, dec_key, '', 'F')
# get all inc measurements
Irecs = pmag.get_dictitem(data, inc_key, '', 'F')
for key in Mkeys:
Mrecs = pmag.get_dictitem(
data, key, '', 'F') # get intensity data
if len(Mrecs) > 0:
break
# potential for stepwise demag curves
if len(AFZrecs) > 0 or len(TZrecs) > 0 or len(MZrecs) > 0 and len(Drecs) > 0 and len(Irecs) > 0 and len(Mrecs) > 0:
CMD = 'zeq_magic.py -f tmp_measurements.txt -fsp tmp_specimens.txt -fsa tmp_samples.txt -fsi tmp_sites.txt -sav -fmt ' + fmt + ' -crd ' + crd + " -new"
print(CMD)
info_log(CMD, loc)
os.system(CMD)
# looking for thellier_magic possibilities
if len(pmag.get_dictitem(data, method_key, 'LP-PI-TRM', 'has')) > 0:
CMD = 'thellier_magic.py -f tmp_measurements.txt -fsp tmp_specimens.txt -sav -fmt ' + fmt
print(CMD)
info_log(CMD, loc)
os.system(CMD)
# looking for hysteresis possibilities
if len(pmag.get_dictitem(data, method_key, 'LP-HYS', 'has')) > 0: # find hyst experiments
# check for reqd columns
missing = check_for_reqd_cols(data, ['treat_temp'])
if missing:
error_log('LP-HYS method code present, but required column(s) [{}] missing'.format(", ".join(missing)), loc, "quick_hyst.py", con_id=con_id)
else:
CMD = 'quick_hyst.py -f tmp_measurements.txt -sav -fmt ' + fmt
print(CMD)
info_log(CMD, loc)
os.system(CMD)
# equal area plots of directional data
            # at measurement level (by specimen)
if data:
missing = check_for_reqd_cols(data, ['dir_dec', 'dir_inc'])
if not missing:
CMD = "eqarea_magic.py -f tmp_measurements.txt -obj spc -sav -no-tilt -fmt " + fmt
print(CMD)
os.system(CMD)
info_log(CMD, loc, "eqarea_magic.py")
else:
if VERBOSE:
print('-I- No measurement data found')
# site data
if results_file in filelist and site_data:
print('-I- result file found', results_file)
data = site_data
file_type = 'sites'
print('-I- working on site directions')
print('number of datapoints: ', len(data), loc)
dec_key = 'dir_dec'
inc_key = 'dir_inc'
int_key = 'int_abs'
SiteDIs = pmag.get_dictitem(data, dec_key, "", 'F') # find decs
SiteDIs = pmag.get_dictitem(
SiteDIs, inc_key, "", 'F') # find decs and incs
dir_data_found = len(SiteDIs)
print('{} Dec/inc pairs found'.format(dir_data_found))
if SiteDIs:
# then convert tilt_corr_key to correct format
old_SiteDIs = SiteDIs
SiteDIs = []
for rec in old_SiteDIs:
if tilt_corr_key not in rec:
rec[tilt_corr_key] = "0"
# make sure tilt_corr_key is a correct format
try:
rec[tilt_corr_key] = str(int(float(rec[tilt_corr_key])))
except ValueError:
rec[tilt_corr_key] = "0"
SiteDIs.append(rec)
print('number of individual directions: ', len(SiteDIs))
# tilt corrected coordinates
SiteDIs_t = pmag.get_dictitem(SiteDIs, tilt_corr_key, '100',
'T', float_to_int=True)
print('number of tilt corrected directions: ', len(SiteDIs_t))
SiteDIs_g = pmag.get_dictitem(
SiteDIs, tilt_corr_key, '0', 'T', float_to_int=True) # geographic coordinates
print('number of geographic directions: ', len(SiteDIs_g))
SiteDIs_s = pmag.get_dictitem(
SiteDIs, tilt_corr_key, '-1', 'T', float_to_int=True) # sample coordinates
print('number of sample directions: ', len(SiteDIs_s))
SiteDIs_x = pmag.get_dictitem(
SiteDIs, tilt_corr_key, '', 'T') # no coordinates
print('number of no coordinates directions: ', len(SiteDIs_x))
if len(SiteDIs_t) > 0 or len(SiteDIs_g) > 0 or len(SiteDIs_s) > 0 or len(SiteDIs_x) > 0:
CRD = ""
if len(SiteDIs_t) > 0:
CRD = ' -crd t'
elif len(SiteDIs_g) > 0:
CRD = ' -crd g'
elif len(SiteDIs_s) > 0:
CRD = ' -crd s'
CMD = 'eqarea_magic.py -f tmp_sites.txt -fsp tmp_specimens.txt -fsa tmp_samples.txt -flo tmp_locations.txt -sav -fmt ' + fmt + CRD
print(CMD)
info_log(CMD, loc)
os.system(CMD)
else:
if dir_data_found:
                        error_log('{} dec/inc pairs found, but no equal area plots were made'.format(dir_data_found), loc, "eqarea_magic.py", con_id=con_id)
#
print('-I- working on VGP map')
VGPs = pmag.get_dictitem(
SiteDIs, 'vgp_lat', "", 'F') # are there any VGPs?
if len(VGPs) > 0: # YES!
CMD = 'vgpmap_magic.py -f tmp_sites.txt -prj moll -res c -sym ro 5 -sav -fmt png'
print(CMD)
info_log(CMD, loc, 'vgpmap_magic.py')
os.system(CMD)
else:
print('-I- No vgps found')
print('-I- Look for intensities')
# is there any intensity data?
if site_data:
if int_key in site_data[0].keys():
# old way, wasn't working right:
#CMD = 'magic_select.py -key ' + int_key + ' 0. has -F tmp1.txt -f tmp_sites.txt'
Selection = pmag.get_dictkey(site_data, int_key, dtype="f")
with open('intensities.txt', 'w') as out:
for rec in Selection:
if rec != 0:
out.write(str(rec * 1e6) + "\n")
loc = loc.replace(" ", "_")
if loc == "./":
loc_name = ""
else:
loc_name = loc
histfile = 'LO:_' + loc_name + \
'_TY:_intensities_histogram:_.' + fmt
# maybe run histplot.main here instead, so you can return an error message
CMD = "histplot.py -twin -b 1 -xlab 'Intensity (uT)' -sav -f intensities.txt -F " + histfile
os.system(CMD)
info_log(CMD, loc)
print(CMD)
else:
print('-I- No intensities found')
else:
print('-I- No intensities found')
##
if hyst_file in filelist and spec_data:
print('working on hysteresis', hyst_file)
data = spec_data
file_type = 'specimens'
hdata = pmag.get_dictitem(data, hyst_bcr_key, '', 'F')
hdata = pmag.get_dictitem(hdata, hyst_mr_key, '', 'F')
hdata = pmag.get_dictitem(hdata, hyst_ms_key, '', 'F')
# there are data for a dayplot
hdata = pmag.get_dictitem(hdata, hyst_bc_key, '', 'F')
if len(hdata) > 0:
CMD = 'dayplot_magic.py -f tmp_specimens.txt -sav -fmt ' + fmt
info_log(CMD, loc)
print(CMD)
else:
print('no hysteresis data found')
if aniso_file in filelist and spec_data: # do anisotropy plots if possible
print('working on anisotropy', aniso_file)
data = spec_data
file_type = 'specimens'
# make sure there is some anisotropy data
if not data:
print('No anisotropy data found')
elif 'aniso_s' not in data[0]:
print('No anisotropy data found')
else:
# get specimen coordinates
if aniso_tilt_corr_key not in data[0]:
sdata = data
else:
sdata = pmag.get_dictitem(
data, aniso_tilt_corr_key, '-1', 'T', float_to_int=True)
# get specimen coordinates
gdata = pmag.get_dictitem(
data, aniso_tilt_corr_key, '0', 'T', float_to_int=True)
# get specimen coordinates
tdata = pmag.get_dictitem(
data, aniso_tilt_corr_key, '100', 'T', float_to_int=True)
CRD = ""
CMD = 'aniso_magic.py -x -B -sav -fmt ' + fmt + " -new"
if len(sdata) > 3:
CMD = CMD + ' -crd s'
print(CMD)
info_log(CMD, loc)
os.system(CMD)
if len(gdata) > 3:
CMD = CMD + ' -crd g'
print(CMD)
info_log(CMD, loc)
os.system(CMD)
if len(tdata) > 3:
CMD = CMD + ' -crd t'
print(CMD)
info_log(CMD, loc)
os.system(CMD)
# remove temporary files
for fname in glob.glob('tmp*.txt'):
os.remove(fname)
try:
os.remove('intensities.txt')
except FileNotFoundError:
pass
if loc_file in filelist and loc_data:
#data, file_type = pmag.magic_read(loc_file) # read in location data
data = loc_data
print('-I- working on pole map')
poles = pmag.get_dictitem(
data, 'pole_lat', "", 'F') # are there any poles?
poles = pmag.get_dictitem(
poles, 'pole_lon', "", 'F') # are there any poles?
if len(poles) > 0: # YES!
CMD = 'polemap_magic.py -sav -fmt png -rev gv 40'
print(CMD)
info_log(CMD, "all locations", "polemap_magic.py")
os.system(CMD)
else:
print('-I- No poles found')
thumbnails.make_thumbnails(dir_path) | NAME
make_magic_plots.py
DESCRIPTION
inspects magic directory for available data and makes plots
SYNTAX
make_magic_plots.py [command line options]
INPUT
magic files
OPTIONS
-h prints help message and quits
-f FILE specifies input file name
-fmt [png,eps,svg,jpg,pdf] specify format, default is png | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/make_magic_plots.py#L37-L464 |
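A hedged usage sketch for the record above (not taken from the repository); it assumes the script is on the PATH and that the working directory holds MagIC-format text files such as measurements.txt:

import os

os.chdir("MagIC_download")                 # illustrative directory name
os.system("make_magic_plots.py -fmt png")  # -fmt is documented above; png is also the default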
PmagPy/PmagPy | programs/core_depthplot.py | main | def main():
"""
NAME
core_depthplot.py
DESCRIPTION
plots various measurements versus core_depth or age. plots data flagged as 'FS-SS-C' as discrete samples.
SYNTAX
core_depthplot.py [command line options]
# or, for Anaconda users:
core_depthplot_anaconda [command line options]
OPTIONS
-h prints help message and quits
        -f FILE: specify input measurements format file
-fsum FILE: specify input LIMS database (IODP) core summary csv file
-fwig FILE: specify input depth,wiggle to plot, in magic format with sample_core_depth key for depth
-fsa FILE: specify input er_samples format file from magic for depth
-fa FILE: specify input ages format file from magic for age
NB: must have either -fsa OR -fa (not both)
-fsp FILE sym size: specify input zeq_specimen format file from magic, sym and size
NB: PCAs will have specified color, while fisher means will be white with specified color as the edgecolor
-fres FILE specify input pmag_results file from magic, sym and size
-LP [AF,T,ARM,IRM, X] step [in mT,C,mT,mT, mass/vol] to plot
-S do not plot blanket treatment data (if this is set, you don't need the -LP)
-sym SYM SIZE, symbol, size for continuous points (e.g., ro 5, bs 10, g^ 10 for red dot, blue square, green triangle), default is blue dot at 5 pt
-D do not plot declination
-M do not plot magnetization
-log plot magnetization on a log scale
-L do not connect dots with a line
-I do not plot inclination
-d min max [in m] depth range to plot
-n normalize by weight in er_specimen table
-Iex: plot the expected inc at lat - only available for results with lat info in file
-ts TS amin amax: plot the GPTS for the time interval between amin and amax (numbers in Ma)
TS: [ck95, gts04, gts12]
-ds [mbsf,mcd] specify depth scale, mbsf default
-fmt [svg, eps, pdf, png] specify output format for plot (default: svg)
-sav save plot silently
DEFAULTS:
Measurements file: measurements.txt
Samples file: samples.txt
NRM step
Summary file: none
"""
args = sys.argv
if '-h' in args:
print(main.__doc__)
sys.exit()
dataframe = extractor.command_line_dataframe([ ['f', False, 'measurements.txt'], ['fsum', False, ''],
['fwig', False, ''], ['fsa', False, ''],
['fa', False, ''], ['fsp', False, ''],
['fres', False, '' ], ['fmt', False, 'svg'],
['LP', False, ''], ['n', False, False],
['d', False, '-1 -1'], ['ts', False, ''],
['WD', False, '.'], ['L', False, True],
['S', False, True], ['D', False, True],
['I', False, True], ['M', False, True],
['log', False, 0],
['ds', False, 'sample_core_depth'],
['sym', False, 'bo 5'], ['ID', False, '.'],
['sav', False, False], ['DM', False, 3]])
checked_args = extractor.extract_and_check_args(args, dataframe)
meas_file, sum_file, wig_file, samp_file, age_file, spc_file, res_file, fmt, meth, norm, depth, timescale, dir_path, pltLine, pltSus, pltDec, pltInc, pltMag, logit, depth_scale, symbol, input_dir, save, data_model_num = extractor.get_vars(
['f', 'fsum', 'fwig', 'fsa', 'fa', 'fsp', 'fres', 'fmt', 'LP', 'n', 'd', 'ts', 'WD', 'L', 'S', 'D', 'I', 'M', 'log', 'ds', 'sym', 'ID', 'sav', 'DM'], checked_args)
# format some variables
# format symbol/size
try:
sym, size = symbol.split()
size = int(size)
except:
print('you should provide -sym in this format: ro 5')
print('using defaults instead')
sym, size = 'ro', 5
# format result file, symbol, size
if res_file:
try:
res_file, res_sym, res_size = res_file.split()
except:
print('you must provide -fres in this format: -fres filename symbol size')
print(
'could not parse {}, defaulting to using no result file'.format(res_file))
res_file, res_sym, res_size = '', '', 0
else:
res_file, res_sym, res_size = '', '', 0
# format specimen file, symbol, size
if spc_file:
try:
spc_file, spc_sym, spc_size = spc_file.split()
except:
print('you must provide -fsp in this format: -fsp filename symbol size')
print(
'could not parse {}, defaulting to using no specimen file'.format(spc_file))
spc_file, spc_sym, spc_size = '', '', 0
else:
spc_file, spc_sym, spc_size = '', '', 0
# format min/max depth
try:
dmin, dmax = depth.split()
except:
print('you must provide -d in this format: -d dmin dmax')
print('could not parse {}, defaulting to plotting all depths'.format(depth))
dmin, dmax = -1, -1
# format timescale, min/max time
if timescale:
try:
timescale, amin, amax = timescale.split()
pltTime = True
except:
print(
'you must provide -ts in this format: -ts timescale minimum_age maximum_age')
print(
'could not parse {}, defaulting to using no timescale'.format(timescale))
timescale, amin, amax = None, -1, -1
pltTime = False
else:
timescale, amin, amax = None, -1, -1
pltTime = False
# format norm and wt_file
if norm and not isinstance(norm, bool):
wt_file = norm
norm = True
else:
norm = False
wt_file = ''
    # format list of protocols and step
try:
method, step = meth.split()
except:
print(
'To use the -LP flag you must provide both the protocol and the step in this format:\n-LP [AF,T,ARM,IRM, X] step [in mT,C,mT,mT, mass/vol] to plot')
print('Defaulting to using no protocol')
method, step = 'LT-NO', 0
# list of varnames
#['f', 'fsum', 'fwig', 'fsa', 'fa', 'fsp', 'fres', 'fmt', 'LP', 'n', 'd', 'ts', 'WD', 'L', 'S', 'D', 'I', 'M', 'log', 'ds', 'sym' ]
#meas_file, sum_file, wig_file, samp_file, age_file, spc_file, res_file, fmt, meth, norm, depth, timescale, dir_path, pltLine, pltSus, pltDec, pltInc, pltMag, logit, depth_scale, symbol
fig, figname = ipmag.core_depthplot(input_dir, meas_file, spc_file, samp_file, age_file, sum_file, wt_file, depth_scale, dmin, dmax, sym, size,
spc_sym, spc_size, method, step, fmt, pltDec, pltInc, pltMag, pltLine, pltSus, logit, pltTime, timescale, amin, amax, norm, data_model_num)
if not pmagplotlib.isServer:
figname = figname.replace(':', '_')
if fig and save:
print('-I- Created plot: {}'.format(figname))
plt.savefig(figname)
return
app = wx.App(redirect=False)
if not fig:
pw.simple_warning(
'No plot was able to be created with the data you provided.\nMake sure you have given all the required information and try again')
return False
dpi = fig.get_dpi()
pixel_width = dpi * fig.get_figwidth()
pixel_height = dpi * fig.get_figheight()
figname = os.path.join(dir_path, figname)
plot_frame = pmag_menu_dialogs.PlotFrame((int(pixel_width), int(pixel_height + 50)),
fig, figname, standalone=True)
app.MainLoop() | python | def main():
"""
NAME
core_depthplot.py
DESCRIPTION
plots various measurements versus core_depth or age. plots data flagged as 'FS-SS-C' as discrete samples.
SYNTAX
core_depthplot.py [command line options]
# or, for Anaconda users:
core_depthplot_anaconda [command line options]
OPTIONS
-h prints help message and quits
        -f FILE: specify input measurements format file
-fsum FILE: specify input LIMS database (IODP) core summary csv file
-fwig FILE: specify input depth,wiggle to plot, in magic format with sample_core_depth key for depth
-fsa FILE: specify input er_samples format file from magic for depth
-fa FILE: specify input ages format file from magic for age
NB: must have either -fsa OR -fa (not both)
-fsp FILE sym size: specify input zeq_specimen format file from magic, sym and size
NB: PCAs will have specified color, while fisher means will be white with specified color as the edgecolor
-fres FILE specify input pmag_results file from magic, sym and size
-LP [AF,T,ARM,IRM, X] step [in mT,C,mT,mT, mass/vol] to plot
-S do not plot blanket treatment data (if this is set, you don't need the -LP)
-sym SYM SIZE, symbol, size for continuous points (e.g., ro 5, bs 10, g^ 10 for red dot, blue square, green triangle), default is blue dot at 5 pt
-D do not plot declination
-M do not plot magnetization
-log plot magnetization on a log scale
-L do not connect dots with a line
-I do not plot inclination
-d min max [in m] depth range to plot
-n normalize by weight in er_specimen table
-Iex: plot the expected inc at lat - only available for results with lat info in file
-ts TS amin amax: plot the GPTS for the time interval between amin and amax (numbers in Ma)
TS: [ck95, gts04, gts12]
-ds [mbsf,mcd] specify depth scale, mbsf default
-fmt [svg, eps, pdf, png] specify output format for plot (default: svg)
-sav save plot silently
DEFAULTS:
Measurements file: measurements.txt
Samples file: samples.txt
NRM step
Summary file: none
"""
args = sys.argv
if '-h' in args:
print(main.__doc__)
sys.exit()
dataframe = extractor.command_line_dataframe([ ['f', False, 'measurements.txt'], ['fsum', False, ''],
['fwig', False, ''], ['fsa', False, ''],
['fa', False, ''], ['fsp', False, ''],
['fres', False, '' ], ['fmt', False, 'svg'],
['LP', False, ''], ['n', False, False],
['d', False, '-1 -1'], ['ts', False, ''],
['WD', False, '.'], ['L', False, True],
['S', False, True], ['D', False, True],
['I', False, True], ['M', False, True],
['log', False, 0],
['ds', False, 'sample_core_depth'],
['sym', False, 'bo 5'], ['ID', False, '.'],
['sav', False, False], ['DM', False, 3]])
checked_args = extractor.extract_and_check_args(args, dataframe)
meas_file, sum_file, wig_file, samp_file, age_file, spc_file, res_file, fmt, meth, norm, depth, timescale, dir_path, pltLine, pltSus, pltDec, pltInc, pltMag, logit, depth_scale, symbol, input_dir, save, data_model_num = extractor.get_vars(
['f', 'fsum', 'fwig', 'fsa', 'fa', 'fsp', 'fres', 'fmt', 'LP', 'n', 'd', 'ts', 'WD', 'L', 'S', 'D', 'I', 'M', 'log', 'ds', 'sym', 'ID', 'sav', 'DM'], checked_args)
# format some variables
# format symbol/size
try:
sym, size = symbol.split()
size = int(size)
except:
print('you should provide -sym in this format: ro 5')
print('using defaults instead')
sym, size = 'ro', 5
# format result file, symbol, size
if res_file:
try:
res_file, res_sym, res_size = res_file.split()
except:
print('you must provide -fres in this format: -fres filename symbol size')
print(
'could not parse {}, defaulting to using no result file'.format(res_file))
res_file, res_sym, res_size = '', '', 0
else:
res_file, res_sym, res_size = '', '', 0
# format specimen file, symbol, size
if spc_file:
try:
spc_file, spc_sym, spc_size = spc_file.split()
except:
print('you must provide -fsp in this format: -fsp filename symbol size')
print(
'could not parse {}, defaulting to using no specimen file'.format(spc_file))
spc_file, spc_sym, spc_size = '', '', 0
else:
spc_file, spc_sym, spc_size = '', '', 0
# format min/max depth
try:
dmin, dmax = depth.split()
except:
print('you must provide -d in this format: -d dmin dmax')
print('could not parse {}, defaulting to plotting all depths'.format(depth))
dmin, dmax = -1, -1
# format timescale, min/max time
if timescale:
try:
timescale, amin, amax = timescale.split()
pltTime = True
except:
print(
'you must provide -ts in this format: -ts timescale minimum_age maximum_age')
print(
'could not parse {}, defaulting to using no timescale'.format(timescale))
timescale, amin, amax = None, -1, -1
pltTime = False
else:
timescale, amin, amax = None, -1, -1
pltTime = False
# format norm and wt_file
if norm and not isinstance(norm, bool):
wt_file = norm
norm = True
else:
norm = False
wt_file = ''
    # format list of protocols and step
try:
method, step = meth.split()
except:
print(
'To use the -LP flag you must provide both the protocol and the step in this format:\n-LP [AF,T,ARM,IRM, X] step [in mT,C,mT,mT, mass/vol] to plot')
print('Defaulting to using no protocol')
method, step = 'LT-NO', 0
# list of varnames
#['f', 'fsum', 'fwig', 'fsa', 'fa', 'fsp', 'fres', 'fmt', 'LP', 'n', 'd', 'ts', 'WD', 'L', 'S', 'D', 'I', 'M', 'log', 'ds', 'sym' ]
#meas_file, sum_file, wig_file, samp_file, age_file, spc_file, res_file, fmt, meth, norm, depth, timescale, dir_path, pltLine, pltSus, pltDec, pltInc, pltMag, logit, depth_scale, symbol
fig, figname = ipmag.core_depthplot(input_dir, meas_file, spc_file, samp_file, age_file, sum_file, wt_file, depth_scale, dmin, dmax, sym, size,
spc_sym, spc_size, method, step, fmt, pltDec, pltInc, pltMag, pltLine, pltSus, logit, pltTime, timescale, amin, amax, norm, data_model_num)
if not pmagplotlib.isServer:
figname = figname.replace(':', '_')
if fig and save:
print('-I- Created plot: {}'.format(figname))
plt.savefig(figname)
return
app = wx.App(redirect=False)
if not fig:
pw.simple_warning(
'No plot was able to be created with the data you provided.\nMake sure you have given all the required information and try again')
return False
dpi = fig.get_dpi()
pixel_width = dpi * fig.get_figwidth()
pixel_height = dpi * fig.get_figheight()
figname = os.path.join(dir_path, figname)
plot_frame = pmag_menu_dialogs.PlotFrame((int(pixel_width), int(pixel_height + 50)),
fig, figname, standalone=True)
app.MainLoop() | NAME
core_depthplot.py
DESCRIPTION
plots various measurements versus core_depth or age. plots data flagged as 'FS-SS-C' as discrete samples.
SYNTAX
core_depthplot.py [command line options]
# or, for Anaconda users:
core_depthplot_anaconda [command line options]
OPTIONS
-h prints help message and quits
        -f FILE: specify input measurements format file
-fsum FILE: specify input LIMS database (IODP) core summary csv file
-fwig FILE: specify input depth,wiggle to plot, in magic format with sample_core_depth key for depth
-fsa FILE: specify input er_samples format file from magic for depth
-fa FILE: specify input ages format file from magic for age
NB: must have either -fsa OR -fa (not both)
-fsp FILE sym size: specify input zeq_specimen format file from magic, sym and size
NB: PCAs will have specified color, while fisher means will be white with specified color as the edgecolor
-fres FILE specify input pmag_results file from magic, sym and size
-LP [AF,T,ARM,IRM, X] step [in mT,C,mT,mT, mass/vol] to plot
-S do not plot blanket treatment data (if this is set, you don't need the -LP)
-sym SYM SIZE, symbol, size for continuous points (e.g., ro 5, bs 10, g^ 10 for red dot, blue square, green triangle), default is blue dot at 5 pt
-D do not plot declination
-M do not plot magnetization
-log plot magnetization on a log scale
-L do not connect dots with a line
-I do not plot inclination
-d min max [in m] depth range to plot
-n normalize by weight in er_specimen table
-Iex: plot the expected inc at lat - only available for results with lat info in file
-ts TS amin amax: plot the GPTS for the time interval between amin and amax (numbers in Ma)
TS: [ck95, gts04, gts12]
-ds [mbsf,mcd] specify depth scale, mbsf default
-fmt [svg, eps, pdf, png] specify output format for plot (default: svg)
-sav save plot silently
DEFAULTS:
Measurements file: measurements.txt
Samples file: samples.txt
NRM step
Summary file: none | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/core_depthplot.py#L19-L193 |
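A hedged invocation sketch using only options documented in the record above; the file names, depth range, and AF step are illustrative:

import os

cmd = ("core_depthplot.py -f measurements.txt -fsa samples.txt "
       "-d 0 50 -LP AF 15 -sav -fmt svg")   # upper 50 m, 15 mT AF step, silent SVG output
os.system(cmd)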
PmagPy/PmagPy | programs/pca.py | main | def main():
"""
NAME
pca.py
DESCRIPTION
calculates best-fit line/plane through demagnetization data
INPUT FORMAT
takes specimen_name treatment intensity declination inclination in space delimited file
SYNTAX
pca.py [command line options][< filename]
OPTIONS
-h prints help and quits
-f FILE
-dir [L,P,F][BEG][END] specify direction type, beginning and end
(L:line, P:plane or F:fisher mean of unit vectors)
BEG: first step (NRM = step zero)
END: last step (NRM = step zero)
< filename for reading from standard input
OUTPUT:
specimen_name calculation_type N beg end MAD dec inc
if calculation_type is 'p', dec and inc are pole to plane, otherwise, best-fit direction
EXAMPLE:
pca.py -dir L 1 5 <ex3.3
        will calculate best-fit line through demagnetization steps 1 and 5 from file ex3.3
"""
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
else:
data=sys.stdin.readlines() # read in data from standard input
if '-dir' in sys.argv: #
ind=sys.argv.index('-dir')
typ=sys.argv[ind+1]
if typ=='L': calculation_type='DE-BFL'
if typ=='P': calculation_type='DE-BFP'
if typ=='F': calculation_type='DE-FM'
beg_pca = int(sys.argv[ind+2])
end_pca = int(sys.argv[ind+3])
#
#
datablock= [] # set up list for data
s=""
ind=0
for line in data: # read in the data from standard input
rec=line.split() # split each line on space to get records
if s=="":
s=rec[0]
print(s, calculation_type)
print(ind,rec[1],rec[3],rec[4],rec[2])
ind+=1
datablock.append([float(rec[1]),float(rec[3]),float(rec[4]),float(rec[2]),'0']) # treatment,dec,inc,int,dummy
mpars=pmag.domean(datablock,beg_pca,end_pca,calculation_type)
if calculation_type=="DE-FM":
print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"],mpars["measurement_step_max"],mpars["specimen_a95"],mpars["specimen_dec"],mpars["specimen_inc"]))
else:
print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"],mpars["measurement_step_max"],mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"])) | python | def main():
"""
NAME
pca.py
DESCRIPTION
calculates best-fit line/plane through demagnetization data
INPUT FORMAT
takes specimen_name treatment intensity declination inclination in space delimited file
SYNTAX
pca.py [command line options][< filename]
OPTIONS
-h prints help and quits
-f FILE
-dir [L,P,F][BEG][END] specify direction type, beginning and end
(L:line, P:plane or F:fisher mean of unit vectors)
BEG: first step (NRM = step zero)
END: last step (NRM = step zero)
< filename for reading from standard input
OUTPUT:
specimen_name calculation_type N beg end MAD dec inc
if calculation_type is 'p', dec and inc are pole to plane, otherwise, best-fit direction
EXAMPLE:
pca.py -dir L 1 5 <ex3.3
        will calculate best-fit line through demagnetization steps 1 and 5 from file ex3.3
"""
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
else:
data=sys.stdin.readlines() # read in data from standard input
if '-dir' in sys.argv: #
ind=sys.argv.index('-dir')
typ=sys.argv[ind+1]
if typ=='L': calculation_type='DE-BFL'
if typ=='P': calculation_type='DE-BFP'
if typ=='F': calculation_type='DE-FM'
beg_pca = int(sys.argv[ind+2])
end_pca = int(sys.argv[ind+3])
#
#
datablock= [] # set up list for data
s=""
ind=0
for line in data: # read in the data from standard input
rec=line.split() # split each line on space to get records
if s=="":
s=rec[0]
print(s, calculation_type)
print(ind,rec[1],rec[3],rec[4],rec[2])
ind+=1
datablock.append([float(rec[1]),float(rec[3]),float(rec[4]),float(rec[2]),'0']) # treatment,dec,inc,int,dummy
mpars=pmag.domean(datablock,beg_pca,end_pca,calculation_type)
if calculation_type=="DE-FM":
print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"],mpars["measurement_step_max"],mpars["specimen_a95"],mpars["specimen_dec"],mpars["specimen_inc"]))
else:
print('%s %s %i %6.2f %6.2f %6.1f %7.1f %7.1f' % (s,calculation_type,mpars["specimen_n"],mpars["measurement_step_min"],mpars["measurement_step_max"],mpars["specimen_mad"],mpars["specimen_dec"],mpars["specimen_inc"])) | NAME
pca.py
DESCRIPTION
calculates best-fit line/plane through demagnetization data
INPUT FORMAT
takes specimen_name treatment intensity declination inclination in space delimited file
SYNTAX
pca.py [command line options][< filename]
OPTIONS
-h prints help and quits
-f FILE
-dir [L,P,F][BEG][END] specify direction type, beginning and end
(L:line, P:plane or F:fisher mean of unit vectors)
BEG: first step (NRM = step zero)
END: last step (NRM = step zero)
< filename for reading from standard input
OUTPUT:
specimen_name calculation_type N beg end MAD dec inc
if calculation_type is 'p', dec and inc are pole to plane, otherwise, best-fit direction
EXAMPLE:
pca.py -dir L 1 5 <ex3.3
        will calculate best-fit line through demagnetization steps 1 and 5 from file ex3.3 | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pca.py#L6-L72 |
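The script wraps pmag.domean(), whose call signature and return keys are visible in the code above; a hedged sketch with invented demagnetization steps:

import pmagpy.pmag as pmag

# Invented data: [treatment, dec, inc, intensity, flag], as built by pca.py.
datablock = [
    [0.0,  339.9, 57.9, 9.28e-05, '0'],
    [2.5,  325.7, 49.1, 7.55e-05, '0'],
    [5.0,  321.3, 45.9, 6.02e-05, '0'],
    [10.0, 314.8, 41.7, 4.34e-05, '0'],
    [15.0, 310.3, 38.7, 3.12e-05, '0'],
    [20.0, 305.0, 37.0, 2.01e-05, '0'],
]
mpars = pmag.domean(datablock, 1, 5, 'DE-BFL')   # best-fit line through steps 1-5
print(mpars['specimen_dec'], mpars['specimen_inc'], mpars['specimen_mad'])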
PmagPy/PmagPy | programs/pt_rot.py | main | def main():
"""
NAME
pt_rot.py
DESCRIPTION
rotates pt according to specified age and plate
SYNTAX
pt_rot.py [command line options]
OPTIONS
-h prints help and quits
-f file with lon lat plate age Dplate as space delimited input
            Dplate is the desired destination plate for the coordinates
- default is "fixed south africa"
Dplate should be one of: [nwaf, neaf,saf,aus, eur, ind, sam, ant, grn, nam]
-ff file Efile, file has lat lon data file and Efile has sequential rotation poles: Elat Elon Omega
-F OFILE, output sites (pmag_results) formatted file with rotated points stored in pole_lon, pole_lat (vgp_lon, vgp_lat). (data_model=2.5)
default is to print out rotated lon, lat to standard output
-dm [2.5,3] set data model for output. Default is 3
"""
dir_path='.'
PTS=[]
ResRecs=[]
ofile=""
data_model=3
Dplates=['nwaf', 'neaf','saf','aus', 'eur', 'ind', 'sam', 'ant', 'grn', 'nam']
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-F' in sys.argv:
ind = sys.argv.index('-F')
ofile=dir_path+'/'+sys.argv[ind+1]
if '-dm' in sys.argv:
ind = sys.argv.index('-dm')
        data_model=float(sys.argv[ind+1])  # parse the output data model number (2.5 or 3)
if '-f' in sys.argv:
ind = sys.argv.index('-f')
file=dir_path+'/'+sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
elif '-ff' in sys.argv:
ind = sys.argv.index('-ff')
file=dir_path+'/'+sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
Efile=dir_path+'/'+sys.argv[ind+2]
f=open(Efile,'r')
edata=f.readlines()
Poles=[]
for p in edata:
rec=p.split()
pole=[float(rec[0]),float(rec[1]),float(rec[2])] # pole is lat/lon/omega
Poles.append(pole)
else:
data=sys.stdin.readlines()
polelatkey,polelonkey='pole_lat','pole_lon'
if data_model!=3:
polelatkey,polelonkey='vgp_lat','vgp_lon'
for line in data:
PtRec={}
rec=line.split()
PtRec['site_lon']=rec[0]
PtRec['site_lat']=rec[1]
if '-ff' in sys.argv:
pt_lat,pt_lon=float(rec[1]),float(rec[0])
for pole in Poles:
ptrot= pmag.pt_rot(pole,[pt_lat],[pt_lon])
pt_lat=ptrot[0][0]
pt_lon=ptrot[1][0]
if ofile=="":
print(ptrot[1][0], ptrot[0][0])
else:
ResRec={polelonkey: '%7.1f'%(ptrot[0][0]),polelatkey:'%7.1f'%( ptrot[1][0])}
ResRecs.append(ResRec)
else:
PtRec['cont']=rec[2]
if PtRec['cont']=='af':PtRec['cont']='saf' # use fixed south africa
PtRec['age']=rec[3]
if len(rec)>4:
PtRec['dcont']=rec[4]
PTS.append(PtRec)
if '-ff' not in sys.argv:
for pt in PTS:
pole='not specified'
pt_lat=float(pt['site_lat'])
pt_lon=float(pt['site_lon'])
age=float(pt['age'])
ptrot=[[pt_lat],[pt_lon]]
if pt['cont']=='ib':
pole=frp.get_pole(pt['cont'],age)
ptrot= pmag.pt_rot(pole,[pt_lat],[pt_lon])
pt_lat=ptrot[0][0]
pt_lon=ptrot[1][0]
pt['cont']='eur'
if pt['cont']!='saf':
pole1=frp.get_pole(pt['cont'],age)
ptrot= pmag.pt_rot(pole1,[pt_lat],[pt_lon])
if 'dcont' in list(pt.keys()):
pt_lat=ptrot[0][0]
pt_lon=ptrot[1][0]
pole=frp.get_pole(pt['dcont'],age)
pole[2]=-pole[2]
ptrot= pmag.pt_rot(pole,[pt_lat],[pt_lon])
if ofile=="":
print(ptrot[1][0], ptrot[0][0])
else:
ResRec={polelonkey: '%7.1f'%(ptrot[0][0]),polelatkey:'%7.1f'%( ptrot[1][0])}
ResRecs.append(ResRec)
else:
if 'dcont' in list(pt.keys()):
pole=frp.get_pole(pt['dcont'],age)
pole[2]=-pole[2]
ptrot= pmag.pt_rot(pole,[pt_lat],[pt_lon])
print(ptrot)
if ofile=="":
print(ptrot[1][0], ptrot[0][0])
else:
ResRec={polelonkey: '%7.1f'%(ptrot[0][0]),polelatkey:'%7.1f'%( ptrot[1][0])}
ResRecs.append(ResRec)
else:
if ofile=="":
print(ptrot[1][0], ptrot[0][0])
else:
ResRec={polelonkey: '%7.1f'%(ptrot[0][0]),polelatkey:'%7.1f'%( ptrot[1][0])}
ResRecs.append(ResRec)
if len(ResRecs)>0:
if data_model==3:
pmag.magic_write(ofile,ResRecs,'locations')
else:
pmag.magic_write(ofile,ResRecs,'pmag_results') | python | def main():
"""
NAME
pt_rot.py
DESCRIPTION
rotates pt according to specified age and plate
SYNTAX
pt_rot.py [command line options]
OPTIONS
-h prints help and quits
-f file with lon lat plate age Dplate as space delimited input
            Dplate is the desired destination plate for the coordinates
- default is "fixed south africa"
Dplate should be one of: [nwaf, neaf,saf,aus, eur, ind, sam, ant, grn, nam]
-ff file Efile, file has lat lon data file and Efile has sequential rotation poles: Elat Elon Omega
-F OFILE, output sites (pmag_results) formatted file with rotated points stored in pole_lon, pole_lat (vgp_lon, vgp_lat). (data_model=2.5)
default is to print out rotated lon, lat to standard output
-dm [2.5,3] set data model for output. Default is 3
"""
dir_path='.'
PTS=[]
ResRecs=[]
ofile=""
data_model=3
Dplates=['nwaf', 'neaf','saf','aus', 'eur', 'ind', 'sam', 'ant', 'grn', 'nam']
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-F' in sys.argv:
ind = sys.argv.index('-F')
ofile=dir_path+'/'+sys.argv[ind+1]
if '-dm' in sys.argv:
ind = sys.argv.index('-dm')
        data_model=float(sys.argv[ind+1])  # parse the output data model number (2.5 or 3)
if '-f' in sys.argv:
ind = sys.argv.index('-f')
file=dir_path+'/'+sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
elif '-ff' in sys.argv:
ind = sys.argv.index('-ff')
file=dir_path+'/'+sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
Efile=dir_path+'/'+sys.argv[ind+2]
f=open(Efile,'r')
edata=f.readlines()
Poles=[]
for p in edata:
rec=p.split()
pole=[float(rec[0]),float(rec[1]),float(rec[2])] # pole is lat/lon/omega
Poles.append(pole)
else:
data=sys.stdin.readlines()
polelatkey,polelonkey='pole_lat','pole_lon'
if data_model!=3:
polelatkey,polelonkey='vgp_lat','vgp_lon'
for line in data:
PtRec={}
rec=line.split()
PtRec['site_lon']=rec[0]
PtRec['site_lat']=rec[1]
if '-ff' in sys.argv:
pt_lat,pt_lon=float(rec[1]),float(rec[0])
for pole in Poles:
ptrot= pmag.pt_rot(pole,[pt_lat],[pt_lon])
pt_lat=ptrot[0][0]
pt_lon=ptrot[1][0]
if ofile=="":
print(ptrot[1][0], ptrot[0][0])
else:
ResRec={polelonkey: '%7.1f'%(ptrot[0][0]),polelatkey:'%7.1f'%( ptrot[1][0])}
ResRecs.append(ResRec)
else:
PtRec['cont']=rec[2]
if PtRec['cont']=='af':PtRec['cont']='saf' # use fixed south africa
PtRec['age']=rec[3]
if len(rec)>4:
PtRec['dcont']=rec[4]
PTS.append(PtRec)
if '-ff' not in sys.argv:
for pt in PTS:
pole='not specified'
pt_lat=float(pt['site_lat'])
pt_lon=float(pt['site_lon'])
age=float(pt['age'])
ptrot=[[pt_lat],[pt_lon]]
if pt['cont']=='ib':
pole=frp.get_pole(pt['cont'],age)
ptrot= pmag.pt_rot(pole,[pt_lat],[pt_lon])
pt_lat=ptrot[0][0]
pt_lon=ptrot[1][0]
pt['cont']='eur'
if pt['cont']!='saf':
pole1=frp.get_pole(pt['cont'],age)
ptrot= pmag.pt_rot(pole1,[pt_lat],[pt_lon])
if 'dcont' in list(pt.keys()):
pt_lat=ptrot[0][0]
pt_lon=ptrot[1][0]
pole=frp.get_pole(pt['dcont'],age)
pole[2]=-pole[2]
ptrot= pmag.pt_rot(pole,[pt_lat],[pt_lon])
if ofile=="":
print(ptrot[1][0], ptrot[0][0])
else:
ResRec={polelonkey: '%7.1f'%(ptrot[0][0]),polelatkey:'%7.1f'%( ptrot[1][0])}
ResRecs.append(ResRec)
else:
if 'dcont' in list(pt.keys()):
pole=frp.get_pole(pt['dcont'],age)
pole[2]=-pole[2]
ptrot= pmag.pt_rot(pole,[pt_lat],[pt_lon])
print(ptrot)
if ofile=="":
print(ptrot[1][0], ptrot[0][0])
else:
ResRec={polelonkey: '%7.1f'%(ptrot[0][0]),polelatkey:'%7.1f'%( ptrot[1][0])}
ResRecs.append(ResRec)
else:
if ofile=="":
print(ptrot[1][0], ptrot[0][0])
else:
ResRec={polelonkey: '%7.1f'%(ptrot[0][0]),polelatkey:'%7.1f'%( ptrot[1][0])}
ResRecs.append(ResRec)
if len(ResRecs)>0:
if data_model==3:
pmag.magic_write(ofile,ResRecs,'locations')
else:
pmag.magic_write(ofile,ResRecs,'pmag_results') | NAME
pt_rot.py
DESCRIPTION
rotates pt according to specified age and plate
SYNTAX
pt_rot.py [command line options]
OPTIONS
-h prints help and quits
-f file with lon lat plate age Dplate as space delimited input
            Dplate is the desired destination plate for the coordinates
- default is "fixed south africa"
Dplate should be one of: [nwaf, neaf,saf,aus, eur, ind, sam, ant, grn, nam]
-ff file Efile, file has lat lon data file and Efile has sequential rotation poles: Elat Elon Omega
-F OFILE, output sites (pmag_results) formatted file with rotated points stored in pole_lon, pole_lat (vgp_lon, vgp_lat). (data_model=2.5)
default is to print out rotated lon, lat to standard output
-dm [2.5,3] set data model for output. Default is 3 | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pt_rot.py#L12-L146 |
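A hedged sketch of the underlying rotation call used throughout the code above; the Euler pole (lat, lon, omega, per the comment in the source) and the point are invented:

import pmagpy.pmag as pmag

pole = [59.0, -73.0, 22.5]      # invented Euler pole: lat, lon, omega in degrees
lats, lons = [34.0], [18.0]     # invented point(s) to rotate
r_lats, r_lons = pmag.pt_rot(pole, lats, lons)
print(r_lons[0], r_lats[0])     # the script prints rotated lon, lat in this order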
PmagPy/PmagPy | pmagpy/validate_upload3.py | requiredUnless | def requiredUnless(col_name, arg, dm, df, *args):
"""
Arg is a string in the format "str1, str2, ..."
Each string will be a column name.
Col_name is required in df unless each column from arg is present.
"""
# if column name is present, no need to check if it is required
if col_name in df.columns:
return None
arg_list = arg.split(",")
arg_list = [argument.strip('"') for argument in arg_list]
msg = ""
for a in arg_list:
# ignore validations that reference a different table
if "." in a:
continue
if a not in df.columns:
msg += "{} column is required unless {} is present. ".format(col_name, a)
if msg:
return msg
else:
return None
return None | python | def requiredUnless(col_name, arg, dm, df, *args):
"""
Arg is a string in the format "str1, str2, ..."
Each string will be a column name.
Col_name is required in df unless each column from arg is present.
"""
# if column name is present, no need to check if it is required
if col_name in df.columns:
return None
arg_list = arg.split(",")
arg_list = [argument.strip('"') for argument in arg_list]
msg = ""
for a in arg_list:
# ignore validations that reference a different table
if "." in a:
continue
if a not in df.columns:
msg += "{} column is required unless {} is present. ".format(col_name, a)
if msg:
return msg
else:
return None
return None | Arg is a string in the format "str1, str2, ..."
Each string will be a column name.
Col_name is required in df unless each column from arg is present. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L15-L37 |
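A hedged illustration of the arg format (a comma-separated list of quoted column names); the column names are invented:

import pandas as pd
from pmagpy.validate_upload3 import requiredUnless

df = pd.DataFrame({'site': ['sv01'], 'lat': [42.1]})
print(requiredUnless('age', '"age_low"', None, df))
# -> 'age column is required unless age_low is present. '
print(requiredUnless('site', '"age_low"', None, df))   # column present -> None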
PmagPy/PmagPy | pmagpy/validate_upload3.py | requiredUnlessTable | def requiredUnlessTable(col_name, arg, dm, df, con=None):
"""
Col_name must be present in df unless
arg (table_name) is present in contribution
"""
table_name = arg
if col_name in df.columns:
return None
elif not con:
return None
elif table_name in con.tables:
return None
else:
return "{} column is required unless table {} is present".format(col_name, table_name) | python | def requiredUnlessTable(col_name, arg, dm, df, con=None):
"""
Col_name must be present in df unless
arg (table_name) is present in contribution
"""
table_name = arg
if col_name in df.columns:
return None
elif not con:
return None
elif table_name in con.tables:
return None
else:
return "{} column is required unless table {} is present".format(col_name, table_name) | Col_name must be present in df unless
arg (table_name) is present in contribution | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L40-L53 |
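A hedged illustration with SimpleNamespace standing in for a Contribution, since only con.tables is consulted; names are invented:

import pandas as pd
from types import SimpleNamespace
from pmagpy.validate_upload3 import requiredUnlessTable

df = pd.DataFrame({'specimen': ['spc01']})               # no 'sample' column
con_with = SimpleNamespace(tables={'samples': object()})
con_without = SimpleNamespace(tables={})
print(requiredUnlessTable('sample', 'samples', None, df, con_with))     # -> None
print(requiredUnlessTable('sample', 'samples', None, df, con_without))
# -> 'sample column is required unless table samples is present'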
PmagPy/PmagPy | pmagpy/validate_upload3.py | requiredIfGroup | def requiredIfGroup(col_name, arg, dm, df, *args):
"""
Col_name is required if other columns of
the group arg are present.
"""
group_name = arg
groups = set()
columns = df.columns
for col in columns:
if col not in dm.index:
continue
group = dm.loc[col]['group']
groups.add(group)
if group_name in groups:
if col_name in columns:
return None
else:
return "{} column is required if column group {} is used".format(col_name, group_name)
return None | python | def requiredIfGroup(col_name, arg, dm, df, *args):
"""
Col_name is required if other columns of
the group arg are present.
"""
group_name = arg
groups = set()
columns = df.columns
for col in columns:
if col not in dm.index:
continue
group = dm.loc[col]['group']
groups.add(group)
if group_name in groups:
if col_name in columns:
return None
else:
return "{} column is required if column group {} is used".format(col_name, group_name)
return None | Col_name is required if other columns of
the group arg are present. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L56-L74 |
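A hedged illustration of the expected dm argument, a DataFrame indexed by column name with a 'group' column; the group and column names are toys:

import pandas as pd
from pmagpy.validate_upload3 import requiredIfGroup

dm = pd.DataFrame({'group': ['Age', 'Age']}, index=['age', 'age_unit'])
df = pd.DataFrame({'age': [12.0]})       # the 'Age' group is in use, 'age_unit' is missing
print(requiredIfGroup('age_unit', 'Age', dm, df))
# -> 'age_unit column is required if column group Age is used'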
PmagPy/PmagPy | pmagpy/validate_upload3.py | required | def required(col_name, arg, dm, df, *args):
"""
Col_name is required in df.columns.
Return error message if not.
"""
if col_name in df.columns:
return None
else:
return '"{}" column is required'.format(col_name) | python | def required(col_name, arg, dm, df, *args):
"""
Col_name is required in df.columns.
Return error message if not.
"""
if col_name in df.columns:
return None
else:
return '"{}" column is required'.format(col_name) | Col_name is required in df.columns.
Return error message if not. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L77-L85 |
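A minimal hedged illustration; the dm and arg parameters are ignored by this check and the column names are invented:

import pandas as pd
from pmagpy.validate_upload3 import required

df = pd.DataFrame({'specimen': ['spc01']})
print(required('sample', None, None, df))    # -> '"sample" column is required'
print(required('specimen', None, None, df))  # -> None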
PmagPy/PmagPy | pmagpy/validate_upload3.py | isIn | def isIn(row, col_name, arg, dm, df, con=None):
"""
row[col_name] must contain a value from another column.
If not, return error message.
"""
#grade = df.apply(func, args=(validation_name, arg, dm), axis=1)
cell_value = row[col_name]
cell_value = str(cell_value)
if not cell_value:
return None
elif cell_value == 'None':
return None
elif cell_value == 'nan':
return None
elif not con:
return None
# if it's in another table
cell_values = [v.strip(" ") for v in cell_value.split(":")]
if "." in arg:
table_name, table_col_name = arg.split(".")
if table_name not in con.tables:
return None
#return "Must contain a value from {} table. Missing {} table.".format(table_name, table_name)
if table_col_name not in con.tables[table_name].df.columns:
return '{} table is missing "{}" column, which is required for validating "{}" column'.format(table_name, table_col_name, col_name)
possible_values = con.tables[table_name].df[table_col_name].unique()
for value in cell_values:
if value not in possible_values:
trunc_possible_values = [val.replace(' ', '') for val in possible_values if val]
trunc_cell_value = cell_value.replace(' ', '')
if trunc_cell_value not in trunc_possible_values:
if trunc_cell_value != value:
return 'This value (long): "{}" is not found in: {} column in {} table. Also (short): {} is not in {}'.format(value, table_col_name, table_name, trunc_cell_value, arg)
else:
return 'This value: "{}" is not found in: {} column in {} table'.format(value, table_col_name, table_name)
break
# if it's in the present table:
else:
possible_values = df[arg].unique()
for value in cell_values:
if value not in possible_values:
return 'This value: "{}" is not found in: {} column'.format(value, arg)
break
return None | python | def isIn(row, col_name, arg, dm, df, con=None):
"""
row[col_name] must contain a value from another column.
If not, return error message.
"""
#grade = df.apply(func, args=(validation_name, arg, dm), axis=1)
cell_value = row[col_name]
cell_value = str(cell_value)
if not cell_value:
return None
elif cell_value == 'None':
return None
elif cell_value == 'nan':
return None
elif not con:
return None
# if it's in another table
cell_values = [v.strip(" ") for v in cell_value.split(":")]
if "." in arg:
table_name, table_col_name = arg.split(".")
if table_name not in con.tables:
return None
#return "Must contain a value from {} table. Missing {} table.".format(table_name, table_name)
if table_col_name not in con.tables[table_name].df.columns:
return '{} table is missing "{}" column, which is required for validating "{}" column'.format(table_name, table_col_name, col_name)
possible_values = con.tables[table_name].df[table_col_name].unique()
for value in cell_values:
if value not in possible_values:
trunc_possible_values = [val.replace(' ', '') for val in possible_values if val]
trunc_cell_value = cell_value.replace(' ', '')
if trunc_cell_value not in trunc_possible_values:
if trunc_cell_value != value:
return 'This value (long): "{}" is not found in: {} column in {} table. Also (short): {} is not in {}'.format(value, table_col_name, table_name, trunc_cell_value, arg)
else:
return 'This value: "{}" is not found in: {} column in {} table'.format(value, table_col_name, table_name)
break
# if it's in the present table:
else:
possible_values = df[arg].unique()
for value in cell_values:
if value not in possible_values:
return 'This value: "{}" is not found in: {} column'.format(value, arg)
break
return None | row[col_name] must contain a value from another column.
If not, return error message. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L87-L130 |
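A hedged illustration of the cross-table form of arg ('table.column'), with SimpleNamespace objects standing in for the Contribution and its table wrapper; all names and values are invented:

import pandas as pd
from types import SimpleNamespace
from pmagpy.validate_upload3 import isIn

samples = SimpleNamespace(df=pd.DataFrame({'sample': ['sv01a', 'sv01b']}))
con = SimpleNamespace(tables={'samples': samples})
specimens = pd.DataFrame({'specimen': ['sv01c1'], 'sample': ['sv01c']})
row = specimens.iloc[0]
print(isIn(row, 'sample', 'samples.sample', None, specimens, con))
# -> 'This value: "sv01c" is not found in: sample column in samples table'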
PmagPy/PmagPy | pmagpy/validate_upload3.py | checkMax | def checkMax(row, col_name, arg, *args):
"""
row[col_name] must be less than or equal to arg.
else, return error message.
"""
cell_value = row[col_name]
if not cell_value:
return None
elif isinstance(cell_value, float):
if np.isnan(cell_value):
return None
try:
arg_val = float(arg)
except ValueError:
if arg in row.index:
arg_val = row[arg]
else:
return None
if cb.is_null(arg_val):
return None
#arg = float(arg)
try:
if float(cell_value) <= float(arg_val):
return None
else:
return "{} ({}) must be <= {} ({})".format(str(cell_value), col_name, str(arg_val), str(arg))
# this happens when the value isn't a float (an error which will be caught elsewhere)
except ValueError:
return None | python | def checkMax(row, col_name, arg, *args):
"""
row[col_name] must be less than or equal to arg.
else, return error message.
"""
cell_value = row[col_name]
if not cell_value:
return None
elif isinstance(cell_value, float):
if np.isnan(cell_value):
return None
try:
arg_val = float(arg)
except ValueError:
if arg in row.index:
arg_val = row[arg]
else:
return None
if cb.is_null(arg_val):
return None
#arg = float(arg)
try:
if float(cell_value) <= float(arg_val):
return None
else:
return "{} ({}) must be <= {} ({})".format(str(cell_value), col_name, str(arg_val), str(arg))
# this happens when the value isn't a float (an error which will be caught elsewhere)
except ValueError:
return None | row[col_name] must be less than or equal to arg.
else, return error message. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L132-L160 |
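A hedged illustration of the two forms arg can take, a numeric literal or the name of another column in the same row; the column names and values are invented:

import pandas as pd
from pmagpy.validate_upload3 import checkMax

row = pd.Series({'meas_step_min': 42.0, 'meas_step_max': 40.0})
print(checkMax(row, 'meas_step_min', 'meas_step_max'))
# -> '42.0 (meas_step_min) must be <= 40.0 (meas_step_max)'
print(checkMax(row, 'meas_step_min', '100'))   # numeric limit, satisfied -> None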
PmagPy/PmagPy | pmagpy/validate_upload3.py | cv | def cv(row, col_name, arg, current_data_model, df, con):
"""
row[col_name] must contain only values from the appropriate controlled vocabulary
"""
vocabulary = con.vocab.vocabularies
cell_value = str(row[col_name])
if not cell_value:
return None
elif cell_value == "None":
return None
cell_values = cell_value.split(":")
cell_values = [c.strip() for c in cell_values]
# get possible values for controlled vocabulary
# exclude weird unicode
possible_values = []
for val in vocabulary[col_name]:
try:
possible_values.append(str(val).lower())
except UnicodeEncodeError as ex:
print(val, ex)
for value in cell_values:
if str(value).lower() == "nan":
continue
elif str(value).lower() in possible_values:
continue
elif value.lower() == "none":
continue
else:
try:
if str(float(value)) in possible_values:
continue
except:
pass
return '"{}" is not in controlled vocabulary for {}'.format(value, arg)
return None | python | def cv(row, col_name, arg, current_data_model, df, con):
"""
row[col_name] must contain only values from the appropriate controlled vocabulary
"""
vocabulary = con.vocab.vocabularies
cell_value = str(row[col_name])
if not cell_value:
return None
elif cell_value == "None":
return None
cell_values = cell_value.split(":")
cell_values = [c.strip() for c in cell_values]
# get possible values for controlled vocabulary
# exclude weird unicode
possible_values = []
for val in vocabulary[col_name]:
try:
possible_values.append(str(val).lower())
except UnicodeEncodeError as ex:
print(val, ex)
for value in cell_values:
if str(value).lower() == "nan":
continue
elif str(value).lower() in possible_values:
continue
elif value.lower() == "none":
continue
else:
try:
if str(float(value)) in possible_values:
continue
except:
pass
return '"{}" is not in controlled vocabulary for {}'.format(value, arg)
return None | row[col_name] must contain only values from the appropriate controlled vocabulary | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L191-L225 |
PmagPy/PmagPy | pmagpy/validate_upload3.py | requiredOneInGroup | def requiredOneInGroup(col_name, group, dm, df, *args):
"""
If col_name is present in df, the group validation is satisfied.
If not, it still may be satisfied, but not by THIS col_name.
If col_name is missing, return col_name, else return None.
Later, we will validate to see if there is at least one None (non-missing)
value for this group.
"""
if col_name in df.columns:
# if the column name is present, return nothing
return None
else:
# if the column name is missing, return column name
return col_name | python | def requiredOneInGroup(col_name, group, dm, df, *args):
"""
If col_name is present in df, the group validation is satisfied.
If not, it still may be satisfied, but not by THIS col_name.
If col_name is missing, return col_name, else return None.
Later, we will validate to see if there is at least one None (non-missing)
value for this group.
"""
if col_name in df.columns:
# if the column name is present, return nothing
return None
else:
# if the column name is missing, return column name
return col_name | If col_name is present in df, the group validation is satisfied.
If not, it still may be satisfied, but not by THIS col_name.
If col_name is missing, return col_name, else return None.
Later, we will validate to see if there is at least one None (non-missing)
value for this group. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L228-L241 |
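A quick illustration with made-up column names; the data-model argument is not used by this check, so None stands in for it:

import pandas as pd
from pmagpy.validate_upload3 import requiredOneInGroup

df = pd.DataFrame({'lat': [45.0], 'lon': [130.0]})
print(requiredOneInGroup('lat', 'geography', None, df))        # None -> group satisfied by this column
print(requiredOneInGroup('elevation', 'geography', None, df))  # 'elevation' -> this column is missing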
PmagPy/PmagPy | pmagpy/validate_upload3.py | validate_df | def validate_df(df, dm, con=None):
"""
Take in a DataFrame and corresponding data model.
Run all validations for that DataFrame.
Output is the original DataFrame with some new columns
that contain the validation output.
Validation columns start with:
presence_pass_ (checking that req'd columns are present)
type_pass_ (checking that the data is of the correct type)
value_pass_ (checking that the value is within the appropriate range)
group_pass_ (making sure that group validations pass)
"""
# check column validity
required_one = {} # keep track of req'd one in group validations here
cols = df.columns
invalid_cols = [col for col in cols if col not in dm.index]
# go through and run all validations for the data type
for validation_name, validation in dm.iterrows():
value_type = validation['type']
if validation_name in df.columns:
output = df[validation_name].apply(test_type, args=(value_type,))
df["type_pass" + "_" + validation_name + "_" + value_type] = output
#
val_list = validation['validations']
if not val_list or isinstance(val_list, float):
continue
for num, val in enumerate(val_list):
func_name, arg = split_func(val)
if arg == "magic_table_column":
continue
# first validate for presence
if func_name in presence_operations:
func = presence_operations[func_name]
#grade = func(validation_name, df, arg, dm)
grade = func(validation_name, arg, dm, df, con)
pass_col_name = "presence_pass_" + validation_name + "_" + func.__name__
df[pass_col_name] = grade
# then validate for correct values
elif func_name in value_operations:
func = value_operations[func_name]
if validation_name in df.columns:
grade = df.apply(func, args=(validation_name, arg, dm, df, con), axis=1)
col_name = "value_pass_" + validation_name + "_" + func.__name__
if col_name in df.columns:
num_range = list(range(1, 10))
for num in num_range:
if (col_name + str(num)) in df.columns:
continue
else:
col_name = col_name + str(num)
break
df[col_name] = grade.astype(object)
# last, validate at the column group level
elif func_name in group_operations:
func = group_operations[func_name]
missing = func(validation_name, arg, dm, df)
if arg not in required_one:
required_one[arg] = [missing]
else:
required_one[arg].append(missing)
# format the group validation columns
for key, value in list(required_one.items()):
if None in value:
# this means at least one value from the required group is present,
# so the validation passes
continue
else:
# otherwise, all of the values from the required group are missing,
# so the validation fails
df["group_pass_{}".format(key)] = "you must have one column from group {}: {}".format(key, ", ".join(value))
return df | python | def validate_df(df, dm, con=None):
"""
Take in a DataFrame and corresponding data model.
Run all validations for that DataFrame.
Output is the original DataFrame with some new columns
that contain the validation output.
Validation columns start with:
presence_pass_ (checking that req'd columns are present)
type_pass_ (checking that the data is of the correct type)
value_pass_ (checking that the value is within the appropriate range)
group_pass_ (making sure that group validations pass)
"""
# check column validity
required_one = {} # keep track of req'd one in group validations here
cols = df.columns
invalid_cols = [col for col in cols if col not in dm.index]
# go through and run all validations for the data type
for validation_name, validation in dm.iterrows():
value_type = validation['type']
if validation_name in df.columns:
output = df[validation_name].apply(test_type, args=(value_type,))
df["type_pass" + "_" + validation_name + "_" + value_type] = output
#
val_list = validation['validations']
if not val_list or isinstance(val_list, float):
continue
for num, val in enumerate(val_list):
func_name, arg = split_func(val)
if arg == "magic_table_column":
continue
# first validate for presence
if func_name in presence_operations:
func = presence_operations[func_name]
#grade = func(validation_name, df, arg, dm)
grade = func(validation_name, arg, dm, df, con)
pass_col_name = "presence_pass_" + validation_name + "_" + func.__name__
df[pass_col_name] = grade
# then validate for correct values
elif func_name in value_operations:
func = value_operations[func_name]
if validation_name in df.columns:
grade = df.apply(func, args=(validation_name, arg, dm, df, con), axis=1)
col_name = "value_pass_" + validation_name + "_" + func.__name__
if col_name in df.columns:
num_range = list(range(1, 10))
for num in num_range:
if (col_name + str(num)) in df.columns:
continue
else:
col_name = col_name + str(num)
break
df[col_name] = grade.astype(object)
# last, validate at the column group level
elif func_name in group_operations:
func = group_operations[func_name]
missing = func(validation_name, arg, dm, df)
if arg not in required_one:
required_one[arg] = [missing]
else:
required_one[arg].append(missing)
# format the group validation columns
for key, value in list(required_one.items()):
if None in value:
# this means at least one value from the required group is present,
# so the validation passes
continue
else:
# otherwise, all of the values from the required group are missing,
# so the validation fails
df["group_pass_{}".format(key)] = "you must have one column from group {}: {}".format(key, ", ".join(value))
return df | Take in a DataFrame and corresponding data model.
Run all validations for that DataFrame.
Output is the original DataFrame with some new columns
that contain the validation output.
Validation columns start with:
presence_pass_ (checking that req'd columns are present)
type_pass_ (checking that the data is of the correct type)
value_pass_ (checking that the value is within the appropriate range)
group_pass_ (making sure that group validations pass) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L298-L369 |
PmagPy/PmagPy | pmagpy/validate_upload3.py | get_validation_col_names | def get_validation_col_names(df):
"""
Input: validated pandas DataFrame (using validate_df)
Output: names of all value validation columns,
names of all presence validation columns,
names of all type validation columns,
names of all missing group columns,
names of all validation columns (excluding groups).
"""
value_cols = df.columns.str.match("^value_pass_")
present_cols = df.columns.str.match("^presence_pass")
type_cols = df.columns.str.match("^type_pass_")
groups_missing = df.columns.str.match("^group_pass_")
#
value_col_names = df.columns[value_cols]
present_col_names = df.columns[present_cols]
type_col_names = df.columns[type_cols]
group_missing_names = df.columns[groups_missing]
#
# all validation columns
validation_cols = np.where(value_cols, value_cols, present_cols)
validation_cols = np.where(validation_cols, validation_cols, type_cols)
validation_col_names = df.columns[validation_cols]
return value_col_names, present_col_names, type_col_names, group_missing_names, validation_col_names | python | def get_validation_col_names(df):
"""
Input: validated pandas DataFrame (using validate_df)
Output: names of all value validation columns,
names of all presence validation columns,
names of all type validation columns,
names of all missing group columns,
names of all validation columns (excluding groups).
"""
value_cols = df.columns.str.match("^value_pass_")
present_cols = df.columns.str.match("^presence_pass")
type_cols = df.columns.str.match("^type_pass_")
groups_missing = df.columns.str.match("^group_pass_")
#
value_col_names = df.columns[value_cols]
present_col_names = df.columns[present_cols]
type_col_names = df.columns[type_cols]
group_missing_names = df.columns[groups_missing]
#
# all validation columns
validation_cols = np.where(value_cols, value_cols, present_cols)
validation_cols = np.where(validation_cols, validation_cols, type_cols)
validation_col_names = df.columns[validation_cols]
return value_col_names, present_col_names, type_col_names, group_missing_names, validation_col_names | Input: validated pandas DataFrame (using validate_df)
Output: names of all value validation columns,
names of all presence validation columns,
names of all type validation columns,
names of all missing group columns,
names of all validation columns (excluding groups). | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L374-L397 |
PmagPy/PmagPy | pmagpy/validate_upload3.py | print_row_failures | def print_row_failures(failing_items, verbose=False, outfile_name=None):
"""
Take output from get_row_failures (DataFrame), and output it to
stdout, an outfile, or both.
"""
if outfile_name:
outfile = open(outfile_name, "w")
outfile.write("\t".join(["name", "row_number", "problem_type",
"problem_col", "error_message"]))
outfile.write("\n")
else:
outfile = None
for ind, row in failing_items.iterrows():
issues = row['issues']
string = "{:10} | row number: {}".format(ind, str(row["num"]))
first_string = "\t".join([str(ind), str(row["num"])])
if verbose:
print(first_string)
#if outfile:
# ofile.write("{}\n".format(string))
for key, issue in list(issues.items()):
issue_type, issue_col = extract_col_name(key)
string = "{:10} | {:10} | {}".format(issue_type, issue_col, issue)
string = "\t".join([issue_type, issue_col, issue])
if verbose:
print(string)
if outfile:
outfile.write(first_string + "\t" + string + "\n")
if outfile:
outfile.close() | python | def print_row_failures(failing_items, verbose=False, outfile_name=None):
"""
Take output from get_row_failures (DataFrame), and output it to
stdout, an outfile, or both.
"""
if outfile_name:
outfile = open(outfile_name, "w")
outfile.write("\t".join(["name", "row_number", "problem_type",
"problem_col", "error_message"]))
outfile.write("\n")
else:
outfile = None
for ind, row in failing_items.iterrows():
issues = row['issues']
string = "{:10} | row number: {}".format(ind, str(row["num"]))
first_string = "\t".join([str(ind), str(row["num"])])
if verbose:
print(first_string)
#if outfile:
# ofile.write("{}\n".format(string))
for key, issue in list(issues.items()):
issue_type, issue_col = extract_col_name(key)
string = "{:10} | {:10} | {}".format(issue_type, issue_col, issue)
string = "\t".join([issue_type, issue_col, issue])
if verbose:
print(string)
if outfile:
outfile.write(first_string + "\t" + string + "\n")
if outfile:
outfile.close() | Take output from get_row_failures (DataFrame), and output it to
stdout, an outfile, or both. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L400-L429 |
PmagPy/PmagPy | pmagpy/validate_upload3.py | get_row_failures | def get_row_failures(df, value_cols, type_cols, verbose=False, outfile=None):
"""
Input: already validated DataFrame, value & type column names,
and output options.
Get details on each detected issue, row by row.
Output: DataFrame with type & value validation columns,
plus an "issues" column with a dictionary of every problem
for that row.
"""
# set temporary numeric index
df["num"] = list(range(len(df)))
# get column names for value & type validations
names = value_cols.union(type_cols)
# drop all non validation columns
value_problems = df[names.union(["num"])]
# drop validation columns that contain no problems
failing_items = value_problems.dropna(how="all", subset=names)
if not len(failing_items):
if verbose:
print("No problems")
return []
failing_items = failing_items.dropna(how="all", axis=1)
# get names of the failing items
bad_items = list(failing_items.index)
# get index numbers of the failing items
bad_indices = list(failing_items["num"])
failing_items['issues'] = failing_items.drop("num", axis=1).apply(make_row_dict, axis=1).values
# take output and print/write to file
print_row_failures(failing_items, verbose, outfile)
return failing_items | python | def get_row_failures(df, value_cols, type_cols, verbose=False, outfile=None):
"""
Input: already validated DataFrame, value & type column names,
and output options.
Get details on each detected issue, row by row.
Output: DataFrame with type & value validation columns,
plus an "issues" column with a dictionary of every problem
for that row.
"""
# set temporary numeric index
df["num"] = list(range(len(df)))
# get column names for value & type validations
names = value_cols.union(type_cols)
# drop all non validation columns
value_problems = df[names.union(["num"])]
# drop validation columns that contain no problems
failing_items = value_problems.dropna(how="all", subset=names)
if not len(failing_items):
if verbose:
print("No problems")
return []
failing_items = failing_items.dropna(how="all", axis=1)
# get names of the failing items
bad_items = list(failing_items.index)
# get index numbers of the failing items
bad_indices = list(failing_items["num"])
failing_items['issues'] = failing_items.drop("num", axis=1).apply(make_row_dict, axis=1).values
# take output and print/write to file
print_row_failures(failing_items, verbose, outfile)
return failing_items | Input: already validated DataFrame, value & type column names,
and output options.
Get details on each detected issue, row by row.
Output: DataFrame with type & value validation columns,
plus an "issues" column with a dictionary of every problem
for that row. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L432-L461 |
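A hedged sketch of how these helpers chain together; checked_df is assumed to be a DataFrame already processed by validate_df, and the output file name is hypothetical:

from pmagpy.validate_upload3 import get_validation_col_names, get_row_failures

value_cols, presence_cols, type_cols, group_cols, all_cols = get_validation_col_names(checked_df)
failures = get_row_failures(checked_df, value_cols, type_cols,
                            verbose=True, outfile='sites_errors.txt')
# each row of 'failures' carries an 'issues' dict mapping validation column -> error message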
PmagPy/PmagPy | pmagpy/validate_upload3.py | get_bad_rows_and_cols | def get_bad_rows_and_cols(df, validation_names, type_col_names,
value_col_names, verbose=False):
"""
Input: validated DataFrame, all validation names, names of the type columns,
names of the value columns, verbose (True or False).
Output: list of rows with bad values, list of columns with bad values,
list of missing (but required) columns.
"""
df["num"] = list(range(len(df)))
problems = df[validation_names.union(["num"])]
all_problems = problems.dropna(how='all', axis=0, subset=validation_names)
value_problems = problems.dropna(how='all', axis=0, subset=type_col_names.union(value_col_names))
all_problems = all_problems.dropna(how='all', axis=1)
value_problems = value_problems.dropna(how='all', axis=1)
if not len(problems):
return None, None, None
#
bad_cols = all_problems.columns
prefixes = ["value_pass_", "type_pass_"]
missing_prefix = "presence_pass_"
problem_cols = []
missing_cols = []
long_missing_cols = []
problem_rows = []
for col in bad_cols:
pre, stripped_col = extract_col_name(col)
for prefix in prefixes:
if col.startswith(prefix):
problem_cols.append(stripped_col)
continue
if col.startswith(missing_prefix):
missing_cols.append(stripped_col)
long_missing_cols.append(col)
if len(value_problems):
bad_rows = list(zip(list(value_problems["num"]), list(value_problems.index)))
else:
bad_rows = []
if verbose:
if bad_rows:
formatted_rows = ["row: {}, name: {}".format(row[0], row[1]) for row in bad_rows]
if len(bad_rows) > 5:
print("-W- these rows have problems:\n", "\n".join(formatted_rows[:5]), " ...")
print("(for full error output see error file)")
else:
print("-W- these rows have problems:", "\n".join(formatted_rows))
if problem_cols:
print("-W- these columns contain bad values:", ", ".join(set(problem_cols)))
if missing_cols:
print("-W- these required columns are missing:", ", ".join(missing_cols))
return bad_rows, problem_cols, missing_cols | python | def get_bad_rows_and_cols(df, validation_names, type_col_names,
value_col_names, verbose=False):
"""
Input: validated DataFrame, all validation names, names of the type columns,
names of the value columns, verbose (True or False).
Output: list of rows with bad values, list of columns with bad values,
list of missing (but required) columns.
"""
df["num"] = list(range(len(df)))
problems = df[validation_names.union(["num"])]
all_problems = problems.dropna(how='all', axis=0, subset=validation_names)
value_problems = problems.dropna(how='all', axis=0, subset=type_col_names.union(value_col_names))
all_problems = all_problems.dropna(how='all', axis=1)
value_problems = value_problems.dropna(how='all', axis=1)
if not len(problems):
return None, None, None
#
bad_cols = all_problems.columns
prefixes = ["value_pass_", "type_pass_"]
missing_prefix = "presence_pass_"
problem_cols = []
missing_cols = []
long_missing_cols = []
problem_rows = []
for col in bad_cols:
pre, stripped_col = extract_col_name(col)
for prefix in prefixes:
if col.startswith(prefix):
problem_cols.append(stripped_col)
continue
if col.startswith(missing_prefix):
missing_cols.append(stripped_col)
long_missing_cols.append(col)
if len(value_problems):
bad_rows = list(zip(list(value_problems["num"]), list(value_problems.index)))
else:
bad_rows = []
if verbose:
if bad_rows:
formatted_rows = ["row: {}, name: {}".format(row[0], row[1]) for row in bad_rows]
if len(bad_rows) > 5:
print("-W- these rows have problems:\n", "\n".join(formatted_rows[:5]), " ...")
print("(for full error output see error file)")
else:
print("-W- these rows have problems:", "\n".join(formatted_rows))
if problem_cols:
print("-W- these columns contain bad values:", ", ".join(set(problem_cols)))
if missing_cols:
print("-W- these required columns are missing:", ", ".join(missing_cols))
return bad_rows, problem_cols, missing_cols | Input: validated DataFrame, all validation names, names of the type columns,
names of the value columns, verbose (True or False).
Output: list of rows with bad values, list of columns with bad values,
list of missing (but required) columns. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L464-L513 |
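Continuing the sketch above, the summary view of the same problems (keyword arguments make the expected argument order explicit):

from pmagpy.validate_upload3 import get_bad_rows_and_cols

bad_rows, bad_cols, missing_cols = get_bad_rows_and_cols(
    checked_df, validation_names=all_cols,
    type_col_names=type_cols, value_col_names=value_cols, verbose=True)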
PmagPy/PmagPy | pmagpy/validate_upload3.py | validate_table | def validate_table(the_con, dtype, verbose=False, output_dir="."):
"""
Return name of bad table, or False if no errors found.
Calls validate_df then parses its output.
"""
print("-I- Validating {}".format(dtype))
# grab dataframe
current_df = the_con.tables[dtype].df
# grab data model
current_dm = the_con.tables[dtype].data_model.dm[dtype]
# run all validations (will add columns to current_df)
current_df = validate_df(current_df, current_dm, the_con)
# get names of the added columns
value_col_names, present_col_names, type_col_names, missing_groups, validation_col_names = get_validation_col_names(current_df)
# print out failure messages
ofile = os.path.join(output_dir, "{}_errors.txt".format(dtype))
failing_items = get_row_failures(current_df, value_col_names,
type_col_names, verbose, outfile=ofile)
bad_rows, bad_cols, missing_cols = get_bad_rows_and_cols(current_df, validation_col_names,
value_col_names, type_col_names,
verbose=True)
# delete all validation rows
current_df.drop(validation_col_names, axis=1, inplace=True)
current_df.drop(missing_groups, axis=1, inplace=True)
if len(failing_items):
print("-I- Complete list of row errors can be found in {}".format(ofile))
return dtype, bad_rows, bad_cols, missing_cols, missing_groups, failing_items
elif len(missing_cols) or len(missing_groups):
print("-I- You are missing some required headers")
if len(missing_cols):
print("-I- You are missing these required headers: {}".format(", ".join(missing_cols)))
if len(missing_groups):
formatted_groups = [group[11:] for group in missing_groups]
print('-I- You need at least one header from these groups: {}'.format(", ".join(formatted_groups)))
else:
formatted_groups = []
return dtype, bad_rows, bad_cols, missing_cols, formatted_groups, failing_items
else:
print("-I- No row errors found!")
return False | python | def validate_table(the_con, dtype, verbose=False, output_dir="."):
"""
Return name of bad table, or False if no errors found.
Calls validate_df then parses its output.
"""
print("-I- Validating {}".format(dtype))
# grab dataframe
current_df = the_con.tables[dtype].df
# grab data model
current_dm = the_con.tables[dtype].data_model.dm[dtype]
# run all validations (will add columns to current_df)
current_df = validate_df(current_df, current_dm, the_con)
# get names of the added columns
value_col_names, present_col_names, type_col_names, missing_groups, validation_col_names = get_validation_col_names(current_df)
# print out failure messages
ofile = os.path.join(output_dir, "{}_errors.txt".format(dtype))
failing_items = get_row_failures(current_df, value_col_names,
type_col_names, verbose, outfile=ofile)
bad_rows, bad_cols, missing_cols = get_bad_rows_and_cols(current_df, validation_col_names,
value_col_names, type_col_names,
verbose=True)
# delete all validation rows
current_df.drop(validation_col_names, axis=1, inplace=True)
current_df.drop(missing_groups, axis=1, inplace=True)
if len(failing_items):
print("-I- Complete list of row errors can be found in {}".format(ofile))
return dtype, bad_rows, bad_cols, missing_cols, missing_groups, failing_items
elif len(missing_cols) or len(missing_groups):
print("-I- You are missing some required headers")
if len(missing_cols):
print("-I- You are missing these required headers: {}".format(", ".join(missing_cols)))
if len(missing_groups):
formatted_groups = [group[11:] for group in missing_groups]
print('-I- You need at least one header from these groups: {}'.format(", ".join(formatted_groups)))
else:
formatted_groups = []
return dtype, bad_rows, bad_cols, missing_cols, formatted_groups, failing_items
else:
print("-I- No row errors found!")
return False | Return name of bad table, or False if no errors found.
Calls validate_df then parses its output. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L518-L557 |
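A rough end-to-end sketch, assuming a Contribution built from a directory of MagIC-format text files (directory name hypothetical) exposes the tables, data model, and controlled vocabularies these checks rely on, and that it contains a sites table:

from pmagpy import contribution_builder as cb
from pmagpy.validate_upload3 import validate_table

con = cb.Contribution('my_magic_dir')   # hypothetical directory of MagIC files
result = validate_table(con, 'sites', verbose=True, output_dir='.')
if result:
    dtype, bad_rows, bad_cols, missing_cols, missing_groups, failing_items = result
else:
    print('no problems found in the sites table')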
PmagPy/PmagPy | pmagpy/validate_upload3.py | validate_contribution | def validate_contribution(the_con):
"""
Go through a Contribution and validate each table
"""
passing = True
for dtype in list(the_con.tables.keys()):
print("validating {}".format(dtype))
fail = validate_table(the_con, dtype)
if fail:
passing = False
print('--') | python | def validate_contribution(the_con):
"""
Go through a Contribution and validate each table
"""
passing = True
for dtype in list(the_con.tables.keys()):
print("validating {}".format(dtype))
fail = validate_table(the_con, dtype)
if fail:
passing = False
print('--') | Go through a Contribution and validate each table | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L562-L572 |
PmagPy/PmagPy | pmagpy/validate_upload3.py | split_func | def split_func(string):
"""
Take a string like 'requiredIf("arg_name")'
return the function name and the argument:
(requiredIf, arg_name)
"""
ind = string.index("(")
return string[:ind], string[ind+1:-1].strip('"') | python | def split_func(string):
"""
Take a string like 'requiredIf("arg_name")'
return the function name and the argument:
(requiredIf, arg_name)
"""
ind = string.index("(")
return string[:ind], string[ind+1:-1].strip('"') | Take a string like 'requiredIf("arg_name")'
return the function name and the argument:
(requiredIf, arg_name) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L578-L585 |
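Two concrete calls, using validation strings of the form shown in the docstring (the argument names are illustrative):

from pmagpy.validate_upload3 import split_func

print(split_func('requiredIf("magnetization")'))  # ('requiredIf', 'magnetization')
print(split_func('max("meas_step_max")'))         # ('max', 'meas_step_max')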
PmagPy/PmagPy | pmagpy/validate_upload3.py | get_degree_cols | def get_degree_cols(df):
"""
Take in a pandas DataFrame, and return a list of columns
that are in that DataFrame AND should be between 0 - 360 degrees.
"""
vals = ['lon_w', 'lon_e', 'lat_lon_precision', 'pole_lon',
'paleolon', 'paleolon_sigma',
'lon', 'lon_sigma', 'vgp_lon', 'paleo_lon', 'paleo_lon_sigma',
'azimuth', 'azimuth_dec_correction', 'dir_dec',
'geographic_precision', 'bed_dip_direction']
relevant_cols = list(set(vals).intersection(df.columns))
return relevant_cols | python | def get_degree_cols(df):
"""
Take in a pandas DataFrame, and return a list of columns
that are in that DataFrame AND should be between 0 - 360 degrees.
"""
vals = ['lon_w', 'lon_e', 'lat_lon_precision', 'pole_lon',
'paleolon', 'paleolon_sigma',
'lon', 'lon_sigma', 'vgp_lon', 'paleo_lon', 'paleo_lon_sigma',
'azimuth', 'azimuth_dec_correction', 'dir_dec',
'geographic_precision', 'bed_dip_direction']
relevant_cols = list(set(vals).intersection(df.columns))
return relevant_cols | Take in a pandas DataFrame, and return a list of columns
that are in that DataFrame AND should be between 0 - 360 degrees. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L588-L599 |
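A small check; note that the intersection is computed on a set, so the order of the returned list is not guaranteed:

import pandas as pd
from pmagpy.validate_upload3 import get_degree_cols

df = pd.DataFrame(columns=['lat', 'lon', 'dir_dec', 'dir_inc'])
print(sorted(get_degree_cols(df)))   # ['dir_dec', 'lon']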
PmagPy/PmagPy | pmagpy/validate_upload3.py | extract_col_name | def extract_col_name(string):
"""
Take a string and split it.
String will be a format like "presence_pass_azimuth",
where "azimuth" is the MagIC column name and "presence_pass"
is the validation.
Return "presence", "azimuth".
"""
prefixes = ["presence_pass_", "value_pass_", "type_pass_"]
end = string.rfind("_")
for prefix in prefixes:
if string.startswith(prefix):
return prefix[:-6], string[len(prefix):end]
return string, string | python | def extract_col_name(string):
"""
Take a string and split it.
String will be a format like "presence_pass_azimuth",
where "azimuth" is the MagIC column name and "presence_pass"
is the validation.
Return "presence", "azimuth".
"""
prefixes = ["presence_pass_", "value_pass_", "type_pass_"]
end = string.rfind("_")
for prefix in prefixes:
if string.startswith(prefix):
return prefix[:-6], string[len(prefix):end]
return string, string | Take a string and split it.
String will be a format like "presence_pass_azimuth",
where "azimuth" is the MagIC column name and "presence_pass"
is the validation.
Return "presence", "azimuth". | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L602-L615 |
PmagPy/PmagPy | pmagpy/validate_upload3.py | make_row_dict | def make_row_dict(row):
"""
Takes in a DataFrame row (Series),
and returns a dictionary with the row's index entries as keys,
and the row's values as values.
{col1_name: col1_value, col2_name: col2_value}
"""
ind = row[row.notnull()].index
values = row[row.notnull()].values
# do transformation with extract_col_name here???
return dict(list(zip(ind, values))) | python | def make_row_dict(row):
"""
Takes in a DataFrame row (Series),
and returns a dictionary with the row's index entries as keys,
and the row's values as values.
{col1_name: col1_value, col2_name: col2_value}
"""
ind = row[row.notnull()].index
values = row[row.notnull()].values
# do transformation with extract_col_name here???
return dict(list(zip(ind, values))) | Takes in a DataFrame row (Series),
and returns a dictionary with the row's index entries as keys,
and the row's values as values.
{col1_name: col1_value, col2_name: col2_value} | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/validate_upload3.py#L618-L628 |
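A toy Series (names invented) showing that null entries are dropped:

import pandas as pd
from pmagpy.validate_upload3 import make_row_dict

row = pd.Series({'value_pass_dir_dec_checkMax1': 'dec out of range',
                 'type_pass_azimuth_Number': None})
print(make_row_dict(row))   # {'value_pass_dir_dec_checkMax1': 'dec out of range'}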
PmagPy/PmagPy | programs/eq_di.py | main | def main():
"""
NAME
eq_di.py
DESCRIPTION
converts x,y pairs digitized from equal area projection to dec inc data
SYNTAX
eq_di.py [command line options] [< filename]
OPTIONS
-f FILE, input file
-F FILE, specifies output file name
-up if data are upper hemisphere
"""
out=""
UP=0
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
dat=[]
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
input=f.readlines()
else:
input = sys.stdin.readlines() # read from standard input
# NEW
ofile = ""
if '-F' in sys.argv:
ind=sys.argv.index('-F')
ofile=sys.argv[ind+1]
out=open(ofile, 'w')
# end NEW
if '-up' in sys.argv: UP=1
for line in input:
rec=line.split()
x,y=float(rec[1]),float(rec[0]) # swap x,y cartesian for x,y geographic
#d,i=pmag.doeqdi(x,y)
r=math.sqrt(x**2+y**2)
z=1.-r**2
t=math.asin(z)
if UP==1:t=-t
if x==0.:
if y<0:
p=3.*math.pi/2.
else:
p=old_div(math.pi,2.)
else:
p=math.atan2(y,x)
d,i=p*180./math.pi,t*180./math.pi
if d<0:d+=360.
# new
outstring = '%7.1f %7.1f'%(d,i)
if ofile == "":
# print '%7.1f %7.1f'%(d,i)
print(outstring)
else:
out.write(outstring+'\n') | python | def main():
"""
NAME
eq_di.py
DESCRIPTION
converts x,y pairs digitized from equal area projection to dec inc data
SYNTAX
eq_di.py [command line options] [< filename]
OPTIONS
-f FILE, input file
-F FILE, specifies output file name
-up if data are upper hemisphere
"""
out=""
UP=0
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
dat=[]
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
input=f.readlines()
else:
input = sys.stdin.readlines() # read from standard input
# NEW
ofile = ""
if '-F' in sys.argv:
ind=sys.argv.index('-F')
ofile=sys.argv[ind+1]
out=open(ofile, 'w')
# end NEW
if '-up' in sys.argv: UP=1
for line in input:
rec=line.split()
x,y=float(rec[1]),float(rec[0]) # swap x,y cartesian for x,y geographic
#d,i=pmag.doeqdi(x,y)
r=math.sqrt(x**2+y**2)
z=1.-r**2
t=math.asin(z)
if UP==1:t=-t
if x==0.:
if y<0:
p=3.*math.pi/2.
else:
p=old_div(math.pi,2.)
else:
p=math.atan2(y,x)
d,i=p*180./math.pi,t*180./math.pi
if d<0:d+=360.
# new
outstring = '%7.1f %7.1f'%(d,i)
if ofile == "":
# print '%7.1f %7.1f'%(d,i)
print(outstring)
else:
out.write(outstring+'\n') | NAME
eq_di.py
DESCRIPTION
converts x,y pairs digitized from equal area projection to dec inc data
SYNTAX
eq_di.py [command line options] [< filename]
OPTIONS
-f FILE, input file
-F FILE, specifies output file name
-up if data are upper hemisphere | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/eq_di.py#L9-L69 |
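The conversion inside the loop, pulled out as a stand-alone sketch for one digitized point (the coordinates are invented values on a unit-radius net; the program itself swaps the two input columns before this step):

import math

x, y = 0.3, 0.4                    # hypothetical digitized point
r = math.sqrt(x**2 + y**2)
inc = math.degrees(math.asin(1. - r**2))
dec = math.degrees(math.atan2(y, x)) % 360.
print('%7.1f %7.1f' % (dec, inc))  # roughly '   53.1    48.6'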
PmagPy/PmagPy | programs/chartmaker.py | main | def main():
"""
Welcome to the thellier-thellier experiment automatic chart maker.
Please select desired step interval and upper bound for which it is valid.
e.g.,
50
500
10
600
a blank entry signals the end of data entry.
which would generate steps with 50 degree intervals up to 500, followed by 10 degree intervals up to 600.
chart is stored in: chart.txt
"""
print(main.__doc__)
if '-h' in sys.argv:sys.exit()
cont,Int,Top=1,[],[]
while cont==1:
try:
Int.append(int(input(" Enter desired treatment step interval: <return> to quit ")))
Top.append(int(input(" Enter upper bound for this interval: ")))
except:
cont=0
pmag.chart_maker(Int,Top) | python | def main():
"""
Welcome to the thellier-thellier experiment automatic chart maker.
Please select desired step interval and upper bound for which it is valid.
e.g.,
50
500
10
600
a blank entry signals the end of data entry.
which would generate steps with 50 degree intervals up to 500, followed by 10 degree intervals up to 600.
chart is stored in: chart.txt
"""
print(main.__doc__)
if '-h' in sys.argv:sys.exit()
cont,Int,Top=1,[],[]
while cont==1:
try:
Int.append(int(input(" Enter desired treatment step interval: <return> to quit ")))
Top.append(int(input(" Enter upper bound for this interval: ")))
except:
cont=0
pmag.chart_maker(Int,Top) | Welcome to the thellier-thellier experiment automatic chart maker.
Please select desired step interval and upper bound for which it is valid.
e.g.,
50
500
10
600
a blank entry signals the end of data entry.
which would generate steps with 50 degree intervals up to 500, followed by 10 degree intervals up to 600.
chart is stored in: chart.txt | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/chartmaker.py#L9-L33 |
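A non-interactive equivalent of the example in the docstring (50-degree steps up to 500, then 10-degree steps up to 600):

from pmagpy import pmag

pmag.chart_maker([50, 10], [500, 600])   # per the docstring, the chart is written to chart.txt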
PmagPy/PmagPy | programs/conversion_scripts/s_magic.py | main | def main():
"""
NAME
s_magic.py
DESCRIPTION
converts .s format data to measurements format.
SYNTAX
s_magic.py [command line options]
OPTIONS
-h prints help message and quits
-DM DATA_MODEL_NUM data model number (default is 3)
-f SFILE specifies the .s file name
-sig last column has sigma
-typ Anisotropy type: AMS,AARM,ATRM (default is AMS)
-F FILE specifies the specimens formatted file name
-usr USER specify username
-loc location specify location/study name
-spc NUM : specify number of characters to
designate a specimen, default = 0
-spn SPECNAME, this specimen has the name SPECNAME
-n first column has specimen name
-crd [s,g,t], specify coordinate system of data
s=specimen,g=geographic,t=tilt adjusted, default is 's'
-ncn NCON: naming convention
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
[2] XXXX-YY: YY sample from site XXXX (XXXX, YY of arbitrary length)
[3] XXXX.YY: YY sample from site XXXX (XXXX, YY of arbitrary length)
[4-Z] XXXXYYY: YYY is sample designation with Z characters from site XXX
[5] sample = site
[6] sample, site, location info in er_samples.txt -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
NB: all others you will have to either customize your
self or e-mail [email protected] for help.
DEFAULT
FILE: specimens.txt
INPUT
X11,X22,X33,X12,X23,X13 (.s format file)
X11,X22,X33,X12,X23,X13,sigma (.s format file with -sig option)
SID, X11,X22,X33,X12,X23,X13 (.s format file with -n option)
OUTPUT
specimens.txt format file
NOTE
because .s files do not have specimen names or location information, the output MagIC files
will have to be changed prior to importing to data base.
"""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
data_model_num = pmag.get_named_arg("-DM", 3)
data_model_num = int(float(data_model_num))
sfile = pmag.get_named_arg("-f", reqd=True)
if data_model_num == 2:
anisfile = pmag.get_named_arg("-F", "rmag_anisotropy.txt")
else:
anisfile = pmag.get_named_arg("-F", "specimens.txt")
location = pmag.get_named_arg("-loc", "unknown")
user = pmag.get_named_arg("-usr", "")
sitename = pmag.get_named_arg("unknown", "")
specnum = pmag.get_named_arg("-spc", 0)
specnum = -int(specnum)
dir_path = pmag.get_named_arg("-WD", ".")
name = pmag.get_flag_arg_from_sys("-n")
sigma = pmag.get_flag_arg_from_sys("-sig")
spec = pmag.get_named_arg("-spn", "unknown")
atype = pmag.get_named_arg("-typ", 'AMS')
samp_con = pmag.get_named_arg("-ncn", "1")
#if '-sig' in sys.argv:
# sigma = 1
#if "-n" in sys.argv:
# name = 1
coord_type = pmag.get_named_arg("-crd", 's')
convert.s_magic(sfile, anisfile, dir_path, atype,
coord_type, sigma, samp_con, specnum,
location, spec, sitename, user, data_model_num, name) | python | def main():
"""
NAME
s_magic.py
DESCRIPTION
converts .s format data to measurements format.
SYNTAX
s_magic.py [command line options]
OPTIONS
-h prints help message and quits
-DM DATA_MODEL_NUM data model number (default is 3)
-f SFILE specifies the .s file name
-sig last column has sigma
-typ Anisotropy type: AMS,AARM,ATRM (default is AMS)
-F FILE specifies the specimens formatted file name
-usr USER specify username
-loc location specify location/study name
-spc NUM : specify number of characters to
designate a specimen, default = 0
-spn SPECNAME, this specimen has the name SPECNAME
-n first column has specimen name
-crd [s,g,t], specify coordinate system of data
s=specimen,g=geographic,t=tilt adjusted, default is 's'
-ncn NCON: naming convention
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
[2] XXXX-YY: YY sample from site XXXX (XXXX, YY of arbitrary length)
[3] XXXX.YY: YY sample from site XXXX (XXXX, YY of arbitrary length)
[4-Z] XXXXYYY: YYY is sample designation with Z characters from site XXX
[5] sample = site
[6] sample, site, location info in er_samples.txt -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
NB: all others you will have to either customize your
self or e-mail [email protected] for help.
DEFAULT
FILE: specimens.txt
INPUT
X11,X22,X33,X12,X23,X13 (.s format file)
X11,X22,X33,X12,X23,X13,sigma (.s format file with -sig option)
SID, X11,X22,X33,X12,X23,X13 (.s format file with -n option)
OUTPUT
specimens.txt format file
NOTE
because .s files do not have specimen names or location information, the output MagIC files
will have to be changed prior to importing to data base.
"""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
data_model_num = pmag.get_named_arg("-DM", 3)
data_model_num = int(float(data_model_num))
sfile = pmag.get_named_arg("-f", reqd=True)
if data_model_num == 2:
anisfile = pmag.get_named_arg("-F", "rmag_anisotropy.txt")
else:
anisfile = pmag.get_named_arg("-F", "specimens.txt")
location = pmag.get_named_arg("-loc", "unknown")
user = pmag.get_named_arg("-usr", "")
sitename = pmag.get_named_arg("unknown", "")
specnum = pmag.get_named_arg("-spc", 0)
specnum = -int(specnum)
dir_path = pmag.get_named_arg("-WD", ".")
name = pmag.get_flag_arg_from_sys("-n")
sigma = pmag.get_flag_arg_from_sys("-sig")
spec = pmag.get_named_arg("-spn", "unknown")
atype = pmag.get_named_arg("-typ", 'AMS')
samp_con = pmag.get_named_arg("-ncn", "1")
#if '-sig' in sys.argv:
# sigma = 1
#if "-n" in sys.argv:
# name = 1
coord_type = pmag.get_named_arg("-crd", 's')
convert.s_magic(sfile, anisfile, dir_path, atype,
coord_type, sigma, samp_con, specnum,
location, spec, sitename, user, data_model_num, name) | NAME
s_magic.py
DESCRIPTION
converts .s format data to measurements format.
SYNTAX
s_magic.py [command line options]
OPTIONS
-h prints help message and quits
-DM DATA_MODEL_NUM data model number (default is 3)
-f SFILE specifies the .s file name
-sig last column has sigma
-typ Anisotropy type: AMS,AARM,ATRM (default is AMS)
-F FILE specifies the specimens formatted file name
-usr USER specify username
-loc location specify location/study name
-spc NUM : specify number of characters to
designate a specimen, default = 0
-spn SPECNAME, this specimen has the name SPECNAME
-n first column has specimen name
-crd [s,g,t], specify coordinate system of data
s=specimen,g=geographic,t=tilt adjusted, default is 's'
-ncn NCON: naming convention
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
[2] XXXX-YY: YY sample from site XXXX (XXXX, YY of arbitrary length)
[3] XXXX.YY: YY sample from site XXXX (XXXX, YY of arbitrary length)
[4-Z] XXXXYYY: YYY is sample designation with Z characters from site XXX
[5] sample = site
[6] sample, site, location info in er_samples.txt -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
NB: all others you will have to either customize your
self or e-mail [email protected] for help.
DEFAULT
FILE: specimens.txt
INPUT
X11,X22,X33,X12,X23,X13 (.s format file)
X11,X22,X33,X12,X23,X13,sigma (.s format file with -sig option)
SID, X11,X22,X33,X12,X23,X13 (.s format file with -n option)
OUTPUT
specimens.txt format file
NOTE
because .s files do not have specimen names or location information, the output MagIC files
will have to be changed prior to importing to data base. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/conversion_scripts/s_magic.py#L8-L92 |
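A sketch of calling the underlying converter directly with roughly the defaults the command line would produce; the .s file name is hypothetical, and the import path assumes the converter lives in pmagpy.convert_2_magic as in the other conversion scripts:

from pmagpy import convert_2_magic as convert

# (sfile, anisfile, dir_path, atype, coord_type, sigma, samp_con, specnum,
#  location, spec, sitename, user, data_model_num, name)
convert.s_magic('example.s', 'specimens.txt', '.', 'AMS', 's', 0, '1', 0,
                'unknown', 'unknown', '', '', 3, 0)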
PmagPy/PmagPy | programs/deprecated/basemap_magic.py | main | def main():
"""
NAME
basemap_magic.py
NB: this program no longer maintained - use plot_map_pts.py for greater functionality
DESCRIPTION
makes a map of locations in er_sites.txt
SYNTAX
basemap_magic.py [command line options]
OPTIONS
-h prints help message and quits
-f SFILE, specify er_sites.txt or pmag_results.txt format file
-res [c,l,i,h] specify resolution (crude,low,intermediate,high)
-etp plot the etopo20 topographic mesh
-pad [LAT LON] pad bounding box by LAT/LON (default is [.5 .5] degrees)
-grd SPACE specify grid spacing
-prj [lcc], specify projection (lcc=Lambert conformal conic), default is Mercator
-n print site names (default is not)
-l print location names (default is not)
-o color ocean blue/land green (default is not)
-R don't plot details of rivers
-B don't plot national/state boundaries, etc.
-sav save plot and quit quietly
-fmt [png,svg,eps,jpg,pdf] specify format for output, default is pdf
DEFAULTS
SFILE: 'er_sites.txt'
resolution: intermediate
saved images are in pdf
"""
dir_path = '.'
sites_file = 'er_sites.txt'
ocean = 0
res = 'i'
proj = 'merc'
prn_name = 0
prn_loc = 0
fancy = 0
rivers, boundaries = 0, 0
padlon, padlat, gridspace, details = .5, .5, .5, 1
fmt = 'pdf'
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind = sys.argv.index('-f')
sites_file = sys.argv[ind+1]
if '-res' in sys.argv:
ind = sys.argv.index('-res')
res = sys.argv[ind+1]
if '-etp' in sys.argv:
fancy = 1
if '-n' in sys.argv:
prn_name = 1
if '-l' in sys.argv:
prn_loc = 1
if '-o' in sys.argv:
ocean = 1
if '-R' in sys.argv:
rivers = 0
if '-B' in sys.argv:
boundaries = 0
if '-prj' in sys.argv:
ind = sys.argv.index('-prj')
proj = sys.argv[ind+1]
if '-fmt' in sys.argv:
ind = sys.argv.index('-fmt')
fmt = sys.argv[ind+1]
verbose = pmagplotlib.verbose
if '-sav' in sys.argv:
verbose = 0
if '-pad' in sys.argv:
ind = sys.argv.index('-pad')
padlat = float(sys.argv[ind+1])
padlon = float(sys.argv[ind+2])
if '-grd' in sys.argv:
ind = sys.argv.index('-grd')
gridspace = float(sys.argv[ind+1])
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path = sys.argv[ind+1]
sites_file = dir_path+'/'+sites_file
location = ""
FIG = {'map': 1}
pmagplotlib.plot_init(FIG['map'], 6, 6)
# read in er_sites file
Sites, file_type = pmag.magic_read(sites_file)
if 'results' in file_type:
latkey = 'average_lat'
lonkey = 'average_lon'
namekey = 'pmag_result_name'
lockey = 'er_location_names'
else:
latkey = 'site_lat'
lonkey = 'site_lon'
namekey = 'er_site_name'
lockey = 'er_location_name'
lats, lons = [], []
slats, slons = [], []
names, locs = [], []
for site in Sites:
if prn_loc == 1 and location == "":
location = site['er_location_name']
lats.append(float(site[latkey]))
l = float(site[lonkey])
if l < 0:
l = l+360. # make positive
lons.append(l)
if prn_name == 1:
names.append(site[namekey])
if prn_loc == 1:
locs.append(site[lockey])
for lat in lats:
slats.append(lat)
for lon in lons:
slons.append(lon)
Opts = {'res': res, 'proj': proj, 'loc_name': locs, 'padlon': padlon, 'padlat': padlat, 'latmin': numpy.min(slats)-padlat, 'latmax': numpy.max(
slats)+padlat, 'lonmin': numpy.min(slons)-padlon, 'lonmax': numpy.max(slons)+padlon, 'sym': 'ro', 'boundinglat': 0., 'pltgrid': 1.}
Opts['lon_0'] = 0.5*(numpy.min(slons)+numpy.max(slons))
Opts['lat_0'] = 0.5*(numpy.min(slats)+numpy.max(slats))
Opts['names'] = names
Opts['gridspace'] = gridspace
Opts['details'] = {'coasts': 1, 'rivers': 1,
'states': 1, 'countries': 1, 'ocean': 0}
if ocean == 1:
Opts['details']['ocean'] = 1
if rivers == 1:
Opts['details']['rivers'] = 0
if boundaries == 1:
Opts['details']['states'] = 0
Opts['details']['countries'] = 0
Opts['details']['fancy'] = fancy
pmagplotlib.plot_map(FIG['map'], lats, lons, Opts)
if verbose:
pmagplotlib.draw_figs(FIG)
files = {}
for key in list(FIG.keys()):
files[key] = 'Site_map'+'.'+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
titles = {}
titles['map'] = 'Site Map'
FIG = pmagplotlib.add_borders(FIG, titles, black, purple)
pmagplotlib.save_plots(FIG, files)
elif verbose:
ans = input(" S[a]ve to save plot, Return to quit: ")
if ans == "a":
pmagplotlib.save_plots(FIG, files)
else:
pmagplotlib.save_plots(FIG, files) | python | def main():
"""
NAME
basemap_magic.py
NB: this program no longer maintained - use plot_map_pts.py for greater functionality
DESCRIPTION
makes a map of locations in er_sites.txt
SYNTAX
basemap_magic.py [command line options]
OPTIONS
-h prints help message and quits
-f SFILE, specify er_sites.txt or pmag_results.txt format file
-res [c,l,i,h] specify resolution (crude,low,intermediate,high)
-etp plot the etopo20 topographic mesh
-pad [LAT LON] pad bounding box by LAT/LON (default is [.5 .5] degrees)
-grd SPACE specify grid spacing
-prj [lcc], specify projection (lcc=Lambert conformal conic), default is Mercator
-n print site names (default is not)
-l print location names (default is not)
-o color ocean blue/land green (default is not)
-R don't plot details of rivers
-B don't plot national/state boundaries, etc.
-sav save plot and quit quietly
-fmt [png,svg,eps,jpg,pdf] specify format for output, default is pdf
DEFAULTS
SFILE: 'er_sites.txt'
resolution: intermediate
saved images are in pdf
"""
dir_path = '.'
sites_file = 'er_sites.txt'
ocean = 0
res = 'i'
proj = 'merc'
prn_name = 0
prn_loc = 0
fancy = 0
rivers, boundaries = 0, 0
padlon, padlat, gridspace, details = .5, .5, .5, 1
fmt = 'pdf'
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind = sys.argv.index('-f')
sites_file = sys.argv[ind+1]
if '-res' in sys.argv:
ind = sys.argv.index('-res')
res = sys.argv[ind+1]
if '-etp' in sys.argv:
fancy = 1
if '-n' in sys.argv:
prn_name = 1
if '-l' in sys.argv:
prn_loc = 1
if '-o' in sys.argv:
ocean = 1
if '-R' in sys.argv:
rivers = 0
if '-B' in sys.argv:
boundaries = 0
if '-prj' in sys.argv:
ind = sys.argv.index('-prj')
proj = sys.argv[ind+1]
if '-fmt' in sys.argv:
ind = sys.argv.index('-fmt')
fmt = sys.argv[ind+1]
verbose = pmagplotlib.verbose
if '-sav' in sys.argv:
verbose = 0
if '-pad' in sys.argv:
ind = sys.argv.index('-pad')
padlat = float(sys.argv[ind+1])
padlon = float(sys.argv[ind+2])
if '-grd' in sys.argv:
ind = sys.argv.index('-grd')
gridspace = float(sys.argv[ind+1])
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path = sys.argv[ind+1]
sites_file = dir_path+'/'+sites_file
location = ""
FIG = {'map': 1}
pmagplotlib.plot_init(FIG['map'], 6, 6)
# read in er_sites file
Sites, file_type = pmag.magic_read(sites_file)
if 'results' in file_type:
latkey = 'average_lat'
lonkey = 'average_lon'
namekey = 'pmag_result_name'
lockey = 'er_location_names'
else:
latkey = 'site_lat'
lonkey = 'site_lon'
namekey = 'er_site_name'
lockey = 'er_location_name'
lats, lons = [], []
slats, slons = [], []
names, locs = [], []
for site in Sites:
if prn_loc == 1 and location == "":
location = site['er_location_name']
lats.append(float(site[latkey]))
l = float(site[lonkey])
if l < 0:
l = l+360. # make positive
lons.append(l)
if prn_name == 1:
names.append(site[namekey])
if prn_loc == 1:
locs.append(site[lockey])
for lat in lats:
slats.append(lat)
for lon in lons:
slons.append(lon)
Opts = {'res': res, 'proj': proj, 'loc_name': locs, 'padlon': padlon, 'padlat': padlat, 'latmin': numpy.min(slats)-padlat, 'latmax': numpy.max(
slats)+padlat, 'lonmin': numpy.min(slons)-padlon, 'lonmax': numpy.max(slons)+padlon, 'sym': 'ro', 'boundinglat': 0., 'pltgrid': 1.}
Opts['lon_0'] = 0.5*(numpy.min(slons)+numpy.max(slons))
Opts['lat_0'] = 0.5*(numpy.min(slats)+numpy.max(slats))
Opts['names'] = names
Opts['gridspace'] = gridspace
Opts['details'] = {'coasts': 1, 'rivers': 1,
'states': 1, 'countries': 1, 'ocean': 0}
if ocean == 1:
Opts['details']['ocean'] = 1
if rivers == 1:
Opts['details']['rivers'] = 0
if boundaries == 1:
Opts['details']['states'] = 0
Opts['details']['countries'] = 0
Opts['details']['fancy'] = fancy
pmagplotlib.plot_map(FIG['map'], lats, lons, Opts)
if verbose:
pmagplotlib.draw_figs(FIG)
files = {}
for key in list(FIG.keys()):
files[key] = 'Site_map'+'.'+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
titles = {}
titles['map'] = 'Site Map'
FIG = pmagplotlib.add_borders(FIG, titles, black, purple)
pmagplotlib.save_plots(FIG, files)
elif verbose:
ans = input(" S[a]ve to save plot, Return to quit: ")
if ans == "a":
pmagplotlib.save_plots(FIG, files)
else:
pmagplotlib.save_plots(FIG, files) | NAME
basemap_magic.py
NB: this program no longer maintained - use plot_map_pts.py for greater functionality
DESCRIPTION
makes a map of locations in er_sites.txt
SYNTAX
basemap_magic.py [command line options]
OPTIONS
-h prints help message and quits
-f SFILE, specify er_sites.txt or pmag_results.txt format file
-res [c,l,i,h] specify resolution (crude,low,intermediate,high)
-etp plot the etopo20 topographic mesh
-pad [LAT LON] pad bounding box by LAT/LON (default is [.5 .5] degrees)
-grd SPACE specify grid spacing
-prj [lcc], specify projection (lcc=Lambert conformal conic), default is Mercator
-n print site names (default is not)
-l print location names (default is not)
-o color ocean blue/land green (default is not)
-R don't plot details of rivers
-B don't plot national/state boundaries, etc.
-sav save plot and quit quietly
-fmt [png,svg,eps,jpg,pdf] specify format for output, default is pdf
DEFAULTS
SFILE: 'er_sites.txt'
resolution: intermediate
saved images are in pdf | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/deprecated/basemap_magic.py#L17-L169 |
PmagPy/PmagPy | programs/incfish.py | main | def main():
"""
NAME
incfish.py
DESCRIPTION
calculates fisher parameters from inc only data
INPUT FORMAT
takes inc data
SYNTAX
incfish.py [options] [< filename]
OPTIONS
-h prints help message and quits
-i for interactive filename entry
-f FILE, specify input file name
-F FILE, specify output file name
< filename for reading from standard input
OUTPUT
mean inc,Fisher inc, N, R, k, a95
NOTES
takes the absolute value of inclinations (to take reversals into account),
but returns the Gaussian mean if < 50.0, because of polarity ambiguity and
lack of bias.
"""
inc=[]
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-i' in sys.argv: # ask for filename
file=input("Enter file name with inc data: ")
inc=numpy.loadtxt(file)
elif '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
inc=numpy.loadtxt(file)
else:
inc = numpy.loadtxt(sys.stdin,dtype=numpy.float)
ofile=""
if '-F' in sys.argv:
ind = sys.argv.index('-F')
ofile= sys.argv[ind+1]
out = open(ofile, 'w')
#
#get doincfish to do the dirty work:
fpars= pmag.doincfish(inc)
outstring='%7.1f %7.1f %i %8.1f %7.1f %7.1f'%(fpars['ginc'],fpars['inc'],fpars['n'],fpars['r'],fpars['k'],fpars['alpha95'])
if ofile == "":
print(outstring)
else:
out.write(outstring+'\n') | python | def main():
"""
NAME
incfish.py
DESCRIPTION
calculates fisher parameters from inc only data
INPUT FORMAT
takes inc data
SYNTAX
incfish.py [options] [< filename]
OPTIONS
-h prints help message and quits
-i for interactive filename entry
-f FILE, specify input file name
-F FILE, specify output file name
< filename for reading from standard input
OUTPUT
mean inc,Fisher inc, N, R, k, a95
NOTES
takes the absolute value of inclinations (to take reversals into account),
but returns the Gaussian mean if < 50.0, because of polarity ambiguity and
lack of bias.
"""
inc=[]
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-i' in sys.argv: # ask for filename
file=input("Enter file name with inc data: ")
inc=numpy.loadtxt(file)
elif '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
inc=numpy.loadtxt(file)
else:
inc = numpy.loadtxt(sys.stdin,dtype=numpy.float)
ofile=""
if '-F' in sys.argv:
ind = sys.argv.index('-F')
ofile= sys.argv[ind+1]
out = open(ofile, 'w')
#
#get doincfish to do the dirty work:
fpars= pmag.doincfish(inc)
outstring='%7.1f %7.1f %i %8.1f %7.1f %7.1f'%(fpars['ginc'],fpars['inc'],fpars['n'],fpars['r'],fpars['k'],fpars['alpha95'])
if ofile == "":
print(outstring)
else:
out.write(outstring+'\n') | NAME
incfish.py
DESCRIPTION
calculates fisher parameters from inc only data
INPUT FORMAT
takes inc data
SYNTAX
incfish.py [options] [< filename]
OPTIONS
-h prints help message and quits
-i for interactive filename entry
-f FILE, specify input file name
-F FILE, specify output file name
< filename for reading from standard input
OUTPUT
mean inc,Fisher inc, N, R, k, a95
NOTES
takes the absolute value of inclinations (to take reversals into account),
but returns the Gaussian mean if < 50.0, because of polarity ambiguity and
lack of bias. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/incfish.py#L8-L63 |
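A direct call to the routine that does the work, skipping the command-line wrapper (the inclination values are invented):

import numpy
from pmagpy import pmag

incs = numpy.array([55., 62., 58., 49., 66.])   # hypothetical inclination-only data
fpars = pmag.doincfish(incs)
print(fpars['ginc'], fpars['inc'], fpars['n'], fpars['r'], fpars['k'], fpars['alpha95'])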
PmagPy/PmagPy | pmagpy/pmag.py | sort_diclist | def sort_diclist(undecorated, sort_on):
"""
Sort a list of dictionaries by the value in each
dictionary for the sorting key
Parameters
----------
undecorated : list of dicts
sort_on : str, numeric
key that is present in all dicts to sort on
Returns
---------
ordered list of dicts
Examples
---------
>>> lst = [{'key1': 10, 'key2': 2}, {'key1': 1, 'key2': 20}]
>>> sort_diclist(lst, 'key1')
[{'key2': 20, 'key1': 1}, {'key2': 2, 'key1': 10}]
>>> sort_diclist(lst, 'key2')
[{'key2': 2, 'key1': 10}, {'key2': 20, 'key1': 1}]
"""
decorated = [(len(dict_[sort_on]) if hasattr(dict_[sort_on], '__len__') else dict_[
sort_on], index) for (index, dict_) in enumerate(undecorated)]
decorated.sort()
return[undecorated[index] for (key, index) in decorated] | python | def sort_diclist(undecorated, sort_on):
"""
Sort a list of dictionaries by the value in each
dictionary for the sorting key
Parameters
----------
undecorated : list of dicts
sort_on : str, numeric
key that is present in all dicts to sort on
Returns
---------
ordered list of dicts
Examples
---------
>>> lst = [{'key1': 10, 'key2': 2}, {'key1': 1, 'key2': 20}]
>>> sort_diclist(lst, 'key1')
[{'key2': 20, 'key1': 1}, {'key2': 2, 'key1': 10}]
>>> sort_diclist(lst, 'key2')
[{'key2': 2, 'key1': 10}, {'key2': 20, 'key1': 1}]
"""
decorated = [(len(dict_[sort_on]) if hasattr(dict_[sort_on], '__len__') else dict_[
sort_on], index) for (index, dict_) in enumerate(undecorated)]
decorated.sort()
return[undecorated[index] for (key, index) in decorated] | Sort a list of dictionaries by the value in each
dictionary for the sorting key
Parameters
----------
undecorated : list of dicts
sort_on : str, numeric
key that is present in all dicts to sort on
Returns
---------
ordered list of dicts
Examples
---------
>>> lst = [{'key1': 10, 'key2': 2}, {'key1': 1, 'key2': 20}]
>>> sort_diclist(lst, 'key1')
[{'key2': 20, 'key1': 1}, {'key2': 2, 'key1': 10}]
>>> sort_diclist(lst, 'key2')
[{'key2': 2, 'key1': 10}, {'key2': 20, 'key1': 1}] | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L32-L58 |
PmagPy/PmagPy | pmagpy/pmag.py | get_dictitem | def get_dictitem(In, k, v, flag, float_to_int=False):
""" returns a list of dictionaries from list In with key,k = value, v . CASE INSENSITIVE # allowed keywords:
requires that the value of k in the dictionaries contained in In be castable to string and requires that v be castable to a string if flag is T,F
,has or not and requires they be castable to float if flag is eval, min, or max.
float_to_int goes through the relvant values in In and truncates them,
(like "0.0" to "0") for evaluation, default is False
Parameters
__________
In : list of dictionaries to work on
k : key to test
v : key value to test
flag : [T,F,has, or not]
float_to int : if True, truncates to integer
Returns
______
list of dictionaries that meet condition
"""
if float_to_int:
try:
v = str(math.trunc(float(v)))
except ValueError: # catches non floatable strings
pass
except TypeError: # catches None
pass
fixed_In = []
for dictionary in In:
if k in dictionary:
val = dictionary[k]
try:
val = str(math.trunc(float(val)))
except ValueError: # catches non floatable strings
pass
except TypeError: # catches None
pass
dictionary[k] = val
fixed_In.append(dictionary)
In = fixed_In
if flag == "T":
# return that which is
return [dictionary for dictionary in In if k in list(dictionary.keys()) and str(dictionary[k]).lower() == str(v).lower()]
if flag == "F":
# return that which is not
return [dictionary for dictionary in In if k in list(dictionary.keys()) and str(dictionary[k]).lower() != str(v).lower()]
if flag == "has":
# return that which is contained
return [dictionary for dictionary in In if k in list(dictionary.keys()) and str(v).lower() in str(dictionary[k]).lower()]
if flag == "not":
# return that which is not contained
return [dictionary for dictionary in In if k in list(dictionary.keys()) and str(v).lower() not in str(dictionary[k]).lower()]
if flag == "eval":
A = [dictionary for dictionary in In if k in list(dictionary.keys(
)) and dictionary[k] != ''] # find records with no blank values for key
# return that which is
return [dictionary for dictionary in A if k in list(dictionary.keys()) and float(dictionary[k]) == float(v)]
if flag == "min":
A = [dictionary for dictionary in In if k in list(dictionary.keys(
)) and dictionary[k] != ''] # find records with no blank values for key
# return that which is greater than
return [dictionary for dictionary in A if k in list(dictionary.keys()) and float(dictionary[k]) >= float(v)]
if flag == "max":
A = [dictionary for dictionary in In if k in list(dictionary.keys(
)) and dictionary[k] != ''] # find records with no blank values for key
# return that which is less than
return [dictionary for dictionary in A if k in list(dictionary.keys()) and float(dictionary[k]) <= float(v)]
if flag == 'not_null':
return [dictionary for dictionary in In if dictionary[k]] | python | def get_dictitem(In, k, v, flag, float_to_int=False):
""" returns a list of dictionaries from list In with key,k = value, v . CASE INSENSITIVE # allowed keywords:
requires that the value of k in the dictionaries contained in In be castable to string and requires that v be castable to a string if flag is T,F
,has or not and requires they be castable to float if flag is eval, min, or max.
float_to_int goes through the relvant values in In and truncates them,
(like "0.0" to "0") for evaluation, default is False
Parameters
__________
In : list of dictionaries to work on
k : key to test
v : key value to test
flag : [T,F,has, or not]
float_to_int : if True, truncates to integer
Returns
______
list of dictionaries that meet condition
"""
if float_to_int:
try:
v = str(math.trunc(float(v)))
except ValueError: # catches non floatable strings
pass
except TypeError: # catches None
pass
fixed_In = []
for dictionary in In:
if k in dictionary:
val = dictionary[k]
try:
val = str(math.trunc(float(val)))
except ValueError: # catches non floatable strings
pass
except TypeError: # catches None
pass
dictionary[k] = val
fixed_In.append(dictionary)
In = fixed_In
if flag == "T":
# return that which is
return [dictionary for dictionary in In if k in list(dictionary.keys()) and str(dictionary[k]).lower() == str(v).lower()]
if flag == "F":
# return that which is not
return [dictionary for dictionary in In if k in list(dictionary.keys()) and str(dictionary[k]).lower() != str(v).lower()]
if flag == "has":
# return that which is contained
return [dictionary for dictionary in In if k in list(dictionary.keys()) and str(v).lower() in str(dictionary[k]).lower()]
if flag == "not":
# return that which is not contained
return [dictionary for dictionary in In if k in list(dictionary.keys()) and str(v).lower() not in str(dictionary[k]).lower()]
if flag == "eval":
A = [dictionary for dictionary in In if k in list(dictionary.keys(
)) and dictionary[k] != ''] # find records with no blank values for key
# return that which is
return [dictionary for dictionary in A if k in list(dictionary.keys()) and float(dictionary[k]) == float(v)]
if flag == "min":
A = [dictionary for dictionary in In if k in list(dictionary.keys(
)) and dictionary[k] != ''] # find records with no blank values for key
# return that which is greater than
return [dictionary for dictionary in A if k in list(dictionary.keys()) and float(dictionary[k]) >= float(v)]
if flag == "max":
A = [dictionary for dictionary in In if k in list(dictionary.keys(
)) and dictionary[k] != ''] # find records with no blank values for key
# return that which is less than
return [dictionary for dictionary in A if k in list(dictionary.keys()) and float(dictionary[k]) <= float(v)]
if flag == 'not_null':
return [dictionary for dictionary in In if dictionary[k]] | returns a list of dictionaries from list In whose key, k, matches value, v (case insensitive).
requires that the value of k in the dictionaries contained in In be castable to a string; v must be castable
to a string if flag is T, F, has, or not, and castable to a float if flag is eval, min, or max.
float_to_int goes through the relevant values in In and truncates them
(like "0.0" to "0") for evaluation; default is False
Parameters
__________
In : list of dictionaries to work on
k : key to test
v : key value to test
flag : [T,F,has, or not]
float_to_int : if True, truncates to integer
Returns
______
list of dictionaries that meet condition | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L61-L127 |
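A minimal illustrative use of the flag keywords, assuming pmagpy.pmag is imported as pmag; the records are made up, not from the source:
>>> recs = [{'site': 'a1', 'lat': '10'}, {'site': 'a2', 'lat': ''}]
>>> pmag.get_dictitem(recs, 'site', 'A1', 'T')      # case-insensitive equality
[{'site': 'a1', 'lat': '10'}]
>>> pmag.get_dictitem(recs, 'lat', '5', 'min')      # numeric >= comparison; blank values are skipped
[{'site': 'a1', 'lat': '10'}]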
PmagPy/PmagPy | pmagpy/pmag.py | get_dictkey | def get_dictkey(In, k, dtype):
"""
returns a list of the values of a given key (k) from an input list of dictionaries (In), cast to data type dtype.
If dtype == "", values are returned as strings; if "int", as integers; if "f", as floats (blank or None values become 0).
"""
Out = []
for d in In:
if dtype == '':
Out.append(d[k])
if dtype == 'f':
if d[k] == "":
Out.append(0)
elif d[k] == None:
Out.append(0)
else:
Out.append(float(d[k]))
if dtype == 'int':
if d[k] == "":
Out.append(0)
elif d[k] == None:
Out.append(0)
else:
Out.append(int(d[k]))
return Out | python | def get_dictkey(In, k, dtype):
"""
returns a list of the values of a given key (k) from an input list of dictionaries (In), cast to data type dtype.
If dtype == "", values are returned as strings; if "int", as integers; if "f", as floats (blank or None values become 0).
"""
Out = []
for d in In:
if dtype == '':
Out.append(d[k])
if dtype == 'f':
if d[k] == "":
Out.append(0)
elif d[k] == None:
Out.append(0)
else:
Out.append(float(d[k]))
if dtype == 'int':
if d[k] == "":
Out.append(0)
elif d[k] == None:
Out.append(0)
else:
Out.append(int(d[k]))
return Out | returns a list of the values of a given key (k) from an input list of dictionaries (In), cast to data type dtype.
If dtype == "", values are returned as strings; if "int", as integers; if "f", as floats (blank or None values become 0). | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L130-L154
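An illustrative sketch of the dtype argument, assuming pmagpy.pmag is imported as pmag (example data are made up):
>>> recs = [{'int': '1.5'}, {'int': ''}]
>>> pmag.get_dictkey(recs, 'int', 'f')    # blank or None values are returned as 0
[1.5, 0]
>>> pmag.get_dictkey(recs, 'int', '')     # empty dtype returns the raw strings
['1.5', '']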
PmagPy/PmagPy | pmagpy/pmag.py | get_orient | def get_orient(samp_data, er_sample_name, **kwargs):
"""
samp_data : PmagPy list of dicts or pandas DataFrame
er_sample_name : sample name
"""
if isinstance(samp_data, pd.DataFrame):
samp_data = (samp_data.T.apply(dict))
# set orientation priorities
EX = ["SO-ASC", "SO-POM"]
samp_key, az_key, dip_key = 'er_sample_name', 'sample_azimuth', 'sample_dip'
disc_key, or_key, meth_key = 'sample_description', 'sample_orientation_flag',\
'magic_method_codes'
if 'data_model' in list(kwargs.keys()) and kwargs['data_model'] == 3:
samp_key, az_key, dip_key = 'sample', 'azimuth', 'dip'
disc_key, or_key, meth_key = 'description', 'orientation_quality',\
'method_codes'
orient = {samp_key: er_sample_name, az_key: "",
dip_key: "", disc_key: ""}
# get all the orientation data for this sample
orients = get_dictitem(samp_data, samp_key, er_sample_name, 'T')
if len(orients) > 0 and or_key in list(orients[0].keys()):
# exclude all samples with bad orientation flag
orients = get_dictitem(orients, or_key, 'b', 'F')
if len(orients) > 0:
orient = orients[0] # re-initialize to first one
methods = get_dictitem(orients, meth_key, 'SO-', 'has')
# get a list of all orientation methods for this sample
methods = get_dictkey(methods, meth_key, '')
SO_methods = []
for methcode in methods:
meths = methcode.split(":")
for meth in meths:
if (meth.strip() not in EX) and meth.startswith('SO-'):
SO_methods.append(meth.strip())
# find top priority orientation method
if len(SO_methods) == 0:
print("no orientation data for sample ", er_sample_name)
# preserve meta-data anyway even though orientation is bad
# get all the orientation data for this sample
orig_data = get_dictitem(samp_data, samp_key, er_sample_name, 'T')
if len(orig_data) > 0:
orig_data = orig_data[0]
else:
orig_data = []
az_type = "SO-NO"
else:
SO_priorities = set_priorities(SO_methods, 0)
az_type = SO_methods[SO_methods.index(SO_priorities[0])]
orient = get_dictitem(orients, meth_key, az_type, 'has')[
0] # re-initialize to best one
return orient, az_type | python | def get_orient(samp_data, er_sample_name, **kwargs):
"""
samp_data : PmagPy list of dicts or pandas DataFrame
er_sample_name : sample name
"""
if isinstance(samp_data, pd.DataFrame):
samp_data = (samp_data.T.apply(dict))
# set orientation priorities
EX = ["SO-ASC", "SO-POM"]
samp_key, az_key, dip_key = 'er_sample_name', 'sample_azimuth', 'sample_dip'
disc_key, or_key, meth_key = 'sample_description', 'sample_orientation_flag',\
'magic_method_codes'
if 'data_model' in list(kwargs.keys()) and kwargs['data_model'] == 3:
samp_key, az_key, dip_key = 'sample', 'azimuth', 'dip'
disc_key, or_key, meth_key = 'description', 'orientation_quality',\
'method_codes'
orient = {samp_key: er_sample_name, az_key: "",
dip_key: "", disc_key: ""}
# get all the orientation data for this sample
orients = get_dictitem(samp_data, samp_key, er_sample_name, 'T')
if len(orients) > 0 and or_key in list(orients[0].keys()):
# exclude all samples with bad orientation flag
orients = get_dictitem(orients, or_key, 'b', 'F')
if len(orients) > 0:
orient = orients[0] # re-initialize to first one
methods = get_dictitem(orients, meth_key, 'SO-', 'has')
# get a list of all orientation methods for this sample
methods = get_dictkey(methods, meth_key, '')
SO_methods = []
for methcode in methods:
meths = methcode.split(":")
for meth in meths:
if (meth.strip() not in EX) and meth.startswith('SO-'):
SO_methods.append(meth.strip())
# find top priority orientation method
if len(SO_methods) == 0:
print("no orientation data for sample ", er_sample_name)
# preserve meta-data anyway even though orientation is bad
# get all the orientation data for this sample
orig_data = get_dictitem(samp_data, samp_key, er_sample_name, 'T')
if len(orig_data) > 0:
orig_data = orig_data[0]
else:
orig_data = []
az_type = "SO-NO"
else:
SO_priorities = set_priorities(SO_methods, 0)
az_type = SO_methods[SO_methods.index(SO_priorities[0])]
orient = get_dictitem(orients, meth_key, az_type, 'has')[
0] # re-initialize to best one
return orient, az_type | samp_data : PmagPy list of dicts or pandas DataFrame
er_sample_name : sample name | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L164-L214 |
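A hedged usage sketch with a single hypothetical 3.0-format sample record (field names follow the data_model=3 branch above; output is not shown because it depends on the orientation-priority helper):
>>> samps = [{'sample': 'mgf1a', 'azimuth': '35', 'dip': '-10',
...           'method_codes': 'SO-SUN', 'orientation_quality': 'g', 'description': ''}]
>>> orient, az_type = pmag.get_orient(samps, 'mgf1a', data_model=3)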
PmagPy/PmagPy | pmagpy/pmag.py | EI | def EI(inc):
"""
Given a mean inclination value of a distribution of directions, this
function calculates the expected elongation of this distribution using a
best-fit polynomial of the TK03 GAD secular variation model (Tauxe and
Kent, 2004).
Parameters
----------
inc : inclination in degrees (int or float)
Returns
---------
elongation : float
Examples
---------
>>> pmag.EI(20)
2.4863973732
>>> pmag.EI(90)
1.0241570135500004
"""
poly_tk03 = [3.15976125e-06, -3.52459817e-04, -
1.46641090e-02, 2.89538539e+00]
return poly_tk03[0] * inc**3 + poly_tk03[1] * inc**2 + poly_tk03[2] * inc + poly_tk03[3] | python | def EI(inc):
"""
Given a mean inclination value of a distribution of directions, this
function calculates the expected elongation of this distribution using a
best-fit polynomial of the TK03 GAD secular variation model (Tauxe and
Kent, 2004).
Parameters
----------
inc : inclination in degrees (int or float)
Returns
---------
elongation : float
Examples
---------
>>> pmag.EI(20)
2.4863973732
>>> pmag.EI(90)
1.0241570135500004
"""
poly_tk03 = [3.15976125e-06, -3.52459817e-04, -
1.46641090e-02, 2.89538539e+00]
return poly_tk03[0] * inc**3 + poly_tk03[1] * inc**2 + poly_tk03[2] * inc + poly_tk03[3] | Given a mean inclination value of a distribution of directions, this
function calculates the expected elongation of this distribution using a
best-fit polynomial of the TK03 GAD secular variation model (Tauxe and
Kent, 2004).
Parameters
----------
inc : inclination in degrees (int or float)
Returns
---------
elongation : float
Examples
---------
>>> pmag.EI(20)
2.4863973732
>>> pmag.EI(90)
1.0241570135500004 | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L217-L241 |
PmagPy/PmagPy | pmagpy/pmag.py | find_f | def find_f(data):
"""
Given a distribution of directions, this function determines parameters
(elongation, inclination, flattening factor, and elongation direction) that
are consistent with the TK03 secular variation model.
Parameters
----------
data : array of declination, inclination pairs
(e.g. np.array([[140,21],[127,23],[142,19],[136,22]]))
Returns
---------
Es : list of elongation values
Is : list of inclination values
Fs : list of flattening factors
V2s : list of elongation directions (relative to the distribution)
The function will return a zero list ([0]) for each of these parameters if the directions constitute a pathological distribution.
Examples
---------
>>> directions = np.array([[140,21],[127,23],[142,19],[136,22]])
>>> Es, Is, Fs, V2s = pmag.find_f(directions)
"""
rad = np.pi/180.
Es, Is, Fs, V2s = [], [], [], []
ppars = doprinc(data)
D = ppars['dec']
Decs, Incs = data.transpose()[0], data.transpose()[1]
Tan_Incs = np.tan(Incs * rad)
for f in np.arange(1., .2, -.01):
U = old_div(np.arctan((old_div(1., f)) * Tan_Incs), rad)
fdata = np.array([Decs, U]).transpose()
ppars = doprinc(fdata)
Fs.append(f)
Es.append(old_div(ppars["tau2"], ppars["tau3"]))
ang = angle([D, 0], [ppars["V2dec"], 0])
if 180. - ang < ang:
ang = 180. - ang
V2s.append(ang)
Is.append(abs(ppars["inc"]))
if EI(abs(ppars["inc"])) <= Es[-1]:
del Es[-1]
del Is[-1]
del Fs[-1]
del V2s[-1]
if len(Fs) > 0:
for f in np.arange(Fs[-1], .2, -.005):
U = old_div(np.arctan((old_div(1., f)) * Tan_Incs), rad)
fdata = np.array([Decs, U]).transpose()
ppars = doprinc(fdata)
Fs.append(f)
Es.append(old_div(ppars["tau2"], ppars["tau3"]))
Is.append(abs(ppars["inc"]))
ang = angle([D, 0], [ppars["V2dec"], 0])
if 180. - ang < ang:
ang = 180. - ang
V2s.append(ang)
if EI(abs(ppars["inc"])) <= Es[-1]:
return Es, Is, Fs, V2s
return [0], [0], [0], [0] | python | def find_f(data):
"""
Given a distribution of directions, this function determines parameters
(elongation, inclination, flattening factor, and elongation direction) that
are consistent with the TK03 secular variation model.
Parameters
----------
data : array of declination, inclination pairs
(e.g. np.array([[140,21],[127,23],[142,19],[136,22]]))
Returns
---------
Es : list of elongation values
Is : list of inclination values
Fs : list of flattening factors
V2s : list of elongation directions (relative to the distribution)
The function will return a zero list ([0]) for each of these parameters if the directions constitute a pathological distribution.
Examples
---------
>>> directions = np.array([[140,21],[127,23],[142,19],[136,22]])
>>> Es, Is, Fs, V2s = pmag.find_f(directions)
"""
rad = np.pi/180.
Es, Is, Fs, V2s = [], [], [], []
ppars = doprinc(data)
D = ppars['dec']
Decs, Incs = data.transpose()[0], data.transpose()[1]
Tan_Incs = np.tan(Incs * rad)
for f in np.arange(1., .2, -.01):
U = old_div(np.arctan((old_div(1., f)) * Tan_Incs), rad)
fdata = np.array([Decs, U]).transpose()
ppars = doprinc(fdata)
Fs.append(f)
Es.append(old_div(ppars["tau2"], ppars["tau3"]))
ang = angle([D, 0], [ppars["V2dec"], 0])
if 180. - ang < ang:
ang = 180. - ang
V2s.append(ang)
Is.append(abs(ppars["inc"]))
if EI(abs(ppars["inc"])) <= Es[-1]:
del Es[-1]
del Is[-1]
del Fs[-1]
del V2s[-1]
if len(Fs) > 0:
for f in np.arange(Fs[-1], .2, -.005):
U = old_div(np.arctan((old_div(1., f)) * Tan_Incs), rad)
fdata = np.array([Decs, U]).transpose()
ppars = doprinc(fdata)
Fs.append(f)
Es.append(old_div(ppars["tau2"], ppars["tau3"]))
Is.append(abs(ppars["inc"]))
ang = angle([D, 0], [ppars["V2dec"], 0])
if 180. - ang < ang:
ang = 180. - ang
V2s.append(ang)
if EI(abs(ppars["inc"])) <= Es[-1]:
return Es, Is, Fs, V2s
return [0], [0], [0], [0] | Given a distribution of directions, this function determines parameters
(elongation, inclination, flattening factor, and elongation direction) that
are consistent with the TK03 secular variation model.
Parameters
----------
data : array of declination, inclination pairs
(e.g. np.array([[140,21],[127,23],[142,19],[136,22]]))
Returns
---------
Es : list of elongation values
Is : list of inclination values
Fs : list of flattening factors
V2s : list of elongation directions (relative to the distribution)
The function will return a zero list ([0]) for each of these parameters if the directions constitute a pathological distribution.
Examples
---------
>>> directions = np.array([[140,21],[127,23],[142,19],[136,22]])
>>> Es, Is, Fs, V2s = pmag.find_f(directions) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L244-L305 |
PmagPy/PmagPy | pmagpy/pmag.py | convert_lat | def convert_lat(Recs):
"""
uses model_lat if present; otherwise, for average_age <= 5 Ma, uses site_lat or estimates model_lat (plat) from average_inc.
"""
New = []
for rec in Recs:
if 'model_lat' in list(rec.keys()) and rec['model_lat'] != "":
New.append(rec)
elif 'average_age' in list(rec.keys()) and rec['average_age'] != "" and float(rec['average_age']) <= 5.:
if 'site_lat' in list(rec.keys()) and rec['site_lat'] != "":
rec['model_lat'] = rec['site_lat']
New.append(rec)
elif 'average_inc' in list(rec.keys()) and rec['average_inc'] != "":
rec['model_lat'] = '%7.1f' % (plat(float(rec['average_inc'])))
New.append(rec)
return New | python | def convert_lat(Recs):
"""
uses model_lat if present; otherwise, for average_age <= 5 Ma, uses site_lat or estimates model_lat (plat) from average_inc.
"""
New = []
for rec in Recs:
if 'model_lat' in list(rec.keys()) and rec['model_lat'] != "":
New.append(rec)
elif 'average_age' in list(rec.keys()) and rec['average_age'] != "" and float(rec['average_age']) <= 5.:
if 'site_lat' in list(rec.keys()) and rec['site_lat'] != "":
rec['model_lat'] = rec['site_lat']
New.append(rec)
elif 'average_inc' in list(rec.keys()) and rec['average_inc'] != "":
rec['model_lat'] = '%7.1f' % (plat(float(rec['average_inc'])))
New.append(rec)
return New | uses lat, for age<5Ma, model_lat if present, else tries to use average_inc to estimate plat. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L332-L347 |
PmagPy/PmagPy | pmagpy/pmag.py | convert_ages | def convert_ages(Recs, data_model=3):
"""
converts ages to Ma
Parameters
_________
Recs : list of dictionaries in data model by data_model
data_model : MagIC data model (default is 3)
"""
if data_model == 3:
site_key = 'site'
agekey = "age"
keybase = ""
else:
site_key = 'er_site_names'
agekey = find('age', list(rec.keys()))
if agekey != "":
keybase = agekey.split('_')[0] + '_'
New = []
for rec in Recs:
age = ''
if rec[keybase + 'age'] != "":
age = float(rec[keybase + "age"])
elif rec[keybase + 'age_low'] != "" and rec[keybase + 'age_high'] != '':
age = np.mean([rec[keybase + 'age_high'],
rec[keybase + "age_low"]])
# age = float(rec[keybase + 'age_low']) + old_div(
# (float(rec[keybase + 'age_high']) - float(rec[keybase + 'age_low'])), 2.)
if age != '':
rec[keybase + 'age_unit']
if rec[keybase + 'age_unit'] == 'Ma':
rec[keybase + 'age'] = '%10.4e' % (age)
elif rec[keybase + 'age_unit'] == 'ka' or rec[keybase + 'age_unit'] == 'Ka':
rec[keybase + 'age'] = '%10.4e' % (age * .001)
elif rec[keybase + 'age_unit'] == 'Years AD (+/-)':
rec[keybase + 'age'] = '%10.4e' % ((2011 - age) * 1e-6)
elif rec[keybase + 'age_unit'] == 'Years BP':
rec[keybase + 'age'] = '%10.4e' % ((age) * 1e-6)
rec[keybase + 'age_unit'] = 'Ma'
New.append(rec)
else:
if 'site_key' in list(rec.keys()):
print('problem in convert_ages:', rec['site_key'])
elif 'er_site_name' in list(rec.keys()):
print('problem in convert_ages:', rec['site_key'])
else:
print('problem in convert_ages:', rec)
if len(New) == 0:
print('no age key:', rec)
return New | python | def convert_ages(Recs, data_model=3):
"""
converts ages to Ma
Parameters
_________
Recs : list of dictionaries in data model by data_model
data_model : MagIC data model (default is 3)
"""
if data_model == 3:
site_key = 'site'
agekey = "age"
keybase = ""
else:
site_key = 'er_site_names'
agekey = find('age', list(rec.keys()))
if agekey != "":
keybase = agekey.split('_')[0] + '_'
New = []
for rec in Recs:
age = ''
if rec[keybase + 'age'] != "":
age = float(rec[keybase + "age"])
elif rec[keybase + 'age_low'] != "" and rec[keybase + 'age_high'] != '':
age = np.mean([rec[keybase + 'age_high'],
rec[keybase + "age_low"]])
# age = float(rec[keybase + 'age_low']) + old_div(
# (float(rec[keybase + 'age_high']) - float(rec[keybase + 'age_low'])), 2.)
if age != '':
rec[keybase + 'age_unit']
if rec[keybase + 'age_unit'] == 'Ma':
rec[keybase + 'age'] = '%10.4e' % (age)
elif rec[keybase + 'age_unit'] == 'ka' or rec[keybase + 'age_unit'] == 'Ka':
rec[keybase + 'age'] = '%10.4e' % (age * .001)
elif rec[keybase + 'age_unit'] == 'Years AD (+/-)':
rec[keybase + 'age'] = '%10.4e' % ((2011 - age) * 1e-6)
elif rec[keybase + 'age_unit'] == 'Years BP':
rec[keybase + 'age'] = '%10.4e' % ((age) * 1e-6)
rec[keybase + 'age_unit'] = 'Ma'
New.append(rec)
else:
if 'site_key' in list(rec.keys()):
print('problem in convert_ages:', rec['site_key'])
elif 'er_site_name' in list(rec.keys()):
print('problem in convert_ages:', rec['site_key'])
else:
print('problem in convert_ages:', rec)
if len(New) == 0:
print('no age key:', rec)
return New | converts ages to Ma
Parameters
_________
Recs : list of dictionaries in data model by data_model
data_model : MagIC data model (default is 3) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L350-L399 |
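An illustrative data-model-3 record with an age in ka, converted to Ma (values are made up, assuming pmagpy.pmag is imported as pmag):
>>> recs = [{'site': 'sv01', 'age': '10', 'age_unit': 'ka'}]
>>> pmag.convert_ages(recs)
[{'site': 'sv01', 'age': '1.0000e-02', 'age_unit': 'Ma'}]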
PmagPy/PmagPy | pmagpy/pmag.py | convert_items | def convert_items(data, mapping):
"""
Input: list of dicts (each dict a record for one item),
mapping with column names to swap into the records.
Output: updated list of dicts.
"""
new_recs = []
for rec in data:
new_rec = map_magic.mapping(rec, mapping)
new_recs.append(new_rec)
return new_recs | python | def convert_items(data, mapping):
"""
Input: list of dicts (each dict a record for one item),
mapping with column names to swap into the records.
Output: updated list of dicts.
"""
new_recs = []
for rec in data:
new_rec = map_magic.mapping(rec, mapping)
new_recs.append(new_rec)
return new_recs | Input: list of dicts (each dict a record for one item),
mapping with column names to swap into the records.
Output: updated list of dicts. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L412-L422 |
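A brief sketch using one of the 2.5-to-3.0 mappings referenced above (assumes map_magic can be imported from pmagpy.mapping, as it is elsewhere in PmagPy; record values are made up):
>>> from pmagpy.mapping import map_magic
>>> data2 = [{'er_specimen_name': 'sc01a', 'specimen_dec': '11.5'}]
>>> data3 = pmag.convert_items(data2, map_magic.spec_magic2_2_magic3_map)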
PmagPy/PmagPy | pmagpy/pmag.py | convert_directory_2_to_3 | def convert_directory_2_to_3(meas_fname="magic_measurements.txt", input_dir=".",
output_dir=".", meas_only=False, data_model=None):
"""
Convert 2.0 measurements file into 3.0 measurements file.
Merge and convert specimen, sample, site, and location data.
Also translates criteria data.
Parameters
----------
meas_name : name of measurement file (do not include full path,
default is "magic_measurements.txt")
input_dir : name of input directory (default is ".")
output_dir : name of output directory (default is ".")
meas_only : boolean, convert only measurement data (default is False)
data_model : data_model3.DataModel object (default is None)
Returns
---------
NewMeas : 3.0 measurements data (output of pmag.convert_items)
upgraded : list of files successfully upgraded to 3.0
no_upgrade: list of 2.5 files not upgraded to 3.0
"""
convert = {'specimens': map_magic.spec_magic2_2_magic3_map,
'samples': map_magic.samp_magic2_2_magic3_map,
'sites': map_magic.site_magic2_2_magic3_map,
'locations': map_magic.loc_magic2_2_magic3_map,
'ages': map_magic.age_magic2_2_magic3_map}
full_name = os.path.join(input_dir, meas_fname)
if not os.path.exists(full_name):
print("-W- {} is not a file".format(full_name))
return False, False, False
# read in data model 2.5 measurements file
data2, filetype = magic_read(full_name)
# convert list of dicts to 3.0
NewMeas = convert_items(data2, map_magic.meas_magic2_2_magic3_map)
# write 3.0 output to file
ofile = os.path.join(output_dir, 'measurements.txt')
magic_write(ofile, NewMeas, 'measurements')
upgraded = []
if os.path.exists(ofile):
print("-I- 3.0 format measurements file was successfully created: {}".format(ofile))
upgraded.append("measurements.txt")
else:
print("-W- 3.0 format measurements file could not be created")
#
no_upgrade = []
if not meas_only:
# try to convert specimens, samples, sites, & locations
for dtype in ['specimens', 'samples', 'sites', 'locations', 'ages']:
mapping = convert[dtype]
res = convert_and_combine_2_to_3(
dtype, mapping, input_dir, output_dir, data_model)
if res:
upgraded.append(res)
# try to upgrade criteria file
if os.path.exists(os.path.join(input_dir, 'pmag_criteria.txt')):
crit_file = convert_criteria_file_2_to_3(input_dir=input_dir,
output_dir=output_dir,
data_model=data_model)[0]
if crit_file:
upgraded.append(crit_file)
else:
no_upgrade.append("pmag_criteria.txt")
# create list of all un-upgradeable files
for fname in os.listdir(input_dir):
if fname in ['measurements.txt', 'specimens.txt', 'samples.txt',
'sites.txt', 'locations.txt']:
continue
elif 'rmag' in fname:
no_upgrade.append(fname)
elif fname in ['pmag_results.txt', 'er_synthetics.txt', 'er_images.txt',
'er_plots.txt']:
no_upgrade.append(fname)
return NewMeas, upgraded, no_upgrade | python | def convert_directory_2_to_3(meas_fname="magic_measurements.txt", input_dir=".",
output_dir=".", meas_only=False, data_model=None):
"""
Convert 2.0 measurements file into 3.0 measurements file.
Merge and convert specimen, sample, site, and location data.
Also translates criteria data.
Parameters
----------
meas_name : name of measurement file (do not include full path,
default is "magic_measurements.txt")
input_dir : name of input directory (default is ".")
output_dir : name of output directory (default is ".")
meas_only : boolean, convert only measurement data (default is False)
data_model : data_model3.DataModel object (default is None)
Returns
---------
NewMeas : 3.0 measurements data (output of pmag.convert_items)
upgraded : list of files successfully upgraded to 3.0
no_upgrade: list of 2.5 files not upgraded to 3.0
"""
convert = {'specimens': map_magic.spec_magic2_2_magic3_map,
'samples': map_magic.samp_magic2_2_magic3_map,
'sites': map_magic.site_magic2_2_magic3_map,
'locations': map_magic.loc_magic2_2_magic3_map,
'ages': map_magic.age_magic2_2_magic3_map}
full_name = os.path.join(input_dir, meas_fname)
if not os.path.exists(full_name):
print("-W- {} is not a file".format(full_name))
return False, False, False
# read in data model 2.5 measurements file
data2, filetype = magic_read(full_name)
# convert list of dicts to 3.0
NewMeas = convert_items(data2, map_magic.meas_magic2_2_magic3_map)
# write 3.0 output to file
ofile = os.path.join(output_dir, 'measurements.txt')
magic_write(ofile, NewMeas, 'measurements')
upgraded = []
if os.path.exists(ofile):
print("-I- 3.0 format measurements file was successfully created: {}".format(ofile))
upgraded.append("measurements.txt")
else:
print("-W- 3.0 format measurements file could not be created")
#
no_upgrade = []
if not meas_only:
# try to convert specimens, samples, sites, & locations
for dtype in ['specimens', 'samples', 'sites', 'locations', 'ages']:
mapping = convert[dtype]
res = convert_and_combine_2_to_3(
dtype, mapping, input_dir, output_dir, data_model)
if res:
upgraded.append(res)
# try to upgrade criteria file
if os.path.exists(os.path.join(input_dir, 'pmag_criteria.txt')):
crit_file = convert_criteria_file_2_to_3(input_dir=input_dir,
output_dir=output_dir,
data_model=data_model)[0]
if crit_file:
upgraded.append(crit_file)
else:
no_upgrade.append("pmag_criteria.txt")
# create list of all un-upgradeable files
for fname in os.listdir(input_dir):
if fname in ['measurements.txt', 'specimens.txt', 'samples.txt',
'sites.txt', 'locations.txt']:
continue
elif 'rmag' in fname:
no_upgrade.append(fname)
elif fname in ['pmag_results.txt', 'er_synthetics.txt', 'er_images.txt',
'er_plots.txt']:
no_upgrade.append(fname)
return NewMeas, upgraded, no_upgrade | Convert 2.0 measurements file into 3.0 measurements file.
Merge and convert specimen, sample, site, and location data.
Also translates criteria data.
Parameters
----------
meas_name : name of measurement file (do not include full path,
default is "magic_measurements.txt")
input_dir : name of input directory (default is ".")
output_dir : name of output directory (default is ".")
meas_only : boolean, convert only measurement data (default is False)
data_model : data_model3.DataModel object (default is None)
Returns
---------
NewMeas : 3.0 measurements data (output of pmag.convert_items)
upgraded : list of files successfully upgraded to 3.0
no_upgrade: list of 2.5 files not upgraded to 3.0 | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L425-L499 |
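A hedged usage sketch (directory names are hypothetical):
>>> meas3, upgraded, not_upgraded = pmag.convert_directory_2_to_3('magic_measurements.txt',
...                                                               input_dir='old_project',
...                                                               output_dir='new_project')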
PmagPy/PmagPy | pmagpy/pmag.py | convert_and_combine_2_to_3 | def convert_and_combine_2_to_3(dtype, map_dict, input_dir=".", output_dir=".", data_model=None):
"""
Read in er_*.txt file and pmag_*.txt file in working directory.
Combine the data, then translate headers from 2.5 --> 3.0.
Last, write out the data in 3.0.
Parameters
----------
dtype : string for input type (specimens, samples, sites, etc.)
map_dict : dictionary with format {header2_format: header3_format, ...} (from mapping.map_magic module)
input_dir : input directory, default "."
output_dir : output directory, default "."
data_model : data_model3.DataModel object, default None
Returns
---------
output_file_name with 3.0 format data (or None if translation failed)
"""
# read in er_ data & make DataFrame
er_file = os.path.join(input_dir, 'er_{}.txt'.format(dtype))
er_data, er_dtype = magic_read(er_file)
if len(er_data):
er_df = pd.DataFrame(er_data)
if dtype == 'ages':
pass
# remove records with blank ages
#er_data = get_dictitem(er_data, 'age', '', "F")
#er_df = pd.DataFrame(er_data)
else:
er_df.index = er_df['er_{}_name'.format(dtype[:-1])]
else:
er_df = pd.DataFrame()
#
if dtype == 'ages':
full_df = er_df
else:
# read in pmag_ data & make DataFrame
pmag_file = os.path.join(input_dir, 'pmag_{}.txt'.format(dtype))
pmag_data, pmag_dtype = magic_read(pmag_file)
if len(pmag_data):
pmag_df = pd.DataFrame(pmag_data)
pmag_df.index = pmag_df['er_{}_name'.format(dtype[:-1])]
else:
pmag_df = pd.DataFrame()
# combine the two Dataframes
full_df = pd.concat([er_df, pmag_df], sort=True)
# sort the DataFrame so that all records from one item are together
full_df.sort_index(inplace=True)
# fix the column names to be 3.0
full_df.rename(columns=map_dict, inplace=True)
# create a MagicDataFrame object, providing the dataframe and the data type
new_df = cb.MagicDataFrame(dtype=dtype, df=full_df, dmodel=data_model)
# write out the data to file
if len(new_df.df):
new_df.write_magic_file(dir_path=output_dir)
return dtype + ".txt"
else:
print("-I- No {} data found.".format(dtype))
return None | python | def convert_and_combine_2_to_3(dtype, map_dict, input_dir=".", output_dir=".", data_model=None):
"""
Read in er_*.txt file and pmag_*.txt file in working directory.
Combine the data, then translate headers from 2.5 --> 3.0.
Last, write out the data in 3.0.
Parameters
----------
dtype : string for input type (specimens, samples, sites, etc.)
map_dict : dictionary with format {header2_format: header3_format, ...} (from mapping.map_magic module)
input_dir : input directory, default "."
output_dir : output directory, default "."
data_model : data_model3.DataModel object, default None
Returns
---------
output_file_name with 3.0 format data (or None if translation failed)
"""
# read in er_ data & make DataFrame
er_file = os.path.join(input_dir, 'er_{}.txt'.format(dtype))
er_data, er_dtype = magic_read(er_file)
if len(er_data):
er_df = pd.DataFrame(er_data)
if dtype == 'ages':
pass
# remove records with blank ages
#er_data = get_dictitem(er_data, 'age', '', "F")
#er_df = pd.DataFrame(er_data)
else:
er_df.index = er_df['er_{}_name'.format(dtype[:-1])]
else:
er_df = pd.DataFrame()
#
if dtype == 'ages':
full_df = er_df
else:
# read in pmag_ data & make DataFrame
pmag_file = os.path.join(input_dir, 'pmag_{}.txt'.format(dtype))
pmag_data, pmag_dtype = magic_read(pmag_file)
if len(pmag_data):
pmag_df = pd.DataFrame(pmag_data)
pmag_df.index = pmag_df['er_{}_name'.format(dtype[:-1])]
else:
pmag_df = pd.DataFrame()
# combine the two Dataframes
full_df = pd.concat([er_df, pmag_df], sort=True)
# sort the DataFrame so that all records from one item are together
full_df.sort_index(inplace=True)
# fix the column names to be 3.0
full_df.rename(columns=map_dict, inplace=True)
# create a MagicDataFrame object, providing the dataframe and the data type
new_df = cb.MagicDataFrame(dtype=dtype, df=full_df, dmodel=data_model)
# write out the data to file
if len(new_df.df):
new_df.write_magic_file(dir_path=output_dir)
return dtype + ".txt"
else:
print("-I- No {} data found.".format(dtype))
return None | Read in er_*.txt file and pmag_*.txt file in working directory.
Combine the data, then translate headers from 2.5 --> 3.0.
Last, write out the data in 3.0.
Parameters
----------
dtype : string for input type (specimens, samples, sites, etc.)
map_dict : dictionary with format {header2_format: header3_format, ...} (from mapping.map_magic module)
input_dir : input directory, default "."
output_dir : output directory, default "."
data_model : data_model3.DataModel object, default None
Returns
---------
output_file_name with 3.0 format data (or None if translation failed) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L502-L561 |
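A minimal sketch for one table type, assuming map_magic is importable from pmagpy.mapping and that er_sites.txt / pmag_sites.txt live in the hypothetical old_project directory; on success the function returns 'sites.txt':
>>> from pmagpy.mapping import map_magic
>>> pmag.convert_and_combine_2_to_3('sites', map_magic.site_magic2_2_magic3_map,
...                                 input_dir='old_project', output_dir='new_project')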
PmagPy/PmagPy | pmagpy/pmag.py | convert_criteria_file_2_to_3 | def convert_criteria_file_2_to_3(fname="pmag_criteria.txt", input_dir=".",
output_dir=".", data_model=None):
"""
Convert a criteria file from 2.5 to 3.0 format and write it out to file
Parameters
----------
fname : string of filename (default "pmag_criteria.txt")
input_dir : string of input directory (default ".")
output_dir : string of output directory (default ".")
data_model : data_model.DataModel object (default None)
Returns
---------
outfile : string output criteria filename, or False
crit_container : cb.MagicDataFrame with 3.0 criteria table
"""
# get criteria from infile
fname = os.path.join(input_dir, fname)
if not os.path.exists(fname):
return False, None
orig_crit, warnings = read_criteria_from_file(fname, initialize_acceptance_criteria(),
data_model=2, return_warnings=True)
converted_crit = {}
# get data model including criteria map
if not data_model:
from . import data_model3 as dm3
DM = dm3.DataModel()
else:
DM = data_model
crit_map = DM.crit_map
# drop all empty mappings
stripped_crit_map = crit_map.dropna(axis='rows')
# go through criteria and get 3.0 name and criterion_operation
for crit in orig_crit:
if orig_crit[crit]['value'] in [-999, '-999', -999.]:
continue
if crit in stripped_crit_map.index:
criterion_operation = stripped_crit_map.loc[crit]['criteria_map']['criterion_operation']
table_col = stripped_crit_map.loc[crit]['criteria_map']['table_column']
orig_crit[crit]['criterion_operation'] = criterion_operation
converted_crit[table_col] = orig_crit[crit]
else:
print('-W- Could not convert {} to 3.0, skipping'.format(crit))
# switch axes
converted_df = pd.DataFrame(converted_crit).transpose()
# name the index
converted_df.index.name = "table_column"
# rename columns to 3.0 values
# 'category' --> criterion (uses defaults from initialize_default_criteria)
# 'pmag_criteria_code' --> criterion (uses what's actually in the translated file)
converted_df.rename(columns={'pmag_criteria_code': 'criterion', 'er_citation_names': 'citations',
'criteria_definition': 'description', 'value': 'criterion_value'},
inplace=True)
# drop unused columns
valid_cols = DM.dm['criteria'].index
drop_cols = set(converted_df.columns) - set(valid_cols)
converted_df.drop(drop_cols, axis='columns', inplace=True)
# move 'table_column' from being the index to being a column
converted_df['table_column'] = converted_df.index
crit_container = cb.MagicDataFrame(dtype='criteria', df=converted_df)
crit_container.write_magic_file(dir_path=output_dir)
return "criteria.txt", crit_container | python | def convert_criteria_file_2_to_3(fname="pmag_criteria.txt", input_dir=".",
output_dir=".", data_model=None):
"""
Convert a criteria file from 2.5 to 3.0 format and write it out to file
Parameters
----------
fname : string of filename (default "pmag_criteria.txt")
input_dir : string of input directory (default ".")
output_dir : string of output directory (default ".")
data_model : data_model.DataModel object (default None)
Returns
---------
outfile : string output criteria filename, or False
crit_container : cb.MagicDataFrame with 3.0 criteria table
"""
# get criteria from infile
fname = os.path.join(input_dir, fname)
if not os.path.exists(fname):
return False, None
orig_crit, warnings = read_criteria_from_file(fname, initialize_acceptance_criteria(),
data_model=2, return_warnings=True)
converted_crit = {}
# get data model including criteria map
if not data_model:
from . import data_model3 as dm3
DM = dm3.DataModel()
else:
DM = data_model
crit_map = DM.crit_map
# drop all empty mappings
stripped_crit_map = crit_map.dropna(axis='rows')
# go through criteria and get 3.0 name and criterion_operation
for crit in orig_crit:
if orig_crit[crit]['value'] in [-999, '-999', -999.]:
continue
if crit in stripped_crit_map.index:
criterion_operation = stripped_crit_map.loc[crit]['criteria_map']['criterion_operation']
table_col = stripped_crit_map.loc[crit]['criteria_map']['table_column']
orig_crit[crit]['criterion_operation'] = criterion_operation
converted_crit[table_col] = orig_crit[crit]
else:
print('-W- Could not convert {} to 3.0, skipping'.format(crit))
# switch axes
converted_df = pd.DataFrame(converted_crit).transpose()
# name the index
converted_df.index.name = "table_column"
# rename columns to 3.0 values
# 'category' --> criterion (uses defaults from initialize_default_criteria)
# 'pmag_criteria_code' --> criterion (uses what's actually in the translated file)
converted_df.rename(columns={'pmag_criteria_code': 'criterion', 'er_citation_names': 'citations',
'criteria_definition': 'description', 'value': 'criterion_value'},
inplace=True)
# drop unused columns
valid_cols = DM.dm['criteria'].index
drop_cols = set(converted_df.columns) - set(valid_cols)
converted_df.drop(drop_cols, axis='columns', inplace=True)
# move 'table_column' from being the index to being a column
converted_df['table_column'] = converted_df.index
crit_container = cb.MagicDataFrame(dtype='criteria', df=converted_df)
crit_container.write_magic_file(dir_path=output_dir)
return "criteria.txt", crit_container | Convert a criteria file from 2.5 to 3.0 format and write it out to file
Parameters
----------
fname : string of filename (default "pmag_criteria.txt")
input_dir : string of input directory (default ".")
output_dir : string of output directory (default ".")
data_model : data_model.DataModel object (default None)
Returns
---------
outfile : string output criteria filename, or False
crit_container : cb.MagicDataFrame with 3.0 criteria table | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L564-L626 |
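A hedged usage sketch (paths are hypothetical; the default pmag_criteria.txt filename is used):
>>> outfile, crit_container = pmag.convert_criteria_file_2_to_3(input_dir='old_project',
...                                                             output_dir='new_project')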
PmagPy/PmagPy | pmagpy/pmag.py | orient | def orient(mag_azimuth, field_dip, or_con):
"""
uses specified orientation convention to convert user supplied orientations
to laboratory azimuth and plunge
"""
or_con = str(or_con)
if mag_azimuth == -999:
return "", ""
if or_con == "1": # lab_mag_az=mag_az; sample_dip = -dip
return mag_azimuth, -field_dip
if or_con == "2":
return mag_azimuth - 90., -field_dip
if or_con == "3": # lab_mag_az=mag_az; sample_dip = 90.-dip
return mag_azimuth, 90. - field_dip
if or_con == "4": # lab_mag_az=mag_az; sample_dip = dip
return mag_azimuth, field_dip
if or_con == "5": # lab_mag_az=mag_az; sample_dip = dip-90.
return mag_azimuth, field_dip - 90.
if or_con == "6": # lab_mag_az=mag_az-90.; sample_dip = 90.-dip
return mag_azimuth - 90., 90. - field_dip
if or_con == "7": # lab_mag_az=mag_az; sample_dip = 90.-dip
return mag_azimuth - 90., 90. - field_dip
print("Error in orientation convention") | python | def orient(mag_azimuth, field_dip, or_con):
"""
uses specified orientation convention to convert user supplied orientations
to laboratory azimuth and plunge
"""
or_con = str(or_con)
if mag_azimuth == -999:
return "", ""
if or_con == "1": # lab_mag_az=mag_az; sample_dip = -dip
return mag_azimuth, -field_dip
if or_con == "2":
return mag_azimuth - 90., -field_dip
if or_con == "3": # lab_mag_az=mag_az; sample_dip = 90.-dip
return mag_azimuth, 90. - field_dip
if or_con == "4": # lab_mag_az=mag_az; sample_dip = dip
return mag_azimuth, field_dip
if or_con == "5": # lab_mag_az=mag_az; sample_dip = dip-90.
return mag_azimuth, field_dip - 90.
if or_con == "6": # lab_mag_az=mag_az-90.; sample_dip = 90.-dip
return mag_azimuth - 90., 90. - field_dip
if or_con == "7": # lab_mag_az=mag_az; sample_dip = 90.-dip
return mag_azimuth - 90., 90. - field_dip
print("Error in orientation convention") | uses specified orientation convention to convert user supplied orientations
to laboratory azimuth and plunge | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L918-L940 |
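An illustrative call for orientation convention 3 (lab azimuth unchanged, lab dip = 90 - field dip); the values are made up:
>>> pmag.orient(135., 62., '3')
(135.0, 28.0)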
PmagPy/PmagPy | pmagpy/pmag.py | get_Sb | def get_Sb(data):
"""
returns vgp scatter for data set
"""
Sb, N = 0., 0.
for rec in data:
delta = 90. - abs(rec['vgp_lat'])
if rec['average_k'] != 0:
k = rec['average_k']
L = rec['average_lat'] * np.pi / 180. # latitude in radians
Nsi = rec['average_nn']
K = old_div(k, (2. * (1. + 3. * np.sin(L)**2) /
(5. - 3. * np.sin(L)**2)))
Sw = old_div(81., np.sqrt(K))
else:
Sw, Nsi = 0, 1.
Sb += delta**2. - old_div((Sw**2), Nsi)
N += 1.
return np.sqrt(old_div(Sb, float(N - 1.))) | python | def get_Sb(data):
"""
returns vgp scatter for data set
"""
Sb, N = 0., 0.
for rec in data:
delta = 90. - abs(rec['vgp_lat'])
if rec['average_k'] != 0:
k = rec['average_k']
L = rec['average_lat'] * np.pi / 180. # latitude in radians
Nsi = rec['average_nn']
K = old_div(k, (2. * (1. + 3. * np.sin(L)**2) /
(5. - 3. * np.sin(L)**2)))
Sw = old_div(81., np.sqrt(K))
else:
Sw, Nsi = 0, 1.
Sb += delta**2. - old_div((Sw**2), Nsi)
N += 1.
return np.sqrt(old_div(Sb, float(N - 1.))) | returns vgp scatter for data set | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L943-L961 |
PmagPy/PmagPy | pmagpy/pmag.py | get_sb_df | def get_sb_df(df, mm97=False):
"""
Calculates Sf for a dataframe with VGP latitudes; optional Fisher's k, site latitude, and N information can be used to correct for within-site scatter (McElhinny & McFadden, 1997)
Parameters
_________
df : Pandas Dataframe with columns
REQUIRED:
vgp_lat : VGP latitude
ONLY REQUIRED for MM97 correction:
dir_k : Fisher kappa estimate
dir_n : number of specimens (samples) per site
lat : latitude of the site
mm97 : if True, will do the correction for within site scatter
Returns:
_______
Sf : Sf
"""
df['delta'] = 90.-df.vgp_lat
Sp2 = np.sum(df.delta**2)/(df.shape[0]-1)
if 'dir_k' in df.columns and mm97:
ks = df.dir_k
Ns = df.dir_n
Ls = np.radians(df.lat)
A95s = 140./np.sqrt(ks*Ns)
Sw2_n = 0.335*(A95s**2)*(2.*(1.+3.*np.sin(Ls)**2) /
(5.-3.*np.sin(Ls)**2))
return np.sqrt(Sp2-Sw2_n.mean())
else:
return np.sqrt(Sp2) | python | def get_sb_df(df, mm97=False):
"""
Calculates Sf for a dataframe with VGP latitudes; optional Fisher's k, site latitude, and N information can be used to correct for within-site scatter (McElhinny & McFadden, 1997)
Parameters
_________
df : Pandas Dataframe with columns
REQUIRED:
vgp_lat : VGP latitude
ONLY REQUIRED for MM97 correction:
dir_k : Fisher kappa estimate
dir_n : number of specimens (samples) per site
lat : latitude of the site
mm97 : if True, will do the correction for within site scatter
Returns:
_______
Sf : Sf
"""
df['delta'] = 90.-df.vgp_lat
Sp2 = np.sum(df.delta**2)/(df.shape[0]-1)
if 'dir_k' in df.columns and mm97:
ks = df.dir_k
Ns = df.dir_n
Ls = np.radians(df.lat)
A95s = 140./np.sqrt(ks*Ns)
Sw2_n = 0.335*(A95s**2)*(2.*(1.+3.*np.sin(Ls)**2) /
(5.-3.*np.sin(Ls)**2))
return np.sqrt(Sp2-Sw2_n.mean())
else:
return np.sqrt(Sp2) | Calculates Sf for a dataframe with VGP latitudes; optional Fisher's k, site latitude, and N information can be used to correct for within-site scatter (McElhinny & McFadden, 1997)
Parameters
_________
df : Pandas Dataframe with columns
REQUIRED:
vgp_lat : VGP latitude
ONLY REQUIRED for MM97 correction:
dir_k : Fisher kappa estimate
dir_n : number of specimens (samples) per site
lat : latitude of the site
mm97 : if True, will do the correction for within site scatter
Returns:
_______
Sf : Sf | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L964-L994 |
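A minimal pandas example with made-up VGP latitudes; with mm97=True the dir_k, dir_n, and lat columns are used for the within-site correction (result not shown):
>>> import pandas as pd
>>> df = pd.DataFrame({'vgp_lat': [66., 80., 72.], 'dir_k': [30., 45., 60.],
...                    'dir_n': [8, 10, 7], 'lat': [33., 33., 33.]})
>>> Sf = pmag.get_sb_df(df, mm97=True)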
PmagPy/PmagPy | pmagpy/pmag.py | grade | def grade(PmagRec, ACCEPT, type, data_model=2.5):
"""
Finds the 'grade' (pass/fail; A/F) of a record (specimen,sample,site) given the acceptance criteria
"""
GREATERTHAN = ['specimen_q', 'site_k', 'site_n', 'site_n_lines', 'site_int_n', 'measurement_step_min', 'specimen_int_ptrm_n', 'specimen_fvds', 'specimen_frac', 'specimen_f', 'specimen_n', 'specimen_int_n', 'sample_int_n', 'average_age_min', 'average_k', 'average_r', 'specimen_magn_moment',
'specimen_magn_volume', 'specimen_rsc', 'sample_n', 'sample_n_lines', 'sample_n_planes', 'sample_k', 'sample_r', 'site_magn_moment', 'site_magn_volume', 'site_magn_mass', 'site_r'] # these statistics must be exceeded to pass, all others must be less than (except specimen_scat, which must be true)
ISTRUE = ['specimen_scat']
kill = [] # criteria that kill the record
sigma_types = ['sample_int_sigma', 'sample_int_sigma_perc', 'site_int_sigma',
'site_int_sigma_perc', 'average_int_sigma', 'average_int_sigma_perc']
sigmas = []
accept = {}
if type == 'specimen_int':
USEKEYS = ['specimen_q', 'measurement_step_min', 'measurement_step_max', 'specimen_int_ptrm_n', 'specimen_fvds', 'specimen_frac', 'specimen_f', 'specimen_int_n', 'specimen_magn_moment',
'specimen_magn_volume', 'specimen_rsc', 'specimen_scat', 'specimen_drats', 'specimen_int_mad', 'specimen_int_dang', 'specimen_md', 'specimen_b_beta', 'specimen_w', 'specimen_gmax']
if data_model == 3.0:
USEKEYS = [map_magic.spec_magic2_2_magic3_map[k] for k in USEKEYS]
elif type == 'specimen_dir':
USEKEYS = ['measurement_step_min', 'measurement_step_max', 'specimen_mad',
'specimen_n', 'specimen_magn_moment', 'specimen_magn_volume']
if data_model == 3.0:
USEKEYS = [map_magic.spec_magic2_2_magic3_map[k] for k in USEKEYS]
elif type == 'sample_int':
USEKEYS = ['sample_int_n', 'sample_int_sigma', 'sample_int_sigma_perc']
if data_model == 3.0:
USEKEYS = [map_magic.samp_magic2_2_magic3_map[k] for k in USEKEYS]
elif type == 'sample_dir':
USEKEYS = ['sample_alpha95', 'sample_n', 'sample_n_lines',
'sample_n_planes', 'sample_k', 'sample_r']
if data_model == 3.0:
USEKEYS = [map_magic.samp_magic2_2_magic3_map[k] for k in USEKEYS]
elif type == 'site_int':
USEKEYS = ['site_int_sigma', 'site_int_sigma_perc', 'site_int_n']
if data_model == 3.0:
USEKEYS = [map_magic.site_magic2_2_magic3_map[k] for k in USEKEYS]
elif type == 'site_dir':
USEKEYS = ['site_alpha95', 'site_k', 'site_n',
'site_n_lines', 'site_n_planes', 'site_r']
if data_model == 3.0:
USEKEYS = [map_magic.site_magic2_2_magic3_map[k] for k in USEKEYS]
for key in list(ACCEPT.keys()):
if ACCEPT[key] != "" and key in USEKEYS:
if key in ISTRUE and ACCEPT[key] == 'TRUE' or ACCEPT[key] == 'True':
# this is because Excel always capitalizes True to TRUE and
# python doesn't recognize that as a boolean. never mind
ACCEPT[key] = '1'
elif ACCEPT[key] == 'FALSE' or ACCEPT[key] == 'False':
ACCEPT[key] = '0'
elif eval(ACCEPT[key]) == 0:
ACCEPT[key] = ""
accept[key] = ACCEPT[key]
for key in sigma_types:
if key in USEKEYS and key in list(accept.keys()) and key in list(PmagRec.keys()):
sigmas.append(key)
if len(sigmas) > 1:
if PmagRec[sigmas[0]] == "" or PmagRec[sigmas[1]] == "":
kill.append(sigmas[0])
kill.append(sigmas[1])
elif eval(PmagRec[sigmas[0]]) > eval(accept[sigmas[0]]) and eval(PmagRec[sigmas[1]]) > eval(accept[sigmas[1]]):
kill.append(sigmas[0])
kill.append(sigmas[1])
elif len(sigmas) == 1 and sigmas[0] in list(accept.keys()):
if PmagRec[sigmas[0]] > accept[sigmas[0]]:
kill.append(sigmas[0])
for key in list(accept.keys()):
if accept[key] != "":
if key not in list(PmagRec.keys()) or PmagRec[key] == '':
kill.append(key)
elif key not in sigma_types:
if key in ISTRUE: # boolean must be true
if PmagRec[key] != '1':
kill.append(key)
if key in GREATERTHAN:
if eval(str(PmagRec[key])) < eval(str(accept[key])):
kill.append(key)
else:
if eval(str(PmagRec[key])) > eval(str(accept[key])):
kill.append(key)
return kill | python | def grade(PmagRec, ACCEPT, type, data_model=2.5):
"""
Finds the 'grade' (pass/fail; A/F) of a record (specimen,sample,site) given the acceptance criteria
"""
GREATERTHAN = ['specimen_q', 'site_k', 'site_n', 'site_n_lines', 'site_int_n', 'measurement_step_min', 'specimen_int_ptrm_n', 'specimen_fvds', 'specimen_frac', 'specimen_f', 'specimen_n', 'specimen_int_n', 'sample_int_n', 'average_age_min', 'average_k', 'average_r', 'specimen_magn_moment',
'specimen_magn_volume', 'specimen_rsc', 'sample_n', 'sample_n_lines', 'sample_n_planes', 'sample_k', 'sample_r', 'site_magn_moment', 'site_magn_volume', 'site_magn_mass', 'site_r'] # these statistics must be exceeded to pass, all others must be less than (except specimen_scat, which must be true)
ISTRUE = ['specimen_scat']
kill = [] # criteria that kill the record
sigma_types = ['sample_int_sigma', 'sample_int_sigma_perc', 'site_int_sigma',
'site_int_sigma_perc', 'average_int_sigma', 'average_int_sigma_perc']
sigmas = []
accept = {}
if type == 'specimen_int':
USEKEYS = ['specimen_q', 'measurement_step_min', 'measurement_step_max', 'specimen_int_ptrm_n', 'specimen_fvds', 'specimen_frac', 'specimen_f', 'specimen_int_n', 'specimen_magn_moment',
'specimen_magn_volume', 'specimen_rsc', 'specimen_scat', 'specimen_drats', 'specimen_int_mad', 'specimen_int_dang', 'specimen_md', 'specimen_b_beta', 'specimen_w', 'specimen_gmax']
if data_model == 3.0:
USEKEYS = [map_magic.spec_magic2_2_magic3_map[k] for k in USEKEYS]
elif type == 'specimen_dir':
USEKEYS = ['measurement_step_min', 'measurement_step_max', 'specimen_mad',
'specimen_n', 'specimen_magn_moment', 'specimen_magn_volume']
if data_model == 3.0:
USEKEYS = [map_magic.spec_magic2_2_magic3_map[k] for k in USEKEYS]
elif type == 'sample_int':
USEKEYS = ['sample_int_n', 'sample_int_sigma', 'sample_int_sigma_perc']
if data_model == 3.0:
USEKEYS = [map_magic.samp_magic2_2_magic3_map[k] for k in USEKEYS]
elif type == 'sample_dir':
USEKEYS = ['sample_alpha95', 'sample_n', 'sample_n_lines',
'sample_n_planes', 'sample_k', 'sample_r']
if data_model == 3.0:
USEKEYS = [map_magic.samp_magic2_2_magic3_map[k] for k in USEKEYS]
elif type == 'site_int':
USEKEYS = ['site_int_sigma', 'site_int_sigma_perc', 'site_int_n']
if data_model == 3.0:
USEKEYS = [map_magic.site_magic2_2_magic3_map[k] for k in USEKEYS]
elif type == 'site_dir':
USEKEYS = ['site_alpha95', 'site_k', 'site_n',
'site_n_lines', 'site_n_planes', 'site_r']
if data_model == 3.0:
USEKEYS = [map_magic.site_magic2_2_magic3_map[k] for k in USEKEYS]
for key in list(ACCEPT.keys()):
if ACCEPT[key] != "" and key in USEKEYS:
if key in ISTRUE and ACCEPT[key] == 'TRUE' or ACCEPT[key] == 'True':
# this is because Excel always capitalizes True to TRUE and
# python doesn't recognize that as a boolean. never mind
ACCEPT[key] = '1'
elif ACCEPT[key] == 'FALSE' or ACCEPT[key] == 'False':
ACCEPT[key] = '0'
elif eval(ACCEPT[key]) == 0:
ACCEPT[key] = ""
accept[key] = ACCEPT[key]
for key in sigma_types:
if key in USEKEYS and key in list(accept.keys()) and key in list(PmagRec.keys()):
sigmas.append(key)
if len(sigmas) > 1:
if PmagRec[sigmas[0]] == "" or PmagRec[sigmas[1]] == "":
kill.append(sigmas[0])
kill.append(sigmas[1])
elif eval(PmagRec[sigmas[0]]) > eval(accept[sigmas[0]]) and eval(PmagRec[sigmas[1]]) > eval(accept[sigmas[1]]):
kill.append(sigmas[0])
kill.append(sigmas[1])
elif len(sigmas) == 1 and sigmas[0] in list(accept.keys()):
if PmagRec[sigmas[0]] > accept[sigmas[0]]:
kill.append(sigmas[0])
for key in list(accept.keys()):
if accept[key] != "":
if key not in list(PmagRec.keys()) or PmagRec[key] == '':
kill.append(key)
elif key not in sigma_types:
if key in ISTRUE: # boolean must be true
if PmagRec[key] != '1':
kill.append(key)
if key in GREATERTHAN:
if eval(str(PmagRec[key])) < eval(str(accept[key])):
kill.append(key)
else:
if eval(str(PmagRec[key])) > eval(str(accept[key])):
kill.append(key)
return kill | Finds the 'grade' (pass/fail; A/F) of a record (specimen,sample,site) given the acceptance criteria | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1031-L1110 |
PmagPy/PmagPy | pmagpy/pmag.py | flip | def flip(di_block, combine=False):
"""
determines the 'normal' direction along the principal eigenvector, then flips the directions of
the reverse mode to their antipodes
Parameters
___________
di_block : nested list of directions
Return
D1 : normal mode
D2 : flipped reverse mode as two DI blocks
combine : if True return combined D1, D2, nested D,I pairs
"""
ppars = doprinc(di_block) # get principal direction
if combine:
D3 = []
D1, D2 = [], []
for rec in di_block:
ang = angle([rec[0], rec[1]], [ppars['dec'], ppars['inc']])
if ang > 90.:
d, i = (rec[0] - 180.) % 360., -rec[1]
D2.append([d, i])
if combine:
D3.append([d, i])
else:
D1.append([rec[0], rec[1]])
if combine:
D3.append([rec[0], rec[1]])
if combine:
return D3
else:
return D1, D2 | python | def flip(di_block, combine=False):
"""
determines the 'normal' direction along the principal eigenvector, then flips the directions of
the reverse mode to their antipodes
Parameters
___________
di_block : nested list of directions
Return
D1 : normal mode
D2 : flipped reverse mode as two DI blocks
combine : if True return combined D1, D2, nested D,I pairs
"""
ppars = doprinc(di_block) # get principal direction
if combine:
D3 = []
D1, D2 = [], []
for rec in di_block:
ang = angle([rec[0], rec[1]], [ppars['dec'], ppars['inc']])
if ang > 90.:
d, i = (rec[0] - 180.) % 360., -rec[1]
D2.append([d, i])
if combine:
D3.append([d, i])
else:
D1.append([rec[0], rec[1]])
if combine:
D3.append([rec[0], rec[1]])
if combine:
return D3
else:
return D1, D2 | determines the 'normal' direction along the principal eigenvector, then flips the directions of
the reverse mode to their antipodes
Parameters
___________
di_block : nested list of directions
Return
D1 : normal mode
D2 : flipped reverse mode as two DI blocks
combine : if True return combined D1, D2, nested D,I pairs | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1115-L1146 |
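An illustrative call on a made-up dual-polarity di_block; which mode ends up in D1 versus D2 depends on the principal direction, so the output is not shown:
>>> di_block = [[350., 60.], [10., 55.], [175., -55.], [190., -60.]]
>>> D1, D2 = pmag.flip(di_block)
>>> both = pmag.flip(di_block, combine=True)   # all directions returned as a single (flipped) mode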
PmagPy/PmagPy | pmagpy/pmag.py | dia_vgp | def dia_vgp(*args): # new function interface by J.Holmes, SIO, 6/1/2011
"""
Converts directional data (declination, inclination, alpha95) at a given
location (Site latitude, Site longitude) to pole position (pole longitude,
pole latitude, dp, dm)
Parameters
----------
Takes input as (Dec, Inc, a95, Site latitude, Site longitude)
Input can be as individual values (5 parameters)
or
as a list of lists: [[Dec, Inc, a95, lat, lon],[Dec, Inc, a95, lat, lon]]
Returns
----------
if input is individual values for one pole the return is:
pole longitude, pole latitude, dp, dm
if input is list of lists the return is:
list of pole longitudes, list of pole latitude, list of dp, list of dm
"""
# test whether arguments are one 2-D list or 5 floats
if len(args) == 1: # args comes in as a tuple of multi-dim lists.
largs = list(args).pop() # scrap the tuple.
# reorganize the lists so that we get columns of data in each var.
(decs, dips, a95s, slats, slongs) = list(zip(*largs))
else:
# When args > 1, we are receiving five floats. This usually happens when the invoking script is
# executed in interactive mode.
(decs, dips, a95s, slats, slongs) = (args)
# We send all incoming data to numpy in an array form. Even if it means a
# 1x1 matrix. That's OKAY. Really.
(dec, dip, a95, slat, slong) = (np.array(decs), np.array(dips), np.array(a95s),
np.array(slats), np.array(slongs)) # package columns into arrays
rad = old_div(np.pi, 180.) # convert to radians
dec, dip, a95, slat, slong = dec * rad, dip * \
rad, a95 * rad, slat * rad, slong * rad
p = np.arctan2(2.0, np.tan(dip))
plat = np.arcsin(np.sin(slat) * np.cos(p) +
np.cos(slat) * np.sin(p) * np.cos(dec))
beta = old_div((np.sin(p) * np.sin(dec)), np.cos(plat))
# -------------------------------------------------------------------------
# The deal with "boolmask":
# We needed a quick way to assign matrix values based on a logic decision, in this case setting boundaries
# on out-of-bounds conditions. Creating a matrix of boolean values the size of the original matrix and using
# it to "mask" the assignment solves this problem nicely. The downside to this is that Numpy complains if you
# attempt to mask a non-matrix, so we have to check for array type and do a normal assignment if the type is
# scalar. These checks are made before calculating for the rest of the function.
# -------------------------------------------------------------------------
boolmask = beta > 1. # create a mask of boolean values
if isinstance(beta, np.ndarray):
beta[boolmask] = 1. # assigns 1 only to elements that mask TRUE.
# Numpy gets upset if you try our masking trick with a scalar or a 0-D
# matrix.
else:
if boolmask:
beta = 1.
boolmask = beta < -1.
if isinstance(beta, np.ndarray):
beta[boolmask] = -1. # assigns -1 only to elements that mask TRUE.
else:
if boolmask:
beta = -1.
beta = np.arcsin(beta)
plong = slong + np.pi - beta
if (np.cos(p) > np.sin(slat) * np.sin(plat)).any():
boolmask = (np.cos(p) > (np.sin(slat) * np.sin(plat)))
if isinstance(plong, np.ndarray):
plong[boolmask] = (slong + beta)[boolmask]
else:
if boolmask:
plong = slong + beta
boolmask = (plong < 0)
if isinstance(plong, np.ndarray):
plong[boolmask] = plong[boolmask] + 2 * np.pi
else:
if boolmask:
plong = plong + 2 * np.pi
boolmask = (plong > 2 * np.pi)
if isinstance(plong, np.ndarray):
plong[boolmask] = plong[boolmask] - 2 * np.pi
else:
if boolmask:
plong = plong - 2 * np.pi
dm = np.rad2deg(a95 * (old_div(np.sin(p), np.cos(dip))))
dp = np.rad2deg(a95 * (old_div((1 + 3 * (np.cos(p)**2)), 2)))
plat = np.rad2deg(plat)
plong = np.rad2deg(plong)
return plong.tolist(), plat.tolist(), dp.tolist(), dm.tolist() | python | def dia_vgp(*args): # new function interface by J.Holmes, SIO, 6/1/2011
"""
Converts directional data (declination, inclination, alpha95) at a given
location (Site latitude, Site longitude) to pole position (pole longitude,
pole latitude, dp, dm)
Parameters
----------
Takes input as (Dec, Inc, a95, Site latitude, Site longitude)
Input can be as individual values (5 parameters)
or
as a list of lists: [[Dec, Inc, a95, lat, lon],[Dec, Inc, a95, lat, lon]]
Returns
----------
if input is individual values for one pole the return is:
pole longitude, pole latitude, dp, dm
if input is list of lists the return is:
list of pole longitudes, list of pole latitude, list of dp, list of dm
"""
# test whether arguments are one 2-D list or 5 floats
if len(args) == 1: # args comes in as a tuple of multi-dim lists.
largs = list(args).pop() # scrap the tuple.
# reorganize the lists so that we get columns of data in each var.
(decs, dips, a95s, slats, slongs) = list(zip(*largs))
else:
# When args > 1, we are receiving five floats. This usually happens when the invoking script is
# executed in interactive mode.
(decs, dips, a95s, slats, slongs) = (args)
# We send all incoming data to numpy in an array form. Even if it means a
# 1x1 matrix. That's OKAY. Really.
(dec, dip, a95, slat, slong) = (np.array(decs), np.array(dips), np.array(a95s),
np.array(slats), np.array(slongs)) # package columns into arrays
rad = old_div(np.pi, 180.) # convert to radians
dec, dip, a95, slat, slong = dec * rad, dip * \
rad, a95 * rad, slat * rad, slong * rad
p = np.arctan2(2.0, np.tan(dip))
plat = np.arcsin(np.sin(slat) * np.cos(p) +
np.cos(slat) * np.sin(p) * np.cos(dec))
beta = old_div((np.sin(p) * np.sin(dec)), np.cos(plat))
# -------------------------------------------------------------------------
# The deal with "boolmask":
# We needed a quick way to assign matrix values based on a logic decision, in this case setting boundaries
# on out-of-bounds conditions. Creating a matrix of boolean values the size of the original matrix and using
# it to "mask" the assignment solves this problem nicely. The downside to this is that Numpy complains if you
# attempt to mask a non-matrix, so we have to check for array type and do a normal assignment if the type is
# scalar. These checks are made before calculating for the rest of the function.
# -------------------------------------------------------------------------
boolmask = beta > 1. # create a mask of boolean values
if isinstance(beta, np.ndarray):
beta[boolmask] = 1. # assigns 1 only to elements that mask TRUE.
# Numpy gets upset if you try our masking trick with a scalar or a 0-D
# matrix.
else:
if boolmask:
beta = 1.
boolmask = beta < -1.
if isinstance(beta, np.ndarray):
beta[boolmask] = -1. # assigns -1 only to elements that mask TRUE.
else:
if boolmask:
beta = -1.
beta = np.arcsin(beta)
plong = slong + np.pi - beta
if (np.cos(p) > np.sin(slat) * np.sin(plat)).any():
boolmask = (np.cos(p) > (np.sin(slat) * np.sin(plat)))
if isinstance(plong, np.ndarray):
plong[boolmask] = (slong + beta)[boolmask]
else:
if boolmask:
plong = slong + beta
boolmask = (plong < 0)
if isinstance(plong, np.ndarray):
plong[boolmask] = plong[boolmask] + 2 * np.pi
else:
if boolmask:
plong = plong + 2 * np.pi
boolmask = (plong > 2 * np.pi)
if isinstance(plong, np.ndarray):
plong[boolmask] = plong[boolmask] - 2 * np.pi
else:
if boolmask:
plong = plong - 2 * np.pi
dm = np.rad2deg(a95 * (old_div(np.sin(p), np.cos(dip))))
dp = np.rad2deg(a95 * (old_div((1 + 3 * (np.cos(p)**2)), 2)))
plat = np.rad2deg(plat)
plong = np.rad2deg(plong)
return plong.tolist(), plat.tolist(), dp.tolist(), dm.tolist() | Converts directional data (declination, inclination, alpha95) at a given
location (Site latitude, Site longitude) to pole position (pole longitude,
pole latitude, dp, dm)
Parameters
----------
Takes input as (Dec, Inc, a95, Site latitude, Site longitude)
Input can be as individual values (5 parameters)
or
as a list of lists: [[Dec, Inc, a95, lat, lon],[Dec, Inc, a95, lat, lon]]
Returns
----------
if input is individual values for one pole the return is:
pole longitude, pole latitude, dp, dm
if input is list of lists the return is:
list of pole longitudes, list of pole latitudes, list of dp, list of dm | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1150-L1245 |
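Usage sketch for dia_vgp (illustration only; the site data are invented):

    import pmagpy.pmag as pmag

    # single direction: dec, inc, alpha95, site latitude, site longitude
    plon, plat, dp, dm = pmag.dia_vgp(11., 45., 2.5, 33., 117.)

    # list-of-lists form returns lists of pole longitudes, latitudes, dp, dm
    plons, plats, dps, dms = pmag.dia_vgp([[11., 45., 2.5, 33., 117.],
                                           [6., 47., 3.0, 33., 117.]])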
PmagPy/PmagPy | pmagpy/pmag.py | int_pars | def int_pars(x, y, vds, **kwargs):
"""
calculates York regression and Coe parameters (with Tauxe Fvds)
"""
# first do linear regression a la York
# do Data Model 3 way:
if 'version' in list(kwargs.keys()) and kwargs['version'] == 3:
n_key = 'int_n_measurements'
b_key = 'int_b'
sigma_key = 'int_b_sigma'
f_key = 'int_f'
fvds_key = 'int_fvds'
g_key = 'int_g'
q_key = 'int_q'
b_beta_key = 'int_b_beta'
else: # version 2
n_key = 'specimen_int_n'
b_key = 'specimen_b'
sigma_key = 'specimen_b_sigma'
f_key = 'specimen_f'
fvds_key = 'specimen_fvds'
g_key = 'specimen_g'
q_key = 'specimen_q'
b_beta_key = 'specimen_b_beta'
xx, yer, xer, xyer, yy, xsum, ysum, xy = 0., 0., 0., 0., 0., 0., 0., 0.
xprime, yprime = [], []
pars = {}
pars[n_key] = len(x)
n = float(len(x))
if n <= 2:
print("shouldn't be here at all!")
return pars, 1
for i in range(len(x)):
xx += x[i]**2.
yy += y[i]**2.
xy += x[i] * y[i]
xsum += x[i]
ysum += y[i]
xsig = np.sqrt(old_div((xx - (old_div(xsum**2., n))), (n - 1.)))
ysig = np.sqrt(old_div((yy - (old_div(ysum**2., n))), (n - 1.)))
sum = 0
for i in range(int(n)):
yer += (y[i] - old_div(ysum, n))**2.
xer += (x[i] - old_div(xsum, n))**2.
xyer += (y[i] - old_div(ysum, n)) * (x[i] - old_div(xsum, n))
slop = -np.sqrt(old_div(yer, xer))
pars[b_key] = slop
s1 = 2. * yer - 2. * slop * xyer
s2 = (n - 2.) * xer
sigma = np.sqrt(old_div(s1, s2))
pars[sigma_key] = sigma
s = old_div((xy - (xsum * ysum / n)), (xx - old_div((xsum**2.), n)))
r = old_div((s * xsig), ysig)
pars["specimen_rsc"] = r**2.
ytot = abs(old_div(ysum, n) - slop * xsum / n)
for i in range(int(n)):
xprime.append(old_div((slop * x[i] + y[i] - ytot), (2. * slop)))
yprime.append((old_div((slop * x[i] + y[i] - ytot), 2.)) + ytot)
sumdy, dy = 0, []
dyt = abs(yprime[0] - yprime[int(n) - 1])
for i in range((int(n) - 1)):
dy.append(abs(yprime[i + 1] - yprime[i]))
sumdy += dy[i]**2.
f = old_div(dyt, ytot)
pars[f_key] = f
pars["specimen_ytot"] = ytot
ff = old_div(dyt, vds)
pars[fvds_key] = ff
ddy = (old_div(1., dyt)) * sumdy
g = 1. - old_div(ddy, dyt)
pars[g_key] = g
q = abs(slop) * f * g / sigma
pars[q_key] = q
pars[b_beta_key] = old_div(-sigma, slop)
return pars, 0 | python | def int_pars(x, y, vds, **kwargs):
"""
calculates York regression and Coe parameters (with Tauxe Fvds)
"""
# first do linear regression a la York
# do Data Model 3 way:
if 'version' in list(kwargs.keys()) and kwargs['version'] == 3:
n_key = 'int_n_measurements'
b_key = 'int_b'
sigma_key = 'int_b_sigma'
f_key = 'int_f'
fvds_key = 'int_fvds'
g_key = 'int_g'
q_key = 'int_q'
b_beta_key = 'int_b_beta'
else: # version 2
n_key = 'specimen_int_n'
b_key = 'specimen_b'
sigma_key = 'specimen_b_sigma'
f_key = 'specimen_f'
fvds_key = 'specimen_fvds'
g_key = 'specimen_g'
q_key = 'specimen_q'
b_beta_key = 'specimen_b_beta'
xx, yer, xer, xyer, yy, xsum, ysum, xy = 0., 0., 0., 0., 0., 0., 0., 0.
xprime, yprime = [], []
pars = {}
pars[n_key] = len(x)
n = float(len(x))
if n <= 2:
print("shouldn't be here at all!")
return pars, 1
for i in range(len(x)):
xx += x[i]**2.
yy += y[i]**2.
xy += x[i] * y[i]
xsum += x[i]
ysum += y[i]
xsig = np.sqrt(old_div((xx - (old_div(xsum**2., n))), (n - 1.)))
ysig = np.sqrt(old_div((yy - (old_div(ysum**2., n))), (n - 1.)))
sum = 0
for i in range(int(n)):
yer += (y[i] - old_div(ysum, n))**2.
xer += (x[i] - old_div(xsum, n))**2.
xyer += (y[i] - old_div(ysum, n)) * (x[i] - old_div(xsum, n))
slop = -np.sqrt(old_div(yer, xer))
pars[b_key] = slop
s1 = 2. * yer - 2. * slop * xyer
s2 = (n - 2.) * xer
sigma = np.sqrt(old_div(s1, s2))
pars[sigma_key] = sigma
s = old_div((xy - (xsum * ysum / n)), (xx - old_div((xsum**2.), n)))
r = old_div((s * xsig), ysig)
pars["specimen_rsc"] = r**2.
ytot = abs(old_div(ysum, n) - slop * xsum / n)
for i in range(int(n)):
xprime.append(old_div((slop * x[i] + y[i] - ytot), (2. * slop)))
yprime.append((old_div((slop * x[i] + y[i] - ytot), 2.)) + ytot)
sumdy, dy = 0, []
dyt = abs(yprime[0] - yprime[int(n) - 1])
for i in range((int(n) - 1)):
dy.append(abs(yprime[i + 1] - yprime[i]))
sumdy += dy[i]**2.
f = old_div(dyt, ytot)
pars[f_key] = f
pars["specimen_ytot"] = ytot
ff = old_div(dyt, vds)
pars[fvds_key] = ff
ddy = (old_div(1., dyt)) * sumdy
g = 1. - old_div(ddy, dyt)
pars[g_key] = g
q = abs(slop) * f * g / sigma
pars[q_key] = q
pars[b_beta_key] = old_div(-sigma, slop)
return pars, 0 | calculates York regression and Coe parameters (with Tauxe Fvds) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1248-L1324 |
PmagPy/PmagPy | pmagpy/pmag.py | dovds | def dovds(data):
"""
calculates vector difference sum for demagnetization data
"""
vds, X = 0, []
for rec in data:
X.append(dir2cart(rec))
for k in range(len(X) - 1):
xdif = X[k + 1][0] - X[k][0]
ydif = X[k + 1][1] - X[k][1]
zdif = X[k + 1][2] - X[k][2]
vds += np.sqrt(xdif**2 + ydif**2 + zdif**2)
vds += np.sqrt(X[-1][0]**2 + X[-1][1]**2 + X[-1][2]**2)
return vds | python | def dovds(data):
"""
calculates vector difference sum for demagnetization data
"""
vds, X = 0, []
for rec in data:
X.append(dir2cart(rec))
for k in range(len(X) - 1):
xdif = X[k + 1][0] - X[k][0]
ydif = X[k + 1][1] - X[k][1]
zdif = X[k + 1][2] - X[k][2]
vds += np.sqrt(xdif**2 + ydif**2 + zdif**2)
vds += np.sqrt(X[-1][0]**2 + X[-1][1]**2 + X[-1][2]**2)
return vds | calculates vector difference sum for demagnetization data | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1327-L1340 |
PmagPy/PmagPy | pmagpy/pmag.py | vspec_magic | def vspec_magic(data):
"""
Takes average vector of replicate measurements
"""
vdata, Dirdata, step_meth = [], [], ""
if len(data) == 0:
return vdata
treat_init = ["treatment_temp", "treatment_temp_decay_rate", "treatment_temp_dc_on", "treatment_temp_dc_off", "treatment_ac_field", "treatment_ac_field_decay_rate", "treatment_ac_field_dc_on",
"treatment_ac_field_dc_off", "treatment_dc_field", "treatment_dc_field_decay_rate", "treatment_dc_field_ac_on", "treatment_dc_field_ac_off", "treatment_dc_field_phi", "treatment_dc_field_theta"]
treats = []
#
# find keys that are used
#
for key in treat_init:
if key in list(data[0].keys()):
treats.append(key) # get a list of keys
stop = {}
stop["er_specimen_name"] = "stop"
for key in treats:
stop[key] = "" # tells program when to quit and go home
data.append(stop)
#
# set initial states
#
DataState0, newstate = {}, 0
for key in treats:
DataState0[key] = data[0][key] # set beginning treatment
k, R = 1, 0
for i in range(k, len(data)):
FDirdata, Dirdata, DataStateCurr, newstate = [], [], {}, 0
for key in treats: # check if anything changed
DataStateCurr[key] = data[i][key]
if DataStateCurr[key].strip() != DataState0[key].strip():
newstate = 1 # something changed
if newstate == 1:
if i == k: # sample is unique
vdata.append(data[i - 1])
else: # measurement is not unique
# print "averaging: records " ,k,i
for l in range(k - 1, i):
if 'orientation' in data[l]['measurement_description']:
data[l]['measurement_description'] = ""
Dirdata.append([float(data[l]['measurement_dec']), float(
data[l]['measurement_inc']), float(data[l]['measurement_magn_moment'])])
FDirdata.append(
[float(data[l]['measurement_dec']), float(data[l]['measurement_inc'])])
dir, R = vector_mean(Dirdata)
Fpars = fisher_mean(FDirdata)
vrec = data[i - 1]
vrec['measurement_dec'] = '%7.1f' % (dir[0])
vrec['measurement_inc'] = '%7.1f' % (dir[1])
vrec['measurement_magn_moment'] = '%8.3e' % (
old_div(R, (i - k + 1)))
vrec['measurement_csd'] = '%7.1f' % (Fpars['csd'])
vrec['measurement_positions'] = '%7.1f' % (Fpars['n'])
vrec['measurement_description'] = 'average of multiple measurements'
if "magic_method_codes" in list(vrec.keys()):
meths = vrec["magic_method_codes"].strip().split(":")
if "DE-VM" not in meths:
meths.append("DE-VM")
methods = ""
for meth in meths:
methods = methods + meth + ":"
vrec["magic_method_codes"] = methods[:-1]
else:
vrec["magic_method_codes"] = "DE-VM"
vdata.append(vrec)
# reset state to new one
for key in treats:
DataState0[key] = data[i][key] # set beginning treatment
k = i + 1
if data[i]["er_specimen_name"] == "stop":
del data[-1] # get rid of dummy stop sign
return vdata, treats | python | def vspec_magic(data):
"""
Takes average vector of replicate measurements
"""
vdata, Dirdata, step_meth = [], [], ""
if len(data) == 0:
return vdata
treat_init = ["treatment_temp", "treatment_temp_decay_rate", "treatment_temp_dc_on", "treatment_temp_dc_off", "treatment_ac_field", "treatment_ac_field_decay_rate", "treatment_ac_field_dc_on",
"treatment_ac_field_dc_off", "treatment_dc_field", "treatment_dc_field_decay_rate", "treatment_dc_field_ac_on", "treatment_dc_field_ac_off", "treatment_dc_field_phi", "treatment_dc_field_theta"]
treats = []
#
# find keys that are used
#
for key in treat_init:
if key in list(data[0].keys()):
treats.append(key) # get a list of keys
stop = {}
stop["er_specimen_name"] = "stop"
for key in treats:
stop[key] = "" # tells program when to quit and go home
data.append(stop)
#
# set initial states
#
DataState0, newstate = {}, 0
for key in treats:
DataState0[key] = data[0][key] # set beginning treatment
k, R = 1, 0
for i in range(k, len(data)):
FDirdata, Dirdata, DataStateCurr, newstate = [], [], {}, 0
for key in treats: # check if anything changed
DataStateCurr[key] = data[i][key]
if DataStateCurr[key].strip() != DataState0[key].strip():
newstate = 1 # something changed
if newstate == 1:
if i == k: # sample is unique
vdata.append(data[i - 1])
else: # measurement is not unique
# print "averaging: records " ,k,i
for l in range(k - 1, i):
if 'orientation' in data[l]['measurement_description']:
data[l]['measurement_description'] = ""
Dirdata.append([float(data[l]['measurement_dec']), float(
data[l]['measurement_inc']), float(data[l]['measurement_magn_moment'])])
FDirdata.append(
[float(data[l]['measurement_dec']), float(data[l]['measurement_inc'])])
dir, R = vector_mean(Dirdata)
Fpars = fisher_mean(FDirdata)
vrec = data[i - 1]
vrec['measurement_dec'] = '%7.1f' % (dir[0])
vrec['measurement_inc'] = '%7.1f' % (dir[1])
vrec['measurement_magn_moment'] = '%8.3e' % (
old_div(R, (i - k + 1)))
vrec['measurement_csd'] = '%7.1f' % (Fpars['csd'])
vrec['measurement_positions'] = '%7.1f' % (Fpars['n'])
vrec['measurement_description'] = 'average of multiple measurements'
if "magic_method_codes" in list(vrec.keys()):
meths = vrec["magic_method_codes"].strip().split(":")
if "DE-VM" not in meths:
meths.append("DE-VM")
methods = ""
for meth in meths:
methods = methods + meth + ":"
vrec["magic_method_codes"] = methods[:-1]
else:
vrec["magic_method_codes"] = "DE-VM"
vdata.append(vrec)
# reset state to new one
for key in treats:
DataState0[key] = data[i][key] # set beginning treatment
k = i + 1
if data[i]["er_specimen_name"] == "stop":
del data[-1] # get rid of dummy stop sign
return vdata, treats | Takes average vector of replicate measurements | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1343-L1416 |
PmagPy/PmagPy | pmagpy/pmag.py | get_specs | def get_specs(data):
"""
Takes a magic format file and returns a list of unique specimen names
"""
# sort the specimen names
speclist = []
for rec in data:
try:
spec = rec["er_specimen_name"]
except KeyError as e:
spec = rec["specimen"]
if spec not in speclist:
speclist.append(spec)
speclist.sort()
return speclist | python | def get_specs(data):
"""
Takes a magic format file and returns a list of unique specimen names
"""
# sort the specimen names
speclist = []
for rec in data:
try:
spec = rec["er_specimen_name"]
except KeyError as e:
spec = rec["specimen"]
if spec not in speclist:
speclist.append(spec)
speclist.sort()
return speclist | Takes a magic format file and returns a list of unique specimen names | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1496-L1510 |
PmagPy/PmagPy | pmagpy/pmag.py | vector_mean | def vector_mean(data):
"""
calculates the vector mean of a given set of vectors
Parameters
__________
data : nested array of [dec,inc,intensity]
Returns
_______
dir : array of [dec, inc, 1]
R : resultant vector length
"""
Xbar = np.zeros((3))
X = dir2cart(data).transpose()
for i in range(3):
Xbar[i] = X[i].sum()
R = np.sqrt(Xbar[0]**2+Xbar[1]**2+Xbar[2]**2)
Xbar = Xbar/R
dir = cart2dir(Xbar)
return dir, R | python | def vector_mean(data):
"""
calculates the vector mean of a given set of vectors
Parameters
__________
data : nested array of [dec,inc,intensity]
Returns
_______
dir : array of [dec, inc, 1]
R : resultant vector length
"""
Xbar = np.zeros((3))
X = dir2cart(data).transpose()
for i in range(3):
Xbar[i] = X[i].sum()
R = np.sqrt(Xbar[0]**2+Xbar[1]**2+Xbar[2]**2)
Xbar = Xbar/R
dir = cart2dir(Xbar)
return dir, R | calculates the vector mean of a given set of vectors
Parameters
__________
data : nested array of [dec,inc,intensity]
Returns
_______
dir : array of [dec, inc, 1]
R : resultant vector length | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1513-L1533 |
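Usage sketch for vector_mean (illustration only; the moments are invented):

    import pmagpy.pmag as pmag

    data = [[10., 45., 1.2e-5], [8., 47., 1.0e-5], [12., 44., 1.4e-5]]
    mean_dir, R = pmag.vector_mean(data)
    print(mean_dir[0], mean_dir[1], R)   # mean dec, mean inc, resultant length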
PmagPy/PmagPy | pmagpy/pmag.py | mark_dmag_rec | def mark_dmag_rec(s, ind, data):
"""
Edits demagnetization data to mark "bad" points with measurement_flag
"""
datablock = []
for rec in data:
if rec['er_specimen_name'] == s:
meths = rec['magic_method_codes'].split(':')
if 'LT-NO' in meths or 'LT-AF-Z' in meths or 'LT-T-Z' in meths:
datablock.append(rec)
dmagrec = datablock[ind]
for k in range(len(data)):
meths = data[k]['magic_method_codes'].split(':')
if 'LT-NO' in meths or 'LT-AF-Z' in meths or 'LT-T-Z' in meths:
if data[k]['er_specimen_name'] == s:
if data[k]['treatment_temp'] == dmagrec['treatment_temp'] and data[k]['treatment_ac_field'] == dmagrec['treatment_ac_field']:
if data[k]['measurement_dec'] == dmagrec['measurement_dec'] and data[k]['measurement_inc'] == dmagrec['measurement_inc'] and data[k]['measurement_magn_moment'] == dmagrec['measurement_magn_moment']:
if data[k]['measurement_flag'] == 'g':
flag = 'b'
else:
flag = 'g'
data[k]['measurement_flag'] = flag
break
return data | python | def mark_dmag_rec(s, ind, data):
"""
Edits demagnetization data to mark "bad" points with measurement_flag
"""
datablock = []
for rec in data:
if rec['er_specimen_name'] == s:
meths = rec['magic_method_codes'].split(':')
if 'LT-NO' in meths or 'LT-AF-Z' in meths or 'LT-T-Z' in meths:
datablock.append(rec)
dmagrec = datablock[ind]
for k in range(len(data)):
meths = data[k]['magic_method_codes'].split(':')
if 'LT-NO' in meths or 'LT-AF-Z' in meths or 'LT-T-Z' in meths:
if data[k]['er_specimen_name'] == s:
if data[k]['treatment_temp'] == dmagrec['treatment_temp'] and data[k]['treatment_ac_field'] == dmagrec['treatment_ac_field']:
if data[k]['measurement_dec'] == dmagrec['measurement_dec'] and data[k]['measurement_inc'] == dmagrec['measurement_inc'] and data[k]['measurement_magn_moment'] == dmagrec['measurement_magn_moment']:
if data[k]['measurement_flag'] == 'g':
flag = 'b'
else:
flag = 'g'
data[k]['measurement_flag'] = flag
break
return data | Edits demagnetization data to mark "bad" points with measurement_flag | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1536-L1559 |
PmagPy/PmagPy | pmagpy/pmag.py | find_dmag_rec | def find_dmag_rec(s, data, **kwargs):
"""
Returns demagnetization data for specimen s from the data. Excludes other
kinds of experiments and "bad" measurements
Parameters
__________
s : specimen name
data : DataFrame with measurement data
**kwargs :
version : if not 3, assume data model = 2.5
Returns
________
datablock : nested list of data for zijderveld plotting
[[tr, dec, inc, int, ZI, flag],...]
tr : treatment step
dec : declination
inc : inclination
int : intensity
ZI : whether zero-field first or infield-first step
flag : g or b , default is set to 'g'
units : list of units found ['T','K','J'] for tesla, kelvin or joules
"""
if 'version' in list(kwargs.keys()) and kwargs['version'] == 3:
# convert dataframe to list of dictionaries
data = data.to_dict('records')
spec_key, dec_key, inc_key = 'specimen', 'dir_dec', 'dir_inc'
flag_key, temp_key, ac_key = 'flag', 'treat_temp', 'treat_ac_field'
meth_key = 'method_codes'
power_key, time_key = 'treat_mw_power', 'treat_mw_time'
Mkeys = ['magn_moment', 'magn_volume', 'magn_mass', 'magnitude']
# just look in the intensity column
inst_key = 'instrument_codes'
else:
spec_key, dec_key, inc_key = 'er_specimen_name', 'measurement_dec', 'measurement_inc'
flag_key = 'measurement_flag'
flag_key, temp_key, ac_key = 'measurement_flag', 'treatment_temp', 'treatment_ac_field'
meth_key = 'magic_method_codes'
power_key, time_key = 'treatment_mw_power', 'treatment_mw_time'
Mkeys = ['measurement_magn_moment', 'measurement_magn_volume',
'measurement_magn_mass', 'measurement_magnitude']
inst_key = 'magic_instrument_codes'
EX = ["LP-AN-ARM", "LP-AN-TRM", "LP-ARM-AFD", "LP-ARM2-AFD", "LP-TRM-AFD",
"LP-TRM", "LP-TRM-TD", "LP-X"] # list of excluded lab protocols
INC = ["LT-NO", "LT-AF-Z", "LT-T-Z",
"LT-M-Z", "LP-PI-TRM-IZ", "LP-PI-M-IZ"]
datablock, tr = [], ""
therm_flag, af_flag, mw_flag = 0, 0, 0
units = []
spec_meas = get_dictitem(data, spec_key, s, 'T')
for rec in spec_meas:
if flag_key not in list(rec.keys()):
rec[flag_key] = 'g'
skip = 1
tr = ""
meths = rec[meth_key].split(":")
methods = []
for m in meths:
methods.append(m.strip()) # get rid of the stupid spaces!
for meth in methods:
if meth.strip() in INC:
skip = 0
for meth in EX:
if meth in methods:
skip = 1
if skip == 0:
if "LT-NO" in methods:
tr = float(rec[temp_key])
if "LT-AF-Z" in methods:
af_flag = 1
try:
tr = float(rec[ac_key])
except (KeyError, ValueError):
tr = 0
if "T" not in units:
units.append("T")
if "LT-T-Z" in methods:
therm_flag = 1
tr = float(rec[temp_key])
if "K" not in units:
units.append("K")
if "LT-M-Z" in methods:
mw_flag = 1
tr = float(rec[power_key]) * float(rec[time_key])
if "J" not in units:
units.append("J")
# looking for in-field first thellier or microwave data -
# otherwise, just ignore this
if "LP-PI-TRM-IZ" in methods or "LP-PI-M-IZ" in methods:
ZI = 0
else:
ZI = 1
if tr != "":
dec, inc, int = "", "", ""
if dec_key in list(rec.keys()) and cb.not_null(rec[dec_key], False):
dec = float(rec[dec_key])
if inc_key in list(rec.keys()) and cb.not_null(rec[inc_key], False):
inc = float(rec[inc_key])
for key in Mkeys:
if key in list(rec.keys()) and cb.not_null(rec[key], False):
int = float(rec[key])
if inst_key not in list(rec.keys()):
rec[inst_key] = ''
datablock.append(
[tr, dec, inc, int, ZI, rec[flag_key], rec[inst_key]])
if therm_flag == 1:
for k in range(len(datablock)):
if datablock[k][0] == 0.:
datablock[k][0] = 273.
if af_flag == 1:
for k in range(len(datablock)):
if datablock[k][0] >= 273 and datablock[k][0] <= 323:
datablock[k][0] = 0.
meas_units = ""
if len(units) > 0:
for u in units:
meas_units = meas_units + u + ":"
meas_units = meas_units[:-1]
return datablock, meas_units | python | def find_dmag_rec(s, data, **kwargs):
"""
Returns demagnetization data for specimen s from the data. Excludes other
kinds of experiments and "bad" measurements
Parameters
__________
s : specimen name
data : DataFrame with measurement data
**kwargs :
version : if not 3, assume data model = 2.5
Returns
________
datablock : nested list of data for zijderveld plotting
[[tr, dec, inc, int, ZI, flag],...]
tr : treatment step
dec : declination
inc : inclination
int : intensity
ZI : whether zero-field first or infield-first step
flag : g or b , default is set to 'g'
units : list of units found ['T','K','J'] for tesla, kelvin or joules
"""
if 'version' in list(kwargs.keys()) and kwargs['version'] == 3:
# convert dataframe to list of dictionaries
data = data.to_dict('records')
spec_key, dec_key, inc_key = 'specimen', 'dir_dec', 'dir_inc'
flag_key, temp_key, ac_key = 'flag', 'treat_temp', 'treat_ac_field'
meth_key = 'method_codes'
power_key, time_key = 'treat_mw_power', 'treat_mw_time'
Mkeys = ['magn_moment', 'magn_volume', 'magn_mass', 'magnitude']
# just look in the intensity column
inst_key = 'instrument_codes'
else:
spec_key, dec_key, inc_key = 'er_specimen_name', 'measurement_dec', 'measurement_inc'
flag_key = 'measurement_flag'
flag_key, temp_key, ac_key = 'measurement_flag', 'treatment_temp', 'treatment_ac_field'
meth_key = 'magic_method_codes'
power_key, time_key = 'treatment_mw_power', 'treatment_mw_time'
Mkeys = ['measurement_magn_moment', 'measurement_magn_volume',
'measurement_magn_mass', 'measurement_magnitude']
inst_key = 'magic_instrument_codes'
EX = ["LP-AN-ARM", "LP-AN-TRM", "LP-ARM-AFD", "LP-ARM2-AFD", "LP-TRM-AFD",
"LP-TRM", "LP-TRM-TD", "LP-X"] # list of excluded lab protocols
INC = ["LT-NO", "LT-AF-Z", "LT-T-Z",
"LT-M-Z", "LP-PI-TRM-IZ", "LP-PI-M-IZ"]
datablock, tr = [], ""
therm_flag, af_flag, mw_flag = 0, 0, 0
units = []
spec_meas = get_dictitem(data, spec_key, s, 'T')
for rec in spec_meas:
if flag_key not in list(rec.keys()):
rec[flag_key] = 'g'
skip = 1
tr = ""
meths = rec[meth_key].split(":")
methods = []
for m in meths:
methods.append(m.strip()) # get rid of the stupid spaces!
for meth in methods:
if meth.strip() in INC:
skip = 0
for meth in EX:
if meth in methods:
skip = 1
if skip == 0:
if "LT-NO" in methods:
tr = float(rec[temp_key])
if "LT-AF-Z" in methods:
af_flag = 1
try:
tr = float(rec[ac_key])
except (KeyError, ValueError):
tr = 0
if "T" not in units:
units.append("T")
if "LT-T-Z" in methods:
therm_flag = 1
tr = float(rec[temp_key])
if "K" not in units:
units.append("K")
if "LT-M-Z" in methods:
mw_flag = 1
tr = float(rec[power_key]) * float(rec[time_key])
if "J" not in units:
units.append("J")
# looking for in-field first thellier or microwave data -
# otherwise, just ignore this
if "LP-PI-TRM-IZ" in methods or "LP-PI-M-IZ" in methods:
ZI = 0
else:
ZI = 1
if tr != "":
dec, inc, int = "", "", ""
if dec_key in list(rec.keys()) and cb.not_null(rec[dec_key], False):
dec = float(rec[dec_key])
if inc_key in list(rec.keys()) and cb.not_null(rec[inc_key], False):
inc = float(rec[inc_key])
for key in Mkeys:
if key in list(rec.keys()) and cb.not_null(rec[key], False):
int = float(rec[key])
if inst_key not in list(rec.keys()):
rec[inst_key] = ''
datablock.append(
[tr, dec, inc, int, ZI, rec[flag_key], rec[inst_key]])
if therm_flag == 1:
for k in range(len(datablock)):
if datablock[k][0] == 0.:
datablock[k][0] = 273.
if af_flag == 1:
for k in range(len(datablock)):
if datablock[k][0] >= 273 and datablock[k][0] <= 323:
datablock[k][0] = 0.
meas_units = ""
if len(units) > 0:
for u in units:
meas_units = meas_units + u + ":"
meas_units = meas_units[:-1]
return datablock, meas_units | Returns demagnetization data for specimen s from the data. Excludes other
kinds of experiments and "bad" measurements
Parameters
__________
s : specimen name
data : DataFrame with measurement data
**kwargs :
version : if not 3, assume data model = 2.5
Returns
________
datablock : nested list of data for zijderveld plotting
[[tr, dec, inc, int, ZI, flag],...]
tr : treatment step
dec : declination
inc : inclination
int : intensity
ZI : whether zero-field first or infield-first step
flag : g or b , default is set to 'g'
units : list of units found ['T','K','J'] for tesla, kelvin or joules | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1567-L1686 |
PmagPy/PmagPy | pmagpy/pmag.py | open_file | def open_file(infile, verbose=True):
"""
Open file and return a list of the file's lines.
Try to use utf-8 encoding, and if that fails use Latin-1.
Parameters
----------
infile : str
full path to file
Returns
----------
data: list
all lines in the file
"""
try:
with codecs.open(infile, "r", "utf-8") as f:
lines = list(f.readlines())
# file might not exist
except FileNotFoundError:
if verbose:
print(
'-W- You are trying to open a file: {} that does not exist'.format(infile))
return []
# encoding might be wrong
except UnicodeDecodeError:
try:
with codecs.open(infile, "r", "Latin-1") as f:
print(
'-I- Using less strict decoding for {}, output may have formatting errors'.format(infile))
lines = list(f.readlines())
# if file exists, and encoding is correct, who knows what the problem is
except Exception as ex:
print("-W- ", type(ex), ex)
return []
except Exception as ex:
print("-W- ", type(ex), ex)
return []
# don't leave a blank line at the end
i = 0
while i < 10:
if not len(lines[-1].strip("\n").strip("\t")):
lines = lines[:-1]
i += 1
else:
i = 10
return lines | python | def open_file(infile, verbose=True):
"""
Open file and return a list of the file's lines.
Try to use utf-8 encoding, and if that fails use Latin-1.
Parameters
----------
infile : str
full path to file
Returns
----------
data: list
all lines in the file
"""
try:
with codecs.open(infile, "r", "utf-8") as f:
lines = list(f.readlines())
# file might not exist
except FileNotFoundError:
if verbose:
print(
'-W- You are trying to open a file: {} that does not exist'.format(infile))
return []
# encoding might be wrong
except UnicodeDecodeError:
try:
with codecs.open(infile, "r", "Latin-1") as f:
print(
'-I- Using less strict decoding for {}, output may have formatting errors'.format(infile))
lines = list(f.readlines())
# if file exists, and encoding is correct, who knows what the problem is
except Exception as ex:
print("-W- ", type(ex), ex)
return []
except Exception as ex:
print("-W- ", type(ex), ex)
return []
# don't leave a blank line at the end
i = 0
while i < 10:
if not len(lines[-1].strip("\n").strip("\t")):
lines = lines[:-1]
i += 1
else:
i = 10
return lines | Open file and return a list of the file's lines.
Try to use utf-8 encoding, and if that fails use Latin-1.
Parameters
----------
infile : str
full path to file
Returns
----------
data: list
all lines in the file | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1689-L1735 |
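Usage sketch for open_file (assumes a measurements.txt in the working directory; a missing file returns an empty list with a warning):

    import pmagpy.pmag as pmag

    lines = pmag.open_file('measurements.txt')
    for line in lines[:3]:
        print(line.strip())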
PmagPy/PmagPy | pmagpy/pmag.py | magic_read | def magic_read(infile, data=None, return_keys=False, verbose=False):
"""
Reads a Magic template file, returns data in a list of dictionaries.
Parameters
___________
Required:
infile : the MagIC formatted tab delimited data file
first line contains 'tab' in the first column and the data file type in the second (e.g., measurements, specimen, sample, etc.)
Optional:
data : data read in with, e.g., file.readlines()
Returns
_______
list of dictionaries, file type
"""
if infile:
if not os.path.exists(infile):
if return_keys:
return [], 'empty_file', []
return [], 'empty_file'
hold, magic_data, magic_record, magic_keys = [], [], {}, []
if data:
lines = list(data)
elif (not data) and (not infile):
if return_keys:
return [], 'empty_file', []
return [], 'empty_file'
else:
# if the file doesn't exist, end here
if not os.path.exists(infile):
if return_keys:
return [], 'bad_file', []
return [], 'bad_file'
# use custom pmagpy open_file
lines = open_file(infile, verbose=verbose)
if not lines:
if return_keys:
return [], 'bad_file', []
return [], 'bad_file'
d_line = lines[0][:-1].strip('\n').strip('\r').strip('\t')
if not d_line:
if return_keys:
return [], 'empty_file', []
return [], 'empty_file'
if d_line[0] == "s" or d_line[1] == "s":
delim = 'space'
elif d_line[0] == "t" or d_line[1] == "t":
delim = 'tab'
else:
print('-W- error reading {}. Check that this is a MagIC-format file'.format(infile))
if return_keys:
return [], 'bad_file', []
return [], 'bad_file'
if delim == 'space':
file_type = d_line.split()[1]
if delim == 'tab':
file_type = d_line.split('\t')[1]
if file_type == 'delimited':
if delim == 'space':
file_type = d_line.split()[2]
if delim == 'tab':
file_type = d_line.split('\t')[2]
line = lines[1].strip('\n').strip('\r')
if delim == 'space':
line = line.split() # lines[1][:-1].split()
if delim == 'tab':
line = line.split('\t') # lines[1][:-1].split('\t')
for key in line:
magic_keys.append(key)
lines = lines[2:]
if len(lines) < 1:
if return_keys:
return [], 'empty_file', []
return [], 'empty_file'
for line in lines[:-1]:
line.replace('\n', '')
if delim == 'space':
rec = line[:-1].split()
if delim == 'tab':
rec = line[:-1].split('\t')
hold.append(rec)
line = lines[-1].replace('\n', '').replace('\r', '')
if delim == 'space':
rec = line[:-1].split()
if delim == 'tab':
rec = line.split('\t')
hold.append(rec)
for rec in hold:
magic_record = {}
if len(magic_keys) > len(rec):
# pad rec with empty strings if needed
for i in range(len(magic_keys) - len(rec)):
rec.append('')
if len(magic_keys) != len(rec):
# ignores this warning when reading the dividers in an upload.txt
# composite file
if rec != ['>>>>>>>>>>'] and 'delimited' not in rec[0]:
print("Warning: Uneven record lengths detected in {}: ".format(infile))
print('keys:', magic_keys)
print('record:', rec)
# modified by Ron Shaar:
# add a health check:
# if len(magic_keys) > len(rec): take rec
# if len(magic_keys) < len(rec): take magic_keys
# original code: for k in range(len(rec)):
# changed to: for k in range(min(len(magic_keys),len(rec))):
for k in range(min(len(magic_keys), len(rec))):
magic_record[magic_keys[k]] = rec[k].strip('\n').strip('\r')
magic_data.append(magic_record)
magictype = file_type.lower().split("_")
Types = ['er', 'magic', 'pmag', 'rmag']
if magictype[0] in Types:
file_type = file_type.lower()
if return_keys:
return magic_data, file_type, magic_keys
return magic_data, file_type | python | def magic_read(infile, data=None, return_keys=False, verbose=False):
"""
Reads a Magic template file, returns data in a list of dictionaries.
Parameters
___________
Required:
infile : the MagIC formatted tab delimited data file
first line contains 'tab' in the first column and the data file type in the second (e.g., measurements, specimen, sample, etc.)
Optional:
data : data read in with, e.g., file.readlines()
Returns
_______
list of dictionaries, file type
"""
if infile:
if not os.path.exists(infile):
if return_keys:
return [], 'empty_file', []
return [], 'empty_file'
hold, magic_data, magic_record, magic_keys = [], [], {}, []
if data:
lines = list(data)
elif (not data) and (not infile):
if return_keys:
return [], 'empty_file', []
return [], 'empty_file'
else:
# if the file doesn't exist, end here
if not os.path.exists(infile):
if return_keys:
return [], 'bad_file', []
return [], 'bad_file'
# use custom pmagpy open_file
lines = open_file(infile, verbose=verbose)
if not lines:
if return_keys:
return [], 'bad_file', []
return [], 'bad_file'
d_line = lines[0][:-1].strip('\n').strip('\r').strip('\t')
if not d_line:
if return_keys:
return [], 'empty_file', []
return [], 'empty_file'
if d_line[0] == "s" or d_line[1] == "s":
delim = 'space'
elif d_line[0] == "t" or d_line[1] == "t":
delim = 'tab'
else:
print('-W- error reading {}. Check that this is a MagIC-format file'.format(infile))
if return_keys:
return [], 'bad_file', []
return [], 'bad_file'
if delim == 'space':
file_type = d_line.split()[1]
if delim == 'tab':
file_type = d_line.split('\t')[1]
if file_type == 'delimited':
if delim == 'space':
file_type = d_line.split()[2]
if delim == 'tab':
file_type = d_line.split('\t')[2]
line = lines[1].strip('\n').strip('\r')
if delim == 'space':
line = line.split() # lines[1][:-1].split()
if delim == 'tab':
line = line.split('\t') # lines[1][:-1].split('\t')
for key in line:
magic_keys.append(key)
lines = lines[2:]
if len(lines) < 1:
if return_keys:
return [], 'empty_file', []
return [], 'empty_file'
for line in lines[:-1]:
line.replace('\n', '')
if delim == 'space':
rec = line[:-1].split()
if delim == 'tab':
rec = line[:-1].split('\t')
hold.append(rec)
line = lines[-1].replace('\n', '').replace('\r', '')
if delim == 'space':
rec = line[:-1].split()
if delim == 'tab':
rec = line.split('\t')
hold.append(rec)
for rec in hold:
magic_record = {}
if len(magic_keys) > len(rec):
# pad rec with empty strings if needed
for i in range(len(magic_keys) - len(rec)):
rec.append('')
if len(magic_keys) != len(rec):
# ignores this warning when reading the dividers in an upload.txt
# composite file
if rec != ['>>>>>>>>>>'] and 'delimited' not in rec[0]:
print("Warning: Uneven record lengths detected in {}: ".format(infile))
print('keys:', magic_keys)
print('record:', rec)
# modified by Ron Shaar:
# add a health check:
# if len(magic_keys) > len(rec): take rec
# if len(magic_keys) < len(rec): take magic_keys
# original code: for k in range(len(rec)):
# changed to: for k in range(min(len(magic_keys),len(rec))):
for k in range(min(len(magic_keys), len(rec))):
magic_record[magic_keys[k]] = rec[k].strip('\n').strip('\r')
magic_data.append(magic_record)
magictype = file_type.lower().split("_")
Types = ['er', 'magic', 'pmag', 'rmag']
if magictype[0] in Types:
file_type = file_type.lower()
if return_keys:
return magic_data, file_type, magic_keys
return magic_data, file_type | Reads a Magic template file, returns data in a list of dictionaries.
Parameters
___________
Required:
infile : the MagIC formatted tab delimited data file
first line contains 'tab' in the first column and the data file type in the second (e.g., measurements, specimen, sample, etc.)
Optional:
data : data read in with, e.g., file.readlines()
Returns
_______
list of dictionaries, file type | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1738-L1853 |
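Usage sketch for magic_read (assumes a MagIC-format specimens.txt in the working directory):

    import pmagpy.pmag as pmag

    data, file_type = pmag.magic_read('specimens.txt')
    print(file_type, len(data))

    # with return_keys=True the column names are returned as well
    data, file_type, keys = pmag.magic_read('specimens.txt', return_keys=True)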
PmagPy/PmagPy | pmagpy/pmag.py | magic_read_dict | def magic_read_dict(path, data=None, sort_by_this_name=None, return_keys=False):
"""
Read a magic-formatted tab-delimited file and return a dictionary of
dictionaries, with this format:
{'Z35.5a': {'specimen_weight': '1.000e-03', 'er_citation_names': 'This study', 'specimen_volume': '', 'er_location_name': '', 'er_site_name': 'Z35.', 'er_sample_name': 'Z35.5', 'specimen_class': '', 'er_specimen_name': 'Z35.5a', 'specimen_lithology': '', 'specimen_type': ''}, ....}
return data, file_type, and keys (if return_keys is true)
"""
DATA = {}
#fin = open(path, 'r')
#first_line = fin.readline()
lines = open_file(path)
if not lines:
if return_keys:
return {}, 'empty_file', None
else:
return {}, 'empty_file'
first_line = lines.pop(0)
if first_line[0] == "s" or first_line[1] == "s":
delim = ' '
elif first_line[0] == "t" or first_line[1] == "t":
delim = '\t'
else:
print('-W- error reading ', path)
if return_keys:
return {}, 'bad_file', None
else:
return {}, 'bad_file'
file_type = first_line.strip('\n').strip('\r').split(delim)[1]
item_type = file_type
#item_type = file_type.split('_')[1][:-1]
if sort_by_this_name:
pass
elif item_type == 'age':
sort_by_this_name = "by_line_number"
else:
sort_by_this_name = item_type
line = lines.pop(0)
header = line.strip('\n').strip('\r').split(delim)
counter = 0
for line in lines:
tmp_data = {}
tmp_line = line.strip('\n').strip('\r').split(delim)
for i in range(len(header)):
if i < len(tmp_line):
tmp_data[header[i]] = tmp_line[i].strip()
else:
tmp_data[header[i]] = ""
if sort_by_this_name == "by_line_number":
DATA[counter] = tmp_data
counter += 1
else:
if tmp_data[sort_by_this_name] != "":
DATA[tmp_data[sort_by_this_name]] = tmp_data
if return_keys:
return DATA, file_type, header
else:
return DATA, file_type | python | def magic_read_dict(path, data=None, sort_by_this_name=None, return_keys=False):
"""
Read a magic-formatted tab-delimited file and return a dictionary of
dictionaries, with this format:
{'Z35.5a': {'specimen_weight': '1.000e-03', 'er_citation_names': 'This study', 'specimen_volume': '', 'er_location_name': '', 'er_site_name': 'Z35.', 'er_sample_name': 'Z35.5', 'specimen_class': '', 'er_specimen_name': 'Z35.5a', 'specimen_lithology': '', 'specimen_type': ''}, ....}
return data, file_type, and keys (if return_keys is true)
"""
DATA = {}
#fin = open(path, 'r')
#first_line = fin.readline()
lines = open_file(path)
if not lines:
if return_keys:
return {}, 'empty_file', None
else:
return {}, 'empty_file'
first_line = lines.pop(0)
if first_line[0] == "s" or first_line[1] == "s":
delim = ' '
elif first_line[0] == "t" or first_line[1] == "t":
delim = '\t'
else:
print('-W- error reading ', path)
if return_keys:
return {}, 'bad_file', None
else:
return {}, 'bad_file'
file_type = first_line.strip('\n').strip('\r').split(delim)[1]
item_type = file_type
#item_type = file_type.split('_')[1][:-1]
if sort_by_this_name:
pass
elif item_type == 'age':
sort_by_this_name = "by_line_number"
else:
sort_by_this_name = item_type
line = lines.pop(0)
header = line.strip('\n').strip('\r').split(delim)
counter = 0
for line in lines:
tmp_data = {}
tmp_line = line.strip('\n').strip('\r').split(delim)
for i in range(len(header)):
if i < len(tmp_line):
tmp_data[header[i]] = tmp_line[i].strip()
else:
tmp_data[header[i]] = ""
if sort_by_this_name == "by_line_number":
DATA[counter] = tmp_data
counter += 1
else:
if tmp_data[sort_by_this_name] != "":
DATA[tmp_data[sort_by_this_name]] = tmp_data
if return_keys:
return DATA, file_type, header
else:
return DATA, file_type | Read a magic-formatted tab-delimited file and return a dictionary of
dictionaries, with this format:
{'Z35.5a': {'specimen_weight': '1.000e-03', 'er_citation_names': 'This study', 'specimen_volume': '', 'er_location_name': '', 'er_site_name': 'Z35.', 'er_sample_name': 'Z35.5', 'specimen_class': '', 'er_specimen_name': 'Z35.5a', 'specimen_lithology': '', 'specimen_type': ''}, ....}
return data, file_type, and keys (if return_keys is true) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1856-L1914 |
PmagPy/PmagPy | pmagpy/pmag.py | sort_magic_data | def sort_magic_data(magic_data, sort_name):
'''
Sort magic_data by header (like er_specimen_name for example)
'''
magic_data_sorted = {}
for rec in magic_data:
name = rec[sort_name]
if name not in list(magic_data_sorted.keys()):
magic_data_sorted[name] = []
magic_data_sorted[name].append(rec)
return magic_data_sorted | python | def sort_magic_data(magic_data, sort_name):
'''
Sort magic_data by header (like er_specimen_name for example)
'''
magic_data_sorted = {}
for rec in magic_data:
name = rec[sort_name]
if name not in list(magic_data_sorted.keys()):
magic_data_sorted[name] = []
magic_data_sorted[name].append(rec)
return magic_data_sorted | Sort magic_data by header (like er_specimen_name for example) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1917-L1927 |
PmagPy/PmagPy | pmagpy/pmag.py | upload_read | def upload_read(infile, table):
"""
Reads a table from a MagIC upload (or downloaded) txt file, puts data in a
list of dictionaries
"""
delim = 'tab'
hold, magic_data, magic_record, magic_keys = [], [], {}, []
f = open(infile, "r")
#
# look for right table
#
line = f.readline()[:-1]
file_type = line.split('\t')[1]
if file_type == 'delimited':
file_type = line.split('\t')[2]
if delim == 'tab':
line = f.readline()[:-1].split('\t')
else:
f.close()
print("only tab delimitted files are supported now")
return
while file_type != table:
while line[0][0:5] in f.readlines() != ">>>>>":
pass
line = f.readline()[:-1]
file_type = line.split('\t')[1]
if file_type == 'delimited':
file_type = line.split('\t')[2]
line = f.readline()[:-1].split('\t')  # header line for this table
while line[0][0:5] in f.readlines() != ">>>>>":
for key in line:
magic_keys.append(key)
for line in f.readlines():
rec = line[:-1].split('\t')
hold.append(rec)
for rec in hold:
magic_record = {}
if len(magic_keys) != len(rec):
print("Uneven record lengths detected: ", rec)
input("Return to continue.... ")
for k in range(len(magic_keys)):
magic_record[magic_keys[k]] = rec[k]
magic_data.append(magic_record)
f.close()
return magic_data | python | def upload_read(infile, table):
"""
Reads a table from a MagIC upload (or downloaded) txt file, puts data in a
list of dictionaries
"""
delim = 'tab'
hold, magic_data, magic_record, magic_keys = [], [], {}, []
f = open(infile, "r")
#
# look for right table
#
line = f.readline()[:-1]
file_type = line.split('\t')[1]
if file_type == 'delimited':
file_type = line.split('\t')[2]
if delim == 'tab':
line = f.readline()[:-1].split('\t')
else:
f.close()
print("only tab delimitted files are supported now")
return
while file_type != table:
while line[0][0:5] in f.readlines() != ">>>>>":
pass
line = f.readline()[:-1]
file_type = line.split('\t')[1]
if file_type == 'delimited':
file_type = line.split('\t')[2]
line = f.readline()[:-1].split('\t')  # header line for this table
while line[0][0:5] in f.readlines() != ">>>>>":
for key in line:
magic_keys.append(key)
for line in f.readlines():
rec = line[:-1].split('\t')
hold.append(rec)
for rec in hold:
magic_record = {}
if len(magic_keys) != len(rec):
print("Uneven record lengths detected: ", rec)
input("Return to continue.... ")
for k in range(len(magic_keys)):
magic_record[magic_keys[k]] = rec[k]
magic_data.append(magic_record)
f.close()
return magic_data | Reads a table from a MagIC upload (or downloaded) txt file, puts data in a
list of dictionaries | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1930-L1974 |
PmagPy/PmagPy | pmagpy/pmag.py | putout | def putout(ofile, keylist, Rec):
"""
writes out a magic format record to ofile
"""
pmag_out = open(ofile, 'a')
outstring = ""
for key in keylist:
try:
outstring = outstring + '\t' + str(Rec[key]).strip()
except:
print(key, Rec[key])
# raw_input()
outstring = outstring + '\n'
pmag_out.write(outstring[1:])
pmag_out.close() | python | def putout(ofile, keylist, Rec):
"""
writes out a magic format record to ofile
"""
pmag_out = open(ofile, 'a')
outstring = ""
for key in keylist:
try:
outstring = outstring + '\t' + str(Rec[key]).strip()
except:
print(key, Rec[key])
# raw_input()
outstring = outstring + '\n'
pmag_out.write(outstring[1:])
pmag_out.close() | writes out a magic format record to ofile | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1977-L1991 |
PmagPy/PmagPy | pmagpy/pmag.py | first_rec | def first_rec(ofile, Rec, file_type):
"""
opens the file ofile as a magic template file with headers as the keys to Rec
"""
keylist = []
opened = False
# sometimes Windows needs a little extra time to open a file
# or else it throws an error
while not opened:
try:
pmag_out = open(ofile, 'w')
opened = True
except IOError:
time.sleep(1)
outstring = "tab \t" + file_type + "\n"
pmag_out.write(outstring)
keystring = ""
for key in list(Rec.keys()):
keystring = keystring + '\t' + key.strip()
keylist.append(key)
keystring = keystring + '\n'
pmag_out.write(keystring[1:])
pmag_out.close()
return keylist | python | def first_rec(ofile, Rec, file_type):
"""
opens the file ofile as a magic template file with headers as the keys to Rec
"""
keylist = []
opened = False
# sometimes Windows needs a little extra time to open a file
# or else it throws an error
while not opened:
try:
pmag_out = open(ofile, 'w')
opened = True
except IOError:
time.sleep(1)
outstring = "tab \t" + file_type + "\n"
pmag_out.write(outstring)
keystring = ""
for key in list(Rec.keys()):
keystring = keystring + '\t' + key.strip()
keylist.append(key)
keystring = keystring + '\n'
pmag_out.write(keystring[1:])
pmag_out.close()
return keylist | opens the file ofile as a magic template file with headers as the keys to Rec | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L1994-L2017 |
PmagPy/PmagPy | pmagpy/pmag.py | magic_write_old | def magic_write_old(ofile, Recs, file_type):
"""
writes out a magic format list of dictionaries to ofile
Parameters
_________
ofile : path to output file
Recs : list of dictionaries in MagIC format
file_type : MagIC table type (e.g., specimens)
Effects :
writes a MagIC formatted file from Recs
"""
if len(Recs) < 1:
print ('nothing to write')
return
pmag_out = open(ofile, 'w')
outstring = "tab \t" + file_type + "\n"
pmag_out.write(outstring)
keystring = ""
keylist = []
for key in list(Recs[0].keys()):
keylist.append(key)
keylist.sort()
for key in keylist:
keystring = keystring + '\t' + key.strip()
keystring = keystring + '\n'
pmag_out.write(keystring[1:])
for Rec in Recs:
outstring = ""
for key in keylist:
try:
outstring = outstring + '\t' + str(Rec[key].strip())
except:
if 'er_specimen_name' in list(Rec.keys()):
print(Rec['er_specimen_name'])
elif 'er_specimen_names' in list(Rec.keys()):
print(Rec['er_specimen_names'])
print(key, Rec[key])
# raw_input()
outstring = outstring + '\n'
pmag_out.write(outstring[1:])
pmag_out.close() | python | def magic_write_old(ofile, Recs, file_type):
"""
writes out a magic format list of dictionaries to ofile
Parameters
_________
ofile : path to output file
Recs : list of dictionaries in MagIC format
file_type : MagIC table type (e.g., specimens)
Effects :
writes a MagIC formatted file from Recs
"""
if len(Recs) < 1:
print ('nothing to write')
return
pmag_out = open(ofile, 'w')
outstring = "tab \t" + file_type + "\n"
pmag_out.write(outstring)
keystring = ""
keylist = []
for key in list(Recs[0].keys()):
keylist.append(key)
keylist.sort()
for key in keylist:
keystring = keystring + '\t' + key.strip()
keystring = keystring + '\n'
pmag_out.write(keystring[1:])
for Rec in Recs:
outstring = ""
for key in keylist:
try:
outstring = outstring + '\t' + str(Rec[key].strip())
except:
if 'er_specimen_name' in list(Rec.keys()):
print(Rec['er_specimen_name'])
elif 'er_specimen_names' in list(Rec.keys()):
print(Rec['er_specimen_names'])
print(key, Rec[key])
# raw_input()
outstring = outstring + '\n'
pmag_out.write(outstring[1:])
pmag_out.close() | writes out a magic format list of dictionaries to ofile
Parameters
_________
ofile : path to output file
Recs : list of dictionaries in MagIC format
file_type : MagIC table type (e.g., specimens)
Effects :
writes a MagIC formatted file from Recs | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L2020-L2062 |
PmagPy/PmagPy | pmagpy/pmag.py | magic_write | def magic_write(ofile, Recs, file_type):
"""
Parameters
_________
ofile : path to output file
Recs : list of dictionaries in MagIC format
file_type : MagIC table type (e.g., specimens)
Return :
[True,False] : True if successful
ofile : same as input
Effects :
writes a MagIC formatted file from Recs
"""
if len(Recs) < 1:
print('No records to write to file {}'.format(ofile))
return False, ""
if os.path.split(ofile)[0] != "" and not os.path.isdir(os.path.split(ofile)[0]):
os.mkdir(os.path.split(ofile)[0])
pmag_out = open(ofile, 'w+', errors="backslashreplace")
outstring = "tab \t" + file_type
outstring = outstring.strip("\n").strip(
"\r") + "\n" # make sure it's clean for Windows
pmag_out.write(outstring)
keystring = ""
keylist = []
for key in list(Recs[0].keys()):
keylist.append(key)
keylist.sort()
for key in keylist:
keystring = keystring + '\t' + key.strip()
keystring = keystring + '\n'
pmag_out.write(keystring[1:])
for Rec in Recs:
outstring = ""
for key in keylist:
try:
outstring = outstring + '\t' + str(Rec[key]).strip()
except KeyError:
if 'er_specimen_name' in list(Rec.keys()):
print(Rec['er_specimen_name'])
elif 'specimen' in list(Rec.keys()):
print(Rec['specimen'])
elif 'er_specimen_names' in list(Rec.keys()):
print('specimen names:', Rec['er_specimen_names'])
print("No data for %s" % key)
# just skip it:
outstring = outstring + "\t"
# raw_input()
outstring = outstring + '\n'
pmag_out.write(outstring[1:])
pmag_out.close()
print(len(Recs), ' records written to file ', ofile)
return True, ofile | python | def magic_write(ofile, Recs, file_type):
"""
Parameters
_________
ofile : path to output file
Recs : list of dictionaries in MagIC format
file_type : MagIC table type (e.g., specimens)
Return :
[True,False] : True if successful
ofile : same as input
Effects :
writes a MagIC formatted file from Recs
"""
if len(Recs) < 1:
print('No records to write to file {}'.format(ofile))
return False, ""
if os.path.split(ofile)[0] != "" and not os.path.isdir(os.path.split(ofile)[0]):
os.mkdir(os.path.split(ofile)[0])
pmag_out = open(ofile, 'w+', errors="backslashreplace")
outstring = "tab \t" + file_type
outstring = outstring.strip("\n").strip(
"\r") + "\n" # make sure it's clean for Windows
pmag_out.write(outstring)
keystring = ""
keylist = []
for key in list(Recs[0].keys()):
keylist.append(key)
keylist.sort()
for key in keylist:
keystring = keystring + '\t' + key.strip()
keystring = keystring + '\n'
pmag_out.write(keystring[1:])
for Rec in Recs:
outstring = ""
for key in keylist:
try:
outstring = outstring + '\t' + str(Rec[key]).strip()
except KeyError:
if 'er_specimen_name' in list(Rec.keys()):
print(Rec['er_specimen_name'])
elif 'specimen' in list(Rec.keys()):
print(Rec['specimen'])
elif 'er_specimen_names' in list(Rec.keys()):
print('specimen names:', Rec['er_specimen_names'])
print("No data for %s" % key)
# just skip it:
outstring = outstring + "\t"
# raw_input()
outstring = outstring + '\n'
pmag_out.write(outstring[1:])
pmag_out.close()
print(len(Recs), ' records written to file ', ofile)
return True, ofile | Parameters
_________
ofile : path to output file
Recs : list of dictionaries in MagIC format
file_type : MagIC table type (e.g., specimens)
Return :
[True,False] : True if successful
ofile : same as input
Effects :
writes a MagIC formatted file from Recs | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L2065-L2120 |
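Usage sketch for magic_write (illustration only; the records below are invented):

    import pmagpy.pmag as pmag

    recs = [{'site': 'sr01', 'lat': '33.1', 'lon': '117.2'},
            {'site': 'sr02', 'lat': '33.2', 'lon': '117.3'}]
    ok, outfile = pmag.magic_write('sites.txt', recs, 'sites')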
PmagPy/PmagPy | pmagpy/pmag.py | dotilt | def dotilt(dec, inc, bed_az, bed_dip):
"""
Does a tilt correction on a direction (dec,inc) using bedding dip direction
and bedding dip.
Parameters
----------
dec : declination directions in degrees
inc : inclination direction in degrees
bed_az : bedding dip direction
bed_dip : bedding dip
Returns
-------
dec,inc : a tuple of rotated dec, inc values
Examples
-------
>>> pmag.dotilt(91.2,43.1,90.0,20.0)
(90.952568837153436, 23.103411670066617)
"""
rad = old_div(np.pi, 180.) # converts from degrees to radians
X = dir2cart([dec, inc, 1.]) # get cartesian coordinates of dec,inc
# get some sines and cosines of new coordinate system
sa, ca = -np.sin(bed_az * rad), np.cos(bed_az * rad)
cdp, sdp = np.cos(bed_dip * rad), np.sin(bed_dip * rad)
# do the rotation
xc = X[0] * (sa * sa + ca * ca * cdp) + X[1] * \
(ca * sa * (1. - cdp)) + X[2] * sdp * ca
yc = X[0] * ca * sa * (1. - cdp) + X[1] * \
(ca * ca + sa * sa * cdp) - X[2] * sa * sdp
zc = X[0] * ca * sdp - X[1] * sdp * sa - X[2] * cdp
# convert back to direction:
Dir = cart2dir([xc, yc, -zc])
# return declination, inclination of rotated direction
return Dir[0], Dir[1] | python | def dotilt(dec, inc, bed_az, bed_dip):
"""
Does a tilt correction on a direction (dec,inc) using bedding dip direction
and bedding dip.
Parameters
----------
dec : declination directions in degrees
inc : inclination direction in degrees
bed_az : bedding dip direction
bed_dip : bedding dip
Returns
-------
dec,inc : a tuple of rotated dec, inc values
Examples
-------
>>> pmag.dotilt(91.2,43.1,90.0,20.0)
(90.952568837153436, 23.103411670066617)
"""
rad = old_div(np.pi, 180.) # converts from degrees to radians
X = dir2cart([dec, inc, 1.]) # get cartesian coordinates of dec,inc
# get some sines and cosines of new coordinate system
sa, ca = -np.sin(bed_az * rad), np.cos(bed_az * rad)
cdp, sdp = np.cos(bed_dip * rad), np.sin(bed_dip * rad)
# do the rotation
xc = X[0] * (sa * sa + ca * ca * cdp) + X[1] * \
(ca * sa * (1. - cdp)) + X[2] * sdp * ca
yc = X[0] * ca * sa * (1. - cdp) + X[1] * \
(ca * ca + sa * sa * cdp) - X[2] * sa * sdp
zc = X[0] * ca * sdp - X[1] * sdp * sa - X[2] * cdp
# convert back to direction:
Dir = cart2dir([xc, yc, -zc])
# return declination, inclination of rotated direction
return Dir[0], Dir[1] | Does a tilt correction on a direction (dec,inc) using bedding dip direction
and bedding dip.
Parameters
----------
dec : declination directions in degrees
inc : inclination direction in degrees
bed_az : bedding dip direction
bed_dip : bedding dip
Returns
-------
dec,inc : a tuple of rotated dec, inc values
Examples
-------
>>> pmag.dotilt(91.2,43.1,90.0,20.0)
(90.952568837153436, 23.103411670066617) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L2123-L2158 |
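A short usage sketch of dotilt, simply reproducing the docstring example above (bedding dip direction 90, dip 20); rounding is only for display.

import pmagpy.pmag as pmag

# tilt-correct a geographic direction using bedding dip direction and dip
dec_t, inc_t = pmag.dotilt(91.2, 43.1, 90.0, 20.0)
print(round(dec_t, 2), round(inc_t, 2))  # approximately 90.95 23.10
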
PmagPy/PmagPy | pmagpy/pmag.py | dotilt_V | def dotilt_V(indat):
"""
Does a tilt correction on an array with rows of dec,inc bedding dip direction and dip.
Parameters
----------
input : declination, inclination, bedding dip direction and bedding dip
nested array of [[dec1, inc1, bed_az1, bed_dip1],[dec2,inc2,bed_az2,bed_dip2]...]
Returns
-------
dec,inc : arrays of rotated declination, inclination
"""
indat = indat.transpose()
# unpack input array into separate arrays
dec, inc, bed_az, bed_dip = indat[0], indat[1], indat[2], indat[3]
rad = old_div(np.pi, 180.) # convert to radians
Dir = np.array([dec, inc]).transpose()
X = dir2cart(Dir).transpose() # get cartesian coordinates
N = np.size(dec)
# get some sines and cosines of new coordinate system
sa, ca = -np.sin(bed_az * rad), np.cos(bed_az * rad)
cdp, sdp = np.cos(bed_dip * rad), np.sin(bed_dip * rad)
# do the rotation
xc = X[0] * (sa * sa + ca * ca * cdp) + X[1] * \
(ca * sa * (1. - cdp)) + X[2] * sdp * ca
yc = X[0] * ca * sa * (1. - cdp) + X[1] * \
(ca * ca + sa * sa * cdp) - X[2] * sa * sdp
zc = X[0] * ca * sdp - X[1] * sdp * sa - X[2] * cdp
# convert back to direction:
cart = np.array([xc, yc, -zc]).transpose()
Dir = cart2dir(cart).transpose()
# return declination, inclination arrays of rotated direction
return Dir[0], Dir[1] | python | def dotilt_V(indat):
"""
Does a tilt correction on an array with rows of dec,inc bedding dip direction and dip.
Parameters
----------
input : declination, inclination, bedding dip direction and bedding dip
nested array of [[dec1, inc1, bed_az1, bed_dip1],[dec2,inc2,bed_az2,bed_dip2]...]
Returns
-------
dec,inc : arrays of rotated declination, inclination
"""
indat = indat.transpose()
# unpack input array into separate arrays
dec, inc, bed_az, bed_dip = indat[0], indat[1], indat[2], indat[3]
rad = old_div(np.pi, 180.) # convert to radians
Dir = np.array([dec, inc]).transpose()
X = dir2cart(Dir).transpose() # get cartesian coordinates
N = np.size(dec)
# get some sines and cosines of new coordinate system
sa, ca = -np.sin(bed_az * rad), np.cos(bed_az * rad)
cdp, sdp = np.cos(bed_dip * rad), np.sin(bed_dip * rad)
# do the rotation
xc = X[0] * (sa * sa + ca * ca * cdp) + X[1] * \
(ca * sa * (1. - cdp)) + X[2] * sdp * ca
yc = X[0] * ca * sa * (1. - cdp) + X[1] * \
(ca * ca + sa * sa * cdp) - X[2] * sa * sdp
zc = X[0] * ca * sdp - X[1] * sdp * sa - X[2] * cdp
# convert back to direction:
cart = np.array([xc, yc, -zc]).transpose()
Dir = cart2dir(cart).transpose()
# return declination, inclination arrays of rotated direction
return Dir[0], Dir[1] | Does a tilt correction on an array with rows of dec,inc bedding dip direction and dip.
Parameters
----------
input : declination, inclination, bedding dip direction and bedding dip
nested array of [[dec1, inc1, bed_az1, bed_dip1],[dec2,inc2,bed_az2,bed_dip2]...]
Returns
-------
dec,inc : arrays of rotated declination, inclination | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L2161-L2195 |
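A usage sketch of the vectorized form; the first input row is made up, while the second repeats the scalar dotilt example so the result can be checked against it.

import numpy as np
import pmagpy.pmag as pmag

# each row is [dec, inc, bed_az, bed_dip]
indat = np.array([
    [354.0, 12.0, 120.0, 30.0],
    [91.2, 43.1, 90.0, 20.0],
])
decs, incs = pmag.dotilt_V(indat)
print(decs[1], incs[1])  # approximately 90.95 23.10, as for pmag.dotilt
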
PmagPy/PmagPy | pmagpy/pmag.py | dogeo | def dogeo(dec, inc, az, pl):
"""
Rotates declination and inclination into geographic coordinates using the
azimuth and plunge of the X direction (lab arrow) of a specimen.
Parameters
----------
dec : declination in specimen coordinates
inc : inclination in specimen coordinates
Returns
-------
rotated_direction : tuple of declination, inclination in geographic coordinates
Examples
--------
>>> pmag.dogeo(0.0,90.0,0.0,45.5)
(180.0, 44.5)
"""
A1, A2, A3 = [], [], [] # set up lists for rotation vector
# put dec inc in direction list and set length to unity
Dir = [dec, inc, 1.]
X = dir2cart(Dir) # get cartesian coordinates
#
# set up rotation matrix
#
A1 = dir2cart([az, pl, 1.])
A2 = dir2cart([az + 90., 0, 1.])
A3 = dir2cart([az - 180., 90. - pl, 1.])
#
# do rotation
#
xp = A1[0] * X[0] + A2[0] * X[1] + A3[0] * X[2]
yp = A1[1] * X[0] + A2[1] * X[1] + A3[1] * X[2]
zp = A1[2] * X[0] + A2[2] * X[1] + A3[2] * X[2]
#
# transform back to dec,inc
#
Dir_geo = cart2dir([xp, yp, zp])
return Dir_geo[0], Dir_geo[1] | python | def dogeo(dec, inc, az, pl):
"""
Rotates declination and inclination into geographic coordinates using the
azimuth and plunge of the X direction (lab arrow) of a specimen.
Parameters
----------
dec : declination in specimen coordinates
inc : inclination in specimen coordinates
Returns
-------
rotated_direction : tuple of declination, inclination in geographic coordinates
Examples
--------
>>> pmag.dogeo(0.0,90.0,0.0,45.5)
(180.0, 44.5)
"""
A1, A2, A3 = [], [], [] # set up lists for rotation vector
# put dec inc in direction list and set length to unity
Dir = [dec, inc, 1.]
X = dir2cart(Dir) # get cartesian coordinates
#
# set up rotation matrix
#
A1 = dir2cart([az, pl, 1.])
A2 = dir2cart([az + 90., 0, 1.])
A3 = dir2cart([az - 180., 90. - pl, 1.])
#
# do rotation
#
xp = A1[0] * X[0] + A2[0] * X[1] + A3[0] * X[2]
yp = A1[1] * X[0] + A2[1] * X[1] + A3[1] * X[2]
zp = A1[2] * X[0] + A2[2] * X[1] + A3[2] * X[2]
#
# transform back to dec,inc
#
Dir_geo = cart2dir([xp, yp, zp])
return Dir_geo[0], Dir_geo[1] | Rotates declination and inclination into geographic coordinates using the
azimuth and plunge of the X direction (lab arrow) of a specimen.
Parameters
----------
dec : declination in specimen coordinates
inc : inclination in specimen coordinates
Returns
-------
rotated_direction : tuple of declination, inclination in geographic coordinates
Examples
--------
>>> pmag.dogeo(0.0,90.0,0.0,45.5)
(180.0, 44.5) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L2198-L2237 |
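A usage sketch of dogeo reproducing the docstring example; note that the two trailing arguments az and pl are the azimuth and plunge of the specimen's lab arrow (x direction), which the Parameters block above does not list explicitly.

import pmagpy.pmag as pmag

# rotate a specimen-coordinate direction into geographic coordinates
dec_g, inc_g = pmag.dogeo(0.0, 90.0, 0.0, 45.5)
print(dec_g, inc_g)  # 180.0 44.5, as in the docstring example
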
PmagPy/PmagPy | pmagpy/pmag.py | dogeo_V | def dogeo_V(indat):
"""
Rotates declination and inclination into geographic coordinates using the
azimuth and plunge of the X direction (lab arrow) of a specimen.
Parameters
----------
indat: nested list of [dec, inc, az, pl] data
Returns
-------
rotated_directions : arrays of Declinations and Inclinations
"""
indat = indat.transpose()
# unpack input array into separate arrays
dec, inc, az, pl = indat[0], indat[1], indat[2], indat[3]
Dir = np.array([dec, inc]).transpose()
X = dir2cart(Dir).transpose() # get cartesian coordinates
N = np.size(dec)
A1 = dir2cart(np.array([az, pl, np.ones(N)]).transpose()).transpose()
A2 = dir2cart(
np.array([az + 90., np.zeros(N), np.ones(N)]).transpose()).transpose()
A3 = dir2cart(
np.array([az - 180., 90. - pl, np.ones(N)]).transpose()).transpose()
# do rotation
#
xp = A1[0] * X[0] + A2[0] * X[1] + A3[0] * X[2]
yp = A1[1] * X[0] + A2[1] * X[1] + A3[1] * X[2]
zp = A1[2] * X[0] + A2[2] * X[1] + A3[2] * X[2]
cart = np.array([xp, yp, zp]).transpose()
#
# transform back to dec,inc
#
Dir_geo = cart2dir(cart).transpose()
# send back declination and inclination arrays
return Dir_geo[0], Dir_geo[1] | python | def dogeo_V(indat):
"""
Rotates declination and inclination into geographic coordinates using the
azimuth and plunge of the X direction (lab arrow) of a specimen.
Parameters
----------
indat: nested list of [dec, inc, az, pl] data
Returns
-------
rotated_directions : arrays of Declinations and Inclinations
"""
indat = indat.transpose()
# unpack input array into separate arrays
dec, inc, az, pl = indat[0], indat[1], indat[2], indat[3]
Dir = np.array([dec, inc]).transpose()
X = dir2cart(Dir).transpose() # get cartesian coordinates
N = np.size(dec)
A1 = dir2cart(np.array([az, pl, np.ones(N)]).transpose()).transpose()
A2 = dir2cart(
np.array([az + 90., np.zeros(N), np.ones(N)]).transpose()).transpose()
A3 = dir2cart(
np.array([az - 180., 90. - pl, np.ones(N)]).transpose()).transpose()
# do rotation
#
xp = A1[0] * X[0] + A2[0] * X[1] + A3[0] * X[2]
yp = A1[1] * X[0] + A2[1] * X[1] + A3[1] * X[2]
zp = A1[2] * X[0] + A2[2] * X[1] + A3[2] * X[2]
cart = np.array([xp, yp, zp]).transpose()
#
# transform back to dec,inc
#
Dir_geo = cart2dir(cart).transpose()
# send back declination and inclination arrays
return Dir_geo[0], Dir_geo[1] | Rotates declination and inclination into geographic coordinates using the
azimuth and plunge of the X direction (lab arrow) of a specimen.
Parameters
----------
indat: nested list of [dec, inc, az, pl] data
Returns
-------
rotated_directions : arrays of Declinations and Inclinations | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L2240-L2278 |
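A usage sketch of the vectorized form; the first row repeats the scalar dogeo example and the second row is made up.

import numpy as np
import pmagpy.pmag as pmag

# each row is [dec, inc, az, pl]
indat = np.array([
    [0.0, 90.0, 0.0, 45.5],
    [10.0, 20.0, 30.0, 0.0],
])
decs, incs = pmag.dogeo_V(indat)
print(decs[0], incs[0])  # approximately 180.0 44.5, matching pmag.dogeo
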
PmagPy/PmagPy | pmagpy/pmag.py | dodirot | def dodirot(D, I, Dbar, Ibar):
"""
Rotate a direction (declination, inclination) by the difference between
dec=0 and inc = 90 and the provided desired mean direction
Parameters
----------
D : declination to be rotated
I : inclination to be rotated
Dbar : declination of desired mean
Ibar : inclination of desired mean
Returns
----------
drot, irot : rotated declination and inclination
"""
d, irot = dogeo(D, I, Dbar, 90. - Ibar)
drot = d - 180.
if drot < 360.:
drot = drot + 360.
if drot > 360.:
drot = drot - 360.
return drot, irot | python | def dodirot(D, I, Dbar, Ibar):
"""
Rotate a direction (declination, inclination) by the difference between
dec=0 and inc = 90 and the provided desired mean direction
Parameters
----------
D : declination to be rotated
I : inclination to be rotated
Dbar : declination of desired mean
Ibar : inclination of desired mean
Returns
----------
drot, irot : rotated declination and inclination
"""
d, irot = dogeo(D, I, Dbar, 90. - Ibar)
drot = d - 180.
if drot < 360.:
drot = drot + 360.
if drot > 360.:
drot = drot - 360.
return drot, irot | Rotate a direction (declination, inclination) by the difference between
dec=0 and inc = 90 and the provided desired mean direction
Parameters
----------
D : declination to be rotated
I : inclination to be rotated
Dbar : declination of desired mean
Ibar : inclination of desired mean
Returns
----------
drot, irot : rotated declination and inclination | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L2281-L2303 |
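A hedged usage sketch of dodirot with made-up values; because the rotation carries dec=0, inc=90 onto the desired mean, feeding in the vertical direction should return roughly the mean itself.

import pmagpy.pmag as pmag

drot, irot = pmag.dodirot(0.0, 90.0, 350.0, 60.0)
print(round(drot, 1), round(irot, 1))  # approximately 350.0 60.0
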
PmagPy/PmagPy | pmagpy/pmag.py | dodirot_V | def dodirot_V(di_block, Dbar, Ibar):
"""
Rotate an array of dec/inc pairs to coordinate system with Dec,Inc as 0,90
Parameters
___________________
di_block : array of [[Dec1,Inc1],[Dec2,Inc2],....]
Dbar : declination of desired center
Ibar : inclination of desired center
Returns
__________
array of rotated decs and incs: [[rot_Dec1,rot_Inc1],[rot_Dec2,rot_Inc2],....]
"""
N = di_block.shape[0]
    DipDir, Dip = np.ones(N, dtype=float).transpose(
    )*(Dbar-180.), np.ones(N, dtype=float).transpose()*(90.-Ibar)
di_block = di_block.transpose()
data = np.array([di_block[0], di_block[1], DipDir, Dip]).transpose()
drot, irot = dotilt_V(data)
drot = (drot-180.) % 360. #
return np.column_stack((drot, irot)) | python | def dodirot_V(di_block, Dbar, Ibar):
"""
Rotate an array of dec/inc pairs to coordinate system with Dec,Inc as 0,90
Parameters
___________________
di_block : array of [[Dec1,Inc1],[Dec2,Inc2],....]
Dbar : declination of desired center
Ibar : inclination of desired center
Returns
__________
array of rotated decs and incs: [[rot_Dec1,rot_Inc1],[rot_Dec2,rot_Inc2],....]
"""
N = di_block.shape[0]
    DipDir, Dip = np.ones(N, dtype=float).transpose(
    )*(Dbar-180.), np.ones(N, dtype=float).transpose()*(90.-Ibar)
di_block = di_block.transpose()
data = np.array([di_block[0], di_block[1], DipDir, Dip]).transpose()
drot, irot = dotilt_V(data)
drot = (drot-180.) % 360. #
return np.column_stack((drot, irot)) | Rotate an array of dec/inc pairs to coordinate system with Dec,Inc as 0,90
Parameters
___________________
di_block : array of [[Dec1,Inc1],[Dec2,Inc2],....]
Dbar : declination of desired center
Ibar : inclination of desired center
Returns
__________
array of rotated decs and incs: [[rot_Dec1,rot_Inc1],[rot_Dec2,rot_Inc2],....] | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L2306-L2327 |
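The same check for the array version, with one extra made-up direction; the first row is the vertical direction and should land near the requested center.

import numpy as np
import pmagpy.pmag as pmag

di_block = np.array([
    [0.0, 90.0],
    [15.0, 80.0],
])
rotated = pmag.dodirot_V(di_block, 350.0, 60.0)
print(rotated[0])  # approximately [350. 60.]
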
PmagPy/PmagPy | pmagpy/pmag.py | find_samp_rec | def find_samp_rec(s, data, az_type):
"""
find the orientation info for samp s
"""
datablock, or_error, bed_error = [], 0, 0
orient = {}
orient["sample_dip"] = ""
orient["sample_azimuth"] = ""
orient['sample_description'] = ""
for rec in data:
if rec["er_sample_name"].lower() == s.lower():
if 'sample_orientation_flag' in list(rec.keys()) and rec['sample_orientation_flag'] == 'b':
orient['sample_orientation_flag'] = 'b'
return orient
if "magic_method_codes" in list(rec.keys()) and az_type != "0":
methods = rec["magic_method_codes"].replace(" ", "").split(":")
if az_type in methods and "sample_azimuth" in list(rec.keys()) and rec["sample_azimuth"] != "":
orient["sample_azimuth"] = float(rec["sample_azimuth"])
if "sample_dip" in list(rec.keys()) and rec["sample_dip"] != "":
orient["sample_dip"] = float(rec["sample_dip"])
if "sample_bed_dip_direction" in list(rec.keys()) and rec["sample_bed_dip_direction"] != "":
orient["sample_bed_dip_direction"] = float(
rec["sample_bed_dip_direction"])
if "sample_bed_dip" in list(rec.keys()) and rec["sample_bed_dip"] != "":
orient["sample_bed_dip"] = float(rec["sample_bed_dip"])
else:
if "sample_azimuth" in list(rec.keys()):
orient["sample_azimuth"] = float(rec["sample_azimuth"])
if "sample_dip" in list(rec.keys()):
orient["sample_dip"] = float(rec["sample_dip"])
if "sample_bed_dip_direction" in list(rec.keys()):
orient["sample_bed_dip_direction"] = float(
rec["sample_bed_dip_direction"])
if "sample_bed_dip" in list(rec.keys()):
orient["sample_bed_dip"] = float(rec["sample_bed_dip"])
if 'sample_description' in list(rec.keys()):
orient['sample_description'] = rec['sample_description']
if orient["sample_azimuth"] != "":
break
return orient | python | def find_samp_rec(s, data, az_type):
"""
find the orientation info for samp s
"""
datablock, or_error, bed_error = [], 0, 0
orient = {}
orient["sample_dip"] = ""
orient["sample_azimuth"] = ""
orient['sample_description'] = ""
for rec in data:
if rec["er_sample_name"].lower() == s.lower():
if 'sample_orientation_flag' in list(rec.keys()) and rec['sample_orientation_flag'] == 'b':
orient['sample_orientation_flag'] = 'b'
return orient
if "magic_method_codes" in list(rec.keys()) and az_type != "0":
methods = rec["magic_method_codes"].replace(" ", "").split(":")
if az_type in methods and "sample_azimuth" in list(rec.keys()) and rec["sample_azimuth"] != "":
orient["sample_azimuth"] = float(rec["sample_azimuth"])
if "sample_dip" in list(rec.keys()) and rec["sample_dip"] != "":
orient["sample_dip"] = float(rec["sample_dip"])
if "sample_bed_dip_direction" in list(rec.keys()) and rec["sample_bed_dip_direction"] != "":
orient["sample_bed_dip_direction"] = float(
rec["sample_bed_dip_direction"])
if "sample_bed_dip" in list(rec.keys()) and rec["sample_bed_dip"] != "":
orient["sample_bed_dip"] = float(rec["sample_bed_dip"])
else:
if "sample_azimuth" in list(rec.keys()):
orient["sample_azimuth"] = float(rec["sample_azimuth"])
if "sample_dip" in list(rec.keys()):
orient["sample_dip"] = float(rec["sample_dip"])
if "sample_bed_dip_direction" in list(rec.keys()):
orient["sample_bed_dip_direction"] = float(
rec["sample_bed_dip_direction"])
if "sample_bed_dip" in list(rec.keys()):
orient["sample_bed_dip"] = float(rec["sample_bed_dip"])
if 'sample_description' in list(rec.keys()):
orient['sample_description'] = rec['sample_description']
if orient["sample_azimuth"] != "":
break
return orient | find the orientation info for samp s | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/pmag.py#L2330-L2369 |