repository_name
stringlengths 5
67
| func_path_in_repository
stringlengths 4
234
| func_name
stringlengths 0
314
| whole_func_string
stringlengths 52
3.87M
| language
stringclasses 6
values | func_code_string
stringlengths 52
3.87M
| func_documentation_string
stringlengths 1
47.2k
| func_code_url
stringlengths 85
339
|
---|---|---|---|---|---|---|---|
jjgomera/iapws | iapws/iapws97.py | _Backward3a_P_hs | def _Backward3a_P_hs(h, s):
"""Backward equation for region 3a, P=f(h,s)
Parameters
----------
h : float
Specific enthalpy, [kJ/kg]
s : float
Specific entropy, [kJ/kgK]
Returns
-------
P : float
Pressure, [MPa]
References
----------
IAPWS, Revised Supplementary Release on Backward Equations p(h,s) for
Region 3, Equations as a Function of h and s for the Region Boundaries, and
an Equation Tsat(h,s) for Region 4 of the IAPWS Industrial Formulation 1997
for the Thermodynamic Properties of Water and Steam,
http://www.iapws.org/relguide/Supp-phs3-2014.pdf. Eq 1
Examples
--------
>>> _Backward3a_P_hs(1700,3.8)
25.55703246
>>> _Backward3a_P_hs(2000,4.2)
45.40873468
>>> _Backward3a_P_hs(2100,4.3)
60.78123340
"""
I = [0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 6, 7, 8, 10, 10,
14, 18, 20, 22, 22, 24, 28, 28, 32, 32]
J = [0, 1, 5, 0, 3, 4, 8, 14, 6, 16, 0, 2, 3, 0, 1, 4, 5, 28, 28, 24, 1,
32, 36, 22, 28, 36, 16, 28, 36, 16, 36, 10, 28]
n = [0.770889828326934e1, -0.260835009128688e2, 0.267416218930389e3,
0.172221089496844e2, -0.293542332145970e3, 0.614135601882478e3,
-0.610562757725674e5, -0.651272251118219e8, 0.735919313521937e5,
-0.116646505914191e11, 0.355267086434461e2, -0.596144543825955e3,
-0.475842430145708e3, 0.696781965359503e2, 0.335674250377312e3,
0.250526809130882e5, 0.146997380630766e6, 0.538069315091534e20,
0.143619827291346e22, 0.364985866165994e20, -0.254741561156775e4,
0.240120197096563e28, -0.393847464679496e30, 0.147073407024852e25,
-0.426391250432059e32, 0.194509340621077e39, 0.666212132114896e24,
0.706777016552858e34, 0.175563621975576e42, 0.108408607429124e29,
0.730872705175151e44, 0.159145847398870e25, 0.377121605943324e41]
nu = h/2300
sigma = s/4.4
suma = 0
for i, j, ni in zip(I, J, n):
suma += ni * (nu-1.01)**i * (sigma-0.75)**j
return 99*suma | python | def _Backward3a_P_hs(h, s):
"""Backward equation for region 3a, P=f(h,s)
Parameters
----------
h : float
Specific enthalpy, [kJ/kg]
s : float
Specific entropy, [kJ/kgK]
Returns
-------
P : float
Pressure, [MPa]
References
----------
IAPWS, Revised Supplementary Release on Backward Equations p(h,s) for
Region 3, Equations as a Function of h and s for the Region Boundaries, and
an Equation Tsat(h,s) for Region 4 of the IAPWS Industrial Formulation 1997
for the Thermodynamic Properties of Water and Steam,
http://www.iapws.org/relguide/Supp-phs3-2014.pdf. Eq 1
Examples
--------
>>> _Backward3a_P_hs(1700,3.8)
25.55703246
>>> _Backward3a_P_hs(2000,4.2)
45.40873468
>>> _Backward3a_P_hs(2100,4.3)
60.78123340
"""
I = [0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 6, 7, 8, 10, 10,
14, 18, 20, 22, 22, 24, 28, 28, 32, 32]
J = [0, 1, 5, 0, 3, 4, 8, 14, 6, 16, 0, 2, 3, 0, 1, 4, 5, 28, 28, 24, 1,
32, 36, 22, 28, 36, 16, 28, 36, 16, 36, 10, 28]
n = [0.770889828326934e1, -0.260835009128688e2, 0.267416218930389e3,
0.172221089496844e2, -0.293542332145970e3, 0.614135601882478e3,
-0.610562757725674e5, -0.651272251118219e8, 0.735919313521937e5,
-0.116646505914191e11, 0.355267086434461e2, -0.596144543825955e3,
-0.475842430145708e3, 0.696781965359503e2, 0.335674250377312e3,
0.250526809130882e5, 0.146997380630766e6, 0.538069315091534e20,
0.143619827291346e22, 0.364985866165994e20, -0.254741561156775e4,
0.240120197096563e28, -0.393847464679496e30, 0.147073407024852e25,
-0.426391250432059e32, 0.194509340621077e39, 0.666212132114896e24,
0.706777016552858e34, 0.175563621975576e42, 0.108408607429124e29,
0.730872705175151e44, 0.159145847398870e25, 0.377121605943324e41]
nu = h/2300
sigma = s/4.4
suma = 0
for i, j, ni in zip(I, J, n):
suma += ni * (nu-1.01)**i * (sigma-0.75)**j
return 99*suma | Backward equation for region 3a, P=f(h,s)
Parameters
----------
h : float
Specific enthalpy, [kJ/kg]
s : float
Specific entropy, [kJ/kgK]
Returns
-------
P : float
Pressure, [MPa]
References
----------
IAPWS, Revised Supplementary Release on Backward Equations p(h,s) for
Region 3, Equations as a Function of h and s for the Region Boundaries, and
an Equation Tsat(h,s) for Region 4 of the IAPWS Industrial Formulation 1997
for the Thermodynamic Properties of Water and Steam,
http://www.iapws.org/relguide/Supp-phs3-2014.pdf. Eq 1
Examples
--------
>>> _Backward3a_P_hs(1700,3.8)
25.55703246
>>> _Backward3a_P_hs(2000,4.2)
45.40873468
>>> _Backward3a_P_hs(2100,4.3)
60.78123340 | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/iapws97.py#L2603-L2656 |
jjgomera/iapws | iapws/iapws97.py | _Backward3_P_hs | def _Backward3_P_hs(h, s):
"""Backward equation for region 3, P=f(h,s)
Parameters
----------
h : float
Specific enthalpy, [kJ/kg]
s : float
Specific entropy, [kJ/kgK]
Returns
-------
P : float
Pressure, [MPa]
"""
sc = 4.41202148223476
if s <= sc:
return _Backward3a_P_hs(h, s)
else:
return _Backward3b_P_hs(h, s) | python | def _Backward3_P_hs(h, s):
"""Backward equation for region 3, P=f(h,s)
Parameters
----------
h : float
Specific enthalpy, [kJ/kg]
s : float
Specific entropy, [kJ/kgK]
Returns
-------
P : float
Pressure, [MPa]
"""
sc = 4.41202148223476
if s <= sc:
return _Backward3a_P_hs(h, s)
else:
return _Backward3b_P_hs(h, s) | Backward equation for region 3, P=f(h,s)
Parameters
----------
h : float
Specific enthalpy, [kJ/kg]
s : float
Specific entropy, [kJ/kgK]
Returns
-------
P : float
Pressure, [MPa] | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/iapws97.py#L2717-L2736 |
jjgomera/iapws | iapws/iapws97.py | _Backward3_sat_v_P | def _Backward3_sat_v_P(P, T, x):
"""Backward equation for region 3 for saturated state, vs=f(P,x)
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
x : integer
Vapor quality, [-]
Returns
-------
v : float
Specific volume, [m³/kg]
Notes
-----
The vapor quality (x) can be 0 (saturated liquid) or 1 (saturated vapour)
"""
if x == 0:
if P < 19.00881189:
region = "c"
elif P < 21.0434:
region = "s"
elif P < 21.9316:
region = "u"
else:
region = "y"
else:
if P < 20.5:
region = "t"
elif P < 21.0434:
region = "r"
elif P < 21.9009:
region = "x"
else:
region = "z"
return _Backward3x_v_PT(T, P, region) | python | def _Backward3_sat_v_P(P, T, x):
"""Backward equation for region 3 for saturated state, vs=f(P,x)
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
x : integer
Vapor quality, [-]
Returns
-------
v : float
Specific volume, [m³/kg]
Notes
-----
The vapor quality (x) can be 0 (saturated liquid) or 1 (saturated vapour)
"""
if x == 0:
if P < 19.00881189:
region = "c"
elif P < 21.0434:
region = "s"
elif P < 21.9316:
region = "u"
else:
region = "y"
else:
if P < 20.5:
region = "t"
elif P < 21.0434:
region = "r"
elif P < 21.9009:
region = "x"
else:
region = "z"
return _Backward3x_v_PT(T, P, region) | Backward equation for region 3 for saturated state, vs=f(P,x)
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
x : integer
Vapor quality, [-]
Returns
-------
v : float
Specific volume, [m³/kg]
Notes
-----
The vapor quality (x) can be 0 (saturated liquid) or 1 (saturated vapour) | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/iapws97.py#L2739-L2779 |
jjgomera/iapws | iapws/iapws97.py | _Backward3_v_PT | def _Backward3_v_PT(P, T):
"""Backward equation for region 3, v=f(P,T)
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
Returns
-------
v : float
Specific volume, [m³/kg]
References
----------
IAPWS, Revised Supplementary Release on Backward Equations for Specific
Volume as a Function of Pressure and Temperature v(p,T) for Region 3 of the
IAPWS Industrial Formulation 1997 for the Thermodynamic Properties of Water
and Steam, http://www.iapws.org/relguide/Supp-VPT3-2016.pdf, Table 2 and 10
"""
if P > 40:
if T <= _tab_P(P):
region = "a"
else:
region = "b"
elif 25 < P <= 40:
tcd = _txx_P(P, "cd")
tab = _tab_P(P)
tef = _tef_P(P)
if T <= tcd:
region = "c"
elif tcd < T <= tab:
region = "d"
elif tab < T <= tef:
region = "e"
else:
region = "f"
elif 23.5 < P <= 25:
tcd = _txx_P(P, "cd")
tgh = _txx_P(P, "gh")
tef = _tef_P(P)
tij = _txx_P(P, "ij")
tjk = _txx_P(P, "jk")
if T <= tcd:
region = "c"
elif tcd < T <= tgh:
region = "g"
elif tgh < T <= tef:
region = "h"
elif tef < T <= tij:
region = "i"
elif tij < T <= tjk:
region = "j"
else:
region = "k"
elif 23 < P <= 23.5:
tcd = _txx_P(P, "cd")
tgh = _txx_P(P, "gh")
tef = _tef_P(P)
tij = _txx_P(P, "ij")
tjk = _txx_P(P, "jk")
if T <= tcd:
region = "c"
elif tcd < T <= tgh:
region = "l"
elif tgh < T <= tef:
region = "h"
elif tef < T <= tij:
region = "i"
elif tij < T <= tjk:
region = "j"
else:
region = "k"
elif 22.5 < P <= 23:
tcd = _txx_P(P, "cd")
tgh = _txx_P(P, "gh")
tmn = _txx_P(P, "mn")
tef = _tef_P(P)
top = _top_P(P)
tij = _txx_P(P, "ij")
tjk = _txx_P(P, "jk")
if T <= tcd:
region = "c"
elif tcd < T <= tgh:
region = "l"
elif tgh < T <= tmn:
region = "m"
elif tmn < T <= tef:
region = "n"
elif tef < T <= top:
region = "o"
elif top < T <= tij:
region = "p"
elif tij < T <= tjk:
region = "j"
else:
region = "k"
elif _PSat_T(643.15) < P <= 22.5:
tcd = _txx_P(P, "cd")
tqu = _txx_P(P, "qu")
trx = _txx_P(P, "rx")
tjk = _txx_P(P, "jk")
if T <= tcd:
region = "c"
elif tcd < T <= tqu:
region = "q"
elif tqu < T <= trx:
# Table 10
tef = _tef_P(P)
twx = _twx_P(P)
tuv = _txx_P(P, "uv")
if 22.11 < P <= 22.5:
if T <= tuv:
region = "u"
elif tuv <= T <= tef:
region = "v"
elif tef <= T <= twx:
region = "w"
else:
region = "x"
elif 22.064 < P <= 22.11:
if T <= tuv:
region = "u"
elif tuv <= T <= tef:
region = "y"
elif tef <= T <= twx:
region = "z"
else:
region = "x"
elif T > _TSat_P(P):
if _PSat_T(643.15) < P <= 21.90096265:
region = "x"
elif 21.90096265 < P <= 22.064:
if T <= twx:
region = "z"
else:
region = "x"
elif T <= _TSat_P(P):
if _PSat_T(643.15) < P <= 21.93161551:
region = "u"
elif 21.93161551 < P <= 22.064:
if T <= tuv:
region = "u"
else:
region = "y"
elif trx < T <= tjk:
region = "r"
else:
region = "k"
elif 20.5 < P <= _PSat_T(643.15):
tcd = _txx_P(P, "cd")
Ts = _TSat_P(P)
tjk = _txx_P(P, "jk")
if T <= tcd:
region = "c"
elif tcd < T <= Ts:
region = "s"
elif Ts < T <= tjk:
region = "r"
else:
region = "k"
elif 19.00881189173929 < P <= 20.5:
tcd = _txx_P(P, "cd")
Ts = _TSat_P(P)
if T <= tcd:
region = "c"
elif tcd < T <= Ts:
region = "s"
else:
region = "t"
elif Ps_623 < P <= 19.00881189173929:
Ts = _TSat_P(P)
if T <= Ts:
region = "c"
else:
region = "t"
return _Backward3x_v_PT(T, P, region) | python | def _Backward3_v_PT(P, T):
"""Backward equation for region 3, v=f(P,T)
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
Returns
-------
v : float
Specific volume, [m³/kg]
References
----------
IAPWS, Revised Supplementary Release on Backward Equations for Specific
Volume as a Function of Pressure and Temperature v(p,T) for Region 3 of the
IAPWS Industrial Formulation 1997 for the Thermodynamic Properties of Water
and Steam, http://www.iapws.org/relguide/Supp-VPT3-2016.pdf, Table 2 and 10
"""
if P > 40:
if T <= _tab_P(P):
region = "a"
else:
region = "b"
elif 25 < P <= 40:
tcd = _txx_P(P, "cd")
tab = _tab_P(P)
tef = _tef_P(P)
if T <= tcd:
region = "c"
elif tcd < T <= tab:
region = "d"
elif tab < T <= tef:
region = "e"
else:
region = "f"
elif 23.5 < P <= 25:
tcd = _txx_P(P, "cd")
tgh = _txx_P(P, "gh")
tef = _tef_P(P)
tij = _txx_P(P, "ij")
tjk = _txx_P(P, "jk")
if T <= tcd:
region = "c"
elif tcd < T <= tgh:
region = "g"
elif tgh < T <= tef:
region = "h"
elif tef < T <= tij:
region = "i"
elif tij < T <= tjk:
region = "j"
else:
region = "k"
elif 23 < P <= 23.5:
tcd = _txx_P(P, "cd")
tgh = _txx_P(P, "gh")
tef = _tef_P(P)
tij = _txx_P(P, "ij")
tjk = _txx_P(P, "jk")
if T <= tcd:
region = "c"
elif tcd < T <= tgh:
region = "l"
elif tgh < T <= tef:
region = "h"
elif tef < T <= tij:
region = "i"
elif tij < T <= tjk:
region = "j"
else:
region = "k"
elif 22.5 < P <= 23:
tcd = _txx_P(P, "cd")
tgh = _txx_P(P, "gh")
tmn = _txx_P(P, "mn")
tef = _tef_P(P)
top = _top_P(P)
tij = _txx_P(P, "ij")
tjk = _txx_P(P, "jk")
if T <= tcd:
region = "c"
elif tcd < T <= tgh:
region = "l"
elif tgh < T <= tmn:
region = "m"
elif tmn < T <= tef:
region = "n"
elif tef < T <= top:
region = "o"
elif top < T <= tij:
region = "p"
elif tij < T <= tjk:
region = "j"
else:
region = "k"
elif _PSat_T(643.15) < P <= 22.5:
tcd = _txx_P(P, "cd")
tqu = _txx_P(P, "qu")
trx = _txx_P(P, "rx")
tjk = _txx_P(P, "jk")
if T <= tcd:
region = "c"
elif tcd < T <= tqu:
region = "q"
elif tqu < T <= trx:
# Table 10
tef = _tef_P(P)
twx = _twx_P(P)
tuv = _txx_P(P, "uv")
if 22.11 < P <= 22.5:
if T <= tuv:
region = "u"
elif tuv <= T <= tef:
region = "v"
elif tef <= T <= twx:
region = "w"
else:
region = "x"
elif 22.064 < P <= 22.11:
if T <= tuv:
region = "u"
elif tuv <= T <= tef:
region = "y"
elif tef <= T <= twx:
region = "z"
else:
region = "x"
elif T > _TSat_P(P):
if _PSat_T(643.15) < P <= 21.90096265:
region = "x"
elif 21.90096265 < P <= 22.064:
if T <= twx:
region = "z"
else:
region = "x"
elif T <= _TSat_P(P):
if _PSat_T(643.15) < P <= 21.93161551:
region = "u"
elif 21.93161551 < P <= 22.064:
if T <= tuv:
region = "u"
else:
region = "y"
elif trx < T <= tjk:
region = "r"
else:
region = "k"
elif 20.5 < P <= _PSat_T(643.15):
tcd = _txx_P(P, "cd")
Ts = _TSat_P(P)
tjk = _txx_P(P, "jk")
if T <= tcd:
region = "c"
elif tcd < T <= Ts:
region = "s"
elif Ts < T <= tjk:
region = "r"
else:
region = "k"
elif 19.00881189173929 < P <= 20.5:
tcd = _txx_P(P, "cd")
Ts = _TSat_P(P)
if T <= tcd:
region = "c"
elif tcd < T <= Ts:
region = "s"
else:
region = "t"
elif Ps_623 < P <= 19.00881189173929:
Ts = _TSat_P(P)
if T <= Ts:
region = "c"
else:
region = "t"
return _Backward3x_v_PT(T, P, region) | Backward equation for region 3, v=f(P,T)
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
Returns
-------
v : float
Specific volume, [m³/kg]
References
----------
IAPWS, Revised Supplementary Release on Backward Equations for Specific
Volume as a Function of Pressure and Temperature v(p,T) for Region 3 of the
IAPWS Industrial Formulation 1997 for the Thermodynamic Properties of Water
and Steam, http://www.iapws.org/relguide/Supp-VPT3-2016.pdf, Table 2 and 10 | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/iapws97.py#L2782-L2961 |
jjgomera/iapws | iapws/iapws97.py | _Backward3x_v_PT | def _Backward3x_v_PT(T, P, x):
"""Backward equation for region 3x, v=f(P,T)
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
x : char
Region 3 subregion code
Returns
-------
v : float
Specific volume, [m³/kg]
References
----------
IAPWS, Revised Supplementary Release on Backward Equations for Specific
Volume as a Function of Pressure and Temperature v(p,T) for Region 3 of the
IAPWS Industrial Formulation 1997 for the Thermodynamic Properties of Water
and Steam, http://www.iapws.org/relguide/Supp-VPT3-2016.pdf, Eq. 4-5
Examples
--------
>>> _Backward3x_v_PT(630,50,"a")
0.001470853100
>>> _Backward3x_v_PT(670,80,"a")
0.001503831359
>>> _Backward3x_v_PT(710,50,"b")
0.002204728587
>>> _Backward3x_v_PT(750,80,"b")
0.001973692940
>>> _Backward3x_v_PT(630,20,"c")
0.001761696406
>>> _Backward3x_v_PT(650,30,"c")
0.001819560617
>>> _Backward3x_v_PT(656,26,"d")
0.002245587720
>>> _Backward3x_v_PT(670,30,"d")
0.002506897702
>>> _Backward3x_v_PT(661,26,"e")
0.002970225962
>>> _Backward3x_v_PT(675,30,"e")
0.003004627086
>>> _Backward3x_v_PT(671,26,"f")
0.005019029401
>>> _Backward3x_v_PT(690,30,"f")
0.004656470142
>>> _Backward3x_v_PT(649,23.6,"g")
0.002163198378
>>> _Backward3x_v_PT(650,24,"g")
0.002166044161
>>> _Backward3x_v_PT(652,23.6,"h")
0.002651081407
>>> _Backward3x_v_PT(654,24,"h")
0.002967802335
>>> _Backward3x_v_PT(653,23.6,"i")
0.003273916816
>>> _Backward3x_v_PT(655,24,"i")
0.003550329864
>>> _Backward3x_v_PT(655,23.5,"j")
0.004545001142
>>> _Backward3x_v_PT(660,24,"j")
0.005100267704
>>> _Backward3x_v_PT(660,23,"k")
0.006109525997
>>> _Backward3x_v_PT(670,24,"k")
0.006427325645
>>> _Backward3x_v_PT(646,22.6,"l")
0.002117860851
>>> _Backward3x_v_PT(646,23,"l")
0.002062374674
>>> _Backward3x_v_PT(648.6,22.6,"m")
0.002533063780
>>> _Backward3x_v_PT(649.3,22.8,"m")
0.002572971781
>>> _Backward3x_v_PT(649,22.6,"n")
0.002923432711
>>> _Backward3x_v_PT(649.7,22.8,"n")
0.002913311494
>>> _Backward3x_v_PT(649.1,22.6,"o")
0.003131208996
>>> _Backward3x_v_PT(649.9,22.8,"o")
0.003221160278
>>> _Backward3x_v_PT(649.4,22.6,"p")
0.003715596186
>>> _Backward3x_v_PT(650.2,22.8,"p")
0.003664754790
>>> _Backward3x_v_PT(640,21.1,"q")
0.001970999272
>>> _Backward3x_v_PT(643,21.8,"q")
0.002043919161
>>> _Backward3x_v_PT(644,21.1,"r")
0.005251009921
>>> _Backward3x_v_PT(648,21.8,"r")
0.005256844741
>>> _Backward3x_v_PT(635,19.1,"s")
0.001932829079
>>> _Backward3x_v_PT(638,20,"s")
0.001985387227
>>> _Backward3x_v_PT(626,17,"t")
0.008483262001
>>> _Backward3x_v_PT(640,20,"t")
0.006227528101
>>> _Backward3x_v_PT(644.6,21.5,"u")
0.002268366647
>>> _Backward3x_v_PT(646.1,22,"u")
0.002296350553
>>> _Backward3x_v_PT(648.6,22.5,"v")
0.002832373260
>>> _Backward3x_v_PT(647.9,22.3,"v")
0.002811424405
>>> _Backward3x_v_PT(647.5,22.15,"w")
0.003694032281
>>> _Backward3x_v_PT(648.1,22.3,"w")
0.003622226305
>>> _Backward3x_v_PT(648,22.11,"x")
0.004528072649
>>> _Backward3x_v_PT(649,22.3,"x")
0.004556905799
>>> _Backward3x_v_PT(646.84,22,"y")
0.002698354719
>>> _Backward3x_v_PT(647.05,22.064,"y")
0.002717655648
>>> _Backward3x_v_PT(646.89,22,"z")
0.003798732962
>>> _Backward3x_v_PT(647.15,22.064,"z")
0.003701940009
"""
par = {
"a": [0.0024, 100, 760, 0.085, 0.817, 1, 1, 1],
"b": [0.0041, 100, 860, 0.280, 0.779, 1, 1, 1],
"c": [0.0022, 40, 690, 0.259, 0.903, 1, 1, 1],
"d": [0.0029, 40, 690, 0.559, 0.939, 1, 1, 4],
"e": [0.0032, 40, 710, 0.587, 0.918, 1, 1, 1],
"f": [0.0064, 40, 730, 0.587, 0.891, 0.5, 1, 4],
"g": [0.0027, 25, 660, 0.872, 0.971, 1, 1, 4],
"h": [0.0032, 25, 660, 0.898, 0.983, 1, 1, 4],
"i": [0.0041, 25, 660, 0.910, 0.984, 0.5, 1, 4],
"j": [0.0054, 25, 670, 0.875, 0.964, 0.5, 1, 4],
"k": [0.0077, 25, 680, 0.802, 0.935, 1, 1, 1],
"l": [0.0026, 24, 650, 0.908, 0.989, 1, 1, 4],
"m": [0.0028, 23, 650, 1.000, 0.997, 1, 0.25, 1],
"n": [0.0031, 23, 650, 0.976, 0.997, None, None, None],
"o": [0.0034, 23, 650, 0.974, 0.996, 0.5, 1, 1],
"p": [0.0041, 23, 650, 0.972, 0.997, 0.5, 1, 1],
"q": [0.0022, 23, 650, 0.848, 0.983, 1, 1, 4],
"r": [0.0054, 23, 650, 0.874, 0.982, 1, 1, 1],
"s": [0.0022, 21, 640, 0.886, 0.990, 1, 1, 4],
"t": [0.0088, 20, 650, 0.803, 1.020, 1, 1, 1],
"u": [0.0026, 23, 650, 0.902, 0.988, 1, 1, 1],
"v": [0.0031, 23, 650, 0.960, 0.995, 1, 1, 1],
"w": [0.0039, 23, 650, 0.959, 0.995, 1, 1, 4],
"x": [0.0049, 23, 650, 0.910, 0.988, 1, 1, 1],
"y": [0.0031, 22, 650, 0.996, 0.994, 1, 1, 4],
"z": [0.0038, 22, 650, 0.993, 0.994, 1, 1, 4],
}
I = {
"a": [-12, -12, -12, -10, -10, -10, -8, -8, -8, -6, -5, -5, -5, -4, -3,
-3, -3, -3, -2, -2, -2, -1, -1, -1, 0, 0, 1, 1, 2, 2],
"b": [-12, -12, -10, -10, -8, -6, -6, -6, -5, -5, -5, -4, -4, -4, -3,
-3, -3, -3, -3, -2, -2, -2, -1, -1, 0, 0, 1, 1, 2, 3, 4, 4],
"c": [-12, -12, -12, -10, -10, -10, -8, -8, -8, -6, -5, -5, -5, -4, -4,
-3, -3, -2, -2, -2, -1, -1, -1, 0, 0, 0, 1, 1, 2, 2, 2, 2, 3, 3,
8],
"d": [-12, -12, -12, -12, -12, -12, -10, -10, -10, -10, -10, -10, -10,
-8, -8, -8, -8, -6, -6, -5, -5, -5, -5, -4, -4, -4, -3, -3, -2,
-2, -1, -1, -1, 0, 0, 1, 1, 3],
"e": [-12, -12, -10, -10, -10, -10, -10, -8, -8, -8, -6, -5, -4, -4,
-3, -3, -3, -2, -2, -2, -2, -1, 0, 0, 1, 1, 1, 2, 2],
"f": [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 3, 3, 4, 5, 5, 6, 7, 7,
10, 12, 12, 12, 14, 14, 14, 14, 14, 16, 16, 18, 18, 20, 20, 20,
22, 24, 24, 28, 32],
"g": [-12, -12, -12, -12, -12, -12, -10, -10, -10, -8, -8, -8, -8, -6,
-6, -5, -5, -4, -3, -2, -2, -2, -2, -1, -1, -1, 0, 0, 0, 1, 1, 1,
3, 5, 6, 8, 10, 10],
"h": [-12, -12, -10, -10, -10, -10, -10, -10, -8, -8, -8, -8, -8, -6,
-6, -6, -5, -5, -5, -4, -4, -3, -3, -2, -1, -1, 0, 1, 1],
"i": [0, 0, 0, 1, 1, 1, 1, 2, 3, 3, 4, 4, 4, 5, 5, 5, 7, 7, 8, 8, 10,
12, 12, 12, 14, 14, 14, 14, 18, 18, 18, 18, 18, 20, 20, 22, 24,
24, 32, 32, 36, 36],
"j": [0, 0, 0, 1, 1, 1, 2, 2, 3, 4, 4, 5, 5, 5, 6, 10, 12, 12, 14, 14,
14, 16, 18, 20, 20, 24, 24, 28, 28],
"k": [-2, -2, -1, -1, 0, -0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2,
2, 2, 2, 2, 5, 5, 5, 6, 6, 6, 6, 8, 10, 12],
"l": [-12, -12, -12, -12, -12, -10, -10, -8, -8, -8, -8, -8, -8, -8,
-6, -5, -5, -4, -4, -3, -3, -3, -3, -2, -2, -2, -1, -1, -1, 0, 0,
0, 0, 1, 1, 2, 4, 5, 5, 6, 10, 10, 14],
"m": [0, 3, 8, 20, 1, 3, 4, 5, 1, 6, 2, 4, 14, 2, 5, 3, 0, 1, 1, 1, 28,
2, 16, 0, 5, 0, 3, 4, 12, 16, 1, 8, 14, 0, 2, 3, 4, 8, 14, 24],
"n": [0, 3, 4, 6, 7, 10, 12, 14, 18, 0, 3, 5, 6, 8, 12, 0, 3, 7, 12,
2, 3, 4, 2, 4, 7, 4, 3, 5, 6, 0, 0, 3, 1, 0, 1, 0, 1, 0, 1],
"o": [0, 0, 0, 2, 3, 4, 4, 4, 4, 4, 5, 5, 6, 7, 8, 8, 8, 10, 10, 14,
14, 20, 20, 24],
"p": [0, 0, 0, 0, 1, 2, 3, 3, 4, 6, 7, 7, 8, 10, 12, 12, 12, 14, 14,
14, 16, 18, 20, 22, 24, 24, 36],
"q": [-12, -12, -10, -10, -10, -10, -8, -6, -5, -5, -4, -4, -3, -2,
-2, -2, -2, -1, -1, -1, 0, 1, 1, 1],
"r": [-8, -8, -3, -3, -3, -3, -3, 0, 0, 0, 0, 3, 3, 8, 8, 8, 8, 10,
10, 10, 10, 10, 10, 10, 10, 12, 14],
"s": [-12, -12, -10, -8, -6, -5, -5, -4, -4, -3, -3, -2, -1, -1, -1, 0,
0, 0, 0, 1, 1, 3, 3, 3, 4, 4, 4, 5, 14],
"t": [0, 0, 0, 0, 1, 1, 2, 2, 2, 3, 3, 4, 4, 7, 7, 7, 7, 7, 10, 10, 10,
10, 10, 18, 20, 22, 22, 24, 28, 32, 32, 32, 36],
"u": [-12, -10, -10, -10, -8, -8, -8, -6, -6, -5, -5, -5, -3, -1, -1,
-1, -1, 0, 0, 1, 2, 2, 3, 5, 5, 5, 6, 6, 8, 8, 10, 12, 12, 12,
14, 14, 14, 14],
"v": [-10, -8, -6, -6, -6, -6, -6, -6, -5, -5, -5, -5, -5, -5, -4, -4,
-4, -4, -3, -3, -3, -2, -2, -1, -1, 0, 0, 0, 1, 1, 3, 4, 4, 4, 5,
8, 10, 12, 14],
"w": [-12, -12, -10, -10, -8, -8, -8, -6, -6, -6, -6, -5, -4, -4, -3,
-3, -2, -2, -1, -1, -1, 0, 0, 1, 2, 2, 3, 3, 5, 5, 5, 8, 8, 10,
10],
"x": [-8, -6, -5, -4, -4, -4, -3, -3, -1, 0, 0, 0, 1, 1, 2, 3, 3, 3, 4,
5, 5, 5, 6, 8, 8, 8, 8, 10, 12, 12, 12, 12, 14, 14, 14, 14],
"y": [0, 0, 0, 0, 1, 2, 2, 2, 2, 3, 3, 3, 4, 4, 5, 5, 8, 8, 10, 12],
"z": [-8, -6, -5, -5, -4, -4, -4, -3, -3, -3, -2, -1, 0, 1, 2, 3, 3, 6,
6, 6, 6, 8, 8]}
J = {
"a": [5, 10, 12, 5, 10, 12, 5, 8, 10, 1, 1, 5, 10, 8, 0, 1, 3, 6, 0,
2, 3, 0, 1, 2, 0, 1, 0, 2, 0, 2],
"b": [10, 12, 8, 14, 8, 5, 6, 8, 5, 8, 10, 2, 4, 5, 0, 1, 2, 3, 5, 0,
2, 5, 0, 2, 0, 1, 0, 2, 0, 2, 0, 1],
"c": [6, 8, 10, 6, 8, 10, 5, 6, 7, 8, 1, 4, 7, 2, 8, 0, 3, 0, 4, 5, 0,
1, 2, 0, 1, 2, 0, 2, 0, 1, 3, 7, 0, 7, 1],
"d": [4, 6, 7, 10, 12, 16, 0, 2, 4, 6, 8, 10, 14, 3, 7, 8, 10, 6, 8, 1,
2, 5, 7, 0, 1, 7, 2, 4, 0, 1, 0, 1, 5, 0, 2, 0, 6, 0],
"e": [14, 16, 3, 6, 10, 14, 16, 7, 8, 10, 6, 6, 2, 4, 2, 6, 7, 0, 1,
3, 4, 0, 0, 1, 0, 4, 6, 0, 2],
"f": [-3, -2, -1, 0, 1, 2, -1, 1, 2, 3, 0, 1, -5, -2, 0, -3, -8, 1, -6,
-4, 1, -6, -10, -8, -4, -12, -10, -8, -6, -4, -10, -8, -12, -10,
-12, -10, -6, -12, -12, -4, -12, -12],
"g": [7, 12, 14, 18, 22, 24, 14, 20, 24, 7, 8, 10, 12, 8, 22, 7, 20,
22, 7, 3, 5, 14, 24, 2, 8, 18, 0, 1, 2, 0, 1, 3, 24, 22, 12, 3,
0, 6],
"h": [8, 12, 4, 6, 8, 10, 14, 16, 0, 1, 6, 7, 8, 4, 6, 8, 2, 3, 4, 2,
4, 1, 2, 0, 0, 2, 0, 0, 2],
"i": [0, 1, 10, -4, -2, -1, 0, 0, -5, 0, -3, -2, -1, -6, -1, 12, -4,
-3, -6, 10, -8, -12, -6, -4, -10, -8, -4, 5, -12, -10, -8, -6,
2, -12, -10, -12, -12, -8, -10, -5, -10, -8],
"j": [-1, 0, 1, -2, -1, 1, -1, 1, -2, -2, 2, -3, -2, 0, 3, -6, -8, -3,
-10, -8, -5, -10, -12, -12, -10, -12, -6, -12, -5],
"k": [10, 12, -5, 6, -12, -6, -2, -1, 0, 1, 2, 3, 14, -3, -2, 0, 1, 2,
-8, -6, -3, -2, 0, 4, -12, -6, -3, -12, -10, -8, -5, -12, -12,
-10],
"l": [14, 16, 18, 20, 22, 14, 24, 6, 10, 12, 14, 18, 24, 36, 8, 4, 5,
7, 16, 1, 3, 18, 20, 2, 3, 10, 0, 1, 3, 0, 1, 2, 12, 0, 16, 1, 0,
0, 1, 14, 4, 12, 10],
"m": [0, 0, 0, 2, 5, 5, 5, 5, 6, 6, 7, 8, 8, 10, 10, 12, 14, 14, 18,
20, 20, 22, 22, 24, 24, 28, 28, 28, 28, 28, 32, 32, 32, 36, 36,
36, 36, 36, 36, 36],
"n": [-12, -12, -12, -12, -12, -12, -12, -12, -12, -10, -10, -10, -10,
-10, -10, -8, -8, -8, -8, -6, -6, -6, -5, -5, -5, -4, -3, -3,
-3, -2, -1, -1, 0, 1, 1, 2, 4, 5, 6],
"o": [-12, -4, -1, -1, -10, -12, -8, -5, -4, -1, -4, -3, -8, -12, -10,
-8, -4, -12, -8, -12, -8, -12, -10, -12],
"p": [-1, 0, 1, 2, 1, -1, -3, 0, -2, -2, -5, -4, -2, -3, -12, -6, -5,
-10, -8, -3, -8, -8, -10, -10, -12, -8, -12],
"q": [10, 12, 6, 7, 8, 10, 8, 6, 2, 5, 3, 4, 3, 0, 1, 2, 4, 0, 1, 2,
0, 0, 1, 3],
"r": [6, 14, -3, 3, 4, 5, 8, -1, 0, 1, 5, -6, -2, -12, -10, -8, -5,
-12, -10, -8, -6, -5, -4, -3, -2, -12, -12],
"s": [20, 24, 22, 14, 36, 8, 16, 6, 32, 3, 8, 4, 1, 2, 3, 0, 1, 4, 28,
0, 32, 0, 1, 2, 3, 18, 24, 4, 24],
"t": [0, 1, 4, 12, 0, 10, 0, 6, 14, 3, 8, 0, 10, 3, 4, 7, 20, 36, 10,
12, 14, 16, 22, 18, 32, 22, 36, 24, 28, 22, 32, 36, 36],
"u": [14, 10, 12, 14, 10, 12, 14, 8, 12, 4, 8, 12, 2, -1, 1, 12, 14,
-3, 1, -2, 5, 10, -5, -4, 2, 3, -5, 2, -8, 8, -4, -12, -4, 4,
-12, -10, -6, 6],
"v": [-8, -12, -12, -3, 5, 6, 8, 10, 1, 2, 6, 8, 10, 14, -12, -10, -6,
10, -3, 10, 12, 2, 4, -2, 0, -2, 6, 10, -12, -10, 3, -6, 3, 10,
2, -12, -2, -3, 1],
"w": [8, 14, -1, 8, 6, 8, 14, -4, -3, 2, 8, -10, -1, 3, -10, 3, 1, 2,
-8, -4, 1, -12, 1, -1, -1, 2, -12, -5, -10, -8, -6, -12, -10,
-12, -8],
"x": [14, 10, 10, 1, 2, 14, -2, 12, 5, 0, 4, 10, -10, -1, 6, -12, 0,
8, 3, -6, -2, 1, 1, -6, -3, 1, 8, -8, -10, -8, -5, -4, -12, -10,
-8, -6],
"y": [-3, 1, 5, 8, 8, -4, -1, 4, 5, -8, 4, 8, -6, 6, -2, 1, -8, -2,
-5, -8],
"z": [3, 6, 6, 8, 5, 6, 8, -2, 5, 6, 2, -6, 3, 1, 6, -6, -2, -6, -5,
-4, -1, -8, -4]}
n = {
"a": [0.110879558823853e-2, 0.572616740810616e3, -0.767051948380852e5,
-0.253321069529674e-1, 0.628008049345689e4, 0.234105654131876e6,
0.216867826045856, -0.156237904341963e3, -0.269893956176613e5,
-0.180407100085505e-3, 0.116732227668261e-2, 0.266987040856040e2,
0.282776617243286e5, -0.242431520029523e4, 0.435217323022733e-3,
-0.122494831387441e-1, 0.179357604019989e1, 0.442729521058314e2,
-0.593223489018342e-2, 0.453186261685774, 0.135825703129140e1,
0.408748415856745e-1, 0.474686397863312, 0.118646814997915e1,
0.546987265727549, 0.195266770452643, -0.502268790869663e-1,
-0.369645308193377, 0.633828037528420e-2, 0.797441793901017e-1],
"b": [-0.827670470003621e-1, 0.416887126010565e2, 0.483651982197059e-1,
-0.291032084950276e5, -0.111422582236948e3, -.202300083904014e-1,
0.294002509338515e3, 0.140244997609658e3, -0.344384158811459e3,
0.361182452612149e3, -0.140699677420738e4, -0.202023902676481e-2,
0.171346792457471e3, -0.425597804058632e1, 0.691346085000334e-5,
0.151140509678925e-2, -0.416375290166236e-1, -.413754957011042e2,
-0.506673295721637e2, -0.572212965569023e-3, 0.608817368401785e1,
0.239600660256161e2, 0.122261479925384e-1, 0.216356057692938e1,
0.398198903368642, -0.116892827834085, -0.102845919373532,
-0.492676637589284, 0.655540456406790e-1, -0.240462535078530,
-0.269798180310075e-1, 0.128369435967012],
"c": [0.311967788763030e1, 0.276713458847564e5, 0.322583103403269e8,
-0.342416065095363e3, -0.899732529907377e6, -0.793892049821251e8,
0.953193003217388e2, 0.229784742345072e4, 0.175336675322499e6,
0.791214365222792e7, 0.319933345844209e-4, -0.659508863555767e2,
-0.833426563212851e6, 0.645734680583292e-1, -0.382031020570813e7,
0.406398848470079e-4, 0.310327498492008e2, -0.892996718483724e-3,
0.234604891591616e3, 0.377515668966951e4, 0.158646812591361e-1,
0.707906336241843, 0.126016225146570e2, 0.736143655772152,
0.676544268999101, -0.178100588189137e2, -0.156531975531713,
0.117707430048158e2, 0.840143653860447e-1, -0.186442467471949,
-0.440170203949645e2, 0.123290423502494e7, -0.240650039730845e-1,
-0.107077716660869e7, 0.438319858566475e-1],
"d": [-0.452484847171645e-9, .315210389538801e-4, -.214991352047545e-2,
0.508058874808345e3, -0.127123036845932e8, 0.115371133120497e13,
-.197805728776273e-15, .241554806033972e-10,
-.156481703640525e-5, 0.277211346836625e-2, -0.203578994462286e2,
0.144369489909053e7, -0.411254217946539e11, 0.623449786243773e-5,
-.221774281146038e2, -0.689315087933158e5, -0.195419525060713e8,
0.316373510564015e4, 0.224040754426988e7, -0.436701347922356e-5,
-.404213852833996e-3, -0.348153203414663e3, -0.385294213555289e6,
0.135203700099403e-6, 0.134648383271089e-3, 0.125031835351736e6,
0.968123678455841e-1, 0.225660517512438e3, -0.190102435341872e-3,
-.299628410819229e-1, 0.500833915372121e-2, 0.387842482998411,
-0.138535367777182e4, 0.870745245971773, 0.171946252068742e1,
-.326650121426383e-1, 0.498044171727877e4, 0.551478022765087e-2],
"e": [0.715815808404721e9, -0.114328360753449e12, .376531002015720e-11,
-0.903983668691157e-4, 0.665695908836252e6, 0.535364174960127e10,
0.794977402335603e11, 0.922230563421437e2, -0.142586073991215e6,
-0.111796381424162e7, 0.896121629640760e4, -0.669989239070491e4,
0.451242538486834e-2, -0.339731325977713e2, -0.120523111552278e1,
0.475992667717124e5, -0.266627750390341e6, -0.153314954386524e-3,
0.305638404828265, 0.123654999499486e3, -0.104390794213011e4,
-0.157496516174308e-1, 0.685331118940253, 0.178373462873903e1,
-0.544674124878910, 0.204529931318843e4, -0.228342359328752e5,
0.413197481515899, -0.341931835910405e2],
"f": [-0.251756547792325e-7, .601307193668763e-5, -.100615977450049e-2,
0.999969140252192, 0.214107759236486e1, -0.165175571959086e2,
-0.141987303638727e-2, 0.269251915156554e1, 0.349741815858722e2,
-0.300208695771783e2, -0.131546288252539e1, -0.839091277286169e1,
0.181545608337015e-9, -0.591099206478909e-3, 0.152115067087106e1,
0.252956470663225e-4, 0.100726265203786e-14, -0.14977453386065e1,
-0.793940970562969e-9, -0.150290891264717e-3, .151205531275133e1,
0.470942606221652e-5, .195049710391712e-12, -.911627886266077e-8,
.604374640201265e-3, -.225132933900136e-15, .610916973582981e-11,
-.303063908043404e-6, -.137796070798409e-4, -.919296736666106e-3,
.639288223132545e-9, .753259479898699e-6, -0.400321478682929e-12,
.756140294351614e-8, -.912082054034891e-11, -.237612381140539e-7,
0.269586010591874e-4, -.732828135157839e-10, .241995578306660e-9,
-.405735532730322e-3, .189424143498011e-9, -.486632965074563e-9],
"g": [0.412209020652996e-4, -0.114987238280587e7, 0.948180885032080e10,
-0.195788865718971e18, 0.4962507048713e25, -0.105549884548496e29,
-0.758642165988278e12, -.922172769596101e23, .725379072059348e30,
-0.617718249205859e2, 0.107555033344858e5, -0.379545802336487e8,
0.228646846221831e12, -0.499741093010619e7, -.280214310054101e31,
0.104915406769586e7, 0.613754229168619e28, 0.802056715528378e32,
-0.298617819828065e8, -0.910782540134681e2, 0.135033227281565e6,
-0.712949383408211e19, -0.104578785289542e37, .304331584444093e2,
0.593250797959445e10, -0.364174062110798e28, 0.921791403532461,
-0.337693609657471, -0.724644143758508e2, -0.110480239272601,
0.536516031875059e1, -0.291441872156205e4, 0.616338176535305e40,
-0.120889175861180e39, 0.818396024524612e23, 0.940781944835829e9,
-0.367279669545448e5, -0.837513931798655e16],
"h": [0.561379678887577e-1, 0.774135421587083e10, 0.111482975877938e-8,
-0.143987128208183e-2, 0.193696558764920e4, -0.605971823585005e9,
0.171951568124337e14, -.185461154985145e17, 0.38785116807801e-16,
-.395464327846105e-13, -0.170875935679023e3, -0.21201062070122e4,
0.177683337348191e8, 0.110177443629575e2, -0.234396091693313e6,
-0.656174421999594e7, 0.156362212977396e-4, -0.212946257021400e1,
0.135249306374858e2, 0.177189164145813, 0.139499167345464e4,
-0.703670932036388e-2, -0.152011044389648, 0.981916922991113e-4,
0.147199658618076e-2, 0.202618487025578e2, 0.899345518944240,
-0.211346402240858, 0.249971752957491e2],
"i": [0.106905684359136e1, -0.148620857922333e1, 0.259862256980408e15,
-.446352055678749e-11, -.566620757170032e-6,
-.235302885736849e-2, -0.269226321968839, 0.922024992944392e1,
0.357633505503772e-11, -.173942565562222e2, 0.700681785556229e-5,
-.267050351075768e-3, -.231779669675624e1, -.753533046979752e-12,
.481337131452891e1, -0.223286270422356e22, -.118746004987383e-4,
.646412934136496e-2, -0.410588536330937e-9, .422739537057241e20,
.313698180473812e-12, 0.16439533434504e-23, -.339823323754373e-5,
-.135268639905021e-1, -.723252514211625e-14, .184386437538366e-8,
-.463959533752385e-1, -.99226310037675e14, .688169154439335e-16,
-.222620998452197e-10, -.540843018624083e-7, .345570606200257e-2,
.422275800304086e11, -.126974478770487e-14, .927237985153679e-9,
.612670812016489e-13, -.722693924063497e-11,
-.383669502636822e-3, .374684572410204e-3, -0.931976897511086e5,
-0.247690616026922e-1, .658110546759474e2],
"j": [-0.111371317395540e-3, 0.100342892423685e1, 0.530615581928979e1,
0.179058760078792e-5, -0.728541958464774e-3, -.187576133371704e2,
0.199060874071849e-2, 0.243574755377290e2, -0.177040785499444e-3,
-0.25968038522713e-2, -0.198704578406823e3, 0.738627790224287e-4,
-0.236264692844138e-2, -0.161023121314333e1, 0.622322971786473e4,
-.960754116701669e-8, -.510572269720488e-10, .767373781404211e-2,
.663855469485254e-14, -.717590735526745e-9, 0.146564542926508e-4,
.309029474277013e-11, -.464216300971708e-15,
-.390499637961161e-13, -.236716126781431e-9,
.454652854268717e-11, -.422271787482497e-2,
0.283911742354706e-10, 0.270929002720228e1],
"k": [-0.401215699576099e9, 0.484501478318406e11, .394721471363678e-14,
.372629967374147e5, -.369794374168666e-29, -.380436407012452e-14,
0.475361629970233e-6, -0.879148916140706e-3, 0.844317863844331,
0.122433162656600e2, -0.104529634830279e3, 0.589702771277429e3,
-.291026851164444e14, .170343072841850e-5, -0.277617606975748e-3,
-0.344709605486686e1, 0.221333862447095e2, -0.194646110037079e3,
.808354639772825e-15, -.18084520914547e-10, -.696664158132412e-5,
-0.181057560300994e-2, 0.255830298579027e1, 0.328913873658481e4,
-.173270241249904e-18, -.661876792558034e-6, -.39568892342125e-2,
.604203299819132e-17, -.400879935920517e-13, .160751107464958e-8,
.383719409025556e-4, -.649565446702457e-14, -.149095328506e-11,
0.541449377329581e-8],
"l": [0.260702058647537e10, -.188277213604704e15, 0.554923870289667e19,
-.758966946387758e23, .413865186848908e27, -.81503800073806e12,
-.381458260489955e33, -.123239564600519e-1, 0.226095631437174e8,
-.49501780950672e12, 0.529482996422863e16, -0.444359478746295e23,
.521635864527315e35, -0.487095672740742e55, -0.714430209937547e6,
0.127868634615495, -0.100752127917598e2, 0.777451437960990e7,
-.108105480796471e25, -.357578581169659e-5, -0.212857169423484e1,
0.270706111085238e30, -0.695953622348829e33, 0.110609027472280,
0.721559163361354e2, -0.306367307532219e15, 0.265839618885530e-4,
0.253392392889754e-1, -0.214443041836579e3, 0.937846601489667,
0.223184043101700e1, 0.338401222509191e2, 0.494237237179718e21,
-0.198068404154428, -0.141415349881140e31, -0.993862421613651e2,
0.125070534142731e3, -0.996473529004439e3, 0.473137909872765e5,
0.116662121219322e33, -0.315874976271533e16,
-0.445703369196945e33, 0.642794932373694e33],
"m": [0.811384363481847, -0.568199310990094e4, -0.178657198172556e11,
0.795537657613427e32, -0.814568209346872e5, -0.659774567602874e8,
-.152861148659302e11, -0.560165667510446e12, 0.458384828593949e6,
-0.385754000383848e14, 0.453735800004273e8, 0.939454935735563e12,
.266572856432938e28, -0.547578313899097e10, 0.200725701112386e15,
0.185007245563239e13, 0.185135446828337e9, -0.170451090076385e12,
0.157890366037614e15, -0.202530509748774e16, 0.36819392618357e60,
0.170215539458936e18, 0.639234909918741e42, -.821698160721956e15,
-.795260241872306e24, 0.23341586947851e18, -0.600079934586803e23,
0.594584382273384e25, 0.189461279349492e40, -.810093428842645e46,
0.188813911076809e22, 0.111052244098768e36, 0.291133958602503e46,
-.329421923951460e22, -.137570282536696e26, 0.181508996303902e28,
-.346865122768353e30, -.21196114877426e38, -0.128617899887675e49,
0.479817895699239e65],
"n": [.280967799943151e-38, .614869006573609e-30, .582238667048942e-27,
.390628369238462e-22, .821445758255119e-20, .402137961842776e-14,
.651718171878301e-12, -.211773355803058e-7, 0.264953354380072e-2,
-.135031446451331e-31, -.607246643970893e-23,
-.402352115234494e-18, -.744938506925544e-16,
.189917206526237e-12, .364975183508473e-5, .177274872361946e-25,
-.334952758812999e-18, -.421537726098389e-8,
-.391048167929649e-1, .541276911564176e-13, .705412100773699e-11,
.258585887897486e-8, -.493111362030162e-10, -.158649699894543e-5,
-0.525037427886100, 0.220019901729615e-2, -0.643064132636925e-2,
0.629154149015048e2, 0.135147318617061e3, 0.240560808321713e-6,
-.890763306701305e-3, -0.440209599407714e4, -0.302807107747776e3,
0.159158748314599e4, 0.232534272709876e6, -0.792681207132600e6,
-.869871364662769e11, .354542769185671e12, 0.400849240129329e15],
"o": [.128746023979718e-34, -.735234770382342e-11, .28907869214915e-2,
0.244482731907223, 0.141733492030985e-23, -0.354533853059476e-28,
-.594539202901431e-17, -.585188401782779e-8, .201377325411803e-5,
0.138647388209306e1, -0.173959365084772e-4, 0.137680878349369e-2,
.814897605805513e-14, .425596631351839e-25,
-.387449113787755e-17, .13981474793024e-12, -.171849638951521e-2,
0.641890529513296e-21, .118960578072018e-10,
-.155282762571611e-17, .233907907347507e-7,
-.174093247766213e-12, .377682649089149e-8,
-.516720236575302e-10],
"p": [-0.982825342010366e-4, 0.105145700850612e1, 0.116033094095084e3,
0.324664750281543e4, -0.123592348610137e4, -0.561403450013495e-1,
0.856677401640869e-7, 0.236313425393924e3, 0.972503292350109e-2,
-.103001994531927e1, -0.149653706199162e-8, -.215743778861592e-4,
-0.834452198291445e1, 0.586602660564988, 0.343480022104968e-25,
.816256095947021e-5, .294985697916798e-2, 0.711730466276584e-16,
0.400954763806941e-9, 0.107766027032853e2, -0.409449599138182e-6,
-.729121307758902e-5, 0.677107970938909e-8, 0.602745973022975e-7,
-.382323011855257e-10, .179946628317437e-2,
-.345042834640005e-3],
"q": [-0.820433843259950e5, 0.473271518461586e11, -.805950021005413e-1,
0.328600025435980e2, -0.35661702998249e4, -0.172985781433335e10,
0.351769232729192e8, -0.775489259985144e6, 0.710346691966018e-4,
0.993499883820274e5, -0.642094171904570, -0.612842816820083e4,
.232808472983776e3, -0.142808220416837e-4, -0.643596060678456e-2,
-0.428577227475614e1, 0.225689939161918e4, 0.100355651721510e-2,
0.333491455143516, 0.109697576888873e1, 0.961917379376452,
-0.838165632204598e-1, 0.247795908411492e1, -.319114969006533e4],
"r": [.144165955660863e-2, -.701438599628258e13, -.830946716459219e-16,
0.261975135368109, 0.393097214706245e3, -0.104334030654021e5,
0.490112654154211e9, -0.147104222772069e-3, 0.103602748043408e1,
0.305308890065089e1, -0.399745276971264e7, 0.569233719593750e-11,
-.464923504407778e-1, -.535400396512906e-17,
.399988795693162e-12, -.536479560201811e-6, .159536722411202e-1,
.270303248860217e-14, .244247453858506e-7, -0.983430636716454e-5,
0.663513144224454e-1, -0.993456957845006e1, 0.546491323528491e3,
-0.143365406393758e5, 0.150764974125511e6, -.337209709340105e-9,
0.377501980025469e-8],
"s": [-0.532466612140254e23, .100415480000824e32, -.191540001821367e30,
0.105618377808847e17, 0.202281884477061e59, 0.884585472596134e8,
0.166540181638363e23, -0.313563197669111e6, -.185662327545324e54,
-.624942093918942e-1, -0.50416072413259e10, 0.187514491833092e5,
0.121399979993217e-2, 0.188317043049455e1, -0.167073503962060e4,
0.965961650599775, 0.294885696802488e1, -0.653915627346115e5,
0.604012200163444e50, -0.198339358557937, -0.175984090163501e58,
0.356314881403987e1, -0.575991255144384e3, 0.456213415338071e5,
-.109174044987829e8, 0.437796099975134e34, -0.616552611135792e46,
0.193568768917797e10, 0.950898170425042e54],
"t": [0.155287249586268e1, 0.664235115009031e1, -0.289366236727210e4,
-0.385923202309848e13, -.291002915783761e1, -.829088246858083e12,
0.176814899675218e1, -0.534686695713469e9, 0.160464608687834e18,
0.196435366560186e6, 0.156637427541729e13, -0.178154560260006e1,
-0.229746237623692e16, 0.385659001648006e8, 0.110554446790543e10,
-.677073830687349e14, -.327910592086523e31, -.341552040860644e51,
-.527251339709047e21, .245375640937055e24, -0.168776617209269e27,
.358958955867578e29, -0.656475280339411e36, 0.355286045512301e39,
.569021454413270e58, -.700584546433113e48, -0.705772623326374e65,
0.166861176200148e53, -.300475129680486e61, -.668481295196808e51,
.428432338620678e69, -.444227367758304e72, -.281396013562745e77],
"u": [0.122088349258355e18, 0.104216468608488e10, -.882666931564652e16,
.259929510849499e20, 0.222612779142211e15, -0.878473585050085e18,
-0.314432577551552e22, -.216934916996285e13, .159079648196849e21,
-.339567617303423e3, 0.884387651337836e13, -0.843405926846418e21,
0.114178193518022e2, -0.122708229235641e-3, -0.106201671767107e3,
.903443213959313e25, -0.693996270370852e28, 0.648916718965575e-8,
0.718957567127851e4, 0.105581745346187e-2, -0.651903203602581e15,
-0.160116813274676e25, -0.510254294237837e-8, -0.152355388953402,
0.677143292290144e12, 0.276378438378930e15, 0.116862983141686e-1,
-.301426947980171e14, 0.169719813884840e-7, 0.104674840020929e27,
-0.10801690456014e5, -0.990623601934295e-12, 0.536116483602738e7,
.226145963747881e22, -0.488731565776210e-9, 0.151001548880670e-4,
-0.227700464643920e5, -0.781754507698846e28],
"v": [-.415652812061591e-54, .177441742924043e-60,
-.357078668203377e-54, 0.359252213604114e-25,
-0.259123736380269e2, 0.594619766193460e5, -0.624184007103158e11,
0.313080299915944e17, .105006446192036e-8, -0.192824336984852e-5,
0.654144373749937e6, 0.513117462865044e13, -.697595750347391e19,
-.103977184454767e29, .119563135540666e-47,
-.436677034051655e-41, .926990036530639e-29, .587793105620748e21,
.280375725094731e-17, -0.192359972440634e23, .742705723302738e27,
-0.517429682450605e2, 0.820612048645469e7, -0.188214882341448e-8,
.184587261114837e-1, -0.135830407782663e-5, -.723681885626348e17,
-.223449194054124e27, -.111526741826431e-34,
.276032601145151e-28, 0.134856491567853e15, 0.652440293345860e-9,
0.510655119774360e17, -.468138358908732e32, -.760667491183279e16,
-.417247986986821e-18, 0.312545677756104e14,
-.100375333864186e15, .247761392329058e27],
"w": [-.586219133817016e-7, -.894460355005526e11, .531168037519774e-30,
0.109892402329239, -0.575368389425212e-1, 0.228276853990249e5,
-.158548609655002e19, .329865748576503e-27,
-.634987981190669e-24, 0.615762068640611e-8, -.961109240985747e8,
-.406274286652625e-44, -0.471103725498077e-12, 0.725937724828145,
.187768525763682e-38, -.103308436323771e4, -0.662552816342168e-1,
0.579514041765710e3, .237416732616644e-26, .271700235739893e-14,
-0.9078862134836e2, -0.171242509570207e-36, 0.156792067854621e3,
0.923261357901470, -0.597865988422577e1, 0.321988767636389e7,
-.399441390042203e-29, .493429086046981e-7, .812036983370565e-19,
-.207610284654137e-11, -.340821291419719e-6,
.542000573372233e-17, -.856711586510214e-12,
0.266170454405981e-13, 0.858133791857099e-5],
"x": [.377373741298151e19, -.507100883722913e13, -0.10336322559886e16,
.184790814320773e-5, -.924729378390945e-3, -0.425999562292738e24,
-.462307771873973e-12, .107319065855767e22, 0.648662492280682e11,
0.244200600688281e1, -0.851535733484258e10, 0.169894481433592e22,
0.215780222509020e-26, -0.320850551367334, -0.382642448458610e17,
-.275386077674421e-28, -.563199253391666e6, -.326068646279314e21,
0.397949001553184e14, 0.100824008584757e-6, 0.162234569738433e5,
-0.432355225319745e11, -.59287424559861e12, 0.133061647281106e1,
0.157338197797544e7, 0.258189614270853e14, 0.262413209706358e25,
-.920011937431142e-1, 0.220213765905426e-2, -0.110433759109547e2,
0.847004870612087e7, -0.592910695762536e9, -0.183027173269660e-4,
0.181339603516302, -0.119228759669889e4, 0.430867658061468e7],
"y": [-0.525597995024633e-9, 0.583441305228407e4, -.134778968457925e17,
.118973500934212e26, -0.159096490904708e27, -.315839902302021e-6,
0.496212197158239e3, 0.327777227273171e19, -0.527114657850696e22,
.210017506281863e-16, 0.705106224399834e21, -.266713136106469e31,
-0.145370512554562e-7, 0.149333917053130e28, -.149795620287641e8,
-.3818819062711e16, 0.724660165585797e-4, -0.937808169550193e14,
0.514411468376383e10, -0.828198594040141e5],
"z": [0.24400789229065e-10, -0.463057430331242e7, 0.728803274777712e10,
.327776302858856e16, -.110598170118409e10, -0.323899915729957e13,
.923814007023245e16, 0.842250080413712e-12, 0.663221436245506e12,
-.167170186672139e15, .253749358701391e4, -0.819731559610523e-20,
0.328380587890663e12, -0.625004791171543e8, 0.803197957462023e21,
-.204397011338353e-10, -.378391047055938e4, 0.97287654593862e-2,
0.154355721681459e2, -0.373962862928643e4, -0.682859011374572e11,
-0.248488015614543e-3, 0.394536049497068e7]}
    # Pick the exponent/coefficient vectors for the requested subregion code x.
    I = I[x]
    J = J[x]
    n = n[x]
    # Subregion constants: reducing volume v*, reducing pressure p* [MPa],
    # reducing temperature T* [K], and the non-linearity parameters a, b, c,
    # d, e of the supplementary release (for "n", c/d/e are unused -> None).
    v_, P_, T_, a, b, c, d, e = par[x]
    Pr = P/P_   # reduced pressure pi = p/p*
    Tr = T/T_   # reduced temperature theta = T/T*
    suma = 0
    if x == "n":
        # Subregion "n" uses the exponential form (Eq. 5 of Supp-VPT3-2016):
        # v = v* * exp(sum n_i (pi-a)^Ii (theta-b)^Ji)
        for i, j, ni in zip(I, J, n):
            suma += ni * (Pr-a)**i * (Tr-b)**j
        return v_*exp(suma)
    else:
        # All other subregions use the power form (Eq. 4 of Supp-VPT3-2016):
        # v = v* * (sum n_i (pi-a)^(c*Ii) (theta-b)^(Ji*d))^e
        for i, j, ni in zip(I, J, n):
            suma += ni * (Pr-a)**(c*i) * (Tr-b)**(j*d)
        return v_*suma**e
"""Backward equation for region 3x, v=f(P,T)
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
x : char
Region 3 subregion code
Returns
-------
v : float
Specific volume, [m³/kg]
References
----------
IAPWS, Revised Supplementary Release on Backward Equations for Specific
Volume as a Function of Pressure and Temperature v(p,T) for Region 3 of the
IAPWS Industrial Formulation 1997 for the Thermodynamic Properties of Water
and Steam, http://www.iapws.org/relguide/Supp-VPT3-2016.pdf, Eq. 4-5
Examples
--------
>>> _Backward3x_v_PT(630,50,"a")
0.001470853100
>>> _Backward3x_v_PT(670,80,"a")
0.001503831359
>>> _Backward3x_v_PT(710,50,"b")
0.002204728587
>>> _Backward3x_v_PT(750,80,"b")
0.001973692940
>>> _Backward3x_v_PT(630,20,"c")
0.001761696406
>>> _Backward3x_v_PT(650,30,"c")
0.001819560617
>>> _Backward3x_v_PT(656,26,"d")
0.002245587720
>>> _Backward3x_v_PT(670,30,"d")
0.002506897702
>>> _Backward3x_v_PT(661,26,"e")
0.002970225962
>>> _Backward3x_v_PT(675,30,"e")
0.003004627086
>>> _Backward3x_v_PT(671,26,"f")
0.005019029401
>>> _Backward3x_v_PT(690,30,"f")
0.004656470142
>>> _Backward3x_v_PT(649,23.6,"g")
0.002163198378
>>> _Backward3x_v_PT(650,24,"g")
0.002166044161
>>> _Backward3x_v_PT(652,23.6,"h")
0.002651081407
>>> _Backward3x_v_PT(654,24,"h")
0.002967802335
>>> _Backward3x_v_PT(653,23.6,"i")
0.003273916816
>>> _Backward3x_v_PT(655,24,"i")
0.003550329864
>>> _Backward3x_v_PT(655,23.5,"j")
0.004545001142
>>> _Backward3x_v_PT(660,24,"j")
0.005100267704
>>> _Backward3x_v_PT(660,23,"k")
0.006109525997
>>> _Backward3x_v_PT(670,24,"k")
0.006427325645
>>> _Backward3x_v_PT(646,22.6,"l")
0.002117860851
>>> _Backward3x_v_PT(646,23,"l")
0.002062374674
>>> _Backward3x_v_PT(648.6,22.6,"m")
0.002533063780
>>> _Backward3x_v_PT(649.3,22.8,"m")
0.002572971781
>>> _Backward3x_v_PT(649,22.6,"n")
0.002923432711
>>> _Backward3x_v_PT(649.7,22.8,"n")
0.002913311494
>>> _Backward3x_v_PT(649.1,22.6,"o")
0.003131208996
>>> _Backward3x_v_PT(649.9,22.8,"o")
0.003221160278
>>> _Backward3x_v_PT(649.4,22.6,"p")
0.003715596186
>>> _Backward3x_v_PT(650.2,22.8,"p")
0.003664754790
>>> _Backward3x_v_PT(640,21.1,"q")
0.001970999272
>>> _Backward3x_v_PT(643,21.8,"q")
0.002043919161
>>> _Backward3x_v_PT(644,21.1,"r")
0.005251009921
>>> _Backward3x_v_PT(648,21.8,"r")
0.005256844741
>>> _Backward3x_v_PT(635,19.1,"s")
0.001932829079
>>> _Backward3x_v_PT(638,20,"s")
0.001985387227
>>> _Backward3x_v_PT(626,17,"t")
0.008483262001
>>> _Backward3x_v_PT(640,20,"t")
0.006227528101
>>> _Backward3x_v_PT(644.6,21.5,"u")
0.002268366647
>>> _Backward3x_v_PT(646.1,22,"u")
0.002296350553
>>> _Backward3x_v_PT(648.6,22.5,"v")
0.002832373260
>>> _Backward3x_v_PT(647.9,22.3,"v")
0.002811424405
>>> _Backward3x_v_PT(647.5,22.15,"w")
0.003694032281
>>> _Backward3x_v_PT(648.1,22.3,"w")
0.003622226305
>>> _Backward3x_v_PT(648,22.11,"x")
0.004528072649
>>> _Backward3x_v_PT(649,22.3,"x")
0.004556905799
>>> _Backward3x_v_PT(646.84,22,"y")
0.002698354719
>>> _Backward3x_v_PT(647.05,22.064,"y")
0.002717655648
>>> _Backward3x_v_PT(646.89,22,"z")
0.003798732962
>>> _Backward3x_v_PT(647.15,22.064,"z")
0.003701940009
"""
par = {
"a": [0.0024, 100, 760, 0.085, 0.817, 1, 1, 1],
"b": [0.0041, 100, 860, 0.280, 0.779, 1, 1, 1],
"c": [0.0022, 40, 690, 0.259, 0.903, 1, 1, 1],
"d": [0.0029, 40, 690, 0.559, 0.939, 1, 1, 4],
"e": [0.0032, 40, 710, 0.587, 0.918, 1, 1, 1],
"f": [0.0064, 40, 730, 0.587, 0.891, 0.5, 1, 4],
"g": [0.0027, 25, 660, 0.872, 0.971, 1, 1, 4],
"h": [0.0032, 25, 660, 0.898, 0.983, 1, 1, 4],
"i": [0.0041, 25, 660, 0.910, 0.984, 0.5, 1, 4],
"j": [0.0054, 25, 670, 0.875, 0.964, 0.5, 1, 4],
"k": [0.0077, 25, 680, 0.802, 0.935, 1, 1, 1],
"l": [0.0026, 24, 650, 0.908, 0.989, 1, 1, 4],
"m": [0.0028, 23, 650, 1.000, 0.997, 1, 0.25, 1],
"n": [0.0031, 23, 650, 0.976, 0.997, None, None, None],
"o": [0.0034, 23, 650, 0.974, 0.996, 0.5, 1, 1],
"p": [0.0041, 23, 650, 0.972, 0.997, 0.5, 1, 1],
"q": [0.0022, 23, 650, 0.848, 0.983, 1, 1, 4],
"r": [0.0054, 23, 650, 0.874, 0.982, 1, 1, 1],
"s": [0.0022, 21, 640, 0.886, 0.990, 1, 1, 4],
"t": [0.0088, 20, 650, 0.803, 1.020, 1, 1, 1],
"u": [0.0026, 23, 650, 0.902, 0.988, 1, 1, 1],
"v": [0.0031, 23, 650, 0.960, 0.995, 1, 1, 1],
"w": [0.0039, 23, 650, 0.959, 0.995, 1, 1, 4],
"x": [0.0049, 23, 650, 0.910, 0.988, 1, 1, 1],
"y": [0.0031, 22, 650, 0.996, 0.994, 1, 1, 4],
"z": [0.0038, 22, 650, 0.993, 0.994, 1, 1, 4],
}
I = {
"a": [-12, -12, -12, -10, -10, -10, -8, -8, -8, -6, -5, -5, -5, -4, -3,
-3, -3, -3, -2, -2, -2, -1, -1, -1, 0, 0, 1, 1, 2, 2],
"b": [-12, -12, -10, -10, -8, -6, -6, -6, -5, -5, -5, -4, -4, -4, -3,
-3, -3, -3, -3, -2, -2, -2, -1, -1, 0, 0, 1, 1, 2, 3, 4, 4],
"c": [-12, -12, -12, -10, -10, -10, -8, -8, -8, -6, -5, -5, -5, -4, -4,
-3, -3, -2, -2, -2, -1, -1, -1, 0, 0, 0, 1, 1, 2, 2, 2, 2, 3, 3,
8],
"d": [-12, -12, -12, -12, -12, -12, -10, -10, -10, -10, -10, -10, -10,
-8, -8, -8, -8, -6, -6, -5, -5, -5, -5, -4, -4, -4, -3, -3, -2,
-2, -1, -1, -1, 0, 0, 1, 1, 3],
"e": [-12, -12, -10, -10, -10, -10, -10, -8, -8, -8, -6, -5, -4, -4,
-3, -3, -3, -2, -2, -2, -2, -1, 0, 0, 1, 1, 1, 2, 2],
"f": [0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 3, 3, 4, 5, 5, 6, 7, 7,
10, 12, 12, 12, 14, 14, 14, 14, 14, 16, 16, 18, 18, 20, 20, 20,
22, 24, 24, 28, 32],
"g": [-12, -12, -12, -12, -12, -12, -10, -10, -10, -8, -8, -8, -8, -6,
-6, -5, -5, -4, -3, -2, -2, -2, -2, -1, -1, -1, 0, 0, 0, 1, 1, 1,
3, 5, 6, 8, 10, 10],
"h": [-12, -12, -10, -10, -10, -10, -10, -10, -8, -8, -8, -8, -8, -6,
-6, -6, -5, -5, -5, -4, -4, -3, -3, -2, -1, -1, 0, 1, 1],
"i": [0, 0, 0, 1, 1, 1, 1, 2, 3, 3, 4, 4, 4, 5, 5, 5, 7, 7, 8, 8, 10,
12, 12, 12, 14, 14, 14, 14, 18, 18, 18, 18, 18, 20, 20, 22, 24,
24, 32, 32, 36, 36],
"j": [0, 0, 0, 1, 1, 1, 2, 2, 3, 4, 4, 5, 5, 5, 6, 10, 12, 12, 14, 14,
14, 16, 18, 20, 20, 24, 24, 28, 28],
"k": [-2, -2, -1, -1, 0, -0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2,
2, 2, 2, 2, 5, 5, 5, 6, 6, 6, 6, 8, 10, 12],
"l": [-12, -12, -12, -12, -12, -10, -10, -8, -8, -8, -8, -8, -8, -8,
-6, -5, -5, -4, -4, -3, -3, -3, -3, -2, -2, -2, -1, -1, -1, 0, 0,
0, 0, 1, 1, 2, 4, 5, 5, 6, 10, 10, 14],
"m": [0, 3, 8, 20, 1, 3, 4, 5, 1, 6, 2, 4, 14, 2, 5, 3, 0, 1, 1, 1, 28,
2, 16, 0, 5, 0, 3, 4, 12, 16, 1, 8, 14, 0, 2, 3, 4, 8, 14, 24],
"n": [0, 3, 4, 6, 7, 10, 12, 14, 18, 0, 3, 5, 6, 8, 12, 0, 3, 7, 12,
2, 3, 4, 2, 4, 7, 4, 3, 5, 6, 0, 0, 3, 1, 0, 1, 0, 1, 0, 1],
"o": [0, 0, 0, 2, 3, 4, 4, 4, 4, 4, 5, 5, 6, 7, 8, 8, 8, 10, 10, 14,
14, 20, 20, 24],
"p": [0, 0, 0, 0, 1, 2, 3, 3, 4, 6, 7, 7, 8, 10, 12, 12, 12, 14, 14,
14, 16, 18, 20, 22, 24, 24, 36],
"q": [-12, -12, -10, -10, -10, -10, -8, -6, -5, -5, -4, -4, -3, -2,
-2, -2, -2, -1, -1, -1, 0, 1, 1, 1],
"r": [-8, -8, -3, -3, -3, -3, -3, 0, 0, 0, 0, 3, 3, 8, 8, 8, 8, 10,
10, 10, 10, 10, 10, 10, 10, 12, 14],
"s": [-12, -12, -10, -8, -6, -5, -5, -4, -4, -3, -3, -2, -1, -1, -1, 0,
0, 0, 0, 1, 1, 3, 3, 3, 4, 4, 4, 5, 14],
"t": [0, 0, 0, 0, 1, 1, 2, 2, 2, 3, 3, 4, 4, 7, 7, 7, 7, 7, 10, 10, 10,
10, 10, 18, 20, 22, 22, 24, 28, 32, 32, 32, 36],
"u": [-12, -10, -10, -10, -8, -8, -8, -6, -6, -5, -5, -5, -3, -1, -1,
-1, -1, 0, 0, 1, 2, 2, 3, 5, 5, 5, 6, 6, 8, 8, 10, 12, 12, 12,
14, 14, 14, 14],
"v": [-10, -8, -6, -6, -6, -6, -6, -6, -5, -5, -5, -5, -5, -5, -4, -4,
-4, -4, -3, -3, -3, -2, -2, -1, -1, 0, 0, 0, 1, 1, 3, 4, 4, 4, 5,
8, 10, 12, 14],
"w": [-12, -12, -10, -10, -8, -8, -8, -6, -6, -6, -6, -5, -4, -4, -3,
-3, -2, -2, -1, -1, -1, 0, 0, 1, 2, 2, 3, 3, 5, 5, 5, 8, 8, 10,
10],
"x": [-8, -6, -5, -4, -4, -4, -3, -3, -1, 0, 0, 0, 1, 1, 2, 3, 3, 3, 4,
5, 5, 5, 6, 8, 8, 8, 8, 10, 12, 12, 12, 12, 14, 14, 14, 14],
"y": [0, 0, 0, 0, 1, 2, 2, 2, 2, 3, 3, 3, 4, 4, 5, 5, 8, 8, 10, 12],
"z": [-8, -6, -5, -5, -4, -4, -4, -3, -3, -3, -2, -1, 0, 1, 2, 3, 3, 6,
6, 6, 6, 8, 8]}
J = {
"a": [5, 10, 12, 5, 10, 12, 5, 8, 10, 1, 1, 5, 10, 8, 0, 1, 3, 6, 0,
2, 3, 0, 1, 2, 0, 1, 0, 2, 0, 2],
"b": [10, 12, 8, 14, 8, 5, 6, 8, 5, 8, 10, 2, 4, 5, 0, 1, 2, 3, 5, 0,
2, 5, 0, 2, 0, 1, 0, 2, 0, 2, 0, 1],
"c": [6, 8, 10, 6, 8, 10, 5, 6, 7, 8, 1, 4, 7, 2, 8, 0, 3, 0, 4, 5, 0,
1, 2, 0, 1, 2, 0, 2, 0, 1, 3, 7, 0, 7, 1],
"d": [4, 6, 7, 10, 12, 16, 0, 2, 4, 6, 8, 10, 14, 3, 7, 8, 10, 6, 8, 1,
2, 5, 7, 0, 1, 7, 2, 4, 0, 1, 0, 1, 5, 0, 2, 0, 6, 0],
"e": [14, 16, 3, 6, 10, 14, 16, 7, 8, 10, 6, 6, 2, 4, 2, 6, 7, 0, 1,
3, 4, 0, 0, 1, 0, 4, 6, 0, 2],
"f": [-3, -2, -1, 0, 1, 2, -1, 1, 2, 3, 0, 1, -5, -2, 0, -3, -8, 1, -6,
-4, 1, -6, -10, -8, -4, -12, -10, -8, -6, -4, -10, -8, -12, -10,
-12, -10, -6, -12, -12, -4, -12, -12],
"g": [7, 12, 14, 18, 22, 24, 14, 20, 24, 7, 8, 10, 12, 8, 22, 7, 20,
22, 7, 3, 5, 14, 24, 2, 8, 18, 0, 1, 2, 0, 1, 3, 24, 22, 12, 3,
0, 6],
"h": [8, 12, 4, 6, 8, 10, 14, 16, 0, 1, 6, 7, 8, 4, 6, 8, 2, 3, 4, 2,
4, 1, 2, 0, 0, 2, 0, 0, 2],
"i": [0, 1, 10, -4, -2, -1, 0, 0, -5, 0, -3, -2, -1, -6, -1, 12, -4,
-3, -6, 10, -8, -12, -6, -4, -10, -8, -4, 5, -12, -10, -8, -6,
2, -12, -10, -12, -12, -8, -10, -5, -10, -8],
"j": [-1, 0, 1, -2, -1, 1, -1, 1, -2, -2, 2, -3, -2, 0, 3, -6, -8, -3,
-10, -8, -5, -10, -12, -12, -10, -12, -6, -12, -5],
"k": [10, 12, -5, 6, -12, -6, -2, -1, 0, 1, 2, 3, 14, -3, -2, 0, 1, 2,
-8, -6, -3, -2, 0, 4, -12, -6, -3, -12, -10, -8, -5, -12, -12,
-10],
"l": [14, 16, 18, 20, 22, 14, 24, 6, 10, 12, 14, 18, 24, 36, 8, 4, 5,
7, 16, 1, 3, 18, 20, 2, 3, 10, 0, 1, 3, 0, 1, 2, 12, 0, 16, 1, 0,
0, 1, 14, 4, 12, 10],
"m": [0, 0, 0, 2, 5, 5, 5, 5, 6, 6, 7, 8, 8, 10, 10, 12, 14, 14, 18,
20, 20, 22, 22, 24, 24, 28, 28, 28, 28, 28, 32, 32, 32, 36, 36,
36, 36, 36, 36, 36],
"n": [-12, -12, -12, -12, -12, -12, -12, -12, -12, -10, -10, -10, -10,
-10, -10, -8, -8, -8, -8, -6, -6, -6, -5, -5, -5, -4, -3, -3,
-3, -2, -1, -1, 0, 1, 1, 2, 4, 5, 6],
"o": [-12, -4, -1, -1, -10, -12, -8, -5, -4, -1, -4, -3, -8, -12, -10,
-8, -4, -12, -8, -12, -8, -12, -10, -12],
"p": [-1, 0, 1, 2, 1, -1, -3, 0, -2, -2, -5, -4, -2, -3, -12, -6, -5,
-10, -8, -3, -8, -8, -10, -10, -12, -8, -12],
"q": [10, 12, 6, 7, 8, 10, 8, 6, 2, 5, 3, 4, 3, 0, 1, 2, 4, 0, 1, 2,
0, 0, 1, 3],
"r": [6, 14, -3, 3, 4, 5, 8, -1, 0, 1, 5, -6, -2, -12, -10, -8, -5,
-12, -10, -8, -6, -5, -4, -3, -2, -12, -12],
"s": [20, 24, 22, 14, 36, 8, 16, 6, 32, 3, 8, 4, 1, 2, 3, 0, 1, 4, 28,
0, 32, 0, 1, 2, 3, 18, 24, 4, 24],
"t": [0, 1, 4, 12, 0, 10, 0, 6, 14, 3, 8, 0, 10, 3, 4, 7, 20, 36, 10,
12, 14, 16, 22, 18, 32, 22, 36, 24, 28, 22, 32, 36, 36],
"u": [14, 10, 12, 14, 10, 12, 14, 8, 12, 4, 8, 12, 2, -1, 1, 12, 14,
-3, 1, -2, 5, 10, -5, -4, 2, 3, -5, 2, -8, 8, -4, -12, -4, 4,
-12, -10, -6, 6],
"v": [-8, -12, -12, -3, 5, 6, 8, 10, 1, 2, 6, 8, 10, 14, -12, -10, -6,
10, -3, 10, 12, 2, 4, -2, 0, -2, 6, 10, -12, -10, 3, -6, 3, 10,
2, -12, -2, -3, 1],
"w": [8, 14, -1, 8, 6, 8, 14, -4, -3, 2, 8, -10, -1, 3, -10, 3, 1, 2,
-8, -4, 1, -12, 1, -1, -1, 2, -12, -5, -10, -8, -6, -12, -10,
-12, -8],
"x": [14, 10, 10, 1, 2, 14, -2, 12, 5, 0, 4, 10, -10, -1, 6, -12, 0,
8, 3, -6, -2, 1, 1, -6, -3, 1, 8, -8, -10, -8, -5, -4, -12, -10,
-8, -6],
"y": [-3, 1, 5, 8, 8, -4, -1, 4, 5, -8, 4, 8, -6, 6, -2, 1, -8, -2,
-5, -8],
"z": [3, 6, 6, 8, 5, 6, 8, -2, 5, 6, 2, -6, 3, 1, 6, -6, -2, -6, -5,
-4, -1, -8, -4]}
n = {
"a": [0.110879558823853e-2, 0.572616740810616e3, -0.767051948380852e5,
-0.253321069529674e-1, 0.628008049345689e4, 0.234105654131876e6,
0.216867826045856, -0.156237904341963e3, -0.269893956176613e5,
-0.180407100085505e-3, 0.116732227668261e-2, 0.266987040856040e2,
0.282776617243286e5, -0.242431520029523e4, 0.435217323022733e-3,
-0.122494831387441e-1, 0.179357604019989e1, 0.442729521058314e2,
-0.593223489018342e-2, 0.453186261685774, 0.135825703129140e1,
0.408748415856745e-1, 0.474686397863312, 0.118646814997915e1,
0.546987265727549, 0.195266770452643, -0.502268790869663e-1,
-0.369645308193377, 0.633828037528420e-2, 0.797441793901017e-1],
"b": [-0.827670470003621e-1, 0.416887126010565e2, 0.483651982197059e-1,
-0.291032084950276e5, -0.111422582236948e3, -.202300083904014e-1,
0.294002509338515e3, 0.140244997609658e3, -0.344384158811459e3,
0.361182452612149e3, -0.140699677420738e4, -0.202023902676481e-2,
0.171346792457471e3, -0.425597804058632e1, 0.691346085000334e-5,
0.151140509678925e-2, -0.416375290166236e-1, -.413754957011042e2,
-0.506673295721637e2, -0.572212965569023e-3, 0.608817368401785e1,
0.239600660256161e2, 0.122261479925384e-1, 0.216356057692938e1,
0.398198903368642, -0.116892827834085, -0.102845919373532,
-0.492676637589284, 0.655540456406790e-1, -0.240462535078530,
-0.269798180310075e-1, 0.128369435967012],
"c": [0.311967788763030e1, 0.276713458847564e5, 0.322583103403269e8,
-0.342416065095363e3, -0.899732529907377e6, -0.793892049821251e8,
0.953193003217388e2, 0.229784742345072e4, 0.175336675322499e6,
0.791214365222792e7, 0.319933345844209e-4, -0.659508863555767e2,
-0.833426563212851e6, 0.645734680583292e-1, -0.382031020570813e7,
0.406398848470079e-4, 0.310327498492008e2, -0.892996718483724e-3,
0.234604891591616e3, 0.377515668966951e4, 0.158646812591361e-1,
0.707906336241843, 0.126016225146570e2, 0.736143655772152,
0.676544268999101, -0.178100588189137e2, -0.156531975531713,
0.117707430048158e2, 0.840143653860447e-1, -0.186442467471949,
-0.440170203949645e2, 0.123290423502494e7, -0.240650039730845e-1,
-0.107077716660869e7, 0.438319858566475e-1],
"d": [-0.452484847171645e-9, .315210389538801e-4, -.214991352047545e-2,
0.508058874808345e3, -0.127123036845932e8, 0.115371133120497e13,
-.197805728776273e-15, .241554806033972e-10,
-.156481703640525e-5, 0.277211346836625e-2, -0.203578994462286e2,
0.144369489909053e7, -0.411254217946539e11, 0.623449786243773e-5,
-.221774281146038e2, -0.689315087933158e5, -0.195419525060713e8,
0.316373510564015e4, 0.224040754426988e7, -0.436701347922356e-5,
-.404213852833996e-3, -0.348153203414663e3, -0.385294213555289e6,
0.135203700099403e-6, 0.134648383271089e-3, 0.125031835351736e6,
0.968123678455841e-1, 0.225660517512438e3, -0.190102435341872e-3,
-.299628410819229e-1, 0.500833915372121e-2, 0.387842482998411,
-0.138535367777182e4, 0.870745245971773, 0.171946252068742e1,
-.326650121426383e-1, 0.498044171727877e4, 0.551478022765087e-2],
"e": [0.715815808404721e9, -0.114328360753449e12, .376531002015720e-11,
-0.903983668691157e-4, 0.665695908836252e6, 0.535364174960127e10,
0.794977402335603e11, 0.922230563421437e2, -0.142586073991215e6,
-0.111796381424162e7, 0.896121629640760e4, -0.669989239070491e4,
0.451242538486834e-2, -0.339731325977713e2, -0.120523111552278e1,
0.475992667717124e5, -0.266627750390341e6, -0.153314954386524e-3,
0.305638404828265, 0.123654999499486e3, -0.104390794213011e4,
-0.157496516174308e-1, 0.685331118940253, 0.178373462873903e1,
-0.544674124878910, 0.204529931318843e4, -0.228342359328752e5,
0.413197481515899, -0.341931835910405e2],
"f": [-0.251756547792325e-7, .601307193668763e-5, -.100615977450049e-2,
0.999969140252192, 0.214107759236486e1, -0.165175571959086e2,
-0.141987303638727e-2, 0.269251915156554e1, 0.349741815858722e2,
-0.300208695771783e2, -0.131546288252539e1, -0.839091277286169e1,
0.181545608337015e-9, -0.591099206478909e-3, 0.152115067087106e1,
0.252956470663225e-4, 0.100726265203786e-14, -0.14977453386065e1,
-0.793940970562969e-9, -0.150290891264717e-3, .151205531275133e1,
0.470942606221652e-5, .195049710391712e-12, -.911627886266077e-8,
.604374640201265e-3, -.225132933900136e-15, .610916973582981e-11,
-.303063908043404e-6, -.137796070798409e-4, -.919296736666106e-3,
.639288223132545e-9, .753259479898699e-6, -0.400321478682929e-12,
.756140294351614e-8, -.912082054034891e-11, -.237612381140539e-7,
0.269586010591874e-4, -.732828135157839e-10, .241995578306660e-9,
-.405735532730322e-3, .189424143498011e-9, -.486632965074563e-9],
"g": [0.412209020652996e-4, -0.114987238280587e7, 0.948180885032080e10,
-0.195788865718971e18, 0.4962507048713e25, -0.105549884548496e29,
-0.758642165988278e12, -.922172769596101e23, .725379072059348e30,
-0.617718249205859e2, 0.107555033344858e5, -0.379545802336487e8,
0.228646846221831e12, -0.499741093010619e7, -.280214310054101e31,
0.104915406769586e7, 0.613754229168619e28, 0.802056715528378e32,
-0.298617819828065e8, -0.910782540134681e2, 0.135033227281565e6,
-0.712949383408211e19, -0.104578785289542e37, .304331584444093e2,
0.593250797959445e10, -0.364174062110798e28, 0.921791403532461,
-0.337693609657471, -0.724644143758508e2, -0.110480239272601,
0.536516031875059e1, -0.291441872156205e4, 0.616338176535305e40,
-0.120889175861180e39, 0.818396024524612e23, 0.940781944835829e9,
-0.367279669545448e5, -0.837513931798655e16],
"h": [0.561379678887577e-1, 0.774135421587083e10, 0.111482975877938e-8,
-0.143987128208183e-2, 0.193696558764920e4, -0.605971823585005e9,
0.171951568124337e14, -.185461154985145e17, 0.38785116807801e-16,
-.395464327846105e-13, -0.170875935679023e3, -0.21201062070122e4,
0.177683337348191e8, 0.110177443629575e2, -0.234396091693313e6,
-0.656174421999594e7, 0.156362212977396e-4, -0.212946257021400e1,
0.135249306374858e2, 0.177189164145813, 0.139499167345464e4,
-0.703670932036388e-2, -0.152011044389648, 0.981916922991113e-4,
0.147199658618076e-2, 0.202618487025578e2, 0.899345518944240,
-0.211346402240858, 0.249971752957491e2],
"i": [0.106905684359136e1, -0.148620857922333e1, 0.259862256980408e15,
-.446352055678749e-11, -.566620757170032e-6,
-.235302885736849e-2, -0.269226321968839, 0.922024992944392e1,
0.357633505503772e-11, -.173942565562222e2, 0.700681785556229e-5,
-.267050351075768e-3, -.231779669675624e1, -.753533046979752e-12,
.481337131452891e1, -0.223286270422356e22, -.118746004987383e-4,
.646412934136496e-2, -0.410588536330937e-9, .422739537057241e20,
.313698180473812e-12, 0.16439533434504e-23, -.339823323754373e-5,
-.135268639905021e-1, -.723252514211625e-14, .184386437538366e-8,
-.463959533752385e-1, -.99226310037675e14, .688169154439335e-16,
-.222620998452197e-10, -.540843018624083e-7, .345570606200257e-2,
.422275800304086e11, -.126974478770487e-14, .927237985153679e-9,
.612670812016489e-13, -.722693924063497e-11,
-.383669502636822e-3, .374684572410204e-3, -0.931976897511086e5,
-0.247690616026922e-1, .658110546759474e2],
"j": [-0.111371317395540e-3, 0.100342892423685e1, 0.530615581928979e1,
0.179058760078792e-5, -0.728541958464774e-3, -.187576133371704e2,
0.199060874071849e-2, 0.243574755377290e2, -0.177040785499444e-3,
-0.25968038522713e-2, -0.198704578406823e3, 0.738627790224287e-4,
-0.236264692844138e-2, -0.161023121314333e1, 0.622322971786473e4,
-.960754116701669e-8, -.510572269720488e-10, .767373781404211e-2,
.663855469485254e-14, -.717590735526745e-9, 0.146564542926508e-4,
.309029474277013e-11, -.464216300971708e-15,
-.390499637961161e-13, -.236716126781431e-9,
.454652854268717e-11, -.422271787482497e-2,
0.283911742354706e-10, 0.270929002720228e1],
"k": [-0.401215699576099e9, 0.484501478318406e11, .394721471363678e-14,
.372629967374147e5, -.369794374168666e-29, -.380436407012452e-14,
0.475361629970233e-6, -0.879148916140706e-3, 0.844317863844331,
0.122433162656600e2, -0.104529634830279e3, 0.589702771277429e3,
-.291026851164444e14, .170343072841850e-5, -0.277617606975748e-3,
-0.344709605486686e1, 0.221333862447095e2, -0.194646110037079e3,
.808354639772825e-15, -.18084520914547e-10, -.696664158132412e-5,
-0.181057560300994e-2, 0.255830298579027e1, 0.328913873658481e4,
-.173270241249904e-18, -.661876792558034e-6, -.39568892342125e-2,
.604203299819132e-17, -.400879935920517e-13, .160751107464958e-8,
.383719409025556e-4, -.649565446702457e-14, -.149095328506e-11,
0.541449377329581e-8],
"l": [0.260702058647537e10, -.188277213604704e15, 0.554923870289667e19,
-.758966946387758e23, .413865186848908e27, -.81503800073806e12,
-.381458260489955e33, -.123239564600519e-1, 0.226095631437174e8,
-.49501780950672e12, 0.529482996422863e16, -0.444359478746295e23,
.521635864527315e35, -0.487095672740742e55, -0.714430209937547e6,
0.127868634615495, -0.100752127917598e2, 0.777451437960990e7,
-.108105480796471e25, -.357578581169659e-5, -0.212857169423484e1,
0.270706111085238e30, -0.695953622348829e33, 0.110609027472280,
0.721559163361354e2, -0.306367307532219e15, 0.265839618885530e-4,
0.253392392889754e-1, -0.214443041836579e3, 0.937846601489667,
0.223184043101700e1, 0.338401222509191e2, 0.494237237179718e21,
-0.198068404154428, -0.141415349881140e31, -0.993862421613651e2,
0.125070534142731e3, -0.996473529004439e3, 0.473137909872765e5,
0.116662121219322e33, -0.315874976271533e16,
-0.445703369196945e33, 0.642794932373694e33],
"m": [0.811384363481847, -0.568199310990094e4, -0.178657198172556e11,
0.795537657613427e32, -0.814568209346872e5, -0.659774567602874e8,
-.152861148659302e11, -0.560165667510446e12, 0.458384828593949e6,
-0.385754000383848e14, 0.453735800004273e8, 0.939454935735563e12,
.266572856432938e28, -0.547578313899097e10, 0.200725701112386e15,
0.185007245563239e13, 0.185135446828337e9, -0.170451090076385e12,
0.157890366037614e15, -0.202530509748774e16, 0.36819392618357e60,
0.170215539458936e18, 0.639234909918741e42, -.821698160721956e15,
-.795260241872306e24, 0.23341586947851e18, -0.600079934586803e23,
0.594584382273384e25, 0.189461279349492e40, -.810093428842645e46,
0.188813911076809e22, 0.111052244098768e36, 0.291133958602503e46,
-.329421923951460e22, -.137570282536696e26, 0.181508996303902e28,
-.346865122768353e30, -.21196114877426e38, -0.128617899887675e49,
0.479817895699239e65],
"n": [.280967799943151e-38, .614869006573609e-30, .582238667048942e-27,
.390628369238462e-22, .821445758255119e-20, .402137961842776e-14,
.651718171878301e-12, -.211773355803058e-7, 0.264953354380072e-2,
-.135031446451331e-31, -.607246643970893e-23,
-.402352115234494e-18, -.744938506925544e-16,
.189917206526237e-12, .364975183508473e-5, .177274872361946e-25,
-.334952758812999e-18, -.421537726098389e-8,
-.391048167929649e-1, .541276911564176e-13, .705412100773699e-11,
.258585887897486e-8, -.493111362030162e-10, -.158649699894543e-5,
-0.525037427886100, 0.220019901729615e-2, -0.643064132636925e-2,
0.629154149015048e2, 0.135147318617061e3, 0.240560808321713e-6,
-.890763306701305e-3, -0.440209599407714e4, -0.302807107747776e3,
0.159158748314599e4, 0.232534272709876e6, -0.792681207132600e6,
-.869871364662769e11, .354542769185671e12, 0.400849240129329e15],
"o": [.128746023979718e-34, -.735234770382342e-11, .28907869214915e-2,
0.244482731907223, 0.141733492030985e-23, -0.354533853059476e-28,
-.594539202901431e-17, -.585188401782779e-8, .201377325411803e-5,
0.138647388209306e1, -0.173959365084772e-4, 0.137680878349369e-2,
.814897605805513e-14, .425596631351839e-25,
-.387449113787755e-17, .13981474793024e-12, -.171849638951521e-2,
0.641890529513296e-21, .118960578072018e-10,
-.155282762571611e-17, .233907907347507e-7,
-.174093247766213e-12, .377682649089149e-8,
-.516720236575302e-10],
"p": [-0.982825342010366e-4, 0.105145700850612e1, 0.116033094095084e3,
0.324664750281543e4, -0.123592348610137e4, -0.561403450013495e-1,
0.856677401640869e-7, 0.236313425393924e3, 0.972503292350109e-2,
-.103001994531927e1, -0.149653706199162e-8, -.215743778861592e-4,
-0.834452198291445e1, 0.586602660564988, 0.343480022104968e-25,
.816256095947021e-5, .294985697916798e-2, 0.711730466276584e-16,
0.400954763806941e-9, 0.107766027032853e2, -0.409449599138182e-6,
-.729121307758902e-5, 0.677107970938909e-8, 0.602745973022975e-7,
-.382323011855257e-10, .179946628317437e-2,
-.345042834640005e-3],
"q": [-0.820433843259950e5, 0.473271518461586e11, -.805950021005413e-1,
0.328600025435980e2, -0.35661702998249e4, -0.172985781433335e10,
0.351769232729192e8, -0.775489259985144e6, 0.710346691966018e-4,
0.993499883820274e5, -0.642094171904570, -0.612842816820083e4,
.232808472983776e3, -0.142808220416837e-4, -0.643596060678456e-2,
-0.428577227475614e1, 0.225689939161918e4, 0.100355651721510e-2,
0.333491455143516, 0.109697576888873e1, 0.961917379376452,
-0.838165632204598e-1, 0.247795908411492e1, -.319114969006533e4],
"r": [.144165955660863e-2, -.701438599628258e13, -.830946716459219e-16,
0.261975135368109, 0.393097214706245e3, -0.104334030654021e5,
0.490112654154211e9, -0.147104222772069e-3, 0.103602748043408e1,
0.305308890065089e1, -0.399745276971264e7, 0.569233719593750e-11,
-.464923504407778e-1, -.535400396512906e-17,
.399988795693162e-12, -.536479560201811e-6, .159536722411202e-1,
.270303248860217e-14, .244247453858506e-7, -0.983430636716454e-5,
0.663513144224454e-1, -0.993456957845006e1, 0.546491323528491e3,
-0.143365406393758e5, 0.150764974125511e6, -.337209709340105e-9,
0.377501980025469e-8],
"s": [-0.532466612140254e23, .100415480000824e32, -.191540001821367e30,
0.105618377808847e17, 0.202281884477061e59, 0.884585472596134e8,
0.166540181638363e23, -0.313563197669111e6, -.185662327545324e54,
-.624942093918942e-1, -0.50416072413259e10, 0.187514491833092e5,
0.121399979993217e-2, 0.188317043049455e1, -0.167073503962060e4,
0.965961650599775, 0.294885696802488e1, -0.653915627346115e5,
0.604012200163444e50, -0.198339358557937, -0.175984090163501e58,
0.356314881403987e1, -0.575991255144384e3, 0.456213415338071e5,
-.109174044987829e8, 0.437796099975134e34, -0.616552611135792e46,
0.193568768917797e10, 0.950898170425042e54],
"t": [0.155287249586268e1, 0.664235115009031e1, -0.289366236727210e4,
-0.385923202309848e13, -.291002915783761e1, -.829088246858083e12,
0.176814899675218e1, -0.534686695713469e9, 0.160464608687834e18,
0.196435366560186e6, 0.156637427541729e13, -0.178154560260006e1,
-0.229746237623692e16, 0.385659001648006e8, 0.110554446790543e10,
-.677073830687349e14, -.327910592086523e31, -.341552040860644e51,
-.527251339709047e21, .245375640937055e24, -0.168776617209269e27,
.358958955867578e29, -0.656475280339411e36, 0.355286045512301e39,
.569021454413270e58, -.700584546433113e48, -0.705772623326374e65,
0.166861176200148e53, -.300475129680486e61, -.668481295196808e51,
.428432338620678e69, -.444227367758304e72, -.281396013562745e77],
"u": [0.122088349258355e18, 0.104216468608488e10, -.882666931564652e16,
.259929510849499e20, 0.222612779142211e15, -0.878473585050085e18,
-0.314432577551552e22, -.216934916996285e13, .159079648196849e21,
-.339567617303423e3, 0.884387651337836e13, -0.843405926846418e21,
0.114178193518022e2, -0.122708229235641e-3, -0.106201671767107e3,
.903443213959313e25, -0.693996270370852e28, 0.648916718965575e-8,
0.718957567127851e4, 0.105581745346187e-2, -0.651903203602581e15,
-0.160116813274676e25, -0.510254294237837e-8, -0.152355388953402,
0.677143292290144e12, 0.276378438378930e15, 0.116862983141686e-1,
-.301426947980171e14, 0.169719813884840e-7, 0.104674840020929e27,
-0.10801690456014e5, -0.990623601934295e-12, 0.536116483602738e7,
.226145963747881e22, -0.488731565776210e-9, 0.151001548880670e-4,
-0.227700464643920e5, -0.781754507698846e28],
"v": [-.415652812061591e-54, .177441742924043e-60,
-.357078668203377e-54, 0.359252213604114e-25,
-0.259123736380269e2, 0.594619766193460e5, -0.624184007103158e11,
0.313080299915944e17, .105006446192036e-8, -0.192824336984852e-5,
0.654144373749937e6, 0.513117462865044e13, -.697595750347391e19,
-.103977184454767e29, .119563135540666e-47,
-.436677034051655e-41, .926990036530639e-29, .587793105620748e21,
.280375725094731e-17, -0.192359972440634e23, .742705723302738e27,
-0.517429682450605e2, 0.820612048645469e7, -0.188214882341448e-8,
.184587261114837e-1, -0.135830407782663e-5, -.723681885626348e17,
-.223449194054124e27, -.111526741826431e-34,
.276032601145151e-28, 0.134856491567853e15, 0.652440293345860e-9,
0.510655119774360e17, -.468138358908732e32, -.760667491183279e16,
-.417247986986821e-18, 0.312545677756104e14,
-.100375333864186e15, .247761392329058e27],
"w": [-.586219133817016e-7, -.894460355005526e11, .531168037519774e-30,
0.109892402329239, -0.575368389425212e-1, 0.228276853990249e5,
-.158548609655002e19, .329865748576503e-27,
-.634987981190669e-24, 0.615762068640611e-8, -.961109240985747e8,
-.406274286652625e-44, -0.471103725498077e-12, 0.725937724828145,
.187768525763682e-38, -.103308436323771e4, -0.662552816342168e-1,
0.579514041765710e3, .237416732616644e-26, .271700235739893e-14,
-0.9078862134836e2, -0.171242509570207e-36, 0.156792067854621e3,
0.923261357901470, -0.597865988422577e1, 0.321988767636389e7,
-.399441390042203e-29, .493429086046981e-7, .812036983370565e-19,
-.207610284654137e-11, -.340821291419719e-6,
.542000573372233e-17, -.856711586510214e-12,
0.266170454405981e-13, 0.858133791857099e-5],
"x": [.377373741298151e19, -.507100883722913e13, -0.10336322559886e16,
.184790814320773e-5, -.924729378390945e-3, -0.425999562292738e24,
-.462307771873973e-12, .107319065855767e22, 0.648662492280682e11,
0.244200600688281e1, -0.851535733484258e10, 0.169894481433592e22,
0.215780222509020e-26, -0.320850551367334, -0.382642448458610e17,
-.275386077674421e-28, -.563199253391666e6, -.326068646279314e21,
0.397949001553184e14, 0.100824008584757e-6, 0.162234569738433e5,
-0.432355225319745e11, -.59287424559861e12, 0.133061647281106e1,
0.157338197797544e7, 0.258189614270853e14, 0.262413209706358e25,
-.920011937431142e-1, 0.220213765905426e-2, -0.110433759109547e2,
0.847004870612087e7, -0.592910695762536e9, -0.183027173269660e-4,
0.181339603516302, -0.119228759669889e4, 0.430867658061468e7],
"y": [-0.525597995024633e-9, 0.583441305228407e4, -.134778968457925e17,
.118973500934212e26, -0.159096490904708e27, -.315839902302021e-6,
0.496212197158239e3, 0.327777227273171e19, -0.527114657850696e22,
.210017506281863e-16, 0.705106224399834e21, -.266713136106469e31,
-0.145370512554562e-7, 0.149333917053130e28, -.149795620287641e8,
-.3818819062711e16, 0.724660165585797e-4, -0.937808169550193e14,
0.514411468376383e10, -0.828198594040141e5],
"z": [0.24400789229065e-10, -0.463057430331242e7, 0.728803274777712e10,
.327776302858856e16, -.110598170118409e10, -0.323899915729957e13,
.923814007023245e16, 0.842250080413712e-12, 0.663221436245506e12,
-.167170186672139e15, .253749358701391e4, -0.819731559610523e-20,
0.328380587890663e12, -0.625004791171543e8, 0.803197957462023e21,
-.204397011338353e-10, -.378391047055938e4, 0.97287654593862e-2,
0.154355721681459e2, -0.373962862928643e4, -0.682859011374572e11,
-0.248488015614543e-3, 0.394536049497068e7]}
I = I[x]
J = J[x]
n = n[x]
v_, P_, T_, a, b, c, d, e = par[x]
Pr = P/P_
Tr = T/T_
suma = 0
if x == "n":
for i, j, ni in zip(I, J, n):
suma += ni * (Pr-a)**i * (Tr-b)**j
return v_*exp(suma)
else:
for i, j, ni in zip(I, J, n):
suma += ni * (Pr-a)**(c*i) * (Tr-b)**(j*d)
return v_*suma**e | Backward equation for region 3x, v=f(P,T)
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
x : char
Region 3 subregion code
Returns
-------
v : float
Specific volume, [m³/kg]
References
----------
IAPWS, Revised Supplementary Release on Backward Equations for Specific
Volume as a Function of Pressure and Temperature v(p,T) for Region 3 of the
IAPWS Industrial Formulation 1997 for the Thermodynamic Properties of Water
and Steam, http://www.iapws.org/relguide/Supp-VPT3-2016.pdf, Eq. 4-5
Examples
--------
>>> _Backward3x_v_PT(630,50,"a")
0.001470853100
>>> _Backward3x_v_PT(670,80,"a")
0.001503831359
>>> _Backward3x_v_PT(710,50,"b")
0.002204728587
>>> _Backward3x_v_PT(750,80,"b")
0.001973692940
>>> _Backward3x_v_PT(630,20,"c")
0.001761696406
>>> _Backward3x_v_PT(650,30,"c")
0.001819560617
>>> _Backward3x_v_PT(656,26,"d")
0.002245587720
>>> _Backward3x_v_PT(670,30,"d")
0.002506897702
>>> _Backward3x_v_PT(661,26,"e")
0.002970225962
>>> _Backward3x_v_PT(675,30,"e")
0.003004627086
>>> _Backward3x_v_PT(671,26,"f")
0.005019029401
>>> _Backward3x_v_PT(690,30,"f")
0.004656470142
>>> _Backward3x_v_PT(649,23.6,"g")
0.002163198378
>>> _Backward3x_v_PT(650,24,"g")
0.002166044161
>>> _Backward3x_v_PT(652,23.6,"h")
0.002651081407
>>> _Backward3x_v_PT(654,24,"h")
0.002967802335
>>> _Backward3x_v_PT(653,23.6,"i")
0.003273916816
>>> _Backward3x_v_PT(655,24,"i")
0.003550329864
>>> _Backward3x_v_PT(655,23.5,"j")
0.004545001142
>>> _Backward3x_v_PT(660,24,"j")
0.005100267704
>>> _Backward3x_v_PT(660,23,"k")
0.006109525997
>>> _Backward3x_v_PT(670,24,"k")
0.006427325645
>>> _Backward3x_v_PT(646,22.6,"l")
0.002117860851
>>> _Backward3x_v_PT(646,23,"l")
0.002062374674
>>> _Backward3x_v_PT(648.6,22.6,"m")
0.002533063780
>>> _Backward3x_v_PT(649.3,22.8,"m")
0.002572971781
>>> _Backward3x_v_PT(649,22.6,"n")
0.002923432711
>>> _Backward3x_v_PT(649.7,22.8,"n")
0.002913311494
>>> _Backward3x_v_PT(649.1,22.6,"o")
0.003131208996
>>> _Backward3x_v_PT(649.9,22.8,"o")
0.003221160278
>>> _Backward3x_v_PT(649.4,22.6,"p")
0.003715596186
>>> _Backward3x_v_PT(650.2,22.8,"p")
0.003664754790
>>> _Backward3x_v_PT(640,21.1,"q")
0.001970999272
>>> _Backward3x_v_PT(643,21.8,"q")
0.002043919161
>>> _Backward3x_v_PT(644,21.1,"r")
0.005251009921
>>> _Backward3x_v_PT(648,21.8,"r")
0.005256844741
>>> _Backward3x_v_PT(635,19.1,"s")
0.001932829079
>>> _Backward3x_v_PT(638,20,"s")
0.001985387227
>>> _Backward3x_v_PT(626,17,"t")
0.008483262001
>>> _Backward3x_v_PT(640,20,"t")
0.006227528101
>>> _Backward3x_v_PT(644.6,21.5,"u")
0.002268366647
>>> _Backward3x_v_PT(646.1,22,"u")
0.002296350553
>>> _Backward3x_v_PT(648.6,22.5,"v")
0.002832373260
>>> _Backward3x_v_PT(647.9,22.3,"v")
0.002811424405
>>> _Backward3x_v_PT(647.5,22.15,"w")
0.003694032281
>>> _Backward3x_v_PT(648.1,22.3,"w")
0.003622226305
>>> _Backward3x_v_PT(648,22.11,"x")
0.004528072649
>>> _Backward3x_v_PT(649,22.3,"x")
0.004556905799
>>> _Backward3x_v_PT(646.84,22,"y")
0.002698354719
>>> _Backward3x_v_PT(647.05,22.064,"y")
0.002717655648
>>> _Backward3x_v_PT(646.89,22,"z")
0.003798732962
>>> _Backward3x_v_PT(647.15,22.064,"z")
0.003701940009 | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/iapws97.py#L2964-L3569 |
def _Region4(P, x):
    """Basic equation for region 4 (two-phase region, saturation line).

    Parameters
    ----------
    P : float
        Pressure, [MPa]
    x : float
        Vapor quality, [-]

    Returns
    -------
    prop : dict
        Dict with calculated properties. The available properties are:

            * T: Saturated temperature, [K]
            * P: Saturated pressure, [MPa]
            * x: Vapor quality, [-]
            * v: Specific volume, [m³/kg]
            * h: Specific enthalpy, [kJ/kg]
            * s: Specific entropy, [kJ/kgK]
    """
    T = _TSat_P(P)

    # Above 623.15 K the saturated phases lie inside region 3, so both
    # phase states come from the region 3 equation evaluated at the
    # saturated liquid/vapor specific volumes; below, regions 1 and 2
    # give the liquid and vapor states directly.
    if T > 623.15:
        liq = _Region3(1./_Backward3_sat_v_P(P, T, 0), T)
        vap = _Region3(1./_Backward3_sat_v_P(P, T, 1), T)
    else:
        liq = _Region1(T, P)
        vap = _Region2(T, P)

    prop = {"T": T, "P": P, "x": x, "region": 4}

    # Two-phase mixture properties: linear combination in the quality x
    for key in ("v", "h", "s"):
        prop[key] = liq[key] + x*(vap[key]-liq[key])

    # These derivative properties are not defined in the two-phase region
    for key in ("cp", "cv", "w", "alfav", "kt"):
        prop[key] = None

    return prop
def _Bound_TP(T, P):
    """Region definition for input T and P

    Parameters
    ----------
    T : float
        Temperature, [K]
    P : float
        Pressure, [MPa]

    Returns
    -------
    region : float
        IAPWS-97 region code, or None if (T, P) is outside every region

    References
    ----------
    Wagner, W; Kretzschmar, H-J: International Steam Tables: Properties of
    Water and Steam Based on the Industrial Formulation IAPWS-IF97; Springer,
    2008; doi: 10.1007/978-3-540-74234-0. Fig. 2.3
    """
    # High-temperature extension, valid only up to 50 MPa
    if 1073.15 < T <= 2273.15 and Pmin <= P <= 50:
        return 5

    if Pmin <= P <= Ps_623:
        # Below the saturation pressure at 623.15 K the 1/2 boundary is
        # the saturation line itself
        Tsat = _TSat_P(P)
        if 273.15 <= T <= Tsat:
            return 1
        if Tsat < T <= 1073.15:
            return 2
    elif Ps_623 < P <= 100:
        # Above it, region 3 sits between 623.15 K and the B23 line
        Tb23 = _t_P(P)
        if 273.15 <= T <= 623.15:
            return 1
        if 623.15 < T < Tb23:
            return 3
        if Tb23 <= T <= 1073.15:
            return 2

    # Outside the validity range of the formulation
    return None
def _Bound_Ph(P, h):
    """Region definition for input P and h

    Parameters
    ----------
    P : float
        Pressure, [MPa]
    h : float
        Specific enthalpy, [kJ/kg]

    Returns
    -------
    region : float
        IAPWS-97 region code, or None if (P, h) is outside every region

    References
    ----------
    Wagner, W; Kretzschmar, H-J: International Steam Tables: Properties of
    Water and Steam Based on the Industrial Formulation IAPWS-IF97; Springer,
    2008; doi: 10.1007/978-3-540-74234-0. Fig. 2.5
    """
    region = None
    if Pmin <= P <= Ps_623:
        # Pressure below the saturation pressure at 623.15 K: increasing
        # enthalpy crosses regions 1 -> 4 (two-phase) -> 2 -> 5
        h14 = _Region1(_TSat_P(P), P)["h"]   # saturated liquid enthalpy
        h24 = _Region2(_TSat_P(P), P)["h"]   # saturated vapor enthalpy
        h25 = _Region2(1073.15, P)["h"]      # region 2/5 boundary
        hmin = _Region1(273.15, P)["h"]
        hmax = _Region5(2273.15, P)["h"]
        if hmin <= h <= h14:
            region = 1
        elif h14 < h < h24:
            region = 4
        elif h24 <= h <= h25:
            region = 2
        elif h25 < h <= hmax:
            region = 5
    elif Ps_623 < P < Pc:
        # Subcritical pressure above Ps_623: region 3 appears between
        # regions 1 and 2, and still contains part of the two-phase dome
        hmin = _Region1(273.15, P)["h"]
        h13 = _Region1(623.15, P)["h"]       # region 1/3 boundary
        h32 = _Region2(_t_P(P), P)["h"]      # region 3/2 boundary (B23 line)
        h25 = _Region2(1073.15, P)["h"]
        hmax = _Region5(2273.15, P)["h"]
        if hmin <= h <= h13:
            region = 1
        elif h13 < h < h32:
            # Inside the region 3 enthalpy band: discriminate single-phase
            # (3) from the two-phase dome (4) via the saturation pressure
            try:
                p34 = _PSat_h(h)
            except NotImplementedError:
                # Backward equation not applicable here; fall back to the
                # critical pressure as the discriminating limit
                p34 = Pc
            if P < p34:
                region = 4
            else:
                region = 3
        elif h32 <= h <= h25:
            region = 2
        elif h25 < h <= hmax:
            region = 5
    elif Pc <= P <= 100:
        # Supercritical pressure: no two-phase region, 1 -> 3 -> 2 -> 5
        hmin = _Region1(273.15, P)["h"]
        h13 = _Region1(623.15, P)["h"]
        h32 = _Region2(_t_P(P), P)["h"]
        h25 = _Region2(1073.15, P)["h"]
        hmax = _Region5(2273.15, P)["h"]
        if hmin <= h <= h13:
            region = 1
        elif h13 < h < h32:
            region = 3
        elif h32 <= h <= h25:
            region = 2
        elif P <= 50 and h25 <= h <= hmax:
            # Region 5 is only defined up to 50 MPa
            region = 5
    return region
def _Bound_Ps(P, s):
    """Region definition for input P and s

    Parameters
    ----------
    P : float
        Pressure, [MPa]
    s : float
        Specific entropy, [kJ/kgK]

    Returns
    -------
    region : float
        IAPWS-97 region code, or None if (P, s) is outside every region

    References
    ----------
    Wagner, W; Kretzschmar, H-J: International Steam Tables: Properties of
    Water and Steam Based on the Industrial Formulation IAPWS-IF97; Springer,
    2008; doi: 10.1007/978-3-540-74234-0. Fig. 2.9
    """
    region = None
    if Pmin <= P <= Ps_623:
        # Pressure below the saturation pressure at 623.15 K: increasing
        # entropy crosses regions 1 -> 4 (two-phase) -> 2 -> 5
        smin = _Region1(273.15, P)["s"]
        s14 = _Region1(_TSat_P(P), P)["s"]   # saturated liquid entropy
        s24 = _Region2(_TSat_P(P), P)["s"]   # saturated vapor entropy
        s25 = _Region2(1073.15, P)["s"]      # region 2/5 boundary
        smax = _Region5(2273.15, P)["s"]
        if smin <= s <= s14:
            region = 1
        elif s14 < s < s24:
            region = 4
        elif s24 <= s <= s25:
            region = 2
        elif s25 < s <= smax:
            region = 5
    elif Ps_623 < P < Pc:
        # Subcritical pressure above Ps_623: region 3 appears between
        # regions 1 and 2, and still contains part of the two-phase dome
        smin = _Region1(273.15, P)["s"]
        s13 = _Region1(623.15, P)["s"]       # region 1/3 boundary
        s32 = _Region2(_t_P(P), P)["s"]      # region 3/2 boundary (B23 line)
        s25 = _Region2(1073.15, P)["s"]
        smax = _Region5(2273.15, P)["s"]
        if smin <= s <= s13:
            region = 1
        elif s13 < s < s32:
            # Inside the region 3 entropy band: discriminate single-phase
            # (3) from the two-phase dome (4) via the saturation pressure
            try:
                p34 = _PSat_s(s)
            except NotImplementedError:
                # Backward equation not applicable here; fall back to the
                # critical pressure as the discriminating limit
                p34 = Pc
            if P < p34:
                region = 4
            else:
                region = 3
        elif s32 <= s <= s25:
            region = 2
        elif s25 < s <= smax:
            region = 5
    elif Pc <= P <= 100:
        # Supercritical pressure: no two-phase region, 1 -> 3 -> 2 -> 5
        smin = _Region1(273.15, P)["s"]
        s13 = _Region1(623.15, P)["s"]
        s32 = _Region2(_t_P(P), P)["s"]
        s25 = _Region2(1073.15, P)["s"]
        smax = _Region5(2273.15, P)["s"]
        if smin <= s <= s13:
            region = 1
        elif s13 < s < s32:
            region = 3
        elif s32 <= s <= s25:
            region = 2
        elif P <= 50 and s25 <= s <= smax:
            # Region 5 is only defined up to 50 MPa
            region = 5
    return region
def _Bound_hs(h, s):
    """Region definition for input h and s

    Parameters
    ----------
    h : float
        Specific enthalpy, [kJ/kg]
    s : float
        Specific entropy, [kJ/kgK]

    Returns
    -------
    region : float
        IAPWS-97 region code, or None if (h, s) is outside every region

    References
    ----------
    Wagner, W; Kretzschmar, H-J: International Steam Tables: Properties of
    Water and Steam Based on the Industrial Formulation IAPWS-IF97; Springer,
    2008; doi: 10.1007/978-3-540-74234-0. Fig. 2.14
    """
    region = None
    # Characteristic entropies delimiting the vertical bands of Fig. 2.14
    s13 = _Region1(623.15, 100)["s"]      # region 1/3 limit at 100 MPa
    s13s = _Region1(623.15, Ps_623)["s"]  # region 1/3 limit at Ps(623.15 K)
    sTPmax = _Region2(1073.15, 100)["s"]  # upper T-P corner of region 2
    s2ab = _Region2(1073.15, 4)["s"]      # region 2a/2b dividing isobar (4 MPa)
    # Left point in h-s plot
    smin = _Region1(273.15, 100)["s"]
    hmin = _Region1(273.15, Pmin)["h"]
    # Right point in h-s plot
    _Pmax = _Region2(1073.15, Pmin)
    hmax = _Pmax["h"]
    smax = _Pmax["s"]
    # Region 4 left and right point (saturation states at 273.15 K)
    _sL = _Region1(273.15, Pmin)
    h4l = _sL["h"]
    s4l = _sL["s"]
    _sV = _Region2(273.15, Pmin)
    h4v = _sV["h"]
    s4v = _sV["s"]
    if smin <= s <= s13:
        # Band covering region 1; below the saturated liquid line lies 4.
        # hmin is the 273.15 K isotherm, linearly interpolated in s.
        hmin = h4l+(s-s4l)/(s4v-s4l)*(h4v-h4l)
        hs = _h1_s(s)                       # saturated liquid enthalpy h'(s)
        # 0.0218 K margin, presumably covering the backward-equation
        # tolerance — TODO confirm against the reference
        T = _Backward1_T_Ps(100, s)-0.0218
        hmax = _Region1(T, 100)["h"]
        if hmin <= h < hs:
            region = 4
        elif hs <= h <= hmax:
            region = 1
    elif s13 < s <= s13s:
        # Band where the 100 MPa isobar crosses from region 1 into region 3
        hmin = h4l+(s-s4l)/(s4v-s4l)*(h4v-h4l)
        hs = _h1_s(s)
        h13 = _h13_s(s)                     # region 1/3 boundary enthalpy
        # Small v and T offsets, presumably backward-equation tolerances
        v = _Backward3_v_Ps(100, s)*(1+9.6e-5)
        T = _Backward3_T_Ps(100, s)-0.0248
        hmax = _Region3(1/v, T)["h"]
        if hmin <= h < hs:
            region = 4
        elif hs <= h < h13:
            region = 1
        elif h13 <= h <= hmax:
            region = 3
    elif s13s < s <= sc:
        # Liquid-side region 3 band, up to the critical entropy
        hmin = h4l+(s-s4l)/(s4v-s4l)*(h4v-h4l)
        hs = _h3a_s(s)                      # saturated liquid line h'(s), 3a
        v = _Backward3_v_Ps(100, s)*(1+9.6e-5)
        T = _Backward3_T_Ps(100, s)-0.0248
        hmax = _Region3(1/v, T)["h"]
        if hmin <= h < hs:
            region = 4
        elif hs <= h <= hmax:
            region = 3
    elif sc < s < 5.049096828:
        # Vapor-side region 3 band, bounded below by h''(s) (2c3b equation)
        hmin = h4l+(s-s4l)/(s4v-s4l)*(h4v-h4l)
        hs = _h2c3b_s(s)
        v = _Backward3_v_Ps(100, s)*(1+9.6e-5)
        T = _Backward3_T_Ps(100, s)-0.0248
        hmax = _Region3(1/v, T)["h"]
        if hmin <= h < hs:
            region = 4
        elif hs <= h <= hmax:
            region = 3
    elif 5.049096828 <= s < 5.260578707:
        # Specific zone with 2-3 boundary in s shape
        hmin = h4l+(s-s4l)/(s4v-s4l)*(h4v-h4l)
        hs = _h2c3b_s(s)
        h23max = _Region2(863.15, 100)["h"]  # B23 line, upper corner
        h23min = _Region2(623.15, Ps_623)["h"]  # B23 line, lower corner
        T = _Backward2_T_Ps(100, s)-0.019
        hmax = _Region2(T, 100)["h"]
        if hmin <= h < hs:
            region = 4
        elif hs <= h < h23min:
            region = 3
        elif h23min <= h < h23max:
            # Between the B23 corners the 2/3 split needs the pressure at
            # the B23 temperature for this (h, s) point
            if _Backward2c_P_hs(h, s) <= _P23_T(_t_hs(h, s)):
                region = 2
            else:
                region = 3
        elif h23max <= h <= hmax:
            region = 2
    elif 5.260578707 <= s < 5.85:
        # Region 2c band above the two-phase dome
        hmin = h4l+(s-s4l)/(s4v-s4l)*(h4v-h4l)
        hs = _h2c3b_s(s)
        T = _Backward2_T_Ps(100, s)-0.019
        hmax = _Region2(T, 100)["h"]
        if hmin <= h < hs:
            region = 4
        elif hs <= h <= hmax:
            region = 2
    elif 5.85 <= s < sTPmax:
        # Region 2a/2b band; saturated vapor line given by the 2ab equation
        hmin = h4l+(s-s4l)/(s4v-s4l)*(h4v-h4l)
        hs = _h2ab_s(s)
        T = _Backward2_T_Ps(100, s)-0.019
        hmax = _Region2(T, 100)["h"]
        if hmin <= h < hs:
            region = 4
        elif hs <= h <= hmax:
            region = 2
    elif sTPmax <= s < s2ab:
        # Past the 100 MPa / 1073.15 K corner the upper limit follows the
        # 1073.15 K isotherm at the backward-calculated pressure
        hmin = h4l+(s-s4l)/(s4v-s4l)*(h4v-h4l)
        hs = _h2ab_s(s)
        P = _Backward2_P_hs(h, s)
        hmax = _Region2(1073.15, P)["h"]
        if hmin <= h < hs:
            region = 4
        elif hs <= h <= hmax:
            region = 2
    elif s2ab <= s < s4v:
        # Same bounding as the previous band, inside sub-region 2a/2b
        hmin = h4l+(s-s4l)/(s4v-s4l)*(h4v-h4l)
        hs = _h2ab_s(s)
        P = _Backward2_P_hs(h, s)
        hmax = _Region2(1073.15, P)["h"]
        if hmin <= h < hs:
            region = 4
        elif hs <= h <= hmax:
            region = 2
    elif s4v <= s <= smax:
        # Right of the 273.15 K saturated vapor point: pure region 2,
        # bounded by the low-pressure limit and the 1073.15 K isotherm
        hmin = _Region2(273.15, Pmin)["h"]
        P = _Backward2a_P_hs(h, s)
        hmax = _Region2(1073.15, P)["h"]
        if Pmin <= P <= 100 and hmin <= h <= hmax:
            region = 2
    # Check region 5: no explicit (h, s) boundary equations, so solve
    # (T, P) numerically and test the region 5 validity window
    if not region and \
            _Region5(1073.15, 50)["s"] < s <= _Region5(2273.15, Pmin)["s"] \
            and _Region5(1073.15, 50)["h"] < h <= _Region5(2273.15, Pmin)["h"]:
        def funcion(par):
            return (_Region5(par[0], par[1])["h"]-h,
                    _Region5(par[0], par[1])["s"]-s)
        T, P = fsolve(funcion, [1400, 1])
        if 1073.15 < T <= 2273.15 and Pmin <= P <= 50:
            region = 5
    return region
jjgomera/iapws | iapws/iapws97.py | prop0 | def prop0(T, P):
"""Ideal gas properties
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
Returns
-------
prop : dict
Dict with calculated properties. The available properties are:
* v: Specific volume, [m³/kg]
* h: Specific enthalpy, [kJ/kg]
* s: Specific entropy, [kJ/kgK]
* cp: Specific isobaric heat capacity, [kJ/kgK]
* cv: Specific isocoric heat capacity, [kJ/kgK]
* w: Speed of sound, [m/s]
* alfav: Cubic expansion coefficient, [1/K]
* kt: Isothermal compressibility, [1/MPa]
"""
if T <= 1073.15:
Tr = 540/T
Pr = P/1.
go, gop, gopp, got, gott, gopt = Region2_cp0(Tr, Pr)
else:
Tr = 1000/T
Pr = P/1.
go, gop, gopp, got, gott, gopt = Region5_cp0(Tr, Pr)
prop0 = {}
prop0["v"] = Pr*gop*R*T/P/1000
prop0["h"] = Tr*got*R*T
prop0["s"] = R*(Tr*got-go)
prop0["cp"] = -R*Tr**2*gott
prop0["cv"] = R*(-Tr**2*gott-1)
prop0["w"] = (R*T*1000/(1+1/Tr**2/gott))**0.5
prop0["alfav"] = 1/T
prop0["xkappa"] = 1/P
return prop0 | python | def prop0(T, P):
"""Ideal gas properties
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
Returns
-------
prop : dict
Dict with calculated properties. The available properties are:
* v: Specific volume, [m³/kg]
* h: Specific enthalpy, [kJ/kg]
* s: Specific entropy, [kJ/kgK]
* cp: Specific isobaric heat capacity, [kJ/kgK]
* cv: Specific isocoric heat capacity, [kJ/kgK]
* w: Speed of sound, [m/s]
* alfav: Cubic expansion coefficient, [1/K]
* kt: Isothermal compressibility, [1/MPa]
"""
if T <= 1073.15:
Tr = 540/T
Pr = P/1.
go, gop, gopp, got, gott, gopt = Region2_cp0(Tr, Pr)
else:
Tr = 1000/T
Pr = P/1.
go, gop, gopp, got, gott, gopt = Region5_cp0(Tr, Pr)
prop0 = {}
prop0["v"] = Pr*gop*R*T/P/1000
prop0["h"] = Tr*got*R*T
prop0["s"] = R*(Tr*got-go)
prop0["cp"] = -R*Tr**2*gott
prop0["cv"] = R*(-Tr**2*gott-1)
prop0["w"] = (R*T*1000/(1+1/Tr**2/gott))**0.5
prop0["alfav"] = 1/T
prop0["xkappa"] = 1/P
return prop0 | Ideal gas properties
Parameters
----------
T : float
Temperature, [K]
P : float
Pressure, [MPa]
Returns
-------
prop : dict
Dict with calculated properties. The available properties are:
* v: Specific volume, [m³/kg]
* h: Specific enthalpy, [kJ/kg]
* s: Specific entropy, [kJ/kgK]
* cp: Specific isobaric heat capacity, [kJ/kgK]
* cv: Specific isocoric heat capacity, [kJ/kgK]
* w: Speed of sound, [m/s]
* alfav: Cubic expansion coefficient, [1/K]
* kt: Isothermal compressibility, [1/MPa] | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/iapws97.py#L4174-L4217 |
jjgomera/iapws | iapws/iapws97.py | IAPWS97.calculable | def calculable(self):
"""Check if class is calculable by its kwargs"""
self._thermo = ""
if self.kwargs["T"] and self.kwargs["P"]:
self._thermo = "TP"
elif self.kwargs["P"] and self.kwargs["h"] is not None:
self._thermo = "Ph"
elif self.kwargs["P"] and self.kwargs["s"] is not None:
self._thermo = "Ps"
# TODO: Add other pairs definitions options
# elif self.kwargs["P"] and self.kwargs["v"]:
# self._thermo = "Pv"
# elif self.kwargs["T"] and self.kwargs["s"] is not None:
# self._thermo = "Ts"
elif self.kwargs["h"] is not None and self.kwargs["s"] is not None:
self._thermo = "hs"
elif self.kwargs["T"] and self.kwargs["x"] is not None:
self._thermo = "Tx"
elif self.kwargs["P"] and self.kwargs["x"] is not None:
self._thermo = "Px"
return self._thermo | python | def calculable(self):
"""Check if class is calculable by its kwargs"""
self._thermo = ""
if self.kwargs["T"] and self.kwargs["P"]:
self._thermo = "TP"
elif self.kwargs["P"] and self.kwargs["h"] is not None:
self._thermo = "Ph"
elif self.kwargs["P"] and self.kwargs["s"] is not None:
self._thermo = "Ps"
# TODO: Add other pairs definitions options
# elif self.kwargs["P"] and self.kwargs["v"]:
# self._thermo = "Pv"
# elif self.kwargs["T"] and self.kwargs["s"] is not None:
# self._thermo = "Ts"
elif self.kwargs["h"] is not None and self.kwargs["s"] is not None:
self._thermo = "hs"
elif self.kwargs["T"] and self.kwargs["x"] is not None:
self._thermo = "Tx"
elif self.kwargs["P"] and self.kwargs["x"] is not None:
self._thermo = "Px"
return self._thermo | Check if class is calculable by its kwargs | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/iapws97.py#L4341-L4361 |
jjgomera/iapws | iapws/iapws97.py | IAPWS97.derivative | def derivative(self, z, x, y, fase):
"""Wrapper derivative for custom derived properties
where x, y, z can be: P, T, v, u, h, s, g, a"""
return deriv_G(self, z, x, y, fase) | python | def derivative(self, z, x, y, fase):
"""Wrapper derivative for custom derived properties
where x, y, z can be: P, T, v, u, h, s, g, a"""
return deriv_G(self, z, x, y, fase) | Wrapper derivative for custom derived properties
where x, y, z can be: P, T, v, u, h, s, g, a | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/iapws97.py#L4703-L4706 |
jjgomera/iapws | iapws/ammonia.py | Ttr | def Ttr(x):
"""Equation for the triple point of ammonia-water mixture
Parameters
----------
x : float
Mole fraction of ammonia in mixture, [mol/mol]
Returns
-------
Ttr : float
Triple point temperature, [K]
Notes
------
Raise :class:`NotImplementedError` if input isn't in limit:
* 0 ≤ x ≤ 1
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Eq 9
"""
if 0 <= x <= 0.33367:
Ttr = 273.16*(1-0.3439823*x-1.3274271*x**2-274.973*x**3)
elif 0.33367 < x <= 0.58396:
Ttr = 193.549*(1-4.987368*(x-0.5)**2)
elif 0.58396 < x <= 0.81473:
Ttr = 194.38*(1-4.886151*(x-2/3)**2+10.37298*(x-2/3)**3)
elif 0.81473 < x <= 1:
Ttr = 195.495*(1-0.323998*(1-x)-15.87560*(1-x)**4)
else:
raise NotImplementedError("Incoming out of bound")
return Ttr | python | def Ttr(x):
"""Equation for the triple point of ammonia-water mixture
Parameters
----------
x : float
Mole fraction of ammonia in mixture, [mol/mol]
Returns
-------
Ttr : float
Triple point temperature, [K]
Notes
------
Raise :class:`NotImplementedError` if input isn't in limit:
* 0 ≤ x ≤ 1
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Eq 9
"""
if 0 <= x <= 0.33367:
Ttr = 273.16*(1-0.3439823*x-1.3274271*x**2-274.973*x**3)
elif 0.33367 < x <= 0.58396:
Ttr = 193.549*(1-4.987368*(x-0.5)**2)
elif 0.58396 < x <= 0.81473:
Ttr = 194.38*(1-4.886151*(x-2/3)**2+10.37298*(x-2/3)**3)
elif 0.81473 < x <= 1:
Ttr = 195.495*(1-0.323998*(1-x)-15.87560*(1-x)**4)
else:
raise NotImplementedError("Incoming out of bound")
return Ttr | Equation for the triple point of ammonia-water mixture
Parameters
----------
x : float
Mole fraction of ammonia in mixture, [mol/mol]
Returns
-------
Ttr : float
Triple point temperature, [K]
Notes
------
Raise :class:`NotImplementedError` if input isn't in limit:
* 0 ≤ x ≤ 1
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Eq 9 | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/ammonia.py#L566-L601 |
jjgomera/iapws | iapws/ammonia.py | H2ONH3._prop | def _prop(self, rho, T, x):
"""Thermodynamic properties of ammonia-water mixtures
Parameters
----------
T : float
Temperature [K]
rho : float
Density [kg/m³]
x : float
Mole fraction of ammonia in mixture [mol/mol]
Returns
-------
prop : dict
Dictionary with thermodynamic properties of ammonia-water mixtures:
* M: Mixture molecular mass, [g/mol]
* P: Pressure, [MPa]
* u: Specific internal energy, [kJ/kg]
* s: Specific entropy, [kJ/kgK]
* h: Specific enthalpy, [kJ/kg]
* a: Specific Helmholtz energy, [kJ/kg]
* g: Specific gibbs energy, [kJ/kg]
* cv: Specific isochoric heat capacity, [kJ/kgK]
* cp: Specific isobaric heat capacity, [kJ/kgK]
* w: Speed of sound, [m/s]
* fugH2O: Fugacity of water, [-]
* fugNH3: Fugacity of ammonia, [-]
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Table 4
"""
# FIXME: The values are good, bad difer by 1%, a error I can find
# In Pressure happen and only use fird
M = (1-x)*IAPWS95.M + x*NH3.M
R = 8.314471/M
phio = self._phi0(rho, T, x)
fio = phio["fio"]
tau0 = phio["tau"]
fiot = phio["fiot"]
fiott = phio["fiott"]
phir = self._phir(rho, T, x)
fir = phir["fir"]
tau = phir["tau"]
delta = phir["delta"]
firt = phir["firt"]
firtt = phir["firtt"]
fird = phir["fird"]
firdd = phir["firdd"]
firdt = phir["firdt"]
F = phir["F"]
prop = {}
Z = 1 + delta*fird
prop["M"] = M
prop["P"] = Z*R*T*rho/1000
prop["u"] = R*T*(tau0*fiot + tau*firt)
prop["s"] = R*(tau0*fiot + tau*firt - fio - fir)
prop["h"] = R*T*(1+delta*fird+tau0*fiot+tau*firt)
prop["g"] = prop["h"]-T*prop["s"]
prop["a"] = prop["u"]-T*prop["s"]
cvR = -tau0**2*fiott - tau**2*firtt
prop["cv"] = R*cvR
prop["cp"] = R*(cvR+(1+delta*fird-delta*tau*firdt)**2 /
(1+2*delta*fird+delta**2*firdd))
prop["w"] = (R*T*1000*(1+2*delta*fird+delta**2*firdd +
(1+delta*fird-delta*tau*firdt)**2 / cvR))**0.5
prop["fugH2O"] = Z*exp(fir+delta*fird-x*F)
prop["fugNH3"] = Z*exp(fir+delta*fird+(1-x)*F)
return prop | python | def _prop(self, rho, T, x):
"""Thermodynamic properties of ammonia-water mixtures
Parameters
----------
T : float
Temperature [K]
rho : float
Density [kg/m³]
x : float
Mole fraction of ammonia in mixture [mol/mol]
Returns
-------
prop : dict
Dictionary with thermodynamic properties of ammonia-water mixtures:
* M: Mixture molecular mass, [g/mol]
* P: Pressure, [MPa]
* u: Specific internal energy, [kJ/kg]
* s: Specific entropy, [kJ/kgK]
* h: Specific enthalpy, [kJ/kg]
* a: Specific Helmholtz energy, [kJ/kg]
* g: Specific gibbs energy, [kJ/kg]
* cv: Specific isochoric heat capacity, [kJ/kgK]
* cp: Specific isobaric heat capacity, [kJ/kgK]
* w: Speed of sound, [m/s]
* fugH2O: Fugacity of water, [-]
* fugNH3: Fugacity of ammonia, [-]
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Table 4
"""
# FIXME: The values are good, bad difer by 1%, a error I can find
# In Pressure happen and only use fird
M = (1-x)*IAPWS95.M + x*NH3.M
R = 8.314471/M
phio = self._phi0(rho, T, x)
fio = phio["fio"]
tau0 = phio["tau"]
fiot = phio["fiot"]
fiott = phio["fiott"]
phir = self._phir(rho, T, x)
fir = phir["fir"]
tau = phir["tau"]
delta = phir["delta"]
firt = phir["firt"]
firtt = phir["firtt"]
fird = phir["fird"]
firdd = phir["firdd"]
firdt = phir["firdt"]
F = phir["F"]
prop = {}
Z = 1 + delta*fird
prop["M"] = M
prop["P"] = Z*R*T*rho/1000
prop["u"] = R*T*(tau0*fiot + tau*firt)
prop["s"] = R*(tau0*fiot + tau*firt - fio - fir)
prop["h"] = R*T*(1+delta*fird+tau0*fiot+tau*firt)
prop["g"] = prop["h"]-T*prop["s"]
prop["a"] = prop["u"]-T*prop["s"]
cvR = -tau0**2*fiott - tau**2*firtt
prop["cv"] = R*cvR
prop["cp"] = R*(cvR+(1+delta*fird-delta*tau*firdt)**2 /
(1+2*delta*fird+delta**2*firdd))
prop["w"] = (R*T*1000*(1+2*delta*fird+delta**2*firdd +
(1+delta*fird-delta*tau*firdt)**2 / cvR))**0.5
prop["fugH2O"] = Z*exp(fir+delta*fird-x*F)
prop["fugNH3"] = Z*exp(fir+delta*fird+(1-x)*F)
return prop | Thermodynamic properties of ammonia-water mixtures
Parameters
----------
T : float
Temperature [K]
rho : float
Density [kg/m³]
x : float
Mole fraction of ammonia in mixture [mol/mol]
Returns
-------
prop : dict
Dictionary with thermodynamic properties of ammonia-water mixtures:
* M: Mixture molecular mass, [g/mol]
* P: Pressure, [MPa]
* u: Specific internal energy, [kJ/kg]
* s: Specific entropy, [kJ/kgK]
* h: Specific enthalpy, [kJ/kg]
* a: Specific Helmholtz energy, [kJ/kg]
* g: Specific gibbs energy, [kJ/kg]
* cv: Specific isochoric heat capacity, [kJ/kgK]
* cp: Specific isobaric heat capacity, [kJ/kgK]
* w: Speed of sound, [m/s]
* fugH2O: Fugacity of water, [-]
* fugNH3: Fugacity of ammonia, [-]
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Table 4 | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/ammonia.py#L210-L286 |
jjgomera/iapws | iapws/ammonia.py | H2ONH3._phi0 | def _phi0(self, rho, T, x):
"""Ideal gas Helmholtz energy of binary mixtures and derivatives
Parameters
----------
rho : float
Density, [kg/m³]
T : float
Temperature, [K]
x : float
Mole fraction of ammonia in mixture, [mol/mol]
Returns
-------
prop : dict
Dictionary with ideal adimensional helmholtz energy and
derivatives:
* tau: the adimensional temperature variable, [-]
* delta: the adimensional density variable, [-]
* fio,[-]
* fiot: [∂fio/∂τ]δ [-]
* fiod: [∂fio/∂δ]τ [-]
* fiott: [∂²fio/∂τ²]δ [-]
* fiodt: [∂²fio/∂τ∂δ] [-]
* fiodd: [∂²fio/∂δ²]τ [-]
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Eq 2
"""
# Define reducing parameters for mixture model
M = (1-x)*IAPWS95.M + x*NH3.M
tau = 500/T
delta = rho/15/M
# Table 2
Fi0 = {
"log_water": 3.006320,
"ao_water": [-7.720435, 8.649358],
"pow_water": [0, 1],
"ao_exp": [0.012436, 0.97315, 1.279500, 0.969560, 0.248730],
"titao": [1.666, 4.578, 10.018, 11.964, 35.600],
"log_nh3": -1.0,
"ao_nh3": [-16.444285, 4.036946, 10.69955, -1.775436, 0.82374034],
"pow_nh3": [0, 1, 1/3, -3/2, -7/4]}
fiod = 1/delta
fiodd = -1/delta**2
fiodt = 0
fiow = fiotw = fiottw = 0
fioa = fiota = fiotta = 0
# Water section
if x < 1:
fiow = Fi0["log_water"]*log(tau) + log(1-x)
fiotw = Fi0["log_water"]/tau
fiottw = -Fi0["log_water"]/tau**2
for n, t in zip(Fi0["ao_water"], Fi0["pow_water"]):
fiow += n*tau**t
if t != 0:
fiotw += t*n*tau**(t-1)
if t not in [0, 1]:
fiottw += n*t*(t-1)*tau**(t-2)
for n, t in zip(Fi0["ao_exp"], Fi0["titao"]):
fiow += n*log(1-exp(-tau*t))
fiotw += n*t*((1-exp(-t*tau))**-1-1)
fiottw -= n*t**2*exp(-t*tau)*(1-exp(-t*tau))**-2
# ammonia section
if x > 0:
fioa = Fi0["log_nh3"]*log(tau) + log(x)
fiota = Fi0["log_nh3"]/tau
fiotta = -Fi0["log_nh3"]/tau**2
for n, t in zip(Fi0["ao_nh3"], Fi0["pow_nh3"]):
fioa += n*tau**t
if t != 0:
fiota += t*n*tau**(t-1)
if t not in [0, 1]:
fiotta += n*t*(t-1)*tau**(t-2)
prop = {}
prop["tau"] = tau
prop["delta"] = delta
prop["fio"] = log(delta) + (1-x)*fiow + x*fioa
prop["fiot"] = (1-x)*fiotw + x*fiota
prop["fiott"] = (1-x)*fiottw + x*fiotta
prop["fiod"] = fiod
prop["fiodd"] = fiodd
prop["fiodt"] = fiodt
return prop | python | def _phi0(self, rho, T, x):
"""Ideal gas Helmholtz energy of binary mixtures and derivatives
Parameters
----------
rho : float
Density, [kg/m³]
T : float
Temperature, [K]
x : float
Mole fraction of ammonia in mixture, [mol/mol]
Returns
-------
prop : dict
Dictionary with ideal adimensional helmholtz energy and
derivatives:
* tau: the adimensional temperature variable, [-]
* delta: the adimensional density variable, [-]
* fio,[-]
* fiot: [∂fio/∂τ]δ [-]
* fiod: [∂fio/∂δ]τ [-]
* fiott: [∂²fio/∂τ²]δ [-]
* fiodt: [∂²fio/∂τ∂δ] [-]
* fiodd: [∂²fio/∂δ²]τ [-]
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Eq 2
"""
# Define reducing parameters for mixture model
M = (1-x)*IAPWS95.M + x*NH3.M
tau = 500/T
delta = rho/15/M
# Table 2
Fi0 = {
"log_water": 3.006320,
"ao_water": [-7.720435, 8.649358],
"pow_water": [0, 1],
"ao_exp": [0.012436, 0.97315, 1.279500, 0.969560, 0.248730],
"titao": [1.666, 4.578, 10.018, 11.964, 35.600],
"log_nh3": -1.0,
"ao_nh3": [-16.444285, 4.036946, 10.69955, -1.775436, 0.82374034],
"pow_nh3": [0, 1, 1/3, -3/2, -7/4]}
fiod = 1/delta
fiodd = -1/delta**2
fiodt = 0
fiow = fiotw = fiottw = 0
fioa = fiota = fiotta = 0
# Water section
if x < 1:
fiow = Fi0["log_water"]*log(tau) + log(1-x)
fiotw = Fi0["log_water"]/tau
fiottw = -Fi0["log_water"]/tau**2
for n, t in zip(Fi0["ao_water"], Fi0["pow_water"]):
fiow += n*tau**t
if t != 0:
fiotw += t*n*tau**(t-1)
if t not in [0, 1]:
fiottw += n*t*(t-1)*tau**(t-2)
for n, t in zip(Fi0["ao_exp"], Fi0["titao"]):
fiow += n*log(1-exp(-tau*t))
fiotw += n*t*((1-exp(-t*tau))**-1-1)
fiottw -= n*t**2*exp(-t*tau)*(1-exp(-t*tau))**-2
# ammonia section
if x > 0:
fioa = Fi0["log_nh3"]*log(tau) + log(x)
fiota = Fi0["log_nh3"]/tau
fiotta = -Fi0["log_nh3"]/tau**2
for n, t in zip(Fi0["ao_nh3"], Fi0["pow_nh3"]):
fioa += n*tau**t
if t != 0:
fiota += t*n*tau**(t-1)
if t not in [0, 1]:
fiotta += n*t*(t-1)*tau**(t-2)
prop = {}
prop["tau"] = tau
prop["delta"] = delta
prop["fio"] = log(delta) + (1-x)*fiow + x*fioa
prop["fiot"] = (1-x)*fiotw + x*fiota
prop["fiott"] = (1-x)*fiottw + x*fiotta
prop["fiod"] = fiod
prop["fiodd"] = fiodd
prop["fiodt"] = fiodt
return prop | Ideal gas Helmholtz energy of binary mixtures and derivatives
Parameters
----------
rho : float
Density, [kg/m³]
T : float
Temperature, [K]
x : float
Mole fraction of ammonia in mixture, [mol/mol]
Returns
-------
prop : dict
Dictionary with ideal adimensional helmholtz energy and
derivatives:
* tau: the adimensional temperature variable, [-]
* delta: the adimensional density variable, [-]
* fio,[-]
* fiot: [∂fio/∂τ]δ [-]
* fiod: [∂fio/∂δ]τ [-]
* fiott: [∂²fio/∂τ²]δ [-]
* fiodt: [∂²fio/∂τ∂δ] [-]
* fiodd: [∂²fio/∂δ²]τ [-]
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Eq 2 | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/ammonia.py#L288-L380 |
jjgomera/iapws | iapws/ammonia.py | H2ONH3._phir | def _phir(self, rho, T, x):
"""Residual contribution to the free Helmholtz energy
Parameters
----------
rho : float
Density, [kg/m³]
T : float
Temperature, [K]
x : float
Mole fraction of ammonia in mixture, [mol/mol]
Returns
-------
prop : dict
dictionary with residual adimensional helmholtz energy and
derivatives:
* tau: the adimensional temperature variable, [-]
* delta: the adimensional density variable, [-]
* fir, [-]
* firt: [∂fir/∂τ]δ,x [-]
* fird: [∂fir/∂δ]τ,x [-]
* firtt: [∂²fir/∂τ²]δ,x [-]
* firdt: [∂²fir/∂τ∂δ]x [-]
* firdd: [∂²fir/∂δ²]τ,x [-]
* firx: [∂fir/∂x]τ,δ [-]
* F: Function for fugacity calculation, [-]
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Eq 3
"""
# Temperature reducing value, Eq 4
Tc12 = 0.9648407/2*(IAPWS95.Tc+NH3.Tc)
Tn = (1-x)**2*IAPWS95.Tc + x**2*NH3.Tc + 2*x*(1-x**1.125455)*Tc12
dTnx = -2*IAPWS95.Tc*(1-x) + 2*x*NH3.Tc + 2*Tc12*(1-x**1.125455) - \
2*Tc12*1.12455*x**1.12455
# Density reducing value, Eq 5
b = 0.8978069
rhoc12 = 1/(1.2395117/2*(1/IAPWS95.rhoc+1/NH3.rhoc))
rhon = 1/((1-x)**2/IAPWS95.rhoc + x**2/NH3.rhoc +
2*x*(1-x**b)/rhoc12)
drhonx = -(2*b*x**b/rhoc12 + 2*(1-x**b)/rhoc12 +
2*x/NH3.rhoc - 2*(1-x)/IAPWS95.rhoc)/(
2*x*(1-x**b)/rhoc12 + x**2/NH3.rhoc +
(1-x)**2/IAPWS95.rhoc)**2
tau = Tn/T
delta = rho/rhon
water = IAPWS95()
phi1 = water._phir(tau, delta)
ammonia = NH3()
phi2 = ammonia._phir(tau, delta)
Dphi = self._Dphir(tau, delta, x)
prop = {}
prop["tau"] = tau
prop["delta"] = delta
prop["fir"] = (1-x)*phi1["fir"] + x*phi2["fir"] + Dphi["fir"]
prop["firt"] = (1-x)*phi1["firt"] + x*phi2["firt"] + Dphi["firt"]
prop["firtt"] = (1-x)*phi1["firtt"] + x*phi2["firtt"] + Dphi["firtt"]
prop["fird"] = (1-x)*phi1["fird"] + x*phi2["fird"] + Dphi["fird"]
prop["firdd"] = (1-x)*phi1["firdd"] + x*phi2["firdd"] + Dphi["firdd"]
prop["firdt"] = (1-x)*phi1["firdt"] + x*phi2["firdt"] + Dphi["firdt"]
prop["firx"] = -phi1["fir"] + phi2["fir"] + Dphi["firx"]
prop["F"] = prop["firx"] - delta/rhon*drhonx*prop["fird"] + \
tau/Tn*dTnx*prop["firt"]
return prop | python | def _phir(self, rho, T, x):
"""Residual contribution to the free Helmholtz energy
Parameters
----------
rho : float
Density, [kg/m³]
T : float
Temperature, [K]
x : float
Mole fraction of ammonia in mixture, [mol/mol]
Returns
-------
prop : dict
dictionary with residual adimensional helmholtz energy and
derivatives:
* tau: the adimensional temperature variable, [-]
* delta: the adimensional density variable, [-]
* fir, [-]
* firt: [∂fir/∂τ]δ,x [-]
* fird: [∂fir/∂δ]τ,x [-]
* firtt: [∂²fir/∂τ²]δ,x [-]
* firdt: [∂²fir/∂τ∂δ]x [-]
* firdd: [∂²fir/∂δ²]τ,x [-]
* firx: [∂fir/∂x]τ,δ [-]
* F: Function for fugacity calculation, [-]
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Eq 3
"""
# Temperature reducing value, Eq 4
Tc12 = 0.9648407/2*(IAPWS95.Tc+NH3.Tc)
Tn = (1-x)**2*IAPWS95.Tc + x**2*NH3.Tc + 2*x*(1-x**1.125455)*Tc12
dTnx = -2*IAPWS95.Tc*(1-x) + 2*x*NH3.Tc + 2*Tc12*(1-x**1.125455) - \
2*Tc12*1.12455*x**1.12455
# Density reducing value, Eq 5
b = 0.8978069
rhoc12 = 1/(1.2395117/2*(1/IAPWS95.rhoc+1/NH3.rhoc))
rhon = 1/((1-x)**2/IAPWS95.rhoc + x**2/NH3.rhoc +
2*x*(1-x**b)/rhoc12)
drhonx = -(2*b*x**b/rhoc12 + 2*(1-x**b)/rhoc12 +
2*x/NH3.rhoc - 2*(1-x)/IAPWS95.rhoc)/(
2*x*(1-x**b)/rhoc12 + x**2/NH3.rhoc +
(1-x)**2/IAPWS95.rhoc)**2
tau = Tn/T
delta = rho/rhon
water = IAPWS95()
phi1 = water._phir(tau, delta)
ammonia = NH3()
phi2 = ammonia._phir(tau, delta)
Dphi = self._Dphir(tau, delta, x)
prop = {}
prop["tau"] = tau
prop["delta"] = delta
prop["fir"] = (1-x)*phi1["fir"] + x*phi2["fir"] + Dphi["fir"]
prop["firt"] = (1-x)*phi1["firt"] + x*phi2["firt"] + Dphi["firt"]
prop["firtt"] = (1-x)*phi1["firtt"] + x*phi2["firtt"] + Dphi["firtt"]
prop["fird"] = (1-x)*phi1["fird"] + x*phi2["fird"] + Dphi["fird"]
prop["firdd"] = (1-x)*phi1["firdd"] + x*phi2["firdd"] + Dphi["firdd"]
prop["firdt"] = (1-x)*phi1["firdt"] + x*phi2["firdt"] + Dphi["firdt"]
prop["firx"] = -phi1["fir"] + phi2["fir"] + Dphi["firx"]
prop["F"] = prop["firx"] - delta/rhon*drhonx*prop["fird"] + \
tau/Tn*dTnx*prop["firt"]
return prop | Residual contribution to the free Helmholtz energy
Parameters
----------
rho : float
Density, [kg/m³]
T : float
Temperature, [K]
x : float
Mole fraction of ammonia in mixture, [mol/mol]
Returns
-------
prop : dict
dictionary with residual adimensional helmholtz energy and
derivatives:
* tau: the adimensional temperature variable, [-]
* delta: the adimensional density variable, [-]
* fir, [-]
* firt: [∂fir/∂τ]δ,x [-]
* fird: [∂fir/∂δ]τ,x [-]
* firtt: [∂²fir/∂τ²]δ,x [-]
* firdt: [∂²fir/∂τ∂δ]x [-]
* firdd: [∂²fir/∂δ²]τ,x [-]
* firx: [∂fir/∂x]τ,δ [-]
* F: Function for fugacity calculation, [-]
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Eq 3 | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/ammonia.py#L382-L457 |
jjgomera/iapws | iapws/ammonia.py | H2ONH3._Dphir | def _Dphir(self, tau, delta, x):
"""Departure function to the residual contribution to the free
Helmholtz energy
Parameters
----------
tau : float
Adimensional temperature, [-]
delta : float
Adimensional density, [-]
x : float
Mole fraction of ammonia in mixture, [mol/mol]
Returns
-------
prop : dict
Dictionary with departure contribution to the residual adimensional
helmholtz energy and derivatives:
* fir [-]
* firt: [∂Δfir/∂τ]δ,x [-]
* fird: [∂Δfir/∂δ]τ,x [-]
* firtt: [∂²Δfir/∂τ²]δ,x [-]
* firdt: [∂²Δfir/∂τ∂δ]x [-]
* firdd: [∂²Δfir/∂δ²]τ,x [-]
* firx: [∂Δfir/∂x]τ,δ [-]
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Eq 8
"""
fx = x*(1-x**0.5248379)
dfx = 1-1.5248379*x**0.5248379
# Polinomial terms
n = -1.855822e-2
t = 1.5
d = 4
fir = n*delta**d*tau**t
fird = n*d*delta**(d-1)*tau**t
firdd = n*d*(d-1)*delta**(d-2)*tau**t
firt = n*t*delta**d*tau**(t-1)
firtt = n*t*(t-1)*delta**d*tau**(t-2)
firdt = n*t*d*delta**(d-1)*tau**(t-1)
firx = dfx*n*delta**d*tau**t
# Exponential terms
nr2 = [5.258010e-2, 3.552874e-10, 5.451379e-6, -5.998546e-13,
-3.687808e-6]
t2 = [0.5, 6.5, 1.75, 15, 6]
d2 = [5, 15, 12, 12, 15]
c2 = [1, 1, 1, 1, 2]
for n, d, t, c in zip(nr2, d2, t2, c2):
fir += n*delta**d*tau**t*exp(-delta**c)
fird += n*exp(-delta**c)*delta**(d-1)*tau**t*(d-c*delta**c)
firdd += n*exp(-delta**c)*delta**(d-2)*tau**t * \
((d-c*delta**c)*(d-1-c*delta**c)-c**2*delta**c)
firt += n*t*delta**d*tau**(t-1)*exp(-delta**c)
firtt += n*t*(t-1)*delta**d*tau**(t-2)*exp(-delta**c)
firdt += n*t*delta**(d-1)*tau**(t-1)*(d-c*delta**c)*exp(
-delta**c)
firx += dfx*n*delta**d*tau**t*exp(-delta**c)
# Exponential terms with composition
nr3 = [0.2586192, -1.368072e-8, 1.226146e-2, -7.181443e-2, 9.970849e-2,
1.0584086e-3, -0.1963687]
t3 = [-1, 4, 3.5, 0, -1, 8, 7.5]
d3 = [4, 15, 4, 5, 6, 10, 6]
c3 = [1, 1, 1, 1, 2, 2, 2]
for n, d, t, c in zip(nr3, d3, t3, c3):
fir += x*n*delta**d*tau**t*exp(-delta**c)
fird += x*n*exp(-delta**c)*delta**(d-1)*tau**t*(d-c*delta**c)
firdd += x*n*exp(-delta**c)*delta**(d-2)*tau**t * \
((d-c*delta**c)*(d-1-c*delta**c)-c**2*delta**c)
firt += x*n*t*delta**d*tau**(t-1)*exp(-delta**c)
firtt += x*n*t*(t-1)*delta**d*tau**(t-2)*exp(-delta**c)
firdt += x*n*t*delta**(d-1)*tau**(t-1)*(d-c*delta**c)*exp(
-delta**c)
firx += x*dfx*n*delta**d*tau**t*exp(-delta**c)
n = -0.7777897
t = 4
d = 2
c = 2
fir += x**2*n*delta**d*tau**t*exp(-delta**c)
fird += x**2*n*exp(-delta**c)*delta**(d-1)*tau**t*(d-c*delta**c)
firdd += x**2*n*exp(-delta**c)*delta**(d-2)*tau**t * \
((d-c*delta**c)*(d-1-c*delta**c)-c**2*delta**c)
firt += x**2*n*t*delta**d*tau**(t-1)*exp(-delta**c)
firtt += x**2*n*t*(t-1)*delta**d*tau**(t-2)*exp(-delta**c)
firdt += x**2*n*t*delta**(d-1)*tau**(t-1)*(d-c*delta**c)*exp(
-delta**c)
firx += x**2*dfx*n*delta**d*tau**t*exp(-delta**c)
prop = {}
prop["fir"] = fir*fx
prop["firt"] = firt*fx
prop["firtt"] = firtt*fx
prop["fird"] = fird*fx
prop["firdd"] = firdd*fx
prop["firdt"] = firdt*fx
prop["firx"] = firx
return prop | python | def _Dphir(self, tau, delta, x):
"""Departure function to the residual contribution to the free
Helmholtz energy
Parameters
----------
tau : float
Adimensional temperature, [-]
delta : float
Adimensional density, [-]
x : float
Mole fraction of ammonia in mixture, [mol/mol]
Returns
-------
prop : dict
Dictionary with departure contribution to the residual adimensional
helmholtz energy and derivatives:
* fir [-]
* firt: [∂Δfir/∂τ]δ,x [-]
* fird: [∂Δfir/∂δ]τ,x [-]
* firtt: [∂²Δfir/∂τ²]δ,x [-]
* firdt: [∂²Δfir/∂τ∂δ]x [-]
* firdd: [∂²Δfir/∂δ²]τ,x [-]
* firx: [∂Δfir/∂x]τ,δ [-]
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Eq 8
"""
fx = x*(1-x**0.5248379)
dfx = 1-1.5248379*x**0.5248379
# Polinomial terms
n = -1.855822e-2
t = 1.5
d = 4
fir = n*delta**d*tau**t
fird = n*d*delta**(d-1)*tau**t
firdd = n*d*(d-1)*delta**(d-2)*tau**t
firt = n*t*delta**d*tau**(t-1)
firtt = n*t*(t-1)*delta**d*tau**(t-2)
firdt = n*t*d*delta**(d-1)*tau**(t-1)
firx = dfx*n*delta**d*tau**t
# Exponential terms
nr2 = [5.258010e-2, 3.552874e-10, 5.451379e-6, -5.998546e-13,
-3.687808e-6]
t2 = [0.5, 6.5, 1.75, 15, 6]
d2 = [5, 15, 12, 12, 15]
c2 = [1, 1, 1, 1, 2]
for n, d, t, c in zip(nr2, d2, t2, c2):
fir += n*delta**d*tau**t*exp(-delta**c)
fird += n*exp(-delta**c)*delta**(d-1)*tau**t*(d-c*delta**c)
firdd += n*exp(-delta**c)*delta**(d-2)*tau**t * \
((d-c*delta**c)*(d-1-c*delta**c)-c**2*delta**c)
firt += n*t*delta**d*tau**(t-1)*exp(-delta**c)
firtt += n*t*(t-1)*delta**d*tau**(t-2)*exp(-delta**c)
firdt += n*t*delta**(d-1)*tau**(t-1)*(d-c*delta**c)*exp(
-delta**c)
firx += dfx*n*delta**d*tau**t*exp(-delta**c)
# Exponential terms with composition
nr3 = [0.2586192, -1.368072e-8, 1.226146e-2, -7.181443e-2, 9.970849e-2,
1.0584086e-3, -0.1963687]
t3 = [-1, 4, 3.5, 0, -1, 8, 7.5]
d3 = [4, 15, 4, 5, 6, 10, 6]
c3 = [1, 1, 1, 1, 2, 2, 2]
for n, d, t, c in zip(nr3, d3, t3, c3):
fir += x*n*delta**d*tau**t*exp(-delta**c)
fird += x*n*exp(-delta**c)*delta**(d-1)*tau**t*(d-c*delta**c)
firdd += x*n*exp(-delta**c)*delta**(d-2)*tau**t * \
((d-c*delta**c)*(d-1-c*delta**c)-c**2*delta**c)
firt += x*n*t*delta**d*tau**(t-1)*exp(-delta**c)
firtt += x*n*t*(t-1)*delta**d*tau**(t-2)*exp(-delta**c)
firdt += x*n*t*delta**(d-1)*tau**(t-1)*(d-c*delta**c)*exp(
-delta**c)
firx += x*dfx*n*delta**d*tau**t*exp(-delta**c)
n = -0.7777897
t = 4
d = 2
c = 2
fir += x**2*n*delta**d*tau**t*exp(-delta**c)
fird += x**2*n*exp(-delta**c)*delta**(d-1)*tau**t*(d-c*delta**c)
firdd += x**2*n*exp(-delta**c)*delta**(d-2)*tau**t * \
((d-c*delta**c)*(d-1-c*delta**c)-c**2*delta**c)
firt += x**2*n*t*delta**d*tau**(t-1)*exp(-delta**c)
firtt += x**2*n*t*(t-1)*delta**d*tau**(t-2)*exp(-delta**c)
firdt += x**2*n*t*delta**(d-1)*tau**(t-1)*(d-c*delta**c)*exp(
-delta**c)
firx += x**2*dfx*n*delta**d*tau**t*exp(-delta**c)
prop = {}
prop["fir"] = fir*fx
prop["firt"] = firt*fx
prop["firtt"] = firtt*fx
prop["fird"] = fird*fx
prop["firdd"] = firdd*fx
prop["firdt"] = firdt*fx
prop["firx"] = firx
return prop | Departure function to the residual contribution to the free
Helmholtz energy
Parameters
----------
tau : float
Adimensional temperature, [-]
delta : float
Adimensional density, [-]
x : float
Mole fraction of ammonia in mixture, [mol/mol]
Returns
-------
prop : dict
Dictionary with departure contribution to the residual adimensional
helmholtz energy and derivatives:
* fir [-]
* firt: [∂Δfir/∂τ]δ,x [-]
* fird: [∂Δfir/∂δ]τ,x [-]
* firtt: [∂²Δfir/∂τ²]δ,x [-]
* firdt: [∂²Δfir/∂τ∂δ]x [-]
* firdd: [∂²Δfir/∂δ²]τ,x [-]
* firx: [∂Δfir/∂x]τ,δ [-]
References
----------
IAPWS, Guideline on the IAPWS Formulation 2001 for the Thermodynamic
Properties of Ammonia-Water Mixtures,
http://www.iapws.org/relguide/nh3h2o.pdf, Eq 8 | https://github.com/jjgomera/iapws/blob/1e5812aab38212fb8a63736f61cdcfa427d223b1/iapws/ammonia.py#L459-L563 |
spencerahill/aospy | aospy/data_loader.py | _preprocess_and_rename_grid_attrs | def _preprocess_and_rename_grid_attrs(func, grid_attrs=None, **kwargs):
"""Call a custom preprocessing method first then rename grid attrs.
This wrapper is needed to generate a single function to pass to the
``preprocesss`` of xr.open_mfdataset. It makes sure that the
user-specified preprocess function is called on the loaded Dataset before
aospy's is applied. An example for why this might be needed is output from
the WRF model; one needs to add a CF-compliant units attribute to the time
coordinate of all input files, because it is not present by default.
Parameters
----------
func : function
An arbitrary function to call before calling
``grid_attrs_to_aospy_names`` in ``_load_data_from_disk``. Must take
an xr.Dataset as an argument as well as ``**kwargs``.
grid_attrs : dict (optional)
Overriding dictionary of grid attributes mapping aospy internal
names to names of grid attributes used in a particular model.
Returns
-------
function
A function that calls the provided function ``func`` on the Dataset
before calling ``grid_attrs_to_aospy_names``; this is meant to be
passed as a ``preprocess`` argument to ``xr.open_mfdataset``.
"""
def func_wrapper(ds):
return grid_attrs_to_aospy_names(func(ds, **kwargs), grid_attrs)
return func_wrapper | python | def _preprocess_and_rename_grid_attrs(func, grid_attrs=None, **kwargs):
"""Call a custom preprocessing method first then rename grid attrs.
This wrapper is needed to generate a single function to pass to the
``preprocesss`` of xr.open_mfdataset. It makes sure that the
user-specified preprocess function is called on the loaded Dataset before
aospy's is applied. An example for why this might be needed is output from
the WRF model; one needs to add a CF-compliant units attribute to the time
coordinate of all input files, because it is not present by default.
Parameters
----------
func : function
An arbitrary function to call before calling
``grid_attrs_to_aospy_names`` in ``_load_data_from_disk``. Must take
an xr.Dataset as an argument as well as ``**kwargs``.
grid_attrs : dict (optional)
Overriding dictionary of grid attributes mapping aospy internal
names to names of grid attributes used in a particular model.
Returns
-------
function
A function that calls the provided function ``func`` on the Dataset
before calling ``grid_attrs_to_aospy_names``; this is meant to be
passed as a ``preprocess`` argument to ``xr.open_mfdataset``.
"""
def func_wrapper(ds):
return grid_attrs_to_aospy_names(func(ds, **kwargs), grid_attrs)
return func_wrapper | Call a custom preprocessing method first then rename grid attrs.
This wrapper is needed to generate a single function to pass to the
``preprocesss`` of xr.open_mfdataset. It makes sure that the
user-specified preprocess function is called on the loaded Dataset before
aospy's is applied. An example for why this might be needed is output from
the WRF model; one needs to add a CF-compliant units attribute to the time
coordinate of all input files, because it is not present by default.
Parameters
----------
func : function
An arbitrary function to call before calling
``grid_attrs_to_aospy_names`` in ``_load_data_from_disk``. Must take
an xr.Dataset as an argument as well as ``**kwargs``.
grid_attrs : dict (optional)
Overriding dictionary of grid attributes mapping aospy internal
names to names of grid attributes used in a particular model.
Returns
-------
function
A function that calls the provided function ``func`` on the Dataset
before calling ``grid_attrs_to_aospy_names``; this is meant to be
passed as a ``preprocess`` argument to ``xr.open_mfdataset``. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/data_loader.py#L19-L49 |
spencerahill/aospy | aospy/data_loader.py | grid_attrs_to_aospy_names | def grid_attrs_to_aospy_names(data, grid_attrs=None):
"""Rename grid attributes to be consistent with aospy conventions.
Search all of the dataset's coords and dims looking for matches to known
grid attribute names; any that are found subsequently get renamed to the
aospy name as specified in ``aospy.internal_names.GRID_ATTRS``.
Also forces any renamed grid attribute that is saved as a dim without a
coord to have a coord, which facilitates subsequent slicing/subsetting.
This function does not compare to Model coordinates or add missing
coordinates from Model objects.
Parameters
----------
data : xr.Dataset
grid_attrs : dict (default None)
Overriding dictionary of grid attributes mapping aospy internal
names to names of grid attributes used in a particular model.
Returns
-------
xr.Dataset
Data returned with coordinates consistent with aospy
conventions
"""
if grid_attrs is None:
grid_attrs = {}
# Override GRID_ATTRS with entries in grid_attrs
attrs = GRID_ATTRS.copy()
for k, v in grid_attrs.items():
if k not in attrs:
raise ValueError(
'Unrecognized internal name, {!r}, specified for a custom '
'grid attribute name. See the full list of valid internal '
'names below:\n\n{}'.format(k, list(GRID_ATTRS.keys())))
attrs[k] = (v, )
dims_and_vars = set(data.variables).union(set(data.dims))
for name_int, names_ext in attrs.items():
data_coord_name = set(names_ext).intersection(dims_and_vars)
if data_coord_name:
data = data.rename({data_coord_name.pop(): name_int})
return set_grid_attrs_as_coords(data) | python | def grid_attrs_to_aospy_names(data, grid_attrs=None):
"""Rename grid attributes to be consistent with aospy conventions.
Search all of the dataset's coords and dims looking for matches to known
grid attribute names; any that are found subsequently get renamed to the
aospy name as specified in ``aospy.internal_names.GRID_ATTRS``.
Also forces any renamed grid attribute that is saved as a dim without a
coord to have a coord, which facilitates subsequent slicing/subsetting.
This function does not compare to Model coordinates or add missing
coordinates from Model objects.
Parameters
----------
data : xr.Dataset
grid_attrs : dict (default None)
Overriding dictionary of grid attributes mapping aospy internal
names to names of grid attributes used in a particular model.
Returns
-------
xr.Dataset
Data returned with coordinates consistent with aospy
conventions
"""
if grid_attrs is None:
grid_attrs = {}
# Override GRID_ATTRS with entries in grid_attrs
attrs = GRID_ATTRS.copy()
for k, v in grid_attrs.items():
if k not in attrs:
raise ValueError(
'Unrecognized internal name, {!r}, specified for a custom '
'grid attribute name. See the full list of valid internal '
'names below:\n\n{}'.format(k, list(GRID_ATTRS.keys())))
attrs[k] = (v, )
dims_and_vars = set(data.variables).union(set(data.dims))
for name_int, names_ext in attrs.items():
data_coord_name = set(names_ext).intersection(dims_and_vars)
if data_coord_name:
data = data.rename({data_coord_name.pop(): name_int})
return set_grid_attrs_as_coords(data) | Rename grid attributes to be consistent with aospy conventions.
Search all of the dataset's coords and dims looking for matches to known
grid attribute names; any that are found subsequently get renamed to the
aospy name as specified in ``aospy.internal_names.GRID_ATTRS``.
Also forces any renamed grid attribute that is saved as a dim without a
coord to have a coord, which facilitates subsequent slicing/subsetting.
This function does not compare to Model coordinates or add missing
coordinates from Model objects.
Parameters
----------
data : xr.Dataset
grid_attrs : dict (default None)
Overriding dictionary of grid attributes mapping aospy internal
names to names of grid attributes used in a particular model.
Returns
-------
xr.Dataset
Data returned with coordinates consistent with aospy
conventions | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/data_loader.py#L52-L96 |
spencerahill/aospy | aospy/data_loader.py | set_grid_attrs_as_coords | def set_grid_attrs_as_coords(ds):
"""Set available grid attributes as coordinates in a given Dataset.
Grid attributes are assumed to have their internal aospy names. Grid
attributes are set as coordinates, such that they are carried by all
selected DataArrays with overlapping index dimensions.
Parameters
----------
ds : Dataset
Input data
Returns
-------
Dataset
Dataset with grid attributes set as coordinates
"""
grid_attrs_in_ds = set(GRID_ATTRS.keys()).intersection(
set(ds.coords) | set(ds.data_vars))
ds = ds.set_coords(grid_attrs_in_ds)
return ds | python | def set_grid_attrs_as_coords(ds):
"""Set available grid attributes as coordinates in a given Dataset.
Grid attributes are assumed to have their internal aospy names. Grid
attributes are set as coordinates, such that they are carried by all
selected DataArrays with overlapping index dimensions.
Parameters
----------
ds : Dataset
Input data
Returns
-------
Dataset
Dataset with grid attributes set as coordinates
"""
grid_attrs_in_ds = set(GRID_ATTRS.keys()).intersection(
set(ds.coords) | set(ds.data_vars))
ds = ds.set_coords(grid_attrs_in_ds)
return ds | Set available grid attributes as coordinates in a given Dataset.
Grid attributes are assumed to have their internal aospy names. Grid
attributes are set as coordinates, such that they are carried by all
selected DataArrays with overlapping index dimensions.
Parameters
----------
ds : Dataset
Input data
Returns
-------
Dataset
Dataset with grid attributes set as coordinates | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/data_loader.py#L99-L119 |
spencerahill/aospy | aospy/data_loader.py | _maybe_cast_to_float64 | def _maybe_cast_to_float64(da):
"""Cast DataArrays to np.float64 if they are of type np.float32.
Parameters
----------
da : xr.DataArray
Input DataArray
Returns
-------
DataArray
"""
if da.dtype == np.float32:
logging.warning('Datapoints were stored using the np.float32 datatype.'
'For accurate reduction operations using bottleneck, '
'datapoints are being cast to the np.float64 datatype.'
' For more information see: https://github.com/pydata/'
'xarray/issues/1346')
return da.astype(np.float64)
else:
return da | python | def _maybe_cast_to_float64(da):
"""Cast DataArrays to np.float64 if they are of type np.float32.
Parameters
----------
da : xr.DataArray
Input DataArray
Returns
-------
DataArray
"""
if da.dtype == np.float32:
logging.warning('Datapoints were stored using the np.float32 datatype.'
'For accurate reduction operations using bottleneck, '
'datapoints are being cast to the np.float64 datatype.'
' For more information see: https://github.com/pydata/'
'xarray/issues/1346')
return da.astype(np.float64)
else:
return da | Cast DataArrays to np.float64 if they are of type np.float32.
Parameters
----------
da : xr.DataArray
Input DataArray
Returns
-------
DataArray | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/data_loader.py#L122-L142 |
spencerahill/aospy | aospy/data_loader.py | _sel_var | def _sel_var(ds, var, upcast_float32=True):
"""Select the specified variable by trying all possible alternative names.
Parameters
----------
ds : Dataset
Dataset possibly containing var
var : aospy.Var
Variable to find data for
upcast_float32 : bool (default True)
Whether to cast a float32 DataArray up to float64
Returns
-------
DataArray
Raises
------
KeyError
If the variable is not in the Dataset
"""
for name in var.names:
try:
da = ds[name].rename(var.name)
if upcast_float32:
return _maybe_cast_to_float64(da)
else:
return da
except KeyError:
pass
msg = '{0} not found among names: {1} in\n{2}'.format(var, var.names, ds)
raise LookupError(msg) | python | def _sel_var(ds, var, upcast_float32=True):
"""Select the specified variable by trying all possible alternative names.
Parameters
----------
ds : Dataset
Dataset possibly containing var
var : aospy.Var
Variable to find data for
upcast_float32 : bool (default True)
Whether to cast a float32 DataArray up to float64
Returns
-------
DataArray
Raises
------
KeyError
If the variable is not in the Dataset
"""
for name in var.names:
try:
da = ds[name].rename(var.name)
if upcast_float32:
return _maybe_cast_to_float64(da)
else:
return da
except KeyError:
pass
msg = '{0} not found among names: {1} in\n{2}'.format(var, var.names, ds)
raise LookupError(msg) | Select the specified variable by trying all possible alternative names.
Parameters
----------
ds : Dataset
Dataset possibly containing var
var : aospy.Var
Variable to find data for
upcast_float32 : bool (default True)
Whether to cast a float32 DataArray up to float64
Returns
-------
DataArray
Raises
------
KeyError
If the variable is not in the Dataset | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/data_loader.py#L145-L176 |
spencerahill/aospy | aospy/data_loader.py | _prep_time_data | def _prep_time_data(ds):
"""Prepare time coordinate information in Dataset for use in aospy.
1. If the Dataset contains a time bounds coordinate, add attributes
representing the true beginning and end dates of the time interval used
to construct the Dataset
2. If the Dataset contains a time bounds coordinate, overwrite the time
coordinate values with the averages of the time bounds at each timestep
3. Decode the times into np.datetime64 objects for time indexing
Parameters
----------
ds : Dataset
Pre-processed Dataset with time coordinate renamed to
internal_names.TIME_STR
Returns
-------
Dataset
The processed Dataset
"""
ds = times.ensure_time_as_index(ds)
if TIME_BOUNDS_STR in ds:
ds = times.ensure_time_avg_has_cf_metadata(ds)
ds[TIME_STR] = times.average_time_bounds(ds)
else:
logging.warning("dt array not found. Assuming equally spaced "
"values in time, even though this may not be "
"the case")
ds = times.add_uniform_time_weights(ds)
# Suppress enable_cftimeindex is a no-op warning; we'll keep setting it for
# now to maintain backwards compatibility for older xarray versions.
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
with xr.set_options(enable_cftimeindex=True):
ds = xr.decode_cf(ds, decode_times=True, decode_coords=False,
mask_and_scale=True)
return ds | python | def _prep_time_data(ds):
"""Prepare time coordinate information in Dataset for use in aospy.
1. If the Dataset contains a time bounds coordinate, add attributes
representing the true beginning and end dates of the time interval used
to construct the Dataset
2. If the Dataset contains a time bounds coordinate, overwrite the time
coordinate values with the averages of the time bounds at each timestep
3. Decode the times into np.datetime64 objects for time indexing
Parameters
----------
ds : Dataset
Pre-processed Dataset with time coordinate renamed to
internal_names.TIME_STR
Returns
-------
Dataset
The processed Dataset
"""
ds = times.ensure_time_as_index(ds)
if TIME_BOUNDS_STR in ds:
ds = times.ensure_time_avg_has_cf_metadata(ds)
ds[TIME_STR] = times.average_time_bounds(ds)
else:
logging.warning("dt array not found. Assuming equally spaced "
"values in time, even though this may not be "
"the case")
ds = times.add_uniform_time_weights(ds)
# Suppress enable_cftimeindex is a no-op warning; we'll keep setting it for
# now to maintain backwards compatibility for older xarray versions.
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
with xr.set_options(enable_cftimeindex=True):
ds = xr.decode_cf(ds, decode_times=True, decode_coords=False,
mask_and_scale=True)
return ds | Prepare time coordinate information in Dataset for use in aospy.
1. If the Dataset contains a time bounds coordinate, add attributes
representing the true beginning and end dates of the time interval used
to construct the Dataset
2. If the Dataset contains a time bounds coordinate, overwrite the time
coordinate values with the averages of the time bounds at each timestep
3. Decode the times into np.datetime64 objects for time indexing
Parameters
----------
ds : Dataset
Pre-processed Dataset with time coordinate renamed to
internal_names.TIME_STR
Returns
-------
Dataset
The processed Dataset | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/data_loader.py#L179-L216 |
spencerahill/aospy | aospy/data_loader.py | _load_data_from_disk | def _load_data_from_disk(file_set, preprocess_func=lambda ds: ds,
data_vars='minimal', coords='minimal',
grid_attrs=None, **kwargs):
"""Load a Dataset from a list or glob-string of files.
Datasets from files are concatenated along time,
and all grid attributes are renamed to their aospy internal names.
Parameters
----------
file_set : list or str
List of paths to files or glob-string
preprocess_func : function (optional)
Custom function to call before applying any aospy logic
to the loaded dataset
data_vars : str (default 'minimal')
Mode for concatenating data variables in call to ``xr.open_mfdataset``
coords : str (default 'minimal')
Mode for concatenating coordinate variables in call to
``xr.open_mfdataset``.
grid_attrs : dict
Overriding dictionary of grid attributes mapping aospy internal
names to names of grid attributes used in a particular model.
Returns
-------
Dataset
"""
apply_preload_user_commands(file_set)
func = _preprocess_and_rename_grid_attrs(preprocess_func, grid_attrs,
**kwargs)
return xr.open_mfdataset(file_set, preprocess=func, concat_dim=TIME_STR,
decode_times=False, decode_coords=False,
mask_and_scale=True, data_vars=data_vars,
coords=coords) | python | def _load_data_from_disk(file_set, preprocess_func=lambda ds: ds,
data_vars='minimal', coords='minimal',
grid_attrs=None, **kwargs):
"""Load a Dataset from a list or glob-string of files.
Datasets from files are concatenated along time,
and all grid attributes are renamed to their aospy internal names.
Parameters
----------
file_set : list or str
List of paths to files or glob-string
preprocess_func : function (optional)
Custom function to call before applying any aospy logic
to the loaded dataset
data_vars : str (default 'minimal')
Mode for concatenating data variables in call to ``xr.open_mfdataset``
coords : str (default 'minimal')
Mode for concatenating coordinate variables in call to
``xr.open_mfdataset``.
grid_attrs : dict
Overriding dictionary of grid attributes mapping aospy internal
names to names of grid attributes used in a particular model.
Returns
-------
Dataset
"""
apply_preload_user_commands(file_set)
func = _preprocess_and_rename_grid_attrs(preprocess_func, grid_attrs,
**kwargs)
return xr.open_mfdataset(file_set, preprocess=func, concat_dim=TIME_STR,
decode_times=False, decode_coords=False,
mask_and_scale=True, data_vars=data_vars,
coords=coords) | Load a Dataset from a list or glob-string of files.
Datasets from files are concatenated along time,
and all grid attributes are renamed to their aospy internal names.
Parameters
----------
file_set : list or str
List of paths to files or glob-string
preprocess_func : function (optional)
Custom function to call before applying any aospy logic
to the loaded dataset
data_vars : str (default 'minimal')
Mode for concatenating data variables in call to ``xr.open_mfdataset``
coords : str (default 'minimal')
Mode for concatenating coordinate variables in call to
``xr.open_mfdataset``.
grid_attrs : dict
Overriding dictionary of grid attributes mapping aospy internal
names to names of grid attributes used in a particular model.
Returns
-------
Dataset | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/data_loader.py#L219-L253 |
spencerahill/aospy | aospy/data_loader.py | _setattr_default | def _setattr_default(obj, attr, value, default):
"""Set an attribute of an object to a value or default value."""
if value is None:
setattr(obj, attr, default)
else:
setattr(obj, attr, value) | python | def _setattr_default(obj, attr, value, default):
"""Set an attribute of an object to a value or default value."""
if value is None:
setattr(obj, attr, default)
else:
setattr(obj, attr, value) | Set an attribute of an object to a value or default value. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/data_loader.py#L267-L272 |
spencerahill/aospy | aospy/data_loader.py | DataLoader.load_variable | def load_variable(self, var=None, start_date=None, end_date=None,
time_offset=None, grid_attrs=None, **DataAttrs):
"""Load a DataArray for requested variable and time range.
Automatically renames all grid attributes to match aospy conventions.
Parameters
----------
var : Var
aospy Var object
start_date : datetime.datetime
start date for interval
end_date : datetime.datetime
end date for interval
time_offset : dict
Option to add a time offset to the time coordinate to correct for
incorrect metadata.
grid_attrs : dict (optional)
Overriding dictionary of grid attributes mapping aospy internal
names to names of grid attributes used in a particular model.
**DataAttrs
Attributes needed to identify a unique set of files to load from
Returns
-------
da : DataArray
DataArray for the specified variable, date range, and interval in
"""
file_set = self._generate_file_set(var=var, start_date=start_date,
end_date=end_date, **DataAttrs)
ds = _load_data_from_disk(
file_set, self.preprocess_func, data_vars=self.data_vars,
coords=self.coords, start_date=start_date, end_date=end_date,
time_offset=time_offset, grid_attrs=grid_attrs, **DataAttrs
)
if var.def_time:
ds = _prep_time_data(ds)
start_date = times.maybe_convert_to_index_date_type(
ds.indexes[TIME_STR], start_date)
end_date = times.maybe_convert_to_index_date_type(
ds.indexes[TIME_STR], end_date)
ds = set_grid_attrs_as_coords(ds)
da = _sel_var(ds, var, self.upcast_float32)
if var.def_time:
da = self._maybe_apply_time_shift(da, time_offset, **DataAttrs)
return times.sel_time(da, start_date, end_date).load()
else:
return da.load() | python | def load_variable(self, var=None, start_date=None, end_date=None,
time_offset=None, grid_attrs=None, **DataAttrs):
"""Load a DataArray for requested variable and time range.
Automatically renames all grid attributes to match aospy conventions.
Parameters
----------
var : Var
aospy Var object
start_date : datetime.datetime
start date for interval
end_date : datetime.datetime
end date for interval
time_offset : dict
Option to add a time offset to the time coordinate to correct for
incorrect metadata.
grid_attrs : dict (optional)
Overriding dictionary of grid attributes mapping aospy internal
names to names of grid attributes used in a particular model.
**DataAttrs
Attributes needed to identify a unique set of files to load from
Returns
-------
da : DataArray
DataArray for the specified variable, date range, and interval in
"""
file_set = self._generate_file_set(var=var, start_date=start_date,
end_date=end_date, **DataAttrs)
ds = _load_data_from_disk(
file_set, self.preprocess_func, data_vars=self.data_vars,
coords=self.coords, start_date=start_date, end_date=end_date,
time_offset=time_offset, grid_attrs=grid_attrs, **DataAttrs
)
if var.def_time:
ds = _prep_time_data(ds)
start_date = times.maybe_convert_to_index_date_type(
ds.indexes[TIME_STR], start_date)
end_date = times.maybe_convert_to_index_date_type(
ds.indexes[TIME_STR], end_date)
ds = set_grid_attrs_as_coords(ds)
da = _sel_var(ds, var, self.upcast_float32)
if var.def_time:
da = self._maybe_apply_time_shift(da, time_offset, **DataAttrs)
return times.sel_time(da, start_date, end_date).load()
else:
return da.load() | Load a DataArray for requested variable and time range.
Automatically renames all grid attributes to match aospy conventions.
Parameters
----------
var : Var
aospy Var object
start_date : datetime.datetime
start date for interval
end_date : datetime.datetime
end date for interval
time_offset : dict
Option to add a time offset to the time coordinate to correct for
incorrect metadata.
grid_attrs : dict (optional)
Overriding dictionary of grid attributes mapping aospy internal
names to names of grid attributes used in a particular model.
**DataAttrs
Attributes needed to identify a unique set of files to load from
Returns
-------
da : DataArray
DataArray for the specified variable, date range, and interval in | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/data_loader.py#L277-L324 |
spencerahill/aospy | aospy/data_loader.py | DataLoader._load_or_get_from_model | def _load_or_get_from_model(self, var, start_date=None, end_date=None,
time_offset=None, model=None, **DataAttrs):
"""Load a DataArray for the requested variable and time range
Supports both access of grid attributes either through the DataLoader
or through an optionally-provided Model object. Defaults to using
the version found in the DataLoader first.
"""
grid_attrs = None if model is None else model.grid_attrs
try:
return self.load_variable(
var, start_date=start_date, end_date=end_date,
time_offset=time_offset, grid_attrs=grid_attrs, **DataAttrs)
except (KeyError, IOError) as e:
if var.name not in GRID_ATTRS or model is None:
raise e
else:
try:
return getattr(model, var.name)
except AttributeError:
raise AttributeError(
'Grid attribute {} could not be located either '
'through this DataLoader or in the provided Model '
'object: {}.'.format(var, model)) | python | def _load_or_get_from_model(self, var, start_date=None, end_date=None,
time_offset=None, model=None, **DataAttrs):
"""Load a DataArray for the requested variable and time range
Supports both access of grid attributes either through the DataLoader
or through an optionally-provided Model object. Defaults to using
the version found in the DataLoader first.
"""
grid_attrs = None if model is None else model.grid_attrs
try:
return self.load_variable(
var, start_date=start_date, end_date=end_date,
time_offset=time_offset, grid_attrs=grid_attrs, **DataAttrs)
except (KeyError, IOError) as e:
if var.name not in GRID_ATTRS or model is None:
raise e
else:
try:
return getattr(model, var.name)
except AttributeError:
raise AttributeError(
'Grid attribute {} could not be located either '
'through this DataLoader or in the provided Model '
'object: {}.'.format(var, model)) | Load a DataArray for the requested variable and time range
Supports both access of grid attributes either through the DataLoader
or through an optionally-provided Model object. Defaults to using
the version found in the DataLoader first. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/data_loader.py#L326-L350 |
spencerahill/aospy | aospy/data_loader.py | DataLoader.recursively_compute_variable | def recursively_compute_variable(self, var, start_date=None, end_date=None,
time_offset=None, model=None,
**DataAttrs):
"""Compute a variable recursively, loading data where needed.
An obvious requirement here is that the variable must eventually be
able to be expressed in terms of model-native quantities; otherwise the
recursion will never stop.
Parameters
----------
var : Var
aospy Var object
start_date : datetime.datetime
start date for interval
end_date : datetime.datetime
end date for interval
time_offset : dict
Option to add a time offset to the time coordinate to correct for
incorrect metadata.
model : Model
aospy Model object (optional)
**DataAttrs
Attributes needed to identify a unique set of files to load from
Returns
-------
da : DataArray
DataArray for the specified variable, date range, and interval in
"""
if var.variables is None:
return self._load_or_get_from_model(
var, start_date, end_date, time_offset, model, **DataAttrs)
else:
data = [self.recursively_compute_variable(
v, start_date, end_date, time_offset, model, **DataAttrs)
for v in var.variables]
def recursively_compute_variable(self, var, start_date=None, end_date=None,
                                 time_offset=None, model=None,
                                 **DataAttrs):
    """Compute a variable recursively, loading data where needed.

    An obvious requirement here is that the variable must eventually be
    able to be expressed in terms of model-native quantities; otherwise
    the recursion will never stop.

    Parameters
    ----------
    var : Var
        aospy Var object
    start_date : datetime.datetime
        start date for interval
    end_date : datetime.datetime
        end date for interval
    time_offset : dict
        Option to add a time offset to the time coordinate to correct
        for incorrect metadata.
    model : Model
        aospy Model object (optional)
    **DataAttrs
        Attributes needed to identify a unique set of files to load from

    Returns
    -------
    da : DataArray
        DataArray for the specified variable, date range, and interval.
    """
    # Base case: a model-native variable is loaded directly.
    if var.variables is None:
        return self._load_or_get_from_model(
            var, start_date, end_date, time_offset, model, **DataAttrs)
    # Recursive case: compute each input sub-variable, then combine
    # them via the variable's own function.
    inputs = []
    for sub_var in var.variables:
        inputs.append(self.recursively_compute_variable(
            sub_var, start_date, end_date, time_offset, model,
            **DataAttrs))
    return var.func(*inputs).rename(var.name)
spencerahill/aospy | aospy/data_loader.py | DataLoader._maybe_apply_time_shift | def _maybe_apply_time_shift(da, time_offset=None, **DataAttrs):
"""Apply specified time shift to DataArray"""
if time_offset is not None:
time = times.apply_time_offset(da[TIME_STR], **time_offset)
da[TIME_STR] = time
return da | python | def _maybe_apply_time_shift(da, time_offset=None, **DataAttrs):
"""Apply specified time shift to DataArray"""
if time_offset is not None:
time = times.apply_time_offset(da[TIME_STR], **time_offset)
da[TIME_STR] = time
return da | Apply specified time shift to DataArray | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/data_loader.py#L392-L397 |
spencerahill/aospy | aospy/data_loader.py | DictDataLoader._generate_file_set | def _generate_file_set(self, var=None, start_date=None, end_date=None,
domain=None, intvl_in=None, dtype_in_vert=None,
dtype_in_time=None, intvl_out=None):
"""Returns the file_set for the given interval in."""
try:
return self.file_map[intvl_in]
except KeyError:
raise KeyError('File set does not exist for the specified'
' intvl_in {0}'.format(intvl_in)) | python | def _generate_file_set(self, var=None, start_date=None, end_date=None,
domain=None, intvl_in=None, dtype_in_vert=None,
dtype_in_time=None, intvl_out=None):
"""Returns the file_set for the given interval in."""
try:
return self.file_map[intvl_in]
except KeyError:
raise KeyError('File set does not exist for the specified'
' intvl_in {0}'.format(intvl_in)) | Returns the file_set for the given interval in. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/data_loader.py#L463-L471 |
spencerahill/aospy | aospy/data_loader.py | GFDLDataLoader._maybe_apply_time_shift | def _maybe_apply_time_shift(da, time_offset=None, **DataAttrs):
"""Correct off-by-one error in GFDL instantaneous model data.
Instantaneous data that is outputted by GFDL models is generally off by
one timestep. For example, a netCDF file that is supposed to
correspond to 6 hourly data for the month of January, will have its
last time value be in February.
"""
if time_offset is not None:
time = times.apply_time_offset(da[TIME_STR], **time_offset)
da[TIME_STR] = time
else:
if DataAttrs['dtype_in_time'] == 'inst':
if DataAttrs['intvl_in'].endswith('hr'):
offset = -1 * int(DataAttrs['intvl_in'][0])
else:
offset = 0
time = times.apply_time_offset(da[TIME_STR], hours=offset)
da[TIME_STR] = time
return da | python | def _maybe_apply_time_shift(da, time_offset=None, **DataAttrs):
"""Correct off-by-one error in GFDL instantaneous model data.
Instantaneous data that is outputted by GFDL models is generally off by
one timestep. For example, a netCDF file that is supposed to
correspond to 6 hourly data for the month of January, will have its
last time value be in February.
"""
if time_offset is not None:
time = times.apply_time_offset(da[TIME_STR], **time_offset)
da[TIME_STR] = time
else:
if DataAttrs['dtype_in_time'] == 'inst':
if DataAttrs['intvl_in'].endswith('hr'):
offset = -1 * int(DataAttrs['intvl_in'][0])
else:
offset = 0
time = times.apply_time_offset(da[TIME_STR], hours=offset)
da[TIME_STR] = time
return da | Correct off-by-one error in GFDL instantaneous model data.
Instantaneous data that is outputted by GFDL models is generally off by
one timestep. For example, a netCDF file that is supposed to
correspond to 6 hourly data for the month of January, will have its
last time value be in February. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/data_loader.py#L617-L636 |
def to_plot_units(self, data, dtype_vert=False):
    """Convert the given data to plotting units."""
    # Falsy dtype_vert and 'vert_av' share the standard conversion;
    # vertical integrals carry their own factor.
    if dtype_vert == 'vert_av' or not dtype_vert:
        factor = self.units.plot_units_conv
    elif dtype_vert == 'vert_int':
        factor = self.units.vert_int_plot_units_conv
    else:
        raise ValueError("dtype_vert value `{0}` not recognized. Only "
                         "bool(dtype_vert) = False, 'vert_av', and "
                         "'vert_int' supported.".format(dtype_vert))
    if isinstance(data, dict):
        return {name: arr*factor for name, arr in data.items()}
    return data*factor
def mask_unphysical(self, data):
    """Mask data array where values are outside physically valid range."""
    # A falsy valid_range (None, empty) means no masking at all.
    if not self.valid_range:
        return data
    lower = np.min(self.valid_range)
    upper = np.max(self.valid_range)
    return np.ma.masked_outside(data, lower, upper)
def to_radians(arr, is_delta=False):
    """Force data with units either degrees or radians to be radians."""
    def _convert(a):
        # Single place for the conversion plus its debug log.
        logging.debug("Conversion applied: degrees -> radians to array: "
                      "{}".format(a))
        return np.deg2rad(a)

    # Trust embedded units metadata, if it's there.
    try:
        units = arr.units
    except AttributeError:
        pass
    else:
        if units.lower().startswith('degrees'):
            return _convert(arr)
    # Otherwise, assume degrees only if values are too large for radians.
    # Deltas get a tighter threshold than absolute coordinates.
    threshold = 0.1*np.pi if is_delta else 4*np.pi
    if np.max(np.abs(arr)) > threshold:
        return _convert(arr)
    return arr
def to_pascal(arr, is_dp=False):
    """Force data with units either hPa or Pa to be in Pa."""
    # Pressure thicknesses (dp) are smaller than absolute pressures, so
    # they get a lower hPa-detection threshold.
    threshold = 400 if is_dp else 1200
    if np.max(np.abs(arr)) >= threshold:
        return arr
    logging.debug("Conversion applied: hPa -> Pa to array: {}".format(arr))
    return arr*100.
def to_hpa(arr):
    """Convert pressure array from Pa to hPa (if needed)."""
    # Values at or below 1200 are taken to already be in hPa.
    if np.max(np.abs(arr)) <= 1200.:
        return arr
    logging.debug("Conversion applied: Pa -> hPa to array: {}".format(arr))
    return arr / 100.
def replace_coord(arr, old_dim, new_dim, new_coord):
    """Replace a coordinate with new one; new and old must have same shape."""
    renamed = arr.rename({old_dim: new_dim})
    renamed[new_dim] = new_coord
    return renamed
def to_pfull_from_phalf(arr, pfull_coord):
    """Compute data at full pressure levels from values at half levels."""
    def _relabel(sliced):
        # Re-index a half-level slice onto the full-level coordinate.
        return replace_coord(sliced, internal_names.PHALF_STR,
                             internal_names.PFULL_STR, pfull_coord)

    upper = _relabel(arr.isel(**{internal_names.PHALF_STR: slice(1, None)}))
    lower = _relabel(arr.isel(**{internal_names.PHALF_STR: slice(None, -1)}))
    # A full-level value is the average of the two bounding half levels.
    return 0.5*(lower + upper)
def to_phalf_from_pfull(arr, val_toa=0, val_sfc=0):
    """Compute data at half pressure levels from values at full levels.

    Could be the pressure array itself, but it could also be any other
    data defined at pressure levels.  Requires specification of values
    at surface and top of atmosphere.
    """
    n_lev, n_lat, n_lon = arr.shape
    half = np.zeros((n_lev + 1, n_lat, n_lon))
    half[0] = val_toa       # top-of-atmosphere boundary value
    half[-1] = val_sfc      # surface boundary value
    # Interior half levels are midpoints of adjacent full levels.
    half[1:-1] = 0.5*(arr[:-1] + arr[1:])
    return half
def pfull_from_ps(bk, pk, ps, pfull_coord):
    """Compute pressure at full levels from surface pressure."""
    # Half-level pressures come first; averaging them yields full levels.
    phalf = phalf_from_ps(bk, pk, ps)
    return to_pfull_from_phalf(phalf, pfull_coord)
def d_deta_from_phalf(arr, pfull_coord):
    """Compute pressure level thickness from half level pressures."""
    thickness = arr.diff(dim=internal_names.PHALF_STR, n=1)
    # Differencing leaves the result indexed by half levels; re-label
    # it onto the full-level coordinate.
    return replace_coord(thickness, internal_names.PHALF_STR,
                         internal_names.PFULL_STR, pfull_coord)
def d_deta_from_pfull(arr):
    """Compute $\partial/\partial\eta$ of the array on full hybrid levels.

    $\eta$ is the model vertical coordinate, and its value is assumed to
    simply increment by 1 from 0 at the surface upwards.  The data to be
    differenced is assumed to be defined at full pressure levels.

    Parameters
    ----------
    arr : xarray.DataArray containing the 'pfull' dim

    Returns
    -------
    deriv : xarray.DataArray with the derivative along 'pfull' computed
        via 2nd order centered differencing.
    """  # noqa: W605
    pfull = internal_names.PFULL_STR

    def _vals(selector):
        # Raw numpy values of an index/slice along the pfull dimension.
        return arr[{pfull: selector}].values

    deriv = xr.DataArray(np.zeros(arr.shape), dims=arr.dims,
                         coords=arr.coords)
    # Interior points: centered difference over unit eta spacing.
    deriv[{pfull: slice(1, -1, 1)}] = (
        _vals(slice(2, None, None)) - _vals(slice(0, -2, 1))) / 2.
    # Endpoints: first-order one-sided differences.
    deriv[{pfull: 0}] = _vals(1) - _vals(0)
    deriv[{pfull: -1}] = _vals(-1) - _vals(-2)
    return deriv
def dp_from_ps(bk, pk, ps, pfull_coord):
    """Compute pressure level thickness from surface pressure"""
    # Thickness is the eta-difference of the half-level pressures.
    phalf = phalf_from_ps(bk, pk, ps)
    return d_deta_from_phalf(phalf, pfull_coord)
def integrate(arr, ddim, dim=False, is_pressure=False):
    """Integrate along the given dimension."""
    if is_pressure:
        # Infer the vertical dimension name from the thickness array.
        dim = vert_coord_name(ddim)
    weighted = arr*ddim
    return weighted.sum(dim=dim)
def get_dim_name(arr, names):
    """Determine if an object has an attribute name matching a given list."""
    # TODO: raise warning/exception when multiple names match attrs.
    found = next((name for name in names if hasattr(arr, name)), None)
    if found is None:
        raise AttributeError("No attributes of the object `{0}` match the "
                             "specified names of `{1}`".format(arr, names))
    return found
def int_dp_g(arr, dp):
    """Mass weighted integral."""
    dp_pa = to_pascal(dp, is_dp=True)
    # Dividing the pressure integral by g converts it to a mass integral.
    return integrate(arr, dp_pa, vert_coord_name(dp)) / GRAV_EARTH
def dp_from_p(p, ps, p_top=0., p_bot=1.1e5):
    """Get level thickness of pressure data, incorporating surface pressure.

    Level edges are defined as halfway between the levels, as well as the
    user-specified uppermost and lowermost values.  The dp of levels whose
    bottom pressure is less than the surface pressure is not changed by
    ps, since they don't intersect the surface.  If ps is in between a
    level's top and bottom pressures, then its dp becomes the pressure
    difference between its top and ps.  If ps is less than a level's top
    and bottom pressures, then that level is underground and its values
    are masked.

    Note that postprocessing routines (e.g. at GFDL) typically mask out
    data wherever the surface pressure is less than the level's given
    value, not the level's upper edge.  This masks out more levels than
    the approach taken here.
    """
    p_str = get_dim_name(p, (internal_names.PLEVEL_STR, 'plev'))
    p_vals = to_pascal(p.values.copy())

    # Layer edges are halfway between the given pressure levels.
    p_edges_interior = 0.5*(p_vals[:-1] + p_vals[1:])
    p_edges = np.concatenate(([p_bot], p_edges_interior, [p_top]))
    p_edge_above = p_edges[1:]
    p_edge_below = p_edges[:-1]
    dp = p_edge_below - p_edge_above
    # BUG FIX: the original check was `not all(np.sign(dp))`, which only
    # catches dp == 0, since np.sign of a negative value is -1 and hence
    # truthy -- even though the error message promises dp > 0.  Test the
    # stated condition directly.
    if not np.all(dp > 0):
        raise ValueError("dp array not all > 0 : {}".format(dp))
    # Pressure difference between ps and the upper edge of each pressure
    # level.
    p_edge_above_xr = xr.DataArray(p_edge_above, dims=p.dims, coords=p.coords)
    dp_to_sfc = ps - p_edge_above_xr
    # Find the level adjacent to the masked, under-ground levels.
    change = xr.DataArray(np.zeros(dp_to_sfc.shape), dims=dp_to_sfc.dims,
                          coords=dp_to_sfc.coords)
    change[{p_str: slice(1, None)}] = np.diff(
        np.sign(ps - to_pascal(p.copy()))
    )
    dp_combined = xr.DataArray(np.where(change, dp_to_sfc, dp),
                               dims=dp_to_sfc.dims, coords=dp_to_sfc.coords)
    # Mask levels that are under ground.
    above_ground = ps > to_pascal(p.copy())
    above_ground[p_str] = p[p_str]
    dp_with_ps = dp_combined.where(above_ground)
    # Revert to original dim order; try each plausible order until one
    # matches the array's actual dimensions.
    possible_dim_orders = [
        (internal_names.TIME_STR, p_str, internal_names.LAT_STR,
         internal_names.LON_STR),
        (internal_names.TIME_STR, p_str, internal_names.LAT_STR),
        (internal_names.TIME_STR, p_str, internal_names.LON_STR),
        (internal_names.TIME_STR, p_str),
        (p_str, internal_names.LAT_STR, internal_names.LON_STR),
        (p_str, internal_names.LAT_STR),
        (p_str, internal_names.LON_STR),
        (p_str,),
    ]
    for dim_order in possible_dim_orders:
        try:
            return dp_with_ps.transpose(*dim_order)
        except ValueError:
            logging.debug("Failed transpose to dims: {}".format(dim_order))
    logging.debug("No transpose was successful.")
    return dp_with_ps
def level_thickness(p, p_top=0., p_bot=1.01325e5):
    """
    Calculates the thickness, in Pa, of each pressure level.

    Assumes that the pressure values given are at the center of that
    model level, except for the lowest value (typically 1000 hPa), which
    is the bottom boundary.  The uppermost level extends to 0 hPa.

    Unlike `dp_from_p`, this does not incorporate the surface pressure.
    """
    centers = to_pascal(p.values.copy())
    thickness = np.empty_like(centers)
    # Bottom level: from p_bot up to the midpoint of the lowest two.
    thickness[0] = p_bot - 0.5*(centers[0] + centers[1])
    # Interior levels: between the midpoints with each neighbor.
    thickness[1:-1] = 0.5*(centers[0:-2] - centers[2:])
    # Top level: from the midpoint of the top two levels up to p_top.
    thickness[-1] = 0.5*(centers[-2] + centers[-1]) - p_top
    result = p.copy()
    result.values = thickness
    return result
def does_coord_increase_w_index(arr):
    """Determine if the array values increase with the index.

    Useful, e.g., for pressure, which sometimes is indexed surface to
    TOA and sometimes the opposite.

    Parameters
    ----------
    arr : array-like, 1-D
        The coordinate values, in index order.

    Returns
    -------
    bool
        True if values strictly increase with index, False if they
        strictly decrease.

    Raises
    ------
    ValueError
        If the array is not strictly monotonic.
    """
    diff = np.diff(arr)
    increasing = np.all(diff > 0)
    decreasing = np.all(diff < 0)
    # BUG FIX: the original returned `bool(diff[0])`, which is True for
    # any nonzero first difference -- including negative ones -- so a
    # strictly *decreasing* coordinate was wrongly reported as
    # increasing.  Its monotonicity check, `all(np.abs(np.sign(diff)))`,
    # likewise only rejected repeated values, not direction changes.
    if not (increasing or decreasing):
        raise ValueError("Array is not monotonic: {}".format(arr))
    return bool(increasing)
def apply_time_offset(time, years=0, months=0, days=0, hours=0):
    """Apply a specified offset to the given time array.

    This is useful for GFDL model output of instantaneous values.  For
    example, 3 hourly data postprocessed to netCDF files spanning 1 year
    each will actually have time values that are offset by 3 hours, such
    that the first value is for 1 Jan 03:00 and the last value is 1 Jan
    00:00 of the subsequent year.  This causes problems in xarray, e.g.
    when trying to group by month.  It is resolved by manually
    subtracting off those three hours, such that the dates span from
    1 Jan 00:00 to 31 Dec 21:00 as desired.

    Parameters
    ----------
    time : xarray.DataArray representing a timeseries
    years, months, days, hours : int, optional
        The number of years, months, days, and hours, respectively, to
        offset the time array by.  Positive values move the times later.

    Returns
    -------
    pandas.DatetimeIndex

    Examples
    --------
    Case of a length-1 input time array:

    >>> times = xr.DataArray(datetime.datetime(1899, 12, 31, 21))
    >>> apply_time_offset(times)
    Timestamp('1900-01-01 00:00:00')

    Case of input time array with length greater than one:

    >>> times = xr.DataArray([datetime.datetime(1899, 12, 31, 21),
    ...                       datetime.datetime(1899, 1, 31, 21)])
    >>> apply_time_offset(times)  # doctest: +NORMALIZE_WHITESPACE
    DatetimeIndex(['1900-01-01', '1899-02-01'], dtype='datetime64[ns]',
                  freq=None)
    """
    offset = pd.DateOffset(years=years, months=months, days=days,
                           hours=hours)
    return pd.to_datetime(time.values) + offset
def average_time_bounds(ds):
    """Return the average of each set of time bounds in the Dataset.

    Useful for creating a new time array to replace the Dataset's native
    time array, in the case that the latter matches either the start or
    end bounds.  This can cause errors in grouping (akin to an off-by-one
    error) if the timesteps span e.g. one full month each.  Note that the
    Dataset's times must not have already undergone "CF decoding",
    wherein they are converted from floats using the 'units' attribute
    into datetime objects.

    Parameters
    ----------
    ds : xarray.Dataset
        A Dataset containing a time bounds array with name matching
        internal_names.TIME_BOUNDS_STR.  This time bounds array must have
        two dimensions, one of which's coordinates is the Dataset's time
        array, and the other is length-2.

    Returns
    -------
    xarray.DataArray
        The mean of the start and end times of each timestep in the
        original Dataset.

    Raises
    ------
    ValueError
        If the time bounds array doesn't match the shape specified above.
    """
    # Average over the length-2 bounds dimension to get midpoint times.
    midpoints = ds[TIME_BOUNDS_STR].mean(dim=BOUNDS_STR, keep_attrs=True)
    # Drop the stale time coordinate, rename the array to the time name,
    # and make it its own coordinate so it can serve as a replacement
    # time index.
    midpoints = midpoints.drop(TIME_STR).rename(TIME_STR)
    midpoints[TIME_STR] = midpoints
    return midpoints
Useful for creating a new time array to replace the Dataset's native time
array, in the case that the latter matches either the start or end bounds.
This can cause errors in grouping (akin to an off-by-one error) if the
timesteps span e.g. one full month each. Note that the Dataset's times
must not have already undergone "CF decoding", wherein they are converted
from floats using the 'units' attribute into datetime objects.
Parameters
----------
ds : xarray.Dataset
A Dataset containing a time bounds array with name matching
internal_names.TIME_BOUNDS_STR. This time bounds array must have two
dimensions, one of which's coordinates is the Dataset's time array, and
the other is length-2.
Returns
-------
xarray.DataArray
The mean of the start and end times of each timestep in the original
Dataset.
Raises
------
ValueError
If the time bounds array doesn't match the shape specified above. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L61-L95 |
spencerahill/aospy | aospy/utils/times.py | monthly_mean_at_each_ind | def monthly_mean_at_each_ind(monthly_means, sub_monthly_timeseries):
"""Copy monthly mean over each time index in that month.
Parameters
----------
monthly_means : xarray.DataArray
array of monthly means
sub_monthly_timeseries : xarray.DataArray
array of a timeseries at sub-monthly time resolution
Returns
-------
xarray.DataArray with eath monthly mean value from `monthly_means` repeated
at each time within that month from `sub_monthly_timeseries`
See Also
--------
monthly_mean_ts : Create timeseries of monthly mean values
"""
time = monthly_means[TIME_STR]
start = time.indexes[TIME_STR][0].replace(day=1, hour=0)
end = time.indexes[TIME_STR][-1]
new_indices = pd.DatetimeIndex(start=start, end=end, freq='MS')
arr_new = monthly_means.reindex(time=new_indices, method='backfill')
return arr_new.reindex_like(sub_monthly_timeseries, method='pad') | python | def monthly_mean_at_each_ind(monthly_means, sub_monthly_timeseries):
"""Copy monthly mean over each time index in that month.
Parameters
----------
monthly_means : xarray.DataArray
array of monthly means
sub_monthly_timeseries : xarray.DataArray
array of a timeseries at sub-monthly time resolution
Returns
-------
xarray.DataArray with eath monthly mean value from `monthly_means` repeated
at each time within that month from `sub_monthly_timeseries`
See Also
--------
monthly_mean_ts : Create timeseries of monthly mean values
"""
time = monthly_means[TIME_STR]
start = time.indexes[TIME_STR][0].replace(day=1, hour=0)
end = time.indexes[TIME_STR][-1]
new_indices = pd.DatetimeIndex(start=start, end=end, freq='MS')
arr_new = monthly_means.reindex(time=new_indices, method='backfill')
return arr_new.reindex_like(sub_monthly_timeseries, method='pad') | Copy monthly mean over each time index in that month.
Parameters
----------
monthly_means : xarray.DataArray
array of monthly means
sub_monthly_timeseries : xarray.DataArray
array of a timeseries at sub-monthly time resolution
Returns
-------
xarray.DataArray with eath monthly mean value from `monthly_means` repeated
at each time within that month from `sub_monthly_timeseries`
See Also
--------
monthly_mean_ts : Create timeseries of monthly mean values | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L121-L145 |
spencerahill/aospy | aospy/utils/times.py | yearly_average | def yearly_average(arr, dt):
"""Average a sub-yearly time-series over each year.
Resulting timeseries comprises one value for each year in which the
original array had valid data. Accounts for (i.e. ignores) masked values
in original data when computing the annual averages.
Parameters
----------
arr : xarray.DataArray
The array to be averaged
dt : xarray.DataArray
Array of the duration of each timestep
Returns
-------
xarray.DataArray
Has the same shape and mask as the original ``arr``, except for the
time dimension, which is truncated to one value for each year that
``arr`` spanned
"""
assert_matching_time_coord(arr, dt)
yr_str = TIME_STR + '.year'
# Retain original data's mask.
dt = dt.where(np.isfinite(arr))
return ((arr*dt).groupby(yr_str).sum(TIME_STR) /
dt.groupby(yr_str).sum(TIME_STR)) | python | def yearly_average(arr, dt):
"""Average a sub-yearly time-series over each year.
Resulting timeseries comprises one value for each year in which the
original array had valid data. Accounts for (i.e. ignores) masked values
in original data when computing the annual averages.
Parameters
----------
arr : xarray.DataArray
The array to be averaged
dt : xarray.DataArray
Array of the duration of each timestep
Returns
-------
xarray.DataArray
Has the same shape and mask as the original ``arr``, except for the
time dimension, which is truncated to one value for each year that
``arr`` spanned
"""
assert_matching_time_coord(arr, dt)
yr_str = TIME_STR + '.year'
# Retain original data's mask.
dt = dt.where(np.isfinite(arr))
return ((arr*dt).groupby(yr_str).sum(TIME_STR) /
dt.groupby(yr_str).sum(TIME_STR)) | Average a sub-yearly time-series over each year.
Resulting timeseries comprises one value for each year in which the
original array had valid data. Accounts for (i.e. ignores) masked values
in original data when computing the annual averages.
Parameters
----------
arr : xarray.DataArray
The array to be averaged
dt : xarray.DataArray
Array of the duration of each timestep
Returns
-------
xarray.DataArray
Has the same shape and mask as the original ``arr``, except for the
time dimension, which is truncated to one value for each year that
``arr`` spanned | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L148-L175 |
spencerahill/aospy | aospy/utils/times.py | ensure_datetime | def ensure_datetime(obj):
"""Return the object if it is a datetime-like object
Parameters
----------
obj : Object to be tested.
Returns
-------
The original object if it is a datetime-like object
Raises
------
TypeError if `obj` is not datetime-like
"""
_VALID_TYPES = (str, datetime.datetime, cftime.datetime,
np.datetime64)
if isinstance(obj, _VALID_TYPES):
return obj
raise TypeError("datetime-like object required. "
"Type given: {}".format(type(obj))) | python | def ensure_datetime(obj):
"""Return the object if it is a datetime-like object
Parameters
----------
obj : Object to be tested.
Returns
-------
The original object if it is a datetime-like object
Raises
------
TypeError if `obj` is not datetime-like
"""
_VALID_TYPES = (str, datetime.datetime, cftime.datetime,
np.datetime64)
if isinstance(obj, _VALID_TYPES):
return obj
raise TypeError("datetime-like object required. "
"Type given: {}".format(type(obj))) | Return the object if it is a datetime-like object
Parameters
----------
obj : Object to be tested.
Returns
-------
The original object if it is a datetime-like object
Raises
------
TypeError if `obj` is not datetime-like | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L178-L198 |
spencerahill/aospy | aospy/utils/times.py | month_indices | def month_indices(months):
"""Convert string labels for months to integer indices.
Parameters
----------
months : str, int
If int, number of the desired month, where January=1, February=2,
etc. If str, must match either 'ann' or some subset of
'jfmamjjasond'. If 'ann', use all months. Otherwise, use the
specified months.
Returns
-------
np.ndarray of integers corresponding to desired month indices
Raises
------
TypeError : If `months` is not an int or str
See also
--------
_month_conditional
"""
if not isinstance(months, (int, str)):
raise TypeError("`months` must be of type int or str: "
"type(months) == {}".format(type(months)))
if isinstance(months, int):
return [months]
if months.lower() == 'ann':
return np.arange(1, 13)
first_letter = 'jfmamjjasond' * 2
# Python indexing starts at 0; month indices start at 1 for January.
count = first_letter.count(months)
if (count == 0) or (count > 2):
message = ("The user must provide a unique pattern of consecutive "
"first letters of months within '{}'. The provided "
"string '{}' does not comply."
" For individual months use integers."
"".format(first_letter, months))
raise ValueError(message)
st_ind = first_letter.find(months.lower())
return np.arange(st_ind, st_ind + len(months)) % 12 + 1 | python | def month_indices(months):
"""Convert string labels for months to integer indices.
Parameters
----------
months : str, int
If int, number of the desired month, where January=1, February=2,
etc. If str, must match either 'ann' or some subset of
'jfmamjjasond'. If 'ann', use all months. Otherwise, use the
specified months.
Returns
-------
np.ndarray of integers corresponding to desired month indices
Raises
------
TypeError : If `months` is not an int or str
See also
--------
_month_conditional
"""
if not isinstance(months, (int, str)):
raise TypeError("`months` must be of type int or str: "
"type(months) == {}".format(type(months)))
if isinstance(months, int):
return [months]
if months.lower() == 'ann':
return np.arange(1, 13)
first_letter = 'jfmamjjasond' * 2
# Python indexing starts at 0; month indices start at 1 for January.
count = first_letter.count(months)
if (count == 0) or (count > 2):
message = ("The user must provide a unique pattern of consecutive "
"first letters of months within '{}'. The provided "
"string '{}' does not comply."
" For individual months use integers."
"".format(first_letter, months))
raise ValueError(message)
st_ind = first_letter.find(months.lower())
return np.arange(st_ind, st_ind + len(months)) % 12 + 1 | Convert string labels for months to integer indices.
Parameters
----------
months : str, int
If int, number of the desired month, where January=1, February=2,
etc. If str, must match either 'ann' or some subset of
'jfmamjjasond'. If 'ann', use all months. Otherwise, use the
specified months.
Returns
-------
np.ndarray of integers corresponding to desired month indices
Raises
------
TypeError : If `months` is not an int or str
See also
--------
_month_conditional | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L221-L262 |
spencerahill/aospy | aospy/utils/times.py | _month_conditional | def _month_conditional(time, months):
"""Create a conditional statement for selecting data in a DataArray.
Parameters
----------
time : xarray.DataArray
Array of times for which to subsample for specific months.
months : int, str, or xarray.DataArray of times
If int or str, passed to `month_indices`
Returns
-------
Array of bools specifying which months to keep
See Also
--------
month_indices
"""
if isinstance(months, (int, str)):
months_array = month_indices(months)
else:
months_array = months
cond = False
for month in months_array:
cond |= (time['{}.month'.format(TIME_STR)] == month)
return cond | python | def _month_conditional(time, months):
"""Create a conditional statement for selecting data in a DataArray.
Parameters
----------
time : xarray.DataArray
Array of times for which to subsample for specific months.
months : int, str, or xarray.DataArray of times
If int or str, passed to `month_indices`
Returns
-------
Array of bools specifying which months to keep
See Also
--------
month_indices
"""
if isinstance(months, (int, str)):
months_array = month_indices(months)
else:
months_array = months
cond = False
for month in months_array:
cond |= (time['{}.month'.format(TIME_STR)] == month)
return cond | Create a conditional statement for selecting data in a DataArray.
Parameters
----------
time : xarray.DataArray
Array of times for which to subsample for specific months.
months : int, str, or xarray.DataArray of times
If int or str, passed to `month_indices`
Returns
-------
Array of bools specifying which months to keep
See Also
--------
month_indices | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L265-L289 |
spencerahill/aospy | aospy/utils/times.py | extract_months | def extract_months(time, months):
"""Extract times within specified months of the year.
Parameters
----------
time : xarray.DataArray
Array of times that can be represented by numpy.datetime64 objects
(i.e. the year is between 1678 and 2262).
months : Desired months of the year to include
Returns
-------
xarray.DataArray of the desired times
"""
inds = _month_conditional(time, months)
return time.sel(time=inds) | python | def extract_months(time, months):
"""Extract times within specified months of the year.
Parameters
----------
time : xarray.DataArray
Array of times that can be represented by numpy.datetime64 objects
(i.e. the year is between 1678 and 2262).
months : Desired months of the year to include
Returns
-------
xarray.DataArray of the desired times
"""
inds = _month_conditional(time, months)
return time.sel(time=inds) | Extract times within specified months of the year.
Parameters
----------
time : xarray.DataArray
Array of times that can be represented by numpy.datetime64 objects
(i.e. the year is between 1678 and 2262).
months : Desired months of the year to include
Returns
-------
xarray.DataArray of the desired times | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L292-L307 |
spencerahill/aospy | aospy/utils/times.py | ensure_time_avg_has_cf_metadata | def ensure_time_avg_has_cf_metadata(ds):
"""Add time interval length and bounds coordinates for time avg data.
If the Dataset or DataArray contains time average data, enforce
that there are coordinates that track the lower and upper bounds of
the time intervals, and that there is a coordinate that tracks the
amount of time per time average interval.
CF conventions require that a quantity stored as time averages
over time intervals must have time and time_bounds coordinates [1]_.
aospy further requires AVERAGE_DT for time average data, for accurate
time-weighted averages, which can be inferred from the CF-required
time_bounds coordinate if needed. This step should be done
prior to decoding CF metadata with xarray to ensure proper
computed timedeltas for different calendar types.
.. [1] http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#_data_representative_of_cells
Parameters
----------
ds : Dataset or DataArray
Input data
Returns
-------
Dataset or DataArray
Time average metadata attributes added if needed.
""" # noqa: E501
if TIME_WEIGHTS_STR not in ds:
time_weights = ds[TIME_BOUNDS_STR].diff(BOUNDS_STR)
time_weights = time_weights.rename(TIME_WEIGHTS_STR).squeeze()
if BOUNDS_STR in time_weights.coords:
time_weights = time_weights.drop(BOUNDS_STR)
ds[TIME_WEIGHTS_STR] = time_weights
raw_start_date = ds[TIME_BOUNDS_STR].isel(**{TIME_STR: 0, BOUNDS_STR: 0})
ds[RAW_START_DATE_STR] = raw_start_date.reset_coords(drop=True)
raw_end_date = ds[TIME_BOUNDS_STR].isel(**{TIME_STR: -1, BOUNDS_STR: 1})
ds[RAW_END_DATE_STR] = raw_end_date.reset_coords(drop=True)
for coord in [TIME_BOUNDS_STR, RAW_START_DATE_STR, RAW_END_DATE_STR]:
ds[coord].attrs['units'] = ds[TIME_STR].attrs['units']
if 'calendar' in ds[TIME_STR].attrs:
ds[coord].attrs['calendar'] = ds[TIME_STR].attrs['calendar']
unit_interval = ds[TIME_STR].attrs['units'].split('since')[0].strip()
ds[TIME_WEIGHTS_STR].attrs['units'] = unit_interval
return ds | python | def ensure_time_avg_has_cf_metadata(ds):
"""Add time interval length and bounds coordinates for time avg data.
If the Dataset or DataArray contains time average data, enforce
that there are coordinates that track the lower and upper bounds of
the time intervals, and that there is a coordinate that tracks the
amount of time per time average interval.
CF conventions require that a quantity stored as time averages
over time intervals must have time and time_bounds coordinates [1]_.
aospy further requires AVERAGE_DT for time average data, for accurate
time-weighted averages, which can be inferred from the CF-required
time_bounds coordinate if needed. This step should be done
prior to decoding CF metadata with xarray to ensure proper
computed timedeltas for different calendar types.
.. [1] http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#_data_representative_of_cells
Parameters
----------
ds : Dataset or DataArray
Input data
Returns
-------
Dataset or DataArray
Time average metadata attributes added if needed.
""" # noqa: E501
if TIME_WEIGHTS_STR not in ds:
time_weights = ds[TIME_BOUNDS_STR].diff(BOUNDS_STR)
time_weights = time_weights.rename(TIME_WEIGHTS_STR).squeeze()
if BOUNDS_STR in time_weights.coords:
time_weights = time_weights.drop(BOUNDS_STR)
ds[TIME_WEIGHTS_STR] = time_weights
raw_start_date = ds[TIME_BOUNDS_STR].isel(**{TIME_STR: 0, BOUNDS_STR: 0})
ds[RAW_START_DATE_STR] = raw_start_date.reset_coords(drop=True)
raw_end_date = ds[TIME_BOUNDS_STR].isel(**{TIME_STR: -1, BOUNDS_STR: 1})
ds[RAW_END_DATE_STR] = raw_end_date.reset_coords(drop=True)
for coord in [TIME_BOUNDS_STR, RAW_START_DATE_STR, RAW_END_DATE_STR]:
ds[coord].attrs['units'] = ds[TIME_STR].attrs['units']
if 'calendar' in ds[TIME_STR].attrs:
ds[coord].attrs['calendar'] = ds[TIME_STR].attrs['calendar']
unit_interval = ds[TIME_STR].attrs['units'].split('since')[0].strip()
ds[TIME_WEIGHTS_STR].attrs['units'] = unit_interval
return ds | Add time interval length and bounds coordinates for time avg data.
If the Dataset or DataArray contains time average data, enforce
that there are coordinates that track the lower and upper bounds of
the time intervals, and that there is a coordinate that tracks the
amount of time per time average interval.
CF conventions require that a quantity stored as time averages
over time intervals must have time and time_bounds coordinates [1]_.
aospy further requires AVERAGE_DT for time average data, for accurate
time-weighted averages, which can be inferred from the CF-required
time_bounds coordinate if needed. This step should be done
prior to decoding CF metadata with xarray to ensure proper
computed timedeltas for different calendar types.
.. [1] http://cfconventions.org/cf-conventions/v1.6.0/cf-conventions.html#_data_representative_of_cells
Parameters
----------
ds : Dataset or DataArray
Input data
Returns
-------
Dataset or DataArray
Time average metadata attributes added if needed. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L310-L357 |
spencerahill/aospy | aospy/utils/times.py | add_uniform_time_weights | def add_uniform_time_weights(ds):
"""Append uniform time weights to a Dataset.
All DataArrays with a time coordinate require a time weights coordinate.
For Datasets read in without a time bounds coordinate or explicit
time weights built in, aospy adds uniform time weights at each point
in the time coordinate.
Parameters
----------
ds : Dataset
Input data
Returns
-------
Dataset
"""
time = ds[TIME_STR]
unit_interval = time.attrs['units'].split('since')[0].strip()
time_weights = xr.ones_like(time)
time_weights.attrs['units'] = unit_interval
del time_weights.attrs['calendar']
ds[TIME_WEIGHTS_STR] = time_weights
return ds | python | def add_uniform_time_weights(ds):
"""Append uniform time weights to a Dataset.
All DataArrays with a time coordinate require a time weights coordinate.
For Datasets read in without a time bounds coordinate or explicit
time weights built in, aospy adds uniform time weights at each point
in the time coordinate.
Parameters
----------
ds : Dataset
Input data
Returns
-------
Dataset
"""
time = ds[TIME_STR]
unit_interval = time.attrs['units'].split('since')[0].strip()
time_weights = xr.ones_like(time)
time_weights.attrs['units'] = unit_interval
del time_weights.attrs['calendar']
ds[TIME_WEIGHTS_STR] = time_weights
return ds | Append uniform time weights to a Dataset.
All DataArrays with a time coordinate require a time weights coordinate.
For Datasets read in without a time bounds coordinate or explicit
time weights built in, aospy adds uniform time weights at each point
in the time coordinate.
Parameters
----------
ds : Dataset
Input data
Returns
-------
Dataset | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L360-L383 |
spencerahill/aospy | aospy/utils/times.py | _assert_has_data_for_time | def _assert_has_data_for_time(da, start_date, end_date):
"""Check to make sure data is in Dataset for the given time range.
Parameters
----------
da : DataArray
DataArray with a time variable
start_date : datetime-like object or str
start date
end_date : datetime-like object or str
end date
Raises
------
AssertionError
If the time range is not within the time range of the DataArray
"""
if isinstance(start_date, str) and isinstance(end_date, str):
logging.warning(
'When using strings to specify start and end dates, the check '
'to determine if data exists for the full extent of the desired '
'interval is not implemented. Therefore it is possible that '
'you are doing a calculation for a lesser interval than you '
'specified. If you would like this check to occur, use explicit '
'datetime-like objects for bounds instead.')
return
if RAW_START_DATE_STR in da.coords:
with warnings.catch_warnings(record=True):
da_start = da[RAW_START_DATE_STR].values
da_end = da[RAW_END_DATE_STR].values
else:
times = da.time.isel(**{TIME_STR: [0, -1]})
da_start, da_end = times.values
message = ('Data does not exist for requested time range: {0} to {1};'
' found data from time range: {2} to {3}.')
# Add tolerance of one second, due to precision of cftime.datetimes
tol = datetime.timedelta(seconds=1)
if isinstance(da_start, np.datetime64):
tol = np.timedelta64(tol, 'ns')
range_exists = ((da_start - tol) <= start_date and
(da_end + tol) >= end_date)
assert (range_exists), message.format(start_date, end_date,
da_start, da_end) | python | def _assert_has_data_for_time(da, start_date, end_date):
"""Check to make sure data is in Dataset for the given time range.
Parameters
----------
da : DataArray
DataArray with a time variable
start_date : datetime-like object or str
start date
end_date : datetime-like object or str
end date
Raises
------
AssertionError
If the time range is not within the time range of the DataArray
"""
if isinstance(start_date, str) and isinstance(end_date, str):
logging.warning(
'When using strings to specify start and end dates, the check '
'to determine if data exists for the full extent of the desired '
'interval is not implemented. Therefore it is possible that '
'you are doing a calculation for a lesser interval than you '
'specified. If you would like this check to occur, use explicit '
'datetime-like objects for bounds instead.')
return
if RAW_START_DATE_STR in da.coords:
with warnings.catch_warnings(record=True):
da_start = da[RAW_START_DATE_STR].values
da_end = da[RAW_END_DATE_STR].values
else:
times = da.time.isel(**{TIME_STR: [0, -1]})
da_start, da_end = times.values
message = ('Data does not exist for requested time range: {0} to {1};'
' found data from time range: {2} to {3}.')
# Add tolerance of one second, due to precision of cftime.datetimes
tol = datetime.timedelta(seconds=1)
if isinstance(da_start, np.datetime64):
tol = np.timedelta64(tol, 'ns')
range_exists = ((da_start - tol) <= start_date and
(da_end + tol) >= end_date)
assert (range_exists), message.format(start_date, end_date,
da_start, da_end) | Check to make sure data is in Dataset for the given time range.
Parameters
----------
da : DataArray
DataArray with a time variable
start_date : datetime-like object or str
start date
end_date : datetime-like object or str
end date
Raises
------
AssertionError
If the time range is not within the time range of the DataArray | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L386-L431 |
spencerahill/aospy | aospy/utils/times.py | sel_time | def sel_time(da, start_date, end_date):
"""Subset a DataArray or Dataset for a given date range.
Ensures that data are present for full extent of requested range.
Appends start and end date of the subset to the DataArray.
Parameters
----------
da : DataArray or Dataset
data to subset
start_date : np.datetime64
start of date interval
end_date : np.datetime64
end of date interval
Returns
----------
da : DataArray or Dataset
subsetted data
Raises
------
AssertionError
if data for requested range do not exist for part or all of
requested range
"""
_assert_has_data_for_time(da, start_date, end_date)
da[SUBSET_START_DATE_STR] = xr.DataArray(start_date)
da[SUBSET_END_DATE_STR] = xr.DataArray(end_date)
return da.sel(**{TIME_STR: slice(start_date, end_date)}) | python | def sel_time(da, start_date, end_date):
"""Subset a DataArray or Dataset for a given date range.
Ensures that data are present for full extent of requested range.
Appends start and end date of the subset to the DataArray.
Parameters
----------
da : DataArray or Dataset
data to subset
start_date : np.datetime64
start of date interval
end_date : np.datetime64
end of date interval
Returns
----------
da : DataArray or Dataset
subsetted data
Raises
------
AssertionError
if data for requested range do not exist for part or all of
requested range
"""
_assert_has_data_for_time(da, start_date, end_date)
da[SUBSET_START_DATE_STR] = xr.DataArray(start_date)
da[SUBSET_END_DATE_STR] = xr.DataArray(end_date)
return da.sel(**{TIME_STR: slice(start_date, end_date)}) | Subset a DataArray or Dataset for a given date range.
Ensures that data are present for full extent of requested range.
Appends start and end date of the subset to the DataArray.
Parameters
----------
da : DataArray or Dataset
data to subset
start_date : np.datetime64
start of date interval
end_date : np.datetime64
end of date interval
Returns
----------
da : DataArray or Dataset
subsetted data
Raises
------
AssertionError
if data for requested range do not exist for part or all of
requested range | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L434-L463 |
spencerahill/aospy | aospy/utils/times.py | assert_matching_time_coord | def assert_matching_time_coord(arr1, arr2):
"""Check to see if two DataArrays have the same time coordinate.
Parameters
----------
arr1 : DataArray or Dataset
First DataArray or Dataset
arr2 : DataArray or Dataset
Second DataArray or Dataset
Raises
------
ValueError
If the time coordinates are not identical between the two Datasets
"""
message = ('Time weights not indexed by the same time coordinate as'
' computed data. This will lead to an improperly computed'
' time weighted average. Exiting.\n'
'arr1: {}\narr2: {}')
if not (arr1[TIME_STR].identical(arr2[TIME_STR])):
raise ValueError(message.format(arr1[TIME_STR], arr2[TIME_STR])) | python | def assert_matching_time_coord(arr1, arr2):
"""Check to see if two DataArrays have the same time coordinate.
Parameters
----------
arr1 : DataArray or Dataset
First DataArray or Dataset
arr2 : DataArray or Dataset
Second DataArray or Dataset
Raises
------
ValueError
If the time coordinates are not identical between the two Datasets
"""
message = ('Time weights not indexed by the same time coordinate as'
' computed data. This will lead to an improperly computed'
' time weighted average. Exiting.\n'
'arr1: {}\narr2: {}')
if not (arr1[TIME_STR].identical(arr2[TIME_STR])):
raise ValueError(message.format(arr1[TIME_STR], arr2[TIME_STR])) | Check to see if two DataArrays have the same time coordinate.
Parameters
----------
arr1 : DataArray or Dataset
First DataArray or Dataset
arr2 : DataArray or Dataset
Second DataArray or Dataset
Raises
------
ValueError
If the time coordinates are not identical between the two Datasets | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L466-L486 |
spencerahill/aospy | aospy/utils/times.py | ensure_time_as_index | def ensure_time_as_index(ds):
"""Ensures that time is an indexed coordinate on relevant quantites.
Sometimes when the data we load from disk has only one timestep, the
indexing of time-defined quantities in the resulting xarray.Dataset gets
messed up, in that the time bounds array and data variables don't get
indexed by time, even though they should. Therefore, we need this helper
function to (possibly) correct this.
Note that this must be applied before CF-conventions are decoded; otherwise
it casts ``np.datetime64[ns]`` as ``int`` values.
Parameters
----------
ds : Dataset
Dataset with a time coordinate
Returns
-------
Dataset
"""
time_indexed_coords = {TIME_WEIGHTS_STR, TIME_BOUNDS_STR}
time_indexed_vars = set(ds.data_vars).union(time_indexed_coords)
time_indexed_vars = time_indexed_vars.intersection(ds.variables)
variables_to_replace = {}
for name in time_indexed_vars:
if TIME_STR not in ds[name].indexes:
da = ds[name]
if TIME_STR not in da.dims:
da = ds[name].expand_dims(TIME_STR)
da = da.assign_coords(**{TIME_STR: ds[TIME_STR]})
variables_to_replace[name] = da
return ds.assign(**variables_to_replace) | python | def ensure_time_as_index(ds):
"""Ensures that time is an indexed coordinate on relevant quantites.
Sometimes when the data we load from disk has only one timestep, the
indexing of time-defined quantities in the resulting xarray.Dataset gets
messed up, in that the time bounds array and data variables don't get
indexed by time, even though they should. Therefore, we need this helper
function to (possibly) correct this.
Note that this must be applied before CF-conventions are decoded; otherwise
it casts ``np.datetime64[ns]`` as ``int`` values.
Parameters
----------
ds : Dataset
Dataset with a time coordinate
Returns
-------
Dataset
"""
time_indexed_coords = {TIME_WEIGHTS_STR, TIME_BOUNDS_STR}
time_indexed_vars = set(ds.data_vars).union(time_indexed_coords)
time_indexed_vars = time_indexed_vars.intersection(ds.variables)
variables_to_replace = {}
for name in time_indexed_vars:
if TIME_STR not in ds[name].indexes:
da = ds[name]
if TIME_STR not in da.dims:
da = ds[name].expand_dims(TIME_STR)
da = da.assign_coords(**{TIME_STR: ds[TIME_STR]})
variables_to_replace[name] = da
return ds.assign(**variables_to_replace) | Ensures that time is an indexed coordinate on relevant quantites.
Sometimes when the data we load from disk has only one timestep, the
indexing of time-defined quantities in the resulting xarray.Dataset gets
messed up, in that the time bounds array and data variables don't get
indexed by time, even though they should. Therefore, we need this helper
function to (possibly) correct this.
Note that this must be applied before CF-conventions are decoded; otherwise
it casts ``np.datetime64[ns]`` as ``int`` values.
Parameters
----------
ds : Dataset
Dataset with a time coordinate
Returns
-------
Dataset | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L489-L522 |
spencerahill/aospy | aospy/utils/times.py | infer_year | def infer_year(date):
"""Given a datetime-like object or string infer the year.
Parameters
----------
date : datetime-like object or str
Input date
Returns
-------
int
Examples
--------
>>> infer_year('2000')
2000
>>> infer_year('2000-01')
2000
>>> infer_year('2000-01-31')
2000
>>> infer_year(datetime.datetime(2000, 1, 1))
2000
>>> infer_year(np.datetime64('2000-01-01'))
2000
>>> infer_year(DatetimeNoLeap(2000, 1, 1))
2000
>>>
"""
if isinstance(date, str):
# Look for a string that begins with four numbers; the first four
# numbers found are the year.
pattern = r'(?P<year>\d{4})'
result = re.match(pattern, date)
if result:
return int(result.groupdict()['year'])
else:
raise ValueError('Invalid date string provided: {}'.format(date))
elif isinstance(date, np.datetime64):
return date.item().year
else:
return date.year | python | def infer_year(date):
"""Given a datetime-like object or string infer the year.
Parameters
----------
date : datetime-like object or str
Input date
Returns
-------
int
Examples
--------
>>> infer_year('2000')
2000
>>> infer_year('2000-01')
2000
>>> infer_year('2000-01-31')
2000
>>> infer_year(datetime.datetime(2000, 1, 1))
2000
>>> infer_year(np.datetime64('2000-01-01'))
2000
>>> infer_year(DatetimeNoLeap(2000, 1, 1))
2000
>>>
"""
if isinstance(date, str):
# Look for a string that begins with four numbers; the first four
# numbers found are the year.
pattern = r'(?P<year>\d{4})'
result = re.match(pattern, date)
if result:
return int(result.groupdict()['year'])
else:
raise ValueError('Invalid date string provided: {}'.format(date))
elif isinstance(date, np.datetime64):
return date.item().year
else:
return date.year | Given a datetime-like object or string infer the year.
Parameters
----------
date : datetime-like object or str
Input date
Returns
-------
int
Examples
--------
>>> infer_year('2000')
2000
>>> infer_year('2000-01')
2000
>>> infer_year('2000-01-31')
2000
>>> infer_year(datetime.datetime(2000, 1, 1))
2000
>>> infer_year(np.datetime64('2000-01-01'))
2000
>>> infer_year(DatetimeNoLeap(2000, 1, 1))
2000
>>> | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L525-L565 |
spencerahill/aospy | aospy/utils/times.py | maybe_convert_to_index_date_type | def maybe_convert_to_index_date_type(index, date):
"""Convert a datetime-like object to the index's date type.
Datetime indexing in xarray can be done using either a pandas
DatetimeIndex or a CFTimeIndex. Both support partial-datetime string
indexing regardless of the calendar type of the underlying data;
therefore if a string is passed as a date, we return it unchanged. If a
datetime-like object is provided, it will be converted to the underlying
date type of the index. For a DatetimeIndex that is np.datetime64; for a
CFTimeIndex that is an object of type cftime.datetime specific to the
calendar used.
Parameters
----------
index : pd.Index
Input time index
date : datetime-like object or str
Input datetime
Returns
-------
date of the type appropriate for the time index of the Dataset
"""
if isinstance(date, str):
return date
if isinstance(index, pd.DatetimeIndex):
if isinstance(date, np.datetime64):
return date
else:
return np.datetime64(str(date))
else:
date_type = index.date_type
if isinstance(date, date_type):
return date
else:
if isinstance(date, np.datetime64):
# Convert to datetime.date or datetime.datetime object
date = date.item()
if isinstance(date, datetime.date):
# Convert to a datetime.datetime object
date = datetime.datetime.combine(
date, datetime.datetime.min.time())
return date_type(date.year, date.month, date.day, date.hour,
date.minute, date.second, date.microsecond) | python | def maybe_convert_to_index_date_type(index, date):
"""Convert a datetime-like object to the index's date type.
Datetime indexing in xarray can be done using either a pandas
DatetimeIndex or a CFTimeIndex. Both support partial-datetime string
indexing regardless of the calendar type of the underlying data;
therefore if a string is passed as a date, we return it unchanged. If a
datetime-like object is provided, it will be converted to the underlying
date type of the index. For a DatetimeIndex that is np.datetime64; for a
CFTimeIndex that is an object of type cftime.datetime specific to the
calendar used.
Parameters
----------
index : pd.Index
Input time index
date : datetime-like object or str
Input datetime
Returns
-------
date of the type appropriate for the time index of the Dataset
"""
if isinstance(date, str):
return date
if isinstance(index, pd.DatetimeIndex):
if isinstance(date, np.datetime64):
return date
else:
return np.datetime64(str(date))
else:
date_type = index.date_type
if isinstance(date, date_type):
return date
else:
if isinstance(date, np.datetime64):
# Convert to datetime.date or datetime.datetime object
date = date.item()
if isinstance(date, datetime.date):
# Convert to a datetime.datetime object
date = datetime.datetime.combine(
date, datetime.datetime.min.time())
return date_type(date.year, date.month, date.day, date.hour,
date.minute, date.second, date.microsecond) | Convert a datetime-like object to the index's date type.
Datetime indexing in xarray can be done using either a pandas
DatetimeIndex or a CFTimeIndex. Both support partial-datetime string
indexing regardless of the calendar type of the underlying data;
therefore if a string is passed as a date, we return it unchanged. If a
datetime-like object is provided, it will be converted to the underlying
date type of the index. For a DatetimeIndex that is np.datetime64; for a
CFTimeIndex that is an object of type cftime.datetime specific to the
calendar used.
Parameters
----------
index : pd.Index
Input time index
date : datetime-like object or str
Input datetime
Returns
-------
date of the type appropriate for the time index of the Dataset | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/times.py#L568-L614 |
spencerahill/aospy | aospy/region.py | Region._make_mask | def _make_mask(self, data, lon_str=LON_STR, lat_str=LAT_STR):
"""Construct the mask that defines a region on a given data's grid."""
mask = False
for west, east, south, north in self.mask_bounds:
if west < east:
mask_lon = (data[lon_str] > west) & (data[lon_str] < east)
else:
mask_lon = (data[lon_str] < west) | (data[lon_str] > east)
mask_lat = (data[lat_str] > south) & (data[lat_str] < north)
mask |= mask_lon & mask_lat
return mask | python | def _make_mask(self, data, lon_str=LON_STR, lat_str=LAT_STR):
"""Construct the mask that defines a region on a given data's grid."""
mask = False
for west, east, south, north in self.mask_bounds:
if west < east:
mask_lon = (data[lon_str] > west) & (data[lon_str] < east)
else:
mask_lon = (data[lon_str] < west) | (data[lon_str] > east)
mask_lat = (data[lat_str] > south) & (data[lat_str] < north)
mask |= mask_lon & mask_lat
return mask | Construct the mask that defines a region on a given data's grid. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/region.py#L220-L230 |
spencerahill/aospy | aospy/region.py | Region.mask_var | def mask_var(self, data, lon_cyclic=True, lon_str=LON_STR,
lat_str=LAT_STR):
"""Mask the given data outside this region.
Parameters
----------
data : xarray.DataArray
The array to be regionally masked.
lon_cyclic : bool, optional (default True)
Whether or not the longitudes of ``data`` span the whole globe,
meaning that they should be wrapped around as necessary to cover
the Region's full width.
lon_str, lat_str : str, optional
The names of the longitude and latitude dimensions, respectively,
in the data to be masked. Defaults are
``aospy.internal_names.LON_STR`` and
``aospy.internal_names.LON_STR``, respectively.
Returns
-------
xarray.DataArray
The original array with points outside of the region masked.
"""
# TODO: is this still necessary?
if not lon_cyclic:
if self.west_bound > self.east_bound:
raise ValueError("Longitudes of data to be masked are "
"specified as non-cyclic, but Region's "
"definition requires wraparound longitudes.")
masked = data.where(self._make_mask(data, lon_str=lon_str,
lat_str=lat_str))
return masked | python | def mask_var(self, data, lon_cyclic=True, lon_str=LON_STR,
def mask_var(self, data, lon_cyclic=True, lon_str=LON_STR,
             lat_str=LAT_STR):
    """Mask the given data outside this region.

    Parameters
    ----------
    data : xarray.DataArray
        The array to be regionally masked.
    lon_cyclic : bool, optional (default True)
        Whether the longitudes of ``data`` span the whole globe, meaning
        they should be wrapped around as necessary to cover the Region's
        full width.
    lon_str, lat_str : str, optional
        Names of the longitude and latitude dimensions in ``data``.
        Defaults are ``aospy.internal_names.LON_STR`` and
        ``aospy.internal_names.LAT_STR``.

    Returns
    -------
    xarray.DataArray
        The original array with points outside of the region masked.
    """
    # A wraparound region cannot be represented on non-cyclic longitudes.
    if not lon_cyclic and self.west_bound > self.east_bound:
        raise ValueError("Longitudes of data to be masked are "
                         "specified as non-cyclic, but Region's "
                         "definition requires wraparound longitudes.")
    region_mask = self._make_mask(data, lon_str=lon_str, lat_str=lat_str)
    return data.where(region_mask)
Parameters
----------
data : xarray.DataArray
The array to be regionally masked.
lon_cyclic : bool, optional (default True)
Whether or not the longitudes of ``data`` span the whole globe,
meaning that they should be wrapped around as necessary to cover
the Region's full width.
lon_str, lat_str : str, optional
The names of the longitude and latitude dimensions, respectively,
in the data to be masked. Defaults are
``aospy.internal_names.LON_STR`` and
``aospy.internal_names.LON_STR``, respectively.
Returns
-------
xarray.DataArray
The original array with points outside of the region masked. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/region.py#L232-L264 |
spencerahill/aospy | aospy/region.py | Region.ts | def ts(self, data, lon_cyclic=True, lon_str=LON_STR, lat_str=LAT_STR,
land_mask_str=LAND_MASK_STR, sfc_area_str=SFC_AREA_STR):
"""Create yearly time-series of region-averaged data.
Parameters
----------
data : xarray.DataArray
The array to create the regional timeseries of
lon_cyclic : { None, True, False }, optional (default True)
Whether or not the longitudes of ``data`` span the whole globe,
meaning that they should be wrapped around as necessary to cover
the Region's full width.
lat_str, lon_str, land_mask_str, sfc_area_str : str, optional
The name of the latitude, longitude, land mask, and surface area
coordinates, respectively, in ``data``. Defaults are the
corresponding values in ``aospy.internal_names``.
Returns
-------
xarray.DataArray
The timeseries of values averaged within the region and within each
year, one value per year.
"""
data_masked = self.mask_var(data, lon_cyclic=lon_cyclic,
lon_str=lon_str, lat_str=lat_str)
sfc_area = data[sfc_area_str]
sfc_area_masked = self.mask_var(sfc_area, lon_cyclic=lon_cyclic,
lon_str=lon_str, lat_str=lat_str)
land_mask = _get_land_mask(data, self.do_land_mask,
land_mask_str=land_mask_str)
weights = sfc_area_masked * land_mask
# Mask weights where data values are initially invalid in addition
# to applying the region mask.
weights = weights.where(np.isfinite(data))
weights_reg_sum = weights.sum(lon_str).sum(lat_str)
data_reg_sum = (data_masked * sfc_area_masked *
land_mask).sum(lat_str).sum(lon_str)
return data_reg_sum / weights_reg_sum | python | def ts(self, data, lon_cyclic=True, lon_str=LON_STR, lat_str=LAT_STR,
def ts(self, data, lon_cyclic=True, lon_str=LON_STR, lat_str=LAT_STR,
       land_mask_str=LAND_MASK_STR, sfc_area_str=SFC_AREA_STR):
    """Create a yearly time-series of region-averaged data.

    Parameters
    ----------
    data : xarray.DataArray
        The array to create the regional timeseries of
    lon_cyclic : { None, True, False }, optional (default True)
        Whether the longitudes of ``data`` span the whole globe, meaning
        they should be wrapped around as necessary to cover the Region's
        full width.
    lat_str, lon_str, land_mask_str, sfc_area_str : str, optional
        Names of the latitude, longitude, land mask, and surface area
        coordinates in ``data``.  Defaults are the corresponding values
        in ``aospy.internal_names``.

    Returns
    -------
    xarray.DataArray
        Values area-averaged within the region, one value per year.
    """
    masked_data = self.mask_var(data, lon_cyclic=lon_cyclic,
                                lon_str=lon_str, lat_str=lat_str)
    masked_area = self.mask_var(data[sfc_area_str], lon_cyclic=lon_cyclic,
                                lon_str=lon_str, lat_str=lat_str)
    land_mask = _get_land_mask(data, self.do_land_mask,
                               land_mask_str=land_mask_str)
    # Exclude cells whose data values are invalid from the weights, in
    # addition to applying the regional mask.
    weights = (masked_area * land_mask).where(np.isfinite(data))
    total_weight = weights.sum(lon_str).sum(lat_str)
    weighted_sum = (masked_data * masked_area *
                    land_mask).sum(lat_str).sum(lon_str)
    return weighted_sum / total_weight
Parameters
----------
data : xarray.DataArray
The array to create the regional timeseries of
lon_cyclic : { None, True, False }, optional (default True)
Whether or not the longitudes of ``data`` span the whole globe,
meaning that they should be wrapped around as necessary to cover
the Region's full width.
lat_str, lon_str, land_mask_str, sfc_area_str : str, optional
The name of the latitude, longitude, land mask, and surface area
coordinates, respectively, in ``data``. Defaults are the
corresponding values in ``aospy.internal_names``.
Returns
-------
xarray.DataArray
The timeseries of values averaged within the region and within each
year, one value per year. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/region.py#L266-L304 |
spencerahill/aospy | aospy/region.py | Region.av | def av(self, data, lon_str=LON_STR, lat_str=LAT_STR,
land_mask_str=LAND_MASK_STR, sfc_area_str=SFC_AREA_STR):
"""Time-average of region-averaged data.
Parameters
----------
data : xarray.DataArray
The array to compute the regional time-average of
lat_str, lon_str, land_mask_str, sfc_area_str : str, optional
The name of the latitude, longitude, land mask, and surface area
coordinates, respectively, in ``data``. Defaults are the
corresponding values in ``aospy.internal_names``.
Returns
-------
xarray.DataArray
The region-averaged and time-averaged data.
"""
ts = self.ts(data, lon_str=lon_str, lat_str=lat_str,
land_mask_str=land_mask_str, sfc_area_str=sfc_area_str)
if YEAR_STR not in ts.coords:
return ts
else:
return ts.mean(YEAR_STR) | python | def av(self, data, lon_str=LON_STR, lat_str=LAT_STR,
def av(self, data, lon_str=LON_STR, lat_str=LAT_STR,
       land_mask_str=LAND_MASK_STR, sfc_area_str=SFC_AREA_STR):
    """Time-average of region-averaged data.

    Parameters
    ----------
    data : xarray.DataArray
        The array to compute the regional time-average of
    lat_str, lon_str, land_mask_str, sfc_area_str : str, optional
        Names of the latitude, longitude, land mask, and surface area
        coordinates in ``data``.  Defaults are the corresponding values
        in ``aospy.internal_names``.

    Returns
    -------
    xarray.DataArray
        The region-averaged and time-averaged data.
    """
    annual = self.ts(data, lon_str=lon_str, lat_str=lat_str,
                     land_mask_str=land_mask_str,
                     sfc_area_str=sfc_area_str)
    # A timeseries without a year coordinate is already a single value.
    if YEAR_STR in annual.coords:
        return annual.mean(YEAR_STR)
    return annual
Parameters
----------
data : xarray.DataArray
The array to compute the regional time-average of
lat_str, lon_str, land_mask_str, sfc_area_str : str, optional
The name of the latitude, longitude, land mask, and surface area
coordinates, respectively, in ``data``. Defaults are the
corresponding values in ``aospy.internal_names``.
Returns
-------
xarray.DataArray
The region-averaged and time-averaged data. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/region.py#L306-L330 |
spencerahill/aospy | aospy/model.py | _rename_coords | def _rename_coords(ds, attrs):
"""Rename coordinates to aospy's internal names."""
for name_int, names_ext in attrs.items():
# Check if coord is in dataset already.
ds_coord_name = set(names_ext).intersection(set(ds.coords))
if ds_coord_name:
# Rename to the aospy internal name.
try:
ds = ds.rename({list(ds_coord_name)[0]: name_int})
logging.debug("Rename coord from `{0}` to `{1}` for "
"Dataset `{2}`".format(ds_coord_name,
name_int, ds))
# xarray throws a ValueError if the name already exists
except ValueError:
ds = ds
return ds | python | def _rename_coords(ds, attrs):
"""Rename coordinates to aospy's internal names."""
for name_int, names_ext in attrs.items():
# Check if coord is in dataset already.
ds_coord_name = set(names_ext).intersection(set(ds.coords))
if ds_coord_name:
# Rename to the aospy internal name.
try:
ds = ds.rename({list(ds_coord_name)[0]: name_int})
logging.debug("Rename coord from `{0}` to `{1}` for "
"Dataset `{2}`".format(ds_coord_name,
name_int, ds))
# xarray throws a ValueError if the name already exists
except ValueError:
ds = ds
return ds | Rename coordinates to aospy's internal names. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/model.py#L21-L36 |
spencerahill/aospy | aospy/model.py | _bounds_from_array | def _bounds_from_array(arr, dim_name, bounds_name):
"""Get the bounds of an array given its center values.
E.g. if lat-lon grid center lat/lon values are known, but not the
bounds of each grid box. The algorithm assumes that the bounds
are simply halfway between each pair of center values.
"""
# TODO: don't assume needed dimension is in axis=0
# TODO: refactor to get rid of repetitive code
spacing = arr.diff(dim_name).values
lower = xr.DataArray(np.empty_like(arr), dims=arr.dims,
coords=arr.coords)
lower.values[:-1] = arr.values[:-1] - 0.5*spacing
lower.values[-1] = arr.values[-1] - 0.5*spacing[-1]
upper = xr.DataArray(np.empty_like(arr), dims=arr.dims,
coords=arr.coords)
upper.values[:-1] = arr.values[:-1] + 0.5*spacing
upper.values[-1] = arr.values[-1] + 0.5*spacing[-1]
bounds = xr.concat([lower, upper], dim='bounds')
return bounds.T | python | def _bounds_from_array(arr, dim_name, bounds_name):
"""Get the bounds of an array given its center values.
E.g. if lat-lon grid center lat/lon values are known, but not the
bounds of each grid box. The algorithm assumes that the bounds
are simply halfway between each pair of center values.
"""
# TODO: don't assume needed dimension is in axis=0
# TODO: refactor to get rid of repetitive code
spacing = arr.diff(dim_name).values
lower = xr.DataArray(np.empty_like(arr), dims=arr.dims,
coords=arr.coords)
lower.values[:-1] = arr.values[:-1] - 0.5*spacing
lower.values[-1] = arr.values[-1] - 0.5*spacing[-1]
upper = xr.DataArray(np.empty_like(arr), dims=arr.dims,
coords=arr.coords)
upper.values[:-1] = arr.values[:-1] + 0.5*spacing
upper.values[-1] = arr.values[-1] + 0.5*spacing[-1]
bounds = xr.concat([lower, upper], dim='bounds')
return bounds.T | Get the bounds of an array given its center values.
E.g. if lat-lon grid center lat/lon values are known, but not the
bounds of each grid box. The algorithm assumes that the bounds
are simply halfway between each pair of center values. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/model.py#L39-L58 |
spencerahill/aospy | aospy/model.py | _diff_bounds | def _diff_bounds(bounds, coord):
"""Get grid spacing by subtracting upper and lower bounds."""
try:
return bounds[:, 1] - bounds[:, 0]
except IndexError:
diff = np.diff(bounds, axis=0)
return xr.DataArray(diff, dims=coord.dims, coords=coord.coords) | python | def _diff_bounds(bounds, coord):
"""Get grid spacing by subtracting upper and lower bounds."""
try:
return bounds[:, 1] - bounds[:, 0]
except IndexError:
diff = np.diff(bounds, axis=0)
return xr.DataArray(diff, dims=coord.dims, coords=coord.coords) | Get grid spacing by subtracting upper and lower bounds. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/model.py#L61-L67 |
spencerahill/aospy | aospy/model.py | _grid_sfc_area | def _grid_sfc_area(lon, lat, lon_bounds=None, lat_bounds=None):
"""Calculate surface area of each grid cell in a lon-lat grid."""
# Compute the bounds if not given.
if lon_bounds is None:
lon_bounds = _bounds_from_array(
lon, internal_names.LON_STR, internal_names.LON_BOUNDS_STR)
if lat_bounds is None:
lat_bounds = _bounds_from_array(
lat, internal_names.LAT_STR, internal_names.LAT_BOUNDS_STR)
# Compute the surface area.
dlon = _diff_bounds(utils.vertcoord.to_radians(lon_bounds, is_delta=True),
lon)
sinlat_bounds = np.sin(utils.vertcoord.to_radians(lat_bounds,
is_delta=True))
dsinlat = np.abs(_diff_bounds(sinlat_bounds, lat))
sfc_area = dlon*dsinlat*(RADIUS_EARTH**2)
# Rename the coordinates such that they match the actual lat / lon.
try:
sfc_area = sfc_area.rename(
{internal_names.LAT_BOUNDS_STR: internal_names.LAT_STR,
internal_names.LON_BOUNDS_STR: internal_names.LON_STR})
except ValueError:
pass
# Clean up: correct names and dimension order.
sfc_area = sfc_area.rename(internal_names.SFC_AREA_STR)
sfc_area[internal_names.LAT_STR] = lat
sfc_area[internal_names.LON_STR] = lon
return sfc_area.transpose() | python | def _grid_sfc_area(lon, lat, lon_bounds=None, lat_bounds=None):
"""Calculate surface area of each grid cell in a lon-lat grid."""
# Compute the bounds if not given.
if lon_bounds is None:
lon_bounds = _bounds_from_array(
lon, internal_names.LON_STR, internal_names.LON_BOUNDS_STR)
if lat_bounds is None:
lat_bounds = _bounds_from_array(
lat, internal_names.LAT_STR, internal_names.LAT_BOUNDS_STR)
# Compute the surface area.
dlon = _diff_bounds(utils.vertcoord.to_radians(lon_bounds, is_delta=True),
lon)
sinlat_bounds = np.sin(utils.vertcoord.to_radians(lat_bounds,
is_delta=True))
dsinlat = np.abs(_diff_bounds(sinlat_bounds, lat))
sfc_area = dlon*dsinlat*(RADIUS_EARTH**2)
# Rename the coordinates such that they match the actual lat / lon.
try:
sfc_area = sfc_area.rename(
{internal_names.LAT_BOUNDS_STR: internal_names.LAT_STR,
internal_names.LON_BOUNDS_STR: internal_names.LON_STR})
except ValueError:
pass
# Clean up: correct names and dimension order.
sfc_area = sfc_area.rename(internal_names.SFC_AREA_STR)
sfc_area[internal_names.LAT_STR] = lat
sfc_area[internal_names.LON_STR] = lon
return sfc_area.transpose() | Calculate surface area of each grid cell in a lon-lat grid. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/model.py#L70-L97 |
spencerahill/aospy | aospy/model.py | Model._get_grid_files | def _get_grid_files(self):
"""Get the files holding grid data for an aospy object."""
grid_file_paths = self.grid_file_paths
datasets = []
if isinstance(grid_file_paths, str):
grid_file_paths = [grid_file_paths]
for path in grid_file_paths:
try:
ds = xr.open_dataset(path, decode_times=False)
except (TypeError, AttributeError):
ds = xr.open_mfdataset(path, decode_times=False).load()
except (RuntimeError, OSError) as e:
msg = str(e) + ': {}'.format(path)
raise RuntimeError(msg)
datasets.append(ds)
return tuple(datasets) | python | def _get_grid_files(self):
"""Get the files holding grid data for an aospy object."""
grid_file_paths = self.grid_file_paths
datasets = []
if isinstance(grid_file_paths, str):
grid_file_paths = [grid_file_paths]
for path in grid_file_paths:
try:
ds = xr.open_dataset(path, decode_times=False)
except (TypeError, AttributeError):
ds = xr.open_mfdataset(path, decode_times=False).load()
except (RuntimeError, OSError) as e:
msg = str(e) + ': {}'.format(path)
raise RuntimeError(msg)
datasets.append(ds)
return tuple(datasets) | Get the files holding grid data for an aospy object. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/model.py#L221-L236 |
spencerahill/aospy | aospy/model.py | Model._set_mult_grid_attr | def _set_mult_grid_attr(self):
"""
Set multiple attrs from grid file given their names in the grid file.
"""
grid_objs = self._get_grid_files()
if self.grid_attrs is None:
self.grid_attrs = {}
# Override GRID_ATTRS with entries in grid_attrs
attrs = internal_names.GRID_ATTRS.copy()
for k, v in self.grid_attrs.items():
if k not in attrs:
raise ValueError(
'Unrecognized internal name, {!r}, specified for a '
'custom grid attribute name. See the full list of '
'valid internal names below:\n\n{}'.format(
k, list(internal_names.GRID_ATTRS.keys())))
attrs[k] = (v, )
for name_int, names_ext in attrs.items():
for name in names_ext:
grid_attr = _get_grid_attr(grid_objs, name)
if grid_attr is not None:
TIME_STR = internal_names.TIME_STR
renamed_attr = _rename_coords(grid_attr, attrs)
if ((TIME_STR not in renamed_attr.dims) and
(TIME_STR in renamed_attr.coords)):
renamed_attr = renamed_attr.drop(TIME_STR)
setattr(self, name_int, renamed_attr)
break | python | def _set_mult_grid_attr(self):
"""
Set multiple attrs from grid file given their names in the grid file.
"""
grid_objs = self._get_grid_files()
if self.grid_attrs is None:
self.grid_attrs = {}
# Override GRID_ATTRS with entries in grid_attrs
attrs = internal_names.GRID_ATTRS.copy()
for k, v in self.grid_attrs.items():
if k not in attrs:
raise ValueError(
'Unrecognized internal name, {!r}, specified for a '
'custom grid attribute name. See the full list of '
'valid internal names below:\n\n{}'.format(
k, list(internal_names.GRID_ATTRS.keys())))
attrs[k] = (v, )
for name_int, names_ext in attrs.items():
for name in names_ext:
grid_attr = _get_grid_attr(grid_objs, name)
if grid_attr is not None:
TIME_STR = internal_names.TIME_STR
renamed_attr = _rename_coords(grid_attr, attrs)
if ((TIME_STR not in renamed_attr.dims) and
(TIME_STR in renamed_attr.coords)):
renamed_attr = renamed_attr.drop(TIME_STR)
setattr(self, name_int, renamed_attr)
break | Set multiple attrs from grid file given their names in the grid file. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/model.py#L238-L268 |
spencerahill/aospy | aospy/model.py | Model.set_grid_data | def set_grid_data(self):
"""Populate the attrs that hold grid data."""
if self._grid_data_is_set:
return
self._set_mult_grid_attr()
if not np.any(getattr(self, 'sfc_area', None)):
try:
sfc_area = _grid_sfc_area(self.lon, self.lat, self.lon_bounds,
self.lat_bounds)
except AttributeError:
sfc_area = _grid_sfc_area(self.lon, self.lat)
self.sfc_area = sfc_area
try:
self.levs_thick = utils.vertcoord.level_thickness(self.level)
except AttributeError:
self.level = None
self.levs_thick = None
self._grid_data_is_set = True | python | def set_grid_data(self):
"""Populate the attrs that hold grid data."""
if self._grid_data_is_set:
return
self._set_mult_grid_attr()
if not np.any(getattr(self, 'sfc_area', None)):
try:
sfc_area = _grid_sfc_area(self.lon, self.lat, self.lon_bounds,
self.lat_bounds)
except AttributeError:
sfc_area = _grid_sfc_area(self.lon, self.lat)
self.sfc_area = sfc_area
try:
self.levs_thick = utils.vertcoord.level_thickness(self.level)
except AttributeError:
self.level = None
self.levs_thick = None
self._grid_data_is_set = True | Populate the attrs that hold grid data. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/model.py#L270-L287 |
spencerahill/aospy | aospy/utils/longitude.py | _other_to_lon | def _other_to_lon(func):
"""Wrapper for casting Longitude operator arguments to Longitude"""
def func_other_to_lon(obj, other):
return func(obj, _maybe_cast_to_lon(other))
return func_other_to_lon | python | def _other_to_lon(func):
"""Wrapper for casting Longitude operator arguments to Longitude"""
def func_other_to_lon(obj, other):
return func(obj, _maybe_cast_to_lon(other))
return func_other_to_lon | Wrapper for casting Longitude operator arguments to Longitude | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/longitude.py#L78-L82 |
spencerahill/aospy | aospy/automate.py | _get_attr_by_tag | def _get_attr_by_tag(obj, tag, attr_name):
"""Get attribute from an object via a string tag.
Parameters
----------
obj : object from which to get the attribute
attr_name : str
Unmodified name of the attribute to be found. The actual attribute
that is returned may be modified be 'tag'.
tag : str
Tag specifying how to modify 'attr_name' by pre-pending it with 'tag'.
Must be a key of the _TAG_ATTR_MODIFIERS dict.
Returns
-------
the specified attribute of obj
"""
attr_name = _TAG_ATTR_MODIFIERS[tag] + attr_name
return getattr(obj, attr_name) | python | def _get_attr_by_tag(obj, tag, attr_name):
"""Get attribute from an object via a string tag.
Parameters
----------
obj : object from which to get the attribute
attr_name : str
Unmodified name of the attribute to be found. The actual attribute
that is returned may be modified be 'tag'.
tag : str
Tag specifying how to modify 'attr_name' by pre-pending it with 'tag'.
Must be a key of the _TAG_ATTR_MODIFIERS dict.
Returns
-------
the specified attribute of obj
"""
attr_name = _TAG_ATTR_MODIFIERS[tag] + attr_name
return getattr(obj, attr_name) | Get attribute from an object via a string tag.
Parameters
----------
obj : object from which to get the attribute
attr_name : str
Unmodified name of the attribute to be found. The actual attribute
that is returned may be modified be 'tag'.
tag : str
Tag specifying how to modify 'attr_name' by pre-pending it with 'tag'.
Must be a key of the _TAG_ATTR_MODIFIERS dict.
Returns
-------
the specified attribute of obj | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L34-L52 |
spencerahill/aospy | aospy/automate.py | _permuted_dicts_of_specs | def _permuted_dicts_of_specs(specs):
"""Create {name: value} dict, one each for every permutation.
Each permutation becomes a dictionary, with the keys being the attr names
and the values being the corresponding value for that permutation. These
dicts can then be directly passed to the Calc constructor.
"""
permuter = itertools.product(*specs.values())
return [dict(zip(specs.keys(), perm)) for perm in permuter] | python | def _permuted_dicts_of_specs(specs):
"""Create {name: value} dict, one each for every permutation.
Each permutation becomes a dictionary, with the keys being the attr names
and the values being the corresponding value for that permutation. These
dicts can then be directly passed to the Calc constructor.
"""
permuter = itertools.product(*specs.values())
return [dict(zip(specs.keys(), perm)) for perm in permuter] | Create {name: value} dict, one each for every permutation.
Each permutation becomes a dictionary, with the keys being the attr names
and the values being the corresponding value for that permutation. These
dicts can then be directly passed to the Calc constructor. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L55-L64 |
spencerahill/aospy | aospy/automate.py | _get_all_objs_of_type | def _get_all_objs_of_type(type_, parent):
"""Get all attributes of the given type from the given object.
Parameters
----------
type_ : The desired type
parent : The object from which to get the attributes with type matching
'type_'
Returns
-------
A list (possibly empty) of attributes from 'parent'
"""
return set([obj for obj in parent.__dict__.values()
if isinstance(obj, type_)]) | python | def _get_all_objs_of_type(type_, parent):
"""Get all attributes of the given type from the given object.
Parameters
----------
type_ : The desired type
parent : The object from which to get the attributes with type matching
'type_'
Returns
-------
A list (possibly empty) of attributes from 'parent'
"""
return set([obj for obj in parent.__dict__.values()
if isinstance(obj, type_)]) | Get all attributes of the given type from the given object.
Parameters
----------
type_ : The desired type
parent : The object from which to get the attributes with type matching
'type_'
Returns
-------
A list (possibly empty) of attributes from 'parent' | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L101-L115 |
spencerahill/aospy | aospy/automate.py | _prune_invalid_time_reductions | def _prune_invalid_time_reductions(spec):
"""Prune time reductions of spec with no time dimension."""
valid_reductions = []
if not spec['var'].def_time and spec['dtype_out_time'] is not None:
for reduction in spec['dtype_out_time']:
if reduction not in _TIME_DEFINED_REDUCTIONS:
valid_reductions.append(reduction)
else:
msg = ("Var {0} has no time dimension "
"for the given time reduction "
"{1} so this calculation will "
"be skipped".format(spec['var'].name, reduction))
logging.info(msg)
else:
valid_reductions = spec['dtype_out_time']
return valid_reductions | python | def _prune_invalid_time_reductions(spec):
"""Prune time reductions of spec with no time dimension."""
valid_reductions = []
if not spec['var'].def_time and spec['dtype_out_time'] is not None:
for reduction in spec['dtype_out_time']:
if reduction not in _TIME_DEFINED_REDUCTIONS:
valid_reductions.append(reduction)
else:
msg = ("Var {0} has no time dimension "
"for the given time reduction "
"{1} so this calculation will "
"be skipped".format(spec['var'].name, reduction))
logging.info(msg)
else:
valid_reductions = spec['dtype_out_time']
return valid_reductions | Prune time reductions of spec with no time dimension. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L247-L262 |
spencerahill/aospy | aospy/automate.py | _compute_or_skip_on_error | def _compute_or_skip_on_error(calc, compute_kwargs):
"""Execute the Calc, catching and logging exceptions, but don't re-raise.
Prevents one failed calculation from stopping a larger requested set
of calculations.
"""
try:
return calc.compute(**compute_kwargs)
except Exception:
msg = ("Skipping aospy calculation `{0}` due to error with the "
"following traceback: \n{1}")
logging.warning(msg.format(calc, traceback.format_exc()))
return None | python | def _compute_or_skip_on_error(calc, compute_kwargs):
"""Execute the Calc, catching and logging exceptions, but don't re-raise.
Prevents one failed calculation from stopping a larger requested set
of calculations.
"""
try:
return calc.compute(**compute_kwargs)
except Exception:
msg = ("Skipping aospy calculation `{0}` due to error with the "
"following traceback: \n{1}")
logging.warning(msg.format(calc, traceback.format_exc()))
return None | Execute the Calc, catching and logging exceptions, but don't re-raise.
Prevents one failed calculation from stopping a larger requested set
of calculations. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L265-L277 |
spencerahill/aospy | aospy/automate.py | _submit_calcs_on_client | def _submit_calcs_on_client(calcs, client, func):
"""Submit calculations via dask.bag and a distributed client"""
logging.info('Connected to client: {}'.format(client))
if LooseVersion(dask.__version__) < '0.18':
dask_option_setter = dask.set_options
else:
dask_option_setter = dask.config.set
with dask_option_setter(get=client.get):
return db.from_sequence(calcs).map(func).compute() | python | def _submit_calcs_on_client(calcs, client, func):
"""Submit calculations via dask.bag and a distributed client"""
logging.info('Connected to client: {}'.format(client))
if LooseVersion(dask.__version__) < '0.18':
dask_option_setter = dask.set_options
else:
dask_option_setter = dask.config.set
with dask_option_setter(get=client.get):
return db.from_sequence(calcs).map(func).compute() | Submit calculations via dask.bag and a distributed client | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L280-L288 |
spencerahill/aospy | aospy/automate.py | _exec_calcs | def _exec_calcs(calcs, parallelize=False, client=None, **compute_kwargs):
"""Execute the given calculations.
Parameters
----------
calcs : Sequence of ``aospy.Calc`` objects
parallelize : bool, default False
Whether to submit the calculations in parallel or not
client : distributed.Client or None
The distributed Client used if parallelize is set to True; if None
a distributed LocalCluster is used.
compute_kwargs : dict of keyword arguments passed to ``Calc.compute``
Returns
-------
A list of the values returned by each Calc object that was executed.
"""
if parallelize:
def func(calc):
"""Wrap _compute_or_skip_on_error to require only the calc
argument"""
if 'write_to_tar' in compute_kwargs:
compute_kwargs['write_to_tar'] = False
return _compute_or_skip_on_error(calc, compute_kwargs)
if client is None:
n_workers = _n_workers_for_local_cluster(calcs)
with distributed.LocalCluster(n_workers=n_workers) as cluster:
with distributed.Client(cluster) as client:
result = _submit_calcs_on_client(calcs, client, func)
else:
result = _submit_calcs_on_client(calcs, client, func)
if compute_kwargs['write_to_tar']:
_serial_write_to_tar(calcs)
return result
else:
return [_compute_or_skip_on_error(calc, compute_kwargs)
for calc in calcs] | python | def _exec_calcs(calcs, parallelize=False, client=None, **compute_kwargs):
"""Execute the given calculations.
Parameters
----------
calcs : Sequence of ``aospy.Calc`` objects
parallelize : bool, default False
Whether to submit the calculations in parallel or not
client : distributed.Client or None
The distributed Client used if parallelize is set to True; if None
a distributed LocalCluster is used.
compute_kwargs : dict of keyword arguments passed to ``Calc.compute``
Returns
-------
A list of the values returned by each Calc object that was executed.
"""
if parallelize:
def func(calc):
"""Wrap _compute_or_skip_on_error to require only the calc
argument"""
if 'write_to_tar' in compute_kwargs:
compute_kwargs['write_to_tar'] = False
return _compute_or_skip_on_error(calc, compute_kwargs)
if client is None:
n_workers = _n_workers_for_local_cluster(calcs)
with distributed.LocalCluster(n_workers=n_workers) as cluster:
with distributed.Client(cluster) as client:
result = _submit_calcs_on_client(calcs, client, func)
else:
result = _submit_calcs_on_client(calcs, client, func)
if compute_kwargs['write_to_tar']:
_serial_write_to_tar(calcs)
return result
else:
return [_compute_or_skip_on_error(calc, compute_kwargs)
for calc in calcs] | Execute the given calculations.
Parameters
----------
calcs : Sequence of ``aospy.Calc`` objects
parallelize : bool, default False
Whether to submit the calculations in parallel or not
client : distributed.Client or None
The distributed Client used if parallelize is set to True; if None
a distributed LocalCluster is used.
compute_kwargs : dict of keyword arguments passed to ``Calc.compute``
Returns
-------
A list of the values returned by each Calc object that was executed. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L302-L339 |
spencerahill/aospy | aospy/automate.py | submit_mult_calcs | def submit_mult_calcs(calc_suite_specs, exec_options=None):
"""Generate and execute all specified computations.
Once the calculations are prepped and submitted for execution, any
calculation that triggers any exception or error is skipped, and the rest
of the calculations proceed unaffected. This prevents an error in a single
calculation from crashing a large suite of calculations.
Parameters
----------
calc_suite_specs : dict
The specifications describing the full set of calculations to be
generated and potentially executed. Accepted keys and their values:
library : module or package comprising an aospy object library
The aospy object library for these calculations.
projects : list of aospy.Proj objects
The projects to permute over.
models : 'all', 'default', or list of aospy.Model objects
The models to permute over. If 'all', use all models in the
``models`` attribute of each ``Proj``. If 'default', use all
models in the ``default_models`` attribute of each ``Proj``.
runs : 'all', 'default', or list of aospy.Run objects
The runs to permute over. If 'all', use all runs in the
``runs`` attribute of each ``Model``. If 'default', use all
runs in the ``default_runs`` attribute of each ``Model``.
variables : list of aospy.Var objects
The variables to be calculated.
regions : 'all' or list of aospy.Region objects
The region(s) over which any regional reductions will be performed.
If 'all', use all regions in the ``regions`` attribute of each
``Proj``.
date_ranges : 'default' or a list of tuples
The range of dates (inclusive) over which to perform calculations.
If 'default', use the ``default_start_date`` and
``default_end_date`` attribute of each ``Run``. Else provide a
list of tuples, each containing a pair of start and end dates,
such as ``date_ranges=[(start, end)]`` where ``start`` and
``end`` are each ``datetime.datetime`` objects, partial
datetime strings (e.g. '0001'), ``np.datetime64`` objects, or
``cftime.datetime`` objects.
output_time_intervals : {'ann', season-string, month-integer}
The sub-annual time interval over which to aggregate.
- 'ann' : Annual mean
- season-string : E.g. 'JJA' for June-July-August
- month-integer : 1 for January, 2 for February, etc. Each one is
a separate reduction, e.g. [1, 2] would produce averages (or
other specified time reduction) over all Januaries, and
separately over all Februaries.
output_time_regional_reductions : list of reduction string identifiers
Unlike most other keys, these are not permuted over when creating
the :py:class:`aospy.Calc` objects that execute the calculations;
each :py:class:`aospy.Calc` performs all of the specified
reductions. Accepted string identifiers are:
- Gridpoint-by-gridpoint output:
- 'av' : Gridpoint-by-gridpoint time-average
- 'std' : Gridpoint-by-gridpoint temporal standard deviation
- 'ts' : Gridpoint-by-gridpoint time-series
- Averages over each region specified via `region`:
- 'reg.av', 'reg.std', 'reg.ts' : analogous to 'av', 'std', 'ts'
output_vertical_reductions : {None, 'vert_av', 'vert_int'}, optional
How to reduce the data vertically:
- None : no vertical reduction
- 'vert_av' : mass-weighted vertical average
- 'vert_int' : mass-weighted vertical integral
input_time_intervals : {'annual', 'monthly', 'daily', '#hr'}
A string specifying the time resolution of the input data. In
'#hr' above, the '#' stands for a number, e.g. 3hr or 6hr, for
sub-daily output. These are the suggested specifiers, but others
may be used if they are also used by the DataLoaders for the given
Runs.
input_time_datatypes : {'inst', 'ts', 'av'}
What the time axis of the input data represents:
- 'inst' : Timeseries of instantaneous values
- 'ts' : Timeseries of averages over the period of each time-index
- 'av' : A single value averaged over a date range
input_vertical_datatypes : {False, 'pressure', 'sigma'}, optional
The vertical coordinate system used by the input data:
- False : not defined vertically
- 'pressure' : pressure coordinates
- 'sigma' : hybrid sigma-pressure coordinates
input_time_offsets : {None, dict}, optional
How to offset input data in time to correct for metadata errors
- None : no time offset applied
- dict : e.g. ``{'hours': -3}`` to offset times by -3 hours
See :py:meth:`aospy.utils.times.apply_time_offset`.
exec_options : dict or None (default None)
Options regarding how the calculations are reported, submitted, and
saved. If None, default settings are used for all options. Currently
supported options (each should be either `True` or `False`):
- prompt_verify : (default False) If True, print summary of
calculations to be performed and prompt user to confirm before
submitting for execution.
- parallelize : (default False) If True, submit calculations in
parallel.
- client : distributed.Client or None (default None) The
dask.distributed Client used to schedule computations. If None
and parallelize is True, a LocalCluster will be started.
- write_to_tar : (default True) If True, write results of calculations
to .tar files, one for each :py:class:`aospy.Run` object.
These tar files have an identical directory structures the
standard output relative to their root directory, which is
specified via the `tar_direc_out` argument of each Proj
object's instantiation.
Returns
-------
A list of the return values from each :py:meth:`aospy.Calc.compute` call
If a calculation ran without error, this value is the
:py:class:`aospy.Calc` object itself, with the results of its
calculations saved in its ``data_out`` attribute. ``data_out`` is a
dictionary, with the keys being the temporal-regional reduction
identifiers (e.g. 'reg.av'), and the values being the corresponding
result.
If any error occurred during a calculation, the return value is None.
Raises
------
AospyException
If the ``prompt_verify`` option is set to True and the user does not
respond affirmatively to the prompt.
"""
if exec_options is None:
exec_options = dict()
if exec_options.pop('prompt_verify', False):
print(_print_suite_summary(calc_suite_specs))
_user_verify()
calc_suite = CalcSuite(calc_suite_specs)
calcs = calc_suite.create_calcs()
if not calcs:
raise AospyException(
"The specified combination of parameters yielded zero "
"calculations. Most likely, one of the parameters is "
"inadvertently empty."
)
return _exec_calcs(calcs, **exec_options) | python | def submit_mult_calcs(calc_suite_specs, exec_options=None):
"""Generate and execute all specified computations.
Once the calculations are prepped and submitted for execution, any
calculation that triggers any exception or error is skipped, and the rest
of the calculations proceed unaffected. This prevents an error in a single
calculation from crashing a large suite of calculations.
Parameters
----------
calc_suite_specs : dict
The specifications describing the full set of calculations to be
generated and potentially executed. Accepted keys and their values:
library : module or package comprising an aospy object library
The aospy object library for these calculations.
projects : list of aospy.Proj objects
The projects to permute over.
models : 'all', 'default', or list of aospy.Model objects
The models to permute over. If 'all', use all models in the
``models`` attribute of each ``Proj``. If 'default', use all
models in the ``default_models`` attribute of each ``Proj``.
runs : 'all', 'default', or list of aospy.Run objects
The runs to permute over. If 'all', use all runs in the
``runs`` attribute of each ``Model``. If 'default', use all
runs in the ``default_runs`` attribute of each ``Model``.
variables : list of aospy.Var objects
The variables to be calculated.
regions : 'all' or list of aospy.Region objects
The region(s) over which any regional reductions will be performed.
If 'all', use all regions in the ``regions`` attribute of each
``Proj``.
date_ranges : 'default' or a list of tuples
The range of dates (inclusive) over which to perform calculations.
If 'default', use the ``default_start_date`` and
``default_end_date`` attribute of each ``Run``. Else provide a
list of tuples, each containing a pair of start and end dates,
such as ``date_ranges=[(start, end)]`` where ``start`` and
``end`` are each ``datetime.datetime`` objects, partial
datetime strings (e.g. '0001'), ``np.datetime64`` objects, or
``cftime.datetime`` objects.
output_time_intervals : {'ann', season-string, month-integer}
The sub-annual time interval over which to aggregate.
- 'ann' : Annual mean
- season-string : E.g. 'JJA' for June-July-August
- month-integer : 1 for January, 2 for February, etc. Each one is
a separate reduction, e.g. [1, 2] would produce averages (or
other specified time reduction) over all Januaries, and
separately over all Februaries.
output_time_regional_reductions : list of reduction string identifiers
Unlike most other keys, these are not permuted over when creating
the :py:class:`aospy.Calc` objects that execute the calculations;
each :py:class:`aospy.Calc` performs all of the specified
reductions. Accepted string identifiers are:
- Gridpoint-by-gridpoint output:
- 'av' : Gridpoint-by-gridpoint time-average
- 'std' : Gridpoint-by-gridpoint temporal standard deviation
- 'ts' : Gridpoint-by-gridpoint time-series
- Averages over each region specified via `region`:
- 'reg.av', 'reg.std', 'reg.ts' : analogous to 'av', 'std', 'ts'
output_vertical_reductions : {None, 'vert_av', 'vert_int'}, optional
How to reduce the data vertically:
- None : no vertical reduction
- 'vert_av' : mass-weighted vertical average
- 'vert_int' : mass-weighted vertical integral
input_time_intervals : {'annual', 'monthly', 'daily', '#hr'}
A string specifying the time resolution of the input data. In
'#hr' above, the '#' stands for a number, e.g. 3hr or 6hr, for
sub-daily output. These are the suggested specifiers, but others
may be used if they are also used by the DataLoaders for the given
Runs.
input_time_datatypes : {'inst', 'ts', 'av'}
What the time axis of the input data represents:
- 'inst' : Timeseries of instantaneous values
- 'ts' : Timeseries of averages over the period of each time-index
- 'av' : A single value averaged over a date range
input_vertical_datatypes : {False, 'pressure', 'sigma'}, optional
The vertical coordinate system used by the input data:
- False : not defined vertically
- 'pressure' : pressure coordinates
- 'sigma' : hybrid sigma-pressure coordinates
input_time_offsets : {None, dict}, optional
How to offset input data in time to correct for metadata errors
- None : no time offset applied
- dict : e.g. ``{'hours': -3}`` to offset times by -3 hours
See :py:meth:`aospy.utils.times.apply_time_offset`.
exec_options : dict or None (default None)
Options regarding how the calculations are reported, submitted, and
saved. If None, default settings are used for all options. Currently
supported options (each should be either `True` or `False`):
- prompt_verify : (default False) If True, print summary of
calculations to be performed and prompt user to confirm before
submitting for execution.
- parallelize : (default False) If True, submit calculations in
parallel.
- client : distributed.Client or None (default None) The
dask.distributed Client used to schedule computations. If None
and parallelize is True, a LocalCluster will be started.
- write_to_tar : (default True) If True, write results of calculations
to .tar files, one for each :py:class:`aospy.Run` object.
These tar files have an identical directory structures the
standard output relative to their root directory, which is
specified via the `tar_direc_out` argument of each Proj
object's instantiation.
Returns
-------
A list of the return values from each :py:meth:`aospy.Calc.compute` call
If a calculation ran without error, this value is the
:py:class:`aospy.Calc` object itself, with the results of its
calculations saved in its ``data_out`` attribute. ``data_out`` is a
dictionary, with the keys being the temporal-regional reduction
identifiers (e.g. 'reg.av'), and the values being the corresponding
result.
If any error occurred during a calculation, the return value is None.
Raises
------
AospyException
If the ``prompt_verify`` option is set to True and the user does not
respond affirmatively to the prompt.
"""
if exec_options is None:
exec_options = dict()
if exec_options.pop('prompt_verify', False):
print(_print_suite_summary(calc_suite_specs))
_user_verify()
calc_suite = CalcSuite(calc_suite_specs)
calcs = calc_suite.create_calcs()
if not calcs:
raise AospyException(
"The specified combination of parameters yielded zero "
"calculations. Most likely, one of the parameters is "
"inadvertently empty."
)
return _exec_calcs(calcs, **exec_options) | Generate and execute all specified computations.
Once the calculations are prepped and submitted for execution, any
calculation that triggers any exception or error is skipped, and the rest
of the calculations proceed unaffected. This prevents an error in a single
calculation from crashing a large suite of calculations.
Parameters
----------
calc_suite_specs : dict
The specifications describing the full set of calculations to be
generated and potentially executed. Accepted keys and their values:
library : module or package comprising an aospy object library
The aospy object library for these calculations.
projects : list of aospy.Proj objects
The projects to permute over.
models : 'all', 'default', or list of aospy.Model objects
The models to permute over. If 'all', use all models in the
``models`` attribute of each ``Proj``. If 'default', use all
models in the ``default_models`` attribute of each ``Proj``.
runs : 'all', 'default', or list of aospy.Run objects
The runs to permute over. If 'all', use all runs in the
``runs`` attribute of each ``Model``. If 'default', use all
runs in the ``default_runs`` attribute of each ``Model``.
variables : list of aospy.Var objects
The variables to be calculated.
regions : 'all' or list of aospy.Region objects
The region(s) over which any regional reductions will be performed.
If 'all', use all regions in the ``regions`` attribute of each
``Proj``.
date_ranges : 'default' or a list of tuples
The range of dates (inclusive) over which to perform calculations.
If 'default', use the ``default_start_date`` and
``default_end_date`` attribute of each ``Run``. Else provide a
list of tuples, each containing a pair of start and end dates,
such as ``date_ranges=[(start, end)]`` where ``start`` and
``end`` are each ``datetime.datetime`` objects, partial
datetime strings (e.g. '0001'), ``np.datetime64`` objects, or
``cftime.datetime`` objects.
output_time_intervals : {'ann', season-string, month-integer}
The sub-annual time interval over which to aggregate.
- 'ann' : Annual mean
- season-string : E.g. 'JJA' for June-July-August
- month-integer : 1 for January, 2 for February, etc. Each one is
a separate reduction, e.g. [1, 2] would produce averages (or
other specified time reduction) over all Januaries, and
separately over all Februaries.
output_time_regional_reductions : list of reduction string identifiers
Unlike most other keys, these are not permuted over when creating
the :py:class:`aospy.Calc` objects that execute the calculations;
each :py:class:`aospy.Calc` performs all of the specified
reductions. Accepted string identifiers are:
- Gridpoint-by-gridpoint output:
- 'av' : Gridpoint-by-gridpoint time-average
- 'std' : Gridpoint-by-gridpoint temporal standard deviation
- 'ts' : Gridpoint-by-gridpoint time-series
- Averages over each region specified via `region`:
- 'reg.av', 'reg.std', 'reg.ts' : analogous to 'av', 'std', 'ts'
output_vertical_reductions : {None, 'vert_av', 'vert_int'}, optional
How to reduce the data vertically:
- None : no vertical reduction
- 'vert_av' : mass-weighted vertical average
- 'vert_int' : mass-weighted vertical integral
input_time_intervals : {'annual', 'monthly', 'daily', '#hr'}
A string specifying the time resolution of the input data. In
'#hr' above, the '#' stands for a number, e.g. 3hr or 6hr, for
sub-daily output. These are the suggested specifiers, but others
may be used if they are also used by the DataLoaders for the given
Runs.
input_time_datatypes : {'inst', 'ts', 'av'}
What the time axis of the input data represents:
- 'inst' : Timeseries of instantaneous values
- 'ts' : Timeseries of averages over the period of each time-index
- 'av' : A single value averaged over a date range
input_vertical_datatypes : {False, 'pressure', 'sigma'}, optional
The vertical coordinate system used by the input data:
- False : not defined vertically
- 'pressure' : pressure coordinates
- 'sigma' : hybrid sigma-pressure coordinates
input_time_offsets : {None, dict}, optional
How to offset input data in time to correct for metadata errors
- None : no time offset applied
- dict : e.g. ``{'hours': -3}`` to offset times by -3 hours
See :py:meth:`aospy.utils.times.apply_time_offset`.
exec_options : dict or None (default None)
Options regarding how the calculations are reported, submitted, and
saved. If None, default settings are used for all options. Currently
supported options (each should be either `True` or `False`):
- prompt_verify : (default False) If True, print summary of
calculations to be performed and prompt user to confirm before
submitting for execution.
- parallelize : (default False) If True, submit calculations in
parallel.
- client : distributed.Client or None (default None) The
dask.distributed Client used to schedule computations. If None
and parallelize is True, a LocalCluster will be started.
- write_to_tar : (default True) If True, write results of calculations
to .tar files, one for each :py:class:`aospy.Run` object.
These tar files have an identical directory structures the
standard output relative to their root directory, which is
specified via the `tar_direc_out` argument of each Proj
object's instantiation.
Returns
-------
A list of the return values from each :py:meth:`aospy.Calc.compute` call
If a calculation ran without error, this value is the
:py:class:`aospy.Calc` object itself, with the results of its
calculations saved in its ``data_out`` attribute. ``data_out`` is a
dictionary, with the keys being the temporal-regional reduction
identifiers (e.g. 'reg.av'), and the values being the corresponding
result.
If any error occurred during a calculation, the return value is None.
Raises
------
AospyException
If the ``prompt_verify`` option is set to True and the user does not
respond affirmatively to the prompt. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L355-L508 |
spencerahill/aospy | aospy/automate.py | CalcSuite._get_requested_spec | def _get_requested_spec(self, obj, spec_name):
"""Helper to translate user specifications to needed objects."""
requested = self._specs_in[spec_name]
if isinstance(requested, str):
return _get_attr_by_tag(obj, requested, spec_name)
else:
return requested | python | def _get_requested_spec(self, obj, spec_name):
"""Helper to translate user specifications to needed objects."""
requested = self._specs_in[spec_name]
if isinstance(requested, str):
return _get_attr_by_tag(obj, requested, spec_name)
else:
return requested | Helper to translate user specifications to needed objects. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L152-L158 |
spencerahill/aospy | aospy/automate.py | CalcSuite._permute_core_specs | def _permute_core_specs(self):
"""Generate all requested combinations of the core objects."""
obj_trees = []
projects = self._get_requested_spec(self._obj_lib, _PROJECTS_STR)
for project in projects:
models = self._get_requested_spec(project, _MODELS_STR)
for model in models:
runs = self._get_requested_spec(model, _RUNS_STR)
for run in runs:
obj_trees.append({
self._NAMES_SUITE_TO_CALC[_PROJECTS_STR]: project,
self._NAMES_SUITE_TO_CALC[_MODELS_STR]: model,
self._NAMES_SUITE_TO_CALC[_RUNS_STR]: run,
})
return obj_trees | python | def _permute_core_specs(self):
"""Generate all requested combinations of the core objects."""
obj_trees = []
projects = self._get_requested_spec(self._obj_lib, _PROJECTS_STR)
for project in projects:
models = self._get_requested_spec(project, _MODELS_STR)
for model in models:
runs = self._get_requested_spec(model, _RUNS_STR)
for run in runs:
obj_trees.append({
self._NAMES_SUITE_TO_CALC[_PROJECTS_STR]: project,
self._NAMES_SUITE_TO_CALC[_MODELS_STR]: model,
self._NAMES_SUITE_TO_CALC[_RUNS_STR]: run,
})
return obj_trees | Generate all requested combinations of the core objects. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L160-L174 |
spencerahill/aospy | aospy/automate.py | CalcSuite._get_regions | def _get_regions(self):
"""Get the requested regions."""
if self._specs_in[_REGIONS_STR] == 'all':
return [_get_all_objs_of_type(
Region, getattr(self._obj_lib, 'regions', self._obj_lib)
)]
else:
return [set(self._specs_in[_REGIONS_STR])] | python | def _get_regions(self):
"""Get the requested regions."""
if self._specs_in[_REGIONS_STR] == 'all':
return [_get_all_objs_of_type(
Region, getattr(self._obj_lib, 'regions', self._obj_lib)
)]
else:
return [set(self._specs_in[_REGIONS_STR])] | Get the requested regions. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L176-L183 |
spencerahill/aospy | aospy/automate.py | CalcSuite._get_variables | def _get_variables(self):
"""Get the requested variables."""
if self._specs_in[_VARIABLES_STR] == 'all':
return _get_all_objs_of_type(
Var, getattr(self._obj_lib, 'variables', self._obj_lib)
)
else:
return set(self._specs_in[_VARIABLES_STR]) | python | def _get_variables(self):
"""Get the requested variables."""
if self._specs_in[_VARIABLES_STR] == 'all':
return _get_all_objs_of_type(
Var, getattr(self._obj_lib, 'variables', self._obj_lib)
)
else:
return set(self._specs_in[_VARIABLES_STR]) | Get the requested variables. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L185-L192 |
spencerahill/aospy | aospy/automate.py | CalcSuite._get_aux_specs | def _get_aux_specs(self):
"""Get and pre-process all of the non-core specifications."""
# Drop the "core" specifications, which are handled separately.
specs = self._specs_in.copy()
[specs.pop(core) for core in self._CORE_SPEC_NAMES]
specs[_REGIONS_STR] = self._get_regions()
specs[_VARIABLES_STR] = self._get_variables()
specs['date_ranges'] = self._get_date_ranges()
specs['output_time_regional_reductions'] = self._get_time_reg_reducts()
return specs | python | def _get_aux_specs(self):
"""Get and pre-process all of the non-core specifications."""
# Drop the "core" specifications, which are handled separately.
specs = self._specs_in.copy()
[specs.pop(core) for core in self._CORE_SPEC_NAMES]
specs[_REGIONS_STR] = self._get_regions()
specs[_VARIABLES_STR] = self._get_variables()
specs['date_ranges'] = self._get_date_ranges()
specs['output_time_regional_reductions'] = self._get_time_reg_reducts()
return specs | Get and pre-process all of the non-core specifications. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L205-L216 |
spencerahill/aospy | aospy/automate.py | CalcSuite._permute_aux_specs | def _permute_aux_specs(self):
"""Generate all permutations of the non-core specifications."""
# Convert to attr names that Calc is expecting.
calc_aux_mapping = self._NAMES_SUITE_TO_CALC.copy()
# Special case: manually add 'library' to mapping
calc_aux_mapping[_OBJ_LIB_STR] = None
[calc_aux_mapping.pop(core) for core in self._CORE_SPEC_NAMES]
specs = self._get_aux_specs()
for suite_name, calc_name in calc_aux_mapping.items():
specs[calc_name] = specs.pop(suite_name)
return _permuted_dicts_of_specs(specs) | python | def _permute_aux_specs(self):
"""Generate all permutations of the non-core specifications."""
# Convert to attr names that Calc is expecting.
calc_aux_mapping = self._NAMES_SUITE_TO_CALC.copy()
# Special case: manually add 'library' to mapping
calc_aux_mapping[_OBJ_LIB_STR] = None
[calc_aux_mapping.pop(core) for core in self._CORE_SPEC_NAMES]
specs = self._get_aux_specs()
for suite_name, calc_name in calc_aux_mapping.items():
specs[calc_name] = specs.pop(suite_name)
return _permuted_dicts_of_specs(specs) | Generate all permutations of the non-core specifications. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L218-L229 |
spencerahill/aospy | aospy/automate.py | CalcSuite._combine_core_aux_specs | def _combine_core_aux_specs(self):
"""Combine permutations over core and auxilliary Calc specs."""
all_specs = []
for core_dict in self._permute_core_specs():
for aux_dict in self._permute_aux_specs():
all_specs.append(_merge_dicts(core_dict, aux_dict))
return all_specs | python | def _combine_core_aux_specs(self):
"""Combine permutations over core and auxilliary Calc specs."""
all_specs = []
for core_dict in self._permute_core_specs():
for aux_dict in self._permute_aux_specs():
all_specs.append(_merge_dicts(core_dict, aux_dict))
return all_specs | Combine permutations over core and auxilliary Calc specs. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L231-L237 |
spencerahill/aospy | aospy/automate.py | CalcSuite.create_calcs | def create_calcs(self):
"""Generate a Calc object for each requested parameter combination."""
specs = self._combine_core_aux_specs()
for spec in specs:
spec['dtype_out_time'] = _prune_invalid_time_reductions(spec)
return [Calc(**sp) for sp in specs] | python | def create_calcs(self):
"""Generate a Calc object for each requested parameter combination."""
specs = self._combine_core_aux_specs()
for spec in specs:
spec['dtype_out_time'] = _prune_invalid_time_reductions(spec)
return [Calc(**sp) for sp in specs] | Generate a Calc object for each requested parameter combination. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/automate.py#L239-L244 |
spencerahill/aospy | aospy/utils/io.py | data_in_label | def data_in_label(intvl_in, dtype_in_time, dtype_in_vert=False):
"""Create string label specifying the input data of a calculation."""
intvl_lbl = intvl_in
time_lbl = dtype_in_time
lbl = '_'.join(['from', intvl_lbl, time_lbl]).replace('__', '_')
vert_lbl = dtype_in_vert if dtype_in_vert else False
if vert_lbl:
lbl = '_'.join([lbl, vert_lbl]).replace('__', '_')
return lbl | python | def data_in_label(intvl_in, dtype_in_time, dtype_in_vert=False):
"""Create string label specifying the input data of a calculation."""
intvl_lbl = intvl_in
time_lbl = dtype_in_time
lbl = '_'.join(['from', intvl_lbl, time_lbl]).replace('__', '_')
vert_lbl = dtype_in_vert if dtype_in_vert else False
if vert_lbl:
lbl = '_'.join([lbl, vert_lbl]).replace('__', '_')
return lbl | Create string label specifying the input data of a calculation. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/io.py#L8-L16 |
spencerahill/aospy | aospy/utils/io.py | time_label | def time_label(intvl, return_val=True):
"""Create time interval label for aospy data I/O."""
# Monthly labels are 2 digit integers: '01' for jan, '02' for feb, etc.
if type(intvl) in [list, tuple, np.ndarray] and len(intvl) == 1:
label = '{:02}'.format(intvl[0])
value = np.array(intvl)
elif type(intvl) == int and intvl in range(1, 13):
label = '{:02}'.format(intvl)
value = np.array([intvl])
# Seasonal and annual time labels are short strings.
else:
labels = {'jfm': (1, 2, 3),
'fma': (2, 3, 4),
'mam': (3, 4, 5),
'amj': (4, 5, 6),
'mjj': (5, 6, 7),
'jja': (6, 7, 8),
'jas': (7, 8, 9),
'aso': (8, 9, 10),
'son': (9, 10, 11),
'ond': (10, 11, 12),
'ndj': (11, 12, 1),
'djf': (1, 2, 12),
'jjas': (6, 7, 8, 9),
'djfm': (12, 1, 2, 3),
'ann': range(1, 13)}
for lbl, vals in labels.items():
if intvl == lbl or set(intvl) == set(vals):
label = lbl
value = np.array(vals)
break
if return_val:
return label, value
else:
return label | python | def time_label(intvl, return_val=True):
"""Create time interval label for aospy data I/O."""
# Monthly labels are 2 digit integers: '01' for jan, '02' for feb, etc.
if type(intvl) in [list, tuple, np.ndarray] and len(intvl) == 1:
label = '{:02}'.format(intvl[0])
value = np.array(intvl)
elif type(intvl) == int and intvl in range(1, 13):
label = '{:02}'.format(intvl)
value = np.array([intvl])
# Seasonal and annual time labels are short strings.
else:
labels = {'jfm': (1, 2, 3),
'fma': (2, 3, 4),
'mam': (3, 4, 5),
'amj': (4, 5, 6),
'mjj': (5, 6, 7),
'jja': (6, 7, 8),
'jas': (7, 8, 9),
'aso': (8, 9, 10),
'son': (9, 10, 11),
'ond': (10, 11, 12),
'ndj': (11, 12, 1),
'djf': (1, 2, 12),
'jjas': (6, 7, 8, 9),
'djfm': (12, 1, 2, 3),
'ann': range(1, 13)}
for lbl, vals in labels.items():
if intvl == lbl or set(intvl) == set(vals):
label = lbl
value = np.array(vals)
break
if return_val:
return label, value
else:
return label | Create time interval label for aospy data I/O. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/io.py#L38-L72 |
spencerahill/aospy | aospy/utils/io.py | data_name_gfdl | def data_name_gfdl(name, domain, data_type, intvl_type, data_yr,
intvl, data_in_start_yr, data_in_dur):
"""Determine the filename of GFDL model data output."""
# Determine starting year of netCDF file to be accessed.
extra_yrs = (data_yr - data_in_start_yr) % data_in_dur
data_in_yr = data_yr - extra_yrs
# Determine file name. Two cases: time series (ts) or time-averaged (av).
if data_type in ('ts', 'inst'):
if intvl_type == 'annual':
if data_in_dur == 1:
filename = '.'.join([domain, '{:04d}'.format(data_in_yr),
name, 'nc'])
else:
filename = '.'.join([domain, '{:04d}-{:04d}'.format(
data_in_yr, data_in_yr + data_in_dur - 1
), name, 'nc'])
elif intvl_type == 'monthly':
filename = (domain + '.{:04d}'.format(data_in_yr) + '01-' +
'{:04d}'.format(int(data_in_yr+data_in_dur-1)) +
'12.' + name + '.nc')
elif intvl_type == 'daily':
filename = (domain + '.{:04d}'.format(data_in_yr) + '0101-' +
'{:04d}'.format(int(data_in_yr+data_in_dur-1)) +
'1231.' + name + '.nc')
elif 'hr' in intvl_type:
filename = '.'.join(
[domain, '{:04d}010100-{:04d}123123'.format(
data_in_yr, data_in_yr + data_in_dur - 1), name, 'nc']
)
elif data_type == 'av':
if intvl_type in ['annual', 'ann']:
label = 'ann'
elif intvl_type in ['seasonal', 'seas']:
label = intvl.upper()
elif intvl_type in ['monthly', 'mon']:
label, val = time_label(intvl)
if data_in_dur == 1:
filename = (domain + '.{:04d}'.format(data_in_yr) +
'.' + label + '.nc')
else:
filename = (domain + '.{:04d}'.format(data_in_yr) + '-' +
'{:04d}'.format(int(data_in_yr+data_in_dur-1)) +
'.' + label + '.nc')
elif data_type == 'av_ts':
filename = (domain + '.{:04d}'.format(data_in_yr) + '-' +
'{:04d}'.format(int(data_in_yr+data_in_dur-1)) +
'.01-12.nc')
return filename | python | def data_name_gfdl(name, domain, data_type, intvl_type, data_yr,
intvl, data_in_start_yr, data_in_dur):
"""Determine the filename of GFDL model data output."""
# Determine starting year of netCDF file to be accessed.
extra_yrs = (data_yr - data_in_start_yr) % data_in_dur
data_in_yr = data_yr - extra_yrs
# Determine file name. Two cases: time series (ts) or time-averaged (av).
if data_type in ('ts', 'inst'):
if intvl_type == 'annual':
if data_in_dur == 1:
filename = '.'.join([domain, '{:04d}'.format(data_in_yr),
name, 'nc'])
else:
filename = '.'.join([domain, '{:04d}-{:04d}'.format(
data_in_yr, data_in_yr + data_in_dur - 1
), name, 'nc'])
elif intvl_type == 'monthly':
filename = (domain + '.{:04d}'.format(data_in_yr) + '01-' +
'{:04d}'.format(int(data_in_yr+data_in_dur-1)) +
'12.' + name + '.nc')
elif intvl_type == 'daily':
filename = (domain + '.{:04d}'.format(data_in_yr) + '0101-' +
'{:04d}'.format(int(data_in_yr+data_in_dur-1)) +
'1231.' + name + '.nc')
elif 'hr' in intvl_type:
filename = '.'.join(
[domain, '{:04d}010100-{:04d}123123'.format(
data_in_yr, data_in_yr + data_in_dur - 1), name, 'nc']
)
elif data_type == 'av':
if intvl_type in ['annual', 'ann']:
label = 'ann'
elif intvl_type in ['seasonal', 'seas']:
label = intvl.upper()
elif intvl_type in ['monthly', 'mon']:
label, val = time_label(intvl)
if data_in_dur == 1:
filename = (domain + '.{:04d}'.format(data_in_yr) +
'.' + label + '.nc')
else:
filename = (domain + '.{:04d}'.format(data_in_yr) + '-' +
'{:04d}'.format(int(data_in_yr+data_in_dur-1)) +
'.' + label + '.nc')
elif data_type == 'av_ts':
filename = (domain + '.{:04d}'.format(data_in_yr) + '-' +
'{:04d}'.format(int(data_in_yr+data_in_dur-1)) +
'.01-12.nc')
return filename | Determine the filename of GFDL model data output. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/io.py#L75-L122 |
spencerahill/aospy | aospy/utils/io.py | dmget | def dmget(files_list):
"""Call GFDL command 'dmget' to access archived files."""
if isinstance(files_list, str):
files_list = [files_list]
archive_files = []
for f in files_list:
if f.startswith('/archive'):
archive_files.append(f)
try:
subprocess.call(['dmget'] + archive_files)
except OSError:
logging.debug('dmget command not found in this machine') | python | def dmget(files_list):
"""Call GFDL command 'dmget' to access archived files."""
if isinstance(files_list, str):
files_list = [files_list]
archive_files = []
for f in files_list:
if f.startswith('/archive'):
archive_files.append(f)
try:
subprocess.call(['dmget'] + archive_files)
except OSError:
logging.debug('dmget command not found in this machine') | Call GFDL command 'dmget' to access archived files. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/utils/io.py#L125-L137 |
spencerahill/aospy | aospy/calc.py | _replace_pressure | def _replace_pressure(arguments, dtype_in_vert):
"""Replace p and dp Vars with appropriate Var objects specific to
the dtype_in_vert."""
arguments_out = []
for arg in arguments:
if isinstance(arg, Var):
if arg.name == 'p':
arguments_out.append(_P_VARS[dtype_in_vert])
elif arg.name == 'dp':
arguments_out.append(_DP_VARS[dtype_in_vert])
else:
arguments_out.append(arg)
else:
arguments_out.append(arg)
return arguments_out | python | def _replace_pressure(arguments, dtype_in_vert):
"""Replace p and dp Vars with appropriate Var objects specific to
the dtype_in_vert."""
arguments_out = []
for arg in arguments:
if isinstance(arg, Var):
if arg.name == 'p':
arguments_out.append(_P_VARS[dtype_in_vert])
elif arg.name == 'dp':
arguments_out.append(_DP_VARS[dtype_in_vert])
else:
arguments_out.append(arg)
else:
arguments_out.append(arg)
return arguments_out | Replace p and dp Vars with appropriate Var objects specific to
the dtype_in_vert. | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/calc.py#L28-L42 |
spencerahill/aospy | aospy/calc.py | _add_metadata_as_attrs | def _add_metadata_as_attrs(data, units, description, dtype_out_vert):
"""Add metadata attributes to Dataset or DataArray"""
if isinstance(data, xr.DataArray):
return _add_metadata_as_attrs_da(data, units, description,
dtype_out_vert)
else:
for name, arr in data.data_vars.items():
_add_metadata_as_attrs_da(arr, units, description,
dtype_out_vert)
return data | python | def _add_metadata_as_attrs(data, units, description, dtype_out_vert):
"""Add metadata attributes to Dataset or DataArray"""
if isinstance(data, xr.DataArray):
return _add_metadata_as_attrs_da(data, units, description,
dtype_out_vert)
else:
for name, arr in data.data_vars.items():
_add_metadata_as_attrs_da(arr, units, description,
dtype_out_vert)
return data | Add metadata attributes to Dataset or DataArray | https://github.com/spencerahill/aospy/blob/2f6e775b9b9956c54af117fdcdce2c87196afb6c/aospy/calc.py#L594-L603 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.