| column | type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
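Each record in this table describes one source file: repository metadata (the `max_stars_*`, `max_issues_*`, and `max_forks_*` columns), the raw file text in `content`, and a set of per-file quality signals (the `qsc_*` columns). As a minimal sketch of how records with this schema might be inspected, assuming the table has been exported to a Parquet file; the filename below and the use of pandas are assumptions, not part of the original data, while the column names come from the schema above:

```python
import pandas as pd

# Minimal sketch, assuming a hypothetical Parquet export of the table above.
df = pd.read_parquet("code_quality_signals.parquet")

# A few columns of interest, taken from the schema above.
cols = [
    "hexsha",
    "max_stars_repo_name",
    "max_stars_repo_path",
    "size",
    "alphanum_fraction",
    "qsc_code_frac_chars_comments_quality_signal",
    "qsc_codepython_frac_lines_import_quality_signal",
]

# Example filter: small Python files that contain at least some comments.
subset = df.loc[
    (df["lang"] == "Python")
    & (df["size"] < 1_000)
    & (df["qsc_code_frac_chars_comments_quality_signal"] > 0),
    cols,
]
print(subset.sort_values("size").head())
```

The sample rows that follow list each record's cell values in the column order given above; multi-line `content` cells (the file text itself) appear inline between the surrounding metadata values.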
cd8a35bcbfb312cda1686fb97584510659ede9ae
| 669
|
py
|
Python
|
Basic Data Structures/array/ListSlicing.py
|
rush2catch/algorithms-leetcode
|
38a5e6aa33d48fa14fe09c50c28a2eaabd736e55
|
[
"MIT"
] | null | null | null |
Basic Data Structures/array/ListSlicing.py
|
rush2catch/algorithms-leetcode
|
38a5e6aa33d48fa14fe09c50c28a2eaabd736e55
|
[
"MIT"
] | null | null | null |
Basic Data Structures/array/ListSlicing.py
|
rush2catch/algorithms-leetcode
|
38a5e6aa33d48fa14fe09c50c28a2eaabd736e55
|
[
"MIT"
] | null | null | null |
def list_slicing(nums, row, col):
new_matrix = []
minimal = row * col
if nums is []:
return None
elif (len(nums) % minimal is 0) and (len(nums) >= minimal):
for r in range(row):
new_matrix.append(nums[r * col : (r + 1) * col])
return new_matrix
else:
return nums
list_0 = [1, 2, 3, 6]
print(list_slicing(list_0, 1, 4))
print(list_slicing(list_0, 2, 4))
list_1 = [1, 2, 4, 5, 6, 9, 4, 6, 5, 8, 1, 4]
print(list_slicing(list_1, 3, 4))
print(list_slicing(list_1, 4, 3))
print(list_slicing(list_1, 2, 6))
print(list_slicing(list_1, 6, 2))
print(list_slicing(list_1, 5, 3))
print(list_slicing(list_1, 2, 5))
| 27.875
| 63
| 0.605381
| 122
| 669
| 3.139344
| 0.262295
| 0.258486
| 0.334204
| 0.417755
| 0.467363
| 0.295039
| 0.120104
| 0
| 0
| 0
| 0
| 0.08577
| 0.233184
| 669
| 23
| 64
| 29.086957
| 0.660819
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0
| 0
| 0.190476
| 0.380952
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
269e4231c77fb4d48c447cd279a2ecdca59c4ba8
| 702
|
py
|
Python
|
kqueen_ui/utils/wrappers.py
|
Richard-Barrett/kqueen-ui
|
f6b41edddd5130e5cd79773640942e9a824bbae1
|
[
"MIT"
] | null | null | null |
kqueen_ui/utils/wrappers.py
|
Richard-Barrett/kqueen-ui
|
f6b41edddd5130e5cd79773640942e9a824bbae1
|
[
"MIT"
] | null | null | null |
kqueen_ui/utils/wrappers.py
|
Richard-Barrett/kqueen-ui
|
f6b41edddd5130e5cd79773640942e9a824bbae1
|
[
"MIT"
] | null | null | null |
from flask import request
from flask import redirect
from flask import session
from flask import url_for
from functools import wraps
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not session.get('user', {}).get('id', None):
return redirect(url_for('ui.login', next=request.url))
return f(*args, **kwargs)
return decorated_function
def superadmin_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not session.get('user', {}).get('role', 'member') == 'superadmin':
return redirect(url_for('ui.login', next=request.url))
return f(*args, **kwargs)
return decorated_function
| 29.25
| 77
| 0.660969
| 92
| 702
| 4.945652
| 0.326087
| 0.079121
| 0.131868
| 0.065934
| 0.650549
| 0.650549
| 0.650549
| 0.650549
| 0.650549
| 0.650549
| 0
| 0
| 0.205128
| 702
| 23
| 78
| 30.521739
| 0.815412
| 0
| 0
| 0.526316
| 0
| 0
| 0.065527
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.210526
| false
| 0
| 0.263158
| 0
| 0.789474
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 3
|
26a919609f85889cbb4dc125397fcb5ff64f815f
| 220
|
py
|
Python
|
blank/build.py
|
davidbelliott/whitgl
|
bfa1a33b0059b722a773e513653ff802c8dc7d6f
|
[
"MIT"
] | 26
|
2016-01-09T16:46:53.000Z
|
2021-05-17T02:41:13.000Z
|
blank/build.py
|
davidbelliott/whitgl
|
bfa1a33b0059b722a773e513653ff802c8dc7d6f
|
[
"MIT"
] | null | null | null |
blank/build.py
|
davidbelliott/whitgl
|
bfa1a33b0059b722a773e513653ff802c8dc7d6f
|
[
"MIT"
] | 3
|
2016-02-19T06:05:19.000Z
|
2021-03-10T02:19:35.000Z
|
import os
import sys
joinp = os.path.join
sys.path.insert(0, 'whitgl')
sys.path.insert(0, joinp('whitgl', 'input'))
import build
sys.path.insert(0, 'input')
import ninja_syntax
build.do_game('Game', '', ['png','ogg'])
| 18.333333
| 44
| 0.690909
| 36
| 220
| 4.166667
| 0.472222
| 0.14
| 0.26
| 0.28
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015228
| 0.104545
| 220
| 11
| 45
| 20
| 0.746193
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.444444
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
26b107fd23e87c597f676bc069f5e3b5b448d1e3
| 593
|
py
|
Python
|
suricata-4.1.4/python/suricata/ctl/test_filestore.py
|
runtest007/dpdk_surcata_4.1.1
|
5abf91f483b418b5d9c2dd410b5c850d6ed95c5f
|
[
"MIT"
] | 77
|
2019-06-17T07:05:07.000Z
|
2022-03-07T03:26:27.000Z
|
suricata-4.1.4/python/suricata/ctl/test_filestore.py
|
clockdad/DPDK_SURICATA-4_1_1
|
974cc9eb54b0b1ab90eff12a95617e3e293b77d3
|
[
"MIT"
] | 22
|
2019-07-18T02:32:10.000Z
|
2022-03-24T03:39:11.000Z
|
suricata-4.1.4/python/suricata/ctl/test_filestore.py
|
clockdad/DPDK_SURICATA-4_1_1
|
974cc9eb54b0b1ab90eff12a95617e3e293b77d3
|
[
"MIT"
] | 49
|
2019-06-18T03:31:56.000Z
|
2022-03-13T05:23:10.000Z
|
from __future__ import print_function
import unittest
import filestore
class PruneTestCase(unittest.TestCase):
def test_parse_age(self):
self.assertEqual(filestore.parse_age("1s"), 1)
self.assertEqual(filestore.parse_age("1m"), 60)
self.assertEqual(filestore.parse_age("1h"), 3600)
self.assertEqual(filestore.parse_age("1d"), 86400)
with self.assertRaises(filestore.InvalidAgeFormatError) as err:
filestore.parse_age("1")
with self.assertRaises(filestore.InvalidAgeFormatError) as err:
filestore.parse_age("1y")
| 31.210526
| 71
| 0.706577
| 68
| 593
| 5.970588
| 0.441176
| 0.137931
| 0.251232
| 0.285714
| 0.669951
| 0.35468
| 0.35468
| 0.35468
| 0.35468
| 0.35468
| 0
| 0.037344
| 0.187184
| 593
| 18
| 72
| 32.944444
| 0.804979
| 0
| 0
| 0.153846
| 0
| 0
| 0.01855
| 0
| 0
| 0
| 0
| 0
| 0.461538
| 1
| 0.076923
| false
| 0
| 0.230769
| 0
| 0.384615
| 0.076923
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
26d58240f4233e1d13f48a78a83f734ca262cc13
| 147
|
py
|
Python
|
Qcover/simulator/__init__.py
|
BAQIS-Quantum/Qcover
|
ca3776ed73fefa0cfef08042143a8cf842f8dad5
|
[
"Apache-2.0"
] | 38
|
2021-12-22T03:12:01.000Z
|
2022-03-17T06:57:10.000Z
|
Qcover/simulator/__init__.py
|
BAQIS-Quantum/Qcover
|
ca3776ed73fefa0cfef08042143a8cf842f8dad5
|
[
"Apache-2.0"
] | null | null | null |
Qcover/simulator/__init__.py
|
BAQIS-Quantum/Qcover
|
ca3776ed73fefa0cfef08042143a8cf842f8dad5
|
[
"Apache-2.0"
] | 13
|
2021-12-22T07:32:44.000Z
|
2022-02-28T06:47:41.000Z
|
from .qton import Qcircuit, Qcodes
import warnings
warnings.filterwarnings("ignore")
__all__ = [
# 'Simulator',
'Qcircuit',
'Qcodes'
]
| 16.333333
| 34
| 0.680272
| 14
| 147
| 6.857143
| 0.714286
| 0.291667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 147
| 9
| 35
| 16.333333
| 0.806723
| 0.081633
| 0
| 0
| 0
| 0
| 0.149254
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.285714
| 0
| 0.285714
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
26e97e5ea8220154eb41374939938275b9e537b0
| 741
|
py
|
Python
|
AppPython/app/core/src/forms.py
|
denalme/AplicacionPython
|
eb99af3c21f003135192ad040a0a04a40b63ea70
|
[
"MIT"
] | null | null | null |
AppPython/app/core/src/forms.py
|
denalme/AplicacionPython
|
eb99af3c21f003135192ad040a0a04a40b63ea70
|
[
"MIT"
] | null | null | null |
AppPython/app/core/src/forms.py
|
denalme/AplicacionPython
|
eb99af3c21f003135192ad040a0a04a40b63ea70
|
[
"MIT"
] | null | null | null |
from django import forms
from .pqrsf import pqrsf
class ContactForm(forms.Form):
#Atributos del formulario de contacto
usuario = forms.CharField(label="Nombre", required=True, widget=forms.TextInput(attrs={'class':'formulario input', 'placeholder':'Nombre'}))
correo = forms.EmailField(label="Correo Electrónico", required=True,widget=forms.EmailInput(attrs={'class':'formulario input','placeholder':'Correo Electrónico'}))
tipomsj = forms.ChoiceField(label="Asunto", required=True, choices=pqrsf, widget=forms.Select(attrs={'class':'formulario input'}))
mensaje = forms.CharField(label="Mensaje", required=True, widget=forms.Textarea(attrs={'class':'formulario input', 'rows':'5','placeholder':'Escribe tu Mensaje'}))
| 74.1
| 167
| 0.747638
| 87
| 741
| 6.367816
| 0.448276
| 0.086643
| 0.144404
| 0.180505
| 0.129964
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001484
| 0.090418
| 741
| 9
| 168
| 82.333333
| 0.820475
| 0.048583
| 0
| 0
| 0
| 0
| 0.285918
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.285714
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 3
|
26f6c233aae91fb0635319c24ac7a5452088a65f
| 520
|
py
|
Python
|
gdc_readgroups/exceptions.py
|
NCI-GDC/gdc-readgroups
|
874387bb3473b0a0680551339e50b072cc058eb6
|
[
"Apache-2.0"
] | null | null | null |
gdc_readgroups/exceptions.py
|
NCI-GDC/gdc-readgroups
|
874387bb3473b0a0680551339e50b072cc058eb6
|
[
"Apache-2.0"
] | null | null | null |
gdc_readgroups/exceptions.py
|
NCI-GDC/gdc-readgroups
|
874387bb3473b0a0680551339e50b072cc058eb6
|
[
"Apache-2.0"
] | 1
|
2020-01-23T22:07:10.000Z
|
2020-01-23T22:07:10.000Z
|
"""
Exceptions for Read Group headers
"""
class NoReadGroupError(Exception):
"""NoReadGroupError"""
class SamtoolsViewError(Exception):
"""SamtoolsViewError"""
class InvalidPlatformError(Exception):
"""InvalidPlatformError"""
class InvalidPlatformModelError(Exception):
"""InvalidPlatformError"""
class MissingReadgroupIdError(Exception):
"""MissingReadgroupIdError"""
class InvalidDatetimeError(Exception):
"""InvalidDatetimeError"""
class NotABamError(Exception):
"""NotABamError"""
| 20.8
| 43
| 0.746154
| 33
| 520
| 11.757576
| 0.424242
| 0.149485
| 0.175258
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121154
| 520
| 24
| 44
| 21.666667
| 0.849015
| 0.323077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 3
|
f8053be6ee69e87199ea558062ed1fe681dca092
| 361
|
py
|
Python
|
busshaming/models/agency.py
|
katharosada/busshaming
|
c8d7cd4baf9ff049cda49c92da4d5ca10f68e6a9
|
[
"MIT"
] | 42
|
2018-01-20T01:12:25.000Z
|
2022-02-02T01:40:17.000Z
|
busshaming/models/agency.py
|
katharosada/busshaming
|
c8d7cd4baf9ff049cda49c92da4d5ca10f68e6a9
|
[
"MIT"
] | 2
|
2018-01-24T03:58:17.000Z
|
2018-06-10T01:05:57.000Z
|
busshaming/models/agency.py
|
katharosada/busshaming
|
c8d7cd4baf9ff049cda49c92da4d5ca10f68e6a9
|
[
"MIT"
] | 7
|
2018-01-24T05:49:13.000Z
|
2018-12-03T08:47:43.000Z
|
from django.db import models
class Agency(models.Model):
gtfs_agency_id = models.CharField(max_length=200)
feed = models.ForeignKey('Feed')
name = models.CharField(max_length=200)
class Meta:
unique_together = ('gtfs_agency_id', 'feed')
def __str__(self):
return f'{self.feed.slug} - {self.gtfs_agency_id} ({self.name})'
| 25.785714
| 72
| 0.67867
| 49
| 361
| 4.734694
| 0.530612
| 0.12931
| 0.155172
| 0.206897
| 0.232759
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020478
| 0.188366
| 361
| 13
| 73
| 27.769231
| 0.771331
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 0.058172
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.111111
| 0.111111
| 0.888889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
f81030a9747b6fbce3be0c3890586bc3da2d99c2
| 27,895
|
py
|
Python
|
nova/network/ldapdns.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/network/ldapdns.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/network/ldapdns.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | 2
|
2017-07-20T17:31:34.000Z
|
2020-07-24T02:42:19.000Z
|
begin_unit
comment|'# Copyright 2012 Andrew Bogott for the Wikimedia Foundation'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'try'
op|':'
newline|'\n'
indent|' '
name|'import'
name|'ldap'
newline|'\n'
dedent|''
name|'except'
name|'ImportError'
op|':'
newline|'\n'
comment|'# This module needs to be importable despite ldap not being a requirement'
nl|'\n'
DECL|variable|ldap
indent|' '
name|'ldap'
op|'='
name|'None'
newline|'\n'
nl|'\n'
dedent|''
name|'import'
name|'time'
newline|'\n'
nl|'\n'
name|'from'
name|'oslo_log'
name|'import'
name|'log'
name|'as'
name|'logging'
newline|'\n'
nl|'\n'
name|'import'
name|'nova'
op|'.'
name|'conf'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'i18n'
name|'import'
name|'_'
op|','
name|'_LW'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'network'
name|'import'
name|'dns_driver'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'utils'
newline|'\n'
nl|'\n'
DECL|variable|CONF
name|'CONF'
op|'='
name|'nova'
op|'.'
name|'conf'
op|'.'
name|'CONF'
newline|'\n'
DECL|variable|LOG
name|'LOG'
op|'='
name|'logging'
op|'.'
name|'getLogger'
op|'('
name|'__name__'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
comment|'# Importing ldap.modlist breaks the tests for some reason,'
nl|'\n'
comment|'# so this is an abbreviated version of a function from'
nl|'\n'
comment|'# there.'
nl|'\n'
DECL|function|create_modlist
name|'def'
name|'create_modlist'
op|'('
name|'newattrs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'modlist'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'attrtype'
name|'in'
name|'newattrs'
op|'.'
name|'keys'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
name|'utf8_vals'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'val'
name|'in'
name|'newattrs'
op|'['
name|'attrtype'
op|']'
op|':'
newline|'\n'
indent|' '
name|'utf8_vals'
op|'.'
name|'append'
op|'('
name|'utils'
op|'.'
name|'utf8'
op|'('
name|'val'
op|')'
op|')'
newline|'\n'
dedent|''
name|'newattrs'
op|'['
name|'attrtype'
op|']'
op|'='
name|'utf8_vals'
newline|'\n'
name|'modlist'
op|'.'
name|'append'
op|'('
op|'('
name|'attrtype'
op|','
name|'newattrs'
op|'['
name|'attrtype'
op|']'
op|')'
op|')'
newline|'\n'
dedent|''
name|'return'
name|'modlist'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|DNSEntry
dedent|''
name|'class'
name|'DNSEntry'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|member|__init__
indent|' '
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'ldap_object'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""ldap_object is an instance of ldap.LDAPObject.\n\n It should already be initialized and bound before\n getting passed in here.\n """'
newline|'\n'
name|'self'
op|'.'
name|'lobj'
op|'='
name|'ldap_object'
newline|'\n'
name|'self'
op|'.'
name|'ldap_tuple'
op|'='
name|'None'
newline|'\n'
name|'self'
op|'.'
name|'qualified_domain'
op|'='
name|'None'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'classmethod'
newline|'\n'
DECL|member|_get_tuple_for_domain
name|'def'
name|'_get_tuple_for_domain'
op|'('
name|'cls'
op|','
name|'lobj'
op|','
name|'domain'
op|')'
op|':'
newline|'\n'
indent|' '
name|'entry'
op|'='
name|'lobj'
op|'.'
name|'search_s'
op|'('
name|'CONF'
op|'.'
name|'ldap_dns_base_dn'
op|','
name|'ldap'
op|'.'
name|'SCOPE_SUBTREE'
op|','
nl|'\n'
string|"'(associatedDomain=%s)'"
op|'%'
name|'utils'
op|'.'
name|'utf8'
op|'('
name|'domain'
op|')'
op|')'
newline|'\n'
name|'if'
name|'not'
name|'entry'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'None'
newline|'\n'
dedent|''
name|'if'
name|'len'
op|'('
name|'entry'
op|')'
op|'>'
number|'1'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'warning'
op|'('
name|'_LW'
op|'('
string|'"Found multiple matches for domain "'
nl|'\n'
string|'"%(domain)s.\\n%(entry)s"'
op|')'
op|','
nl|'\n'
name|'domain'
op|','
name|'entry'
op|')'
newline|'\n'
dedent|''
name|'return'
name|'entry'
op|'['
number|'0'
op|']'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'classmethod'
newline|'\n'
DECL|member|_get_all_domains
name|'def'
name|'_get_all_domains'
op|'('
name|'cls'
op|','
name|'lobj'
op|')'
op|':'
newline|'\n'
indent|' '
name|'entries'
op|'='
name|'lobj'
op|'.'
name|'search_s'
op|'('
name|'CONF'
op|'.'
name|'ldap_dns_base_dn'
op|','
nl|'\n'
name|'ldap'
op|'.'
name|'SCOPE_SUBTREE'
op|','
string|"'(sOARecord=*)'"
op|')'
newline|'\n'
name|'domains'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'entry'
name|'in'
name|'entries'
op|':'
newline|'\n'
indent|' '
name|'domain'
op|'='
name|'entry'
op|'['
number|'1'
op|']'
op|'.'
name|'get'
op|'('
string|"'associatedDomain'"
op|')'
newline|'\n'
name|'if'
name|'domain'
op|':'
newline|'\n'
indent|' '
name|'domains'
op|'.'
name|'append'
op|'('
name|'domain'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
dedent|''
dedent|''
name|'return'
name|'domains'
newline|'\n'
nl|'\n'
DECL|member|_set_tuple
dedent|''
name|'def'
name|'_set_tuple'
op|'('
name|'self'
op|','
name|'tuple'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'ldap_tuple'
op|'='
name|'tuple'
newline|'\n'
nl|'\n'
DECL|member|_qualify
dedent|''
name|'def'
name|'_qualify'
op|'('
name|'self'
op|','
name|'name'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
string|"'%s.%s'"
op|'%'
op|'('
name|'name'
op|','
name|'self'
op|'.'
name|'qualified_domain'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_dequalify
dedent|''
name|'def'
name|'_dequalify'
op|'('
name|'self'
op|','
name|'name'
op|')'
op|':'
newline|'\n'
indent|' '
name|'z'
op|'='
string|'".%s"'
op|'%'
name|'self'
op|'.'
name|'qualified_domain'
newline|'\n'
name|'if'
name|'name'
op|'.'
name|'endswith'
op|'('
name|'z'
op|')'
op|':'
newline|'\n'
indent|' '
name|'dequalified'
op|'='
name|'name'
op|'['
number|'0'
op|':'
name|'name'
op|'.'
name|'rfind'
op|'('
name|'z'
op|')'
op|']'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'warning'
op|'('
name|'_LW'
op|'('
string|'"Unable to dequalify. %(name)s is not in "'
nl|'\n'
string|'"%(domain)s.\\n"'
op|')'
op|','
nl|'\n'
op|'{'
string|"'name'"
op|':'
name|'name'
op|','
nl|'\n'
string|"'domain'"
op|':'
name|'self'
op|'.'
name|'qualified_domain'
op|'}'
op|')'
newline|'\n'
name|'dequalified'
op|'='
name|'None'
newline|'\n'
nl|'\n'
dedent|''
name|'return'
name|'dequalified'
newline|'\n'
nl|'\n'
DECL|member|_dn
dedent|''
name|'def'
name|'_dn'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'self'
op|'.'
name|'ldap_tuple'
op|'['
number|'0'
op|']'
newline|'\n'
DECL|variable|dn
dedent|''
name|'dn'
op|'='
name|'property'
op|'('
name|'_dn'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_rdn
name|'def'
name|'_rdn'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'self'
op|'.'
name|'dn'
op|'.'
name|'partition'
op|'('
string|"','"
op|')'
op|'['
number|'0'
op|']'
newline|'\n'
DECL|variable|rdn
dedent|''
name|'rdn'
op|'='
name|'property'
op|'('
name|'_rdn'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|DomainEntry
dedent|''
name|'class'
name|'DomainEntry'
op|'('
name|'DNSEntry'
op|')'
op|':'
newline|'\n'
nl|'\n'
indent|' '
op|'@'
name|'classmethod'
newline|'\n'
DECL|member|_soa
name|'def'
name|'_soa'
op|'('
name|'cls'
op|')'
op|':'
newline|'\n'
indent|' '
name|'date'
op|'='
name|'time'
op|'.'
name|'strftime'
op|'('
string|"'%Y%m%d%H%M%S'"
op|')'
newline|'\n'
name|'soa'
op|'='
string|"'%s %s %s %s %s %s %s'"
op|'%'
op|'('
nl|'\n'
name|'CONF'
op|'.'
name|'ldap_dns_servers'
op|'['
number|'0'
op|']'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'ldap_dns_soa_hostmaster'
op|','
nl|'\n'
name|'date'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'ldap_dns_soa_refresh'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'ldap_dns_soa_retry'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'ldap_dns_soa_expiry'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'ldap_dns_soa_minimum'
op|')'
newline|'\n'
name|'return'
name|'utils'
op|'.'
name|'utf8'
op|'('
name|'soa'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'classmethod'
newline|'\n'
DECL|member|create_domain
name|'def'
name|'create_domain'
op|'('
name|'cls'
op|','
name|'lobj'
op|','
name|'domain'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Create a new domain entry, and return an object that wraps it."""'
newline|'\n'
name|'entry'
op|'='
name|'cls'
op|'.'
name|'_get_tuple_for_domain'
op|'('
name|'lobj'
op|','
name|'domain'
op|')'
newline|'\n'
name|'if'
name|'entry'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'FloatingIpDNSExists'
op|'('
name|'name'
op|'='
name|'domain'
op|','
name|'domain'
op|'='
string|"''"
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'newdn'
op|'='
string|"'dc=%s,%s'"
op|'%'
op|'('
name|'domain'
op|','
name|'CONF'
op|'.'
name|'ldap_dns_base_dn'
op|')'
newline|'\n'
name|'attrs'
op|'='
op|'{'
string|"'objectClass'"
op|':'
op|'['
string|"'domainrelatedobject'"
op|','
string|"'dnsdomain'"
op|','
nl|'\n'
string|"'domain'"
op|','
string|"'dcobject'"
op|','
string|"'top'"
op|']'
op|','
nl|'\n'
string|"'sOARecord'"
op|':'
op|'['
name|'cls'
op|'.'
name|'_soa'
op|'('
op|')'
op|']'
op|','
nl|'\n'
string|"'associatedDomain'"
op|':'
op|'['
name|'domain'
op|']'
op|','
nl|'\n'
string|"'dc'"
op|':'
op|'['
name|'domain'
op|']'
op|'}'
newline|'\n'
name|'lobj'
op|'.'
name|'add_s'
op|'('
name|'newdn'
op|','
name|'create_modlist'
op|'('
name|'attrs'
op|')'
op|')'
newline|'\n'
name|'return'
name|'DomainEntry'
op|'('
name|'lobj'
op|','
name|'domain'
op|')'
newline|'\n'
nl|'\n'
DECL|member|__init__
dedent|''
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'ldap_object'
op|','
name|'domain'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'DomainEntry'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
name|'ldap_object'
op|')'
newline|'\n'
name|'entry'
op|'='
name|'self'
op|'.'
name|'_get_tuple_for_domain'
op|'('
name|'self'
op|'.'
name|'lobj'
op|','
name|'domain'
op|')'
newline|'\n'
name|'if'
name|'not'
name|'entry'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'NotFound'
op|'('
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'_set_tuple'
op|'('
name|'entry'
op|')'
newline|'\n'
name|'assert'
op|'('
name|'entry'
op|'['
number|'1'
op|']'
op|'['
string|"'associatedDomain'"
op|']'
op|'['
number|'0'
op|']'
op|'=='
name|'domain'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'qualified_domain'
op|'='
name|'domain'
newline|'\n'
nl|'\n'
DECL|member|delete
dedent|''
name|'def'
name|'delete'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Delete the domain that this entry refers to."""'
newline|'\n'
name|'entries'
op|'='
name|'self'
op|'.'
name|'lobj'
op|'.'
name|'search_s'
op|'('
name|'self'
op|'.'
name|'dn'
op|','
nl|'\n'
name|'ldap'
op|'.'
name|'SCOPE_SUBTREE'
op|','
nl|'\n'
string|"'(aRecord=*)'"
op|')'
newline|'\n'
name|'for'
name|'entry'
name|'in'
name|'entries'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'lobj'
op|'.'
name|'delete_s'
op|'('
name|'entry'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'lobj'
op|'.'
name|'delete_s'
op|'('
name|'self'
op|'.'
name|'dn'
op|')'
newline|'\n'
nl|'\n'
DECL|member|update_soa
dedent|''
name|'def'
name|'update_soa'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mlist'
op|'='
op|'['
op|'('
name|'ldap'
op|'.'
name|'MOD_REPLACE'
op|','
string|"'sOARecord'"
op|','
name|'self'
op|'.'
name|'_soa'
op|'('
op|')'
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'lobj'
op|'.'
name|'modify_s'
op|'('
name|'self'
op|'.'
name|'dn'
op|','
name|'mlist'
op|')'
newline|'\n'
nl|'\n'
DECL|member|subentry_with_name
dedent|''
name|'def'
name|'subentry_with_name'
op|'('
name|'self'
op|','
name|'name'
op|')'
op|':'
newline|'\n'
indent|' '
name|'entry'
op|'='
name|'self'
op|'.'
name|'lobj'
op|'.'
name|'search_s'
op|'('
name|'self'
op|'.'
name|'dn'
op|','
name|'ldap'
op|'.'
name|'SCOPE_SUBTREE'
op|','
nl|'\n'
string|"'(associatedDomain=%s.%s)'"
op|'%'
nl|'\n'
op|'('
name|'utils'
op|'.'
name|'utf8'
op|'('
name|'name'
op|')'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'utf8'
op|'('
name|'self'
op|'.'
name|'qualified_domain'
op|')'
op|')'
op|')'
newline|'\n'
name|'if'
name|'entry'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'HostEntry'
op|'('
name|'self'
op|','
name|'entry'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'None'
newline|'\n'
nl|'\n'
DECL|member|subentries_with_ip
dedent|''
dedent|''
name|'def'
name|'subentries_with_ip'
op|'('
name|'self'
op|','
name|'ip'
op|')'
op|':'
newline|'\n'
indent|' '
name|'entries'
op|'='
name|'self'
op|'.'
name|'lobj'
op|'.'
name|'search_s'
op|'('
name|'self'
op|'.'
name|'dn'
op|','
name|'ldap'
op|'.'
name|'SCOPE_SUBTREE'
op|','
nl|'\n'
string|"'(aRecord=%s)'"
op|'%'
name|'utils'
op|'.'
name|'utf8'
op|'('
name|'ip'
op|')'
op|')'
newline|'\n'
name|'objs'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'entry'
name|'in'
name|'entries'
op|':'
newline|'\n'
indent|' '
name|'if'
string|"'associatedDomain'"
name|'in'
name|'entry'
op|'['
number|'1'
op|']'
op|':'
newline|'\n'
indent|' '
name|'objs'
op|'.'
name|'append'
op|'('
name|'HostEntry'
op|'('
name|'self'
op|','
name|'entry'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'return'
name|'objs'
newline|'\n'
nl|'\n'
DECL|member|add_entry
dedent|''
name|'def'
name|'add_entry'
op|'('
name|'self'
op|','
name|'name'
op|','
name|'address'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'self'
op|'.'
name|'subentry_with_name'
op|'('
name|'name'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'FloatingIpDNSExists'
op|'('
name|'name'
op|'='
name|'name'
op|','
nl|'\n'
name|'domain'
op|'='
name|'self'
op|'.'
name|'qualified_domain'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'entries'
op|'='
name|'self'
op|'.'
name|'subentries_with_ip'
op|'('
name|'address'
op|')'
newline|'\n'
name|'if'
name|'entries'
op|':'
newline|'\n'
comment|'# We already have an ldap entry for this IP, so we just'
nl|'\n'
comment|'# need to add the new name.'
nl|'\n'
indent|' '
name|'existingdn'
op|'='
name|'entries'
op|'['
number|'0'
op|']'
op|'.'
name|'dn'
newline|'\n'
name|'self'
op|'.'
name|'lobj'
op|'.'
name|'modify_s'
op|'('
name|'existingdn'
op|','
op|'['
op|'('
name|'ldap'
op|'.'
name|'MOD_ADD'
op|','
nl|'\n'
string|"'associatedDomain'"
op|','
nl|'\n'
name|'utils'
op|'.'
name|'utf8'
op|'('
name|'self'
op|'.'
name|'_qualify'
op|'('
name|'name'
op|')'
op|')'
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
name|'return'
name|'self'
op|'.'
name|'subentry_with_name'
op|'('
name|'name'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
comment|'# We need to create an entirely new entry.'
nl|'\n'
indent|' '
name|'newdn'
op|'='
string|"'dc=%s,%s'"
op|'%'
op|'('
name|'name'
op|','
name|'self'
op|'.'
name|'dn'
op|')'
newline|'\n'
name|'attrs'
op|'='
op|'{'
string|"'objectClass'"
op|':'
op|'['
string|"'domainrelatedobject'"
op|','
string|"'dnsdomain'"
op|','
nl|'\n'
string|"'domain'"
op|','
string|"'dcobject'"
op|','
string|"'top'"
op|']'
op|','
nl|'\n'
string|"'aRecord'"
op|':'
op|'['
name|'address'
op|']'
op|','
nl|'\n'
string|"'associatedDomain'"
op|':'
op|'['
name|'self'
op|'.'
name|'_qualify'
op|'('
name|'name'
op|')'
op|']'
op|','
nl|'\n'
string|"'dc'"
op|':'
op|'['
name|'name'
op|']'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'lobj'
op|'.'
name|'add_s'
op|'('
name|'newdn'
op|','
name|'create_modlist'
op|'('
name|'attrs'
op|')'
op|')'
newline|'\n'
name|'return'
name|'self'
op|'.'
name|'subentry_with_name'
op|'('
name|'name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|remove_entry
dedent|''
dedent|''
name|'def'
name|'remove_entry'
op|'('
name|'self'
op|','
name|'name'
op|')'
op|':'
newline|'\n'
indent|' '
name|'entry'
op|'='
name|'self'
op|'.'
name|'subentry_with_name'
op|'('
name|'name'
op|')'
newline|'\n'
name|'if'
name|'not'
name|'entry'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'NotFound'
op|'('
op|')'
newline|'\n'
dedent|''
name|'entry'
op|'.'
name|'remove_name'
op|'('
name|'name'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'update_soa'
op|'('
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|HostEntry
dedent|''
dedent|''
name|'class'
name|'HostEntry'
op|'('
name|'DNSEntry'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|member|__init__
indent|' '
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'parent'
op|','
name|'tuple'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'HostEntry'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
name|'parent'
op|'.'
name|'lobj'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'parent_entry'
op|'='
name|'parent'
newline|'\n'
name|'self'
op|'.'
name|'_set_tuple'
op|'('
name|'tuple'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'qualified_domain'
op|'='
name|'parent'
op|'.'
name|'qualified_domain'
newline|'\n'
nl|'\n'
DECL|member|remove_name
dedent|''
name|'def'
name|'remove_name'
op|'('
name|'self'
op|','
name|'name'
op|')'
op|':'
newline|'\n'
indent|' '
name|'names'
op|'='
name|'self'
op|'.'
name|'ldap_tuple'
op|'['
number|'1'
op|']'
op|'['
string|"'associatedDomain'"
op|']'
newline|'\n'
name|'if'
name|'not'
name|'names'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'NotFound'
op|'('
op|')'
newline|'\n'
dedent|''
name|'if'
name|'len'
op|'('
name|'names'
op|')'
op|'>'
number|'1'
op|':'
newline|'\n'
comment|'# We just have to remove the requested domain.'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'lobj'
op|'.'
name|'modify_s'
op|'('
name|'self'
op|'.'
name|'dn'
op|','
op|'['
op|'('
name|'ldap'
op|'.'
name|'MOD_DELETE'
op|','
string|"'associatedDomain'"
op|','
nl|'\n'
name|'self'
op|'.'
name|'_qualify'
op|'('
name|'utils'
op|'.'
name|'utf8'
op|'('
name|'name'
op|')'
op|')'
op|')'
op|']'
op|')'
newline|'\n'
name|'if'
op|'('
name|'self'
op|'.'
name|'rdn'
op|'['
number|'1'
op|']'
op|'=='
name|'name'
op|')'
op|':'
newline|'\n'
comment|'# We just removed the rdn, so we need to move this entry.'
nl|'\n'
indent|' '
name|'names'
op|'.'
name|'remove'
op|'('
name|'self'
op|'.'
name|'_qualify'
op|'('
name|'name'
op|')'
op|')'
newline|'\n'
name|'newrdn'
op|'='
string|"'dc=%s'"
op|'%'
name|'self'
op|'.'
name|'_dequalify'
op|'('
name|'names'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'lobj'
op|'.'
name|'modrdn_s'
op|'('
name|'self'
op|'.'
name|'dn'
op|','
op|'['
name|'newrdn'
op|']'
op|')'
newline|'\n'
dedent|''
dedent|''
name|'else'
op|':'
newline|'\n'
comment|'# We should delete the entire record.'
nl|'\n'
indent|' '
name|'self'
op|'.'
name|'lobj'
op|'.'
name|'delete_s'
op|'('
name|'self'
op|'.'
name|'dn'
op|')'
newline|'\n'
nl|'\n'
DECL|member|modify_address
dedent|''
dedent|''
name|'def'
name|'modify_address'
op|'('
name|'self'
op|','
name|'name'
op|','
name|'address'
op|')'
op|':'
newline|'\n'
indent|' '
name|'names'
op|'='
name|'self'
op|'.'
name|'ldap_tuple'
op|'['
number|'1'
op|']'
op|'['
string|"'associatedDomain'"
op|']'
newline|'\n'
name|'if'
name|'not'
name|'names'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'NotFound'
op|'('
op|')'
newline|'\n'
dedent|''
name|'if'
name|'len'
op|'('
name|'names'
op|')'
op|'=='
number|'1'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'lobj'
op|'.'
name|'modify_s'
op|'('
name|'self'
op|'.'
name|'dn'
op|','
op|'['
op|'('
name|'ldap'
op|'.'
name|'MOD_REPLACE'
op|','
string|"'aRecord'"
op|','
nl|'\n'
op|'['
name|'utils'
op|'.'
name|'utf8'
op|'('
name|'address'
op|')'
op|']'
op|')'
op|']'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'remove_name'
op|'('
name|'name'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'parent'
op|'.'
name|'add_entry'
op|'('
name|'name'
op|','
name|'address'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_names
dedent|''
dedent|''
name|'def'
name|'_names'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'names'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'domain'
name|'in'
name|'self'
op|'.'
name|'ldap_tuple'
op|'['
number|'1'
op|']'
op|'['
string|"'associatedDomain'"
op|']'
op|':'
newline|'\n'
indent|' '
name|'names'
op|'.'
name|'append'
op|'('
name|'self'
op|'.'
name|'_dequalify'
op|'('
name|'domain'
op|')'
op|')'
newline|'\n'
dedent|''
name|'return'
name|'names'
newline|'\n'
DECL|variable|names
dedent|''
name|'names'
op|'='
name|'property'
op|'('
name|'_names'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_ip
name|'def'
name|'_ip'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'ip'
op|'='
name|'self'
op|'.'
name|'ldap_tuple'
op|'['
number|'1'
op|']'
op|'['
string|"'aRecord'"
op|']'
op|'['
number|'0'
op|']'
newline|'\n'
name|'return'
name|'ip'
newline|'\n'
DECL|variable|ip
dedent|''
name|'ip'
op|'='
name|'property'
op|'('
name|'_ip'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_parent
name|'def'
name|'_parent'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'self'
op|'.'
name|'parent_entry'
newline|'\n'
DECL|variable|parent
dedent|''
name|'parent'
op|'='
name|'property'
op|'('
name|'_parent'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|LdapDNS
dedent|''
name|'class'
name|'LdapDNS'
op|'('
name|'dns_driver'
op|'.'
name|'DNSDriver'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Driver for PowerDNS using ldap as a back end.\n\n This driver assumes ldap-method=strict, with all domains\n in the top-level, aRecords only.\n """'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'ldap'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'ImportError'
op|'('
name|'_'
op|'('
string|"'ldap not installed'"
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'lobj'
op|'='
name|'ldap'
op|'.'
name|'initialize'
op|'('
name|'CONF'
op|'.'
name|'ldap_dns_url'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'lobj'
op|'.'
name|'simple_bind_s'
op|'('
name|'CONF'
op|'.'
name|'ldap_dns_user'
op|','
nl|'\n'
name|'CONF'
op|'.'
name|'ldap_dns_password'
op|')'
newline|'\n'
nl|'\n'
DECL|member|get_domains
dedent|''
name|'def'
name|'get_domains'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'DomainEntry'
op|'.'
name|'_get_all_domains'
op|'('
name|'self'
op|'.'
name|'lobj'
op|')'
newline|'\n'
nl|'\n'
DECL|member|create_entry
dedent|''
name|'def'
name|'create_entry'
op|'('
name|'self'
op|','
name|'name'
op|','
name|'address'
op|','
name|'type'
op|','
name|'domain'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'type'
op|'.'
name|'lower'
op|'('
op|')'
op|'!='
string|"'a'"
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'InvalidInput'
op|'('
name|'_'
op|'('
string|'"This driver only supports "'
nl|'\n'
string|'"type \'a\' entries."'
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'dEntry'
op|'='
name|'DomainEntry'
op|'('
name|'self'
op|'.'
name|'lobj'
op|','
name|'domain'
op|')'
newline|'\n'
name|'dEntry'
op|'.'
name|'add_entry'
op|'('
name|'name'
op|','
name|'address'
op|')'
newline|'\n'
nl|'\n'
DECL|member|delete_entry
dedent|''
name|'def'
name|'delete_entry'
op|'('
name|'self'
op|','
name|'name'
op|','
name|'domain'
op|')'
op|':'
newline|'\n'
indent|' '
name|'dEntry'
op|'='
name|'DomainEntry'
op|'('
name|'self'
op|'.'
name|'lobj'
op|','
name|'domain'
op|')'
newline|'\n'
name|'dEntry'
op|'.'
name|'remove_entry'
op|'('
name|'name'
op|')'
newline|'\n'
nl|'\n'
DECL|member|get_entries_by_address
dedent|''
name|'def'
name|'get_entries_by_address'
op|'('
name|'self'
op|','
name|'address'
op|','
name|'domain'
op|')'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'dEntry'
op|'='
name|'DomainEntry'
op|'('
name|'self'
op|'.'
name|'lobj'
op|','
name|'domain'
op|')'
newline|'\n'
dedent|''
name|'except'
name|'exception'
op|'.'
name|'NotFound'
op|':'
newline|'\n'
indent|' '
name|'return'
op|'['
op|']'
newline|'\n'
dedent|''
name|'entries'
op|'='
name|'dEntry'
op|'.'
name|'subentries_with_ip'
op|'('
name|'address'
op|')'
newline|'\n'
name|'names'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'entry'
name|'in'
name|'entries'
op|':'
newline|'\n'
indent|' '
name|'names'
op|'.'
name|'extend'
op|'('
name|'entry'
op|'.'
name|'names'
op|')'
newline|'\n'
dedent|''
name|'return'
name|'names'
newline|'\n'
nl|'\n'
DECL|member|get_entries_by_name
dedent|''
name|'def'
name|'get_entries_by_name'
op|'('
name|'self'
op|','
name|'name'
op|','
name|'domain'
op|')'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'dEntry'
op|'='
name|'DomainEntry'
op|'('
name|'self'
op|'.'
name|'lobj'
op|','
name|'domain'
op|')'
newline|'\n'
dedent|''
name|'except'
name|'exception'
op|'.'
name|'NotFound'
op|':'
newline|'\n'
indent|' '
name|'return'
op|'['
op|']'
newline|'\n'
dedent|''
name|'nEntry'
op|'='
name|'dEntry'
op|'.'
name|'subentry_with_name'
op|'('
name|'name'
op|')'
newline|'\n'
name|'if'
name|'nEntry'
op|':'
newline|'\n'
indent|' '
name|'return'
op|'['
name|'nEntry'
op|'.'
name|'ip'
op|']'
newline|'\n'
nl|'\n'
DECL|member|modify_address
dedent|''
dedent|''
name|'def'
name|'modify_address'
op|'('
name|'self'
op|','
name|'name'
op|','
name|'address'
op|','
name|'domain'
op|')'
op|':'
newline|'\n'
indent|' '
name|'dEntry'
op|'='
name|'DomainEntry'
op|'('
name|'self'
op|'.'
name|'lobj'
op|','
name|'domain'
op|')'
newline|'\n'
name|'nEntry'
op|'='
name|'dEntry'
op|'.'
name|'subentry_with_name'
op|'('
name|'name'
op|')'
newline|'\n'
name|'nEntry'
op|'.'
name|'modify_address'
op|'('
name|'name'
op|','
name|'address'
op|')'
newline|'\n'
nl|'\n'
DECL|member|create_domain
dedent|''
name|'def'
name|'create_domain'
op|'('
name|'self'
op|','
name|'domain'
op|')'
op|':'
newline|'\n'
indent|' '
name|'DomainEntry'
op|'.'
name|'create_domain'
op|'('
name|'self'
op|'.'
name|'lobj'
op|','
name|'domain'
op|')'
newline|'\n'
nl|'\n'
DECL|member|delete_domain
dedent|''
name|'def'
name|'delete_domain'
op|'('
name|'self'
op|','
name|'domain'
op|')'
op|':'
newline|'\n'
indent|' '
name|'dEntry'
op|'='
name|'DomainEntry'
op|'('
name|'self'
op|'.'
name|'lobj'
op|','
name|'domain'
op|')'
newline|'\n'
name|'dEntry'
op|'.'
name|'delete'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|delete_dns_file
dedent|''
name|'def'
name|'delete_dns_file'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'warning'
op|'('
name|'_LW'
op|'('
string|'"This shouldn\'t be getting called except during "'
nl|'\n'
string|'"testing."'
op|')'
op|')'
newline|'\n'
name|'pass'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
| 12.044473
| 174
| 0.578276
| 4,103
| 27,895
| 3.863758
| 0.067024
| 0.169936
| 0.110389
| 0.081246
| 0.793099
| 0.737337
| 0.676276
| 0.613196
| 0.55018
| 0.466978
| 0
| 0.001835
| 0.120846
| 27,895
| 2,315
| 175
| 12.049676
| 0.644593
| 0
| 0
| 0.942981
| 0
| 0.000864
| 0.338484
| 0.00803
| 0
| 0
| 0
| 0
| 0.000432
| 0
| null | null | 0.001296
| 0.005184
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
f81ca2ce592e84428e81a66ce38e515a6ee5edcf
| 42
|
py
|
Python
|
firecloud/__about__.py
|
jnktsj/fiss
|
2cfce1f6dc0c43f62c51e8a9296946b9990a76fa
|
[
"BSD-3-Clause"
] | 20
|
2017-08-05T08:44:51.000Z
|
2022-03-24T15:33:48.000Z
|
firecloud/__about__.py
|
jnktsj/fiss
|
2cfce1f6dc0c43f62c51e8a9296946b9990a76fa
|
[
"BSD-3-Clause"
] | 117
|
2016-10-26T15:31:48.000Z
|
2022-02-16T23:06:33.000Z
|
firecloud/__about__.py
|
jnktsj/fiss
|
2cfce1f6dc0c43f62c51e8a9296946b9990a76fa
|
[
"BSD-3-Clause"
] | 21
|
2017-03-13T15:16:03.000Z
|
2022-02-25T19:14:36.000Z
|
# Package version
__version__ = "0.16.31"
| 14
| 23
| 0.714286
| 6
| 42
| 4.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138889
| 0.142857
| 42
| 2
| 24
| 21
| 0.583333
| 0.357143
| 0
| 0
| 0
| 0
| 0.28
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
f83bc822a6f47feb415380dd8f541756419c1e6c
| 265
|
py
|
Python
|
tests/conftest.py
|
sparkythehuman/sms-service--send-message
|
8f095ba181f1d42df3968fe34d5e20f30851e021
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
sparkythehuman/sms-service--send-message
|
8f095ba181f1d42df3968fe34d5e20f30851e021
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
sparkythehuman/sms-service--send-message
|
8f095ba181f1d42df3968fe34d5e20f30851e021
|
[
"MIT"
] | null | null | null |
import pytest
@pytest.fixture(autouse=True)
def set_up(monkeypatch):
monkeypatch.setenv('TABLE_NAME', 'test-table')
monkeypatch.setenv('TWILIO_ACCOUNT_SID', 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX')
monkeypatch.setenv('TWILIO_AUTH_TOKEN', 'my_auth_token')
| 33.125
| 82
| 0.784906
| 30
| 265
| 6.666667
| 0.666667
| 0.255
| 0.23
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090566
| 265
| 8
| 83
| 33.125
| 0.829876
| 0
| 0
| 0
| 0
| 0
| 0.383459
| 0.12782
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.166667
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
f873639a13e98ee3a4151d1be3542d91c969ac64
| 530
|
py
|
Python
|
djangobmf/contrib/team/views.py
|
dmatthes/django-bmf
|
3a97167de7841b13f1ddd23b33ae65e98dc49dfd
|
[
"BSD-3-Clause"
] | 1
|
2020-05-11T08:00:49.000Z
|
2020-05-11T08:00:49.000Z
|
djangobmf/contrib/team/views.py
|
dmatthes/django-bmf
|
3a97167de7841b13f1ddd23b33ae65e98dc49dfd
|
[
"BSD-3-Clause"
] | null | null | null |
djangobmf/contrib/team/views.py
|
dmatthes/django-bmf
|
3a97167de7841b13f1ddd23b33ae65e98dc49dfd
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python
# ex:set fileencoding=utf-8:
from __future__ import unicode_literals
from djangobmf.views import ModuleCreateView
from djangobmf.views import ModuleUpdateView
from djangobmf.views import ModuleDetailView
from .forms import BMFTeamUpdateForm
from .forms import BMFTeamCreateForm
class TeamCreateView(ModuleCreateView):
form_class = BMFTeamCreateForm
class TeamUpdateView(ModuleUpdateView):
form_class = BMFTeamUpdateForm
class TeamDetailView(ModuleDetailView):
form_class = BMFTeamUpdateForm
| 22.083333
| 44
| 0.828302
| 54
| 530
| 7.981481
| 0.481481
| 0.090487
| 0.12529
| 0.167053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002146
| 0.120755
| 530
| 23
| 45
| 23.043478
| 0.922747
| 0.081132
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 3
|
f88f6e13c4185abcf8cceff79dbfda6d0f9a19ba
| 486
|
py
|
Python
|
wsgi/settings.py
|
zhemao/speakeasy
|
793bcca6d30fe31b1579bb8464f1eafacd6eb593
|
[
"BSD-2-Clause"
] | 1
|
2022-02-02T10:40:59.000Z
|
2022-02-02T10:40:59.000Z
|
wsgi/settings.py
|
zhemao/speakeasy
|
793bcca6d30fe31b1579bb8464f1eafacd6eb593
|
[
"BSD-2-Clause"
] | null | null | null |
wsgi/settings.py
|
zhemao/speakeasy
|
793bcca6d30fe31b1579bb8464f1eafacd6eb593
|
[
"BSD-2-Clause"
] | null | null | null |
import os
MONGO_HOST = os.getenv('OPENSHIFT_NOSQL_DB_HOST')
MONGO_PORT = os.getenv('OPENSHIFT_NOSQL_DB_PORT')
MONGO_USERNAME = os.getenv('OPENSHIFT_NOSQL_DB_USERNAME')
MONGO_PASSWORD = os.getenv('OPENSHIFT_NOSQL_DB_PASSWORD')
MONGO_DBNAME = 'speakeasy'
PRIV_KEY_FILE = os.getenv('OPENSHIFT_DATA_DIR') + '/server_private.pem'
PUB_KEY_FILE = os.getenv('OPENSHIFT_DATA_DIR') + '/server_public.pem'
PRIV_KEY = open(PRIV_KEY_FILE).read()
PUB_KEY = open(PUB_KEY_FILE).read()
DEBUG = True
| 30.375
| 71
| 0.790123
| 75
| 486
| 4.68
| 0.36
| 0.136752
| 0.290598
| 0.250712
| 0.48433
| 0.210826
| 0.210826
| 0.210826
| 0
| 0
| 0
| 0
| 0.082305
| 486
| 15
| 72
| 32.4
| 0.786996
| 0
| 0
| 0
| 0
| 0
| 0.374486
| 0.205761
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.090909
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 3
|
f8afd1b0a1d62c5e20c07db83d59c2c494f17348
| 13,343
|
py
|
Python
|
source/rttov_test/profile-datasets-py/div83/077.py
|
bucricket/projectMAScorrection
|
89489026c8e247ec7c364e537798e766331fe569
|
[
"BSD-3-Clause"
] | null | null | null |
source/rttov_test/profile-datasets-py/div83/077.py
|
bucricket/projectMAScorrection
|
89489026c8e247ec7c364e537798e766331fe569
|
[
"BSD-3-Clause"
] | 1
|
2022-03-12T12:19:59.000Z
|
2022-03-12T12:19:59.000Z
|
source/rttov_test/profile-datasets-py/div83/077.py
|
bucricket/projectMAScorrection
|
89489026c8e247ec7c364e537798e766331fe569
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Profile ../profile-datasets-py/div83/077.py
file automaticaly created by prof_gen.py script
"""
self["ID"] = "../profile-datasets-py/div83/077.py"
self["Q"] = numpy.array([ 3.01408100e+00, 3.40341800e+00, 3.94918400e+00,
4.08209300e+00, 4.65722800e+00, 5.59385900e+00,
5.96882400e+00, 5.96578400e+00, 6.02361400e+00,
6.13266200e+00, 5.61561800e+00, 5.17541300e+00,
4.73120800e+00, 4.38244100e+00, 4.13858300e+00,
3.94732400e+00, 3.82339500e+00, 3.74146600e+00,
3.68389600e+00, 3.64322700e+00, 3.61384700e+00,
3.58783700e+00, 3.57544700e+00, 3.57424700e+00,
3.57814700e+00, 3.57652700e+00, 3.56295700e+00,
3.53513800e+00, 3.51090800e+00, 3.50409800e+00,
3.51977800e+00, 3.54417700e+00, 3.53987700e+00,
3.51452800e+00, 3.48830800e+00, 3.47651800e+00,
3.48119800e+00, 3.49274800e+00, 3.50137800e+00,
3.50850800e+00, 3.52815800e+00, 3.56910700e+00,
3.61097700e+00, 3.71830600e+00, 3.89014500e+00,
3.89370500e+00, 3.85655500e+00, 3.87925500e+00,
3.95365400e+00, 4.00917400e+00, 4.16308300e+00,
4.52899900e+00, 5.18923300e+00, 6.26899100e+00,
7.92153700e+00, 1.00846000e+01, 1.24507400e+01,
1.47046800e+01, 1.67259200e+01, 1.84705600e+01,
1.96999100e+01, 2.08678600e+01, 2.23955000e+01,
2.44190000e+01, 2.71340600e+01, 3.11191300e+01,
3.80605500e+01, 4.93422700e+01, 7.03837500e+01,
1.05079000e+02, 1.47056400e+02, 1.80304500e+02,
2.22368500e+02, 2.73803000e+02, 3.33293900e+02,
4.05331600e+02, 4.94623200e+02, 6.04438400e+02,
7.36045800e+02, 8.86931700e+02, 1.05317000e+03,
1.23561100e+03, 1.43888700e+03, 1.66709600e+03,
1.91848200e+03, 2.17581600e+03, 2.42905500e+03,
2.65031700e+03, 2.83038600e+03, 2.95328200e+03,
2.87015800e+03, 2.97041000e+03, 3.22605900e+03,
3.13244700e+03, 3.04276300e+03, 2.95681100e+03,
2.87439400e+03, 2.79532400e+03, 2.71943500e+03,
2.64656700e+03, 2.57657400e+03])
self["P"] = numpy.array([ 5.00000000e-03, 1.61000000e-02, 3.84000000e-02,
7.69000000e-02, 1.37000000e-01, 2.24400000e-01,
3.45400000e-01, 5.06400000e-01, 7.14000000e-01,
9.75300000e-01, 1.29720000e+00, 1.68720000e+00,
2.15260000e+00, 2.70090000e+00, 3.33980000e+00,
4.07700000e+00, 4.92040000e+00, 5.87760000e+00,
6.95670000e+00, 8.16550000e+00, 9.51190000e+00,
1.10038000e+01, 1.26492000e+01, 1.44559000e+01,
1.64318000e+01, 1.85847000e+01, 2.09224000e+01,
2.34526000e+01, 2.61829000e+01, 2.91210000e+01,
3.22744000e+01, 3.56505000e+01, 3.92566000e+01,
4.31001000e+01, 4.71882000e+01, 5.15278000e+01,
5.61260000e+01, 6.09895000e+01, 6.61253000e+01,
7.15398000e+01, 7.72396000e+01, 8.32310000e+01,
8.95204000e+01, 9.61138000e+01, 1.03017000e+02,
1.10237000e+02, 1.17778000e+02, 1.25646000e+02,
1.33846000e+02, 1.42385000e+02, 1.51266000e+02,
1.60496000e+02, 1.70078000e+02, 1.80018000e+02,
1.90320000e+02, 2.00989000e+02, 2.12028000e+02,
2.23442000e+02, 2.35234000e+02, 2.47408000e+02,
2.59969000e+02, 2.72919000e+02, 2.86262000e+02,
3.00000000e+02, 3.14137000e+02, 3.28675000e+02,
3.43618000e+02, 3.58966000e+02, 3.74724000e+02,
3.90893000e+02, 4.07474000e+02, 4.24470000e+02,
4.41882000e+02, 4.59712000e+02, 4.77961000e+02,
4.96630000e+02, 5.15720000e+02, 5.35232000e+02,
5.55167000e+02, 5.75525000e+02, 5.96306000e+02,
6.17511000e+02, 6.39140000e+02, 6.61192000e+02,
6.83667000e+02, 7.06565000e+02, 7.29886000e+02,
7.53628000e+02, 7.77790000e+02, 8.02371000e+02,
8.27371000e+02, 8.52788000e+02, 8.78620000e+02,
9.04866000e+02, 9.31524000e+02, 9.58591000e+02,
9.86067000e+02, 1.01395000e+03, 1.04223000e+03,
1.07092000e+03, 1.10000000e+03])
self["CO2"] = numpy.array([ 376.9289, 376.9267, 376.9235, 376.9185, 376.9102, 376.8979,
376.8878, 376.8858, 376.8987, 376.9157, 376.9379, 376.967 ,
377.0032, 377.0483, 377.0994, 377.1415, 377.1806, 377.2196,
377.2566, 377.2936, 377.3406, 377.4116, 377.4967, 377.5807,
377.6576, 377.7326, 377.7957, 377.8617, 377.9087, 377.9647,
378.0677, 378.1777, 378.3247, 378.4827, 378.5467, 378.5667,
378.6177, 378.7377, 378.8647, 379.1987, 379.5707, 379.8876,
380.1336, 380.3936, 380.7865, 381.1975, 381.5395, 381.8335,
382.1145, 382.2825, 382.4564, 382.5853, 382.705 , 382.8186,
382.922 , 383.0401, 383.2172, 383.4014, 383.6806, 383.9769,
384.2704, 384.569 , 384.8374, 385.0826, 385.3235, 385.554 ,
385.7733, 385.971 , 386.1468, 386.2794, 386.3942, 386.4873,
386.57 , 386.6411, 386.7101, 386.7782, 386.8426, 386.902 ,
386.948 , 386.9845, 387.009 , 387.0222, 387.0443, 387.0946,
387.2027, 387.3873, 387.5952, 387.7785, 388.0156, 388.3936,
388.8278, 389.5115, 390.0167, 390.5358, 390.9229, 391.1729,
391.2791, 391.3101, 391.3399, 391.3685, 391.3959])
self["CO"] = numpy.array([ 0.4988025 , 0.4837694 , 0.4549212 , 0.4091083 , 0.3466384 ,
0.2724125 , 0.2529705 , 0.3556049 , 0.3436299 , 0.3118041 ,
0.2360657 , 0.1332083 , 0.06529029, 0.04917818, 0.04630671,
0.04344553, 0.04531133, 0.04861692, 0.05090421, 0.05133911,
0.05167021, 0.04959452, 0.04651663, 0.04325405, 0.04006766,
0.03693337, 0.03511297, 0.03345558, 0.03285768, 0.03228319,
0.03236039, 0.03244319, 0.03296888, 0.03355668, 0.03477178,
0.03638897, 0.03844617, 0.04135936, 0.04467554, 0.05192792,
0.06128788, 0.07390404, 0.09132347, 0.1136636 , 0.1258555 ,
0.1400065 , 0.1455584 , 0.1438834 , 0.1412344 , 0.1340595 ,
0.1269835 , 0.1253564 , 0.1252244 , 0.1268982 , 0.131127 ,
0.1360076 , 0.1437272 , 0.1521708 , 0.1615463 , 0.1718538 ,
0.1819464 , 0.192305 , 0.2043344 , 0.2183397 , 0.2317967 ,
0.2427394 , 0.2529074 , 0.2590382 , 0.2647594 , 0.2679568 ,
0.2707162 , 0.2713961 , 0.2720315 , 0.2725024 , 0.2734588 ,
0.2752834 , 0.2772678 , 0.279404 , 0.2813058 , 0.2830447 ,
0.2835061 , 0.2839138 , 0.2842594 , 0.2847725 , 0.2852078 ,
0.2855673 , 0.2859427 , 0.2860628 , 0.2855026 , 0.2845641 ,
0.2836705 , 0.2825133 , 0.28085 , 0.2789235 , 0.2766895 ,
0.2739117 , 0.2711194 , 0.2683139 , 0.265496 , 0.262667 ,
0.2598288 ])
self["T"] = numpy.array([ 197.478, 204.431, 217.518, 232.024, 240.06 , 241.488,
237.615, 229.648, 222.059, 220.002, 221.016, 222.004,
224.079, 226.704, 229.151, 230.726, 232.026, 233.278,
234.389, 235.37 , 236.325, 237.27 , 238.285, 239.125,
239.562, 239.408, 239.074, 238.623, 237.788, 236.618,
235.366, 234.287, 233.649, 232.492, 231.082, 230.178,
230.011, 230.065, 229.721, 228.916, 227.9 , 226.942,
226.202, 225.266, 224.187, 223.613, 222.971, 222.094,
221.208, 220.74 , 220.537, 220.284, 219.887, 219.382,
218.843, 218.328, 217.832, 217.287, 216.618, 215.885,
215.461, 215.505, 215.981, 216.806, 217.881, 219.112,
220.38 , 221.707, 223.156, 224.773, 226.616, 228.678,
230.824, 232.99 , 235.092, 237.21 , 239.397, 241.632,
243.87 , 246.04 , 248.097, 250.061, 251.959, 253.803,
255.582, 257.221, 258.709, 259.968, 261.008, 261.803,
262.785, 263.963, 265.207, 265.207, 265.207, 265.207,
265.207, 265.207, 265.207, 265.207, 265.207])
self["N2O"] = numpy.array([ 0.00386999, 0.00306999, 0.00246999, 0.00239999, 0.00190999,
0.00132999, 0.00145999, 0.00159999, 0.00196999, 0.00296998,
0.00447997, 0.00696996, 0.01015995, 0.01528993, 0.02053991,
0.02663989, 0.03359987, 0.04270984, 0.05238981, 0.06860975,
0.0840797 , 0.1006696 , 0.1174596 , 0.1335495 , 0.1466895 ,
0.1590294 , 0.1708994 , 0.1853993 , 0.2003893 , 0.2148792 ,
0.2275592 , 0.2346592 , 0.2415291 , 0.2481891 , 0.2543991 ,
0.2593091 , 0.2640791 , 0.2687091 , 0.272769 , 0.276369 ,
0.279779 , 0.282829 , 0.285849 , 0.2887289 , 0.2917289 ,
0.2947289 , 0.2977089 , 0.3006588 , 0.3035488 , 0.3063688 ,
0.3090987 , 0.3116986 , 0.3141584 , 0.316448 , 0.3185375 ,
0.3203968 , 0.321996 , 0.3226453 , 0.3232446 , 0.323774 ,
0.3242236 , 0.3245932 , 0.3248727 , 0.3250421 , 0.3251012 ,
0.3250999 , 0.3250976 , 0.325094 , 0.3250871 , 0.3250758 ,
0.3250622 , 0.3250514 , 0.3250377 , 0.325021 , 0.3250016 ,
0.3249782 , 0.3249492 , 0.3249135 , 0.3248707 , 0.3248216 ,
0.3247676 , 0.3247083 , 0.3246422 , 0.324568 , 0.3244863 ,
0.3244026 , 0.3243203 , 0.3242484 , 0.3241898 , 0.3241499 ,
0.3241769 , 0.3241443 , 0.3240612 , 0.3240916 , 0.3241208 ,
0.3241487 , 0.3241755 , 0.3242012 , 0.3242259 , 0.3242496 ,
0.3242723 ])
self["O3"] = numpy.array([ 0.4650166 , 0.3722967 , 0.25801 , 0.3565255 , 0.5657804 ,
0.8310854 , 1.275442 , 1.941668 , 2.751043 , 3.509408 ,
4.226426 , 4.982314 , 5.571684 , 5.950054 , 6.172944 ,
6.354005 , 6.459525 , 6.548995 , 6.644776 , 6.735845 ,
6.790695 , 6.827586 , 6.854715 , 6.862505 , 6.844556 ,
6.799816 , 6.769176 , 6.719646 , 6.545257 , 6.195258 ,
5.70893 , 5.274831 , 4.976922 , 4.668544 , 4.294395 ,
3.893666 , 3.566958 , 3.348398 , 3.139029 , 2.84919 ,
2.451261 , 2.077143 , 1.888043 , 1.731914 , 1.409245 ,
1.303475 , 1.292365 , 1.148696 , 0.9682352 , 0.8581456 ,
0.728699 , 0.5758034 , 0.4443007 , 0.3535498 , 0.2938877 ,
0.2527305 , 0.2229512 , 0.1999561 , 0.178937 , 0.1568281 ,
0.1353713 , 0.1190565 , 0.1065156 , 0.09549277, 0.08532788,
0.07889094, 0.07722016, 0.07431793, 0.06841198, 0.05924887,
0.04912847, 0.04243675, 0.03862361, 0.03721341, 0.03721039,
0.03694232, 0.03630673, 0.03556769, 0.03509875, 0.03521114,
0.03578028, 0.03636931, 0.03676782, 0.03692234, 0.03683649,
0.03651109, 0.03619457, 0.03624089, 0.03710717, 0.03880576,
0.04014684, 0.03936333, 0.03682412, 0.03682758, 0.03683089,
0.03683407, 0.03683711, 0.03684003, 0.03684284, 0.03684553,
0.03684811])
self["CH4"] = numpy.array([ 0.3005231, 0.2351152, 0.1864963, 0.1572414, 0.1760812,
0.1975499, 0.2255547, 0.2531465, 0.2866593, 0.31921 ,
0.3721429, 0.4494437, 0.5428354, 0.6650551, 0.7814038,
0.8957425, 0.9909472, 1.054496 , 1.115956 , 1.182386 ,
1.245675 , 1.300155 , 1.349285 , 1.396355 , 1.424055 ,
1.448405 , 1.471845 , 1.501435 , 1.532305 , 1.537285 ,
1.542625 , 1.548345 , 1.554454 , 1.551955 , 1.549555 ,
1.547285 , 1.545195 , 1.543345 , 1.549515 , 1.556025 ,
1.562874 , 1.570084 , 1.577644 , 1.617574 , 1.657994 ,
1.700273 , 1.730303 , 1.751553 , 1.771163 , 1.779313 ,
1.787803 , 1.793322 , 1.798201 , 1.802289 , 1.805406 ,
1.808492 , 1.811087 , 1.813773 , 1.81631 , 1.818906 ,
1.821484 , 1.824092 , 1.826669 , 1.829225 , 1.83172 ,
1.834133 , 1.83637 , 1.838309 , 1.84005 , 1.841406 ,
1.842689 , 1.843857 , 1.84505 , 1.846294 , 1.847644 ,
1.84909 , 1.850584 , 1.85209 , 1.853525 , 1.854883 ,
1.856113 , 1.857182 , 1.858302 , 1.859615 , 1.861771 ,
1.864195 , 1.866575 , 1.868445 , 1.869743 , 1.871178 ,
1.873019 , 1.875702 , 1.877623 , 1.879713 , 1.881288 ,
1.882308 , 1.882753 , 1.882902 , 1.883045 , 1.883183 ,
1.883315 ])
self["CTP"] = 500.0
self["CFRACTION"] = 0.0
self["IDG"] = 0
self["ISH"] = 0
self["ELEVATION"] = 0.0
self["S2M"]["T"] = 265.207
self["S2M"]["Q"] = 2576.57411645
self["S2M"]["O"] = 0.0368481128494
self["S2M"]["P"] = 876.30151
self["S2M"]["U"] = 0.0
self["S2M"]["V"] = 0.0
self["S2M"]["WFETC"] = 100000.0
self["SKIN"]["SURFTYPE"] = 0
self["SKIN"]["WATERTYPE"] = 1
self["SKIN"]["T"] = 265.207
self["SKIN"]["SALINITY"] = 35.0
self["SKIN"]["FOAM_FRACTION"] = 0.0
self["SKIN"]["FASTEM"] = numpy.array([ 3. , 5. , 15. , 0.1, 0.3])
self["ZENANGLE"] = 0.0
self["AZANGLE"] = 0.0
self["SUNZENANGLE"] = 0.0
self["SUNAZANGLE"] = 0.0
self["LATITUDE"] = 60.824
self["GAS_UNITS"] = 2
self["BE"] = 0.0
self["COSBK"] = 0.0
self["DATE"] = numpy.array([2006, 12, 10])
self["TIME"] = numpy.array([0, 0, 0])
f8b309e5e28868df32235aef95ba627c1ca50e48 | 1,888 | py | Python |
tests/examples/c_decisions/tests_decisions.py | MSGP117/acc-cosc-1336-spring-2022-MSGP117 | 46fdfa5da8f8eb887d2c79fe205b8a0064d6903d | ["MIT"] | null | null | null |
tests/examples/c_decisions/tests_decisions.py | MSGP117/acc-cosc-1336-spring-2022-MSGP117 | 46fdfa5da8f8eb887d2c79fe205b8a0064d6903d | ["MIT"] | null | null | null |
tests/examples/c_decisions/tests_decisions.py | MSGP117/acc-cosc-1336-spring-2022-MSGP117 | 46fdfa5da8f8eb887d2c79fe205b8a0064d6903d | ["MIT"] | 1 | 2022-02-12T03:50:32.000Z | 2022-02-12T03:50:32.000Z |
import unittest
from src.examples.c_decisions.decisions import (
    get_letter_grade,
    is_letter_consonant,
    logical_op_precedence,
    num_is_not_in_range_or,
    number_is_in_range_and,
    number_is_not_in_range,
    test_config,
)
class Test_Config(unittest.TestCase):
def test_configuration(self):
self.assertEqual(True, test_config())
def test_get_letter_grade(self):
self.assertEqual('A', get_letter_grade(90))
self.assertEqual('B', get_letter_grade(85))
self.assertEqual('C', get_letter_grade(75))
self.assertEqual('D', get_letter_grade(65))
self.assertEqual('F', get_letter_grade(55))
self.assertEqual('Invalid Number', get_letter_grade(-10))
def test_logical_op_precedence(self):
self.assertEqual(True, logical_op_precedence(True, False, True))
self.assertEqual(False, logical_op_precedence(False, False, False))
def test_number_is_in_range(self):
self.assertEqual(True, number_is_in_range_and(20, 100, 50))
self.assertEqual(False, number_is_in_range_and(20, 100, 0))
self.assertEqual(True, number_is_in_range_and(20, 100, 100))
self.assertEqual(False, number_is_in_range_and(20, 100, 101))
def test_number_is_not_in_range(self):
self.assertEqual(True, number_is_not_in_range(20, 100, 101))
self.assertEqual(True, number_is_not_in_range(20, 100, 50))
def test_num_is_not_in_range_or(self):
self.assertEqual(True, num_is_not_in_range_or(20, 100, 101))
self.assertEqual(False, num_is_not_in_range_or(20, 100, 50))
def test_is_letter_consonant(self):
self.assertEqual(False, is_letter_consonant('a'))
self.assertEqual(True, is_letter_consonant('z'))
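# --- Editor's sketch (not part of the original sample) ----------------------
# The module under test (src/examples/c_decisions/decisions.py) is not included
# in this dump. A minimal implementation consistent with the assertions above
# could look like the following; every name and threshold is inferred from the
# test cases, so treat it as an illustrative sketch rather than the actual
# course solution:
#
#     def test_config():
#         return True
#
#     def get_letter_grade(number):
#         if number < 0:
#             return 'Invalid Number'
#         if number >= 90:
#             return 'A'
#         if number >= 80:
#             return 'B'
#         if number >= 70:
#             return 'C'
#         if number >= 60:
#             return 'D'
#         return 'F'
#
#     def number_is_in_range_and(low, high, value):
#         return low <= value and value <= high
#
#     def num_is_not_in_range_or(low, high, value):
#         return value < low or value > high
#
# The remaining helpers are not sketched: number_is_not_in_range is asserted to
# return True for both an in-range and an out-of-range value, which suggests it
# deliberately demonstrates an operator-precedence pitfall, as presumably does
# logical_op_precedence.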
f8bf2336876845664f66233eebc0fc4562e2c6e0 | 296 | py | Python |
stock.py | lokesh-lraj/30-Day-LeetCoding-Challenge-april_2020 | 270e99be4a178587b2fc89113cdf3a931210e84f | ["MIT"] | 3 | 2020-04-18T09:03:21.000Z | 2020-04-18T09:03:25.000Z |
stock.py | lokesh-lraj/30-Day-LeetCoding-Challenge-april_2020 | 270e99be4a178587b2fc89113cdf3a931210e84f | ["MIT"] | null | null | null |
stock.py | lokesh-lraj/30-Day-LeetCoding-Challenge-april_2020 | 270e99be4a178587b2fc89113cdf3a931210e84f | ["MIT"] | null | null | null |
from typing import List


class Solution:
    def maxProfit(self, prices: List[int]) -> int:
        # Dynamic programming over two states: best balance while holding a
        # share (with_stock) and while holding no share (without_stock).
        with_stock = -2147483647
        without_stock = 0
        for stock in prices:
            with_stock = max(with_stock, without_stock - stock)
            without_stock = max(without_stock, with_stock + stock)
        return without_stock
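# --- Editor's note (not part of the original sample) ------------------------
# Illustrative usage of the snippet above, assuming the usual "unlimited
# transactions" formulation of the problem, where [7, 1, 5, 3, 6, 4] allows a
# best total profit of (5 - 1) + (6 - 3) = 7:
if __name__ == "__main__":
    assert Solution().maxProfit([7, 1, 5, 3, 6, 4]) == 7
    assert Solution().maxProfit([]) == 0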
3e11bd4f5fe50f533d78b84480d62520eb696807 | 151 | py | Python |
NhMedicalSite/panel/urls.py | Dogruyer/ecommerce | aa505b401e42882a96e6ef6375bd1a1ed95c5b85 | ["Apache-2.0"] | null | null | null |
NhMedicalSite/panel/urls.py | Dogruyer/ecommerce | aa505b401e42882a96e6ef6375bd1a1ed95c5b85 | ["Apache-2.0"] | null | null | null |
NhMedicalSite/panel/urls.py | Dogruyer/ecommerce | aa505b401e42882a96e6ef6375bd1a1ed95c5b85 | ["Apache-2.0"] | 1 | 2018-11-01T11:10:58.000Z | 2018-11-01T11:10:58.000Z |
from django.conf.urls import url
from django.conf import settings
from panel.views import *
urlpatterns = [
url(r'^$', index, name='index'),
]
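# --- Editor's sketch (not part of the original sample) ----------------------
# The star import above is expected to provide an `index` view in panel.views;
# a hypothetical minimal view consistent with this URLconf could be:
#
#     from django.http import HttpResponse
#
#     def index(request):
#         return HttpResponse("panel index")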
3e13f1a614fdcd99556bcda63d31e15a470031fa | 998 | py | Python |
entity/address_entity.py | rpinaa/python-flask-microservice | 3e479bd1653f8e846ae0b84ffb4f20481dfc7c5d | ["MIT"] | 1 | 2019-09-23T20:00:52.000Z | 2019-09-23T20:00:52.000Z |
entity/address_entity.py | rpinaa/python-flask-microservice | 3e479bd1653f8e846ae0b84ffb4f20481dfc7c5d | ["MIT"] | null | null | null |
entity/address_entity.py | rpinaa/python-flask-microservice | 3e479bd1653f8e846ae0b84ffb4f20481dfc7c5d | ["MIT"] | null | null | null |
from db import db
class AddressEntity(db.Model):
__tablename__ = "address"
id = db.Column(db.Integer, primary_key=True)
int_number = db.Column(db.String(15), nullable=False)
ext_number = db.Column(db.String(15), nullable=False)
block = db.Column(db.String(15), nullable=False)
number = db.Column(db.String(15), nullable=False)
street = db.Column(db.String(75), nullable=False)
colony = db.Column(db.String(75), nullable=False)
municipality = db.Column(db.String(50), nullable=False)
state = db.Column(db.String(50), nullable=False)
country = db.Column(db.String(50), nullable=False)
created_at = db.Column(db.DateTime, nullable=True, server_default=db.func.current_timestamp())
updated_at = db.Column(db.DateTime, nullable=True, server_onupdate=db.func.current_timestamp())
    # Soft-delete flag.
    deleted = db.Column(db.Boolean, nullable=False, default=False)

    def save(self) -> None:
        # Persist this address instance in the current session.
        db.session.add(self)
        db.session.commit()
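# --- Editor's sketch (not part of the original sample) ----------------------
# Hypothetical usage, assuming `db` in db.py is a Flask-SQLAlchemy handle that
# has been bound to a Flask app and that the tables exist (db.create_all()):
#
#     address = AddressEntity(
#         int_number="12", ext_number="34", block="B", number="7",
#         street="Main St", colony="Centro", municipality="Centro",
#         state="CDMX", country="MX",
#     )
#     address.save()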
3e46f25a0298cc777cd9c283c93eaadaceb537e7 | 324 | py | Python |
tests/test_client.py | yakhinvadim/enterprise-search-python | a2010e8773a6250cb81ea48f760088bb23466bb1 | ["Apache-2.0"] | null | null | null |
tests/test_client.py | yakhinvadim/enterprise-search-python | a2010e8773a6250cb81ea48f760088bb23466bb1 | ["Apache-2.0"] | null | null | null |
tests/test_client.py | yakhinvadim/enterprise-search-python | a2010e8773a6250cb81ea48f760088bb23466bb1 | ["Apache-2.0"] | null | null | null |
from unittest import TestCase
from elastic_workplace_search.client import Client
class TestClient(TestCase):
dummy_authorization_token = 'authorization_token'
def setUp(self):
self.client = Client('authorization_token')
def test_constructor(self):
self.assertIsInstance(self.client, Client)
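# --- Editor's note (not part of the original sample) ------------------------
# These tests can be run with the standard-library runner, e.g.
#
#     python -m unittest tests.test_client
#
# assuming the elastic_workplace_search package is importable on PYTHONPATH.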
3e5961792d37ca4a7091e59c1c74180b0cb0ef47 | 337,498 | py | Python |
fitparse/profile.py | vlcvboyer/python-fitparse | bef76231a1c8dddfafc23070b43684e7d0c6e916 | ["MIT"] | null | null | null |
fitparse/profile.py | vlcvboyer/python-fitparse | bef76231a1c8dddfafc23070b43684e7d0c6e916 | ["MIT"] | 1 | 2019-01-03T08:56:11.000Z | 2019-06-05T09:24:13.000Z |
fitparse/profile.py | vlcvboyer/python-fitparse | bef76231a1c8dddfafc23070b43684e7d0c6e916 | ["MIT"] | 1 | 2018-08-30T15:06:12.000Z | 2018-08-30T15:06:12.000Z |
################# BEGIN AUTOMATICALLY GENERATED FIT PROFILE ##################
########################### DO NOT EDIT THIS FILE ############################
####### EXPORTED PROFILE FROM SDK VERSION 20.33 AT 2017-05-17 22:36:12 #######
########## PARSED 118 TYPES (1699 VALUES), 76 MESSAGES (950 FIELDS) ##########
from fitparse.records import (
ComponentField,
Field,
FieldType,
MessageType,
ReferenceField,
SubField,
BASE_TYPES,
)
FIELD_TYPES = {
'activity': FieldType(
name='activity',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'manual',
1: 'auto_multi_sport',
},
),
'activity_class': FieldType(
name='activity_class',
base_type=BASE_TYPES[0x00], # enum
values={
100: 'level_max',
0x7F: 'level', # 0 to 100
0x80: 'athlete',
},
),
'activity_level': FieldType(
name='activity_level',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'low',
1: 'medium',
2: 'high',
},
),
'activity_subtype': FieldType(
name='activity_subtype',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'treadmill', # Run
2: 'street', # Run
3: 'trail', # Run
4: 'track', # Run
5: 'spin', # Cycling
6: 'indoor_cycling', # Cycling
7: 'road', # Cycling
8: 'mountain', # Cycling
9: 'downhill', # Cycling
10: 'recumbent', # Cycling
11: 'cyclocross', # Cycling
12: 'hand_cycling', # Cycling
13: 'track_cycling', # Cycling
14: 'indoor_rowing', # Fitness Equipment
15: 'elliptical', # Fitness Equipment
16: 'stair_climbing', # Fitness Equipment
17: 'lap_swimming', # Swimming
18: 'open_water', # Swimming
254: 'all',
},
),
'activity_type': FieldType(
name='activity_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'running',
2: 'cycling',
3: 'transition', # Mulitsport transition
4: 'fitness_equipment',
5: 'swimming',
6: 'walking',
8: 'sedentary',
254: 'all', # All is for goals only to include all sports.
},
),
'analog_watchface_layout': FieldType(
name='analog_watchface_layout',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'minimal',
1: 'traditional',
2: 'modern',
},
),
'ant_network': FieldType(
name='ant_network',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'public',
1: 'antplus',
2: 'antfs',
3: 'private',
},
),
'antplus_device_type': FieldType(
name='antplus_device_type',
base_type=BASE_TYPES[0x02], # uint8
values={
1: 'antfs',
11: 'bike_power',
12: 'environment_sensor_legacy',
15: 'multi_sport_speed_distance',
16: 'control',
17: 'fitness_equipment',
18: 'blood_pressure',
19: 'geocache_node',
20: 'light_electric_vehicle',
25: 'env_sensor',
26: 'racquet',
27: 'control_hub',
31: 'muscle_oxygen',
35: 'bike_light_main',
36: 'bike_light_shared',
38: 'exd',
40: 'bike_radar',
119: 'weight_scale',
120: 'heart_rate',
121: 'bike_speed_cadence',
122: 'bike_cadence',
123: 'bike_speed',
124: 'stride_speed_distance',
},
),
'attitude_stage': FieldType(
name='attitude_stage',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'failed',
1: 'aligning',
2: 'degraded',
3: 'valid',
},
),
'attitude_validity': FieldType(
name='attitude_validity',
base_type=BASE_TYPES[0x84], # uint16
values={
0x0001: 'track_angle_heading_valid',
0x0002: 'pitch_valid',
0x0004: 'roll_valid',
0x0008: 'lateral_body_accel_valid',
0x0010: 'normal_body_accel_valid',
0x0020: 'turn_rate_valid',
0x0040: 'hw_fail',
0x0080: 'mag_invalid',
0x0100: 'no_gps',
0x0200: 'gps_invalid',
0x0400: 'solution_coasting',
0x0800: 'true_track_angle',
0x1000: 'magnetic_heading',
},
),
'auto_activity_detect': FieldType(
name='auto_activity_detect',
base_type=BASE_TYPES[0x86], # uint32
values={
0x00000000: 'none',
0x00000001: 'running',
0x00000002: 'cycling',
0x00000004: 'swimming',
0x00000008: 'walking',
0x00000020: 'elliptical',
0x00000400: 'sedentary',
},
),
'auto_sync_frequency': FieldType(
name='auto_sync_frequency',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'never',
1: 'occasionally',
2: 'frequent',
3: 'once_a_day',
4: 'remote',
},
),
'autolap_trigger': FieldType(
name='autolap_trigger',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'time',
1: 'distance',
2: 'position_start',
3: 'position_lap',
4: 'position_waypoint',
5: 'position_marked',
6: 'off',
},
),
'autoscroll': FieldType(
name='autoscroll',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'none',
1: 'slow',
2: 'medium',
3: 'fast',
},
),
'backlight_mode': FieldType(
name='backlight_mode',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'off',
1: 'manual',
2: 'key_and_messages',
3: 'auto_brightness',
4: 'smart_notifications',
5: 'key_and_messages_night',
6: 'key_and_messages_and_smart_notifications',
},
),
'battery_status': FieldType(
name='battery_status',
base_type=BASE_TYPES[0x02], # uint8
values={
1: 'new',
2: 'good',
3: 'ok',
4: 'low',
5: 'critical',
6: 'charging',
7: 'unknown',
},
),
'bike_light_beam_angle_mode': FieldType(
name='bike_light_beam_angle_mode',
base_type=BASE_TYPES[0x02], # uint8
values={
0: 'manual',
1: 'auto',
},
),
'bike_light_network_config_type': FieldType(
name='bike_light_network_config_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'auto',
4: 'individual',
5: 'high_visibility',
6: 'trail',
},
),
'body_location': FieldType(
name='body_location',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'left_leg',
1: 'left_calf',
2: 'left_shin',
3: 'left_hamstring',
4: 'left_quad',
5: 'left_glute',
6: 'right_leg',
7: 'right_calf',
8: 'right_shin',
9: 'right_hamstring',
10: 'right_quad',
11: 'right_glute',
12: 'torso_back',
13: 'left_lower_back',
14: 'left_upper_back',
15: 'right_lower_back',
16: 'right_upper_back',
17: 'torso_front',
18: 'left_abdomen',
19: 'left_chest',
20: 'right_abdomen',
21: 'right_chest',
22: 'left_arm',
23: 'left_shoulder',
24: 'left_bicep',
25: 'left_tricep',
26: 'left_brachioradialis', # Left anterior forearm
27: 'left_forearm_extensors', # Left posterior forearm
28: 'right_arm',
29: 'right_shoulder',
30: 'right_bicep',
31: 'right_tricep',
32: 'right_brachioradialis', # Right anterior forearm
33: 'right_forearm_extensors', # Right posterior forearm
34: 'neck',
35: 'throat',
36: 'waist_mid_back',
37: 'waist_front',
38: 'waist_left',
39: 'waist_right',
},
),
'bool': FieldType(
name='bool',
base_type=BASE_TYPES[0x00], # enum
),
'bp_status': FieldType(
name='bp_status',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'no_error',
1: 'error_incomplete_data',
2: 'error_no_measurement',
3: 'error_data_out_of_range',
4: 'error_irregular_heart_rate',
},
),
'camera_event_type': FieldType(
name='camera_event_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'video_start', # Start of video recording
1: 'video_split', # Mark of video file split (end of one file, beginning of the other)
2: 'video_end', # End of video recording
3: 'photo_taken', # Still photo taken
4: 'video_second_stream_start',
5: 'video_second_stream_split',
6: 'video_second_stream_end',
7: 'video_split_start', # Mark of video file split start
8: 'video_second_stream_split_start',
11: 'video_pause', # Mark when a video recording has been paused
12: 'video_second_stream_pause',
13: 'video_resume', # Mark when a video recording has been resumed
14: 'video_second_stream_resume',
},
),
'camera_orientation_type': FieldType(
name='camera_orientation_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'camera_orientation_0',
1: 'camera_orientation_90',
2: 'camera_orientation_180',
3: 'camera_orientation_270',
},
),
'checksum': FieldType(
name='checksum',
base_type=BASE_TYPES[0x02], # uint8
values={
0: 'clear', # Allows clear of checksum for flash memory where can only write 1 to 0 without erasing sector.
1: 'ok', # Set to mark checksum as valid if computes to invalid values 0 or 0xFF. Checksum can also be set to ok to save encoding computation time.
},
),
'comm_timeout_type': FieldType(
name='comm_timeout_type',
base_type=BASE_TYPES[0x84], # uint16
values={
0: 'wildcard_pairing_timeout', # Timeout pairing to any device
1: 'pairing_timeout', # Timeout pairing to previously paired device
2: 'connection_lost', # Temporary loss of communications
3: 'connection_timeout', # Connection closed due to extended bad communications
},
),
'connectivity_capabilities': FieldType(
name='connectivity_capabilities',
base_type=BASE_TYPES[0x8C], # uint32z
values={
0x00000001: 'bluetooth',
0x00000002: 'bluetooth_le',
0x00000004: 'ant',
0x00000008: 'activity_upload',
0x00000010: 'course_download',
0x00000020: 'workout_download',
0x00000040: 'live_track',
0x00000080: 'weather_conditions',
0x00000100: 'weather_alerts',
0x00000200: 'gps_ephemeris_download',
0x00000400: 'explicit_archive',
0x00000800: 'setup_incomplete',
0x00001000: 'continue_sync_after_software_update',
0x00002000: 'connect_iq_app_download',
0x00004000: 'golf_course_download',
0x00008000: 'device_initiates_sync', # Indicates device is in control of initiating all syncs
0x00010000: 'connect_iq_watch_app_download',
0x00020000: 'connect_iq_widget_download',
0x00040000: 'connect_iq_watch_face_download',
0x00080000: 'connect_iq_data_field_download',
0x00100000: 'connect_iq_app_managment', # Device supports delete and reorder of apps via GCM
0x00200000: 'swing_sensor',
0x00400000: 'swing_sensor_remote',
0x00800000: 'incident_detection', # Device supports incident detection
0x01000000: 'audio_prompts',
0x02000000: 'wifi_verification', # Device supports reporting wifi verification via GCM
0x04000000: 'true_up', # Device supports True Up
0x08000000: 'find_my_watch', # Device supports Find My Watch
0x10000000: 'remote_manual_sync',
0x20000000: 'live_track_auto_start', # Device supports LiveTrack auto start
0x40000000: 'live_track_messaging', # Device supports LiveTrack Messaging
0x80000000: 'instant_input', # Device supports instant input feature
},
),
'course_capabilities': FieldType(
name='course_capabilities',
base_type=BASE_TYPES[0x8C], # uint32z
values={
0x00000001: 'processed',
0x00000002: 'valid',
0x00000004: 'time',
0x00000008: 'distance',
0x00000010: 'position',
0x00000020: 'heart_rate',
0x00000040: 'power',
0x00000080: 'cadence',
0x00000100: 'training',
0x00000200: 'navigation',
0x00000400: 'bikeway',
},
),
'course_point': FieldType(
name='course_point',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'summit',
2: 'valley',
3: 'water',
4: 'food',
5: 'danger',
6: 'left',
7: 'right',
8: 'straight',
9: 'first_aid',
10: 'fourth_category',
11: 'third_category',
12: 'second_category',
13: 'first_category',
14: 'hors_category',
15: 'sprint',
16: 'left_fork',
17: 'right_fork',
18: 'middle_fork',
19: 'slight_left',
20: 'sharp_left',
21: 'slight_right',
22: 'sharp_right',
23: 'u_turn',
24: 'segment_start',
25: 'segment_end',
},
),
'date_mode': FieldType(
name='date_mode',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'day_month',
1: 'month_day',
},
),
'date_time': FieldType( # seconds since UTC 00:00 Dec 31 1989
name='date_time',
base_type=BASE_TYPES[0x86], # uint32
),
'day_of_week': FieldType(
name='day_of_week',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'sunday',
1: 'monday',
2: 'tuesday',
3: 'wednesday',
4: 'thursday',
5: 'friday',
6: 'saturday',
},
),
'device_index': FieldType(
name='device_index',
base_type=BASE_TYPES[0x02], # uint8
values={
0: 'creator', # Creator of the file is always device index 0.
},
),
'digital_watchface_layout': FieldType(
name='digital_watchface_layout',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'traditional',
1: 'modern',
2: 'bold',
},
),
'display_heart': FieldType(
name='display_heart',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'bpm',
1: 'max',
2: 'reserve',
},
),
'display_measure': FieldType(
name='display_measure',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'metric',
1: 'statute',
2: 'nautical',
},
),
'display_orientation': FieldType(
name='display_orientation',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'auto', # automatic if the device supports it
1: 'portrait',
2: 'landscape',
3: 'portrait_flipped', # portrait mode but rotated 180 degrees
4: 'landscape_flipped', # landscape mode but rotated 180 degrees
},
),
'display_position': FieldType(
name='display_position',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'degree', # dd.dddddd
1: 'degree_minute', # dddmm.mmm
2: 'degree_minute_second', # dddmmss
3: 'austrian_grid', # Austrian Grid (BMN)
4: 'british_grid', # British National Grid
5: 'dutch_grid', # Dutch grid system
6: 'hungarian_grid', # Hungarian grid system
7: 'finnish_grid', # Finnish grid system Zone3 KKJ27
8: 'german_grid', # Gausss Krueger (German)
9: 'icelandic_grid', # Icelandic Grid
10: 'indonesian_equatorial', # Indonesian Equatorial LCO
11: 'indonesian_irian', # Indonesian Irian LCO
12: 'indonesian_southern', # Indonesian Southern LCO
13: 'india_zone_0', # India zone 0
14: 'india_zone_IA', # India zone IA
15: 'india_zone_IB', # India zone IB
16: 'india_zone_IIA', # India zone IIA
17: 'india_zone_IIB', # India zone IIB
18: 'india_zone_IIIA', # India zone IIIA
19: 'india_zone_IIIB', # India zone IIIB
20: 'india_zone_IVA', # India zone IVA
21: 'india_zone_IVB', # India zone IVB
22: 'irish_transverse', # Irish Transverse Mercator
23: 'irish_grid', # Irish Grid
24: 'loran', # Loran TD
25: 'maidenhead_grid', # Maidenhead grid system
26: 'mgrs_grid', # MGRS grid system
27: 'new_zealand_grid', # New Zealand grid system
28: 'new_zealand_transverse', # New Zealand Transverse Mercator
29: 'qatar_grid', # Qatar National Grid
30: 'modified_swedish_grid', # Modified RT-90 (Sweden)
31: 'swedish_grid', # RT-90 (Sweden)
32: 'south_african_grid', # South African Grid
33: 'swiss_grid', # Swiss CH-1903 grid
34: 'taiwan_grid', # Taiwan Grid
35: 'united_states_grid', # United States National Grid
36: 'utm_ups_grid', # UTM/UPS grid system
37: 'west_malayan', # West Malayan RSO
38: 'borneo_rso', # Borneo RSO
39: 'estonian_grid', # Estonian grid system
40: 'latvian_grid', # Latvian Transverse Mercator
41: 'swedish_ref_99_grid', # Reference Grid 99 TM (Swedish)
},
),
'display_power': FieldType(
name='display_power',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'watts',
1: 'percent_ftp',
},
),
'event': FieldType(
name='event',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'timer', # Group 0. Start / stop_all
3: 'workout', # start / stop
4: 'workout_step', # Start at beginning of workout. Stop at end of each step.
5: 'power_down', # stop_all group 0
6: 'power_up', # stop_all group 0
7: 'off_course', # start / stop group 0
8: 'session', # Stop at end of each session.
9: 'lap', # Stop at end of each lap.
10: 'course_point', # marker
11: 'battery', # marker
12: 'virtual_partner_pace', # Group 1. Start at beginning of activity if VP enabled, when VP pace is changed during activity or VP enabled mid activity. stop_disable when VP disabled.
13: 'hr_high_alert', # Group 0. Start / stop when in alert condition.
14: 'hr_low_alert', # Group 0. Start / stop when in alert condition.
15: 'speed_high_alert', # Group 0. Start / stop when in alert condition.
16: 'speed_low_alert', # Group 0. Start / stop when in alert condition.
17: 'cad_high_alert', # Group 0. Start / stop when in alert condition.
18: 'cad_low_alert', # Group 0. Start / stop when in alert condition.
19: 'power_high_alert', # Group 0. Start / stop when in alert condition.
20: 'power_low_alert', # Group 0. Start / stop when in alert condition.
21: 'recovery_hr', # marker
22: 'battery_low', # marker
23: 'time_duration_alert', # Group 1. Start if enabled mid activity (not required at start of activity). Stop when duration is reached. stop_disable if disabled.
24: 'distance_duration_alert', # Group 1. Start if enabled mid activity (not required at start of activity). Stop when duration is reached. stop_disable if disabled.
25: 'calorie_duration_alert', # Group 1. Start if enabled mid activity (not required at start of activity). Stop when duration is reached. stop_disable if disabled.
26: 'activity', # Group 1.. Stop at end of activity.
27: 'fitness_equipment', # marker
28: 'length', # Stop at end of each length.
32: 'user_marker', # marker
33: 'sport_point', # marker
36: 'calibration', # start/stop/marker
42: 'front_gear_change', # marker
43: 'rear_gear_change', # marker
44: 'rider_position_change', # marker
45: 'elev_high_alert', # Group 0. Start / stop when in alert condition.
46: 'elev_low_alert', # Group 0. Start / stop when in alert condition.
47: 'comm_timeout', # marker
},
),
'event_type': FieldType(
name='event_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'start',
1: 'stop',
2: 'consecutive_depreciated',
3: 'marker',
4: 'stop_all',
5: 'begin_depreciated',
6: 'end_depreciated',
7: 'end_all_depreciated',
8: 'stop_disable',
9: 'stop_disable_all',
},
),
'exd_data_units': FieldType(
name='exd_data_units',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'no_units',
1: 'laps',
2: 'miles_per_hour',
3: 'kilometers_per_hour',
4: 'feet_per_hour',
5: 'meters_per_hour',
6: 'degrees_celsius',
7: 'degrees_farenheit',
8: 'zone',
9: 'gear',
10: 'rpm',
11: 'bpm',
12: 'degrees',
13: 'millimeters',
14: 'meters',
15: 'kilometers',
16: 'feet',
17: 'yards',
18: 'kilofeet',
19: 'miles',
20: 'time',
21: 'enum_turn_type',
22: 'percent',
23: 'watts',
24: 'watts_per_kilogram',
25: 'enum_battery_status',
26: 'enum_bike_light_beam_angle_mode',
27: 'enum_bike_light_battery_status',
28: 'enum_bike_light_network_config_type',
29: 'lights',
30: 'seconds',
31: 'minutes',
32: 'hours',
33: 'calories',
34: 'kilojoules',
35: 'milliseconds',
36: 'second_per_mile',
37: 'second_per_kilometer',
38: 'centimeter',
39: 'enum_course_point',
40: 'bradians',
41: 'enum_sport',
42: 'inches_hg',
43: 'mm_hg',
44: 'mbars',
45: 'hecto_pascals',
46: 'feet_per_min',
47: 'meters_per_min',
48: 'meters_per_sec',
49: 'eight_cardinal',
},
),
'exd_descriptors': FieldType(
name='exd_descriptors',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'bike_light_battery_status',
1: 'beam_angle_status',
2: 'batery_level',
3: 'light_network_mode',
4: 'number_lights_connected',
5: 'cadence',
6: 'distance',
7: 'estimated_time_of_arrival',
8: 'heading',
9: 'time',
10: 'battery_level',
11: 'trainer_resistance',
12: 'trainer_target_power',
13: 'time_seated',
14: 'time_standing',
15: 'elevation',
16: 'grade',
17: 'ascent',
18: 'descent',
19: 'vertical_speed',
20: 'di2_battery_level',
21: 'front_gear',
22: 'rear_gear',
23: 'gear_ratio',
24: 'heart_rate',
25: 'heart_rate_zone',
26: 'time_in_heart_rate_zone',
27: 'heart_rate_reserve',
28: 'calories',
29: 'gps_accuracy',
30: 'gps_signal_strength',
31: 'temperature',
32: 'time_of_day',
33: 'balance',
34: 'pedal_smoothness',
35: 'power',
36: 'functional_threshold_power',
37: 'intensity_factor',
38: 'work',
39: 'power_ratio',
40: 'normalized_power',
41: 'training_stress_Score',
42: 'time_on_zone',
43: 'speed',
44: 'laps',
45: 'reps',
46: 'workout_step',
47: 'course_distance',
48: 'navigation_distance',
49: 'course_estimated_time_of_arrival',
50: 'navigation_estimated_time_of_arrival',
51: 'course_time',
52: 'navigation_time',
53: 'course_heading',
54: 'navigation_heading',
55: 'power_zone',
56: 'torque_effectiveness',
57: 'timer_time',
58: 'power_weight_ratio',
59: 'left_platform_center_offset',
60: 'right_platform_center_offset',
61: 'left_power_phase_start_angle',
62: 'right_power_phase_start_angle',
63: 'left_power_phase_finish_angle',
64: 'right_power_phase_finish_angle',
65: 'gears', # Combined gear information
66: 'pace',
67: 'training_effect',
68: 'vertical_oscillation',
69: 'vertical_ratio',
70: 'ground_contact_time',
71: 'left_ground_contact_time_balance',
72: 'right_ground_contact_time_balance',
73: 'stride_length',
74: 'running_cadence',
75: 'performance_condition',
76: 'course_type',
77: 'time_in_power_zone',
78: 'navigation_turn',
79: 'course_location',
80: 'navigation_location',
81: 'compass',
82: 'gear_combo',
83: 'muscle_oxygen',
84: 'icon',
85: 'compass_heading',
86: 'gps_heading',
87: 'gps_elevation',
88: 'anaerobic_training_effect',
89: 'course',
90: 'off_course',
91: 'glide_ratio',
92: 'vertical_distance',
93: 'vmg',
94: 'ambient_pressure',
95: 'pressure',
},
),
'exd_display_type': FieldType(
name='exd_display_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'numerical',
1: 'simple',
2: 'graph',
3: 'bar',
4: 'circle_graph',
5: 'virtual_partner',
6: 'balance',
7: 'string_list',
8: 'string',
9: 'simple_dynamic_icon',
10: 'gauge',
},
),
'exd_layout': FieldType(
name='exd_layout',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'full_screen',
1: 'half_vertical',
2: 'half_horizontal',
3: 'half_vertical_right_split',
4: 'half_horizontal_bottom_split',
5: 'full_quarter_split',
6: 'half_vertical_left_split',
7: 'half_horizontal_top_split',
},
),
'exd_qualifiers': FieldType(
name='exd_qualifiers',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'no_qualifier',
1: 'instantaneous',
2: 'average',
3: 'lap',
4: 'maximum',
5: 'maximum_average',
6: 'maximum_lap',
7: 'last_lap',
8: 'average_lap',
9: 'to_destination',
10: 'to_go',
11: 'to_next',
12: 'next_course_point',
13: 'total',
14: 'three_second_average',
15: 'ten_second_average',
16: 'thirty_second_average',
17: 'percent_maximum',
18: 'percent_maximum_average',
19: 'lap_percent_maximum',
20: 'elapsed',
21: 'sunrise',
22: 'sunset',
23: 'compared_to_virtual_partner',
24: 'maximum_24h',
25: 'minimum_24h',
26: 'minimum',
27: 'first',
28: 'second',
29: 'third',
30: 'shifter',
31: 'last_sport',
32: 'moving',
33: 'stopped',
242: 'zone_9',
243: 'zone_8',
244: 'zone_7',
245: 'zone_6',
246: 'zone_5',
247: 'zone_4',
248: 'zone_3',
249: 'zone_2',
250: 'zone_1',
},
),
'file': FieldType(
name='file',
base_type=BASE_TYPES[0x00], # enum
values={
1: 'device', # Read only, single file. Must be in root directory.
2: 'settings', # Read/write, single file. Directory=Settings
3: 'sport', # Read/write, multiple files, file number = sport type. Directory=Sports
4: 'activity', # Read/erase, multiple files. Directory=Activities
5: 'workout', # Read/write/erase, multiple files. Directory=Workouts
6: 'course', # Read/write/erase, multiple files. Directory=Courses
7: 'schedules', # Read/write, single file. Directory=Schedules
9: 'weight', # Read only, single file. Circular buffer. All message definitions at start of file. Directory=Weight
10: 'totals', # Read only, single file. Directory=Totals
11: 'goals', # Read/write, single file. Directory=Goals
14: 'blood_pressure', # Read only. Directory=Blood Pressure
15: 'monitoring_a', # Read only. Directory=Monitoring. File number=sub type.
20: 'activity_summary', # Read/erase, multiple files. Directory=Activities
28: 'monitoring_daily',
32: 'monitoring_b', # Read only. Directory=Monitoring. File number=identifier
34: 'segment', # Read/write/erase. Multiple Files. Directory=Segments
35: 'segment_list', # Read/write/erase. Single File. Directory=Segments
40: 'exd_configuration', # Read/write/erase. Single File. Directory=Settings
0xF7: 'mfg_range_min', # 0xF7 - 0xFE reserved for manufacturer specific file types
0xFE: 'mfg_range_max', # 0xF7 - 0xFE reserved for manufacturer specific file types
},
),
'file_flags': FieldType(
name='file_flags',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x02: 'read',
0x04: 'write',
0x08: 'erase',
},
),
'fit_base_type': FieldType(
name='fit_base_type',
base_type=BASE_TYPES[0x02], # uint8
values={
0: 'enum',
1: 'sint8',
2: 'uint8',
7: 'string',
10: 'uint8z',
13: 'byte',
131: 'sint16',
132: 'uint16',
133: 'sint32',
134: 'uint32',
136: 'float32',
137: 'float64',
139: 'uint16z',
140: 'uint32z',
142: 'sint64',
143: 'uint64',
144: 'uint64z',
},
),
'fit_base_unit': FieldType(
name='fit_base_unit',
base_type=BASE_TYPES[0x84], # uint16
values={
0: 'other',
1: 'kilogram',
2: 'pound',
},
),
'fitness_equipment_state': FieldType( # fitness equipment event data
name='fitness_equipment_state',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'ready',
1: 'in_use',
2: 'paused',
3: 'unknown', # lost connection to fitness equipment
},
),
'garmin_product': FieldType(
name='garmin_product',
base_type=BASE_TYPES[0x84], # uint16
values={
1: 'hrm1',
2: 'axh01', # AXH01 HRM chipset
3: 'axb01',
4: 'axb02',
5: 'hrm2ss',
6: 'dsi_alf02',
7: 'hrm3ss',
8: 'hrm_run_single_byte_product_id', # hrm_run model for HRM ANT+ messaging
9: 'bsm', # BSM model for ANT+ messaging
10: 'bcm', # BCM model for ANT+ messaging
11: 'axs01', # AXS01 HRM Bike Chipset model for ANT+ messaging
12: 'hrm_tri_single_byte_product_id', # hrm_tri model for HRM ANT+ messaging
14: 'fr225_single_byte_product_id', # fr225 model for HRM ANT+ messaging
473: 'fr301_china',
474: 'fr301_japan',
475: 'fr301_korea',
494: 'fr301_taiwan',
717: 'fr405', # Forerunner 405
782: 'fr50', # Forerunner 50
987: 'fr405_japan',
988: 'fr60', # Forerunner 60
1011: 'dsi_alf01',
1018: 'fr310xt', # Forerunner 310
1036: 'edge500',
1124: 'fr110', # Forerunner 110
1169: 'edge800',
1199: 'edge500_taiwan',
1213: 'edge500_japan',
1253: 'chirp',
1274: 'fr110_japan',
1325: 'edge200',
1328: 'fr910xt',
1333: 'edge800_taiwan',
1334: 'edge800_japan',
1341: 'alf04',
1345: 'fr610',
1360: 'fr210_japan',
1380: 'vector_ss',
1381: 'vector_cp',
1386: 'edge800_china',
1387: 'edge500_china',
1410: 'fr610_japan',
1422: 'edge500_korea',
1436: 'fr70',
1446: 'fr310xt_4t',
1461: 'amx',
1482: 'fr10',
1497: 'edge800_korea',
1499: 'swim',
1537: 'fr910xt_china',
1551: 'fenix',
1555: 'edge200_taiwan',
1561: 'edge510',
1567: 'edge810',
1570: 'tempe',
1600: 'fr910xt_japan',
1623: 'fr620',
1632: 'fr220',
1664: 'fr910xt_korea',
1688: 'fr10_japan',
1721: 'edge810_japan',
1735: 'virb_elite',
1736: 'edge_touring', # Also Edge Touring Plus
1742: 'edge510_japan',
1743: 'hrm_tri',
1752: 'hrm_run',
1765: 'fr920xt',
1821: 'edge510_asia',
1822: 'edge810_china',
1823: 'edge810_taiwan',
1836: 'edge1000',
1837: 'vivo_fit',
1853: 'virb_remote',
1885: 'vivo_ki',
1903: 'fr15',
1907: 'vivo_active',
1918: 'edge510_korea',
1928: 'fr620_japan',
1929: 'fr620_china',
1930: 'fr220_japan',
1931: 'fr220_china',
1936: 'approach_s6',
1956: 'vivo_smart',
1967: 'fenix2',
1988: 'epix',
2050: 'fenix3',
2052: 'edge1000_taiwan',
2053: 'edge1000_japan',
2061: 'fr15_japan',
2067: 'edge520',
2070: 'edge1000_china',
2072: 'fr620_russia',
2073: 'fr220_russia',
2079: 'vector_s',
2100: 'edge1000_korea',
2130: 'fr920xt_taiwan',
2131: 'fr920xt_china',
2132: 'fr920xt_japan',
2134: 'virbx',
2135: 'vivo_smart_apac',
2140: 'etrex_touch',
2147: 'edge25',
2148: 'fr25',
2150: 'vivo_fit2',
2153: 'fr225',
2156: 'fr630',
2157: 'fr230',
2160: 'vivo_active_apac',
2161: 'vector_2',
2162: 'vector_2s',
2172: 'virbxe',
2173: 'fr620_taiwan',
2174: 'fr220_taiwan',
2175: 'truswing',
2188: 'fenix3_china',
2189: 'fenix3_twn',
2192: 'varia_headlight',
2193: 'varia_taillight_old',
2204: 'edge_explore_1000',
2219: 'fr225_asia',
2225: 'varia_radar_taillight',
2226: 'varia_radar_display',
2238: 'edge20',
2262: 'd2_bravo',
2266: 'approach_s20',
2276: 'varia_remote',
2327: 'hrm4_run',
2337: 'vivo_active_hr',
2347: 'vivo_smart_gps_hr',
2348: 'vivo_smart_hr',
2368: 'vivo_move',
2398: 'varia_vision',
2406: 'vivo_fit3',
2413: 'fenix3_hr',
2417: 'virb_ultra_30',
2429: 'index_smart_scale',
2431: 'fr235',
2432: 'fenix3_chronos',
2441: 'oregon7xx',
2444: 'rino7xx',
2496: 'nautix',
2530: 'edge_820',
2531: 'edge_explore_820',
2544: 'fenix5s',
2547: 'd2_bravo_titanium',
2593: 'running_dynamics_pod',
2604: 'fenix5x',
2606: 'vivo_fit_jr',
2691: 'fr935',
2697: 'fenix5',
10007: 'sdm4', # SDM4 footpod
10014: 'edge_remote',
20119: 'training_center',
65531: 'connectiq_simulator',
65532: 'android_antplus_plugin',
65534: 'connect', # Garmin Connect website
},
),
'gender': FieldType(
name='gender',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'female',
1: 'male',
},
),
'goal': FieldType(
name='goal',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'time',
1: 'distance',
2: 'calories',
3: 'frequency',
4: 'steps',
5: 'ascent',
6: 'active_minutes',
},
),
'goal_recurrence': FieldType(
name='goal_recurrence',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'off',
1: 'daily',
2: 'weekly',
3: 'monthly',
4: 'yearly',
5: 'custom',
},
),
'goal_source': FieldType(
name='goal_source',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'auto', # Device generated
1: 'community', # Social network sourced goal
2: 'user', # Manually generated
},
),
'hr_type': FieldType(
name='hr_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'normal',
1: 'irregular',
},
),
'hr_zone_calc': FieldType(
name='hr_zone_calc',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'custom',
1: 'percent_max_hr',
2: 'percent_hrr',
},
),
'intensity': FieldType(
name='intensity',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'active',
1: 'rest',
2: 'warmup',
3: 'cooldown',
},
),
'language': FieldType(
name='language',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'english',
1: 'french',
2: 'italian',
3: 'german',
4: 'spanish',
5: 'croatian',
6: 'czech',
7: 'danish',
8: 'dutch',
9: 'finnish',
10: 'greek',
11: 'hungarian',
12: 'norwegian',
13: 'polish',
14: 'portuguese',
15: 'slovakian',
16: 'slovenian',
17: 'swedish',
18: 'russian',
19: 'turkish',
20: 'latvian',
21: 'ukrainian',
22: 'arabic',
23: 'farsi',
24: 'bulgarian',
25: 'romanian',
26: 'chinese',
27: 'japanese',
28: 'korean',
29: 'taiwanese',
30: 'thai',
31: 'hebrew',
32: 'brazilian_portuguese',
33: 'indonesian',
34: 'malaysian',
35: 'vietnamese',
36: 'burmese',
37: 'mongolian',
254: 'custom',
},
),
'language_bits_0': FieldType( # Bit field corresponding to language enum type (1 << language).
name='language_bits_0',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'english',
0x02: 'french',
0x04: 'italian',
0x08: 'german',
0x10: 'spanish',
0x20: 'croatian',
0x40: 'czech',
0x80: 'danish',
},
),
'language_bits_1': FieldType(
name='language_bits_1',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'dutch',
0x02: 'finnish',
0x04: 'greek',
0x08: 'hungarian',
0x10: 'norwegian',
0x20: 'polish',
0x40: 'portuguese',
0x80: 'slovakian',
},
),
'language_bits_2': FieldType(
name='language_bits_2',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'slovenian',
0x02: 'swedish',
0x04: 'russian',
0x08: 'turkish',
0x10: 'latvian',
0x20: 'ukrainian',
0x40: 'arabic',
0x80: 'farsi',
},
),
'language_bits_3': FieldType(
name='language_bits_3',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'bulgarian',
0x02: 'romanian',
0x04: 'chinese',
0x08: 'japanese',
0x10: 'korean',
0x20: 'taiwanese',
0x40: 'thai',
0x80: 'hebrew',
},
),
'language_bits_4': FieldType(
name='language_bits_4',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'brazilian_portuguese',
0x02: 'indonesian',
0x04: 'malaysian',
0x08: 'vietnamese',
0x10: 'burmese',
0x20: 'mongolian',
},
),
'lap_trigger': FieldType(
name='lap_trigger',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'manual',
1: 'time',
2: 'distance',
3: 'position_start',
4: 'position_lap',
5: 'position_waypoint',
6: 'position_marked',
7: 'session_end',
8: 'fitness_equipment',
},
),
'left_right_balance': FieldType(
name='left_right_balance',
base_type=BASE_TYPES[0x02], # uint8
values={
0x7F: 'mask', # % contribution
0x80: 'right', # data corresponds to right if set, otherwise unknown
},
),
'left_right_balance_100': FieldType(
name='left_right_balance_100',
base_type=BASE_TYPES[0x84], # uint16
values={
0x3FFF: 'mask', # % contribution scaled by 100
0x8000: 'right', # data corresponds to right if set, otherwise unknown
},
),
'length_type': FieldType(
name='length_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'idle', # Rest period. Length with no strokes
1: 'active', # Length with strokes.
},
),
'local_date_time': FieldType( # seconds since 00:00 Dec 31 1989 in local time zone
name='local_date_time',
base_type=BASE_TYPES[0x86], # uint32
values={
0x10000000: 'min', # if date_time is < 0x10000000 then it is system time (seconds from device power on)
},
),
'localtime_into_day': FieldType( # number of seconds into the day since local 00:00:00
name='localtime_into_day',
base_type=BASE_TYPES[0x86], # uint32
),
'manufacturer': FieldType(
name='manufacturer',
base_type=BASE_TYPES[0x84], # uint16
values={
1: 'garmin',
2: 'garmin_fr405_antfs', # Do not use. Used by FR405 for ANTFS man id.
3: 'zephyr',
4: 'dayton',
5: 'idt',
6: 'srm',
7: 'quarq',
8: 'ibike',
9: 'saris',
10: 'spark_hk',
11: 'tanita',
12: 'echowell',
13: 'dynastream_oem',
14: 'nautilus',
15: 'dynastream',
16: 'timex',
17: 'metrigear',
18: 'xelic',
19: 'beurer',
20: 'cardiosport',
21: 'a_and_d',
22: 'hmm',
23: 'suunto',
24: 'thita_elektronik',
25: 'gpulse',
26: 'clean_mobile',
27: 'pedal_brain',
28: 'peaksware',
29: 'saxonar',
30: 'lemond_fitness',
31: 'dexcom',
32: 'wahoo_fitness',
33: 'octane_fitness',
34: 'archinoetics',
35: 'the_hurt_box',
36: 'citizen_systems',
37: 'magellan',
38: 'osynce',
39: 'holux',
40: 'concept2',
42: 'one_giant_leap',
43: 'ace_sensor',
44: 'brim_brothers',
45: 'xplova',
46: 'perception_digital',
47: 'bf1systems',
48: 'pioneer',
49: 'spantec',
50: 'metalogics',
51: '4iiiis',
52: 'seiko_epson',
53: 'seiko_epson_oem',
54: 'ifor_powell',
55: 'maxwell_guider',
56: 'star_trac',
57: 'breakaway',
58: 'alatech_technology_ltd',
59: 'mio_technology_europe',
60: 'rotor',
61: 'geonaute',
62: 'id_bike',
63: 'specialized',
64: 'wtek',
65: 'physical_enterprises',
66: 'north_pole_engineering',
67: 'bkool',
68: 'cateye',
69: 'stages_cycling',
70: 'sigmasport',
71: 'tomtom',
72: 'peripedal',
73: 'wattbike',
76: 'moxy',
77: 'ciclosport',
78: 'powerbahn',
79: 'acorn_projects_aps',
80: 'lifebeam',
81: 'bontrager',
82: 'wellgo',
83: 'scosche',
84: 'magura',
85: 'woodway',
86: 'elite',
87: 'nielsen_kellerman',
88: 'dk_city',
89: 'tacx',
90: 'direction_technology',
91: 'magtonic',
92: '1partcarbon',
93: 'inside_ride_technologies',
94: 'sound_of_motion',
95: 'stryd',
96: 'icg', # Indoorcycling Group
97: 'MiPulse',
98: 'bsx_athletics',
99: 'look',
100: 'campagnolo_srl',
101: 'body_bike_smart',
102: 'praxisworks',
103: 'limits_technology', # Limits Technology Ltd.
104: 'topaction_technology', # TopAction Technology Inc.
105: 'cosinuss',
106: 'fitcare',
107: 'magene',
108: 'giant_manufacturing_co',
109: 'tigrasport', # Tigrasport
110: 'salutron',
111: 'technogym',
112: 'bryton_sensors',
255: 'development',
257: 'healthandlife',
258: 'lezyne',
259: 'scribe_labs',
260: 'zwift',
261: 'watteam',
262: 'recon',
263: 'favero_electronics',
264: 'dynovelo',
265: 'strava',
266: 'precor', # Amer Sports
267: 'bryton',
268: 'sram',
269: 'navman', # MiTAC Global Corporation (Mio Technology)
270: 'cobi', # COBI GmbH
271: 'spivi',
272: 'mio_magellan',
273: 'evesports',
274: 'sensitivus_gauge',
275: 'podoon',
276: 'life_time_fitness',
277: 'falco_e_motors', # Falco eMotors Inc.
5759: 'actigraphcorp',
},
),
'mesg_count': FieldType(
name='mesg_count',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'num_per_file',
1: 'max_per_file',
2: 'max_per_file_type',
},
),
'mesg_num': FieldType(
name='mesg_num',
base_type=BASE_TYPES[0x84], # uint16
values={
0: 'file_id',
1: 'capabilities',
2: 'device_settings',
3: 'user_profile',
4: 'hrm_profile',
5: 'sdm_profile',
6: 'bike_profile',
7: 'zones_target',
8: 'hr_zone',
9: 'power_zone',
10: 'met_zone',
12: 'sport',
15: 'goal',
18: 'session',
19: 'lap',
20: 'record',
21: 'event',
23: 'device_info',
26: 'workout',
27: 'workout_step',
28: 'schedule',
30: 'weight_scale',
31: 'course',
32: 'course_point',
33: 'totals',
34: 'activity',
35: 'software',
37: 'file_capabilities',
38: 'mesg_capabilities',
39: 'field_capabilities',
49: 'file_creator',
51: 'blood_pressure',
53: 'speed_zone',
55: 'monitoring',
72: 'training_file',
78: 'hrv',
80: 'ant_rx',
81: 'ant_tx',
82: 'ant_channel_id',
101: 'length',
103: 'monitoring_info',
105: 'pad',
106: 'slave_device',
127: 'connectivity',
128: 'weather_conditions',
129: 'weather_alert',
131: 'cadence_zone',
132: 'hr',
142: 'segment_lap',
145: 'memo_glob',
148: 'segment_id',
149: 'segment_leaderboard_entry',
150: 'segment_point',
151: 'segment_file',
158: 'workout_session',
159: 'watchface_settings',
160: 'gps_metadata',
161: 'camera_event',
162: 'timestamp_correlation',
164: 'gyroscope_data',
165: 'accelerometer_data',
167: 'three_d_sensor_calibration',
169: 'video_frame',
174: 'obdii_data',
177: 'nmea_sentence',
178: 'aviation_attitude',
184: 'video',
185: 'video_title',
186: 'video_description',
187: 'video_clip',
188: 'ohr_settings',
200: 'exd_screen_configuration',
201: 'exd_data_field_configuration',
202: 'exd_data_concept_configuration',
206: 'field_description',
207: 'developer_data_id',
208: 'magnetometer_data',
},
),
'message_index': FieldType(
name='message_index',
base_type=BASE_TYPES[0x84], # uint16
values={
0x0FFF: 'mask', # index
0x7000: 'reserved', # reserved (default 0)
0x8000: 'selected', # message is selected if set
},
),
'power_phase_type': FieldType(
name='power_phase_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'power_phase_start_angle',
1: 'power_phase_end_angle',
2: 'power_phase_arc_length',
3: 'power_phase_center',
},
),
'pwr_zone_calc': FieldType(
name='pwr_zone_calc',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'custom',
1: 'percent_ftp',
},
),
'rider_position_type': FieldType(
name='rider_position_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'seated',
1: 'standing',
2: 'transition_to_seated',
3: 'transition_to_standing',
},
),
'schedule': FieldType(
name='schedule',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'workout',
1: 'course',
},
),
'segment_delete_status': FieldType(
name='segment_delete_status',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'do_not_delete',
1: 'delete_one',
2: 'delete_all',
},
),
'segment_lap_status': FieldType(
name='segment_lap_status',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'end',
1: 'fail',
},
),
'segment_leaderboard_type': FieldType(
name='segment_leaderboard_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'overall',
1: 'personal_best',
2: 'connections',
3: 'group',
4: 'challenger',
5: 'kom',
6: 'qom',
7: 'pr',
8: 'goal',
9: 'rival',
10: 'club_leader',
},
),
'segment_selection_type': FieldType(
name='segment_selection_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'starred',
1: 'suggested',
},
),
'sensor_type': FieldType(
name='sensor_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'accelerometer',
1: 'gyroscope',
2: 'compass', # Magnetometer
},
),
'session_trigger': FieldType(
name='session_trigger',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'activity_end',
1: 'manual', # User changed sport.
2: 'auto_multi_sport', # Auto multi-sport feature is enabled and user pressed lap button to advance session.
3: 'fitness_equipment', # Auto sport change caused by user linking to fitness equipment.
},
),
'side': FieldType(
name='side',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'right',
1: 'left',
},
),
'source_type': FieldType(
name='source_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'ant', # External device connected with ANT
1: 'antplus', # External device connected with ANT+
2: 'bluetooth', # External device connected with BT
3: 'bluetooth_low_energy', # External device connected with BLE
4: 'wifi', # External device connected with Wifi
5: 'local', # Onboard device
},
),
'sport': FieldType(
name='sport',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'running',
2: 'cycling',
3: 'transition', # Mulitsport transition
4: 'fitness_equipment',
5: 'swimming',
6: 'basketball',
7: 'soccer',
8: 'tennis',
9: 'american_football',
10: 'training',
11: 'walking',
12: 'cross_country_skiing',
13: 'alpine_skiing',
14: 'snowboarding',
15: 'rowing',
16: 'mountaineering',
17: 'hiking',
18: 'multisport',
19: 'paddling',
20: 'flying',
21: 'e_biking',
22: 'motorcycling',
23: 'boating',
24: 'driving',
25: 'golf',
26: 'hang_gliding',
27: 'horseback_riding',
28: 'hunting',
29: 'fishing',
30: 'inline_skating',
31: 'rock_climbing',
32: 'sailing',
33: 'ice_skating',
34: 'sky_diving',
35: 'snowshoeing',
36: 'snowmobiling',
37: 'stand_up_paddleboarding',
38: 'surfing',
39: 'wakeboarding',
40: 'water_skiing',
41: 'kayaking',
42: 'rafting',
43: 'windsurfing',
44: 'kitesurfing',
45: 'tactical',
46: 'jumpmaster',
47: 'boxing',
48: 'floor_climbing',
254: 'all', # All is for goals only to include all sports.
},
),
'sport_bits_0': FieldType( # Bit field corresponding to sport enum type (1 << sport).
name='sport_bits_0',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'generic',
0x02: 'running',
0x04: 'cycling',
0x08: 'transition', # Mulitsport transition
0x10: 'fitness_equipment',
0x20: 'swimming',
0x40: 'basketball',
0x80: 'soccer',
},
),
'sport_bits_1': FieldType( # Bit field corresponding to sport enum type (1 << (sport-8)).
name='sport_bits_1',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'tennis',
0x02: 'american_football',
0x04: 'training',
0x08: 'walking',
0x10: 'cross_country_skiing',
0x20: 'alpine_skiing',
0x40: 'snowboarding',
0x80: 'rowing',
},
),
'sport_bits_2': FieldType( # Bit field corresponding to sport enum type (1 << (sport-16)).
name='sport_bits_2',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'mountaineering',
0x02: 'hiking',
0x04: 'multisport',
0x08: 'paddling',
0x10: 'flying',
0x20: 'e_biking',
0x40: 'motorcycling',
0x80: 'boating',
},
),
'sport_bits_3': FieldType( # Bit field corresponding to sport enum type (1 << (sport-24)).
name='sport_bits_3',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'driving',
0x02: 'golf',
0x04: 'hang_gliding',
0x08: 'horseback_riding',
0x10: 'hunting',
0x20: 'fishing',
0x40: 'inline_skating',
0x80: 'rock_climbing',
},
),
'sport_bits_4': FieldType( # Bit field corresponding to sport enum type (1 << (sport-32)).
name='sport_bits_4',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'sailing',
0x02: 'ice_skating',
0x04: 'sky_diving',
0x08: 'snowshoeing',
0x10: 'snowmobiling',
0x20: 'stand_up_paddleboarding',
0x40: 'surfing',
0x80: 'wakeboarding',
},
),
'sport_bits_5': FieldType( # Bit field corresponding to sport enum type (1 << (sport-40)).
name='sport_bits_5',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'water_skiing',
0x02: 'kayaking',
0x04: 'rafting',
0x08: 'windsurfing',
0x10: 'kitesurfing',
0x20: 'tactical',
0x40: 'jumpmaster',
0x80: 'boxing',
},
),
'sport_bits_6': FieldType( # Bit field corresponding to sport enum type (1 << (sport-48)).
name='sport_bits_6',
base_type=BASE_TYPES[0x0A], # uint8z
values={
0x01: 'floor_climbing',
},
),
'sport_event': FieldType(
name='sport_event',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'uncategorized',
1: 'geocaching',
2: 'fitness',
3: 'recreation',
4: 'race',
5: 'special_event',
6: 'training',
7: 'transportation',
8: 'touring',
},
),
'stroke_type': FieldType(
name='stroke_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'no_event',
1: 'other', # stroke was detected but cannot be identified
2: 'serve',
3: 'forehand',
4: 'backhand',
5: 'smash',
},
),
'sub_sport': FieldType(
name='sub_sport',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'generic',
1: 'treadmill', # Run/Fitness Equipment
2: 'street', # Run
3: 'trail', # Run
4: 'track', # Run
5: 'spin', # Cycling
6: 'indoor_cycling', # Cycling/Fitness Equipment
7: 'road', # Cycling
8: 'mountain', # Cycling
9: 'downhill', # Cycling
10: 'recumbent', # Cycling
11: 'cyclocross', # Cycling
12: 'hand_cycling', # Cycling
13: 'track_cycling', # Cycling
14: 'indoor_rowing', # Fitness Equipment
15: 'elliptical', # Fitness Equipment
16: 'stair_climbing', # Fitness Equipment
17: 'lap_swimming', # Swimming
18: 'open_water', # Swimming
19: 'flexibility_training', # Training
20: 'strength_training', # Training
21: 'warm_up', # Tennis
22: 'match', # Tennis
23: 'exercise', # Tennis
24: 'challenge', # Tennis
25: 'indoor_skiing', # Fitness Equipment
26: 'cardio_training', # Training
27: 'indoor_walking', # Walking/Fitness Equipment
28: 'e_bike_fitness', # E-Biking
29: 'bmx', # Cycling
30: 'casual_walking', # Walking
31: 'speed_walking', # Walking
32: 'bike_to_run_transition', # Transition
33: 'run_to_bike_transition', # Transition
34: 'swim_to_bike_transition', # Transition
35: 'atv', # Motorcycling
36: 'motocross', # Motorcycling
37: 'backcountry', # Alpine Skiing/Snowboarding
38: 'resort', # Alpine Skiing/Snowboarding
39: 'rc_drone', # Flying
40: 'wingsuit', # Flying
41: 'whitewater', # Kayaking/Rafting
42: 'skate_skiing', # Cross Country Skiing
43: 'yoga', # Training
44: 'pilates', # Training
45: 'indoor_running', # Run
46: 'gravel_cycling', # Cycling
47: 'e_bike_mountain', # Cycling
48: 'commuting', # Cycling
49: 'mixed_surface', # Cycling
50: 'navigate',
51: 'track_me',
52: 'map',
254: 'all',
},
),
'supported_exd_screen_layouts': FieldType(
name='supported_exd_screen_layouts',
base_type=BASE_TYPES[0x8C], # uint32z
values={
0x00000001: 'full_screen',
0x00000002: 'half_vertical',
0x00000004: 'half_horizontal',
0x00000008: 'half_vertical_right_split',
0x00000010: 'half_horizontal_bottom_split',
0x00000020: 'full_quarter_split',
0x00000040: 'half_vertical_left_split',
0x00000080: 'half_horizontal_top_split',
},
),
'swim_stroke': FieldType(
name='swim_stroke',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'freestyle',
1: 'backstroke',
2: 'breaststroke',
3: 'butterfly',
4: 'drill',
5: 'mixed',
6: 'im', # IM is a mixed interval containing the same number of lengths for each of: Butterfly, Backstroke, Breaststroke, Freestyle, swam in that order.
},
),
'switch': FieldType(
name='switch',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'off',
1: 'on',
2: 'auto',
},
),
'time_into_day': FieldType( # number of seconds into the day since 00:00:00 UTC
name='time_into_day',
base_type=BASE_TYPES[0x86], # uint32
),
'time_mode': FieldType(
name='time_mode',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'hour12',
1: 'hour24', # Does not use a leading zero and has a colon
2: 'military', # Uses a leading zero and does not have a colon
3: 'hour_12_with_seconds',
4: 'hour_24_with_seconds',
5: 'utc',
},
),
'time_zone': FieldType(
name='time_zone',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'almaty',
1: 'bangkok',
2: 'bombay',
3: 'brasilia',
4: 'cairo',
5: 'cape_verde_is',
6: 'darwin',
7: 'eniwetok',
8: 'fiji',
9: 'hong_kong',
10: 'islamabad',
11: 'kabul',
12: 'magadan',
13: 'mid_atlantic',
14: 'moscow',
15: 'muscat',
16: 'newfoundland',
17: 'samoa',
18: 'sydney',
19: 'tehran',
20: 'tokyo',
21: 'us_alaska',
22: 'us_atlantic',
23: 'us_central',
24: 'us_eastern',
25: 'us_hawaii',
26: 'us_mountain',
27: 'us_pacific',
28: 'other',
29: 'auckland',
30: 'kathmandu',
31: 'europe_western_wet',
32: 'europe_central_cet',
33: 'europe_eastern_eet',
34: 'jakarta',
35: 'perth',
36: 'adelaide',
37: 'brisbane',
38: 'tasmania',
39: 'iceland',
40: 'amsterdam',
41: 'athens',
42: 'barcelona',
43: 'berlin',
44: 'brussels',
45: 'budapest',
46: 'copenhagen',
47: 'dublin',
48: 'helsinki',
49: 'lisbon',
50: 'london',
51: 'madrid',
52: 'munich',
53: 'oslo',
54: 'paris',
55: 'prague',
56: 'reykjavik',
57: 'rome',
58: 'stockholm',
59: 'vienna',
60: 'warsaw',
61: 'zurich',
62: 'quebec',
63: 'ontario',
64: 'manitoba',
65: 'saskatchewan',
66: 'alberta',
67: 'british_columbia',
68: 'boise',
69: 'boston',
70: 'chicago',
71: 'dallas',
72: 'denver',
73: 'kansas_city',
74: 'las_vegas',
75: 'los_angeles',
76: 'miami',
77: 'minneapolis',
78: 'new_york',
79: 'new_orleans',
80: 'phoenix',
81: 'santa_fe',
82: 'seattle',
83: 'washington_dc',
84: 'us_arizona',
85: 'chita',
86: 'ekaterinburg',
87: 'irkutsk',
88: 'kaliningrad',
89: 'krasnoyarsk',
90: 'novosibirsk',
91: 'petropavlovsk_kamchatskiy',
92: 'samara',
93: 'vladivostok',
94: 'mexico_central',
95: 'mexico_mountain',
96: 'mexico_pacific',
97: 'cape_town',
98: 'winkhoek',
99: 'lagos',
100: 'riyahd',
101: 'venezuela',
102: 'australia_lh',
103: 'santiago',
253: 'manual',
254: 'automatic',
},
),
'timer_trigger': FieldType( # timer event data
name='timer_trigger',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'manual',
1: 'auto',
2: 'fitness_equipment',
},
),
'turn_type': FieldType(
name='turn_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'arriving_idx',
1: 'arriving_left_idx',
2: 'arriving_right_idx',
3: 'arriving_via_idx',
4: 'arriving_via_left_idx',
5: 'arriving_via_right_idx',
6: 'bear_keep_left_idx',
7: 'bear_keep_right_idx',
8: 'continue_idx',
9: 'exit_left_idx',
10: 'exit_right_idx',
11: 'ferry_idx',
12: 'roundabout_45_idx',
13: 'roundabout_90_idx',
14: 'roundabout_135_idx',
15: 'roundabout_180_idx',
16: 'roundabout_225_idx',
17: 'roundabout_270_idx',
18: 'roundabout_315_idx',
19: 'roundabout_360_idx',
20: 'roundabout_neg_45_idx',
21: 'roundabout_neg_90_idx',
22: 'roundabout_neg_135_idx',
23: 'roundabout_neg_180_idx',
24: 'roundabout_neg_225_idx',
25: 'roundabout_neg_270_idx',
26: 'roundabout_neg_315_idx',
27: 'roundabout_neg_360_idx',
28: 'roundabout_generic_idx',
29: 'roundabout_neg_generic_idx',
30: 'sharp_turn_left_idx',
31: 'sharp_turn_right_idx',
32: 'turn_left_idx',
33: 'turn_right_idx',
34: 'uturn_left_idx',
35: 'uturn_right_idx',
36: 'icon_inv_idx',
37: 'icon_idx_cnt',
},
),
'user_local_id': FieldType(
name='user_local_id',
base_type=BASE_TYPES[0x84], # uint16
values={
0x0000: 'local_min',
0x000F: 'local_max',
0x0010: 'stationary_min',
0x00FF: 'stationary_max',
0x0100: 'portable_min',
0xFFFE: 'portable_max',
},
),
'watchface_mode': FieldType(
name='watchface_mode',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'digital',
1: 'analog',
2: 'connect_iq',
3: 'disabled',
},
),
'weather_report': FieldType(
name='weather_report',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'current',
            # 1: 'forecast', # Deprecated; replaced by hourly_forecast below (same raw value, so only that entry would survive in this dict)
            1: 'hourly_forecast',
2: 'daily_forecast',
},
),
'weather_severe_type': FieldType(
name='weather_severe_type',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'unspecified',
1: 'tornado',
2: 'tsunami',
3: 'hurricane',
4: 'extreme_wind',
5: 'typhoon',
6: 'inland_hurricane',
7: 'hurricane_force_wind',
8: 'waterspout',
9: 'severe_thunderstorm',
10: 'wreckhouse_winds',
11: 'les_suetes_wind',
12: 'avalanche',
13: 'flash_flood',
14: 'tropical_storm',
15: 'inland_tropical_storm',
16: 'blizzard',
17: 'ice_storm',
18: 'freezing_rain',
19: 'debris_flow',
20: 'flash_freeze',
21: 'dust_storm',
22: 'high_wind',
23: 'winter_storm',
24: 'heavy_freezing_spray',
25: 'extreme_cold',
26: 'wind_chill',
27: 'cold_wave',
28: 'heavy_snow_alert',
29: 'lake_effect_blowing_snow',
30: 'snow_squall',
31: 'lake_effect_snow',
32: 'winter_weather',
33: 'sleet',
34: 'snowfall',
35: 'snow_and_blowing_snow',
36: 'blowing_snow',
37: 'snow_alert',
38: 'arctic_outflow',
39: 'freezing_drizzle',
40: 'storm',
41: 'storm_surge',
42: 'rainfall',
43: 'areal_flood',
44: 'coastal_flood',
45: 'lakeshore_flood',
46: 'excessive_heat',
47: 'heat',
48: 'weather',
49: 'high_heat_and_humidity',
50: 'humidex_and_health',
51: 'humidex',
52: 'gale',
53: 'freezing_spray',
54: 'special_marine',
55: 'squall',
56: 'strong_wind',
57: 'lake_wind',
58: 'marine_weather',
59: 'wind',
60: 'small_craft_hazardous_seas',
61: 'hazardous_seas',
62: 'small_craft',
63: 'small_craft_winds',
64: 'small_craft_rough_bar',
65: 'high_water_level',
66: 'ashfall',
67: 'freezing_fog',
68: 'dense_fog',
69: 'dense_smoke',
70: 'blowing_dust',
71: 'hard_freeze',
72: 'freeze',
73: 'frost',
74: 'fire_weather',
75: 'flood',
76: 'rip_tide',
77: 'high_surf',
78: 'smog',
79: 'air_quality',
80: 'brisk_wind',
81: 'air_stagnation',
82: 'low_water',
83: 'hydrological',
84: 'special_weather',
},
),
'weather_severity': FieldType(
name='weather_severity',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'unknown',
1: 'warning',
2: 'watch',
3: 'advisory',
4: 'statement',
},
),
'weather_status': FieldType(
name='weather_status',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'clear',
1: 'partly_cloudy',
2: 'mostly_cloudy',
3: 'rain',
4: 'snow',
5: 'windy',
6: 'thunderstorms',
7: 'wintry_mix',
8: 'fog',
11: 'hazy',
12: 'hail',
13: 'scattered_showers',
14: 'scattered_thunderstorms',
15: 'unknown_precipitation',
16: 'light_rain',
17: 'heavy_rain',
18: 'light_snow',
19: 'heavy_snow',
20: 'light_rain_snow',
21: 'heavy_rain_snow',
22: 'cloudy',
},
),
'weight': FieldType(
name='weight',
base_type=BASE_TYPES[0x84], # uint16
values={
0xFFFE: 'calculating',
},
),
'wkt_step_duration': FieldType(
name='wkt_step_duration',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'time',
1: 'distance',
2: 'hr_less_than',
3: 'hr_greater_than',
4: 'calories',
5: 'open',
6: 'repeat_until_steps_cmplt',
7: 'repeat_until_time',
8: 'repeat_until_distance',
9: 'repeat_until_calories',
10: 'repeat_until_hr_less_than',
11: 'repeat_until_hr_greater_than',
12: 'repeat_until_power_less_than',
13: 'repeat_until_power_greater_than',
14: 'power_less_than',
15: 'power_greater_than',
16: 'training_peaks_tss',
17: 'repeat_until_power_last_lap_less_than',
18: 'repeat_until_max_power_last_lap_less_than',
19: 'power_3s_less_than',
20: 'power_10s_less_than',
21: 'power_30s_less_than',
22: 'power_3s_greater_than',
23: 'power_10s_greater_than',
24: 'power_30s_greater_than',
25: 'power_lap_less_than',
26: 'power_lap_greater_than',
27: 'repeat_until_training_peaks_tss',
28: 'repetition_time',
},
),
'wkt_step_target': FieldType(
name='wkt_step_target',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'speed',
1: 'heart_rate',
2: 'open',
3: 'cadence',
4: 'power',
5: 'grade',
6: 'resistance',
7: 'power_3s',
8: 'power_10s',
9: 'power_30s',
10: 'power_lap',
11: 'swim_stroke',
12: 'speed_lap',
13: 'heart_rate_lap',
},
),
'workout_capabilities': FieldType(
name='workout_capabilities',
base_type=BASE_TYPES[0x8C], # uint32z
values={
0x00000001: 'interval',
0x00000002: 'custom',
0x00000004: 'fitness_equipment',
0x00000008: 'firstbeat',
0x00000010: 'new_leaf',
0x00000020: 'tcx', # For backwards compatibility. Watch should add missing id fields then clear flag.
0x00000080: 'speed', # Speed source required for workout step.
0x00000100: 'heart_rate', # Heart rate source required for workout step.
0x00000200: 'distance', # Distance source required for workout step.
0x00000400: 'cadence', # Cadence source required for workout step.
0x00000800: 'power', # Power source required for workout step.
0x00001000: 'grade', # Grade source required for workout step.
0x00002000: 'resistance', # Resistance source required for workout step.
0x00004000: 'protected',
},
),
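    # workout_capabilities above (like the other uint32z types such as
    # supported_exd_screen_layouts) is a bit mask, so several capabilities can
    # be set in one raw value.  A decoder would typically test flags with
    # bitwise AND, e.g. (commented sketch only):
    #
    #   caps = 0x00000103                # hypothetical raw value
    #   bool(caps & 0x00000001)          # interval   -> True
    #   bool(caps & 0x00000100)          # heart_rate -> True
    #   bool(caps & 0x00000800)          # power      -> False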
'workout_equipment': FieldType(
name='workout_equipment',
base_type=BASE_TYPES[0x00], # enum
values={
0: 'none',
1: 'swim_fins',
2: 'swim_kickboard',
3: 'swim_paddles',
4: 'swim_pull_buoy',
5: 'swim_snorkel',
},
),
    'workout_hr': FieldType( # 0-100 indicates % of max hr; >100 indicates bpm (255 max) plus 100
name='workout_hr',
base_type=BASE_TYPES[0x86], # uint32
values={
100: 'bpm_offset',
},
),
'workout_power': FieldType( # 0 - 1000 indicates % of functional threshold power; >1000 indicates watts plus 1000.
name='workout_power',
base_type=BASE_TYPES[0x86], # uint32
values={
1000: 'watts_offset',
},
),
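    # workout_hr and workout_power above use an offset convention rather than a
    # scale: raw values at or below the named offset are percentages, values
    # above it are absolute readings plus the offset.  For example:
    #
    #   raw workout_hr 80      -> 80 % of max heart rate
    #   raw workout_hr 250     -> 250 - 100   = 150 bpm
    #   raw workout_power 1250 -> 1250 - 1000 = 250 watts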
}
FIELD_TYPE_TIMESTAMP = Field(name='timestamp', type=FIELD_TYPES['date_time'], def_num=253, units='s')
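# Many messages below reuse FIELD_TYPE_TIMESTAMP (def_num 253).  FIT date_time
# values count seconds since the FIT epoch (1989-12-31T00:00:00 UTC), so an
# absolute raw timestamp can be converted to Unix time by adding the epoch
# offset.  Sketch (assuming a raw integer value rather than an already
# converted datetime):
#
#   FIT_EPOCH_OFFSET = 631065600        # seconds from 1970-01-01 to 1989-12-31
#   unix_ts = raw_date_time + FIT_EPOCH_OFFSET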
MESSAGE_TYPES = {
############################ Common Messages #############################
0: MessageType( # Must be first message in file.
name='file_id',
mesg_num=0,
fields={
0: Field(
name='type',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=1,
),
2: Field(
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=2,
subfields=(
SubField(
name='garmin_product',
def_num=2,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=1,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=1,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=1,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
3: Field(
name='serial_number',
type=BASE_TYPES[0x8C], # uint32z
def_num=3,
),
            4: Field( # Only set for files that can be created/erased.
name='time_created',
type=FIELD_TYPES['date_time'],
def_num=4,
),
5: Field( # Only set for files that are not created/erased.
name='number',
type=BASE_TYPES[0x84], # uint16
def_num=5,
),
8: Field( # Optional free form string to indicate the devices name or model
name='product_name',
type=BASE_TYPES[0x07], # string
def_num=8,
),
},
),
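    # In file_id above, field 2 ('product') carries a subfield: a decoder is
    # expected to re-type the value as 'garmin_product' when the already
    # decoded 'manufacturer' field (def_num 1) matches one of the
    # ReferenceField raw values.  Roughly (commented sketch of the resolution
    # rule, not necessarily this library's exact decoder logic):
    #
    #   if manufacturer_raw in (1, 13, 15):    # garmin / dynastream_oem / dynastream
    #       product_type = FIELD_TYPES['garmin_product']
    #   else:
    #       product_type = BASE_TYPES[0x84]    # plain uint16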
#################################### ####################################
1: MessageType(
name='capabilities',
mesg_num=1,
fields={
0: Field( # Use language_bits_x types where x is index of array.
name='languages',
type=BASE_TYPES[0x0A], # uint8z
def_num=0,
),
1: Field( # Use sport_bits_x types where x is index of array.
name='sports',
type=FIELD_TYPES['sport_bits_0'],
def_num=1,
),
21: Field(
name='workouts_supported',
type=FIELD_TYPES['workout_capabilities'],
def_num=21,
),
23: Field(
name='connectivity_supported',
type=FIELD_TYPES['connectivity_capabilities'],
def_num=23,
),
},
),
3: MessageType(
name='user_profile',
mesg_num=3,
fields={
0: Field(
name='friendly_name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field(
name='gender',
type=FIELD_TYPES['gender'],
def_num=1,
),
2: Field(
name='age',
type=BASE_TYPES[0x02], # uint8
def_num=2,
units='years',
),
3: Field(
name='height',
type=BASE_TYPES[0x02], # uint8
def_num=3,
scale=100,
units='m',
),
4: Field(
name='weight',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=10,
units='kg',
),
5: Field(
name='language',
type=FIELD_TYPES['language'],
def_num=5,
),
6: Field(
name='elev_setting',
type=FIELD_TYPES['display_measure'],
def_num=6,
),
7: Field(
name='weight_setting',
type=FIELD_TYPES['display_measure'],
def_num=7,
),
8: Field(
name='resting_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=8,
units='bpm',
),
9: Field(
name='default_max_running_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=9,
units='bpm',
),
10: Field(
name='default_max_biking_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=10,
units='bpm',
),
11: Field(
name='default_max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=11,
units='bpm',
),
12: Field(
name='hr_setting',
type=FIELD_TYPES['display_heart'],
def_num=12,
),
13: Field(
name='speed_setting',
type=FIELD_TYPES['display_measure'],
def_num=13,
),
14: Field(
name='dist_setting',
type=FIELD_TYPES['display_measure'],
def_num=14,
),
16: Field(
name='power_setting',
type=FIELD_TYPES['display_power'],
def_num=16,
),
17: Field(
name='activity_class',
type=FIELD_TYPES['activity_class'],
def_num=17,
),
18: Field(
name='position_setting',
type=FIELD_TYPES['display_position'],
def_num=18,
),
21: Field(
name='temperature_setting',
type=FIELD_TYPES['display_measure'],
def_num=21,
),
22: Field(
name='local_id',
type=FIELD_TYPES['user_local_id'],
def_num=22,
),
23: Field(
name='global_id',
type=BASE_TYPES[0x0D], # byte
def_num=23,
),
28: Field( # Typical wake time
name='wake_time',
type=FIELD_TYPES['localtime_into_day'],
def_num=28,
),
29: Field( # Typical bed time
name='sleep_time',
type=FIELD_TYPES['localtime_into_day'],
def_num=29,
),
30: Field(
name='height_setting',
type=FIELD_TYPES['display_measure'],
def_num=30,
),
            31: Field( # User-defined running step length; set to 0 for auto length
name='user_running_step_length',
type=BASE_TYPES[0x84], # uint16
def_num=31,
scale=1000,
units='m',
),
            32: Field( # User-defined walking step length; set to 0 for auto length
name='user_walking_step_length',
type=BASE_TYPES[0x84], # uint16
def_num=32,
scale=1000,
units='m',
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
4: MessageType(
name='hrm_profile',
mesg_num=4,
fields={
0: Field(
name='enabled',
type=FIELD_TYPES['bool'],
def_num=0,
),
1: Field(
name='hrm_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=1,
),
2: Field(
name='log_hrv',
type=FIELD_TYPES['bool'],
def_num=2,
),
3: Field(
name='hrm_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=3,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
5: MessageType(
name='sdm_profile',
mesg_num=5,
fields={
0: Field(
name='enabled',
type=FIELD_TYPES['bool'],
def_num=0,
),
1: Field(
name='sdm_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=1,
),
2: Field(
name='sdm_cal_factor',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=10,
units='%',
),
3: Field(
name='odometer',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=100,
units='m',
),
4: Field( # Use footpod for speed source instead of GPS
name='speed_source',
type=FIELD_TYPES['bool'],
def_num=4,
),
5: Field(
name='sdm_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=5,
),
7: Field( # Rollover counter that can be used to extend the odometer
name='odometer_rollover',
type=BASE_TYPES[0x02], # uint8
def_num=7,
),
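            # The odometer above is a uint32 with scale 100, so it will
            # eventually wrap; odometer_rollover counts those wraps and can be
            # combined with it, roughly (sketch, interpretation assumed from
            # the field comments):
            #
            #   total_m = (odometer_rollover * 2**32 + odometer_raw) / 100.0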
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
6: MessageType(
name='bike_profile',
mesg_num=6,
fields={
0: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=1,
),
2: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=2,
),
3: Field(
name='odometer',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=100,
units='m',
),
4: Field(
name='bike_spd_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=4,
),
5: Field(
name='bike_cad_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=5,
),
6: Field(
name='bike_spdcad_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=6,
),
7: Field(
name='bike_power_ant_id',
type=BASE_TYPES[0x8B], # uint16z
def_num=7,
),
8: Field(
name='custom_wheelsize',
type=BASE_TYPES[0x84], # uint16
def_num=8,
scale=1000,
units='m',
),
9: Field(
name='auto_wheelsize',
type=BASE_TYPES[0x84], # uint16
def_num=9,
scale=1000,
units='m',
),
10: Field(
name='bike_weight',
type=BASE_TYPES[0x84], # uint16
def_num=10,
scale=10,
units='kg',
),
11: Field(
name='power_cal_factor',
type=BASE_TYPES[0x84], # uint16
def_num=11,
scale=10,
units='%',
),
12: Field(
name='auto_wheel_cal',
type=FIELD_TYPES['bool'],
def_num=12,
),
13: Field(
name='auto_power_zero',
type=FIELD_TYPES['bool'],
def_num=13,
),
14: Field(
name='id',
type=BASE_TYPES[0x02], # uint8
def_num=14,
),
15: Field(
name='spd_enabled',
type=FIELD_TYPES['bool'],
def_num=15,
),
16: Field(
name='cad_enabled',
type=FIELD_TYPES['bool'],
def_num=16,
),
17: Field(
name='spdcad_enabled',
type=FIELD_TYPES['bool'],
def_num=17,
),
18: Field(
name='power_enabled',
type=FIELD_TYPES['bool'],
def_num=18,
),
19: Field(
name='crank_length',
type=BASE_TYPES[0x02], # uint8
def_num=19,
scale=2,
offset=-110,
units='mm',
),
20: Field(
name='enabled',
type=FIELD_TYPES['bool'],
def_num=20,
),
21: Field(
name='bike_spd_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=21,
),
22: Field(
name='bike_cad_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=22,
),
23: Field(
name='bike_spdcad_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=23,
),
24: Field(
name='bike_power_ant_id_trans_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=24,
),
37: Field( # Rollover counter that can be used to extend the odometer
name='odometer_rollover',
type=BASE_TYPES[0x02], # uint8
def_num=37,
),
38: Field( # Number of front gears
name='front_gear_num',
type=BASE_TYPES[0x0A], # uint8z
def_num=38,
),
            39: Field( # Number of teeth on each gear; 0 is innermost
name='front_gear',
type=BASE_TYPES[0x0A], # uint8z
def_num=39,
),
40: Field( # Number of rear gears
name='rear_gear_num',
type=BASE_TYPES[0x0A], # uint8z
def_num=40,
),
            41: Field( # Number of teeth on each gear; 0 is innermost
name='rear_gear',
type=BASE_TYPES[0x0A], # uint8z
def_num=41,
),
44: Field(
name='shimano_di2_enabled',
type=FIELD_TYPES['bool'],
def_num=44,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
8: MessageType(
name='hr_zone',
mesg_num=8,
fields={
1: Field(
name='high_bpm',
type=BASE_TYPES[0x02], # uint8
def_num=1,
units='bpm',
),
2: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=2,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
9: MessageType(
name='power_zone',
mesg_num=9,
fields={
1: Field(
name='high_value',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='watts',
),
2: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=2,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
10: MessageType(
name='met_zone',
mesg_num=10,
fields={
1: Field(
name='high_bpm',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
2: Field(
name='calories',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=10,
units='kcal/min',
),
3: Field(
name='fat_calories',
type=BASE_TYPES[0x02], # uint8
def_num=3,
scale=10,
units='kcal/min',
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
12: MessageType(
name='sport',
mesg_num=12,
fields={
0: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=0,
),
1: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=1,
),
3: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=3,
),
},
),
18: MessageType(
name='session',
mesg_num=18,
fields={
0: Field( # session
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field( # stop
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='start_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='start_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=3,
units='semicircles',
),
4: Field(
name='start_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=4,
units='semicircles',
),
5: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=5,
),
6: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=6,
),
7: Field( # Time (includes pauses)
name='total_elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=7,
scale=1000,
units='s',
),
8: Field( # Timer Time (excludes pauses)
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=8,
scale=1000,
units='s',
),
9: Field(
name='total_distance',
type=BASE_TYPES[0x86], # uint32
def_num=9,
scale=100,
units='m',
),
10: Field(
name='total_cycles',
type=BASE_TYPES[0x86], # uint32
def_num=10,
units='cycles',
subfields=(
SubField(
name='total_strides',
def_num=10,
type=BASE_TYPES[0x86], # uint32
units='strides',
ref_fields=(
ReferenceField(
name='sport',
def_num=5,
value='running',
raw_value=1,
),
ReferenceField(
name='sport',
def_num=5,
value='walking',
raw_value=11,
),
),
),
),
),
11: Field(
name='total_calories',
type=BASE_TYPES[0x84], # uint16
def_num=11,
units='kcal',
),
13: Field( # If New Leaf
name='total_fat_calories',
type=BASE_TYPES[0x84], # uint16
def_num=13,
units='kcal',
),
14: Field( # total_distance / total_timer_time
name='avg_speed',
type=BASE_TYPES[0x84], # uint16
def_num=14,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_avg_speed',
def_num=124,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
15: Field(
name='max_speed',
type=BASE_TYPES[0x84], # uint16
def_num=15,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_max_speed',
def_num=125,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
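            # Fields 14 and 15 above also carry ComponentFields: the same
            # 16-bit raw value is copied into the wider enhanced_avg_speed /
            # enhanced_max_speed fields (def_num 124/125) using the declared
            # bits/bit_offset, with the same scale.  E.g. a raw avg_speed of
            # 3500 decodes to 3.5 m/s in both fields.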
16: Field( # average heart rate (excludes pause time)
name='avg_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=16,
units='bpm',
),
17: Field(
name='max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=17,
units='bpm',
),
18: Field( # total_cycles / total_timer_time if non_zero_avg_cadence otherwise total_cycles / total_elapsed_time
name='avg_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=18,
units='rpm',
subfields=(
SubField(
name='avg_running_cadence',
def_num=18,
type=BASE_TYPES[0x02], # uint8
units='strides/min',
ref_fields=(
ReferenceField(
name='sport',
def_num=5,
value='running',
raw_value=1,
),
),
),
),
),
19: Field(
name='max_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=19,
units='rpm',
subfields=(
SubField(
name='max_running_cadence',
def_num=19,
type=BASE_TYPES[0x02], # uint8
units='strides/min',
ref_fields=(
ReferenceField(
name='sport',
def_num=5,
value='running',
raw_value=1,
),
),
),
),
),
20: Field( # total_power / total_timer_time if non_zero_avg_power otherwise total_power / total_elapsed_time
name='avg_power',
type=BASE_TYPES[0x84], # uint16
def_num=20,
units='watts',
),
21: Field(
name='max_power',
type=BASE_TYPES[0x84], # uint16
def_num=21,
units='watts',
),
22: Field(
name='total_ascent',
type=BASE_TYPES[0x84], # uint16
def_num=22,
units='m',
),
23: Field(
name='total_descent',
type=BASE_TYPES[0x84], # uint16
def_num=23,
units='m',
),
24: Field(
name='total_training_effect',
type=BASE_TYPES[0x02], # uint8
def_num=24,
scale=10,
),
25: Field(
name='first_lap_index',
type=BASE_TYPES[0x84], # uint16
def_num=25,
),
26: Field(
name='num_laps',
type=BASE_TYPES[0x84], # uint16
def_num=26,
),
27: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=27,
),
28: Field(
name='trigger',
type=FIELD_TYPES['session_trigger'],
def_num=28,
),
29: Field(
name='nec_lat',
type=BASE_TYPES[0x85], # sint32
def_num=29,
units='semicircles',
),
30: Field(
name='nec_long',
type=BASE_TYPES[0x85], # sint32
def_num=30,
units='semicircles',
),
31: Field(
name='swc_lat',
type=BASE_TYPES[0x85], # sint32
def_num=31,
units='semicircles',
),
32: Field(
name='swc_long',
type=BASE_TYPES[0x85], # sint32
def_num=32,
units='semicircles',
),
34: Field(
name='normalized_power',
type=BASE_TYPES[0x84], # uint16
def_num=34,
units='watts',
),
35: Field(
name='training_stress_score',
type=BASE_TYPES[0x84], # uint16
def_num=35,
scale=10,
units='tss',
),
36: Field(
name='intensity_factor',
type=BASE_TYPES[0x84], # uint16
def_num=36,
scale=1000,
units='if',
),
37: Field(
name='left_right_balance',
type=FIELD_TYPES['left_right_balance_100'],
def_num=37,
),
41: Field(
name='avg_stroke_count',
type=BASE_TYPES[0x86], # uint32
def_num=41,
scale=10,
units='strokes/lap',
),
42: Field(
name='avg_stroke_distance',
type=BASE_TYPES[0x84], # uint16
def_num=42,
scale=100,
units='m',
),
43: Field(
name='swim_stroke',
type=FIELD_TYPES['swim_stroke'],
def_num=43,
units='swim_stroke',
),
44: Field(
name='pool_length',
type=BASE_TYPES[0x84], # uint16
def_num=44,
scale=100,
units='m',
),
45: Field(
name='threshold_power',
type=BASE_TYPES[0x84], # uint16
def_num=45,
units='watts',
),
46: Field(
name='pool_length_unit',
type=FIELD_TYPES['display_measure'],
def_num=46,
),
47: Field( # # of active lengths of swim pool
name='num_active_lengths',
type=BASE_TYPES[0x84], # uint16
def_num=47,
units='lengths',
),
48: Field(
name='total_work',
type=BASE_TYPES[0x86], # uint32
def_num=48,
units='J',
),
49: Field(
name='avg_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=49,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_avg_altitude',
def_num=126,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
50: Field(
name='max_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=50,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_max_altitude',
def_num=128,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
51: Field(
name='gps_accuracy',
type=BASE_TYPES[0x02], # uint8
def_num=51,
units='m',
),
52: Field(
name='avg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=52,
scale=100,
units='%',
),
53: Field(
name='avg_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=53,
scale=100,
units='%',
),
54: Field(
name='avg_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=54,
scale=100,
units='%',
),
55: Field(
name='max_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=55,
scale=100,
units='%',
),
56: Field(
name='max_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=56,
scale=100,
units='%',
),
57: Field(
name='avg_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=57,
units='C',
),
58: Field(
name='max_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=58,
units='C',
),
59: Field(
name='total_moving_time',
type=BASE_TYPES[0x86], # uint32
def_num=59,
scale=1000,
units='s',
),
60: Field(
name='avg_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=60,
scale=1000,
units='m/s',
),
61: Field(
name='avg_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=61,
scale=1000,
units='m/s',
),
62: Field(
name='max_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=62,
scale=1000,
units='m/s',
),
63: Field(
name='max_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=63,
scale=1000,
units='m/s',
),
64: Field(
name='min_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=64,
units='bpm',
),
65: Field(
name='time_in_hr_zone',
type=BASE_TYPES[0x86], # uint32
def_num=65,
scale=1000,
units='s',
),
66: Field(
name='time_in_speed_zone',
type=BASE_TYPES[0x86], # uint32
def_num=66,
scale=1000,
units='s',
),
67: Field(
name='time_in_cadence_zone',
type=BASE_TYPES[0x86], # uint32
def_num=67,
scale=1000,
units='s',
),
68: Field(
name='time_in_power_zone',
type=BASE_TYPES[0x86], # uint32
def_num=68,
scale=1000,
units='s',
),
69: Field(
name='avg_lap_time',
type=BASE_TYPES[0x86], # uint32
def_num=69,
scale=1000,
units='s',
),
70: Field(
name='best_lap_index',
type=BASE_TYPES[0x84], # uint16
def_num=70,
),
71: Field(
name='min_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=71,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_min_altitude',
def_num=127,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
82: Field(
name='player_score',
type=BASE_TYPES[0x84], # uint16
def_num=82,
),
83: Field(
name='opponent_score',
type=BASE_TYPES[0x84], # uint16
def_num=83,
),
84: Field(
name='opponent_name',
type=BASE_TYPES[0x07], # string
def_num=84,
),
85: Field( # stroke_type enum used as the index
name='stroke_count',
type=BASE_TYPES[0x84], # uint16
def_num=85,
units='counts',
),
86: Field( # zone number used as the index
name='zone_count',
type=BASE_TYPES[0x84], # uint16
def_num=86,
units='counts',
),
87: Field(
name='max_ball_speed',
type=BASE_TYPES[0x84], # uint16
def_num=87,
scale=100,
units='m/s',
),
88: Field(
name='avg_ball_speed',
type=BASE_TYPES[0x84], # uint16
def_num=88,
scale=100,
units='m/s',
),
89: Field(
name='avg_vertical_oscillation',
type=BASE_TYPES[0x84], # uint16
def_num=89,
scale=10,
units='mm',
),
90: Field(
name='avg_stance_time_percent',
type=BASE_TYPES[0x84], # uint16
def_num=90,
scale=100,
units='percent',
),
91: Field(
name='avg_stance_time',
type=BASE_TYPES[0x84], # uint16
def_num=91,
scale=10,
units='ms',
),
92: Field( # fractional part of the avg_cadence
name='avg_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=92,
scale=128,
units='rpm',
),
93: Field( # fractional part of the max_cadence
name='max_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=93,
scale=128,
units='rpm',
),
94: Field( # fractional part of the total_cycles
name='total_fractional_cycles',
type=BASE_TYPES[0x02], # uint8
def_num=94,
scale=128,
units='cycles',
),
95: Field( # Avg saturated and unsaturated hemoglobin
name='avg_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=95,
scale=100,
units='g/dL',
),
96: Field( # Min saturated and unsaturated hemoglobin
name='min_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=96,
scale=100,
units='g/dL',
),
97: Field( # Max saturated and unsaturated hemoglobin
name='max_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=97,
scale=100,
units='g/dL',
),
98: Field( # Avg percentage of hemoglobin saturated with oxygen
name='avg_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=98,
scale=10,
units='%',
),
99: Field( # Min percentage of hemoglobin saturated with oxygen
name='min_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=99,
scale=10,
units='%',
),
100: Field( # Max percentage of hemoglobin saturated with oxygen
name='max_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=100,
scale=10,
units='%',
),
101: Field(
name='avg_left_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=101,
scale=2,
units='percent',
),
102: Field(
name='avg_right_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=102,
scale=2,
units='percent',
),
103: Field(
name='avg_left_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=103,
scale=2,
units='percent',
),
104: Field(
name='avg_right_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=104,
scale=2,
units='percent',
),
105: Field(
name='avg_combined_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=105,
scale=2,
units='percent',
),
111: Field(
name='sport_index',
type=BASE_TYPES[0x02], # uint8
def_num=111,
),
            112: Field( # Total time spent in the standing position
name='time_standing',
type=BASE_TYPES[0x86], # uint32
def_num=112,
scale=1000,
units='s',
),
113: Field( # Number of transitions to the standing state
name='stand_count',
type=BASE_TYPES[0x84], # uint16
def_num=113,
),
114: Field( # Average platform center offset Left
name='avg_left_pco',
type=BASE_TYPES[0x01], # sint8
def_num=114,
units='mm',
),
115: Field( # Average platform center offset Right
name='avg_right_pco',
type=BASE_TYPES[0x01], # sint8
def_num=115,
units='mm',
),
            116: Field( # Average left power phase angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=116,
scale=0.7111111,
units='degrees',
),
117: Field( # Average left power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=117,
scale=0.7111111,
units='degrees',
),
118: Field( # Average right power phase angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=118,
scale=0.7111111,
units='degrees',
),
            119: Field( # Average right power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=119,
scale=0.7111111,
units='degrees',
),
120: Field( # Average power by position. Data value indexes defined by rider_position_type.
name='avg_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=120,
units='watts',
),
121: Field( # Maximum power by position. Data value indexes defined by rider_position_type.
name='max_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=121,
units='watts',
),
122: Field( # Average cadence by position. Data value indexes defined by rider_position_type.
name='avg_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=122,
units='rpm',
),
123: Field( # Maximum cadence by position. Data value indexes defined by rider_position_type.
name='max_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=123,
units='rpm',
),
124: Field( # total_distance / total_timer_time
name='enhanced_avg_speed',
type=BASE_TYPES[0x86], # uint32
def_num=124,
scale=1000,
units='m/s',
),
125: Field(
name='enhanced_max_speed',
type=BASE_TYPES[0x86], # uint32
def_num=125,
scale=1000,
units='m/s',
),
126: Field(
name='enhanced_avg_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=126,
scale=5,
offset=500,
units='m',
),
127: Field(
name='enhanced_min_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=127,
scale=5,
offset=500,
units='m',
),
128: Field(
name='enhanced_max_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=128,
scale=5,
offset=500,
units='m',
),
129: Field( # lev average motor power during session
name='avg_lev_motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=129,
units='watts',
),
130: Field( # lev maximum motor power during session
name='max_lev_motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=130,
units='watts',
),
131: Field( # lev battery consumption during session
name='lev_battery_consumption',
type=BASE_TYPES[0x02], # uint8
def_num=131,
scale=2,
units='percent',
),
132: Field(
name='avg_vertical_ratio',
type=BASE_TYPES[0x84], # uint16
def_num=132,
scale=100,
units='percent',
),
133: Field(
name='avg_stance_time_balance',
type=BASE_TYPES[0x84], # uint16
def_num=133,
scale=100,
units='percent',
),
134: Field(
name='avg_step_length',
type=BASE_TYPES[0x84], # uint16
def_num=134,
scale=10,
units='mm',
),
137: Field(
name='total_anaerobic_training_effect',
type=BASE_TYPES[0x02], # uint8
def_num=137,
scale=10,
),
139: Field(
name='avg_vam',
type=BASE_TYPES[0x84], # uint16
def_num=139,
scale=1000,
units='m/s',
),
            253: FIELD_TYPE_TIMESTAMP, # Session end time.
254: Field( # Selected bit is set for the current session.
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
19: MessageType(
name='lap',
mesg_num=19,
fields={
0: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='start_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='start_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=3,
units='semicircles',
),
4: Field(
name='start_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=4,
units='semicircles',
),
5: Field(
name='end_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=5,
units='semicircles',
),
6: Field(
name='end_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=6,
units='semicircles',
),
7: Field( # Time (includes pauses)
name='total_elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=7,
scale=1000,
units='s',
),
8: Field( # Timer Time (excludes pauses)
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=8,
scale=1000,
units='s',
),
9: Field(
name='total_distance',
type=BASE_TYPES[0x86], # uint32
def_num=9,
scale=100,
units='m',
),
10: Field(
name='total_cycles',
type=BASE_TYPES[0x86], # uint32
def_num=10,
units='cycles',
subfields=(
SubField(
name='total_strides',
def_num=10,
type=BASE_TYPES[0x86], # uint32
units='strides',
ref_fields=(
ReferenceField(
name='sport',
def_num=25,
value='running',
raw_value=1,
),
ReferenceField(
name='sport',
def_num=25,
value='walking',
raw_value=11,
),
),
),
),
),
11: Field(
name='total_calories',
type=BASE_TYPES[0x84], # uint16
def_num=11,
units='kcal',
),
12: Field( # If New Leaf
name='total_fat_calories',
type=BASE_TYPES[0x84], # uint16
def_num=12,
units='kcal',
),
13: Field(
name='avg_speed',
type=BASE_TYPES[0x84], # uint16
def_num=13,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_avg_speed',
def_num=110,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
14: Field(
name='max_speed',
type=BASE_TYPES[0x84], # uint16
def_num=14,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_max_speed',
def_num=111,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
15: Field(
name='avg_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=15,
units='bpm',
),
16: Field(
name='max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=16,
units='bpm',
),
17: Field( # total_cycles / total_timer_time if non_zero_avg_cadence otherwise total_cycles / total_elapsed_time
name='avg_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=17,
units='rpm',
subfields=(
SubField(
name='avg_running_cadence',
def_num=17,
type=BASE_TYPES[0x02], # uint8
units='strides/min',
ref_fields=(
ReferenceField(
name='sport',
def_num=25,
value='running',
raw_value=1,
),
),
),
),
),
18: Field(
name='max_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=18,
units='rpm',
subfields=(
SubField(
name='max_running_cadence',
def_num=18,
type=BASE_TYPES[0x02], # uint8
units='strides/min',
ref_fields=(
ReferenceField(
name='sport',
def_num=25,
value='running',
raw_value=1,
),
),
),
),
),
19: Field( # total_power / total_timer_time if non_zero_avg_power otherwise total_power / total_elapsed_time
name='avg_power',
type=BASE_TYPES[0x84], # uint16
def_num=19,
units='watts',
),
20: Field(
name='max_power',
type=BASE_TYPES[0x84], # uint16
def_num=20,
units='watts',
),
21: Field(
name='total_ascent',
type=BASE_TYPES[0x84], # uint16
def_num=21,
units='m',
),
22: Field(
name='total_descent',
type=BASE_TYPES[0x84], # uint16
def_num=22,
units='m',
),
23: Field(
name='intensity',
type=FIELD_TYPES['intensity'],
def_num=23,
),
24: Field(
name='lap_trigger',
type=FIELD_TYPES['lap_trigger'],
def_num=24,
),
25: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=25,
),
26: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=26,
),
32: Field( # # of lengths of swim pool
name='num_lengths',
type=BASE_TYPES[0x84], # uint16
def_num=32,
units='lengths',
),
33: Field(
name='normalized_power',
type=BASE_TYPES[0x84], # uint16
def_num=33,
units='watts',
),
34: Field(
name='left_right_balance',
type=FIELD_TYPES['left_right_balance_100'],
def_num=34,
),
35: Field(
name='first_length_index',
type=BASE_TYPES[0x84], # uint16
def_num=35,
),
37: Field(
name='avg_stroke_distance',
type=BASE_TYPES[0x84], # uint16
def_num=37,
scale=100,
units='m',
),
38: Field(
name='swim_stroke',
type=FIELD_TYPES['swim_stroke'],
def_num=38,
),
39: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=39,
),
40: Field( # # of active lengths of swim pool
name='num_active_lengths',
type=BASE_TYPES[0x84], # uint16
def_num=40,
units='lengths',
),
41: Field(
name='total_work',
type=BASE_TYPES[0x86], # uint32
def_num=41,
units='J',
),
42: Field(
name='avg_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=42,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_avg_altitude',
def_num=112,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
43: Field(
name='max_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=43,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_max_altitude',
def_num=114,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
44: Field(
name='gps_accuracy',
type=BASE_TYPES[0x02], # uint8
def_num=44,
units='m',
),
45: Field(
name='avg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=45,
scale=100,
units='%',
),
46: Field(
name='avg_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=46,
scale=100,
units='%',
),
47: Field(
name='avg_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=47,
scale=100,
units='%',
),
48: Field(
name='max_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=48,
scale=100,
units='%',
),
49: Field(
name='max_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=49,
scale=100,
units='%',
),
50: Field(
name='avg_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=50,
units='C',
),
51: Field(
name='max_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=51,
units='C',
),
52: Field(
name='total_moving_time',
type=BASE_TYPES[0x86], # uint32
def_num=52,
scale=1000,
units='s',
),
53: Field(
name='avg_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=53,
scale=1000,
units='m/s',
),
54: Field(
name='avg_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=54,
scale=1000,
units='m/s',
),
55: Field(
name='max_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=55,
scale=1000,
units='m/s',
),
56: Field(
name='max_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=56,
scale=1000,
units='m/s',
),
57: Field(
name='time_in_hr_zone',
type=BASE_TYPES[0x86], # uint32
def_num=57,
scale=1000,
units='s',
),
58: Field(
name='time_in_speed_zone',
type=BASE_TYPES[0x86], # uint32
def_num=58,
scale=1000,
units='s',
),
59: Field(
name='time_in_cadence_zone',
type=BASE_TYPES[0x86], # uint32
def_num=59,
scale=1000,
units='s',
),
60: Field(
name='time_in_power_zone',
type=BASE_TYPES[0x86], # uint32
def_num=60,
scale=1000,
units='s',
),
61: Field(
name='repetition_num',
type=BASE_TYPES[0x84], # uint16
def_num=61,
),
62: Field(
name='min_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=62,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_min_altitude',
def_num=113,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
63: Field(
name='min_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=63,
units='bpm',
),
71: Field(
name='wkt_step_index',
type=FIELD_TYPES['message_index'],
def_num=71,
),
74: Field(
name='opponent_score',
type=BASE_TYPES[0x84], # uint16
def_num=74,
),
75: Field( # stroke_type enum used as the index
name='stroke_count',
type=BASE_TYPES[0x84], # uint16
def_num=75,
units='counts',
),
76: Field( # zone number used as the index
name='zone_count',
type=BASE_TYPES[0x84], # uint16
def_num=76,
units='counts',
),
77: Field(
name='avg_vertical_oscillation',
type=BASE_TYPES[0x84], # uint16
def_num=77,
scale=10,
units='mm',
),
78: Field(
name='avg_stance_time_percent',
type=BASE_TYPES[0x84], # uint16
def_num=78,
scale=100,
units='percent',
),
79: Field(
name='avg_stance_time',
type=BASE_TYPES[0x84], # uint16
def_num=79,
scale=10,
units='ms',
),
80: Field( # fractional part of the avg_cadence
name='avg_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=80,
scale=128,
units='rpm',
),
81: Field( # fractional part of the max_cadence
name='max_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=81,
scale=128,
units='rpm',
),
82: Field( # fractional part of the total_cycles
name='total_fractional_cycles',
type=BASE_TYPES[0x02], # uint8
def_num=82,
scale=128,
units='cycles',
),
83: Field(
name='player_score',
type=BASE_TYPES[0x84], # uint16
def_num=83,
),
84: Field( # Avg saturated and unsaturated hemoglobin
name='avg_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=84,
scale=100,
units='g/dL',
),
85: Field( # Min saturated and unsaturated hemoglobin
name='min_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=85,
scale=100,
units='g/dL',
),
86: Field( # Max saturated and unsaturated hemoglobin
name='max_total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=86,
scale=100,
units='g/dL',
),
87: Field( # Avg percentage of hemoglobin saturated with oxygen
name='avg_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=87,
scale=10,
units='%',
),
88: Field( # Min percentage of hemoglobin saturated with oxygen
name='min_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=88,
scale=10,
units='%',
),
89: Field( # Max percentage of hemoglobin saturated with oxygen
name='max_saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=89,
scale=10,
units='%',
),
91: Field(
name='avg_left_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=91,
scale=2,
units='percent',
),
92: Field(
name='avg_right_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=92,
scale=2,
units='percent',
),
93: Field(
name='avg_left_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=93,
scale=2,
units='percent',
),
94: Field(
name='avg_right_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=94,
scale=2,
units='percent',
),
95: Field(
name='avg_combined_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=95,
scale=2,
units='percent',
),
98: Field( # Total time spent in the standing position
name='time_standing',
type=BASE_TYPES[0x86], # uint32
def_num=98,
scale=1000,
units='s',
),
99: Field( # Number of transitions to the standing state
name='stand_count',
type=BASE_TYPES[0x84], # uint16
def_num=99,
),
100: Field( # Average left platform center offset
name='avg_left_pco',
type=BASE_TYPES[0x01], # sint8
def_num=100,
units='mm',
),
101: Field( # Average right platform center offset
name='avg_right_pco',
type=BASE_TYPES[0x01], # sint8
def_num=101,
units='mm',
),
102: Field( # Average left power phase angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=102,
scale=0.7111111,
units='degrees',
),
103: Field( # Average left power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=103,
scale=0.7111111,
units='degrees',
),
104: Field( # Average right power phase angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=104,
scale=0.7111111,
units='degrees',
),
105: Field( # Average right power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=105,
scale=0.7111111,
units='degrees',
),
106: Field( # Average power by position. Data value indexes defined by rider_position_type.
name='avg_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=106,
units='watts',
),
107: Field( # Maximum power by position. Data value indexes defined by rider_position_type.
name='max_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=107,
units='watts',
),
108: Field( # Average cadence by position. Data value indexes defined by rider_position_type.
name='avg_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=108,
units='rpm',
),
109: Field( # Maximum cadence by position. Data value indexes defined by rider_position_type.
name='max_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=109,
units='rpm',
),
110: Field(
name='enhanced_avg_speed',
type=BASE_TYPES[0x86], # uint32
def_num=110,
scale=1000,
units='m/s',
),
111: Field(
name='enhanced_max_speed',
type=BASE_TYPES[0x86], # uint32
def_num=111,
scale=1000,
units='m/s',
),
112: Field(
name='enhanced_avg_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=112,
scale=5,
offset=500,
units='m',
),
113: Field(
name='enhanced_min_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=113,
scale=5,
offset=500,
units='m',
),
114: Field(
name='enhanced_max_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=114,
scale=5,
offset=500,
units='m',
),
115: Field( # lev average motor power during lap
name='avg_lev_motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=115,
units='watts',
),
116: Field( # lev maximum motor power during lap
name='max_lev_motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=116,
units='watts',
),
117: Field( # lev battery consumption during lap
name='lev_battery_consumption',
type=BASE_TYPES[0x02], # uint8
def_num=117,
scale=2,
units='percent',
),
118: Field(
name='avg_vertical_ratio',
type=BASE_TYPES[0x84], # uint16
def_num=118,
scale=100,
units='percent',
),
119: Field(
name='avg_stance_time_balance',
type=BASE_TYPES[0x84], # uint16
def_num=119,
scale=100,
units='percent',
),
120: Field(
name='avg_step_length',
type=BASE_TYPES[0x84], # uint16
def_num=120,
scale=10,
units='mm',
),
121: Field(
name='avg_vam',
type=BASE_TYPES[0x84], # uint16
def_num=121,
scale=1000,
units='m/s',
),
253: FIELD_TYPE_TIMESTAMP, # Lap end time.
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
20: MessageType(
name='record',
mesg_num=20,
fields={
0: Field(
name='position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=0,
units='semicircles',
),
1: Field(
name='position_long',
type=BASE_TYPES[0x85], # sint32
def_num=1,
units='semicircles',
),
2: Field(
name='altitude',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=5,
offset=500,
units='m',
components=(
ComponentField(
name='enhanced_altitude',
def_num=78,
scale=5,
offset=500,
units='m',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
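            # Scale/offset decoding note: fields like altitude above store a
            # scaled unsigned integer; the physical value is obtained as
            # raw / scale - offset.  Sketch:
            #
            #   raw = 3153
            #   altitude_m = raw / 5 - 500      # -> 130.6 m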
3: Field(
name='heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=3,
units='bpm',
),
4: Field(
name='cadence',
type=BASE_TYPES[0x02], # uint8
def_num=4,
units='rpm',
),
5: Field(
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=5,
scale=100,
units='m',
),
6: Field(
name='speed',
type=BASE_TYPES[0x84], # uint16
def_num=6,
scale=1000,
units='m/s',
components=(
ComponentField(
name='enhanced_speed',
def_num=73,
scale=1000,
units='m/s',
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
7: Field(
name='power',
type=BASE_TYPES[0x84], # uint16
def_num=7,
units='watts',
),
8: Field(
name='compressed_speed_distance',
type=BASE_TYPES[0x0D], # byte
def_num=8,
components=(
ComponentField(
name='speed',
def_num=6,
scale=100,
units='m/s',
accumulate=False,
bits=12,
bit_offset=0,
),
ComponentField(
name='distance',
def_num=5,
scale=16,
units='m',
accumulate=True,
bits=12,
bit_offset=12,
),
),
),
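            # compressed_speed_distance above packs two 12-bit components into
            # its 3-byte value: bits 0-11 are instantaneous speed (scale 100,
            # m/s) and bits 12-23 are a distance counter (scale 16, m) that
            # accumulates across records because accumulate=True.  Rough
            # unpacking sketch (assumes the raw bytes are assembled
            # least-significant byte first):
            #
            #   word = b[0] | (b[1] << 8) | (b[2] << 16)
            #   speed_ms   = (word & 0xFFF) / 100.0
            #   dist_ticks = (word >> 12) & 0xFFF     # each tick is 1/16 m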
9: Field(
name='grade',
type=BASE_TYPES[0x83], # sint16
def_num=9,
scale=100,
units='%',
),
            10: Field( # Relative. 0 is none, 254 is max.
name='resistance',
type=BASE_TYPES[0x02], # uint8
def_num=10,
),
11: Field(
name='time_from_course',
type=BASE_TYPES[0x85], # sint32
def_num=11,
scale=1000,
units='s',
),
12: Field(
name='cycle_length',
type=BASE_TYPES[0x02], # uint8
def_num=12,
scale=100,
units='m',
),
13: Field(
name='temperature',
type=BASE_TYPES[0x01], # sint8
def_num=13,
units='C',
),
17: Field( # Speed at 1s intervals. Timestamp field indicates time of last array element.
name='speed_1s',
type=BASE_TYPES[0x02], # uint8
def_num=17,
scale=16,
units='m/s',
),
18: Field(
name='cycles',
type=BASE_TYPES[0x02], # uint8
def_num=18,
components=(
ComponentField(
name='total_cycles',
def_num=19,
units='cycles',
accumulate=True,
bits=8,
bit_offset=0,
),
),
),
19: Field(
name='total_cycles',
type=BASE_TYPES[0x86], # uint32
def_num=19,
units='cycles',
),
28: Field(
name='compressed_accumulated_power',
type=BASE_TYPES[0x84], # uint16
def_num=28,
components=(
ComponentField(
name='accumulated_power',
def_num=29,
units='watts',
accumulate=True,
bits=16,
bit_offset=0,
),
),
),
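            # Fields 18 ('cycles') and 28 above are rollover counters: their
            # components set accumulate=True, so a decoder keeps a running
            # total and adds the delta between successive raw values, handling
            # wrap-around at 2**bits.  Sketch of the usual accumulator step
            # (hypothetical names):
            #
            #   delta = (new_raw - last_raw) % (1 << bits)
            #   running_total += delta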
29: Field(
name='accumulated_power',
type=BASE_TYPES[0x86], # uint32
def_num=29,
units='watts',
),
30: Field(
name='left_right_balance',
type=FIELD_TYPES['left_right_balance'],
def_num=30,
),
31: Field(
name='gps_accuracy',
type=BASE_TYPES[0x02], # uint8
def_num=31,
units='m',
),
32: Field(
name='vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=32,
scale=1000,
units='m/s',
),
33: Field(
name='calories',
type=BASE_TYPES[0x84], # uint16
def_num=33,
units='kcal',
),
39: Field(
name='vertical_oscillation',
type=BASE_TYPES[0x84], # uint16
def_num=39,
scale=10,
units='mm',
),
40: Field(
name='stance_time_percent',
type=BASE_TYPES[0x84], # uint16
def_num=40,
scale=100,
units='percent',
),
41: Field(
name='stance_time',
type=BASE_TYPES[0x84], # uint16
def_num=41,
scale=10,
units='ms',
),
42: Field(
name='activity_type',
type=FIELD_TYPES['activity_type'],
def_num=42,
),
43: Field(
name='left_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=43,
scale=2,
units='percent',
),
44: Field(
name='right_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=44,
scale=2,
units='percent',
),
45: Field(
name='left_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=45,
scale=2,
units='percent',
),
46: Field(
name='right_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=46,
scale=2,
units='percent',
),
47: Field(
name='combined_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=47,
scale=2,
units='percent',
),
48: Field(
name='time128',
type=BASE_TYPES[0x02], # uint8
def_num=48,
scale=128,
units='s',
),
49: Field(
name='stroke_type',
type=FIELD_TYPES['stroke_type'],
def_num=49,
),
50: Field(
name='zone',
type=BASE_TYPES[0x02], # uint8
def_num=50,
),
51: Field(
name='ball_speed',
type=BASE_TYPES[0x84], # uint16
def_num=51,
scale=100,
units='m/s',
),
            52: Field( # Log cadence and fractional cadence for backwards compatibility
name='cadence256',
type=BASE_TYPES[0x84], # uint16
def_num=52,
scale=256,
units='rpm',
),
53: Field(
name='fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=53,
scale=128,
units='rpm',
),
54: Field( # Total saturated and unsaturated hemoglobin
name='total_hemoglobin_conc',
type=BASE_TYPES[0x84], # uint16
def_num=54,
scale=100,
units='g/dL',
),
55: Field( # Min saturated and unsaturated hemoglobin
name='total_hemoglobin_conc_min',
type=BASE_TYPES[0x84], # uint16
def_num=55,
scale=100,
units='g/dL',
),
56: Field( # Max saturated and unsaturated hemoglobin
name='total_hemoglobin_conc_max',
type=BASE_TYPES[0x84], # uint16
def_num=56,
scale=100,
units='g/dL',
),
57: Field( # Percentage of hemoglobin saturated with oxygen
name='saturated_hemoglobin_percent',
type=BASE_TYPES[0x84], # uint16
def_num=57,
scale=10,
units='%',
),
58: Field( # Min percentage of hemoglobin saturated with oxygen
name='saturated_hemoglobin_percent_min',
type=BASE_TYPES[0x84], # uint16
def_num=58,
scale=10,
units='%',
),
59: Field( # Max percentage of hemoglobin saturated with oxygen
name='saturated_hemoglobin_percent_max',
type=BASE_TYPES[0x84], # uint16
def_num=59,
scale=10,
units='%',
),
62: Field(
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=62,
),
67: Field( # Left platform center offset
name='left_pco',
type=BASE_TYPES[0x01], # sint8
def_num=67,
units='mm',
),
68: Field( # Right platform center offset
name='right_pco',
type=BASE_TYPES[0x01], # sint8
def_num=68,
units='mm',
),
69: Field( # Left power phase angles. Data value indexes defined by power_phase_type.
name='left_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=69,
scale=0.7111111,
units='degrees',
),
70: Field( # Left power phase peak angles. Data value indexes defined by power_phase_type.
name='left_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=70,
scale=0.7111111,
units='degrees',
),
71: Field( # Right power phase angles. Data value indexes defined by power_phase_type.
name='right_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=71,
scale=0.7111111,
units='degrees',
),
72: Field( # Right power phase peak angles. Data value indexes defined by power_phase_type.
name='right_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=72,
scale=0.7111111,
units='degrees',
),
73: Field(
name='enhanced_speed',
type=BASE_TYPES[0x86], # uint32
def_num=73,
scale=1000,
units='m/s',
),
78: Field(
name='enhanced_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=78,
scale=5,
offset=500,
units='m',
),
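# Decoding note: scale/offset describe how a decoder converts the raw on-wire
# integer into engineering units, value = raw / scale - offset. For
# enhanced_altitude above, a raw reading of 2500 decodes to 2500 / 5 - 500 = 0 m.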
81: Field( # LEV battery state of charge
name='battery_soc',
type=BASE_TYPES[0x02], # uint8
def_num=81,
scale=2,
units='percent',
),
82: Field( # LEV motor power
name='motor_power',
type=BASE_TYPES[0x84], # uint16
def_num=82,
units='watts',
),
83: Field(
name='vertical_ratio',
type=BASE_TYPES[0x84], # uint16
def_num=83,
scale=100,
units='percent',
),
84: Field(
name='stance_time_balance',
type=BASE_TYPES[0x84], # uint16
def_num=84,
scale=100,
units='percent',
),
85: Field(
name='step_length',
type=BASE_TYPES[0x84], # uint16
def_num=85,
scale=10,
units='mm',
),
253: FIELD_TYPE_TIMESTAMP,
},
),
21: MessageType(
name='event',
mesg_num=21,
fields={
0: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='data16',
type=BASE_TYPES[0x84], # uint16
def_num=2,
components=(
ComponentField(
name='data',
def_num=3,
accumulate=False,
bits=16,
bit_offset=0,
),
),
),
3: Field(
name='data',
type=BASE_TYPES[0x86], # uint32
def_num=3,
subfields=(
SubField(
name='battery_level',
def_num=3,
type=BASE_TYPES[0x84], # uint16
scale=1000,
units='V',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='battery',
raw_value=11,
),
),
),
SubField(
name='cad_high_alert',
def_num=3,
type=BASE_TYPES[0x84], # uint16
units='rpm',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='cad_high_alert',
raw_value=17,
),
),
),
SubField(
name='cad_low_alert',
def_num=3,
type=BASE_TYPES[0x84], # uint16
units='rpm',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='cad_low_alert',
raw_value=18,
),
),
),
SubField(
name='calorie_duration_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
units='calories',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='calorie_duration_alert',
raw_value=25,
),
),
),
SubField(
name='comm_timeout',
def_num=3,
type=FIELD_TYPES['comm_timeout_type'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='comm_timeout',
raw_value=47,
),
),
),
SubField(
name='course_point_index',
def_num=3,
type=FIELD_TYPES['message_index'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='course_point',
raw_value=10,
),
),
),
SubField(
name='distance_duration_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=100,
units='m',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='distance_duration_alert',
raw_value=24,
),
),
),
SubField(
name='fitness_equipment_state',
def_num=3,
type=FIELD_TYPES['fitness_equipment_state'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='fitness_equipment',
raw_value=27,
),
),
),
SubField(
name='gear_change_data',
def_num=3,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='front_gear_change',
raw_value=42,
),
ReferenceField(
name='event',
def_num=0,
value='rear_gear_change',
raw_value=43,
),
),
components=(
ComponentField(
name='rear_gear_num',
def_num=11,
accumulate=False,
bits=8,
bit_offset=0,
),
ComponentField(
name='rear_gear',
def_num=12,
accumulate=False,
bits=8,
bit_offset=8,
),
ComponentField(
name='front_gear_num',
def_num=9,
accumulate=False,
bits=8,
bit_offset=16,
),
ComponentField(
name='front_gear',
def_num=10,
accumulate=False,
bits=8,
bit_offset=24,
),
),
),
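# gear_change_data packs four 8-bit values into one uint32; the bit_offset of
# each ComponentField selects which byte the decoder extracts, e.g. bits 0-7
# become rear_gear_num and bits 24-31 become front_gear.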
SubField(
name='hr_high_alert',
def_num=3,
type=BASE_TYPES[0x02], # uint8
units='bpm',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='hr_high_alert',
raw_value=13,
),
),
),
SubField(
name='hr_low_alert',
def_num=3,
type=BASE_TYPES[0x02], # uint8
units='bpm',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='hr_low_alert',
raw_value=14,
),
),
),
SubField(
name='power_high_alert',
def_num=3,
type=BASE_TYPES[0x84], # uint16
units='watts',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='power_high_alert',
raw_value=19,
),
),
),
SubField(
name='power_low_alert',
def_num=3,
type=BASE_TYPES[0x84], # uint16
units='watts',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='power_low_alert',
raw_value=20,
),
),
),
SubField( # Indicates the rider position value.
name='rider_position',
def_num=3,
type=FIELD_TYPES['rider_position_type'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='rider_position_change',
raw_value=44,
),
),
),
SubField(
name='speed_high_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='speed_high_alert',
raw_value=15,
),
),
),
SubField(
name='speed_low_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='speed_low_alert',
raw_value=16,
),
),
),
SubField(
name='sport_point',
def_num=3,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='sport_point',
raw_value=33,
),
),
components=(
ComponentField(
name='score',
def_num=7,
accumulate=False,
bits=16,
bit_offset=0,
),
ComponentField(
name='opponent_score',
def_num=8,
accumulate=False,
bits=16,
bit_offset=16,
),
),
),
SubField(
name='time_duration_alert',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='s',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='time_duration_alert',
raw_value=23,
),
),
),
SubField(
name='timer_trigger',
def_num=3,
type=FIELD_TYPES['timer_trigger'],
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='timer',
raw_value=0,
),
),
),
SubField(
name='virtual_partner_speed',
def_num=3,
type=BASE_TYPES[0x84], # uint16
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='event',
def_num=0,
value='virtual_partner_pace',
raw_value=12,
),
),
),
),
),
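# Subfield resolution: when decoding field 3 ('data'), the decoder compares the
# record's 'event' value (def_num=0) against each SubField's ReferenceField and,
# on a match, applies that SubField's type/scale/units instead of the generic
# uint32 definition (e.g. event == 'hr_high_alert' renders the data as bpm).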
4: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=4,
),
7: Field( # Do not populate directly. Autogenerated by decoder for sport_point subfield components
name='score',
type=BASE_TYPES[0x84], # uint16
def_num=7,
),
8: Field( # Do not populate directly. Autogenerated by decoder for sport_point subfield components
name='opponent_score',
type=BASE_TYPES[0x84], # uint16
def_num=8,
),
9: Field( # Do not populate directly. Autogenerated by decoder for gear_change subfield components. Front gear number. 1 is innermost.
name='front_gear_num',
type=BASE_TYPES[0x0A], # uint8z
def_num=9,
),
10: Field( # Do not populate directly. Autogenerated by decoder for gear_change subfield components. Number of front teeth.
name='front_gear',
type=BASE_TYPES[0x0A], # uint8z
def_num=10,
),
11: Field( # Do not populate directly. Autogenerated by decoder for gear_change subfield components. Rear gear number. 1 is innermost.
name='rear_gear_num',
type=BASE_TYPES[0x0A], # uint8z
def_num=11,
),
12: Field( # Do not populate directly. Autogenerated by decoder for gear_change subfield components. Number of rear teeth.
name='rear_gear',
type=BASE_TYPES[0x0A], # uint8z
def_num=12,
),
13: Field(
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=13,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
23: MessageType(
name='device_info',
mesg_num=23,
fields={
0: Field(
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=0,
),
1: Field(
name='device_type',
type=FIELD_TYPES['antplus_device_type'], # uint8
def_num=1,
subfields=(
SubField(
name='ant_device_type',
def_num=1,
type=BASE_TYPES[0x02], # uint8
ref_fields=(
ReferenceField(
name='source_type',
def_num=25,
value='ant',
raw_value=0,
),
),
),
SubField(
name='antplus_device_type',
def_num=1,
type=FIELD_TYPES['antplus_device_type'],
ref_fields=(
ReferenceField(
name='source_type',
def_num=25,
value='antplus',
raw_value=1,
),
),
),
),
),
2: Field(
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=2,
),
3: Field(
name='serial_number',
type=BASE_TYPES[0x8C], # uint32z
def_num=3,
),
4: Field(
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=4,
subfields=(
SubField(
name='garmin_product',
def_num=4,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=2,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=2,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=2,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
5: Field(
name='software_version',
type=BASE_TYPES[0x84], # uint16
def_num=5,
scale=100,
),
6: Field(
name='hardware_version',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
7: Field( # Reset by new battery or charge.
name='cum_operating_time',
type=BASE_TYPES[0x86], # uint32
def_num=7,
units='s',
),
10: Field(
name='battery_voltage',
type=BASE_TYPES[0x84], # uint16
def_num=10,
scale=256,
units='V',
),
11: Field(
name='battery_status',
type=FIELD_TYPES['battery_status'],
def_num=11,
),
18: Field( # Indicates the location of the sensor
name='sensor_position',
type=FIELD_TYPES['body_location'],
def_num=18,
),
19: Field( # Used to describe the sensor or location
name='descriptor',
type=BASE_TYPES[0x07], # string
def_num=19,
),
20: Field(
name='ant_transmission_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=20,
),
21: Field(
name='ant_device_number',
type=BASE_TYPES[0x8B], # uint16z
def_num=21,
),
22: Field(
name='ant_network',
type=FIELD_TYPES['ant_network'],
def_num=22,
),
25: Field(
name='source_type',
type=FIELD_TYPES['source_type'],
def_num=25,
),
27: Field( # Optional free form string to indicate the device's name or model
name='product_name',
type=BASE_TYPES[0x07], # string
def_num=27,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
27: MessageType(
name='workout_step',
mesg_num=27,
fields={
0: Field(
name='wkt_step_name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field(
name='duration_type',
type=FIELD_TYPES['wkt_step_duration'],
def_num=1,
),
2: Field(
name='duration_value',
type=BASE_TYPES[0x86], # uint32
def_num=2,
subfields=(
SubField(
name='duration_calories',
def_num=2,
type=BASE_TYPES[0x86], # uint32
units='calories',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='calories',
raw_value=4,
),
),
),
SubField(
name='duration_distance',
def_num=2,
type=BASE_TYPES[0x86], # uint32
scale=100,
units='m',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='distance',
raw_value=1,
),
),
),
SubField(
name='duration_hr',
def_num=2,
type=FIELD_TYPES['workout_hr'],
units='% or bpm',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='hr_less_than',
raw_value=2,
),
ReferenceField(
name='duration_type',
def_num=1,
value='hr_greater_than',
raw_value=3,
),
),
),
SubField(
name='duration_power',
def_num=2,
type=FIELD_TYPES['workout_power'],
units='% or watts',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='power_less_than',
raw_value=14,
),
ReferenceField(
name='duration_type',
def_num=1,
value='power_greater_than',
raw_value=15,
),
),
),
SubField( # message_index of step to loop back to. Steps are assumed to be in order by message_index. custom_name and intensity members are undefined for this duration type.
name='duration_step',
def_num=2,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_steps_cmplt',
raw_value=6,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_time',
raw_value=7,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_distance',
raw_value=8,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_calories',
raw_value=9,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_hr_less_than',
raw_value=10,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_hr_greater_than',
raw_value=11,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_power_less_than',
raw_value=12,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_power_greater_than',
raw_value=13,
),
),
),
SubField(
name='duration_time',
def_num=2,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='s',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='time',
raw_value=0,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repetition_time',
raw_value=28,
),
),
),
),
),
3: Field(
name='target_type',
type=FIELD_TYPES['wkt_step_target'],
def_num=3,
),
4: Field(
name='target_value',
type=BASE_TYPES[0x86], # uint32
def_num=4,
subfields=(
SubField(
name='repeat_calories',
def_num=4,
type=BASE_TYPES[0x86], # uint32
units='calories',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_calories',
raw_value=9,
),
),
),
SubField(
name='repeat_distance',
def_num=4,
type=BASE_TYPES[0x86], # uint32
scale=100,
units='m',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_distance',
raw_value=8,
),
),
),
SubField(
name='repeat_hr',
def_num=4,
type=FIELD_TYPES['workout_hr'],
units='% or bpm',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_hr_less_than',
raw_value=10,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_hr_greater_than',
raw_value=11,
),
),
),
SubField(
name='repeat_power',
def_num=4,
type=FIELD_TYPES['workout_power'],
units='% or watts',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_power_less_than',
raw_value=12,
),
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_power_greater_than',
raw_value=13,
),
),
),
SubField( # # of repetitions
name='repeat_steps',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_steps_cmplt',
raw_value=6,
),
),
),
SubField(
name='repeat_time',
def_num=4,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='s',
ref_fields=(
ReferenceField(
name='duration_type',
def_num=1,
value='repeat_until_time',
raw_value=7,
),
),
),
SubField( # Zone (1-?); Custom = 0;
name='target_cadence_zone',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='cadence',
raw_value=3,
),
),
),
SubField( # HR zone (1-5); Custom = 0;
name='target_hr_zone',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='heart_rate',
raw_value=1,
),
),
),
SubField( # Power Zone (1-7); Custom = 0;
name='target_power_zone',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='power',
raw_value=4,
),
),
),
SubField( # Speed zone (1-10); Custom = 0;
name='target_speed_zone',
def_num=4,
type=BASE_TYPES[0x86], # uint32
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='speed',
raw_value=0,
),
),
),
SubField(
name='target_stroke_type',
def_num=4,
type=FIELD_TYPES['swim_stroke'],
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='swim_stroke',
raw_value=11,
),
),
),
),
),
5: Field(
name='custom_target_value_low',
type=BASE_TYPES[0x86], # uint32
def_num=5,
subfields=(
SubField(
name='custom_target_cadence_low',
def_num=5,
type=BASE_TYPES[0x86], # uint32
units='rpm',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='cadence',
raw_value=3,
),
),
),
SubField(
name='custom_target_heart_rate_low',
def_num=5,
type=FIELD_TYPES['workout_hr'],
units='% or bpm',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='heart_rate',
raw_value=1,
),
),
),
SubField(
name='custom_target_power_low',
def_num=5,
type=FIELD_TYPES['workout_power'],
units='% or watts',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='power',
raw_value=4,
),
),
),
SubField(
name='custom_target_speed_low',
def_num=5,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='speed',
raw_value=0,
),
),
),
),
),
6: Field(
name='custom_target_value_high',
type=BASE_TYPES[0x86], # uint32
def_num=6,
subfields=(
SubField(
name='custom_target_cadence_high',
def_num=6,
type=BASE_TYPES[0x86], # uint32
units='rpm',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='cadence',
raw_value=3,
),
),
),
SubField(
name='custom_target_heart_rate_high',
def_num=6,
type=FIELD_TYPES['workout_hr'],
units='% or bpm',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='heart_rate',
raw_value=1,
),
),
),
SubField(
name='custom_target_power_high',
def_num=6,
type=FIELD_TYPES['workout_power'],
units='% or watts',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='power',
raw_value=4,
),
),
),
SubField(
name='custom_target_speed_high',
def_num=6,
type=BASE_TYPES[0x86], # uint32
scale=1000,
units='m/s',
ref_fields=(
ReferenceField(
name='target_type',
def_num=3,
value='speed',
raw_value=0,
),
),
),
),
),
7: Field(
name='intensity',
type=FIELD_TYPES['intensity'],
def_num=7,
),
8: Field(
name='notes',
type=BASE_TYPES[0x07], # string
def_num=8,
),
9: Field(
name='equipment',
type=FIELD_TYPES['workout_equipment'],
def_num=9,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
32: MessageType(
name='course_point',
mesg_num=32,
fields={
1: Field(
name='timestamp',
type=FIELD_TYPES['date_time'],
def_num=1,
),
2: Field(
name='position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=2,
units='semicircles',
),
3: Field(
name='position_long',
type=BASE_TYPES[0x85], # sint32
def_num=3,
units='semicircles',
),
4: Field(
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=100,
units='m',
),
5: Field(
name='type',
type=FIELD_TYPES['course_point'],
def_num=5,
),
6: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=6,
),
8: Field(
name='favorite',
type=FIELD_TYPES['bool'],
def_num=8,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
37: MessageType(
name='file_capabilities',
mesg_num=37,
fields={
0: Field(
name='type',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='flags',
type=FIELD_TYPES['file_flags'],
def_num=1,
),
2: Field(
name='directory',
type=BASE_TYPES[0x07], # string
def_num=2,
),
3: Field(
name='max_count',
type=BASE_TYPES[0x84], # uint16
def_num=3,
),
4: Field(
name='max_size',
type=BASE_TYPES[0x86], # uint32
def_num=4,
units='bytes',
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
38: MessageType(
name='mesg_capabilities',
mesg_num=38,
fields={
0: Field(
name='file',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='mesg_num',
type=FIELD_TYPES['mesg_num'],
def_num=1,
),
2: Field(
name='count_type',
type=FIELD_TYPES['mesg_count'],
def_num=2,
),
3: Field(
name='count',
type=BASE_TYPES[0x84], # uint16
def_num=3,
subfields=(
SubField(
name='max_per_file',
def_num=3,
type=BASE_TYPES[0x84], # uint16
ref_fields=(
ReferenceField(
name='count_type',
def_num=2,
value='max_per_file',
raw_value=1,
),
),
),
SubField(
name='max_per_file_type',
def_num=3,
type=BASE_TYPES[0x84], # uint16
ref_fields=(
ReferenceField(
name='count_type',
def_num=2,
value='max_per_file_type',
raw_value=2,
),
),
),
SubField(
name='num_per_file',
def_num=3,
type=BASE_TYPES[0x84], # uint16
ref_fields=(
ReferenceField(
name='count_type',
def_num=2,
value='num_per_file',
raw_value=0,
),
),
),
),
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
39: MessageType(
name='field_capabilities',
mesg_num=39,
fields={
0: Field(
name='file',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='mesg_num',
type=FIELD_TYPES['mesg_num'],
def_num=1,
),
2: Field(
name='field_num',
type=BASE_TYPES[0x02], # uint8
def_num=2,
),
3: Field(
name='count',
type=BASE_TYPES[0x84], # uint16
def_num=3,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
49: MessageType(
name='file_creator',
mesg_num=49,
fields={
0: Field(
name='software_version',
type=BASE_TYPES[0x84], # uint16
def_num=0,
),
1: Field(
name='hardware_version',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
},
),
53: MessageType(
name='speed_zone',
mesg_num=53,
fields={
0: Field(
name='high_value',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=1000,
units='m/s',
),
1: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=1,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
55: MessageType(
name='monitoring',
mesg_num=55,
fields={
0: Field( # Associates this data to the device_info message. Not required for files with a single device (sensor).
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=0,
),
1: Field( # Accumulated total calories. Maintained by MonitoringReader for each activity_type. See SDK documentation
name='calories',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='kcal',
),
2: Field( # Accumulated distance. Maintained by MonitoringReader for each activity_type. See SDK documentation.
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=2,
scale=100,
units='m',
),
3: Field( # Accumulated cycles. Maintained by MonitoringReader for each activity_type. See SDK documentation.
name='cycles',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=2,
units='cycles',
subfields=(
SubField(
name='steps',
def_num=3,
type=BASE_TYPES[0x86], # uint32
units='steps',
ref_fields=(
ReferenceField(
name='activity_type',
def_num=5,
value='walking',
raw_value=6,
),
ReferenceField(
name='activity_type',
def_num=5,
value='running',
raw_value=1,
),
),
),
SubField(
name='strokes',
def_num=3,
type=BASE_TYPES[0x86], # uint32
scale=2,
units='strokes',
ref_fields=(
ReferenceField(
name='activity_type',
def_num=5,
value='cycling',
raw_value=2,
),
ReferenceField(
name='activity_type',
def_num=5,
value='swimming',
raw_value=5,
),
),
),
),
),
4: Field(
name='active_time',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=1000,
units='s',
),
5: Field(
name='activity_type',
type=FIELD_TYPES['activity_type'],
def_num=5,
),
6: Field(
name='activity_subtype',
type=FIELD_TYPES['activity_subtype'],
def_num=6,
),
7: Field(
name='activity_level',
type=FIELD_TYPES['activity_level'],
def_num=7,
),
8: Field(
name='distance_16',
type=BASE_TYPES[0x84], # uint16
def_num=8,
units='100*m',
),
9: Field(
name='cycles_16',
type=BASE_TYPES[0x84], # uint16
def_num=9,
units='2*cycles or steps',
),
10: Field(
name='active_time_16',
type=BASE_TYPES[0x84], # uint16
def_num=10,
units='s',
),
11: Field( # Must align to logging interval, for example, time must be 00:00:00 for daily log.
name='local_timestamp',
type=FIELD_TYPES['local_date_time'],
def_num=11,
),
12: Field( # Avg temperature during the logging interval ended at timestamp
name='temperature',
type=BASE_TYPES[0x83], # sint16
def_num=12,
scale=100,
units='C',
),
14: Field( # Min temperature during the logging interval ended at timestamp
name='temperature_min',
type=BASE_TYPES[0x83], # sint16
def_num=14,
scale=100,
units='C',
),
15: Field( # Max temperature during the logging interval ended at timestamp
name='temperature_max',
type=BASE_TYPES[0x83], # sint16
def_num=15,
scale=100,
units='C',
),
16: Field( # Indexed using minute_activity_level enum
name='activity_time',
type=BASE_TYPES[0x84], # uint16
def_num=16,
units='minutes',
),
19: Field(
name='active_calories',
type=BASE_TYPES[0x84], # uint16
def_num=19,
units='kcal',
),
24: Field( # Indicates single type / intensity for duration since last monitoring message.
name='current_activity_type_intensity',
type=BASE_TYPES[0x0D], # byte
def_num=24,
components=(
ComponentField(
name='activity_type',
def_num=5,
accumulate=False,
bits=5,
bit_offset=0,
),
ComponentField(
name='intensity',
def_num=28,
accumulate=False,
bits=3,
bit_offset=5,
),
),
),
25: Field(
name='timestamp_min_8',
type=BASE_TYPES[0x02], # uint8
def_num=25,
units='min',
),
26: Field(
name='timestamp_16',
type=BASE_TYPES[0x84], # uint16
def_num=26,
units='s',
),
27: Field(
name='heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=27,
units='bpm',
),
28: Field(
name='intensity',
type=BASE_TYPES[0x02], # uint8
def_num=28,
scale=10,
),
29: Field(
name='duration_min',
type=BASE_TYPES[0x84], # uint16
def_num=29,
units='min',
),
30: Field(
name='duration',
type=BASE_TYPES[0x86], # uint32
def_num=30,
units='s',
),
31: Field(
name='ascent',
type=BASE_TYPES[0x86], # uint32
def_num=31,
scale=1000,
units='m',
),
32: Field(
name='descent',
type=BASE_TYPES[0x86], # uint32
def_num=32,
scale=1000,
units='m',
),
33: Field(
name='moderate_activity_minutes',
type=BASE_TYPES[0x84], # uint16
def_num=33,
units='minutes',
),
34: Field(
name='vigorous_activity_minutes',
type=BASE_TYPES[0x84], # uint16
def_num=34,
units='minutes',
),
253: FIELD_TYPE_TIMESTAMP, # Must align to logging interval, for example, time must be 00:00:00 for daily log.
},
),
72: MessageType( # Corresponds to file_id of workout or course.
name='training_file',
mesg_num=72,
fields={
0: Field(
name='type',
type=FIELD_TYPES['file'],
def_num=0,
),
1: Field(
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=1,
),
2: Field(
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=2,
subfields=(
SubField(
name='garmin_product',
def_num=2,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=1,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=1,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=1,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
3: Field(
name='serial_number',
type=BASE_TYPES[0x8C], # uint32z
def_num=3,
),
4: Field(
name='time_created',
type=FIELD_TYPES['date_time'],
def_num=4,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
78: MessageType( # Heart rate variability
name='hrv',
mesg_num=78,
fields={
0: Field( # Time between beats
name='time',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=1000,
units='s',
),
},
),
80: MessageType(
name='ant_rx',
mesg_num=80,
fields={
0: Field(
name='fractional_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=32768,
units='s',
),
1: Field(
name='mesg_id',
type=BASE_TYPES[0x0D], # byte
def_num=1,
),
2: Field(
name='mesg_data',
type=BASE_TYPES[0x0D], # byte
def_num=2,
components=(
ComponentField(
name='channel_number',
def_num=3,
accumulate=False,
bits=8,
bit_offset=0,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=8,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=16,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=24,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=32,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=40,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=48,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=56,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=64,
),
),
),
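# mesg_data mirrors a raw ANT message: the first component byte is routed to
# channel_number (def_num=3) and the remaining bytes are emitted one by one
# into the 'data' array field (def_num=4), preserving their original order.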
3: Field(
name='channel_number',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='data',
type=BASE_TYPES[0x0D], # byte
def_num=4,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
81: MessageType(
name='ant_tx',
mesg_num=81,
fields={
0: Field(
name='fractional_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=32768,
units='s',
),
1: Field(
name='mesg_id',
type=BASE_TYPES[0x0D], # byte
def_num=1,
),
2: Field(
name='mesg_data',
type=BASE_TYPES[0x0D], # byte
def_num=2,
components=(
ComponentField(
name='channel_number',
def_num=3,
accumulate=False,
bits=8,
bit_offset=0,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=8,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=16,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=24,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=32,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=40,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=48,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=56,
),
ComponentField(
name='data',
def_num=4,
accumulate=False,
bits=8,
bit_offset=64,
),
),
),
3: Field(
name='channel_number',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='data',
type=BASE_TYPES[0x0D], # byte
def_num=4,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
82: MessageType(
name='ant_channel_id',
mesg_num=82,
fields={
0: Field(
name='channel_number',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field(
name='device_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=1,
),
2: Field(
name='device_number',
type=BASE_TYPES[0x8B], # uint16z
def_num=2,
),
3: Field(
name='transmission_type',
type=BASE_TYPES[0x0A], # uint8z
def_num=3,
),
4: Field(
name='device_index',
type=FIELD_TYPES['device_index'],
def_num=4,
),
},
),
101: MessageType(
name='length',
mesg_num=101,
fields={
0: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='start_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='total_elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=1000,
units='s',
),
4: Field(
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=1000,
units='s',
),
5: Field(
name='total_strokes',
type=BASE_TYPES[0x84], # uint16
def_num=5,
units='strokes',
),
6: Field(
name='avg_speed',
type=BASE_TYPES[0x84], # uint16
def_num=6,
scale=1000,
units='m/s',
),
7: Field(
name='swim_stroke',
type=FIELD_TYPES['swim_stroke'],
def_num=7,
units='swim_stroke',
),
9: Field(
name='avg_swimming_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=9,
units='strokes/min',
),
10: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=10,
),
11: Field(
name='total_calories',
type=BASE_TYPES[0x84], # uint16
def_num=11,
units='kcal',
),
12: Field(
name='length_type',
type=FIELD_TYPES['length_type'],
def_num=12,
),
18: Field(
name='player_score',
type=BASE_TYPES[0x84], # uint16
def_num=18,
),
19: Field(
name='opponent_score',
type=BASE_TYPES[0x84], # uint16
def_num=19,
),
20: Field( # stroke_type enum used as the index
name='stroke_count',
type=BASE_TYPES[0x84], # uint16
def_num=20,
units='counts',
),
21: Field( # zone number used as the index
name='zone_count',
type=BASE_TYPES[0x84], # uint16
def_num=21,
units='counts',
),
253: FIELD_TYPE_TIMESTAMP,
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
106: MessageType(
name='slave_device',
mesg_num=106,
fields={
0: Field(
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=0,
),
1: Field(
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=1,
subfields=(
SubField(
name='garmin_product',
def_num=1,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=0,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=0,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=0,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
},
),
127: MessageType(
name='connectivity',
mesg_num=127,
fields={
0: Field( # Use Bluetooth for connectivity features
name='bluetooth_enabled',
type=FIELD_TYPES['bool'],
def_num=0,
),
1: Field( # Use Bluetooth Low Energy for connectivity features
name='bluetooth_le_enabled',
type=FIELD_TYPES['bool'],
def_num=1,
),
2: Field( # Use ANT for connectivity features
name='ant_enabled',
type=FIELD_TYPES['bool'],
def_num=2,
),
3: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=3,
),
4: Field(
name='live_tracking_enabled',
type=FIELD_TYPES['bool'],
def_num=4,
),
5: Field(
name='weather_conditions_enabled',
type=FIELD_TYPES['bool'],
def_num=5,
),
6: Field(
name='weather_alerts_enabled',
type=FIELD_TYPES['bool'],
def_num=6,
),
7: Field(
name='auto_activity_upload_enabled',
type=FIELD_TYPES['bool'],
def_num=7,
),
8: Field(
name='course_download_enabled',
type=FIELD_TYPES['bool'],
def_num=8,
),
9: Field(
name='workout_download_enabled',
type=FIELD_TYPES['bool'],
def_num=9,
),
10: Field(
name='gps_ephemeris_download_enabled',
type=FIELD_TYPES['bool'],
def_num=10,
),
11: Field(
name='incident_detection_enabled',
type=FIELD_TYPES['bool'],
def_num=11,
),
12: Field(
name='grouptrack_enabled',
type=FIELD_TYPES['bool'],
def_num=12,
),
},
),
128: MessageType(
name='weather_conditions',
mesg_num=128,
fields={
0: Field( # Current or forecast
name='weather_report',
type=FIELD_TYPES['weather_report'],
def_num=0,
),
1: Field(
name='temperature',
type=BASE_TYPES[0x01], # sint8
def_num=1,
units='C',
),
2: Field( # Corresponds to GSC Response weatherIcon field
name='condition',
type=FIELD_TYPES['weather_status'],
def_num=2,
),
3: Field(
name='wind_direction',
type=BASE_TYPES[0x84], # uint16
def_num=3,
units='degrees',
),
4: Field(
name='wind_speed',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=1000,
units='m/s',
),
5: Field( # range 0-100
name='precipitation_probability',
type=BASE_TYPES[0x02], # uint8
def_num=5,
),
6: Field( # Heat Index if GCS heatIdx above or equal to 90F or wind chill if GCS windChill below or equal to 32F
name='temperature_feels_like',
type=BASE_TYPES[0x01], # sint8
def_num=6,
units='C',
),
7: Field(
name='relative_humidity',
type=BASE_TYPES[0x02], # uint8
def_num=7,
),
8: Field( # string corresponding to GCS response location string
name='location',
type=BASE_TYPES[0x07], # string
def_num=8,
),
9: Field(
name='observed_at_time',
type=FIELD_TYPES['date_time'],
def_num=9,
),
10: Field(
name='observed_location_lat',
type=BASE_TYPES[0x85], # sint32
def_num=10,
units='semicircles',
),
11: Field(
name='observed_location_long',
type=BASE_TYPES[0x85], # sint32
def_num=11,
units='semicircles',
),
12: Field(
name='day_of_week',
type=FIELD_TYPES['day_of_week'],
def_num=12,
),
13: Field(
name='high_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=13,
units='C',
),
14: Field(
name='low_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=14,
units='C',
),
253: FIELD_TYPE_TIMESTAMP, # time of update for current conditions, else forecast time
},
),
129: MessageType(
name='weather_alert',
mesg_num=129,
fields={
0: Field( # Unique identifier from GCS report ID string, length is 12
name='report_id',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field( # Time alert was issued
name='issue_time',
type=FIELD_TYPES['date_time'],
def_num=1,
),
2: Field( # Time alert expires
name='expire_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field( # Warning, Watch, Advisory, Statement
name='severity',
type=FIELD_TYPES['weather_severity'],
def_num=3,
),
4: Field( # Tornado, Severe Thunderstorm, etc.
name='type',
type=FIELD_TYPES['weather_severe_type'],
def_num=4,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
131: MessageType(
name='cadence_zone',
mesg_num=131,
fields={
0: Field(
name='high_value',
type=BASE_TYPES[0x02], # uint8
def_num=0,
units='rpm',
),
1: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=1,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
132: MessageType(
name='hr',
mesg_num=132,
fields={
0: Field(
name='fractional_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=32768,
units='s',
),
1: Field(
name='time256',
type=BASE_TYPES[0x02], # uint8
def_num=1,
components=(
ComponentField(
name='fractional_timestamp',
def_num=0,
scale=256,
units='s',
accumulate=False,
bits=8,
bit_offset=0,
),
),
),
6: Field(
name='filtered_bpm',
type=BASE_TYPES[0x02], # uint8
def_num=6,
units='bpm',
),
9: Field(
name='event_timestamp',
type=BASE_TYPES[0x86], # uint32
def_num=9,
scale=1024,
units='s',
),
10: Field(
name='event_timestamp_12',
type=BASE_TYPES[0x0D], # byte
def_num=10,
components=(
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=0,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=12,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=24,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=36,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=48,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=60,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=72,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=84,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=96,
),
ComponentField(
name='event_timestamp',
def_num=9,
scale=1024,
units='s',
accumulate=True,
bits=12,
bit_offset=108,
),
),
),
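# event_timestamp_12 stores up to ten beat events as packed 12-bit values; each
# ComponentField is accumulated into event_timestamp (def_num=9) and divided by
# the 1024 scale, so consecutive heartbeats share one compact byte array.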
253: FIELD_TYPE_TIMESTAMP,
},
),
142: MessageType(
name='segment_lap',
mesg_num=142,
fields={
0: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=0,
),
1: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=1,
),
2: Field(
name='start_time',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='start_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=3,
units='semicircles',
),
4: Field(
name='start_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=4,
units='semicircles',
),
5: Field(
name='end_position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=5,
units='semicircles',
),
6: Field(
name='end_position_long',
type=BASE_TYPES[0x85], # sint32
def_num=6,
units='semicircles',
),
7: Field( # Time (includes pauses)
name='total_elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=7,
scale=1000,
units='s',
),
8: Field( # Timer Time (excludes pauses)
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=8,
scale=1000,
units='s',
),
9: Field(
name='total_distance',
type=BASE_TYPES[0x86], # uint32
def_num=9,
scale=100,
units='m',
),
10: Field(
name='total_cycles',
type=BASE_TYPES[0x86], # uint32
def_num=10,
units='cycles',
subfields=(
SubField(
name='total_strokes',
def_num=10,
type=BASE_TYPES[0x86], # uint32
units='strokes',
ref_fields=(
ReferenceField(
name='sport',
def_num=23,
value='cycling',
raw_value=2,
),
),
),
),
),
11: Field(
name='total_calories',
type=BASE_TYPES[0x84], # uint16
def_num=11,
units='kcal',
),
12: Field( # If New Leaf
name='total_fat_calories',
type=BASE_TYPES[0x84], # uint16
def_num=12,
units='kcal',
),
13: Field(
name='avg_speed',
type=BASE_TYPES[0x84], # uint16
def_num=13,
scale=1000,
units='m/s',
),
14: Field(
name='max_speed',
type=BASE_TYPES[0x84], # uint16
def_num=14,
scale=1000,
units='m/s',
),
15: Field(
name='avg_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=15,
units='bpm',
),
16: Field(
name='max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=16,
units='bpm',
),
17: Field( # total_cycles / total_timer_time if non_zero_avg_cadence otherwise total_cycles / total_elapsed_time
name='avg_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=17,
units='rpm',
),
18: Field(
name='max_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=18,
units='rpm',
),
19: Field( # total_power / total_timer_time if non_zero_avg_power otherwise total_power / total_elapsed_time
name='avg_power',
type=BASE_TYPES[0x84], # uint16
def_num=19,
units='watts',
),
20: Field(
name='max_power',
type=BASE_TYPES[0x84], # uint16
def_num=20,
units='watts',
),
21: Field(
name='total_ascent',
type=BASE_TYPES[0x84], # uint16
def_num=21,
units='m',
),
22: Field(
name='total_descent',
type=BASE_TYPES[0x84], # uint16
def_num=22,
units='m',
),
23: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=23,
),
24: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=24,
),
25: Field( # North east corner latitude.
name='nec_lat',
type=BASE_TYPES[0x85], # sint32
def_num=25,
units='semicircles',
),
26: Field( # North east corner longitude.
name='nec_long',
type=BASE_TYPES[0x85], # sint32
def_num=26,
units='semicircles',
),
27: Field( # South west corner latitude.
name='swc_lat',
type=BASE_TYPES[0x85], # sint32
def_num=27,
units='semicircles',
),
28: Field( # South west corner longitude.
name='swc_long',
type=BASE_TYPES[0x85], # sint32
def_num=28,
units='semicircles',
),
29: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=29,
),
30: Field(
name='normalized_power',
type=BASE_TYPES[0x84], # uint16
def_num=30,
units='watts',
),
31: Field(
name='left_right_balance',
type=FIELD_TYPES['left_right_balance_100'],
def_num=31,
),
32: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=32,
),
33: Field(
name='total_work',
type=BASE_TYPES[0x86], # uint32
def_num=33,
units='J',
),
34: Field(
name='avg_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=34,
scale=5,
offset=500,
units='m',
),
35: Field(
name='max_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=35,
scale=5,
offset=500,
units='m',
),
36: Field(
name='gps_accuracy',
type=BASE_TYPES[0x02], # uint8
def_num=36,
units='m',
),
37: Field(
name='avg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=37,
scale=100,
units='%',
),
38: Field(
name='avg_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=38,
scale=100,
units='%',
),
39: Field(
name='avg_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=39,
scale=100,
units='%',
),
40: Field(
name='max_pos_grade',
type=BASE_TYPES[0x83], # sint16
def_num=40,
scale=100,
units='%',
),
41: Field(
name='max_neg_grade',
type=BASE_TYPES[0x83], # sint16
def_num=41,
scale=100,
units='%',
),
42: Field(
name='avg_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=42,
units='C',
),
43: Field(
name='max_temperature',
type=BASE_TYPES[0x01], # sint8
def_num=43,
units='C',
),
44: Field(
name='total_moving_time',
type=BASE_TYPES[0x86], # uint32
def_num=44,
scale=1000,
units='s',
),
45: Field(
name='avg_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=45,
scale=1000,
units='m/s',
),
46: Field(
name='avg_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=46,
scale=1000,
units='m/s',
),
47: Field(
name='max_pos_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=47,
scale=1000,
units='m/s',
),
48: Field(
name='max_neg_vertical_speed',
type=BASE_TYPES[0x83], # sint16
def_num=48,
scale=1000,
units='m/s',
),
49: Field(
name='time_in_hr_zone',
type=BASE_TYPES[0x86], # uint32
def_num=49,
scale=1000,
units='s',
),
50: Field(
name='time_in_speed_zone',
type=BASE_TYPES[0x86], # uint32
def_num=50,
scale=1000,
units='s',
),
51: Field(
name='time_in_cadence_zone',
type=BASE_TYPES[0x86], # uint32
def_num=51,
scale=1000,
units='s',
),
52: Field(
name='time_in_power_zone',
type=BASE_TYPES[0x86], # uint32
def_num=52,
scale=1000,
units='s',
),
53: Field(
name='repetition_num',
type=BASE_TYPES[0x84], # uint16
def_num=53,
),
54: Field(
name='min_altitude',
type=BASE_TYPES[0x84], # uint16
def_num=54,
scale=5,
offset=500,
units='m',
),
55: Field(
name='min_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=55,
units='bpm',
),
56: Field(
name='active_time',
type=BASE_TYPES[0x86], # uint32
def_num=56,
scale=1000,
units='s',
),
57: Field(
name='wkt_step_index',
type=FIELD_TYPES['message_index'],
def_num=57,
),
58: Field(
name='sport_event',
type=FIELD_TYPES['sport_event'],
def_num=58,
),
59: Field(
name='avg_left_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=59,
scale=2,
units='percent',
),
60: Field(
name='avg_right_torque_effectiveness',
type=BASE_TYPES[0x02], # uint8
def_num=60,
scale=2,
units='percent',
),
61: Field(
name='avg_left_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=61,
scale=2,
units='percent',
),
62: Field(
name='avg_right_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=62,
scale=2,
units='percent',
),
63: Field(
name='avg_combined_pedal_smoothness',
type=BASE_TYPES[0x02], # uint8
def_num=63,
scale=2,
units='percent',
),
64: Field(
name='status',
type=FIELD_TYPES['segment_lap_status'],
def_num=64,
),
65: Field(
name='uuid',
type=BASE_TYPES[0x07], # string
def_num=65,
),
66: Field( # fractional part of the avg_cadence
name='avg_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=66,
scale=128,
units='rpm',
),
67: Field( # fractional part of the max_cadence
name='max_fractional_cadence',
type=BASE_TYPES[0x02], # uint8
def_num=67,
scale=128,
units='rpm',
),
68: Field( # fractional part of the total_cycles
name='total_fractional_cycles',
type=BASE_TYPES[0x02], # uint8
def_num=68,
scale=128,
units='cycles',
),
69: Field(
name='front_gear_shift_count',
type=BASE_TYPES[0x84], # uint16
def_num=69,
),
70: Field(
name='rear_gear_shift_count',
type=BASE_TYPES[0x84], # uint16
def_num=70,
),
71: Field( # Total time spent in the standing position
name='time_standing',
type=BASE_TYPES[0x86], # uint32
def_num=71,
scale=1000,
units='s',
),
72: Field( # Number of transitions to the standing state
name='stand_count',
type=BASE_TYPES[0x84], # uint16
def_num=72,
),
73: Field( # Average left platform center offset
name='avg_left_pco',
type=BASE_TYPES[0x01], # sint8
def_num=73,
units='mm',
),
74: Field( # Average right platform center offset
name='avg_right_pco',
type=BASE_TYPES[0x01], # sint8
def_num=74,
units='mm',
),
75: Field( # Average left power phase angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=75,
scale=0.7111111,
units='degrees',
),
76: Field( # Average left power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_left_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=76,
scale=0.7111111,
units='degrees',
),
77: Field( # Average right power phase angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase',
type=BASE_TYPES[0x02], # uint8
def_num=77,
scale=0.7111111,
units='degrees',
),
78: Field( # Average right power phase peak angles. Data value indexes defined by power_phase_type.
name='avg_right_power_phase_peak',
type=BASE_TYPES[0x02], # uint8
def_num=78,
scale=0.7111111,
units='degrees',
),
79: Field( # Average power by position. Data value indexes defined by rider_position_type.
name='avg_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=79,
units='watts',
),
80: Field( # Maximum power by position. Data value indexes defined by rider_position_type.
name='max_power_position',
type=BASE_TYPES[0x84], # uint16
def_num=80,
units='watts',
),
81: Field( # Average cadence by position. Data value indexes defined by rider_position_type.
name='avg_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=81,
units='rpm',
),
82: Field( # Maximum cadence by position. Data value indexes defined by rider_position_type.
name='max_cadence_position',
type=BASE_TYPES[0x02], # uint8
def_num=82,
units='rpm',
),
83: Field( # Manufacturer that produced the segment
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=83,
),
253: FIELD_TYPE_TIMESTAMP, # Lap end time.
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
149: MessageType( # Unique Identification data for an individual segment leader within a segment file
name='segment_leaderboard_entry',
mesg_num=149,
fields={
0: Field( # Friendly name assigned to leader
name='name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field( # Leader classification
name='type',
type=FIELD_TYPES['segment_leaderboard_type'],
def_num=1,
),
2: Field( # Primary user ID of this leader
name='group_primary_key',
type=BASE_TYPES[0x86], # uint32
def_num=2,
),
3: Field( # ID of the activity associated with this leader time
name='activity_id',
type=BASE_TYPES[0x86], # uint32
def_num=3,
),
4: Field( # Segment Time (includes pauses)
name='segment_time',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=1000,
units='s',
),
5: Field( # String version of the activity_id. 21 characters long, expressed in decimal
name='activity_id_string',
type=BASE_TYPES[0x07], # string
def_num=5,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
150: MessageType( # Navigation and race evaluation point for a segment, describing a point along the segment path and the time it took each segment leader to reach that point
name='segment_point',
mesg_num=150,
fields={
1: Field(
name='position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=1,
units='semicircles',
),
2: Field(
name='position_long',
type=BASE_TYPES[0x85], # sint32
def_num=2,
units='semicircles',
),
3: Field( # Accumulated distance along the segment at the described point
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=100,
units='m',
),
4: Field( # Accumulated altitude along the segment at the described point
name='altitude',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=5,
offset=500,
units='m',
),
5: Field( # Accumulated time each leaderboard member required to reach the described point. This value is zero for all leaderboard members at the starting point of the segment.
name='leader_time',
type=BASE_TYPES[0x86], # uint32
def_num=5,
scale=1000,
units='s',
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
158: MessageType(
name='workout_session',
mesg_num=158,
fields={
0: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=0,
),
1: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=1,
),
2: Field(
name='num_valid_steps',
type=BASE_TYPES[0x84], # uint16
def_num=2,
),
3: Field(
name='first_step_index',
type=BASE_TYPES[0x84], # uint16
def_num=3,
),
4: Field(
name='pool_length',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=100,
units='m',
),
5: Field(
name='pool_length_unit',
type=FIELD_TYPES['display_measure'],
def_num=5,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
159: MessageType(
name='watchface_settings',
mesg_num=159,
fields={
0: Field(
name='mode',
type=FIELD_TYPES['watchface_mode'],
def_num=0,
),
1: Field(
name='layout',
type=BASE_TYPES[0x0D], # byte
def_num=1,
subfields=(
SubField(
name='analog_layout',
def_num=1,
type=FIELD_TYPES['analog_watchface_layout'],
ref_fields=(
ReferenceField(
name='mode',
def_num=0,
value='analog',
raw_value=1,
),
),
),
SubField(
name='digital_layout',
def_num=1,
type=FIELD_TYPES['digital_watchface_layout'],
ref_fields=(
ReferenceField(
name='mode',
def_num=0,
value='digital',
raw_value=0,
),
),
),
),
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
160: MessageType(
name='gps_metadata',
mesg_num=160,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field(
name='position_lat',
type=BASE_TYPES[0x85], # sint32
def_num=1,
units='semicircles',
),
2: Field(
name='position_long',
type=BASE_TYPES[0x85], # sint32
def_num=2,
units='semicircles',
),
3: Field(
name='enhanced_altitude',
type=BASE_TYPES[0x86], # uint32
def_num=3,
scale=5,
offset=500,
units='m',
),
4: Field(
name='enhanced_speed',
type=BASE_TYPES[0x86], # uint32
def_num=4,
scale=1000,
units='m/s',
),
5: Field(
name='heading',
type=BASE_TYPES[0x84], # uint16
def_num=5,
scale=100,
units='degrees',
),
6: Field( # Used to correlate UTC to system time if the timestamp of the message is in system time. This UTC time is derived from the GPS data.
name='utc_timestamp',
type=FIELD_TYPES['date_time'],
def_num=6,
units='s',
),
7: Field( # velocity[0] is lon velocity, velocity[1] is lat velocity, velocity[2] is altitude velocity.
name='velocity',
type=BASE_TYPES[0x83], # sint16
def_num=7,
scale=100,
units='m/s',
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp.
},
),
161: MessageType(
name='camera_event',
mesg_num=161,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field(
name='camera_event_type',
type=FIELD_TYPES['camera_event_type'],
def_num=1,
),
2: Field(
name='camera_file_uuid',
type=BASE_TYPES[0x07], # string
def_num=2,
),
3: Field(
name='camera_orientation',
type=FIELD_TYPES['camera_orientation_type'],
def_num=3,
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp.
},
),
162: MessageType(
name='timestamp_correlation',
mesg_num=162,
fields={
0: Field( # Fractional part of the UTC timestamp at the time the system timestamp was recorded.
name='fractional_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=0,
scale=32768,
units='s',
),
1: Field( # Whole second part of the system timestamp
name='system_timestamp',
type=FIELD_TYPES['date_time'],
def_num=1,
units='s',
),
2: Field( # Fractional part of the system timestamp
name='fractional_system_timestamp',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=32768,
units='s',
),
3: Field( # Timestamp epoch expressed in local time, used to convert timestamps to local time
name='local_timestamp',
type=FIELD_TYPES['local_date_time'],
def_num=3,
units='s',
),
4: Field( # Millisecond part of the UTC timestamp at the time the system timestamp was recorded.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=4,
units='ms',
),
5: Field( # Millisecond part of the system timestamp
name='system_timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=5,
units='ms',
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of UTC timestamp at the time the system timestamp was recorded.
},
),
164: MessageType(
name='gyroscope_data',
mesg_num=164,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field( # Each time in the array describes the time at which the gyro sample with the corresponding index was taken. Limited to 30 samples in each message. The samples may span across seconds. Array size must match the number of samples in gyro_x, gyro_y and gyro_z
name='sample_time_offset',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='ms',
),
2: Field( # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='gyro_x',
type=BASE_TYPES[0x84], # uint16
def_num=2,
units='counts',
),
3: Field( # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='gyro_y',
type=BASE_TYPES[0x84], # uint16
def_num=3,
units='counts',
),
4: Field( # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='gyro_z',
type=BASE_TYPES[0x84], # uint16
def_num=4,
units='counts',
),
5: Field( # Calibrated gyro reading
name='calibrated_gyro_x',
type=BASE_TYPES[0x88], # float32
def_num=5,
units='deg/s',
),
6: Field( # Calibrated gyro reading
name='calibrated_gyro_y',
type=BASE_TYPES[0x88], # float32
def_num=6,
units='deg/s',
),
7: Field( # Calibrated gyro reading
name='calibrated_gyro_z',
type=BASE_TYPES[0x88], # float32
def_num=7,
units='deg/s',
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp
},
),
165: MessageType(
name='accelerometer_data',
mesg_num=165,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field( # Each time in the array describes the time at which the accelerometer sample with the corresponding index was taken. Limited to 30 samples in each message. The samples may span across seconds. Array size must match the number of samples in accel_x, accel_y and accel_z
name='sample_time_offset',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='ms',
),
2: Field( # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='accel_x',
type=BASE_TYPES[0x84], # uint16
def_num=2,
units='counts',
),
3: Field( # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='accel_y',
type=BASE_TYPES[0x84], # uint16
def_num=3,
units='counts',
),
4: Field( # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='accel_z',
type=BASE_TYPES[0x84], # uint16
def_num=4,
units='counts',
),
5: Field( # Calibrated accel reading
name='calibrated_accel_x',
type=BASE_TYPES[0x88], # float32
def_num=5,
units='g',
),
6: Field( # Calibrated accel reading
name='calibrated_accel_y',
type=BASE_TYPES[0x88], # float32
def_num=6,
units='g',
),
7: Field( # Calibrated accel reading
name='calibrated_accel_z',
type=BASE_TYPES[0x88], # float32
def_num=7,
units='g',
),
8: Field( # Calibrated accel reading
name='compressed_calibrated_accel_x',
type=BASE_TYPES[0x83], # sint16
def_num=8,
units='mG',
),
9: Field( # Calibrated accel reading
name='compressed_calibrated_accel_y',
type=BASE_TYPES[0x83], # sint16
def_num=9,
units='mG',
),
10: Field( # Calibrated accel reading
name='compressed_calibrated_accel_z',
type=BASE_TYPES[0x83], # sint16
def_num=10,
units='mG',
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp
},
),
167: MessageType(
name='three_d_sensor_calibration',
mesg_num=167,
fields={
0: Field( # Indicates which sensor the calibration is for
name='sensor_type',
type=FIELD_TYPES['sensor_type'],
def_num=0,
),
1: Field( # Calibration factor used to convert from raw ADC value to degrees, g, etc.
name='calibration_factor',
type=BASE_TYPES[0x86], # uint32
def_num=1,
subfields=(
SubField( # Accelerometer calibration factor
name='accel_cal_factor',
def_num=1,
type=BASE_TYPES[0x86], # uint32
units='g',
ref_fields=(
ReferenceField(
name='sensor_type',
def_num=0,
value='accelerometer',
raw_value=0,
),
),
),
SubField( # Gyro calibration factor
name='gyro_cal_factor',
def_num=1,
type=BASE_TYPES[0x86], # uint32
units='deg/s',
ref_fields=(
ReferenceField(
name='sensor_type',
def_num=0,
value='gyroscope',
raw_value=1,
),
),
),
),
),
2: Field( # Calibration factor divisor
name='calibration_divisor',
type=BASE_TYPES[0x86], # uint32
def_num=2,
units='counts',
),
3: Field( # Level shift value used to shift the ADC value back into range
name='level_shift',
type=BASE_TYPES[0x86], # uint32
def_num=3,
),
4: Field( # Internal calibration factors, one for each: xy, yx, zx
name='offset_cal',
type=BASE_TYPES[0x85], # sint32
def_num=4,
),
5: Field( # 3 x 3 rotation matrix (row major)
name='orientation_matrix',
type=BASE_TYPES[0x85], # sint32
def_num=5,
scale=65535,
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp
},
),
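# Sketch only (the exact formula comes from the FIT SDK documentation, not from this profile):
# calibrated sensor values are commonly derived from the raw counts roughly as
#   cal = (raw - level_shift - offset_cal[axis]) * calibration_factor / calibration_divisor
# with the result then rotated by the 3x3 orientation_matrix (row major, scale 65535).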
169: MessageType(
name='video_frame',
mesg_num=169,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field( # Number of the frame that the timestamp and timestamp_ms correlate to
name='frame_number',
type=BASE_TYPES[0x86], # uint32
def_num=1,
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp
},
),
174: MessageType(
name='obdii_data',
mesg_num=174,
fields={
0: Field( # Fractional part of timestamp, added to timestamp
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field( # Offset of PID reading [i] from start_timestamp+start_timestamp_ms. Readings may span across seconds.
name='time_offset',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='ms',
),
2: Field( # Parameter ID
name='pid',
type=BASE_TYPES[0x0D], # byte
def_num=2,
),
3: Field( # Raw parameter data
name='raw_data',
type=BASE_TYPES[0x0D], # byte
def_num=3,
),
4: Field( # Optional, data size of PID[i]. If not specified refer to SAE J1979.
name='pid_data_size',
type=BASE_TYPES[0x02], # uint8
def_num=4,
),
5: Field( # System time associated with sample expressed in ms, can be used instead of time_offset. There will be a system_time value for each raw_data element. For multibyte pids the system_time is repeated.
name='system_time',
type=BASE_TYPES[0x86], # uint32
def_num=5,
),
6: Field( # Timestamp of first sample recorded in the message. Used with time_offset to generate time of each sample
name='start_timestamp',
type=FIELD_TYPES['date_time'],
def_num=6,
),
7: Field( # Fractional part of start_timestamp
name='start_timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=7,
units='ms',
),
253: FIELD_TYPE_TIMESTAMP, # Timestamp message was output
},
),
177: MessageType(
name='nmea_sentence',
mesg_num=177,
fields={
0: Field( # Fractional part of timestamp, added to timestamp
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field( # NMEA sentence
name='sentence',
type=BASE_TYPES[0x07], # string
def_num=1,
),
253: FIELD_TYPE_TIMESTAMP, # Timestamp message was output
},
),
178: MessageType(
name='aviation_attitude',
mesg_num=178,
fields={
0: Field( # Fractional part of timestamp, added to timestamp
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field( # System time associated with sample expressed in ms.
name='system_time',
type=BASE_TYPES[0x86], # uint32
def_num=1,
units='ms',
),
2: Field( # Range -PI/2 to +PI/2
name='pitch',
type=BASE_TYPES[0x83], # sint16
def_num=2,
scale=10430.38,
units='radians',
),
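# Illustrative note: scale 10430.38 is approximately 32768 / pi, so the raw value divides
# out to radians, e.g. a raw value of 16384 -> 16384 / 10430.38 ~= 1.5708 rad (pi/2).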
3: Field( # Range -PI to +PI
name='roll',
type=BASE_TYPES[0x83], # sint16
def_num=3,
scale=10430.38,
units='radians',
),
4: Field( # Range -78.4 to +78.4 (-8 Gs to 8 Gs)
name='accel_lateral',
type=BASE_TYPES[0x83], # sint16
def_num=4,
scale=100,
units='m/s^2',
),
5: Field( # Range -78.4 to +78.4 (-8 Gs to 8 Gs)
name='accel_normal',
type=BASE_TYPES[0x83], # sint16
def_num=5,
scale=100,
units='m/s^2',
),
6: Field( # Range -8.727 to +8.727 (-500 degs/sec to +500 degs/sec)
name='turn_rate',
type=BASE_TYPES[0x83], # sint16
def_num=6,
scale=1024,
units='radians/second',
),
7: Field(
name='stage',
type=FIELD_TYPES['attitude_stage'],
def_num=7,
),
8: Field( # The percent complete of the current attitude stage. Set to 0 for attitude stages 0, 1 and 2 and to 100 for attitude stage 3 by AHRS modules that do not support it. Range 0 - 100
name='attitude_stage_complete',
type=BASE_TYPES[0x02], # uint8
def_num=8,
units='%',
),
9: Field( # Track Angle/Heading Range 0 - 2pi
name='track',
type=BASE_TYPES[0x84], # uint16
def_num=9,
scale=10430.38,
units='radians',
),
10: Field(
name='validity',
type=FIELD_TYPES['attitude_validity'],
def_num=10,
),
253: FIELD_TYPE_TIMESTAMP, # Timestamp message was output
},
),
184: MessageType(
name='video',
mesg_num=184,
fields={
0: Field(
name='url',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field(
name='hosting_provider',
type=BASE_TYPES[0x07], # string
def_num=1,
),
2: Field( # Playback time of video
name='duration',
type=BASE_TYPES[0x86], # uint32
def_num=2,
units='ms',
),
},
),
185: MessageType(
name='video_title',
mesg_num=185,
fields={
0: Field( # Total number of title parts
name='message_count',
type=BASE_TYPES[0x84], # uint16
def_num=0,
),
1: Field(
name='text',
type=BASE_TYPES[0x07], # string
def_num=1,
),
254: Field( # Long titles will be split into multiple parts
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
186: MessageType(
name='video_description',
mesg_num=186,
fields={
0: Field( # Total number of description parts
name='message_count',
type=BASE_TYPES[0x84], # uint16
def_num=0,
),
1: Field(
name='text',
type=BASE_TYPES[0x07], # string
def_num=1,
),
254: Field( # Long descriptions will be split into multiple parts
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
187: MessageType(
name='video_clip',
mesg_num=187,
fields={
0: Field(
name='clip_number',
type=BASE_TYPES[0x84], # uint16
def_num=0,
),
1: Field(
name='start_timestamp',
type=FIELD_TYPES['date_time'],
def_num=1,
),
2: Field(
name='start_timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=2,
),
3: Field(
name='end_timestamp',
type=FIELD_TYPES['date_time'],
def_num=3,
),
4: Field(
name='end_timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=4,
),
6: Field( # Start of clip in video time
name='clip_start',
type=BASE_TYPES[0x86], # uint32
def_num=6,
units='ms',
),
7: Field( # End of clip in video time
name='clip_end',
type=BASE_TYPES[0x86], # uint32
def_num=7,
units='ms',
),
},
),
188: MessageType(
name='ohr_settings',
mesg_num=188,
fields={
0: Field(
name='enabled',
type=FIELD_TYPES['switch'],
def_num=0,
),
},
),
200: MessageType(
name='exd_screen_configuration',
mesg_num=200,
fields={
0: Field(
name='screen_index',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field( # number of fields in screen
name='field_count',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
2: Field(
name='layout',
type=FIELD_TYPES['exd_layout'],
def_num=2,
),
3: Field(
name='screen_enabled',
type=FIELD_TYPES['bool'],
def_num=3,
),
},
),
201: MessageType(
name='exd_data_field_configuration',
mesg_num=201,
fields={
0: Field(
name='screen_index',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field(
name='concept_field',
type=BASE_TYPES[0x0D], # byte
def_num=1,
components=(
ComponentField(
name='field_id',
def_num=2,
accumulate=False,
bits=4,
bit_offset=0,
),
ComponentField(
name='concept_count',
def_num=3,
accumulate=False,
bits=4,
bit_offset=4,
),
),
),
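# Illustrative note (assumed bit layout from the component definitions above): the packed
# byte unpacks as
#   field_id      = concept_field & 0x0F          # bits 0-3
#   concept_count = (concept_field >> 4) & 0x0F   # bits 4-7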
2: Field(
name='field_id',
type=BASE_TYPES[0x02], # uint8
def_num=2,
),
3: Field(
name='concept_count',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='display_type',
type=FIELD_TYPES['exd_display_type'],
def_num=4,
),
5: Field(
name='title',
type=BASE_TYPES[0x07], # string
def_num=5,
),
},
),
202: MessageType(
name='exd_data_concept_configuration',
mesg_num=202,
fields={
0: Field(
name='screen_index',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field(
name='concept_field',
type=BASE_TYPES[0x0D], # byte
def_num=1,
components=(
ComponentField(
name='field_id',
def_num=2,
accumulate=False,
bits=4,
bit_offset=0,
),
ComponentField(
name='concept_index',
def_num=3,
accumulate=False,
bits=4,
bit_offset=4,
),
),
),
2: Field(
name='field_id',
type=BASE_TYPES[0x02], # uint8
def_num=2,
),
3: Field(
name='concept_index',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='data_page',
type=BASE_TYPES[0x02], # uint8
def_num=4,
),
5: Field(
name='concept_key',
type=BASE_TYPES[0x02], # uint8
def_num=5,
),
6: Field(
name='scaling',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
8: Field(
name='data_units',
type=FIELD_TYPES['exd_data_units'],
def_num=8,
),
9: Field(
name='qualifier',
type=FIELD_TYPES['exd_qualifiers'],
def_num=9,
),
10: Field(
name='descriptor',
type=FIELD_TYPES['exd_descriptors'],
def_num=10,
),
11: Field(
name='is_signed',
type=FIELD_TYPES['bool'],
def_num=11,
),
},
),
206: MessageType( # Must be logged before developer field is used
name='field_description',
mesg_num=206,
fields={
0: Field(
name='developer_data_index',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field(
name='field_definition_number',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
2: Field(
name='fit_base_type_id',
type=FIELD_TYPES['fit_base_type'],
def_num=2,
),
3: Field(
name='field_name',
type=BASE_TYPES[0x07], # string
def_num=3,
),
4: Field(
name='array',
type=BASE_TYPES[0x02], # uint8
def_num=4,
),
5: Field(
name='components',
type=BASE_TYPES[0x07], # string
def_num=5,
),
6: Field(
name='scale',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
7: Field(
name='offset',
type=BASE_TYPES[0x01], # sint8
def_num=7,
),
8: Field(
name='units',
type=BASE_TYPES[0x07], # string
def_num=8,
),
9: Field(
name='bits',
type=BASE_TYPES[0x07], # string
def_num=9,
),
10: Field(
name='accumulate',
type=BASE_TYPES[0x07], # string
def_num=10,
),
13: Field(
name='fit_base_unit_id',
type=FIELD_TYPES['fit_base_unit'],
def_num=13,
),
14: Field(
name='native_mesg_num',
type=FIELD_TYPES['mesg_num'],
def_num=14,
),
15: Field(
name='native_field_num',
type=BASE_TYPES[0x02], # uint8
def_num=15,
),
},
),
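# Illustrative note (general FIT convention, not specific to this message): a developer field
# value is decoded as raw / scale - offset, using the 'scale' and 'offset' advertised in its
# field_description message above.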
207: MessageType( # Must be logged before field description
name='developer_data_id',
mesg_num=207,
fields={
0: Field(
name='developer_id',
type=BASE_TYPES[0x0D], # byte
def_num=0,
),
1: Field(
name='application_id',
type=BASE_TYPES[0x0D], # byte
def_num=1,
),
2: Field(
name='manufacturer_id',
type=FIELD_TYPES['manufacturer'],
def_num=2,
),
3: Field(
name='developer_data_index',
type=BASE_TYPES[0x02], # uint8
def_num=3,
),
4: Field(
name='application_version',
type=BASE_TYPES[0x86], # uint32
def_num=4,
),
},
),
208: MessageType(
name='magnetometer_data',
mesg_num=208,
fields={
0: Field( # Millisecond part of the timestamp.
name='timestamp_ms',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='ms',
),
1: Field( # Each time in the array describes the time at which the compass sample with the corresponding index was taken. Limited to 30 samples in each message. The samples may span across seconds. Array size must match the number of samples in cmps_x, cmps_y and cmps_z
name='sample_time_offset',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='ms',
),
2: Field( # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='mag_x',
type=BASE_TYPES[0x84], # uint16
def_num=2,
units='counts',
),
3: Field( # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='mag_y',
type=BASE_TYPES[0x84], # uint16
def_num=3,
units='counts',
),
4: Field( # These are the raw ADC readings. Maximum number of samples is 30 in each message. The samples may span across seconds. A conversion will need to be done on this data once read.
name='mag_z',
type=BASE_TYPES[0x84], # uint16
def_num=4,
units='counts',
),
5: Field( # Calibrated Magnetometer reading
name='calibrated_mag_x',
type=BASE_TYPES[0x88], # float32
def_num=5,
units='G',
),
6: Field( # Calibrated Magnetometer reading
name='calibrated_mag_y',
type=BASE_TYPES[0x88], # float32
def_num=6,
units='G',
),
7: Field( # Calibrated Magnetometer reading
name='calibrated_mag_z',
type=BASE_TYPES[0x88], # float32
def_num=7,
units='G',
),
253: FIELD_TYPE_TIMESTAMP, # Whole second part of the timestamp
},
),
######################### Activity File Messages #########################
34: MessageType(
name='activity',
mesg_num=34,
fields={
0: Field( # Exclude pauses
name='total_timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=0,
scale=1000,
units='s',
),
1: Field(
name='num_sessions',
type=BASE_TYPES[0x84], # uint16
def_num=1,
),
2: Field(
name='type',
type=FIELD_TYPES['activity'],
def_num=2,
),
3: Field(
name='event',
type=FIELD_TYPES['event'],
def_num=3,
),
4: Field(
name='event_type',
type=FIELD_TYPES['event_type'],
def_num=4,
),
5: Field( # timestamp epoch expressed in local time, used to convert activity timestamps to local time
name='local_timestamp',
type=FIELD_TYPES['local_date_time'],
def_num=5,
),
6: Field(
name='event_group',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
###################### Blood Pressure File Messages ######################
51: MessageType(
name='blood_pressure',
mesg_num=51,
fields={
0: Field(
name='systolic_pressure',
type=BASE_TYPES[0x84], # uint16
def_num=0,
units='mmHg',
),
1: Field(
name='diastolic_pressure',
type=BASE_TYPES[0x84], # uint16
def_num=1,
units='mmHg',
),
2: Field(
name='mean_arterial_pressure',
type=BASE_TYPES[0x84], # uint16
def_num=2,
units='mmHg',
),
3: Field(
name='map_3_sample_mean',
type=BASE_TYPES[0x84], # uint16
def_num=3,
units='mmHg',
),
4: Field(
name='map_morning_values',
type=BASE_TYPES[0x84], # uint16
def_num=4,
units='mmHg',
),
5: Field(
name='map_evening_values',
type=BASE_TYPES[0x84], # uint16
def_num=5,
units='mmHg',
),
6: Field(
name='heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=6,
units='bpm',
),
7: Field(
name='heart_rate_type',
type=FIELD_TYPES['hr_type'],
def_num=7,
),
8: Field(
name='status',
type=FIELD_TYPES['bp_status'],
def_num=8,
),
9: Field( # Associates this blood pressure message to a user. This corresponds to the index of the user profile message in the blood pressure file.
name='user_profile_index',
type=FIELD_TYPES['message_index'],
def_num=9,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
########################## Course File Messages ##########################
31: MessageType(
name='course',
mesg_num=31,
fields={
4: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=4,
),
5: Field(
name='name',
type=BASE_TYPES[0x07], # string
def_num=5,
),
6: Field(
name='capabilities',
type=FIELD_TYPES['course_capabilities'],
def_num=6,
),
7: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=7,
),
},
),
########################## Device File Messages ##########################
35: MessageType(
name='software',
mesg_num=35,
fields={
3: Field(
name='version',
type=BASE_TYPES[0x84], # uint16
def_num=3,
scale=100,
),
5: Field(
name='part_number',
type=BASE_TYPES[0x07], # string
def_num=5,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
########################## Goals File Messages ###########################
15: MessageType(
name='goal',
mesg_num=15,
fields={
0: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=0,
),
1: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=1,
),
2: Field(
name='start_date',
type=FIELD_TYPES['date_time'],
def_num=2,
),
3: Field(
name='end_date',
type=FIELD_TYPES['date_time'],
def_num=3,
),
4: Field(
name='type',
type=FIELD_TYPES['goal'],
def_num=4,
),
5: Field(
name='value',
type=BASE_TYPES[0x86], # uint32
def_num=5,
),
6: Field(
name='repeat',
type=FIELD_TYPES['bool'],
def_num=6,
),
7: Field(
name='target_value',
type=BASE_TYPES[0x86], # uint32
def_num=7,
),
8: Field(
name='recurrence',
type=FIELD_TYPES['goal_recurrence'],
def_num=8,
),
9: Field(
name='recurrence_value',
type=BASE_TYPES[0x84], # uint16
def_num=9,
),
10: Field(
name='enabled',
type=FIELD_TYPES['bool'],
def_num=10,
),
11: Field(
name='source',
type=FIELD_TYPES['goal_source'],
def_num=11,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
######################## Monitoring File Messages ########################
103: MessageType(
name='monitoring_info',
mesg_num=103,
fields={
0: Field( # Used to convert activity timestamps to local time if the device does not support time zone and daylight saving time correction.
name='local_timestamp',
type=FIELD_TYPES['local_date_time'],
def_num=0,
units='s',
),
1: Field(
name='activity_type',
type=FIELD_TYPES['activity_type'],
def_num=1,
),
3: Field( # Indexed by activity_type
name='cycles_to_distance',
type=BASE_TYPES[0x84], # uint16
def_num=3,
scale=5000,
units='m/cycle',
),
4: Field( # Indexed by activity_type
name='cycles_to_calories',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=5000,
units='kcal/cycle',
),
5: Field(
name='resting_metabolic_rate',
type=BASE_TYPES[0x84], # uint16
def_num=5,
units='kcal/day',
),
253: FIELD_TYPE_TIMESTAMP,
},
),
############################# Other Messages #############################
145: MessageType(
name='memo_glob',
mesg_num=145,
fields={
0: Field( # Block of utf8 bytes
name='memo',
type=BASE_TYPES[0x0D], # byte
def_num=0,
),
1: Field( # Allows relating the glob to another mesg. If used, only required for the first part of each memo_glob
name='message_number',
type=BASE_TYPES[0x84], # uint16
def_num=1,
),
2: Field( # Index of external mesg
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=2,
),
250: Field( # Sequence number of memo blocks
name='part_index',
type=BASE_TYPES[0x86], # uint32
def_num=250,
),
},
),
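# Illustrative note (assumed reassembly, not defined by this profile): a complete memo is
# typically rebuilt by concatenating the 'memo' byte blocks of all parts in part_index order
# and decoding the result as UTF-8.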
######################### Schedule File Messages #########################
28: MessageType(
name='schedule',
mesg_num=28,
fields={
0: Field( # Corresponds to file_id of scheduled workout / course.
name='manufacturer',
type=FIELD_TYPES['manufacturer'],
def_num=0,
),
1: Field( # Corresponds to file_id of scheduled workout / course.
name='product',
type=BASE_TYPES[0x84], # uint16
def_num=1,
subfields=(
SubField(
name='garmin_product',
def_num=1,
type=FIELD_TYPES['garmin_product'],
ref_fields=(
ReferenceField(
name='manufacturer',
def_num=0,
value='garmin',
raw_value=1,
),
ReferenceField(
name='manufacturer',
def_num=0,
value='dynastream',
raw_value=15,
),
ReferenceField(
name='manufacturer',
def_num=0,
value='dynastream_oem',
raw_value=13,
),
),
),
),
),
2: Field( # Corresponds to file_id of scheduled workout / course.
name='serial_number',
type=BASE_TYPES[0x8C], # uint32z
def_num=2,
),
3: Field( # Corresponds to file_id of scheduled workout / course.
name='time_created',
type=FIELD_TYPES['date_time'],
def_num=3,
),
4: Field( # TRUE if this activity has been started
name='completed',
type=FIELD_TYPES['bool'],
def_num=4,
),
5: Field(
name='type',
type=FIELD_TYPES['schedule'],
def_num=5,
),
6: Field(
name='scheduled_time',
type=FIELD_TYPES['local_date_time'],
def_num=6,
),
},
),
######################### Segment File Messages ##########################
148: MessageType( # Unique Identification data for a segment file
name='segment_id',
mesg_num=148,
fields={
0: Field( # Friendly name assigned to segment
name='name',
type=BASE_TYPES[0x07], # string
def_num=0,
),
1: Field( # UUID of the segment
name='uuid',
type=BASE_TYPES[0x07], # string
def_num=1,
),
2: Field( # Sport associated with the segment
name='sport',
type=FIELD_TYPES['sport'],
def_num=2,
),
3: Field( # Segment enabled for evaluation
name='enabled',
type=FIELD_TYPES['bool'],
def_num=3,
),
4: Field( # Primary key of the user that created the segment
name='user_profile_primary_key',
type=BASE_TYPES[0x86], # uint32
def_num=4,
),
5: Field( # ID of the device that created the segment
name='device_id',
type=BASE_TYPES[0x86], # uint32
def_num=5,
),
6: Field( # Index for the Leader Board entry selected as the default race participant
name='default_race_leader',
type=BASE_TYPES[0x02], # uint8
def_num=6,
),
7: Field( # Indicates if any segments should be deleted
name='delete_status',
type=FIELD_TYPES['segment_delete_status'],
def_num=7,
),
8: Field( # Indicates how the segment was selected to be sent to the device
name='selection_type',
type=FIELD_TYPES['segment_selection_type'],
def_num=8,
),
},
),
####################### Segment List File Messages #######################
151: MessageType( # Summary of the unique segment and leaderboard information associated with a segment file. This message is used to compile a segment list file describing all segment files on a device. The segment list file is used when refreshing the contents of a segment file with the latest available leaderboard information.
name='segment_file',
mesg_num=151,
fields={
1: Field( # UUID of the segment file
name='file_uuid',
type=BASE_TYPES[0x07], # string
def_num=1,
),
3: Field( # Enabled state of the segment file
name='enabled',
type=FIELD_TYPES['bool'],
def_num=3,
),
4: Field( # Primary key of the user that created the segment file
name='user_profile_primary_key',
type=BASE_TYPES[0x86], # uint32
def_num=4,
),
7: Field( # Leader type of each leader in the segment file
name='leader_type',
type=FIELD_TYPES['segment_leaderboard_type'],
def_num=7,
),
8: Field( # Group primary key of each leader in the segment file
name='leader_group_primary_key',
type=BASE_TYPES[0x86], # uint32
def_num=8,
),
9: Field( # Activity ID of each leader in the segment file
name='leader_activity_id',
type=BASE_TYPES[0x86], # uint32
def_num=9,
),
10: Field( # String version of the activity ID of each leader in the segment file. 21 characters long for each ID, expressed in decimal
name='leader_activity_id_string',
type=BASE_TYPES[0x07], # string
def_num=10,
),
11: Field( # Index for the Leader Board entry selected as the default race participant
name='default_race_leader',
type=BASE_TYPES[0x02], # uint8
def_num=11,
),
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
######################### Settings File Messages #########################
2: MessageType(
name='device_settings',
mesg_num=2,
fields={
0: Field( # Index into time zone arrays.
name='active_time_zone',
type=BASE_TYPES[0x02], # uint8
def_num=0,
),
1: Field( # Offset from system time. Required to convert timestamp from system time to UTC.
name='utc_offset',
type=BASE_TYPES[0x86], # uint32
def_num=1,
),
2: Field( # Offset from system time.
name='time_offset',
type=BASE_TYPES[0x86], # uint32
def_num=2,
units='s',
),
4: Field( # Display mode for the time
name='time_mode',
type=FIELD_TYPES['time_mode'],
def_num=4,
),
5: Field( # timezone offset in 1/4 hour increments
name='time_zone_offset',
type=BASE_TYPES[0x01], # sint8
def_num=5,
scale=4,
units='hr',
),
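# Illustrative example: with scale=4 the stored sint8 holds quarter hours, so a raw value
# of -20 decodes to -20 / 4 = -5.0 hr (UTC-5).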
12: Field( # Mode for backlight
name='backlight_mode',
type=FIELD_TYPES['backlight_mode'],
def_num=12,
),
36: Field( # Enabled state of the activity tracker functionality
name='activity_tracker_enabled',
type=FIELD_TYPES['bool'],
def_num=36,
),
39: Field( # UTC timestamp used to set the devices clock and date
name='clock_time',
type=FIELD_TYPES['date_time'],
def_num=39,
),
40: Field( # Bitfield to configure enabled screens for each supported loop
name='pages_enabled',
type=BASE_TYPES[0x84], # uint16
def_num=40,
),
46: Field( # Enabled state of the move alert
name='move_alert_enabled',
type=FIELD_TYPES['bool'],
def_num=46,
),
47: Field( # Display mode for the date
name='date_mode',
type=FIELD_TYPES['date_mode'],
def_num=47,
),
55: Field(
name='display_orientation',
type=FIELD_TYPES['display_orientation'],
def_num=55,
),
56: Field(
name='mounting_side',
type=FIELD_TYPES['side'],
def_num=56,
),
57: Field( # Bitfield to indicate one page as default for each supported loop
name='default_page',
type=BASE_TYPES[0x84], # uint16
def_num=57,
),
58: Field( # Minimum steps before an autosync can occur
name='autosync_min_steps',
type=BASE_TYPES[0x84], # uint16
def_num=58,
units='steps',
),
59: Field( # Minimum minutes before an autosync can occur
name='autosync_min_time',
type=BASE_TYPES[0x84], # uint16
def_num=59,
units='minutes',
),
80: Field( # Enable auto-detect setting for the lactate threshold feature.
name='lactate_threshold_autodetect_enabled',
type=FIELD_TYPES['bool'],
def_num=80,
),
86: Field( # Automatically upload using BLE
name='ble_auto_upload_enabled',
type=FIELD_TYPES['bool'],
def_num=86,
),
89: Field( # Helps to conserve battery by changing modes
name='auto_sync_frequency',
type=FIELD_TYPES['auto_sync_frequency'],
def_num=89,
),
90: Field( # Allows setting specific activities auto-activity detect enabled/disabled settings
name='auto_activity_detect',
type=FIELD_TYPES['auto_activity_detect'],
def_num=90,
),
94: Field( # Number of screens configured to display
name='number_of_screens',
type=BASE_TYPES[0x02], # uint8
def_num=94,
),
95: Field( # Smart Notification display orientation
name='smart_notification_display_orientation',
type=FIELD_TYPES['display_orientation'],
def_num=95,
),
},
),
###################### Sport Settings File Messages ######################
7: MessageType(
name='zones_target',
mesg_num=7,
fields={
1: Field(
name='max_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=1,
),
2: Field(
name='threshold_heart_rate',
type=BASE_TYPES[0x02], # uint8
def_num=2,
),
3: Field(
name='functional_threshold_power',
type=BASE_TYPES[0x84], # uint16
def_num=3,
),
5: Field(
name='hr_calc_type',
type=FIELD_TYPES['hr_zone_calc'],
def_num=5,
),
7: Field(
name='pwr_calc_type',
type=FIELD_TYPES['pwr_zone_calc'],
def_num=7,
),
},
),
########################## Totals File Messages ##########################
33: MessageType(
name='totals',
mesg_num=33,
fields={
0: Field( # Excludes pauses
name='timer_time',
type=BASE_TYPES[0x86], # uint32
def_num=0,
units='s',
),
1: Field(
name='distance',
type=BASE_TYPES[0x86], # uint32
def_num=1,
units='m',
),
2: Field(
name='calories',
type=BASE_TYPES[0x86], # uint32
def_num=2,
units='kcal',
),
3: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=3,
),
4: Field( # Includes pauses
name='elapsed_time',
type=BASE_TYPES[0x86], # uint32
def_num=4,
units='s',
),
5: Field(
name='sessions',
type=BASE_TYPES[0x84], # uint16
def_num=5,
),
6: Field(
name='active_time',
type=BASE_TYPES[0x86], # uint32
def_num=6,
units='s',
),
9: Field(
name='sport_index',
type=BASE_TYPES[0x02], # uint8
def_num=9,
),
253: FIELD_TYPE_TIMESTAMP,
254: Field(
name='message_index',
type=FIELD_TYPES['message_index'],
def_num=254,
),
},
),
####################### Weight Scale File Messages #######################
30: MessageType(
name='weight_scale',
mesg_num=30,
fields={
0: Field(
name='weight',
type=FIELD_TYPES['weight'],
def_num=0,
scale=100,
units='kg',
),
1: Field(
name='percent_fat',
type=BASE_TYPES[0x84], # uint16
def_num=1,
scale=100,
units='%',
),
2: Field(
name='percent_hydration',
type=BASE_TYPES[0x84], # uint16
def_num=2,
scale=100,
units='%',
),
3: Field(
name='visceral_fat_mass',
type=BASE_TYPES[0x84], # uint16
def_num=3,
scale=100,
units='kg',
),
4: Field(
name='bone_mass',
type=BASE_TYPES[0x84], # uint16
def_num=4,
scale=100,
units='kg',
),
5: Field(
name='muscle_mass',
type=BASE_TYPES[0x84], # uint16
def_num=5,
scale=100,
units='kg',
),
7: Field(
name='basal_met',
type=BASE_TYPES[0x84], # uint16
def_num=7,
scale=4,
units='kcal/day',
),
8: Field(
name='physique_rating',
type=BASE_TYPES[0x02], # uint8
def_num=8,
),
9: Field( # ~4kJ per kcal, 0.25 allows max 16384 kcal
name='active_met',
type=BASE_TYPES[0x84], # uint16
def_num=9,
scale=4,
units='kcal/day',
),
10: Field(
name='metabolic_age',
type=BASE_TYPES[0x02], # uint8
def_num=10,
units='years',
),
11: Field(
name='visceral_fat_rating',
type=BASE_TYPES[0x02], # uint8
def_num=11,
),
12: Field( # Associates this weight scale message to a user. This corresponds to the index of the user profile message in the weight scale file.
name='user_profile_index',
type=FIELD_TYPES['message_index'],
def_num=12,
),
253: FIELD_TYPE_TIMESTAMP,
},
),
######################### Workout File Messages ##########################
26: MessageType(
name='workout',
mesg_num=26,
fields={
4: Field(
name='sport',
type=FIELD_TYPES['sport'],
def_num=4,
),
5: Field(
name='capabilities',
type=FIELD_TYPES['workout_capabilities'],
def_num=5,
),
6: Field( # number of valid steps
name='num_valid_steps',
type=BASE_TYPES[0x84], # uint16
def_num=6,
),
8: Field(
name='wkt_name',
type=BASE_TYPES[0x07], # string
def_num=8,
),
11: Field(
name='sub_sport',
type=FIELD_TYPES['sub_sport'],
def_num=11,
),
14: Field(
name='pool_length',
type=BASE_TYPES[0x84], # uint16
def_num=14,
scale=100,
units='m',
),
15: Field(
name='pool_length_unit',
type=FIELD_TYPES['display_measure'],
def_num=15,
),
},
),
}
| 33.844565
| 336
| 0.397916
| 28,750
| 337,498
| 4.448835
| 0.079096
| 0.053571
| 0.085783
| 0.034026
| 0.625672
| 0.592093
| 0.567996
| 0.543275
| 0.497076
| 0.441214
| 0
| 0.080312
| 0.503787
| 337,498
| 9,971
| 337
| 33.847959
| 0.683426
| 0.099103
| 0
| 0.716376
| 1
| 0
| 0.147265
| 0.028552
| 0
| 0
| 0.015385
| 0
| 0
| 1
| 0
| false
| 0.000303
| 0.000101
| 0
| 0.000101
| 0.000101
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
3e7fe9149a1b5f7c3cd431d38f69f6e9b05ff08e
| 108
|
py
|
Python
|
loops_part2/sequence_2k+1.py
|
MaggieIllustrations/softuni-github-programming
|
f5695cb14602f3d2974359f6d8734332acc650d3
|
[
"MIT"
] | null | null | null |
loops_part2/sequence_2k+1.py
|
MaggieIllustrations/softuni-github-programming
|
f5695cb14602f3d2974359f6d8734332acc650d3
|
[
"MIT"
] | null | null | null |
loops_part2/sequence_2k+1.py
|
MaggieIllustrations/softuni-github-programming
|
f5695cb14602f3d2974359f6d8734332acc650d3
|
[
"MIT"
] | 1
|
2022-01-14T17:12:44.000Z
|
2022-01-14T17:12:44.000Z
|
number = int(input())
counter = 1
while counter <= number:
    print(counter)
    counter = 2 * counter + 1
| 18
| 29
| 0.62963
| 14
| 108
| 4.857143
| 0.571429
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036585
| 0.240741
| 108
| 6
| 29
| 18
| 0.792683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
3e8fb96193b2244d64a924fa63c9c59dfafd9741
| 557
|
py
|
Python
|
Software_University/fundamentals/functions/lecture/orders.py
|
Ivanazzz/SoftUni-W3resource-Python
|
892321a290e22a91ff2ac2fef5316179a93f2f17
|
[
"MIT"
] | 1
|
2022-01-26T07:38:11.000Z
|
2022-01-26T07:38:11.000Z
|
Software_University/fundamentals/functions/lecture/orders.py
|
Ivanazzz/SoftUni-W3resource-Python
|
892321a290e22a91ff2ac2fef5316179a93f2f17
|
[
"MIT"
] | null | null | null |
Software_University/fundamentals/functions/lecture/orders.py
|
Ivanazzz/SoftUni-W3resource-Python
|
892321a290e22a91ff2ac2fef5316179a93f2f17
|
[
"MIT"
] | null | null | null |
product_type = input("Enter the product type(coffee, water, coke, snacks): ")
quantity = int(input("Enter the quantity: "))
def price():
    if product_type == "coffee":
        total_price = quantity * 1.50
        return total_price
    elif product_type == "water":
        total_price = quantity * 1.00
        return total_price
    elif product_type == "coke":
        total_price = quantity * 1.40
        return total_price
    elif product_type == "snacks":
        total_price = quantity * 2.00
        return total_price


print(f"{price():.2f}")
| 30.944444
| 77
| 0.626571
| 71
| 557
| 4.732394
| 0.352113
| 0.238095
| 0.214286
| 0.169643
| 0.276786
| 0.276786
| 0
| 0
| 0
| 0
| 0
| 0.031477
| 0.258528
| 557
| 18
| 78
| 30.944444
| 0.782082
| 0
| 0
| 0.25
| 0
| 0
| 0.191756
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0
| 0
| 0.3125
| 0.0625
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
e436ff03150d44e0196337e442c791322d057adb
| 95
|
py
|
Python
|
python/p287ex5.py
|
ThePeeps191/dmoj-solutions
|
7137e945f3f595c481ad4d29e1dc3a77d8b26e55
|
[
"MIT"
] | 1
|
2022-01-23T16:02:14.000Z
|
2022-01-23T16:02:14.000Z
|
python/p287ex5.py
|
ThePeeps191/dmoj-solutions
|
7137e945f3f595c481ad4d29e1dc3a77d8b26e55
|
[
"MIT"
] | 5
|
2022-01-23T00:16:49.000Z
|
2022-01-30T04:37:45.000Z
|
python/p287ex5.py
|
ThePeeps191/dmoj-solutions
|
7137e945f3f595c481ad4d29e1dc3a77d8b26e55
|
[
"MIT"
] | 1
|
2022-01-23T00:03:47.000Z
|
2022-01-23T00:03:47.000Z
|
# not yet finished
for _ in range(int(input())):print(len(list(set(input().replace("-", "")))))
| 47.5
| 76
| 0.631579
| 14
| 95
| 4.214286
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084211
| 95
| 2
| 76
| 47.5
| 0.678161
| 0.168421
| 0
| 0
| 0
| 0
| 0.012821
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 3
|
e43f5553851f44ad5911378e9d31bfdce168b90d
| 1,207
|
py
|
Python
|
rfid/eggplant/pigeon/migrations/0003_auto_20160328_0809.py
|
psiyan/rfid
|
401a093958ffafdcd10259cc9e19b7bd9f0c0e8c
|
[
"Apache-2.0"
] | null | null | null |
rfid/eggplant/pigeon/migrations/0003_auto_20160328_0809.py
|
psiyan/rfid
|
401a093958ffafdcd10259cc9e19b7bd9f0c0e8c
|
[
"Apache-2.0"
] | null | null | null |
rfid/eggplant/pigeon/migrations/0003_auto_20160328_0809.py
|
psiyan/rfid
|
401a093958ffafdcd10259cc9e19b7bd9f0c0e8c
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-03-28 08:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('pigeon', '0002_auto_20160327_0723'),
    ]

    operations = [
        migrations.AlterField(
            model_name='scroll',
            name='pubDate',
            field=models.DateTimeField(blank=True, null=True, verbose_name='date published'),
        ),
        migrations.AlterField(
            model_name='scroll',
            name='scrollFrom',
            field=models.CharField(max_length=80, verbose_name='from'),
        ),
        migrations.AlterField(
            model_name='scroll',
            name='scrollID',
            field=models.CharField(max_length=40, verbose_name='RFID'),
        ),
        migrations.AlterField(
            model_name='scroll',
            name='scrollMessage',
            field=models.CharField(max_length=1024, verbose_name='message'),
        ),
        migrations.AlterField(
            model_name='scroll',
            name='scrollTo',
            field=models.CharField(max_length=80, verbose_name='to'),
        ),
    ]
| 29.439024
| 93
| 0.584093
| 119
| 1,207
| 5.739496
| 0.487395
| 0.146413
| 0.183016
| 0.212299
| 0.493411
| 0.408492
| 0.122987
| 0.122987
| 0
| 0
| 0
| 0.049296
| 0.294118
| 1,207
| 40
| 94
| 30.175
| 0.752347
| 0.05551
| 0
| 0.454545
| 1
| 0
| 0.119613
| 0.020229
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.060606
| 0
| 0.151515
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
e44176bdde09e0e534875279d12d7f2e7e878bfb
| 40,102
|
py
|
Python
|
pyboto3/workdocs.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
pyboto3/workdocs.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
pyboto3/workdocs.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
'''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def abort_document_version_upload(DocumentId=None, VersionId=None):
"""
Aborts the upload of the specified document version that was previously initiated by InitiateDocumentVersionUpload. The client should make this call only when it no longer intends to upload the document version, or fails to do so.
See also: AWS API Documentation
:example: response = client.abort_document_version_upload(
DocumentId='string',
VersionId='string'
)
:type DocumentId: string
:param DocumentId: [REQUIRED]
The ID of the document.
:type VersionId: string
:param VersionId: [REQUIRED]
The ID of the version.
"""
pass
def activate_user(UserId=None):
"""
Activates the specified user. Only active users can access Amazon WorkDocs.
See also: AWS API Documentation
:example: response = client.activate_user(
UserId='string'
)
:type UserId: string
:param UserId: [REQUIRED]
The ID of the user.
:rtype: dict
:return: {
'User': {
'Id': 'string',
'Username': 'string',
'EmailAddress': 'string',
'GivenName': 'string',
'Surname': 'string',
'OrganizationId': 'string',
'RootFolderId': 'string',
'RecycleBinFolderId': 'string',
'Status': 'ACTIVE'|'INACTIVE'|'PENDING',
'Type': 'USER'|'ADMIN',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'TimeZoneId': 'string',
'Locale': 'en'|'fr'|'ko'|'de'|'es'|'ja'|'ru'|'zh_CN'|'zh_TW'|'pt_BR'|'default',
'Storage': {
'StorageUtilizedInBytes': 123,
'StorageRule': {
'StorageAllocatedInBytes': 123,
'StorageType': 'UNLIMITED'|'QUOTA'
}
}
}
}
"""
pass
def add_resource_permissions(ResourceId=None, Principals=None):
"""
Creates a set of permissions for the specified folder or document. The resource permissions are overwritten if the principals already have different permissions.
See also: AWS API Documentation
:example: response = client.add_resource_permissions(
ResourceId='string',
Principals=[
{
'Id': 'string',
'Type': 'USER'|'GROUP'|'INVITE'|'ANONYMOUS'|'ORGANIZATION',
'Role': 'VIEWER'|'CONTRIBUTOR'|'OWNER'|'COOWNER'
},
]
)
:type ResourceId: string
:param ResourceId: [REQUIRED]
The ID of the resource.
:type Principals: list
:param Principals: [REQUIRED]
The users, groups, or organization being granted permission.
(dict) --Describes the recipient type and ID, if available.
Id (string) -- [REQUIRED]The ID of the recipient.
Type (string) -- [REQUIRED]The type of the recipient.
Role (string) -- [REQUIRED]The role of the recipient.
:rtype: dict
:return: {
'ShareResults': [
{
'PrincipalId': 'string',
'Role': 'VIEWER'|'CONTRIBUTOR'|'OWNER'|'COOWNER',
'Status': 'SUCCESS'|'FAILURE',
'ShareId': 'string',
'StatusMessage': 'string'
},
]
}
"""
pass
def can_paginate(operation_name=None):
"""
Check if an operation can be paginated.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is create_foo, and you'd normally invoke the
operation as client.create_foo(**kwargs), if the
create_foo operation can be paginated, you can use the
call client.get_paginator('create_foo').
"""
pass
def create_folder(Name=None, ParentFolderId=None):
"""
Creates a folder with the specified name and parent folder.
See also: AWS API Documentation
:example: response = client.create_folder(
Name='string',
ParentFolderId='string'
)
:type Name: string
:param Name: The name of the new folder.
:type ParentFolderId: string
:param ParentFolderId: [REQUIRED]
The ID of the parent folder.
:rtype: dict
:return: {
'Metadata': {
'Id': 'string',
'Name': 'string',
'CreatorId': 'string',
'ParentFolderId': 'string',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'ResourceState': 'ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED',
'Signature': 'string'
}
}
"""
pass
def create_notification_subscription(OrganizationId=None, Endpoint=None, Protocol=None, SubscriptionType=None):
"""
Configure WorkDocs to use Amazon SNS notifications.
The endpoint receives a confirmation message, and must confirm the subscription. For more information, see Confirm the Subscription in the Amazon Simple Notification Service Developer Guide.
See also: AWS API Documentation
:example: response = client.create_notification_subscription(
OrganizationId='string',
Endpoint='string',
Protocol='HTTPS',
SubscriptionType='ALL'
)
:type OrganizationId: string
:param OrganizationId: [REQUIRED]
The ID of the organization.
:type Endpoint: string
:param Endpoint: [REQUIRED]
The endpoint to receive the notifications. If the protocol is HTTPS, the endpoint is a URL that begins with 'https://'.
:type Protocol: string
:param Protocol: [REQUIRED]
The protocol to use. The supported value is https, which delivers JSON-encoded messages using HTTPS POST.
:type SubscriptionType: string
:param SubscriptionType: [REQUIRED]
The notification type.
:rtype: dict
:return: {
'Subscription': {
'SubscriptionId': 'string',
'EndPoint': 'string',
'Protocol': 'HTTPS'
}
}
"""
pass
def create_user(OrganizationId=None, Username=None, GivenName=None, Surname=None, Password=None, TimeZoneId=None, StorageRule=None):
"""
Creates a user in a Simple AD or Microsoft AD directory. The status of a newly created user is "ACTIVE". New users can access Amazon WorkDocs.
See also: AWS API Documentation
:example: response = client.create_user(
OrganizationId='string',
Username='string',
GivenName='string',
Surname='string',
Password='string',
TimeZoneId='string',
StorageRule={
'StorageAllocatedInBytes': 123,
'StorageType': 'UNLIMITED'|'QUOTA'
}
)
:type OrganizationId: string
:param OrganizationId: The ID of the organization.
:type Username: string
:param Username: [REQUIRED]
The login name of the user.
:type GivenName: string
:param GivenName: [REQUIRED]
The given name of the user.
:type Surname: string
:param Surname: [REQUIRED]
The surname of the user.
:type Password: string
:param Password: [REQUIRED]
The password of the user.
:type TimeZoneId: string
:param TimeZoneId: The time zone ID of the user.
:type StorageRule: dict
:param StorageRule: The amount of storage for the user.
StorageAllocatedInBytes (integer) --The amount of storage allocated, in bytes.
StorageType (string) --The type of storage.
:rtype: dict
:return: {
'User': {
'Id': 'string',
'Username': 'string',
'EmailAddress': 'string',
'GivenName': 'string',
'Surname': 'string',
'OrganizationId': 'string',
'RootFolderId': 'string',
'RecycleBinFolderId': 'string',
'Status': 'ACTIVE'|'INACTIVE'|'PENDING',
'Type': 'USER'|'ADMIN',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'TimeZoneId': 'string',
'Locale': 'en'|'fr'|'ko'|'de'|'es'|'ja'|'ru'|'zh_CN'|'zh_TW'|'pt_BR'|'default',
'Storage': {
'StorageUtilizedInBytes': 123,
'StorageRule': {
'StorageAllocatedInBytes': 123,
'StorageType': 'UNLIMITED'|'QUOTA'
}
}
}
}
"""
pass
def deactivate_user(UserId=None):
"""
Deactivates the specified user, which revokes the user's access to Amazon WorkDocs.
See also: AWS API Documentation
:example: response = client.deactivate_user(
UserId='string'
)
:type UserId: string
:param UserId: [REQUIRED]
The ID of the user.
"""
pass
def delete_document(DocumentId=None):
"""
Permanently deletes the specified document and its associated metadata.
See also: AWS API Documentation
:example: response = client.delete_document(
DocumentId='string'
)
:type DocumentId: string
:param DocumentId: [REQUIRED]
The ID of the document.
"""
pass
def delete_folder(FolderId=None):
"""
Permanently deletes the specified folder and its contents.
See also: AWS API Documentation
:example: response = client.delete_folder(
FolderId='string'
)
:type FolderId: string
:param FolderId: [REQUIRED]
The ID of the folder.
"""
pass
def delete_folder_contents(FolderId=None):
"""
Deletes the contents of the specified folder.
See also: AWS API Documentation
:example: response = client.delete_folder_contents(
FolderId='string'
)
:type FolderId: string
:param FolderId: [REQUIRED]
The ID of the folder.
"""
pass
def delete_notification_subscription(SubscriptionId=None, OrganizationId=None):
"""
Deletes the specified subscription from the specified organization.
See also: AWS API Documentation
:example: response = client.delete_notification_subscription(
SubscriptionId='string',
OrganizationId='string'
)
:type SubscriptionId: string
:param SubscriptionId: [REQUIRED]
The ID of the subscription.
:type OrganizationId: string
:param OrganizationId: [REQUIRED]
The ID of the organization.
"""
pass
def delete_user(UserId=None):
"""
Deletes the specified user from a Simple AD or Microsoft AD directory.
See also: AWS API Documentation
:example: response = client.delete_user(
UserId='string'
)
:type UserId: string
:param UserId: [REQUIRED]
The ID of the user.
"""
pass
def describe_document_versions(DocumentId=None, Marker=None, Limit=None, Include=None, Fields=None):
"""
Retrieves the document versions for the specified document.
By default, only active versions are returned.
See also: AWS API Documentation
:example: response = client.describe_document_versions(
DocumentId='string',
Marker='string',
Limit=123,
Include='string',
Fields='string'
)
:type DocumentId: string
:param DocumentId: [REQUIRED]
The ID of the document.
:type Marker: string
:param Marker: The marker for the next set of results. (You received this marker from a previous call.)
:type Limit: integer
:param Limit: The maximum number of versions to return with this call.
:type Include: string
:param Include: A comma-separated list of values. Specify 'INITIALIZED' to include incomplete versions.
:type Fields: string
:param Fields: Specify 'SOURCE' to include initialized versions and a URL for the source document.
:rtype: dict
:return: {
'DocumentVersions': [
{
'Id': 'string',
'Name': 'string',
'ContentType': 'string',
'Size': 123,
'Signature': 'string',
'Status': 'INITIALIZED'|'ACTIVE',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'ContentCreatedTimestamp': datetime(2015, 1, 1),
'ContentModifiedTimestamp': datetime(2015, 1, 1),
'CreatorId': 'string',
'Thumbnail': {
'string': 'string'
},
'Source': {
'string': 'string'
}
},
],
'Marker': 'string'
}
:returns:
(string) --
(string) --
"""
pass
def describe_folder_contents(FolderId=None, Sort=None, Order=None, Limit=None, Marker=None, Type=None, Include=None):
"""
Describes the contents of the specified folder, including its documents and sub-folders.
By default, Amazon WorkDocs returns the first 100 active document and folder metadata items. If there are more results, the response includes a marker that you can use to request the next set of results. You can also request initialized documents.
See also: AWS API Documentation
:example: response = client.describe_folder_contents(
FolderId='string',
Sort='DATE'|'NAME',
Order='ASCENDING'|'DESCENDING',
Limit=123,
Marker='string',
Type='ALL'|'DOCUMENT'|'FOLDER',
Include='string'
)
:type FolderId: string
:param FolderId: [REQUIRED]
The ID of the folder.
:type Sort: string
:param Sort: The sorting criteria.
:type Order: string
:param Order: The order for the contents of the folder.
:type Limit: integer
:param Limit: The maximum number of items to return with this call.
:type Marker: string
:param Marker: The marker for the next set of results. (You received this marker from a previous call.)
:type Type: string
:param Type: The type of items.
:type Include: string
:param Include: The contents to include. Specify 'INITIALIZED' to include initialized documents.
:rtype: dict
:return: {
'Folders': [
{
'Id': 'string',
'Name': 'string',
'CreatorId': 'string',
'ParentFolderId': 'string',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'ResourceState': 'ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED',
'Signature': 'string'
},
],
'Documents': [
{
'Id': 'string',
'CreatorId': 'string',
'ParentFolderId': 'string',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'LatestVersionMetadata': {
'Id': 'string',
'Name': 'string',
'ContentType': 'string',
'Size': 123,
'Signature': 'string',
'Status': 'INITIALIZED'|'ACTIVE',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'ContentCreatedTimestamp': datetime(2015, 1, 1),
'ContentModifiedTimestamp': datetime(2015, 1, 1),
'CreatorId': 'string',
'Thumbnail': {
'string': 'string'
},
'Source': {
'string': 'string'
}
},
'ResourceState': 'ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED'
},
],
'Marker': 'string'
}
:returns:
(string) --
(string) --
"""
pass
def describe_notification_subscriptions(OrganizationId=None, Marker=None, Limit=None):
"""
Lists the specified notification subscriptions.
See also: AWS API Documentation
:example: response = client.describe_notification_subscriptions(
OrganizationId='string',
Marker='string',
Limit=123
)
:type OrganizationId: string
:param OrganizationId: [REQUIRED]
The ID of the organization.
:type Marker: string
:param Marker: The marker for the next set of results. (You received this marker from a previous call.)
:type Limit: integer
:param Limit: The maximum number of items to return with this call.
:rtype: dict
:return: {
'Subscriptions': [
{
'SubscriptionId': 'string',
'EndPoint': 'string',
'Protocol': 'HTTPS'
},
],
'Marker': 'string'
}
"""
pass
def describe_resource_permissions(ResourceId=None, Limit=None, Marker=None):
"""
Describes the permissions of a specified resource.
See also: AWS API Documentation
:example: response = client.describe_resource_permissions(
ResourceId='string',
Limit=123,
Marker='string'
)
:type ResourceId: string
:param ResourceId: [REQUIRED]
The ID of the resource.
:type Limit: integer
:param Limit: The maximum number of items to return with this call.
:type Marker: string
:param Marker: The marker for the next set of results. (You received this marker from a previous call)
:rtype: dict
:return: {
'Principals': [
{
'Id': 'string',
'Type': 'USER'|'GROUP'|'INVITE'|'ANONYMOUS'|'ORGANIZATION',
'Roles': [
{
'Role': 'VIEWER'|'CONTRIBUTOR'|'OWNER'|'COOWNER',
'Type': 'DIRECT'|'INHERITED'
},
]
},
],
'Marker': 'string'
}
"""
pass
def describe_users(OrganizationId=None, UserIds=None, Query=None, Include=None, Order=None, Sort=None, Marker=None, Limit=None, Fields=None):
"""
Describes the specified users. You can describe all users or filter the results (for example, by status or organization).
By default, Amazon WorkDocs returns the first 24 active or pending users. If there are more results, the response includes a marker that you can use to request the next set of results.
See also: AWS API Documentation
:example: response = client.describe_users(
OrganizationId='string',
UserIds='string',
Query='string',
Include='ALL'|'ACTIVE_PENDING',
Order='ASCENDING'|'DESCENDING',
Sort='USER_NAME'|'FULL_NAME'|'STORAGE_LIMIT'|'USER_STATUS'|'STORAGE_USED',
Marker='string',
Limit=123,
Fields='string'
)
:type OrganizationId: string
:param OrganizationId: The ID of the organization.
:type UserIds: string
:param UserIds: The IDs of the users.
:type Query: string
:param Query: A query to filter users by user name.
:type Include: string
:param Include: The state of the users. Specify 'ALL' to include inactive users.
:type Order: string
:param Order: The order for the results.
:type Sort: string
:param Sort: The sorting criteria.
:type Marker: string
:param Marker: The marker for the next set of results. (You received this marker from a previous call.)
:type Limit: integer
:param Limit: The maximum number of items to return.
:type Fields: string
:param Fields: A comma-separated list of values. Specify 'STORAGE_METADATA' to include the user storage quota and utilization information.
:rtype: dict
:return: {
'Users': [
{
'Id': 'string',
'Username': 'string',
'EmailAddress': 'string',
'GivenName': 'string',
'Surname': 'string',
'OrganizationId': 'string',
'RootFolderId': 'string',
'RecycleBinFolderId': 'string',
'Status': 'ACTIVE'|'INACTIVE'|'PENDING',
'Type': 'USER'|'ADMIN',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'TimeZoneId': 'string',
'Locale': 'en'|'fr'|'ko'|'de'|'es'|'ja'|'ru'|'zh_CN'|'zh_TW'|'pt_BR'|'default',
'Storage': {
'StorageUtilizedInBytes': 123,
'StorageRule': {
'StorageAllocatedInBytes': 123,
'StorageType': 'UNLIMITED'|'QUOTA'
}
}
},
],
'TotalNumberOfUsers': 123,
'Marker': 'string'
}
"""
pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
"""
Generate a presigned url given a client, its method, and arguments
:type ClientMethod: string
:param ClientMethod: The client method to presign for
:type Params: dict
:param Params: The parameters normally passed to
ClientMethod.
:type ExpiresIn: int
:param ExpiresIn: The number of seconds the presigned url is valid
for. By default it expires in an hour (3600 seconds)
:type HttpMethod: string
:param HttpMethod: The http method to use on the generated url. By
default, the http method is whatever is used in the method's model.
"""
pass
def get_document(DocumentId=None):
"""
Retrieves the specified document object.
See also: AWS API Documentation
:example: response = client.get_document(
DocumentId='string'
)
:type DocumentId: string
:param DocumentId: [REQUIRED]
The ID of the document object.
:rtype: dict
:return: {
'Metadata': {
'Id': 'string',
'CreatorId': 'string',
'ParentFolderId': 'string',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'LatestVersionMetadata': {
'Id': 'string',
'Name': 'string',
'ContentType': 'string',
'Size': 123,
'Signature': 'string',
'Status': 'INITIALIZED'|'ACTIVE',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'ContentCreatedTimestamp': datetime(2015, 1, 1),
'ContentModifiedTimestamp': datetime(2015, 1, 1),
'CreatorId': 'string',
'Thumbnail': {
'string': 'string'
},
'Source': {
'string': 'string'
}
},
'ResourceState': 'ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED'
}
}
:returns:
(string) --
(string) --
"""
pass
def get_document_path(DocumentId=None, Limit=None, Fields=None, Marker=None):
"""
Retrieves the path information (the hierarchy from the root folder) for the requested document.
By default, Amazon WorkDocs returns a maximum of 100 levels upwards from the requested document and only includes the IDs of the parent folders in the path. You can limit the maximum number of levels. You can also request the names of the parent folders.
See also: AWS API Documentation
:example: response = client.get_document_path(
DocumentId='string',
Limit=123,
Fields='string',
Marker='string'
)
:type DocumentId: string
:param DocumentId: [REQUIRED]
The ID of the document.
:type Limit: integer
:param Limit: The maximum number of levels in the hierarchy to return.
:type Fields: string
:param Fields: A comma-separated list of values. Specify 'NAME' to include the names of the parent folders.
:type Marker: string
:param Marker: This value is not supported.
:rtype: dict
:return: {
'Path': {
'Components': [
{
'Id': 'string',
'Name': 'string'
},
]
}
}
"""
pass
def get_document_version(DocumentId=None, VersionId=None, Fields=None):
"""
Retrieves version metadata for the specified document.
See also: AWS API Documentation
:example: response = client.get_document_version(
DocumentId='string',
VersionId='string',
Fields='string'
)
:type DocumentId: string
:param DocumentId: [REQUIRED]
The ID of the document.
:type VersionId: string
:param VersionId: [REQUIRED]
The version ID of the document.
:type Fields: string
:param Fields: A comma-separated list of values. Specify 'SOURCE' to include a URL for the source document.
:rtype: dict
:return: {
'Metadata': {
'Id': 'string',
'Name': 'string',
'ContentType': 'string',
'Size': 123,
'Signature': 'string',
'Status': 'INITIALIZED'|'ACTIVE',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'ContentCreatedTimestamp': datetime(2015, 1, 1),
'ContentModifiedTimestamp': datetime(2015, 1, 1),
'CreatorId': 'string',
'Thumbnail': {
'string': 'string'
},
'Source': {
'string': 'string'
}
}
}
:returns:
(string) --
(string) --
"""
pass
def get_folder(FolderId=None):
"""
Retrieves the metadata of the specified folder.
See also: AWS API Documentation
:example: response = client.get_folder(
FolderId='string'
)
:type FolderId: string
:param FolderId: [REQUIRED]
The ID of the folder.
:rtype: dict
:return: {
'Metadata': {
'Id': 'string',
'Name': 'string',
'CreatorId': 'string',
'ParentFolderId': 'string',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'ResourceState': 'ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED',
'Signature': 'string'
}
}
"""
pass
def get_folder_path(FolderId=None, Limit=None, Fields=None, Marker=None):
"""
Retrieves the path information (the hierarchy from the root folder) for the specified folder.
By default, Amazon WorkDocs returns a maximum of 100 levels upwards from the requested folder and only includes the IDs of the parent folders in the path. You can limit the maximum number of levels. You can also request the parent folder names.
See also: AWS API Documentation
:example: response = client.get_folder_path(
FolderId='string',
Limit=123,
Fields='string',
Marker='string'
)
:type FolderId: string
:param FolderId: [REQUIRED]
The ID of the folder.
:type Limit: integer
:param Limit: The maximum number of levels in the hierarchy to return.
:type Fields: string
:param Fields: A comma-separated list of values. Specify 'NAME' to include the names of the parent folders.
:type Marker: string
:param Marker: This value is not supported.
:rtype: dict
:return: {
'Path': {
'Components': [
{
'Id': 'string',
'Name': 'string'
},
]
}
}
"""
pass
def get_paginator(operation_name=None):
"""
Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is create_foo, and you'd normally invoke the
operation as client.create_foo(**kwargs), if the
create_foo operation can be paginated, you can use the
call client.get_paginator('create_foo').
:rtype: L{botocore.paginate.Paginator}
"""
pass
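def _example_get_paginator(client):
    # Hedged sketch, not part of the generated stub: iterating a paginated
    # operation. 'describe_users' is illustrative; client.can_paginate() checks
    # that the operation actually supports pagination before requesting one.
    if client.can_paginate('describe_users'):
        paginator = client.get_paginator('describe_users')
        for page in paginator.paginate():
            for user in page.get('Users', []):
                print(user['Id'])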
def get_waiter():
"""
"""
pass
def initiate_document_version_upload(Id=None, Name=None, ContentCreatedTimestamp=None, ContentModifiedTimestamp=None, ContentType=None, DocumentSizeInBytes=None, ParentFolderId=None):
"""
Creates a new document object and version object.
The client specifies the parent folder ID and name of the document to upload. The ID is optionally specified when creating a new version of an existing document. This is the first step to upload a document. Next, upload the document to the URL returned from the call, and then call UpdateDocumentVersion .
To cancel the document upload, call AbortDocumentVersionUpload .
See also: AWS API Documentation
:example: response = client.initiate_document_version_upload(
Id='string',
Name='string',
ContentCreatedTimestamp=datetime(2015, 1, 1),
ContentModifiedTimestamp=datetime(2015, 1, 1),
ContentType='string',
DocumentSizeInBytes=123,
ParentFolderId='string'
)
:type Id: string
:param Id: The ID of the document.
:type Name: string
:param Name: The name of the document.
:type ContentCreatedTimestamp: datetime
:param ContentCreatedTimestamp: The time stamp when the content of the document was originally created.
:type ContentModifiedTimestamp: datetime
:param ContentModifiedTimestamp: The time stamp when the content of the document was modified.
:type ContentType: string
:param ContentType: The content type of the document.
:type DocumentSizeInBytes: integer
:param DocumentSizeInBytes: The size of the document, in bytes.
:type ParentFolderId: string
:param ParentFolderId: [REQUIRED]
The ID of the parent folder.
:rtype: dict
:return: {
'Metadata': {
'Id': 'string',
'CreatorId': 'string',
'ParentFolderId': 'string',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'LatestVersionMetadata': {
'Id': 'string',
'Name': 'string',
'ContentType': 'string',
'Size': 123,
'Signature': 'string',
'Status': 'INITIALIZED'|'ACTIVE',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'ContentCreatedTimestamp': datetime(2015, 1, 1),
'ContentModifiedTimestamp': datetime(2015, 1, 1),
'CreatorId': 'string',
'Thumbnail': {
'string': 'string'
},
'Source': {
'string': 'string'
}
},
'ResourceState': 'ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED'
},
'UploadMetadata': {
'UploadUrl': 'string',
'SignedHeaders': {
'string': 'string'
}
}
}
:returns:
(string) --
(string) --
"""
pass
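def _example_initiate_and_complete_upload(client, parent_folder_id, path):
    # Hedged sketch, not part of the generated stub, of the upload flow described
    # above: initiate the version, PUT the file bytes to the returned URL, then
    # mark the version ACTIVE. `requests` is an assumed third-party dependency and
    # the content type is illustrative.
    import os
    import requests
    response = client.initiate_document_version_upload(
        Name=os.path.basename(path),
        ContentType='application/octet-stream',
        ParentFolderId=parent_folder_id,
    )
    upload = response['UploadMetadata']
    with open(path, 'rb') as fh:
        requests.put(upload['UploadUrl'], data=fh, headers=upload['SignedHeaders'])
    client.update_document_version(
        DocumentId=response['Metadata']['Id'],
        VersionId=response['Metadata']['LatestVersionMetadata']['Id'],
        VersionStatus='ACTIVE',
    )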
def remove_all_resource_permissions(ResourceId=None):
"""
Removes all the permissions from the specified resource.
See also: AWS API Documentation
:example: response = client.remove_all_resource_permissions(
ResourceId='string'
)
:type ResourceId: string
:param ResourceId: [REQUIRED]
The ID of the resource.
"""
pass
def remove_resource_permission(ResourceId=None, PrincipalId=None, PrincipalType=None):
"""
Removes the permission for the specified principal from the specified resource.
See also: AWS API Documentation
:example: response = client.remove_resource_permission(
ResourceId='string',
PrincipalId='string',
PrincipalType='USER'|'GROUP'|'INVITE'|'ANONYMOUS'|'ORGANIZATION'
)
:type ResourceId: string
:param ResourceId: [REQUIRED]
The ID of the resource.
:type PrincipalId: string
:param PrincipalId: [REQUIRED]
The principal ID of the resource.
:type PrincipalType: string
:param PrincipalType: The principal type of the resource.
"""
pass
def update_document(DocumentId=None, Name=None, ParentFolderId=None, ResourceState=None):
"""
Updates the specified attributes of the specified document. The user must have access to both the document and its parent folder, if applicable.
See also: AWS API Documentation
:example: response = client.update_document(
DocumentId='string',
Name='string',
ParentFolderId='string',
ResourceState='ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED'
)
:type DocumentId: string
:param DocumentId: [REQUIRED]
The ID of the document.
:type Name: string
:param Name: The name of the document.
:type ParentFolderId: string
:param ParentFolderId: The ID of the parent folder.
:type ResourceState: string
:param ResourceState: The resource state of the document. Note that only ACTIVE and RECYCLED are supported.
"""
pass
def update_document_version(DocumentId=None, VersionId=None, VersionStatus=None):
"""
Changes the status of the document version to ACTIVE.
Amazon WorkDocs also sets its document container to ACTIVE. This is the last step in a document upload, after the client uploads the document to an S3-presigned URL returned by InitiateDocumentVersionUpload .
See also: AWS API Documentation
:example: response = client.update_document_version(
DocumentId='string',
VersionId='string',
VersionStatus='ACTIVE'
)
:type DocumentId: string
:param DocumentId: [REQUIRED]
The ID of the document.
:type VersionId: string
:param VersionId: [REQUIRED]
The version ID of the document.
:type VersionStatus: string
:param VersionStatus: The status of the version.
"""
pass
def update_folder(FolderId=None, Name=None, ParentFolderId=None, ResourceState=None):
"""
Updates the specified attributes of the specified folder. The user must have access to both the folder and its parent folder, if applicable.
See also: AWS API Documentation
:example: response = client.update_folder(
FolderId='string',
Name='string',
ParentFolderId='string',
ResourceState='ACTIVE'|'RESTORING'|'RECYCLING'|'RECYCLED'
)
:type FolderId: string
:param FolderId: [REQUIRED]
The ID of the folder.
:type Name: string
:param Name: The name of the folder.
:type ParentFolderId: string
:param ParentFolderId: The ID of the parent folder.
:type ResourceState: string
:param ResourceState: The resource state of the folder. Note that only ACTIVE and RECYCLED are accepted values from the API.
"""
pass
def update_user(UserId=None, GivenName=None, Surname=None, Type=None, StorageRule=None, TimeZoneId=None, Locale=None):
"""
Updates the specified attributes of the specified user, and grants or revokes administrative privileges to the Amazon WorkDocs site.
See also: AWS API Documentation
:example: response = client.update_user(
UserId='string',
GivenName='string',
Surname='string',
Type='USER'|'ADMIN',
StorageRule={
'StorageAllocatedInBytes': 123,
'StorageType': 'UNLIMITED'|'QUOTA'
},
TimeZoneId='string',
Locale='en'|'fr'|'ko'|'de'|'es'|'ja'|'ru'|'zh_CN'|'zh_TW'|'pt_BR'|'default'
)
:type UserId: string
:param UserId: [REQUIRED]
The ID of the user.
:type GivenName: string
:param GivenName: The given name of the user.
:type Surname: string
:param Surname: The surname of the user.
:type Type: string
:param Type: The type of the user.
:type StorageRule: dict
:param StorageRule: The amount of storage for the user.
StorageAllocatedInBytes (integer) --The amount of storage allocated, in bytes.
StorageType (string) --The type of storage.
:type TimeZoneId: string
:param TimeZoneId: The time zone ID of the user.
:type Locale: string
:param Locale: The locale of the user.
:rtype: dict
:return: {
'User': {
'Id': 'string',
'Username': 'string',
'EmailAddress': 'string',
'GivenName': 'string',
'Surname': 'string',
'OrganizationId': 'string',
'RootFolderId': 'string',
'RecycleBinFolderId': 'string',
'Status': 'ACTIVE'|'INACTIVE'|'PENDING',
'Type': 'USER'|'ADMIN',
'CreatedTimestamp': datetime(2015, 1, 1),
'ModifiedTimestamp': datetime(2015, 1, 1),
'TimeZoneId': 'string',
'Locale': 'en'|'fr'|'ko'|'de'|'es'|'ja'|'ru'|'zh_CN'|'zh_TW'|'pt_BR'|'default',
'Storage': {
'StorageUtilizedInBytes': 123,
'StorageRule': {
'StorageAllocatedInBytes': 123,
'StorageType': 'UNLIMITED'|'QUOTA'
}
}
}
}
"""
pass
| 29.143895
| 310
| 0.573039
| 3,993
| 40,102
| 5.72026
| 0.109692
| 0.040935
| 0.023904
| 0.025743
| 0.664069
| 0.617924
| 0.587978
| 0.574843
| 0.550983
| 0.50834
| 0
| 0.012775
| 0.328512
| 40,102
| 1,375
| 311
| 29.165091
| 0.83545
| 0.808588
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.515152
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 3
|
e4423151d9e155eac596c2c27348cae0215b843a
| 983
|
py
|
Python
|
binding/python/ddls/feeder/feeder.py
|
huzelin/ddls
|
3333a669c59ce2e525945f814a54784dafc6191b
|
[
"MIT"
] | 3
|
2019-01-03T07:34:01.000Z
|
2020-02-13T19:53:35.000Z
|
binding/python/ddls/feeder/feeder.py
|
huzelin/ddls
|
3333a669c59ce2e525945f814a54784dafc6191b
|
[
"MIT"
] | null | null | null |
binding/python/ddls/feeder/feeder.py
|
huzelin/ddls
|
3333a669c59ce2e525945f814a54784dafc6191b
|
[
"MIT"
] | 1
|
2020-05-06T11:08:07.000Z
|
2020-05-06T11:08:07.000Z
|
""" Feeder for batch production"""
from __future__ import absolute_import
import ctypes
from ddls.base import check_call, LIB, c_str, c_array
from ddls.feeder.batch_iterator import BatchIterator
class Feeder(object):
""" The feeder
"""
def __init__(self):
""" create a new Tensor instance
"""
pass
def start(self, thread_num):
""" Start feeder,
Note: now must called after schedule, for dynamic scheduleing should fix it.
"""
check_call(LIB.HPPS_FeederStart(thread_num))
def schedule(self, plan, max_queue_size = 1):
""" Schedule the plan
"""
out = ctypes.c_void_p()
check_call(LIB.HPPS_FeederSchedule(plan.handle,
max_queue_size,
ctypes.byref(out)))
return BatchIterator(out)
def stop(self):
""" Stop feeder
"""
check_call(LIB.HPPS_FeederStop())
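def _example_feeder_usage(plan):
    # Hedged usage sketch, not part of the ddls source: drive the Feeder above with
    # an externally built scheduling plan (`plan` is assumed to expose `.handle`).
    feeder = Feeder()
    batch_iter = feeder.schedule(plan, max_queue_size=2)
    feeder.start(thread_num=4)  # start() must currently be called after schedule()
    # ... pull batches from batch_iter as needed ...
    feeder.stop()
    return batch_iter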
| 27.305556
| 84
| 0.580875
| 111
| 983
| 4.900901
| 0.54955
| 0.066176
| 0.088235
| 0.088235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001508
| 0.325534
| 983
| 35
| 85
| 28.085714
| 0.819005
| 0.223805
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.235294
| false
| 0.058824
| 0.235294
| 0
| 0.588235
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 3
|
e4751dd89498b1da7109ee5f07994f5fbd04447a
| 95
|
py
|
Python
|
vulture/whitelists/logging_whitelist.py
|
kianmeng/vulture
|
b8cbc44dac89b2a96f6da7033424f52525d6f574
|
[
"MIT"
] | 2,081
|
2017-03-06T14:45:21.000Z
|
2022-03-31T13:29:34.000Z
|
vulture/whitelists/logging_whitelist.py
|
kianmeng/vulture
|
b8cbc44dac89b2a96f6da7033424f52525d6f574
|
[
"MIT"
] | 248
|
2017-03-06T12:13:37.000Z
|
2022-03-15T11:21:27.000Z
|
vulture/whitelists/logging_whitelist.py
|
kianmeng/vulture
|
b8cbc44dac89b2a96f6da7033424f52525d6f574
|
[
"MIT"
] | 111
|
2017-03-06T20:48:04.000Z
|
2022-03-17T09:49:32.000Z
|
import logging
logging.Filter.filter
logging.getLogger().propagate
logging.StreamHandler.emit
| 15.833333
| 29
| 0.852632
| 11
| 95
| 7.363636
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063158
| 95
| 5
| 30
| 19
| 0.910112
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
e48fad25c05c483e3b144a00ff76a128d96f4a18
| 89
|
py
|
Python
|
colossalai/utils/commons/__init__.py
|
mrriteshranjan/ColossalAI
|
0d057a1bae67b915a385be7edab7da83413cb645
|
[
"Apache-2.0"
] | null | null | null |
colossalai/utils/commons/__init__.py
|
mrriteshranjan/ColossalAI
|
0d057a1bae67b915a385be7edab7da83413cb645
|
[
"Apache-2.0"
] | null | null | null |
colossalai/utils/commons/__init__.py
|
mrriteshranjan/ColossalAI
|
0d057a1bae67b915a385be7edab7da83413cb645
|
[
"Apache-2.0"
] | null | null | null |
from .bucket_tensor_copy import BucketizedTensorCopy
__all__ = ['BucketizedTensorCopy']
| 22.25
| 52
| 0.842697
| 8
| 89
| 8.625
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089888
| 89
| 3
| 53
| 29.666667
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0.224719
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
e494dbf6ede35cd65a3c40c381a319f33cf3e78d
| 2,563
|
py
|
Python
|
app/models.py
|
MilanMathew/machine_test_focaloid
|
fa179e655c531825167e97aed4e2d6affea9c736
|
[
"MIT"
] | null | null | null |
app/models.py
|
MilanMathew/machine_test_focaloid
|
fa179e655c531825167e97aed4e2d6affea9c736
|
[
"MIT"
] | null | null | null |
app/models.py
|
MilanMathew/machine_test_focaloid
|
fa179e655c531825167e97aed4e2d6affea9c736
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from app import db
class City(db.Model):
__tablename__ = 'Cities'
__table_args__ = {'sqlite_autoincrement': True}
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50), index=True)
class Venue(db.Model):
__tablename__ = 'Venues'
__table_args__ = {'sqlite_autoincrement': True}
id = db.Column(db.Integer, primary_key=True)
city_id = db.Column(db.Integer, db.ForeignKey('Cities.id'))
name = db.Column(db.String(100), index=True)
class Team(db.Model):
__tablename__ = 'Teams'
__table_args__ = {'sqlite_autoincrement': True}
id = db.Column(db.Integer, primary_key=True)
# venue_id = db.Column(db.Integer, db.ForeignKey('Venues.id'))
name = db.Column(db.String(100), index=True, unique=True)
# squad = db.relationship('Player', backref='team', lazy='dynamic')
def __repr__(self):
return '<Team {}>'.format(self.name)
class Player(db.Model):
__tablename__ = 'Players'
__table_args__ = {'sqlite_autoincrement': True}
id = db.Column(db.Integer, primary_key=True)
team_id = db.Column(db.Integer, db.ForeignKey('Teams.id'))
name = db.Column(db.String(100), index=True)
def __repr__(self):
return '<Player {}>'.format(self.name)
class Official(db.Model):
__tablename__ = 'Officials'
__table_args__ = {'sqlite_autoincrement': True}
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(100), index=True)
class Match(db.Model):
__tablename__ = 'Matches'
__table_args__ = {'sqlite_autoincrement': True}
id = db.Column(db.Integer, primary_key=True)
city = db.Column(db.Integer, db.ForeignKey('Cities.id'))
venue = db.Column(db.Integer, db.ForeignKey('Venues.id'))
umpire1 = db.Column(db.Integer, db.ForeignKey('Officials.id'))
umpire2 = db.Column(db.Integer, db.ForeignKey('Officials.id'))
team1 = db.Column(db.Integer, db.ForeignKey('Teams.id'))
team2 = db.Column(db.Integer, db.ForeignKey('Teams.id'))
toss_winner = db.Column(db.Integer, db.ForeignKey('Teams.id'))
winner = db.Column(db.Integer, db.ForeignKey('Teams.id'))
player_of_match = db.Column(db.String(100), index=True)
toss_decision = db.Column(db.String(10), index=True)
result = db.Column(db.String(10), index=True)
win_by_runs = db.Column(db.Integer, index=True)
win_by_wickets = db.Column(db.Integer, index=True)
season = db.Column(db.Integer, index=True)
date = db.Column(db.DateTime)
dl_applied = db.Column(db.Integer, index=True)
| 37.144928
| 71
| 0.684354
| 356
| 2,563
| 4.685393
| 0.182584
| 0.143885
| 0.179856
| 0.214029
| 0.70024
| 0.70024
| 0.63789
| 0.585132
| 0.38729
| 0.26259
| 0
| 0.011601
| 0.159188
| 2,563
| 69
| 72
| 37.144928
| 0.762413
| 0.049161
| 0
| 0.320755
| 0
| 0
| 0.111294
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037736
| false
| 0
| 0.037736
| 0.037736
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 3
|
e4ad4d1b1a19faa8dce0b003b788008a58802470
| 10,457
|
py
|
Python
|
HW10/b06502027_hw10.py
|
Pyrojewel-zard/ML
|
d8a11d893eed3e889b9af0d6aeb3ab08cd60d997
|
[
"MIT"
] | 5
|
2021-11-26T10:05:03.000Z
|
2022-03-17T11:45:46.000Z
|
HW10/b06502027_hw10.py
|
Pyrojewel-zard/ML
|
d8a11d893eed3e889b9af0d6aeb3ab08cd60d997
|
[
"MIT"
] | null | null | null |
HW10/b06502027_hw10.py
|
Pyrojewel-zard/ML
|
d8a11d893eed3e889b9af0d6aeb3ab08cd60d997
|
[
"MIT"
] | 1
|
2022-01-09T02:17:19.000Z
|
2022-01-09T02:17:19.000Z
|
# -*- coding: utf-8 -*-
"""「hw10_adversarial_attack.ipynb」的副本
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1yPa2ushzqw8FNobfonL79PHzudn0vjrN
# **Homework 10 - Adversarial Attack**
Slides: https://reurl.cc/v5kXkk
Videos:
TA: [email protected]
## Environment & Download
We make use of [pytorchcv](https://pypi.org/project/pytorchcv/) to obtain a CIFAR-10 pretrained model, so we need to set up the environment first. We also need to download the data (200 images) which we want to attack.
"""
!nvidia-smi
# set up environment
!pip install pytorchcv
# download
!gdown --id 1fHi1ko7wr80wXkXpqpqpOxuYH1mClXoX -O data.zip
# unzip
!unzip ./data.zip
!rm ./data.zip
"""## Global Settings
* $\epsilon$ is fixed to be 8. But on **Data section**, we will first apply transforms on raw pixel value (0-255 scale) **by ToTensor (to 0-1 scale)** and then **Normalize (subtract mean divide std)**. $\epsilon$ should be set to $\frac{8}{255 * std}$ during attack.
* Explanation (optional)
* Denote the first pixel of original image as $p$, and the first pixel of adversarial image as $a$.
* The $\epsilon$ constraints tell us $\left| p-a \right| <= 8$.
* ToTensor() can be seen as a function where $T(x) = x/255$.
* Normalize() can be seen as a function where $N(x) = (x-mean)/std$ where $mean$ and $std$ are constants.
* After applying ToTensor() and Normalize() on $p$ and $a$, the constraint becomes $\left| N(T(p))-N(T(a)) \right| = \left| \frac{\frac{p}{255}-mean}{std}-\frac{\frac{a}{255}-mean}{std} \right| = \frac{1}{255 * std} \left| p-a \right| <= \frac{8}{255 * std}.$
* So, we should set $\epsilon$ to be $\frac{8}{255 * std}$ after ToTensor() and Normalize().
"""
import torch
import torch.nn as nn
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
batch_size = 8
# the mean and std are the calculated statistics from cifar_10 dataset
cifar_10_mean = (0.491, 0.482, 0.447) # mean for the three channels of cifar_10 images
cifar_10_std = (0.202, 0.199, 0.201) # std for the three channels of cifar_10 images
# convert mean and std to 3-dimensional tensors for future operations
mean = torch.tensor(cifar_10_mean).to(device).view(3, 1, 1)
std = torch.tensor(cifar_10_std).to(device).view(3, 1, 1)
epsilon = 8/255/std
# TODO: iterative fgsm attack
# alpha (step size) can be decided by yourself
alpha = 0.01/255/std
root = './data' # directory for storing benign images
# benign images: images which do not contain adversarial perturbations
# adversarial images: images which include adversarial perturbations
"""## Data
Construct dataset and dataloader from root directory. Note that we store the filename of each image for future usage.
"""
import os
import glob
import shutil
import numpy as np
from PIL import Image
from torchvision.transforms import transforms
from torch.utils.data import Dataset, DataLoader
transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(cifar_10_mean, cifar_10_std)
])
class AdvDataset(Dataset):
def __init__(self, data_dir, transform):
self.images = []
self.labels = []
self.names = []
'''
data_dir
├── class_dir
│ ├── class1.png
│ ├── ...
│ ├── class20.png
'''
for i, class_dir in enumerate(sorted(glob.glob(f'{data_dir}/*'))):
images = sorted(glob.glob(f'{class_dir}/*'))
self.images += images
self.labels += ([i] * len(images))
self.names += [os.path.relpath(imgs, data_dir) for imgs in images]
self.transform = transform
def __getitem__(self, idx):
image = self.transform(Image.open(self.images[idx]))
label = self.labels[idx]
return image, label
def __getname__(self):
return self.names
def __len__(self):
return len(self.images)
adv_set = AdvDataset(root, transform=transform)
adv_names = adv_set.__getname__()
adv_loader = DataLoader(adv_set, batch_size=batch_size, shuffle=False)
print(f'number of images = {adv_set.__len__()}')
"""## Utils -- Benign Images Evaluation"""
# to evaluate the performance of model on benign images
def epoch_benign(model, loader, loss_fn):
model.eval()
train_acc, train_loss = 0.0, 0.0
for x, y in loader:
x, y = x.to(device), y.to(device)
yp = model(x)
loss = loss_fn(yp, y)
train_acc += (yp.argmax(dim=1) == y).sum().item()
train_loss += loss.item() * x.shape[0]
return train_acc / len(loader.dataset), train_loss / len(loader.dataset)
"""## Utils -- Attack Algorithm"""
# perform fgsm attack
def fgsm(model, x, y, loss_fn, epsilon=epsilon):
x_adv = x.detach().clone() # initialize x_adv as original benign image x
x_adv.requires_grad = True # need to obtain gradient of x_adv, thus set required grad
loss = loss_fn(model(x_adv), y) # calculate loss
loss.backward() # calculate gradient
# fgsm: use gradient ascent on x_adv to maximize loss
x_adv = x_adv + epsilon * x_adv.grad.detach().sign()
return x_adv
# TODO: perform iterative fgsm attack
# set alpha as the step size in Global Settings section
# alpha and num_iter can be decided by yourself
def ifgsm(model, x, y, loss_fn, epsilon=epsilon, alpha=alpha, num_iter=1600):
# initialize x_adv as original benign image x
x_adv = x.detach().clone()
# loop num_iter times; in each iteration:
for i in range(num_iter):
# call fgsm with (epsilon = alpha) to obtain new x_adv
x_adv = fgsm(model, x_adv, y, loss_fn, alpha)
# clip new x_adv back to [x-epsilon, x+epsilon]
x_adv = torch.max(torch.min(x_adv,x+epsilon), x-epsilon)
return x_adv
"""## Utils -- Attack
* Recall
* ToTensor() can be seen as a function where $T(x) = x/255$.
* Normalize() can be seen as a function where $N(x) = (x-mean)/std$ where $mean$ and $std$ are constants.
* Inverse function
* Inverse Normalize() can be seen as a function where $N^{-1}(x) = x*std+mean$ where $mean$ and $std$ are constants.
* Inverse ToTensor() can be seen as a function where $T^{-1}(x) = x*255$.
* Special Noted
* ToTensor() will also convert the image from shape (height, width, channel) to shape (channel, height, width), so we also need to transpose the shape back to original shape.
* Since our dataloader samples a batch of data, what we need here is to transpose **(batch_size, channel, height, width)** back to **(batch_size, height, width, channel)** using np.transpose.
"""
# perform adversarial attack and generate adversarial examples
def gen_adv_examples(model, loader, attack, loss_fn):
model.eval()
adv_names = []
train_acc, train_loss = 0.0, 0.0
for i, (x, y) in enumerate(loader):
x, y = x.to(device), y.to(device)
x_adv = attack(model, x, y, loss_fn) # obtain adversarial examples
yp = model(x_adv)
loss = loss_fn(yp, y)
train_acc += (yp.argmax(dim=1) == y).sum().item()
train_loss += loss.item() * x.shape[0]
# store adversarial examples
adv_ex = ((x_adv) * std + mean).clamp(0, 1) # to 0-1 scale
adv_ex = (adv_ex * 255).clamp(0, 255) # 0-255 scale
adv_ex = adv_ex.detach().cpu().data.numpy().round() # round to remove decimal part
adv_ex = adv_ex.transpose((0, 2, 3, 1)) # transpose (bs, C, H, W) back to (bs, H, W, C)
adv_examples = adv_ex if i == 0 else np.r_[adv_examples, adv_ex]
return adv_examples, train_acc / len(loader.dataset), train_loss / len(loader.dataset)
# create directory which stores adversarial examples
def create_dir(data_dir, adv_dir, adv_examples, adv_names):
if os.path.exists(adv_dir) is not True:
_ = shutil.copytree(data_dir, adv_dir)
for example, name in zip(adv_examples, adv_names):
im = Image.fromarray(example.astype(np.uint8)) # image pixel value should be unsigned int
im.save(os.path.join(adv_dir, name))
"""## Model / Loss Function
Model list is available [here](https://github.com/osmr/imgclsmob/blob/master/pytorch/pytorchcv/model_provider.py). Please select models which have a _cifar10 suffix. Some of the models cannot be accessed/loaded. You can safely skip them since the TA's model will not use those kinds of models.
"""
from pytorchcv.model_provider import get_model as ptcv_get_model
model = ptcv_get_model('preresnet110_cifar10', pretrained=True).to(device)
loss_fn = nn.CrossEntropyLoss()
benign_acc, benign_loss = epoch_benign(model, adv_loader, loss_fn)
print(f'benign_acc = {benign_acc:.5f}, benign_loss = {benign_loss:.5f}')
"""## FGSM"""
adv_examples, fgsm_acc, fgsm_loss = gen_adv_examples(model, adv_loader, fgsm, loss_fn)
print(f'fgsm_acc = {fgsm_acc:.5f}, fgsm_loss = {fgsm_loss:.5f}')
create_dir(root, 'fgsm', adv_examples, adv_names)
"""## I-FGSM"""
# TODO: iterative fgsm attack
adv_examples, ifgsm_acc, ifgsm_loss = gen_adv_examples(model, adv_loader, ifgsm, loss_fn)
print(f'ifgsm_acc = {ifgsm_acc:.5f}, ifgsm_loss = {ifgsm_loss:.5f}')
create_dir(root, 'ifgsm', adv_examples, adv_names)
"""## Compress the images"""
# Commented out IPython magic to ensure Python compatibility.
# %cd fgsm
# !tar zcvf ../fgsm.tgz *
# %cd ..
# %cd ifgsm
!tar zcvf ../ifgsm_preresnet110_1600.tgz *
# %cd ..
"""## Visualization"""
import matplotlib.pyplot as plt
classes = ['airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
plt.figure(figsize=(10, 20))
cnt = 0
for i, cls_name in enumerate(classes):
path = f'{cls_name}/{cls_name}1.png'
# benign image
cnt += 1
plt.subplot(len(classes), 4, cnt)
im = Image.open(f'./data/{path}')
logit = model(transform(im).unsqueeze(0).to(device))[0]
predict = logit.argmax(-1).item()
prob = logit.softmax(-1)[predict].item()
plt.title(f'benign: {cls_name}1.png\n{classes[predict]}: {prob:.2%}')
plt.axis('off')
plt.imshow(np.array(im))
# adversarial image
cnt += 1
plt.subplot(len(classes), 4, cnt)
im = Image.open(f'./fgsm/{path}')
logit = model(transform(im).unsqueeze(0).to(device))[0]
predict = logit.argmax(-1).item()
prob = logit.softmax(-1)[predict].item()
plt.title(f'adversarial: {cls_name}1.png\n{classes[predict]}: {prob:.2%}')
plt.axis('off')
plt.imshow(np.array(im))
plt.tight_layout()
plt.show()
| 38.025455
| 286
| 0.67505
| 1,611
| 10,457
| 4.271881
| 0.247672
| 0.012787
| 0.007847
| 0.00959
| 0.221302
| 0.19907
| 0.194711
| 0.171607
| 0.161726
| 0.137169
| 0
| 0.024723
| 0.187721
| 10,457
| 275
| 287
| 38.025455
| 0.78373
| 0.155494
| 0
| 0.223881
| 1
| 0
| 0.08896
| 0.017367
| 0
| 0
| 0
| 0.010909
| 0
| 0
| null | null | 0
| 0.08209
| null | null | 0.029851
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
e4be20e88f19e786bc1af90364e0539952a470e3
| 88
|
py
|
Python
|
django_mix_admin/__init__.py
|
longminxiang/django_mix_admin
|
375cca608b3128c7f96045628bb863e9b49a8b15
|
[
"MIT"
] | null | null | null |
django_mix_admin/__init__.py
|
longminxiang/django_mix_admin
|
375cca608b3128c7f96045628bb863e9b49a8b15
|
[
"MIT"
] | null | null | null |
django_mix_admin/__init__.py
|
longminxiang/django_mix_admin
|
375cca608b3128c7f96045628bb863e9b49a8b15
|
[
"MIT"
] | null | null | null |
__version_info__ = (0, 1, 5)
__version__ = '.'.join([str(v) for v in __version_info__])
| 29.333333
| 58
| 0.681818
| 14
| 88
| 3.285714
| 0.714286
| 0.478261
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039474
| 0.136364
| 88
| 2
| 59
| 44
| 0.565789
| 0
| 0
| 0
| 0
| 0
| 0.011364
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
e4e19e9928517728d0a9a1ccaaa4cb714b4df75b
| 197
|
py
|
Python
|
Python/count-negative-numbers-in-a-sorted-matrix.py
|
kuanhungchen/leetcode-practice
|
b75e773ada60b685da1576ae5f2234b70bc27842
|
[
"CNRI-Python"
] | 1
|
2020-04-29T06:19:44.000Z
|
2020-04-29T06:19:44.000Z
|
Python/count-negative-numbers-in-a-sorted-matrix.py
|
kuanhungchen/leetcode-practice
|
b75e773ada60b685da1576ae5f2234b70bc27842
|
[
"CNRI-Python"
] | null | null | null |
Python/count-negative-numbers-in-a-sorted-matrix.py
|
kuanhungchen/leetcode-practice
|
b75e773ada60b685da1576ae5f2234b70bc27842
|
[
"CNRI-Python"
] | null | null | null |
class Solution:
def countNegatives(self, grid):
ans = 0
for row in grid:
for ele in row:
if ele < 0:
ans += 1
return ans
| 21.888889
| 35
| 0.426396
| 23
| 197
| 3.652174
| 0.652174
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0.51269
| 197
| 8
| 36
| 24.625
| 0.84375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0
| 0
| 0.375
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
e4e8ee9a0f629d72298e4d9e7b1a98a2e8e24f7f
| 368
|
py
|
Python
|
server/apps/gallery/models.py
|
arthamtrust/backend
|
d1981766297b1cf2888b37af927f69fde750a23e
|
[
"MIT"
] | null | null | null |
server/apps/gallery/models.py
|
arthamtrust/backend
|
d1981766297b1cf2888b37af927f69fde750a23e
|
[
"MIT"
] | null | null | null |
server/apps/gallery/models.py
|
arthamtrust/backend
|
d1981766297b1cf2888b37af927f69fde750a23e
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class Gallery(models.Model):
"""
Model for gallery. Cards with title and thumbnail
along with a link to google album
"""
title = models.CharField(max_length=200)
thumbnail = models.CharField(max_length=100)
url = models.URLField()
def __str__(self):
return self.title
| 21.647059
| 53
| 0.682065
| 49
| 368
| 5
| 0.693878
| 0.122449
| 0.146939
| 0.195918
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021201
| 0.230978
| 368
| 16
| 54
| 23
| 0.844523
| 0.296196
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.142857
| 0.142857
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
e4eb75089052398010f0fca752c0fa03e64af7f4
| 206
|
py
|
Python
|
trade_system/users/admin.py
|
Artsiom-Shlapakou/trade-system
|
41c7bd6779b159d1c867968f5230d4ccbc37995a
|
[
"MIT"
] | null | null | null |
trade_system/users/admin.py
|
Artsiom-Shlapakou/trade-system
|
41c7bd6779b159d1c867968f5230d4ccbc37995a
|
[
"MIT"
] | null | null | null |
trade_system/users/admin.py
|
Artsiom-Shlapakou/trade-system
|
41c7bd6779b159d1c867968f5230d4ccbc37995a
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from trade_system.users.models import Wallet
from django.contrib.auth import get_user_model
User = get_user_model()
admin.site.register(User)
admin.site.register(Wallet)
| 20.6
| 46
| 0.825243
| 32
| 206
| 5.15625
| 0.5
| 0.121212
| 0.206061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097087
| 206
| 9
| 47
| 22.888889
| 0.887097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
900c0665baf28282fa05f41faa6b983f942bdbf2
| 343
|
py
|
Python
|
crossasr/estimator.py
|
mhilmiasyrofi/CrossASRv2
|
202b9a7caadf5f8d6f115f776526960af35a73a3
|
[
"MIT"
] | 3
|
2021-05-12T02:48:06.000Z
|
2021-12-21T14:45:56.000Z
|
crossasr/estimator.py
|
mhilmiasyrofi/CrossASRv2
|
202b9a7caadf5f8d6f115f776526960af35a73a3
|
[
"MIT"
] | null | null | null |
crossasr/estimator.py
|
mhilmiasyrofi/CrossASRv2
|
202b9a7caadf5f8d6f115f776526960af35a73a3
|
[
"MIT"
] | 1
|
2021-06-14T11:15:35.000Z
|
2021-06-14T11:15:35.000Z
|
class Estimator:
def __init__(self, name:str):
self.name = name
def getName(self) -> str :
return self.name
def setName(self, name:str):
self.name = name
def fit(self, X:[str], y:[int]):
raise NotImplementedError()
def predict(self, X:[str]):
raise NotImplementedError()
| 21.4375
| 36
| 0.571429
| 41
| 343
| 4.682927
| 0.414634
| 0.208333
| 0.114583
| 0.15625
| 0.270833
| 0.270833
| 0.270833
| 0
| 0
| 0
| 0
| 0
| 0.303207
| 343
| 15
| 37
| 22.866667
| 0.803347
| 0
| 0
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0
| 0.090909
| 0.636364
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 3
|
9015d694e84b3f0cc392aae6d053e1e708a338df
| 627
|
py
|
Python
|
verifai/samplers/__init__.py
|
jst-qaml/VerifAI
|
d91bc1289d720c055a36fa0e1ad9f68b986ca1a4
|
[
"BSD-3-Clause"
] | 1
|
2020-07-27T13:32:01.000Z
|
2020-07-27T13:32:01.000Z
|
verifai/samplers/__init__.py
|
shromonag/VerifAI
|
ace214d1c3282ed5ea63ee3f52457e35f54ebb62
|
[
"BSD-3-Clause"
] | null | null | null |
verifai/samplers/__init__.py
|
shromonag/VerifAI
|
ace214d1c3282ed5ea63ee3f52457e35f54ebb62
|
[
"BSD-3-Clause"
] | null | null | null |
from .domain_sampler import SamplingError, SplitSampler
from .feature_sampler import FeatureSampler, LateFeatureSampler
from .halton import HaltonSampler
from .cross_entropy import (CrossEntropySampler, ContinuousCrossEntropySampler,
DiscreteCrossEntropySampler)
from .random_sampler import RandomSampler
from .bayesian_optimization import BayesOptSampler
from .simulated_annealing import SimulatedAnnealingSampler
# only import ScenicSampler if Scenic is installed
try:
import scenic
except ModuleNotFoundError:
pass # do not attempt to import ScenicSampler
else:
from .scenic_sampler import ScenicSampler
| 36.882353
| 79
| 0.845295
| 63
| 627
| 8.301587
| 0.619048
| 0.099426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124402
| 627
| 16
| 80
| 39.1875
| 0.952641
| 0.138756
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.071429
| 0.642857
| 0
| 0.642857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 3
|
90171f9ff5db50b3b195ffb3fb8c3f04d8941fed
| 159
|
py
|
Python
|
sb3_training/gym-iotmarket/setup.py
|
prasoonpatidar/multiagentRL-resource-sharing
|
e63ba7fc3c7ab019e9fd109cd45b739e3322152f
|
[
"MIT"
] | null | null | null |
sb3_training/gym-iotmarket/setup.py
|
prasoonpatidar/multiagentRL-resource-sharing
|
e63ba7fc3c7ab019e9fd109cd45b739e3322152f
|
[
"MIT"
] | null | null | null |
sb3_training/gym-iotmarket/setup.py
|
prasoonpatidar/multiagentRL-resource-sharing
|
e63ba7fc3c7ab019e9fd109cd45b739e3322152f
|
[
"MIT"
] | null | null | null |
from setuptools import setup
setup(name='gym_iotmarket',
version='0.0.1',
install_requires=['gym','scipy','numpy'] # And any other dependencies
)
| 26.5
| 76
| 0.685535
| 21
| 159
| 5.095238
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022556
| 0.163522
| 159
| 6
| 77
| 26.5
| 0.781955
| 0.163522
| 0
| 0
| 0
| 0
| 0.234848
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
903c8cc3beaefd54c725e04177cee3cf91f69504
| 348
|
py
|
Python
|
myproject/myapp/admin.py
|
wasit7/cs459_2018
|
78243bbc939fcc2ed7528df8c14ad75e4b78d9a2
|
[
"BSD-2-Clause"
] | null | null | null |
myproject/myapp/admin.py
|
wasit7/cs459_2018
|
78243bbc939fcc2ed7528df8c14ad75e4b78d9a2
|
[
"BSD-2-Clause"
] | null | null | null |
myproject/myapp/admin.py
|
wasit7/cs459_2018
|
78243bbc939fcc2ed7528df8c14ad75e4b78d9a2
|
[
"BSD-2-Clause"
] | null | null | null |
from django.contrib import admin
from .models import Product
class ProductAdmin(admin.ModelAdmin):
fields = ('name', 'price', 'category', 'image')
list_display = ('name', 'price', 'category', 'image')
list_filter = ('category', 'price', )
list_editable = ('price', 'category', 'image', )
admin.site.register(Product, ProductAdmin)
| 34.8
| 57
| 0.678161
| 38
| 348
| 6.131579
| 0.552632
| 0.167382
| 0.23176
| 0.188841
| 0.223176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149425
| 348
| 10
| 58
| 34.8
| 0.787162
| 0
| 0
| 0
| 0
| 0
| 0.2149
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 3
|
5f468ef647d08df9b7e435bbbbaaf01ef4277cf4
| 148
|
py
|
Python
|
src/cortexpy/test/constants.py
|
karljohanw/cortexpy
|
70dcce771136f98edb5250ad8abd2a46bda7f0a6
|
[
"Apache-2.0"
] | 2
|
2020-04-08T15:31:12.000Z
|
2020-07-01T11:04:47.000Z
|
src/cortexpy/test/constants.py
|
karljohanw/cortexpy
|
70dcce771136f98edb5250ad8abd2a46bda7f0a6
|
[
"Apache-2.0"
] | 9
|
2018-09-12T09:29:43.000Z
|
2020-03-15T09:11:25.000Z
|
src/cortexpy/test/constants.py
|
karljohanw/cortexpy
|
70dcce771136f98edb5250ad8abd2a46bda7f0a6
|
[
"Apache-2.0"
] | 1
|
2019-03-29T10:59:13.000Z
|
2019-03-29T10:59:13.000Z
|
import struct
MAX_UINT = 2 ** (struct.calcsize('I') * 8) - 1
MAX_ULONG = 2 ** (struct.calcsize('L') * 8) - 1
UINT8_T = 1
UINT32_T = 4
UINT64_T = 8
| 18.5
| 47
| 0.614865
| 27
| 148
| 3.185185
| 0.592593
| 0.162791
| 0.348837
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118644
| 0.202703
| 148
| 7
| 48
| 21.142857
| 0.610169
| 0
| 0
| 0
| 0
| 0
| 0.013514
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
5f4ba7ea00a9b4ae2bec68e16163449e185187d1
| 2,612
|
py
|
Python
|
simulation/battery/base_battery.py
|
BillMakwae/Simulation
|
8d0ec274643f23bc0e78c96e50508b60791c11d2
|
[
"MIT"
] | 8
|
2020-03-29T01:44:16.000Z
|
2022-03-26T23:15:34.000Z
|
simulation/battery/base_battery.py
|
BillMakwae/Simulation
|
8d0ec274643f23bc0e78c96e50508b60791c11d2
|
[
"MIT"
] | 60
|
2020-02-08T22:07:16.000Z
|
2022-03-26T23:51:55.000Z
|
simulation/battery/base_battery.py
|
BillMakwae/Simulation
|
8d0ec274643f23bc0e78c96e50508b60791c11d2
|
[
"MIT"
] | 1
|
2021-10-20T20:07:06.000Z
|
2021-10-20T20:07:06.000Z
|
from simulation.common import Storage
from simulation.common import BatteryEmptyError
class BaseBattery(Storage):
def __init__(self, initial_energy, max_current_capacity, max_energy_capacity,
max_voltage, min_voltage, voltage, state_of_charge):
super().__init__()
# Constants
self.max_current_capacity = max_current_capacity # max capacity of battery (Ah)
self.max_energy_capacity = max_energy_capacity # max energy inside battery (Wh)
self.max_voltage = max_voltage # maximum battery voltage (V)
self.min_voltage = min_voltage # battery cut-off voltage (V)
# Variables
self.stored_energy = initial_energy # energy inside battery (Wh)
self.state_of_charge = state_of_charge # battery state of charge
self.voltage = voltage # terminal voltage of the battery (V)
if self.state_of_charge > 0:
self.empty = False # True if the battery is empty, False otherwise
else:
self.empty = True
def update(self, tick):
raise NotImplementedError
def charge(self, energy):
# handles the possibility that adding energy exceeds the max capacity of the battery
if self.stored_energy + energy >= self.max_energy_capacity:
self.stored_energy = self.max_energy_capacity
else:
self.stored_energy += energy
def discharge(self, energy):
# in the case that the required energy is more than what the battery currently stores
if self.stored_energy - energy <= 0:
# currently the remaining energy in the battery just evaporates but this should be changed in the future
self.stored_energy = 0
self.empty = True
# TODO: consider removing exception
raise BatteryEmptyError("ERROR: Battery is empty.\n")
else:
self.stored_energy -= energy
return energy
def is_empty(self):
return self.empty
def get_stored_energy(self):
return self.stored_energy
def get_state_of_charge(self):
return self.state_of_charge
def get_output_voltage(self):
return self.voltage
def __str__(self):
return (f"Battery stored energy: {self.stored_energy:.2f}Wh\n"
f"Battery state of charge: {self.state_of_charge * 100:.1f}%\n"
f"Battery voltage: {self.voltage:.2f}V\n")
| 39.575758
| 116
| 0.61562
| 312
| 2,612
| 4.945513
| 0.272436
| 0.085548
| 0.075826
| 0.04407
| 0.201555
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006201
| 0.320827
| 2,612
| 65
| 117
| 40.184615
| 0.863585
| 0.220138
| 0
| 0.113636
| 0
| 0
| 0.086548
| 0.034619
| 0
| 0
| 0
| 0.015385
| 0
| 1
| 0.204545
| false
| 0
| 0.045455
| 0.113636
| 0.409091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 3
|
5f63c4934790515bb6fc74d4d7ecc9a70d977a36
| 646
|
py
|
Python
|
tests/test_get_image.py
|
kortizceballos/codeastro-group6
|
9f0ceb8a0fca3e619dbabe97105a3f283e59fa04
|
[
"BSD-3-Clause"
] | 1
|
2021-06-25T21:20:42.000Z
|
2021-06-25T21:20:42.000Z
|
tests/test_get_image.py
|
kortizceballos/codeastro-group6
|
9f0ceb8a0fca3e619dbabe97105a3f283e59fa04
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_get_image.py
|
kortizceballos/codeastro-group6
|
9f0ceb8a0fca3e619dbabe97105a3f283e59fa04
|
[
"BSD-3-Clause"
] | null | null | null |
from matplotlib.pyplot import get
import pyhips
from pyhips import get_image
def test_get_image():
"""
Tests the get_image() function to make sure no errors are thrown.
"""
assert get_image("Vega", frame="ICRS", survey="DSS", cmap="plasma") == 0
assert get_image("notanid", frame="ICRS", survey="DSS", cmap="plasma") == 1
assert get_image("Vega", frame="notaframe", survey="DSS", cmap="plasma") == 1
assert get_image("Vega", frame="ICRS", survey="notasurvey", cmap="plasma") == 1
assert get_image("Vega", frame="ICRS", survey="DSS", cmap="notacolormap") == 1
if __name__ == "__main__":
test_get_image()
| 35.888889
| 83
| 0.662539
| 89
| 646
| 4.595506
| 0.404494
| 0.176039
| 0.171149
| 0.176039
| 0.493888
| 0.493888
| 0.457213
| 0.457213
| 0.457213
| 0.342298
| 0
| 0.009294
| 0.167183
| 646
| 18
| 84
| 35.888889
| 0.750929
| 0.100619
| 0
| 0
| 0
| 0
| 0.201413
| 0
| 0
| 0
| 0
| 0
| 0.454545
| 1
| 0.090909
| true
| 0
| 0.272727
| 0
| 0.363636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
5f979d09341797e001c31791e45f05729f30d0c6
| 933
|
py
|
Python
|
symopt/objective.py
|
spcornelius/symopt
|
6f276ca07cc266af1cd58758a0cf413ab85f2591
|
[
"MIT"
] | null | null | null |
symopt/objective.py
|
spcornelius/symopt
|
6f276ca07cc266af1cd58758a0cf413ab85f2591
|
[
"MIT"
] | null | null | null |
symopt/objective.py
|
spcornelius/symopt
|
6f276ca07cc266af1cd58758a0cf413ab85f2591
|
[
"MIT"
] | null | null | null |
from symopt.base import SymOptExpr
import sympy as sym
class ObjectiveFunction(SymOptExpr):
""" Symbolic (non)linear optimization objective function. """
def __init__(self, obj, prob, **kwargs):
""" Symbolic (non)linear optimization objective function.
Parameters
----------
obj : `~sympy.core.expr.Expr`
Symbolic expression representing the objective function,
in terms of :py:attr:`prob.vars` and :py:attr:`prob.params`.
prob : `.OptimizationProblem`
The containing optimization problem.
**kwargs
Keyword args to pass to `.SymOptBase`.
"""
self.obj = sym.sympify(obj)
super().__init__(prob, **kwargs)
@property
def expr(self):
return self.obj
@property
def sympified(self):
return self.obj
def __repr__(self):
return f"ObjectiveFunction('{self.obj}')"
| 27.441176
| 72
| 0.608789
| 98
| 933
| 5.673469
| 0.520408
| 0.06295
| 0.061151
| 0.104317
| 0.165468
| 0.165468
| 0
| 0
| 0
| 0
| 0
| 0
| 0.276527
| 933
| 33
| 73
| 28.272727
| 0.823704
| 0.439443
| 0
| 0.285714
| 0
| 0
| 0.072261
| 0.072261
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.214286
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
5f9b8fe1beadc23d6a4c015ccb7948ee8af7a618
| 322
|
py
|
Python
|
test/test_coverage.py
|
atupilojon/-resources--pytest
|
eae62b54828bb82dc534b37d9b46b83cb6d31c03
|
[
"MIT"
] | null | null | null |
test/test_coverage.py
|
atupilojon/-resources--pytest
|
eae62b54828bb82dc534b37d9b46b83cb6d31c03
|
[
"MIT"
] | null | null | null |
test/test_coverage.py
|
atupilojon/-resources--pytest
|
eae62b54828bb82dc534b37d9b46b83cb6d31c03
|
[
"MIT"
] | null | null | null |
from pytest import mark
# if setup.py is present, the code can be installed as a library
# so that there's no need to include the path
# pip install -e .
from pytest_resources import do_lower_case
# from src.for_testing import do_lower_case
@mark.coverage
def check_lower_case():
assert do_lower_case('SomeThing') == 'something'
| 24.769231
| 57
| 0.773292
| 52
| 322
| 4.596154
| 0.711538
| 0.150628
| 0.138075
| 0.142259
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15528
| 322
| 12
| 58
| 26.833333
| 0.878676
| 0.468944
| 0
| 0
| 0
| 0
| 0.108434
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| true
| 0
| 0.4
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 3
|
5f9c87648a4e17596d684c15485c9c92d81abb57
| 304
|
py
|
Python
|
pyexlatex/models/format/hline.py
|
whoopnip/py-ex-latex
|
66f5fadc35a0bfdce5f1ccb3c80dce8885b061b6
|
[
"MIT"
] | 4
|
2020-06-08T07:17:12.000Z
|
2021-11-04T21:39:52.000Z
|
pyexlatex/models/format/hline.py
|
nickderobertis/py-ex-latex
|
66f5fadc35a0bfdce5f1ccb3c80dce8885b061b6
|
[
"MIT"
] | 24
|
2020-02-17T17:20:44.000Z
|
2021-12-20T00:10:19.000Z
|
pyexlatex/models/format/hline.py
|
nickderobertis/py-ex-latex
|
66f5fadc35a0bfdce5f1ccb3c80dce8885b061b6
|
[
"MIT"
] | null | null | null |
from pyexlatex.models.sizes.textwidth import TextWidth
from pyexlatex.models.format.rule import Rule
class HLine(Rule):
"""
Draws a horizontal line across the text width.
"""
def __init__(self, thickness: float = 0.4):
super().__init__(length=TextWidth(), thickness=thickness)
| 25.333333
| 65
| 0.710526
| 38
| 304
| 5.473684
| 0.710526
| 0.125
| 0.182692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008032
| 0.180921
| 304
| 11
| 66
| 27.636364
| 0.827309
| 0.151316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 3
|
5f9ec6c74b57542c9787a229e40967ba3e06098c
| 56
|
py
|
Python
|
NumpyUtility/__init__.py
|
PaulKGrimes/NumpyUtility
|
35607725d07952deca10d7342043db7e77756278
|
[
"MIT"
] | null | null | null |
NumpyUtility/__init__.py
|
PaulKGrimes/NumpyUtility
|
35607725d07952deca10d7342043db7e77756278
|
[
"MIT"
] | null | null | null |
NumpyUtility/__init__.py
|
PaulKGrimes/NumpyUtility
|
35607725d07952deca10d7342043db7e77756278
|
[
"MIT"
] | null | null | null |
__all__ = ["NumpyUtility"]
from .NumpyUtility import *
| 14
| 27
| 0.732143
| 5
| 56
| 7.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 56
| 3
| 28
| 18.666667
| 0.770833
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
5f9f9ecefb3439db4ca570e4a61b0846cf1331d6
| 188
|
py
|
Python
|
09-Data-Analysis/Sweetviz/ReprotViz.py
|
NguyenQuangBinh803/Python-Heritage
|
7da72b2926cefc4903086a1cab7de3a64764d648
|
[
"MIT"
] | 1
|
2021-01-10T12:06:26.000Z
|
2021-01-10T12:06:26.000Z
|
09-Data-Analysis/Sweetviz/ReprotViz.py
|
NguyenQuangBinh803/Python-Heritage
|
7da72b2926cefc4903086a1cab7de3a64764d648
|
[
"MIT"
] | null | null | null |
09-Data-Analysis/Sweetviz/ReprotViz.py
|
NguyenQuangBinh803/Python-Heritage
|
7da72b2926cefc4903086a1cab7de3a64764d648
|
[
"MIT"
] | null | null | null |
import sweetviz
import pandas as pd
if __name__ == '__main__':
df = pd.read_csv("BankChurners_clean.csv")
report = sweetviz.analyze(df, "Attrition_Flag")
report.show_html()
| 20.888889
| 51
| 0.707447
| 25
| 188
| 4.84
| 0.76
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175532
| 188
| 8
| 52
| 23.5
| 0.780645
| 0
| 0
| 0
| 0
| 0
| 0.234043
| 0.117021
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
5fa29ec1b9e32e73683aab09293ca2018836774b
| 397
|
py
|
Python
|
firldBuzzUserEntryApp/login/loginForm.py
|
sir-rasel/backend-api-integration
|
41e3d44caa6ec10382efbb482cb9d0f77bd4a5fb
|
[
"MIT"
] | 2
|
2020-12-11T12:45:34.000Z
|
2021-11-09T11:25:23.000Z
|
firldBuzzUserEntryApp/login/loginForm.py
|
sir-rasel/backend-api-integration
|
41e3d44caa6ec10382efbb482cb9d0f77bd4a5fb
|
[
"MIT"
] | null | null | null |
firldBuzzUserEntryApp/login/loginForm.py
|
sir-rasel/backend-api-integration
|
41e3d44caa6ec10382efbb482cb9d0f77bd4a5fb
|
[
"MIT"
] | null | null | null |
from django import forms
class LoginForm(forms.Form):
userName = forms.EmailField(label='User Name', max_length=55, required=True, \
widget=forms.EmailInput(attrs={'placeholder': 'Username that was sent via mail'}))
password = forms.CharField(label='Password', max_length=55, required=True, \
widget=forms.PasswordInput(attrs={'placeholder': 'Password that was sent via mail'}))
| 49.625
| 89
| 0.722922
| 49
| 397
| 5.816327
| 0.591837
| 0.063158
| 0.077193
| 0.133333
| 0.238596
| 0.238596
| 0.238596
| 0
| 0
| 0
| 0
| 0.01173
| 0.141058
| 397
| 7
| 90
| 56.714286
| 0.824047
| 0
| 0
| 0
| 0
| 0
| 0.236776
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.333333
| 0.166667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 3
|
5fafc8dcb4215c91fc9ae3f825e9c6da430bff4a
| 326
|
py
|
Python
|
software/glasgow/applet/video/__init__.py
|
electroniceel/Glasgow
|
f6d8fda1d5baec006a6c43fa3d2547a33bdee666
|
[
"Apache-2.0",
"0BSD"
] | 1,014
|
2019-10-05T16:21:43.000Z
|
2022-03-31T09:26:43.000Z
|
software/glasgow/applet/video/__init__.py
|
attie/glasgow
|
eca2cb278478d9cb9a102e6e99dfc5bd2d77a549
|
[
"Apache-2.0",
"0BSD"
] | 113
|
2019-10-06T07:49:37.000Z
|
2022-03-24T04:33:08.000Z
|
software/glasgow/applet/video/__init__.py
|
attie/glasgow
|
eca2cb278478d9cb9a102e6e99dfc5bd2d77a549
|
[
"Apache-2.0",
"0BSD"
] | 79
|
2019-10-08T07:36:03.000Z
|
2022-03-21T07:00:27.000Z
|
"""
The ``video`` taxon groups applets implementing video interfaces, that is, interfaces for periodic
transfers of 2d arrays of samples of electromagnetic wave properties.
Examples: VGA output, TFT LCD capture, TFT LCD output.
Counterexamples: SCSI scanner (use taxon ``photo``), SPI LCD output (use taxon ``display``).
"""
| 40.75
| 98
| 0.757669
| 44
| 326
| 5.613636
| 0.727273
| 0.048583
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003584
| 0.144172
| 326
| 7
| 99
| 46.571429
| 0.88172
| 0.972393
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
397163cbc30071660c1df03a91c22f9cdffa46d3
| 496
|
py
|
Python
|
helpdesk/simple/views.py
|
fratoj/helpdesk
|
302c41491f26432bd65e468f015cdb123a47bcad
|
[
"MIT"
] | null | null | null |
helpdesk/simple/views.py
|
fratoj/helpdesk
|
302c41491f26432bd65e468f015cdb123a47bcad
|
[
"MIT"
] | 4
|
2021-04-08T21:51:21.000Z
|
2021-06-10T20:21:24.000Z
|
helpdesk/simple/views.py
|
fratoj/helpdesk
|
302c41491f26432bd65e468f015cdb123a47bcad
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
import numpy as np
def index(request):
return render(request, 'simple/index.html')
def room(request, room_name):
safe = np.random.normal(size=20, loc=0, scale=1)
return render(request, 'simple/room.html', {
'room_name': room_name,
'some_thing': {
'yolo': 'fish',
'test': [1,2,3],
},
'stay': safe.tolist()
})
def question(request):
return render(request, 'simple/question.html')
| 21.565217
| 52
| 0.59879
| 63
| 496
| 4.650794
| 0.555556
| 0.122867
| 0.194539
| 0.255973
| 0.21843
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018817
| 0.25
| 496
| 22
| 53
| 22.545455
| 0.768817
| 0
| 0
| 0
| 0
| 0
| 0.177419
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1875
| false
| 0
| 0.125
| 0.125
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 3
|
39846d963efc3c25f62f763940ae6d00481112ea
| 237
|
py
|
Python
|
coffeebar/admin.py
|
viktor-yakubiv/django-coffee
|
0a7d62a53db6af48fdc852fbb4dae43a0fc2b2ef
|
[
"MIT"
] | null | null | null |
coffeebar/admin.py
|
viktor-yakubiv/django-coffee
|
0a7d62a53db6af48fdc852fbb4dae43a0fc2b2ef
|
[
"MIT"
] | null | null | null |
coffeebar/admin.py
|
viktor-yakubiv/django-coffee
|
0a7d62a53db6af48fdc852fbb4dae43a0fc2b2ef
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Account, Product, Drink, Topping, Order
admin.site.register(Account)
admin.site.register(Product)
admin.site.register(Drink)
admin.site.register(Topping)
admin.site.register(Order)
| 21.545455
| 59
| 0.805907
| 33
| 237
| 5.787879
| 0.393939
| 0.235602
| 0.445026
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084388
| 237
| 10
| 60
| 23.7
| 0.880184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.285714
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
39972511fba92d415fe55b1c71b33e08a7f6d99e
| 6,079
|
py
|
Python
|
pythorn/data_structures/queue.py
|
Gourav-KP/pythorn
|
f7130721c02292af0e23bd8bcf31d41990c0d48b
|
[
"MIT"
] | 5
|
2020-11-23T14:10:28.000Z
|
2021-05-07T16:25:38.000Z
|
pythorn/data_structures/queue.py
|
Gourav-KP/pythorn
|
f7130721c02292af0e23bd8bcf31d41990c0d48b
|
[
"MIT"
] | null | null | null |
pythorn/data_structures/queue.py
|
Gourav-KP/pythorn
|
f7130721c02292af0e23bd8bcf31d41990c0d48b
|
[
"MIT"
] | 3
|
2020-11-25T11:00:14.000Z
|
2021-10-01T12:16:30.000Z
|
"""
Author : Robin Singh
Programs List:
1.Queue
2.Circular Queue
3.Double Ended Queue
"""
import inspect
class Queue(object):
def __init__(self, length=5):
"""
:param length: pass queue length while making object otherwise default value will be 5
"""
self.items = []
self.size = 0
self.front = 0
self.rear = 0
self.limit = length
def isEmpty(self):
"""
checks the queue if its empty or not
"""
if self.items == []:
return True
else:
return False
def enqueue(self, data):
"""
inserts an element into the queue
"""
if self.size >= self.limit:
return -1
else:
self.items.append(data)
if self.front == None:
self.front = self.rear = 0
else:
self.rear = self.size
self.size = self.size+1
def dequeue(self):
"""
removes an element from the queue
"""
if self.isEmpty():
return -1
else:
self.size = self.size-1
if self.size == 0:
self.front = self.rear = 0
else:
self.rear = self.size - 1
return self.items.pop(0)
def Size(self):
"""
returns size of the queue
"""
return self.size
def display(self):
"""
displays full queue
"""
if self.items == []:
return -1
else:
print(self.items)
@staticmethod
def get_code():
"""
:return: source code
"""
return inspect.getsource(Queue)
@staticmethod
def time_complexity():
"""
:return: time complexity
"""
return " Time Complexity of enqueue: O(1) "\
" Time Complexity of dequeue: O(n)"\
" Optimizations : We can implement both enqueu and dequeue operations in O(1) time. To achive this, we can either use linked list implementaion of queue or circular implementation of queue"
class CircularQueue(object):
"""
:param length: pass queue length while making object otherwise default value will be 5
"""
def __init__(self, length=5):
"""
"""
self.items = []
self.rear = 0
self.front = 0
self.length = length
def isEmpty(self):
"""
Checks whether queue is empty or not
"""
if self.items == []:
return True
else:
return False
def isQueuefull(self):
"""
checks whether queue is full or not
"""
if len(self.items) == self.length:
return True
else:
return False
def enqueue(self, data):
"""
inserts an element into the queue
"""
if (self.isQueuefull()):
# Queue is full then return print
return print("queue is full")
elif self.isEmpty():
self.front = self.rear = 0
self.items.append(data)
else:
self.rear += 1
self.items.append(data)
def dequeue(self):
"""
removes an element from the queue
"""
if self.isEmpty():
return -1
else:
self.front += 1
return self.items.pop(0)
def display(self):
"""
displays full queue
"""
if self.items == []:
return True
else:
print(self.items)
@staticmethod
def get_code():
"""
:return: source code
"""
return inspect.getsource(CircularQueue)
@staticmethod
def time_complexity():
"""
:return: time complexity
"""
return " Time Complexity of enqueue: O(1)"\
" Time Complexity of dequeue: O(1)"
class Deque(object):
"""
:param length: pass queue length while making object otherwise default value will be 5
"""
def __init__(self, length=5):
"""
:param length: pass queue length while making object otherwise default value will be 5
"""
self.items = []
self.length = length
def isFull(self):
"""
checks whether queue is full or not
"""
if len(self.items) == self.length:
return True
else:
return False
def isEmpty(self):
"""
Checks whether queue is empty or not
"""
if self.items == []:
return True
else:
return False
def enqueue_start(self, element):
"""
inserts an element at the start of the queue
"""
if (self.isFull()):
return print("queue is full")
else:
self.items.insert(0, element)
def enqueue_end(self, ele):
"""
inserts an element at the end of the queue
"""
if (self.isFull()):
return print("queue is full")
else:
self.items.append(ele)
def dequeue_start(self):
"""
deletes an element from the start of the queue
"""
if (self.isEmpty()):
return print("empty queue..!")
else:
return self.items.pop(0)
def dequeue_end(self):
"""
deletes an element from the end of the queue
"""
if (self.isEmpty()):
return print("empty queue")
else:
return self.items.pop()
def display(self):
"""
displays full queue
"""
if self.items == []:
return True
else:
print(self.items)
@staticmethod
def get_code():
"""
:return: source code
"""
return inspect.getsource(Deque)
@staticmethod
def time_complexity():
"""
:return: time complexity
"""
return " Time Complexity of all the above operations is constant : O(1)"
| 22.853383
| 204
| 0.497779
| 655
| 6,079
| 4.58626
| 0.167939
| 0.068908
| 0.04028
| 0.037284
| 0.763648
| 0.703395
| 0.686085
| 0.66245
| 0.66245
| 0.66245
| 0
| 0.01019
| 0.402698
| 6,079
| 265
| 205
| 22.939623
| 0.81713
| 0.196907
| 0
| 0.727941
| 0
| 0.007353
| 0.10493
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.191176
| false
| 0
| 0.007353
| 0
| 0.455882
| 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
39a05a3ae20bd7b9b573cc3402d91e45b4b3aa9a
| 594
|
py
|
Python
|
samples/module_snapcheck.py
|
luislezcair/jsnapy
|
86381aa389cf19394a6165fe34bcfd95ee8a7f67
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 101
|
2016-07-04T13:18:48.000Z
|
2022-02-11T19:18:15.000Z
|
samples/module_snapcheck.py
|
luislezcair/jsnapy
|
86381aa389cf19394a6165fe34bcfd95ee8a7f67
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 187
|
2016-07-06T14:58:03.000Z
|
2022-03-15T09:19:11.000Z
|
samples/module_snapcheck.py
|
luislezcair/jsnapy
|
86381aa389cf19394a6165fe34bcfd95ee8a7f67
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 70
|
2016-07-12T15:20:58.000Z
|
2022-03-25T05:14:40.000Z
|
### performing function similar to --snapcheck option in command line ######
from jnpr.jsnapy import SnapAdmin
from pprint import pprint
from jnpr.junos import Device
js = SnapAdmin()
config_file = "/etc/jsnapy/testfiles/config_single_snapcheck.yml"
snapvalue = js.snapcheck(config_file, "snap")
for snapcheck in snapvalue:
print "\n -----------snapcheck----------"
print "Tested on", snapcheck.device
print "Final result: ", snapcheck.result
print "Total passed: ", snapcheck.no_passed
print "Total failed:", snapcheck.no_failed
pprint(dict(snapcheck.test_details))
| 33
| 76
| 0.720539
| 74
| 594
| 5.689189
| 0.540541
| 0.038005
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144781
| 594
| 17
| 77
| 34.941176
| 0.82874
| 0.109428
| 0
| 0
| 0
| 0
| 0.262548
| 0.15251
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.076923
| 0.230769
| null | null | 0.538462
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
|
0
| 3
|
39cd092c9896194e7d5884416a86b0b247f8dee4
| 486
|
py
|
Python
|
markflow/detectors/__init__.py
|
jmholla/markflow
|
1accc4a23f9c06d9ab77d6c180c586da3d9ec69b
|
[
"Apache-2.0"
] | 14
|
2020-08-14T03:09:53.000Z
|
2022-03-22T22:46:50.000Z
|
markflow/detectors/__init__.py
|
jmholla/markflow
|
1accc4a23f9c06d9ab77d6c180c586da3d9ec69b
|
[
"Apache-2.0"
] | 6
|
2020-08-19T18:13:24.000Z
|
2021-02-11T03:56:34.000Z
|
markflow/detectors/__init__.py
|
jmholla/markflow
|
1accc4a23f9c06d9ab77d6c180c586da3d9ec69b
|
[
"Apache-2.0"
] | 3
|
2020-08-13T16:40:13.000Z
|
2022-01-18T12:31:37.000Z
|
# flake8: noqa
"""
MarkFlow MarkDown Section Detection Library
This library provides the functions MarkFlow uses to split a document into its
individual text types.
"""
from .atx_heading import *
from .blank_line import *
from .block_quote import *
from .fenced_code_block import *
from .indented_code_block import *
from .link_reference_definition import *
from .list import *
from .paragraph import *
from .setext_heading import *
from .table import *
from .thematic_break import *
| 25.578947
| 79
| 0.788066
| 68
| 486
| 5.470588
| 0.602941
| 0.268817
| 0.091398
| 0.102151
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00241
| 0.146091
| 486
| 18
| 80
| 27
| 0.893976
| 0.331276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
f2d4d9817772d3d480a3be486cdd4fa4ac3b04f2
| 672
|
py
|
Python
|
src/OTLMOW/OTLModel/Classes/Infiltratievoorziening.py
|
davidvlaminck/OTLClassPython
|
71330afeb37c3ea6d9981f521ff8f4a3f8b946fc
|
[
"MIT"
] | 2
|
2022-02-01T08:58:11.000Z
|
2022-02-08T13:35:17.000Z
|
src/OTLMOW/OTLModel/Classes/Infiltratievoorziening.py
|
davidvlaminck/OTLMOW
|
71330afeb37c3ea6d9981f521ff8f4a3f8b946fc
|
[
"MIT"
] | null | null | null |
src/OTLMOW/OTLModel/Classes/Infiltratievoorziening.py
|
davidvlaminck/OTLMOW
|
71330afeb37c3ea6d9981f521ff8f4a3f8b946fc
|
[
"MIT"
] | null | null | null |
# coding=utf-8
from OTLMOW.OTLModel.Classes.Put import Put
from OTLMOW.OTLModel.Classes.PutRelatie import PutRelatie
from OTLMOW.GeometrieArtefact.VlakGeometrie import VlakGeometrie
# Generated with OTLClassCreator. To modify: extend, do not edit
class Infiltratievoorziening(Put, PutRelatie, VlakGeometrie):
    """Voorziening voor infiltratie van onvervuild water."""
    typeURI = 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#Infiltratievoorziening'
    """De URI van het object volgens https://www.w3.org/2001/XMLSchema#anyURI."""
    def __init__(self):
        Put.__init__(self)
        PutRelatie.__init__(self)
        VlakGeometrie.__init__(self)
| 37.333333
| 93
| 0.763393
| 77
| 672
| 6.454545
| 0.662338
| 0.064386
| 0.072435
| 0.100604
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010363
| 0.138393
| 672
| 17
| 94
| 39.529412
| 0.848014
| 0.188988
| 0
| 0
| 1
| 0
| 0.167028
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 3
|
f2da20f8cd9ede45ff2e1e9791b316945d38036c
| 418
|
py
|
Python
|
openwater/utils/decorator.py
|
jeradM/openwater
|
740b7e76622a1ee909b970d9e5c612a840466cec
|
[
"MIT"
] | null | null | null |
openwater/utils/decorator.py
|
jeradM/openwater
|
740b7e76622a1ee909b970d9e5c612a840466cec
|
[
"MIT"
] | null | null | null |
openwater/utils/decorator.py
|
jeradM/openwater
|
740b7e76622a1ee909b970d9e5c612a840466cec
|
[
"MIT"
] | null | null | null |
from typing import Callable
def blocking(func: Callable):
    setattr(func, "_ow_blocking", True)
    return func
def is_blocking(func: Callable):
    return getattr(func, "_ow_blocking", False) is True
def nonblocking(func: Callable) -> Callable:
    setattr(func, "_ow_nonblocking", True)
    return func
def is_nonblocking(func: Callable) -> bool:
    return getattr(func, "_ow_nonblocking", False) is True
| 20.9
| 58
| 0.717703
| 54
| 418
| 5.37037
| 0.296296
| 0.165517
| 0.137931
| 0.144828
| 0.131034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177033
| 418
| 19
| 59
| 22
| 0.843023
| 0
| 0
| 0.181818
| 0
| 0
| 0.129187
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.363636
| false
| 0
| 0.090909
| 0.181818
| 0.818182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
f2e1fc7cc5cf4031b844d0facd03421c1cb64cd2
| 15,633
|
py
|
Python
|
ProyectoFinal.py
|
T0N1R/Recommendation-System-python-neo4J
|
09dd1bbefa7e436a1aeedf9ccc9160719ec3a353
|
[
"MIT"
] | null | null | null |
ProyectoFinal.py
|
T0N1R/Recommendation-System-python-neo4J
|
09dd1bbefa7e436a1aeedf9ccc9160719ec3a353
|
[
"MIT"
] | null | null | null |
ProyectoFinal.py
|
T0N1R/Recommendation-System-python-neo4J
|
09dd1bbefa7e436a1aeedf9ccc9160719ec3a353
|
[
"MIT"
] | null | null | null |
# -*- coding: cp1252 -*-
# -*- coding: utf-8 -*-
"""
Algoritmos y Estructuras de Datos
Proyecto Final
Antonio Reyes #17273
Esteban Cabrera #17781
Miguel #17102
"""
import random
import xlrd
file_location = "C:/Users/Antonio/Desktop/Recommendation-System-python-neo4J-master/Database.xlsx"
workbook = xlrd.open_workbook(file_location)
sheet = workbook.sheet_by_index(0)
from neo4jrestclient.client import GraphDatabase
db = GraphDatabase("http://localhost:7474",username="neo4j", password="1111")
dataB = db.labels.create("Database")
gen = db.labels.create("Genero")
#se crea un diccionario (como vimos en hashmaps)
database = {}
#donde se guardan los generos de las series que ya se vieron
historial = []
#en el for se puede poner sheet.nrows para imprimir todo
def add_Excel():
lista_gen = []
for x in range(sheet.nrows):
name = sheet.cell_value(x,0)
gen1 = sheet.cell_value(x,1)
gen2 = sheet.cell_value(x,2)
gen3 = sheet.cell_value(x,3)
lista_gen = []
lista_gen.append(gen1)
lista_gen.append(gen2)
lista_gen.append(gen3)
lista_gen.sort()
gen1 = lista_gen[0]
gen2 = lista_gen[1]
gen3 = lista_gen[2]
generos = []
generos.append(gen1)
generos.append(gen2)
generos.append(gen3)
database[name] = generos
unidad = db.nodes.create(nombre=name, genero1=gen1, genero2=gen2, genero3=gen3)
dataB.add(unidad)
try:
unidad.relationships.create("contains", gen.get(genero=gen1)[0])
gen.get(genero=gen1)[0].relationships.create("contains", unidad)
except Exception:
genNode = db.nodes.create(genero=gen1)
gen.add(genNode)
unidad.relationships.create("contains", gen.get(genero=gen1)[0])
gen.get(genero=gen1)[0].relationships.create("contains", unidad)
try:
unidad.relationships.create("contains", gen.get(genero=gen2)[0])
gen.get(genero=gen2)[0].relationships.create("contains", unidad)
except Exception:
genNode = db.nodes.create(genero=gen2)
gen.add(genNode)
unidad.relationships.create("contains", gen.get(genero=gen2)[0])
gen.get(genero=gen2)[0].relationships.create("contains", unidad)
try:
unidad.relationships.create("contains", gen.get(genero=gen3)[0])
gen.get(genero=gen3)[0].relationships.create("contains", unidad)
except Exception:
genNode = db.nodes.create(genero=gen3)
gen.add(genNode)
unidad.relationships.create("contains", gen.get(genero=gen3)[0])
gen.get(genero=gen3)[0].relationships.create("contains", unidad)
def add_database():
listaOrden = []
name = raw_input("Insert name: ")
gen1 = raw_input("Insert genre1 ")
gen2 = raw_input("Insert genre2: ")
gen3 = raw_input("Insert genre3: ")
listaOrden.append(gen1)
listaOrden.append(gen2)
listaOrden.append(gen3)
listaOrden.sort()
gen1 = listaOrden[0]
gen2 = listaOrden[1]
gen3 = listaOrden[2]
unidad = db.nodes.create(nombre=name, genero1=gen1, genero2=gen2, genero3=gen3)
dataB.add(unidad)
try:
unidad.relationships.create("contains", gen.get(genero=gen1)[0])
gen.get(genero=gen1)[0].relationships.create("contains", unidad)
except Exception:
genNode = db.nodes.create(genero=gen1)
gen.add(genNode)
unidad.relationships.create("contains", gen.get(genero=gen1)[0])
gen.get(genero=gen1)[0].relationships.create("contains", unidad)
try:
unidad.relationships.create("contains", gen.get(genero=gen2)[0])
gen.get(genero=gen2)[0].relationships.create("contains", unidad)
except Exception:
genNode = db.nodes.create(genero=gen2)
gen.add(genNode)
unidad.relationships.create("contains", gen.get(genero=gen2)[0])
gen.get(genero=gen2)[0].relationships.create("contains", unidad)
try:
unidad.relationships.create("contains", gen.get(genero=gen3)[0])
gen.get(genero=gen3)[0].relationships.create("contains", unidad)
except Exception:
genNode = db.nodes.create(genero=gen3)
gen.add(genNode)
unidad.relationships.create("contains", gen.get(genero=gen3)[0])
gen.get(genero=gen3)[0].relationships.create("contains", unidad)
database[name] = [gen1,gen2,gen3]
def watch():
name = raw_input("Insert name: ")
try:
query = "MATCH (n:Database) WHERE n.nombre='"+name+"' RETURN n.genero1, n.genero2, n.genero3"
results = db.query(query, data_contents=True)
a = results.rows
for x in a:
historial.append(x[0])
historial.append(x[1])
historial.append(x[2])
except Exception:
print("The movie or TV show you were looking for is not in the database, you can add it by going to option 1")
popular_topics(name)
#se utiliza el código mostrado en este link para mostrar los generos que se repiten más veces
#https://stackoverflow.com/questions/3594514/how-to-find-most-common-elements-of-a-list
def popular_topics(name):
nombre = name
#diccionario que determinará cuales son los 5 generos más vistos
top_5 = []
#por cada genero en la lista....
word_counter = {}
for word in historial:
if word in word_counter:
word_counter[word] += 1
else:
word_counter[word] = 1
popular_words = sorted(word_counter, key = word_counter.get, reverse = True)
top_5 = popular_words[:5]
#se ordenan los generos en orden alfabetico
lista = []
print "Most watched genres: "
for x in top_5:
lista.append(x)
print x
print "We recommend: "
print "-----------------"
print "-----------------"
try:
query = "match (n:Database{nombre:'"+nombre+"'})-[:contains*1..3]->(a:Database{genero1:'"+top_5[0]+"'}) return collect(distinct a.nombre)"
#query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
results = db.query(query, data_contents=True)
#print results
a = results.rows
#print len(a[0][0])
b = []
print a[0][0][0]
for x in a[0][0]:
if x not in b:
b.append(x)
valor = random.sample(range(0, len(b)+1), 3)
print b[valor[0]]
print b[valor[1]]
print b[valor[2]]
except Exception:
pass
try:
query = "match (n:Database{nombre:'"+nombre+"'})-[:contains*1..3]->(a:Database{genero2:'"+top_5[0]+"'}) return collect(distinct a.nombre)"
#query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
results = db.query(query, data_contents=True)
#print results
a = results.rows
#print len(a[0][0])
b = []
print a[0][0][0]
for x in a[0][0]:
if x not in b:
b.append(x)
valor = random.sample(range(0, len(b)+1), 3)
print b[valor[0]]
print b[valor[1]]
print b[valor[2]]
except Exception:
pass
try:
query = "match (n:Database{nombre:'"+nombre+"'})-[:contains*1..3]->(a:Database{genero3:'"+top_5[0]+"'}) return collect(distinct a.nombre)"
#query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
results = db.query(query, data_contents=True)
#print results
a = results.rows
#print len(a[0][0])
b = []
print a[0][0][0]
for x in a[0][0]:
if x not in b:
b.append(x)
valor = random.sample(range(0, len(b)+1), 3)
print b[valor[0]]
print b[valor[1]]
print b[valor[2]]
except Exception:
pass
try:
query = "match (n:Database{nombre:'"+nombre+"'})-[:contains*1..3]->(a:Database{genero1:'"+top_5[1]+"'}) return collect(distinct a.nombre)"
#query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
results = db.query(query, data_contents=True)
#print results
a = results.rows
#print len(a[0][0])
b = []
print a[0][0][0]
for x in a[0][0]:
if x not in b:
b.append(x)
valor = random.sample(range(0, len(b)+1), 3)
print b[valor[0]]
print b[valor[1]]
print b[valor[2]]
except Exception:
pass
try:
query = "match (n:Database{nombre:'"+nombre+"'})-[:contains*1..3]->(a:Database{genero2:'"+top_5[1]+"'}) return collect(distinct a.nombre)"
#query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
results = db.query(query, data_contents=True)
#print results
a = results.rows
#print len(a[0][0])
b = []
print a[0][0][0]
for x in a[0][0]:
if x not in b:
b.append(x)
valor = random.sample(range(0, len(b)+1), 3)
print b[valor[0]]
print b[valor[1]]
print b[valor[2]]
except Exception:
pass
try:
query = "match (n:Database{nombre:'"+nombre+"'})-[:contains*1..3]->(a:Database{genero3:'"+top_5[1]+"'}) return collect(distinct a.nombre)"
#query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
results = db.query(query, data_contents=True)
#print results
a = results.rows
#print len(a[0][0])
b = []
print a[0][0][0]
for x in a[0][0]:
if x not in b:
b.append(x)
valor = random.sample(range(0, len(b)+1), 3)
print b[valor[0]]
print b[valor[1]]
print b[valor[2]]
except Exception:
pass
try:
query = "match (n:Database{nombre:'"+nombre+"'})-[:contains*1..3]->(a:Database{genero1:'"+top_5[2]+"'}) return collect(distinct a.nombre)"
#query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
results = db.query(query, data_contents=True)
#print results
a = results.rows
#print len(a[0][0])
b = []
print a[0][0][0]
for x in a[0][0]:
if x not in b:
b.append(x)
valor = random.sample(range(0, len(b)+1), 3)
print b[valor[0]]
print b[valor[1]]
print b[valor[2]]
except Exception:
pass
try:
query = "match (n:Database{nombre:'"+nombre+"'})-[:contains*1..3]->(a:Database{genero2:'"+top_5[2]+"'}) return collect(distinct a.nombre)"
#query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
results = db.query(query, data_contents=True)
#print results
a = results.rows
#print len(a[0][0])
b = []
print a[0][0][0]
for x in a[0][0]:
if x not in b:
b.append(x)
valor = random.sample(range(0, len(b)+1), 3)
print b[valor[0]]
print b[valor[1]]
print b[valor[2]]
except Exception:
pass
try:
query = "match (n:Database{nombre:'"+nombre+"'})-[:contains*1..3]->(a:Database{genero3:'"+top_5[2]+"'}) return collect(distinct a.nombre)"
#query = "MATCH (n:Database {genero1:'"+top_5[0]+"', genero2:'"+top_5[1]+"', genero3:'"+top_5[2]+"'}) RETURN n.nombre"
results = db.query(query, data_contents=True)
#print results
a = results.rows
#print len(a[0][0])
b = []
print a[0][0][0]
for x in a[0][0]:
if x not in b:
b.append(x)
valor = random.sample(range(0, len(b)+1), 3)
print b[valor[0]]
print b[valor[1]]
print b[valor[2]]
except Exception:
pass
#YourList.OrderBy(x => rnd.Next()).Take(5)
#recomendation(name, top_5[0], top_5[1], top_5[2], top_5[3])
#método para mostrar todas las series y peliculas de un genero
def show_genre():
genre = raw_input("Insert genre: ")
try:
query = "MATCH (n:Database {genero1:'"+genre+"'}) RETURN n.nombre"
results = db.query(query, data_contents=True)
a = results.rows
b = []
for x in a:
if x not in b:
b.append(x)
print x
except Exception:
pass
try:
query = "MATCH (n:Database {genero2:'"+genre+"'}) RETURN n.nombre"
results = db.query(query, data_contents=True)
a = results.rows
b = []
for x in a:
if x not in b:
b.append(x)
print x
except Exception:
pass
try:
query = "MATCH (n:Database {genero3:'"+genre+"'}) RETURN n.nombre"
results = db.query(query, data_contents=True)
a = results.rows
b = []
for x in a:
if x not in b:
b.append(x)
print x
except Exception:
pass
#******************************************************************************************************
#*******************************************************************************************************
def menu():
    print("0. Add movies and TV shows from Excel to Database")
    print("1. Add movie or TV show to Database")
    print("2. Watch movie or TV Show")
    print("3. List of movies and TV shows by genre")
    print("9. Exit")
menu()
opcion = input("Option: ")
print ("**********************************")
print ("**********************************")
while(opcion != 9):
if(opcion == 0):
add_Excel()
print ("**********************************")
print ("**********************************")
print ("Values added to Database")
menu()
opcion = input("Option: ")
elif(opcion == 1):
add_database()
print ("**********************************")
print ("**********************************")
menu()
opcion = input("Option: ")
elif(opcion == 2):
watch()
print ("**********************************")
print ("**********************************")
menu()
opcion = input("Option: ")
elif(opcion == 3):
show_genre()
print ("**********************************")
print ("**********************************")
menu()
opcion = input("Option: ")
else:
print("This option is not valid")
print ("**********************************")
print ("**********************************")
menu()
opcion = input("Option: ")
print ("Thanks for using the program")
| 30.414397
| 147
| 0.515832
| 1,935
| 15,633
| 4.119897
| 0.11938
| 0.021576
| 0.010161
| 0.052434
| 0.704215
| 0.688409
| 0.676744
| 0.676744
| 0.666458
| 0.666458
| 0
| 0.038235
| 0.293993
| 15,633
| 513
| 148
| 30.473684
| 0.6837
| 0.142135
| 0
| 0.719298
| 0
| 0.005848
| 0.190644
| 0.068676
| 0
| 0
| 0
| 0.001949
| 0
| 0
| null | null | 0.038012
| 0.008772
| null | null | 0.190058
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
f2f174769c76e5752b21c530463b089bffb53275
| 1,076
|
py
|
Python
|
mkmk/extend.py
|
tundra/mkmk
|
4ca7a3e337dcc3345fb01ea205ae05c397f396b0
|
[
"Apache-2.0"
] | null | null | null |
mkmk/extend.py
|
tundra/mkmk
|
4ca7a3e337dcc3345fb01ea205ae05c397f396b0
|
[
"Apache-2.0"
] | null | null | null |
mkmk/extend.py
|
tundra/mkmk
|
4ca7a3e337dcc3345fb01ea205ae05c397f396b0
|
[
"Apache-2.0"
] | null | null | null |
#- Copyright 2014 GOTO 10.
#- Licensed under the Apache License, Version 2.0 (see LICENSE).
## Utilities used for creating build extensions.
from abc import ABCMeta, abstractmethod
# Abstract superclass of the tool sets loaded implicitly into each context.
# There can be many of these, one for each context.
class ToolSet(object):
    __metaclass__ = ABCMeta
    def __init__(self, context):
        self.context = context
    # Returns the context this tool set belongs to.
    def get_context(self):
        return self.context
# Controller for this kind of extension. There is only one of these for each
# kind of extension.
class ToolController(object):
    __metaclass__ = ABCMeta
    def __init__(self, env):
        self.env = env
    # Returns the build environment.
    def get_environment(self):
        return self.env
    # Gives this controller an opportunity to add some extra custom flags. By
    # default does nothing.
    def add_custom_flags(self, parser):
        pass
    # Returns a toolset instance, given a concrete context.
    @abstractmethod
    def get_tools(self, context):
        pass
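# Hedged illustration, not part of the original mkmk module: a minimal concrete pair
# of subclasses written only against the interface visible above. The names
# "CToolSet" and "CToolController" are invented for the example.
class CToolSet(ToolSet):
    pass

class CToolController(ToolController):
    def get_tools(self, context):
        # Hand each context its own tool set instance.
        return CToolSet(context)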
| 25.619048
| 76
| 0.737918
| 150
| 1,076
| 5.153333
| 0.54
| 0.056921
| 0.056921
| 0.064683
| 0.085382
| 0.085382
| 0
| 0
| 0
| 0
| 0
| 0.009249
| 0.196097
| 1,076
| 41
| 77
| 26.243902
| 0.884393
| 0.536245
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.111111
| 0.055556
| 0.111111
| 0.722222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 3
|
8439225f8d80c110768afbd91dc3a48cb1f55f67
| 1,914
|
py
|
Python
|
users/migrations/0004_auto_20201228_1613.py
|
hhdMrLion/django-crm
|
9f6f021e0cddc323c88280b733144366a0cb9fa6
|
[
"Apache-2.0"
] | 1
|
2021-06-18T03:03:43.000Z
|
2021-06-18T03:03:43.000Z
|
users/migrations/0004_auto_20201228_1613.py
|
hhdMrLion/django-crm
|
9f6f021e0cddc323c88280b733144366a0cb9fa6
|
[
"Apache-2.0"
] | null | null | null |
users/migrations/0004_auto_20201228_1613.py
|
hhdMrLion/django-crm
|
9f6f021e0cddc323c88280b733144366a0cb9fa6
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.2.17 on 2020-12-28 08:13
from django.db import migrations, models
class Migration(migrations.Migration):
    dependencies = [
        ('users', '0003_auto_20201228_1605'),
    ]
    operations = [
        migrations.CreateModel(
            name='Count',
            fields=[
                ('user_id', models.IntegerField(primary_key=True, serialize=False, verbose_name='用户id')),
                ('name', models.CharField(blank=True, max_length=64, null=True, verbose_name='姓名')),
                ('day_customer', models.IntegerField(default=0, verbose_name='今天新增客户数量')),
                ('day_liaison', models.IntegerField(default=0, verbose_name='今天新增联系人数量')),
                ('day_record', models.IntegerField(default=0, verbose_name='今天新增拜访记录数量')),
                ('day_business', models.IntegerField(default=0, verbose_name='今天新增商机数量')),
                ('mouth_customer', models.IntegerField(default=0, verbose_name='本月新增客户数量')),
                ('mouth_liaison', models.IntegerField(default=0, verbose_name='本月新增联系人数量')),
                ('mouth_record', models.IntegerField(default=0, verbose_name='本月新增拜访记录数量')),
                ('mouth_business', models.IntegerField(default=0, verbose_name='本月新增商机数量')),
                ('all_customer', models.IntegerField(default=0, verbose_name='全部客户数量')),
                ('all_liaison', models.IntegerField(default=0, verbose_name='全部联系人数量')),
                ('all_record', models.IntegerField(default=0, verbose_name='全部拜访记录数量')),
                ('all_business', models.IntegerField(default=0, verbose_name='全部商机数量')),
            ],
            options={
                'verbose_name': '用户数据统计',
                'verbose_name_plural': '用户数据统计',
                'db_table': 'count',
            },
        ),
        migrations.DeleteModel(
            name='UserCount',
        ),
    ]
| 46.682927
| 106
| 0.581505
| 182
| 1,914
| 5.917582
| 0.401099
| 0.163417
| 0.278552
| 0.289694
| 0.493036
| 0.493036
| 0.493036
| 0
| 0
| 0
| 0
| 0.033261
| 0.277429
| 1,914
| 40
| 107
| 47.85
| 0.745481
| 0.024033
| 0
| 0.058824
| 1
| 0
| 0.194414
| 0.012596
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.029412
| 0
| 0.117647
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
84425e6e37d98a459d555c6b47a64806ebbb0769
| 246
|
py
|
Python
|
app/newsletter/views.py
|
valeriansaliou/waaave-web
|
8a0cde773563865a905af38f5a0b723a43b17341
|
[
"RSA-MD"
] | 1
|
2020-04-06T10:04:43.000Z
|
2020-04-06T10:04:43.000Z
|
app/newsletter/views.py
|
valeriansaliou/waaave-web
|
8a0cde773563865a905af38f5a0b723a43b17341
|
[
"RSA-MD"
] | null | null | null |
app/newsletter/views.py
|
valeriansaliou/waaave-web
|
8a0cde773563865a905af38f5a0b723a43b17341
|
[
"RSA-MD"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
def root(request):
    """
    Newsletter > Root
    """
    return render(request, 'newsletter/newsletter_root.jade')
| 24.6
| 61
| 0.756098
| 28
| 246
| 6.607143
| 0.571429
| 0.162162
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158537
| 246
| 10
| 61
| 24.6
| 0.89372
| 0.069106
| 0
| 0
| 0
| 0
| 0.14486
| 0.14486
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.6
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 3
|
8446c1c3c431374432b1f4f4b191c7dc6650169d
| 1,580
|
py
|
Python
|
src/cltk/phonology/gmh/phonology.py
|
yelircaasi/cltk
|
1583aa24682543a1f33434a21918f039ca27d60c
|
[
"MIT"
] | 757
|
2015-11-20T00:58:52.000Z
|
2022-03-31T06:34:24.000Z
|
src/cltk/phonology/gmh/phonology.py
|
yelircaasi/cltk
|
1583aa24682543a1f33434a21918f039ca27d60c
|
[
"MIT"
] | 950
|
2015-11-17T05:38:29.000Z
|
2022-03-14T16:09:34.000Z
|
src/cltk/phonology/gmh/phonology.py
|
yelircaasi/cltk
|
1583aa24682543a1f33434a21918f039ca27d60c
|
[
"MIT"
] | 482
|
2015-11-22T18:13:02.000Z
|
2022-03-20T21:22:02.000Z
|
"""Middle High German phonology tools
"""
from typing import List
from cltk.phonology.gmh.transcription import Transcriber
from cltk.phonology.syllabify import Syllabifier
__author__ = ["Clément Besnier <[email protected]>"]
class MiddleHighGermanTranscription:
    """
    Middle High German Transcriber
    """
    def __init__(self):
        self.transcriber = Transcriber()
    def transcribe(self, word):
        """
        >>> MiddleHighGermanTranscription().transcribe("Brynhild")
        'Brynχɪld̥'
        :param word: word to transcribe
        :return: transcribed word
        """
        return self.transcriber.transcribe(word, with_squared_brackets=False)
    def __repr__(self):
        return f"<MiddleHighGermanTranscription>"
    def __call__(self, word):
        return self.transcribe(word)
class MiddleHighGermanSyllabifier:
    """
    Middle High German syllabifier based on sonority phoneme hierarchy for MHG.
    Source: Resonances in Middle High German: New Methodologies in Prosody, Christopher Leo Hench, 2017
    """
    def __init__(self):
        self.syllabifier = Syllabifier(language="gmh")
    def syllabify(self, word: str) -> List[str]:
        """
        >>> MiddleHighGermanSyllabifier().syllabify("Gunther")
        ['Gunt', 'her']
        :param word: word to syllabify
        :return: syllabified word
        """
        return self.syllabifier.syllabify(word, mode="MOP")
    def __repr__(self):
        return f"<MiddleHighGermanSyllabifier>"
    def __call__(self, word):
        return self.syllabify(word)
| 26.333333
| 103
| 0.666456
| 156
| 1,580
| 6.564103
| 0.435897
| 0.039063
| 0.0625
| 0.029297
| 0.083984
| 0.048828
| 0
| 0
| 0
| 0
| 0
| 0.003295
| 0.231646
| 1,580
| 59
| 104
| 26.779661
| 0.839374
| 0.316456
| 0
| 0.272727
| 0
| 0
| 0.112169
| 0.088889
| 0
| 0
| 0
| 0
| 0
| 1
| 0.363636
| false
| 0
| 0.136364
| 0.181818
| 0.863636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
8459ea4275ad26f2fdfb1430948999a41ff39caf
| 408
|
py
|
Python
|
dailypy/__init__.py
|
HuangJiaLian/dailypy
|
b838a4f6743fca8ccc5c4fa73142d0f7095dbbc3
|
[
"CC0-1.0"
] | null | null | null |
dailypy/__init__.py
|
HuangJiaLian/dailypy
|
b838a4f6743fca8ccc5c4fa73142d0f7095dbbc3
|
[
"CC0-1.0"
] | 1
|
2020-08-19T13:42:52.000Z
|
2020-08-19T14:32:31.000Z
|
dailypy/__init__.py
|
HuangJiaLian/dailypy
|
b838a4f6743fca8ccc5c4fa73142d0f7095dbbc3
|
[
"CC0-1.0"
] | null | null | null |
import numpy as np
import os
# Data manipulation helpers
class dm:
    def __init__(self):
        pass
    def saveNp(self, a, name, path='.'):
        np.save(os.path.join(path,name), a)
    def saveTxt(self, a, name, path='.'):
        np.savetxt(os.path.join(path,name)+'.txt', a)
    def saveArrows(self, a1, a2, name='cols'):
        a1a2 = np.stack((a1,a2), axis=1)
        self.saveTxt(a1a2, name)
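# Illustrative usage sketch, not part of the original dailypy module: stacking two
# arrays column-wise and writing them with the helper above; the file name
# 'parabola' is invented for the example.
x = np.arange(5)
y = x ** 2
dm().saveArrows(x, y, name='parabola')   # writes ./parabola.txt with two columns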
| 20.4
| 53
| 0.568627
| 61
| 408
| 3.737705
| 0.47541
| 0.04386
| 0.078947
| 0.114035
| 0.289474
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0299
| 0.262255
| 408
| 19
| 54
| 21.473684
| 0.727575
| 0.036765
| 0
| 0
| 0
| 0
| 0.025641
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.083333
| 0.166667
| 0
| 0.583333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 3
|
845d03992ff2924ffdc957b51de3c6b486a7c0ea
| 880
|
py
|
Python
|
src/las_util/serializers.py
|
dcslagel/las-util-django
|
cea8437813969b3b22182de6b9553b9e6694c548
|
[
"BSD-3-Clause"
] | 3
|
2020-05-15T05:40:43.000Z
|
2021-11-09T06:19:41.000Z
|
src/las_util/serializers.py
|
dcslagel/las-util-django
|
cea8437813969b3b22182de6b9553b9e6694c548
|
[
"BSD-3-Clause"
] | 26
|
2020-05-20T13:03:02.000Z
|
2021-09-23T19:36:39.000Z
|
src/las_util/serializers.py
|
dcslagel/las-util-django
|
cea8437813969b3b22182de6b9553b9e6694c548
|
[
"BSD-3-Clause"
] | 1
|
2021-11-09T01:40:05.000Z
|
2021-11-09T01:40:05.000Z
|
"""
File-Name: [app]/serializers.py
File-Desc: Rest API serializers for las_util
App-Name: las_util
Project-Name: Las-Util-Django
Copyright: Copyright (c) 2019, DC Slagel
License-Identifier: BSD-3-Clause
"""
from rest_framework import serializers
from las_util.models import SectionInfo
class DocSerializer(serializers.ModelSerializer):
    """Link ModelSerializer to the SectionInfo model"""
    class Meta:
        model = SectionInfo
        fields = '__all__'
class ListSerializer(serializers.ModelSerializer):
    """Link ModelSerializer to the SectionInfo model"""
    class Meta:
        model = SectionInfo
        fields = ['filename']
# TODO: replace view.api_upload to use this
# class UploadSerializer(serializer.ModelSerializer):
#     """Link ModelSerializer to the Upload model"""
#     class Meta:
#         model = Upload
#         fields = ['filename',]
| 29.333333
| 55
| 0.7125
| 101
| 880
| 6.118812
| 0.485149
| 0.045307
| 0.165049
| 0.174757
| 0.377023
| 0.313916
| 0.313916
| 0.313916
| 0.313916
| 0.313916
| 0
| 0.007003
| 0.188636
| 880
| 29
| 56
| 30.344828
| 0.858543
| 0.581818
| 0
| 0.4
| 0
| 0
| 0.043478
| 0
| 0
| 0
| 0
| 0.034483
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 3
|
ffce3c914809fe508a87b5cc18e2cdab125e42d4
| 402
|
py
|
Python
|
public_goods_str_nonoise/tests.py
|
bocchan/costly
|
ba52f82e36e28012a63a78805963bdf384679955
|
[
"BSD-3-Clause"
] | null | null | null |
public_goods_str_nonoise/tests.py
|
bocchan/costly
|
ba52f82e36e28012a63a78805963bdf384679955
|
[
"BSD-3-Clause"
] | null | null | null |
public_goods_str_nonoise/tests.py
|
bocchan/costly
|
ba52f82e36e28012a63a78805963bdf384679955
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import division
import random
from otree.common import Currency as c, currency_range
from . import views
from ._builtin import Bot
from .models import Constants
class PlayerBot(Bot):
    """Bot that plays one round"""
    def play_round(self):
        self.submit(views.MyPage)
        self.submit(views.Results)
    def validate_play(self):
        pass
| 18.272727
| 54
| 0.691542
| 54
| 402
| 5
| 0.611111
| 0.074074
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003175
| 0.216418
| 402
| 21
| 55
| 19.142857
| 0.853968
| 0.116915
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0.083333
| 0.5
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 3
|
ffde4731dad77ca75123679807fabb3875a76017
| 176
|
py
|
Python
|
src/registration/urls.py
|
jtrussell/swindle
|
914f9ddc7b155cf895fc233b9f3f0c1804bf23e3
|
[
"MIT"
] | 1
|
2021-04-07T20:14:43.000Z
|
2021-04-07T20:14:43.000Z
|
src/registration/urls.py
|
jtrussell/swindle
|
914f9ddc7b155cf895fc233b9f3f0c1804bf23e3
|
[
"MIT"
] | null | null | null |
src/registration/urls.py
|
jtrussell/swindle
|
914f9ddc7b155cf895fc233b9f3f0c1804bf23e3
|
[
"MIT"
] | null | null | null |
from . import views
from django.urls import path
urlpatterns = [
    path('', views.profile, name='profile'),
    path('sign-up', views.sign_up, name='show_sign_up_form')
]
| 17.6
| 60
| 0.681818
| 25
| 176
| 4.64
| 0.52
| 0.155172
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164773
| 176
| 9
| 61
| 19.555556
| 0.789116
| 0
| 0
| 0
| 0
| 0
| 0.177143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
ffe68f15e3bf96bdad0cec4870fd34ce0d8fbf6c
| 223
|
py
|
Python
|
src/methods/Addition.py
|
svanschooten/Flow
|
c7c158f986f7b108a255cbaa67ec7fff3518b637
|
[
"MIT"
] | null | null | null |
src/methods/Addition.py
|
svanschooten/Flow
|
c7c158f986f7b108a255cbaa67ec7fff3518b637
|
[
"MIT"
] | null | null | null |
src/methods/Addition.py
|
svanschooten/Flow
|
c7c158f986f7b108a255cbaa67ec7fff3518b637
|
[
"MIT"
] | null | null | null |
from methods.AbstactMethod import AbstractMethod
class Addition(AbstractMethod):
    name = 'Addition'
    def apply(self, args: dict) -> dict:
        return {
            'res': args.get('x') + args.get('y')
        }
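# Illustrative usage sketch, not part of the original Flow source: calling the
# method on a small argument dict, assuming the methods package is importable.
result = Addition().apply({'x': 2, 'y': 3})
print(result)   # expected: {'res': 5}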
| 20.272727
| 48
| 0.591928
| 24
| 223
| 5.5
| 0.75
| 0.106061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.273543
| 223
| 10
| 49
| 22.3
| 0.814815
| 0
| 0
| 0
| 0
| 0
| 0.058296
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.142857
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
ffeabfb85c362b4fd5f28c9b1e056f66d191fed5
| 100
|
py
|
Python
|
9.py
|
sarika228/React-Projects
|
24c342f71f839c257150f4b5e096c127b51d525c
|
[
"MIT"
] | null | null | null |
9.py
|
sarika228/React-Projects
|
24c342f71f839c257150f4b5e096c127b51d525c
|
[
"MIT"
] | null | null | null |
9.py
|
sarika228/React-Projects
|
24c342f71f839c257150f4b5e096c127b51d525c
|
[
"MIT"
] | null | null | null |
i=1
while i<=4:
    j=16
    while j>=i:
        print(i,end="")
        j=j-1
    print()
    i=i+1
| 12.5
| 23
| 0.39
| 20
| 100
| 1.95
| 0.4
| 0.102564
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101695
| 0.41
| 100
| 8
| 24
| 12.5
| 0.559322
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
08092e15e7923e75bbc9274300846c3ee3fbd2d9
| 158
|
py
|
Python
|
tests/utils/TestTransaction.py
|
Shaid3r/reservations
|
43e17ae88eed74593879f9f8c5a9bed7252888f7
|
[
"MIT"
] | null | null | null |
tests/utils/TestTransaction.py
|
Shaid3r/reservations
|
43e17ae88eed74593879f9f8c5a9bed7252888f7
|
[
"MIT"
] | null | null | null |
tests/utils/TestTransaction.py
|
Shaid3r/reservations
|
43e17ae88eed74593879f9f8c5a9bed7252888f7
|
[
"MIT"
] | null | null | null |
import storage
import pytest
class TestTransaction:
    @pytest.fixture(autouse=True)
    def transact(self):
        yield
        storage.conn.rollback()
| 15.8
| 33
| 0.683544
| 17
| 158
| 6.352941
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.234177
| 158
| 9
| 34
| 17.555556
| 0.892562
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.285714
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 3
|
083ac0bbfaedec44e83a000de5fcb0cfa49ed48e
| 310
|
py
|
Python
|
tests/symmetry/test_point_group.py
|
kijanac/Materia
|
b49af518c8eff7d3a8c6caff39783e3daf80a7a0
|
[
"MIT"
] | null | null | null |
tests/symmetry/test_point_group.py
|
kijanac/Materia
|
b49af518c8eff7d3a8c6caff39783e3daf80a7a0
|
[
"MIT"
] | null | null | null |
tests/symmetry/test_point_group.py
|
kijanac/Materia
|
b49af518c8eff7d3a8c6caff39783e3daf80a7a0
|
[
"MIT"
] | null | null | null |
# import materia as mtr
# import numpy as np
# def test_point_group_C1():
# ctable = mtr.symmetry.C1().cayley_table()
# assert (ctable == np.array([[0]])).all()
# def test_point_group_Ci():
# ctable = mtr.symmetry.Ci().cayley_table()
# assert (ctable == np.array([[0, 1], [1, 0]])).all()
| 20.666667
| 57
| 0.606452
| 45
| 310
| 4
| 0.466667
| 0.077778
| 0.133333
| 0.188889
| 0.344444
| 0.344444
| 0.344444
| 0
| 0
| 0
| 0
| 0.027888
| 0.190323
| 310
| 14
| 58
| 22.142857
| 0.689243
| 0.925806
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
f242795159bdd7a9675c51d4615c5d8382e738be
| 2,419
|
py
|
Python
|
coinut.py
|
DanHenry4/AutoCoinut
|
f7c79f19a9193bc7c1193712088ca836b030f523
|
[
"MIT"
] | 4
|
2016-05-08T02:15:50.000Z
|
2020-07-01T08:16:45.000Z
|
coinut.py
|
DanHenry4/AutoCoinut
|
f7c79f19a9193bc7c1193712088ca836b030f523
|
[
"MIT"
] | null | null | null |
coinut.py
|
DanHenry4/AutoCoinut
|
f7c79f19a9193bc7c1193712088ca836b030f523
|
[
"MIT"
] | null | null | null |
import hmac
import hashlib
import json
import uuid
import httplib2
COINUT_URL = 'https://coinut.com/api/'
class Coinut():
    def __init__(self, user = None, api_key = None):
        self.user = user
        self.api_key = api_key
        self.http = httplib2.Http()
    def request(self, api, content = {}):
        url = COINUT_URL + api
        headers = {}
        content["nonce"] = uuid.uuid4().get_hex()
        content = json.dumps(content)
        if self.api_key is not None and self.user is not None:
            sig = hmac.new(self.api_key, msg=content,
                           digestmod=hashlib.sha256).hexdigest()
            headers = {'X-USER': self.user, "X-SIGNATURE": sig}
        response, content = self.http.request(url, 'POST',
                                              headers=headers, body=content)
        return json.loads(content)
    def tick(self, asset):
        return self.request("tick/" + asset)
    def balance(self):
        return self.request("balance")
    def assets(self, deriv_type):
        return self.request("assets", {'deriv_type' : deriv_type})
    def expiry_time(self, deriv_type, asset):
        return self.request("expiry_time",
                            {'deriv_type' : deriv_type,
                             'asset': asset})
    def strike_prices(self, deriv_type, asset, expiry_time):
        m = {
            'deriv_type' : deriv_type,
            'asset': asset,
            'expiry_time': expiry_time
        }
        return self.request("strike_prices", m)
    def orderbook(self, deriv_type, asset, expiry_time, strike, put_call):
        m = {
            'deriv_type' : deriv_type,
            'asset': asset,
            'expiry_time': expiry_time,
            'strike': strike,
            'put_call': put_call
        }
        return self.request('orderbook', m)
    def new_orders(self, orders):
        return self.request("new_orders", {'orders': orders})
    def orders(self):
        return self.request("orders")
    def cancel_orders(self, order_ids):
        return self.request("cancel_orders", {'order_ids': order_ids})
    def positions(self):
        return self.request("positions")
    def history_positions(self, start_timestamp, end_timestamp):
        m = {'start_timestamp': start_timestamp,
             'end_timestamp': end_timestamp}
        return self.request("history_positions", m)
| 27.488636
| 76
| 0.574618
| 276
| 2,419
| 4.847826
| 0.242754
| 0.080717
| 0.139761
| 0.053812
| 0.136024
| 0.136024
| 0.073244
| 0.073244
| 0.073244
| 0.073244
| 0
| 0.003584
| 0.307979
| 2,419
| 87
| 77
| 27.804598
| 0.795699
| 0
| 0
| 0.098361
| 0
| 0
| 0.119471
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.213115
| false
| 0
| 0.081967
| 0.131148
| 0.508197
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
f26337b1b3af5eb32cdd87718a2212d8a63d5996
| 6,187
|
py
|
Python
|
nz_snow_tools/eval/brewster_calibration_TF.py
|
jonoconway/nz_snow_tools
|
7002fb401fb48225260fada6fd5b5b7ca5ad1184
|
[
"MIT"
] | 3
|
2020-09-01T07:53:05.000Z
|
2021-02-02T20:28:37.000Z
|
nz_snow_tools/eval/brewster_calibration_TF.py
|
jonoconway/nz_snow_tools
|
7002fb401fb48225260fada6fd5b5b7ca5ad1184
|
[
"MIT"
] | null | null | null |
nz_snow_tools/eval/brewster_calibration_TF.py
|
jonoconway/nz_snow_tools
|
7002fb401fb48225260fada6fd5b5b7ca5ad1184
|
[
"MIT"
] | null | null | null |
"""
code to call the snow model for a simple test case using brewster glacier data
"""
from __future__ import division
import numpy as np
import matplotlib.pylab as plt
import datetime as dt
from nz_snow_tools.util.utils import resample_to_fsca, nash_sut, mean_bias, rmsd, mean_absolute_error, coef_determ
seb_dat = np.genfromtxt(
'S:\Scratch\Jono\Final Brewster Datasets\SEB_output\cdf - code2p0_MC_meas_noQPS_single_fixed output_fixed_B\modelOUT_br1_headings.txt', skip_header=3)
sw_net = seb_dat[:, 14 - 1]
lw_net = seb_dat[:, 17 - 1]
qs = seb_dat[:, 19 - 1]
ql = seb_dat[:, 20 - 1]
qc = seb_dat[:, 21 - 1]
qprc = seb_dat[:, 22 - 1]
qst = seb_dat[:, 24 - 1]
qm = seb_dat[:, 25 - 1]
t_dep_flux = lw_net + qs + ql + qc + qst
qm_wo_sw_prc = qm - sw_net - qprc
qm_wo_sw_prc[(qm == 0)] = 0
ta = seb_dat[:, 8 - 1]
ea = seb_dat[:, 10 - 1]
ws = seb_dat[:, 7 - 1]
r2_ea = coef_determ(qm_wo_sw_prc, ea)
r2_ta = coef_determ(qm_wo_sw_prc, ta)
r2_ea_ws = coef_determ(qm_wo_sw_prc, ea*ws)
r2_ea_pos = coef_determ(qm_wo_sw_prc[(qm_wo_sw_prc > 0)], ea[(qm_wo_sw_prc > 0)])
r2_ta_pos = coef_determ(qm_wo_sw_prc[(qm_wo_sw_prc > 0)], ta[(qm_wo_sw_prc > 0)])
r2_ea_ws_pos = coef_determ(qm_wo_sw_prc[(qm_wo_sw_prc > 0)], ea[(qm_wo_sw_prc > 0)]*ws[(qm_wo_sw_prc > 0)])
print(r2_ea)
print(r2_ta)
print (r2_ea_ws)
print(r2_ea_pos)
print(r2_ta_pos)
print (r2_ea_ws_pos)
print(
np.sum(ta>0),
np.sum(np.logical_and(ta>0,qm_wo_sw_prc > 0)),
np.sum(qm_wo_sw_prc > 0),
np.sum(np.logical_and(ta>0,qm_wo_sw_prc > 0))/np.sum(ta>0),
)
print(
np.sum(ea>6.112),
np.sum(np.logical_and(ea>6.1120,qm_wo_sw_prc > 0)),
np.sum(qm_wo_sw_prc > 0),
np.sum(np.logical_and(ea>6.1120,qm_wo_sw_prc > 0))/np.sum(ea>6.112),
)
plt.figure()
plt.hexbin(qm_wo_sw_prc[(qm_wo_sw_prc > 0)], ta[(qm_wo_sw_prc > 0)], cmap=plt.cm.inferno_r)
plt.plot(range(200), np.arange(200) / 14.7,'k')
plt.plot(range(100), np.arange(100) / 8.7,'r')
plt.xlabel('QM - SWnet - Qprecip')
plt.ylabel('Air temperature (C)')
plt.savefig(r'D:\Snow project\Oct2018 Results\qm_wo_sw_prc vs ta posQM.png')
plt.figure()
plt.hexbin(qm_wo_sw_prc[(qm_wo_sw_prc > 0)], ea[(qm_wo_sw_prc > 0)], cmap=plt.cm.inferno_r)
plt.plot(range(200), 6.112 + np.arange(200) / 42.0,'k')
plt.xlabel('QM - SWnet - Qprecip')
plt.ylabel('Vapour pressure (hPa)')
plt.savefig(r'D:\Snow project\Oct2018 Results\qm_wo_sw_prc vs ea posQM.png')
plt.figure()
plt.hexbin(qm_wo_sw_prc[~(qm_wo_sw_prc == 0)], ta[~(qm_wo_sw_prc == 0)], cmap=plt.cm.inferno_r)
plt.plot(range(200), np.arange(200) / 14.7,'k')
plt.plot(range(100), np.arange(100) / 8.7,'r')
plt.xlabel('QM - SWnet - Qprecip')
plt.ylabel('Air temperature (C)')
plt.savefig(r'D:\Snow project\Oct2018 Results\qm_wo_sw_prc vs ta.png')
plt.figure()
plt.hexbin(qm_wo_sw_prc[~(qm_wo_sw_prc == 0)], ea[~(qm_wo_sw_prc == 0)], cmap=plt.cm.inferno_r)
plt.plot(range(200), 6.112 + np.arange(200) / 42.0,'k')
plt.xlabel('QM - SWnet - Qprecip')
plt.ylabel('Vapour pressure (hPa)')
plt.savefig(r'D:\Snow project\Oct2018 Results\qm_wo_sw_prc vs ea.png')
#plt.show()
print(
np.sum(qm_wo_sw_prc[qm>0])/sw_net.shape,# average positive melt energy from temp dep fluxes
np.sum(sw_net[qm>0])/sw_net.shape, # average melt energy from sw_net
np.sum(qprc[qm>0])/sw_net.shape # average melt energy from precipitation
)
qm_wo_sw_prc[qm_wo_sw_prc<0] = 0 # set all negative melt energy to zero
# find optimal parameters for ea and ta
from scipy.optimize import curve_fit
def f(x, A): # this is your 'straight line' y=f(x)
    return A*x
# sum melt energy from ea and ta
# melt factor was 0.025 mm w.e. per hour per hPa
ea_pos = ea-6.112
ea_pos[ea_pos<0] = 0
A = curve_fit(f,ea_pos, qm_wo_sw_prc)[0] # find optimal ea_q factor = 41.9
np.median(qm_wo_sw_prc[qm_wo_sw_prc>0]/ea_pos[qm_wo_sw_prc>0]) # median Wm^-2 per K = 41.7
ea_q = ea_pos * 42
# Wm^-2 per K (melt rate of 0.05 mm w.e. per hour per K = 4.6 Wm^-2 per K)
ta_pos = ta - 0.
ta_pos[ta_pos<0] = 0
A = curve_fit(f,ta_pos, qm_wo_sw_prc)[0]# find optimal ta_q factor = 8.7
np.median(qm_wo_sw_prc[qm_wo_sw_prc>0]/ta_pos[qm_wo_sw_prc>0]) # median Wm^-2 per K = 14.7
ta_q = ta_pos * 8.7
#K * / (mm w.e. W) *
print(
np.sum(qm_wo_sw_prc[qm>0])/sw_net.shape,# average positive melt energy from temp dep fluxes
np.sum(ea_q)/sw_net.shape, # average calculated melt energy from temp dep fluxes using ea
np.sum(ta_q)/sw_net.shape, # average calculated melt energy from temp dep fluxes using ta
np.sum(sw_net[qm>0])/sw_net.shape, # average melt energy from sw_net
np.sum(sw_net[np.logical_and(qm>0,ta<0)])/sw_net.shape, # average melt energy from sw_net when temperature below 0
np.sum(sw_net[np.logical_and(qm>0,ta>0)])/sw_net.shape, # average melt energy from sw_net when temperature above 0
np.sum(qprc[qm>0])/sw_net.shape # average melt energy from precipitation
)
plt.figure()
plt.hexbin(qm_wo_sw_prc[np.logical_and(ta_q>0,qm_wo_sw_prc>0)],ta_q[np.logical_and(ta_q>0,qm_wo_sw_prc>0)])
plt.plot(range(300),range(300),'b--')
plt.ylabel('mod'),plt.xlabel('obs'),plt.title('ta_q vs qm_wo_sw_prc')
plt.savefig(r'D:\Snow project\Oct2018 Results\qm_wo_sw_prc vs ta_q.png')
plt.figure()
plt.hexbin(qm_wo_sw_prc[np.logical_and(ea_q>0,qm_wo_sw_prc>0)],ea_q[np.logical_and(ea_q>0,qm_wo_sw_prc>0)])
plt.ylabel('mod'),plt.xlabel('obs'),plt.title('ea_q vs qm_wo_sw_prc')
plt.plot(range(300),range(300),'b--')
plt.savefig(r'D:\Snow project\Oct2018 Results\qm_wo_sw_prc vs ea_q.png')
plt.figure()
plt.hist(qm_wo_sw_prc[np.logical_and(ta_pos>0.5,qm_wo_sw_prc>0)]/ta_pos[np.logical_and(ta_pos>0.5,qm_wo_sw_prc>0)],20)
plt.xlabel('ta_q_factor (W m-2 K-1)')
plt.savefig(r'D:\Snow project\Oct2018 Results\ta_q_factor_hist.png')
#plt.show()
print(
rmsd(qm_wo_sw_prc,ta_q),
rmsd(qm_wo_sw_prc,ea_q)
)
es = 6.1121 * np.exp(17.502*ta/(240.97+ta))
rh = (ea/es) * 100
plt.scatter(rh[np.logical_and(ta_pos>0.5,qm_wo_sw_prc>0)]*ws[np.logical_and(ta_pos>0.5,qm_wo_sw_prc>0)]/10.,qm_wo_sw_prc[np.logical_and(ta_pos>0.5,qm_wo_sw_prc>0)]/ta_pos[np.logical_and(ta_pos>0.5,qm_wo_sw_prc>0)],3)
plt.scatter(rh[np.logical_and(ta_pos>0.5,qm_wo_sw_prc>0)],qm_wo_sw_prc[np.logical_and(ta_pos>0.5,qm_wo_sw_prc>0)]/ta_pos[np.logical_and(ta_pos>0.5,qm_wo_sw_prc>0)])
plt.scatter(ql,qm_wo_sw_prc-ta_q)
plt.scatter(ta,qm_wo_sw_prc-ta_q)
| 38.66875
| 216
| 0.725715
| 1,369
| 6,187
| 2.996348
| 0.148283
| 0.073135
| 0.109703
| 0.164554
| 0.732569
| 0.713554
| 0.686738
| 0.66236
| 0.614334
| 0.608971
| 0
| 0.055776
| 0.104574
| 6,187
| 160
| 217
| 38.66875
| 0.684657
| 0.157265
| 0
| 0.292683
| 0
| 0
| 0.148698
| 0.028158
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00813
| false
| 0
| 0.04878
| 0.00813
| 0.065041
| 0.089431
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
f2689ab69abc970864477a6211da1d0af11f1927
| 168
|
py
|
Python
|
main.py
|
dansoliveira/pasc-compiler
|
642f2745395dcc5b4ebbdd1fa83169362f863e61
|
[
"MIT"
] | null | null | null |
main.py
|
dansoliveira/pasc-compiler
|
642f2745395dcc5b4ebbdd1fa83169362f863e61
|
[
"MIT"
] | 1
|
2018-05-10T13:03:04.000Z
|
2018-05-10T13:03:04.000Z
|
main.py
|
dansoliveira/pasc-compiler
|
642f2745395dcc5b4ebbdd1fa83169362f863e61
|
[
"MIT"
] | null | null | null |
from lexer import Lexer
from parser import Parser
if __name__ == "__main__":
lexer = Lexer("exemplos/teste2.pasc")
parser = Parser(lexer)
parser.executa()
| 21
| 41
| 0.702381
| 21
| 168
| 5.238095
| 0.52381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007353
| 0.190476
| 168
| 8
| 42
| 21
| 0.801471
| 0
| 0
| 0
| 0
| 0
| 0.16568
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
f27ae5b52fc981bd0a9765592021614aae946fe5
| 130
|
py
|
Python
|
day00/ex05/kata02.py
|
bcarlier75/python_bootcamp_42ai
|
916c258596f90a222f20329894048addb6f64dd9
|
[
"MIT"
] | 1
|
2020-04-17T18:47:46.000Z
|
2020-04-17T18:47:46.000Z
|
day00/ex05/kata02.py
|
bcarlier75/python_bootcamp_42ai
|
916c258596f90a222f20329894048addb6f64dd9
|
[
"MIT"
] | null | null | null |
day00/ex05/kata02.py
|
bcarlier75/python_bootcamp_42ai
|
916c258596f90a222f20329894048addb6f64dd9
|
[
"MIT"
] | null | null | null |
import datetime
t = (3, 30, 2019, 9, 25)
x = datetime.datetime(t[2], t[3], t[4], t[0], t[1])
print(x.strftime("%m/%d/%Y %H:%M"))
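# Added note (not in the original kata): the tuple is (hour, minute, year, month, day),
# so datetime.datetime(2019, 9, 25, 3, 30) is built and the line above prints
# "09/25/2019 03:30".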
| 21.666667
| 51
| 0.546154
| 29
| 130
| 2.448276
| 0.655172
| 0.253521
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 0.146154
| 130
| 5
| 52
| 26
| 0.504505
| 0
| 0
| 0
| 0
| 0
| 0.107692
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.25
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
f292addc6e3042f36d3fbfdde0bec8e8159cc0d4
| 193
|
py
|
Python
|
Desafio049.py
|
tmoura1981/Python_Exercicios
|
c873e2758dfd9058d2c2d83b5b38b522c6264029
|
[
"MIT"
] | 1
|
2021-11-25T11:19:59.000Z
|
2021-11-25T11:19:59.000Z
|
Desafio049.py
|
tmoura1981/Python_Exercicios
|
c873e2758dfd9058d2c2d83b5b38b522c6264029
|
[
"MIT"
] | null | null | null |
Desafio049.py
|
tmoura1981/Python_Exercicios
|
c873e2758dfd9058d2c2d83b5b38b522c6264029
|
[
"MIT"
] | null | null | null |
# Enter a number and show its multiplication table
print('-' * 36)
n = int(input('Digite um nº e veja sua tabuada: '))
print('=' * 36)
for i in range(1, 11):
    print(n, 'x', i, '=', n * i)
print('=' * 36)
| 24.125
| 51
| 0.554404
| 35
| 193
| 3.057143
| 0.6
| 0.196262
| 0.093458
| 0.317757
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06
| 0.222798
| 193
| 7
| 52
| 27.571429
| 0.653333
| 0.176166
| 0
| 0.333333
| 0
| 0
| 0.242038
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 3
|
f29df525d2aaa21035a1c17e65dbb2cbbc6a88ba
| 1,326
|
py
|
Python
|
levis/encoding.py
|
rawg/levis
|
33cd6c915f51134f79f3586dc0e4a6072247b568
|
[
"MIT"
] | 42
|
2016-06-29T21:13:02.000Z
|
2022-01-23T03:23:59.000Z
|
levis/encoding.py
|
rawg/levis
|
33cd6c915f51134f79f3586dc0e4a6072247b568
|
[
"MIT"
] | null | null | null |
levis/encoding.py
|
rawg/levis
|
33cd6c915f51134f79f3586dc0e4a6072247b568
|
[
"MIT"
] | 12
|
2016-07-18T20:46:55.000Z
|
2021-06-13T16:08:37.000Z
|
# coding=utf-8
"""
"""
from . import mutation
from . import crossover
from . import base
class BinaryGA(base.GeneticAlgorithm):
    """A binary encoded genetic algorithm."""
    def num_bits(self):
        """Return the number of bits used by the encoding scheme.
        Returns:
            int: The number of bits used by the encoding scheme.
        """
        raise NotImplementedError
    def mutate(self, chromosome):
        """Use toggle (bit inversion) mutation on a chromosome."""
        return mutation.toggle(chromosome, self.num_bits(), self.mutation_prob)
class ValueGA(base.GeneticAlgorithm):
    """A list encoded genetic algorithm."""
    def get_value(self):
        """Retrieve a valid allele value at random."""
        raise NotImplementedError
    def mutate(self, chromosome):
        return mutation.point(chromosome, self.get_value, self.mutation_prob)
class ListGA(ValueGA):
    """A list encoded genetic algorithm."""
    def mutate(self, chromosome):
        return mutation.heterogeneous_length(chromosome, self.get_value,
                                             self.mutation_prob)
class PermutationGA(base.GeneticAlgorithm):
    """A permutation encoded genetic algorithm."""
    def mutate(self, chromosome):
        return mutation.swap(chromosome, self.mutation_prob)
| 26.52
| 79
| 0.662142
| 149
| 1,326
| 5.825503
| 0.375839
| 0.064516
| 0.105991
| 0.119816
| 0.490783
| 0.490783
| 0.324885
| 0.324885
| 0.225806
| 0
| 0
| 0.000991
| 0.239065
| 1,326
| 49
| 80
| 27.061224
| 0.859267
| 0.282051
| 0
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0
| 0.15
| 0.15
| 0.85
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
f2a1b14f9c19a43e8614ebf25a3e38b7faa2cee4
| 126
|
py
|
Python
|
2375.py
|
ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python
|
9a0f0ad5efd4a9e73589c357ab4b34b7c73a11da
|
[
"MIT"
] | 1
|
2022-01-14T08:45:32.000Z
|
2022-01-14T08:45:32.000Z
|
2375.py
|
ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python
|
9a0f0ad5efd4a9e73589c357ab4b34b7c73a11da
|
[
"MIT"
] | null | null | null |
2375.py
|
ShawonBarman/URI-Online-judge-Ad-Hoc-level-problem-solution-in-python
|
9a0f0ad5efd4a9e73589c357ab4b34b7c73a11da
|
[
"MIT"
] | null | null | null |
n = int(input())
a, l, p = map(int, input().split())
if a >= n and l >= n and p >= n:
print("S")
else:
print("N")
| 21
| 36
| 0.460317
| 24
| 126
| 2.416667
| 0.541667
| 0.275862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 126
| 6
| 37
| 21
| 0.644444
| 0
| 0
| 0
| 0
| 0
| 0.016393
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
4b3d1227daae503aacdc59d3f5aff14a4ad6eda7
| 112
|
py
|
Python
|
week3DataStructuren/Dictionaries/dictionary.py
|
hanbioinformatica/owe2a
|
f572866ef3bc75689d2d571cb393c6d60480655b
|
[
"Apache-2.0"
] | null | null | null |
week3DataStructuren/Dictionaries/dictionary.py
|
hanbioinformatica/owe2a
|
f572866ef3bc75689d2d571cb393c6d60480655b
|
[
"Apache-2.0"
] | null | null | null |
week3DataStructuren/Dictionaries/dictionary.py
|
hanbioinformatica/owe2a
|
f572866ef3bc75689d2d571cb393c6d60480655b
|
[
"Apache-2.0"
] | 1
|
2018-12-04T15:23:47.000Z
|
2018-12-04T15:23:47.000Z
|
d1 = {"koe":4,"slang":0,"konijn":4,"zebra":4}
d1["koe"]
d2 = {"vis":0,"beer":4,"kip":2}
d1.update(d2)
print (d1)
| 22.4
| 45
| 0.553571
| 23
| 112
| 2.695652
| 0.608696
| 0.16129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126214
| 0.080357
| 112
| 5
| 46
| 22.4
| 0.475728
| 0
| 0
| 0
| 0
| 0
| 0.283186
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
4b442e7411a54e834374d8e754640fa1c026849b
| 791
|
py
|
Python
|
src/linuxforhealth/x12/v5010/x12_834_005010X220A1/transaction_set.py
|
joewright/x12
|
3734589b5ffa554388174234aa7dc37c2543f46a
|
[
"Apache-2.0"
] | 4
|
2021-12-11T15:38:03.000Z
|
2021-12-22T13:18:31.000Z
|
src/linuxforhealth/x12/v5010/x12_834_005010X220A1/transaction_set.py
|
joewright/x12
|
3734589b5ffa554388174234aa7dc37c2543f46a
|
[
"Apache-2.0"
] | 55
|
2021-06-12T01:11:15.000Z
|
2022-02-03T19:28:32.000Z
|
src/linuxforhealth/x12/v5010/x12_834_005010X220A1/transaction_set.py
|
joewright/x12
|
3734589b5ffa554388174234aa7dc37c2543f46a
|
[
"Apache-2.0"
] | 3
|
2021-06-11T19:33:19.000Z
|
2021-11-19T23:33:58.000Z
|
"""
transaction_set.py
Defines the Enrollment 834 005010X220A1 transaction set model.
"""
from typing import List, Optional
from linuxforhealth.x12.models import X12SegmentGroup
from .loops import Footer, Header, Loop1000A, Loop1000B, Loop1000C, Loop2000
from pydantic import root_validator, Field
from linuxforhealth.x12.validators import validate_segment_count
class BenefitEnrollmentAndMaintenance(X12SegmentGroup):
"""
The ASC X12 834 (Benefit Enrollment and Maintenance) transaction model.
"""
header: Header
loop_1000a: Loop1000A
loop_1000b: Loop1000B
loop_1000c: Optional[List[Loop1000C]] = Field(max_items=2)
loop_2000: List[Loop2000]
footer: Footer
# _validate_segment_count = root_validator(allow_reuse=True)(validate_segment_count)
| 27.275862
| 88
| 0.78129
| 92
| 791
| 6.543478
| 0.543478
| 0.074751
| 0.099668
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111441
| 0.149178
| 791
| 28
| 89
| 28.25
| 0.783061
| 0.300885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.416667
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 3
|
299875f6900cd7a8b095fbe70057acd505857f31
| 4,796
|
py
|
Python
|
finetune/target_models/multifield.py
|
IndicoDataSolutions/finetune-transformer-lm
|
3534658e5de281e5634c8481b0fb37635b0cb3af
|
[
"MIT"
] | null | null | null |
finetune/target_models/multifield.py
|
IndicoDataSolutions/finetune-transformer-lm
|
3534658e5de281e5634c8481b0fb37635b0cb3af
|
[
"MIT"
] | null | null | null |
finetune/target_models/multifield.py
|
IndicoDataSolutions/finetune-transformer-lm
|
3534658e5de281e5634c8481b0fb37635b0cb3af
|
[
"MIT"
] | null | null | null |
import copy
from finetune.errors import FinetuneError
from finetune.target_models.classifier import Classifier, ClassificationPipeline
from finetune.target_models.regressor import Regressor, RegressionPipeline
from finetune.base import BaseModel
class MultiFieldClassificationPipeline(ClassificationPipeline):
def _format_for_encoding(self, X):
return X
class MultiFieldRegressionPipeline(RegressionPipeline):
def _format_for_encoding(self, X):
return X
class MultiFieldClassifier(Classifier):
"""
Classifies a set of documents into 1 of N classes.
:param config: A :py:class:`finetune.config.Settings` object or None (for default config).
:param \**kwargs: key-value pairs of config items to override.
"""
defaults = {"chunk_long_sequences": False}
def __init__(self, **kwargs):
d = copy.deepcopy(MultiFieldClassifier.defaults)
d.update(kwargs)
super().__init__(**d)
if self.config.chunk_long_sequences:
raise FinetuneError(
"Multifield model is incompatible with chunk_long_sequences = True in config."
)
def _get_input_pipeline(self):
return MultiFieldClassificationPipeline(self.config)
def finetune(self, Xs, Y=None, context=None, **kwargs):
"""
:param \*Xs: lists of text inputs, shape [batch, n_fields]
:param Y: integer or string-valued class labels. It is necessary for the items of Y to be sortable.
:param batch_size: integer number of examples per batch. When N_GPUS > 1, this number
corresponds to the number of training examples provided to each GPU.
"""
return BaseModel.finetune(self, Xs, Y=Y, context=context, **kwargs)
def predict(self, Xs, context=None, **kwargs):
"""
Produces list of most likely class labels as determined by the fine-tuned model.
:param \*Xs: lists of text inputs, shape [batch, n_fields]
:returns: list of class labels.
"""
return BaseModel.predict(self, Xs, context=context, **kwargs)
def predict_proba(self, Xs, context=None, **kwargs):
"""
Produces probability distribution over classes for each example in X.
:param \*Xs: lists of text inputs, shape [batch, n_fields]
:returns: list of dictionaries. Each dictionary maps from X2 class label to its assigned class probability.
"""
return BaseModel.predict_proba(self, Xs, context=context, **kwargs)
def featurize(self, Xs, **kwargs):
"""
Embeds inputs in learned feature space. Can be called before or after calling :meth:`finetune`.
:param \*Xs: lists of text inputs, shape [batch, n_fields]
:returns: np.array of features of shape (n_examples, embedding_size).
"""
return BaseModel.featurize(self, Xs, **kwargs)
class MultiFieldRegressor(Regressor):
"""
Regresses one or more floating point values given a set of documents per example.
:param config: A :py:class:`finetune.config.Settings` object or None (for default config).
:param \**kwargs: key-value pairs of config items to override.
"""
def _get_input_pipeline(self):
return MultiFieldRegressionPipeline(self.config)
def finetune(self, Xs, Y=None, **kwargs):
"""
:param \*Xs: lists of text inputs, shape [batch, n_fields]
:param Y: floating point targets
:param batch_size: integer number of examples per batch. When N_GPUS > 1, this number
corresponds to the number of training examples provided to each GPU.
"""
return BaseModel.finetune(self, Xs, Y=Y, **kwargs)
def predict(self, Xs, **kwargs):
"""
Produces list of most likely class labels as determined by the fine-tuned model.
:param \*Xs: lists of text inputs, shape [batch, n_fields]
:returns: list of class labels.
"""
return BaseModel.predict(self, Xs, **kwargs)
def predict_proba(self, Xs, **kwargs):
"""
Produces probability distribution over classes for each example in X.
:param \*Xs: lists of text inputs, shape [batch, n_fields]
:returns: list of dictionaries. Each dictionary maps from X2 class label to its assigned class probability.
"""
return BaseModel.predict_proba(self, Xs, **kwargs)
def featurize(self, Xs, **kwargs):
"""
Embeds inputs in learned feature space. Can be called before or after calling :meth:`finetune`.
:param \*Xs: lists of text inputs, shape [batch, n_fields]
:returns: np.array of features of shape (n_examples, embedding_size).
"""
return BaseModel.featurize(self, Xs, **kwargs)
| 38.677419
| 116
| 0.66347
| 595
| 4,796
| 5.267227
| 0.24874
| 0.030632
| 0.030632
| 0.035737
| 0.721761
| 0.705169
| 0.651564
| 0.651564
| 0.632419
| 0.608807
| 0
| 0.001382
| 0.245413
| 4,796
| 123
| 117
| 38.99187
| 0.864603
| 0.483111
| 0
| 0.238095
| 0
| 0
| 0.045671
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.309524
| false
| 0
| 0.119048
| 0.095238
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
2998b411809973174ac82478a06ef6fa40c371df
| 157
|
py
|
Python
|
db_s3_backup/db_interface/dump_protocol.py
|
saurabhariyan/db-s3-backup
|
5b67737f43814f0841d47033c92825206a24e1a1
|
[
"MIT"
] | 9
|
2015-08-04T00:54:46.000Z
|
2021-08-29T04:21:13.000Z
|
db_s3_backup/db_interface/dump_protocol.py
|
saurabhariyan/db-s3-backup
|
5b67737f43814f0841d47033c92825206a24e1a1
|
[
"MIT"
] | 7
|
2015-05-28T15:57:15.000Z
|
2017-01-25T11:29:28.000Z
|
db_s3_backup/db_interface/dump_protocol.py
|
saurabhariyan/db-s3-backup
|
5b67737f43814f0841d47033c92825206a24e1a1
|
[
"MIT"
] | 9
|
2015-05-28T13:09:25.000Z
|
2021-02-12T04:57:04.000Z
|
from exceptions import ValueError
class DumpProtocol:
def dump(self, config=None, verbose=False):
raise ValueError('DumpProtocol not followed')
| 26.166667
| 53
| 0.751592
| 18
| 157
| 6.555556
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171975
| 157
| 5
| 54
| 31.4
| 0.907692
| 0
| 0
| 0
| 0
| 0
| 0.159236
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
299c9b32319909c8f36fc5af498db57a782db34f
| 437
|
py
|
Python
|
integration_tests/test_12_dmaap.py
|
krasm/python-onapsdk
|
87cd3017fc542a8afd3be51fbd89934ed87ed3a7
|
[
"Apache-2.0"
] | 4
|
2020-06-13T04:51:27.000Z
|
2021-01-06T15:00:51.000Z
|
integration_tests/test_12_dmaap.py
|
krasm/python-onapsdk
|
87cd3017fc542a8afd3be51fbd89934ed87ed3a7
|
[
"Apache-2.0"
] | 10
|
2021-09-20T15:42:47.000Z
|
2021-09-23T12:49:51.000Z
|
integration_tests/test_12_dmaap.py
|
krasm/python-onapsdk
|
87cd3017fc542a8afd3be51fbd89934ed87ed3a7
|
[
"Apache-2.0"
] | 8
|
2020-08-28T10:56:02.000Z
|
2022-02-11T17:06:03.000Z
|
# SPDX-License-Identifier: Apache-2.0
# Copyright 2020 Nokia
import pytest
import logging
import os
from onapsdk.dmaap.dmaap import Dmaap
logging.basicConfig(level=os.environ.get("LOGLEVEL", "DEBUG"))
@pytest.mark.integration
def test_should_get_all_topics_from_dmaap():
# given
# when
response = Dmaap.get_all_topics(basic_auth={'username': 'demo', 'password': 'demo123456!'})
# then
assert len(response) == 9
| 20.809524
| 95
| 0.729977
| 58
| 437
| 5.344828
| 0.724138
| 0.03871
| 0.077419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034853
| 0.146453
| 437
| 20
| 96
| 21.85
| 0.796247
| 0.16476
| 0
| 0
| 0
| 0
| 0.122563
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 1
| 0.111111
| false
| 0.111111
| 0.444444
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 3
|
29a1b5f087c1d14e9f6ed91d094e1aa061d5a041
| 2,798
|
py
|
Python
|
phonotactics/onsets/onsets.py
|
shlomo-Kallner/coventreiya
|
aa0773693220025f8d2c23644a2c5d9d884773e9
|
[
"Apache-2.0"
] | null | null | null |
phonotactics/onsets/onsets.py
|
shlomo-Kallner/coventreiya
|
aa0773693220025f8d2c23644a2c5d9d884773e9
|
[
"Apache-2.0"
] | null | null | null |
phonotactics/onsets/onsets.py
|
shlomo-Kallner/coventreiya
|
aa0773693220025f8d2c23644a2c5d9d884773e9
|
[
"Apache-2.0"
] | null | null | null |
__name__ = 'onsets'
__version__ = '1.5.1'
__package__ = 'phonotactics'
# imports
#some import machinery checking and manipulations...
#import sys
#import os
#from os import path
#if '__file__' in dir():
# __mod_path = path.dirname(__file__)
# if __mod_path not in sys.path:
# sys.path.append(__mod_path)
# __pack_path = path.dirname(__mod_path)
# if __pack_path not in sys.path:
# sys.path.append(__pack_path)
from coventreiya.utils.ver import ver
from coventreiya.utils.ver import gen_ver
from coventreiya.utils.ver import Version_Registry
from coventreiya.phonotactics.abc import abc
########################################################
#
#
# Generating the Onsets
#
#
class Onsets(abc):
def __init__(self, min_length=0, max_length=0,
major=0, minor=0, patch=0, version=None):
super().__init__(min_length, max_length,
major, minor, patch, version)
pass
################################################################################
#
# Version Information Control & UnExported [but Versioned] Object Instantiation
#
#
__versions = Version_Registry( Onsets() )
def register( version, functor ):
if isinstance( version, Onsets ):
return __versions.register( version, functor )
else:
raise TypeError()
def get_version(major=0, minor=0, patch=0, version=None):
return __versions.get( major, minor, patch, version )
def gen_version( major=0, minor=0, patch=0, version=None ):
return __versions.gen( major, minor, patch, version )
def get_all_versions():
return list(__versions)
###################################################################################
#
# Getting/Setting the default/current version...
#
def get_current():
return __versions.current()
def get_current_version():
return __versions.current().version()
def reset_current_version( major=0, minor=0, patch=0, version=None ):
v = gen_ver(major, minor, patch, version)
return __versions.current(v)
###################################################################################
#
# The original default version -- used for the (now obsolete and removed)
# "default" gen_*_ functions and the pre-generated lists...
# Note: the *COMPATIBILITY_ONLY* default gen_*_ functions will self-update to
# accommodate resets (they call into *THE_CURRENT_VERSION_OBJECT*!!)
# the PRE-GENERATED LISTS will not be updated at all..
# Note: VERSION 2_0: the *OLD* gen_*_ functions no longer self-update as
# they are now directly linked to version 1.5.1 only.
#
# from ver_1_5_1 import *
# __versions.current(gen_ver(1,5,1))
| 30.086022
| 84
| 0.592566
| 322
| 2,798
| 4.841615
| 0.326087
| 0.053881
| 0.007697
| 0.030789
| 0.234766
| 0.184092
| 0.14304
| 0.14304
| 0.087235
| 0.064144
| 0
| 0.012664
| 0.209793
| 2,798
| 92
| 85
| 30.413043
| 0.692447
| 0.372051
| 0
| 0
| 0
| 0
| 0.017358
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.03125
| 0.125
| 0.15625
| 0.625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 3
|
29a40a64c821d23b2e28418293629df23986810c
| 6,343
|
bzl
|
Python
|
tools/bzl/deps.bzl
|
xshaun/compiler-programl
|
f90bcd84700d0f245c80440a3d5fd29370d2f973
|
[
"Apache-2.0"
] | null | null | null |
tools/bzl/deps.bzl
|
xshaun/compiler-programl
|
f90bcd84700d0f245c80440a3d5fd29370d2f973
|
[
"Apache-2.0"
] | null | null | null |
tools/bzl/deps.bzl
|
xshaun/compiler-programl
|
f90bcd84700d0f245c80440a3d5fd29370d2f973
|
[
"Apache-2.0"
] | null | null | null |
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def programl_deps():
http_archive(
name="labm8",
sha256="e4bc669322533e7615f689e5e8a810446d0c803be2e3b21e99a4e0135709755e",
strip_prefix="labm8-b98301dec615465a6567bed4ec4131753d1f8b32",
urls=[
"https://github.com/ChrisCummins/labm8/archive/b98301dec615465a6567bed4ec4131753d1f8b32.tar.gz"
],
)
http_archive(
name="com_github_nelhage_rules_boost",
sha256="4031539fe0af832c6b6ed6974d820d350299a291ba7337d6c599d4854e47ed88",
strip_prefix="rules_boost-4ee400beca08f524e7ea3be3ca41cce34454272f",
urls=[
"https://github.com/nelhage/rules_boost/archive/4ee400beca08f524e7ea3be3ca41cce34454272f.tar.gz"
],
)
http_archive(
name="llvm",
sha256="ea0dbab56d11e78006c68c39bc99da672bb6adc7ca03237ba4eb88887bf91a93",
strip_prefix="bazel_llvm-ae9ef2a711c5744fe52c5666d76976a3c6a3128b",
urls=[
"https://github.com/ChrisCummins/bazel_llvm/archive/ae9ef2a711c5744fe52c5666d76976a3c6a3128b.tar.gz"
],
)
http_archive(
name="rules_python",
sha256="64a3c26f95db470c32ad86c924b23a821cd16c3879eed732a7841779a32a60f8",
strip_prefix="rules_python-748aa53d7701e71101dfd15d800e100f6ff8e5d1",
urls=[
"https://github.com/bazelbuild/rules_python/archive/748aa53d7701e71101dfd15d800e100f6ff8e5d1.tar.gz"
],
)
http_archive(
name="com_github_chriscummins_rules_bats",
strip_prefix="rules_bats-6600627545380d2b32485371bed36cef49e9ff68",
sha256="bfaa7a5818e7d6b142ac6e564f383f69f72ea593eb7de360e9aa15db69f67505",
urls=[
"https://github.com/ChrisCummins/rules_bats/archive/6600627545380d2b32485371bed36cef49e9ff68.tar.gz"
],
)
http_archive(
name="subprocess",
build_file="@programl//:third_party/subprocess.BUILD",
sha256="886df0a814a7bb7a3fdeead22f75400abd8d3235b81d05817bc8c1125eeebb8f",
strip_prefix="cpp-subprocess-2.0",
urls=["https://github.com/arun11299/cpp-subprocess/archive/v2.0.tar.gz",],
)
http_archive(
name="ctpl",
build_file="@programl//:third_party/ctpl.BUILD",
sha256="8c1cec7c570d6d84be1d29283af5039ea27c3e69703bd446d396424bf619816e",
strip_prefix="CTPL-ctpl_v.0.0.2",
urls=["https://github.com/vit-vit/CTPL/archive/ctpl_v.0.0.2.tar.gz"],
)
http_archive(
name="fmt",
build_file="@programl//:third_party/fmt.BUILD",
sha256="1cafc80701b746085dddf41bd9193e6d35089e1c6ec1940e037fcb9c98f62365",
strip_prefix="fmt-6.1.2",
urls=["https://github.com/fmtlib/fmt/archive/6.1.2.tar.gz"],
)
http_archive(
name="pybind11_json",
build_file="@programl//:third_party/pybind11_json.BUILD",
sha256="45957f8564e921a412a6de49c578ef1faf3b04e531e859464853e26e1c734ea5",
strip_prefix="pybind11_json-0.2.4/include",
urls=["https://github.com/pybind/pybind11_json/archive/0.2.4.tar.gz"],
)
http_archive(
name="nlohmann_json",
build_file="@programl//:third_party/nlohmann_json.BUILD",
sha256="87b5884741427220d3a33df1363ae0e8b898099fbc59f1c451113f6732891014",
strip_prefix="single_include",
urls=[
"https://github.com/nlohmann/json/releases/download/v3.7.3/include.zip"
],
)
http_archive(
name="build_stack_rules_proto",
sha256="85ccc69a964a9fe3859b1190a7c8246af2a4ead037ee82247378464276d4262a",
strip_prefix="rules_proto-d9a123032f8436dbc34069cfc3207f2810a494ee",
urls=[
"https://github.com/stackb/rules_proto/archive/d9a123032f8436dbc34069cfc3207f2810a494ee.tar.gz"
],
)
http_archive(
name="tbb_mac",
build_file="@programl//:third_party/tbb_mac.BUILD",
sha256="6ff553ec31c33b8340ce2113853be1c42e12b1a4571f711c529f8d4fa762a1bf",
strip_prefix="tbb2017_20170226oss",
urls=[
"https://github.com/01org/tbb/releases/download/2017_U5/tbb2017_20170226oss_mac.tgz"
],
)
http_archive(
name="tbb_lin",
build_file="@programl//:third_party/tbb_lin.BUILD",
sha256="c4cd712f8d58d77f7b47286c867eb6fd70a8e8aef097a5c40f6c6b53d9dd83e1",
strip_prefix="tbb2017_20170226oss",
urls=[
"https://github.com/01org/tbb/releases/download/2017_U5/tbb2017_20170226oss_lin.tgz"
],
)
http_archive(
name="pybind11",
build_file="@programl//:third_party/pybind11_bazel/pybind11.BUILD",
sha256="1eed57bc6863190e35637290f97a20c81cfe4d9090ac0a24f3bbf08f265eb71d",
strip_prefix="pybind11-2.4.3",
urls=["https://github.com/pybind/pybind11/archive/v2.4.3.tar.gz"],
)
http_archive(
name="com_google_absl",
sha256="d10f684f170eb36f3ce752d2819a0be8cc703b429247d7d662ba5b4b48dd7f65",
strip_prefix="abseil-cpp-3088e76c597e068479e82508b1770a7ad0c806b6",
url="https://github.com/abseil/abseil-cpp/archive/3088e76c597e068479e82508b1770a7ad0c806b6.tar.gz",
)
http_archive(
name="com_github_gflags_gflags",
sha256="34af2f15cf7367513b352bdcd2493ab14ce43692d2dcd9dfc499492966c64dcf",
strip_prefix="gflags-2.2.2",
urls=["https://github.com/gflags/gflags/archive/v2.2.2.tar.gz"],
)
http_archive(
name="gtest",
sha256="9dc9157a9a1551ec7a7e43daea9a694a0bb5fb8bec81235d8a1e6ef64c716dcb",
strip_prefix="googletest-release-1.10.0",
urls=[
"https://github.com/google/googletest/archive/release-1.10.0.tar.gz",
],
)
http_archive(
name="com_github_google_benchmark",
sha256="616f252f37d61b15037e3c2ef956905baf9c9eecfeab400cb3ad25bae714e214",
strip_prefix="benchmark-1.4.0",
url="https://github.com/google/benchmark/archive/v1.4.0.tar.gz",
)
http_archive(
name="org_tensorflow",
sha256="92116bfea188963a0e215e21e67c3494f6e1e6959f44dfbcc315f66eb70b5f83",
strip_prefix="tensorflow-f13f807c83c0d8d4d1ef290a17f26fe884ccfe2f",
urls=[
"https://github.com/ChrisCummins/tensorflow/archive/f13f807c83c0d8d4d1ef290a17f26fe884ccfe2f.tar.gz"
],
)
http_archive(
name="io_bazel_rules_closure",
sha256="5b00383d08dd71f28503736db0500b6fb4dda47489ff5fc6bed42557c07c6ba9",
strip_prefix="rules_closure-308b05b2419edb5c8ee0471b67a40403df940149",
urls=[
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz",
"https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz", # 2019-06-13
],
)
| 35.836158
| 153
| 0.760366
| 590
| 6,343
| 7.972881
| 0.20678
| 0.049107
| 0.063776
| 0.065051
| 0.281888
| 0.176446
| 0.10034
| 0.07568
| 0.07568
| 0.040391
| 0
| 0.271413
| 0.116506
| 6,343
| 176
| 154
| 36.039773
| 0.567987
| 0.001577
| 0
| 0.296774
| 0
| 0.064516
| 0.677776
| 0.365977
| 0
| 0
| 0
| 0
| 0
| 1
| 0.006452
| true
| 0
| 0
| 0
| 0.006452
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
29a916eb7d2d8321665bd4ae8b4fed316f3bc30f
| 217
|
py
|
Python
|
sklearn-nlp/utils/data_utils.py
|
fmailhot/sklearn-nlp
|
3de76cb71fc85bc1231bdfa9cd78b5f98a0f14f7
|
[
"BSD-3-Clause"
] | null | null | null |
sklearn-nlp/utils/data_utils.py
|
fmailhot/sklearn-nlp
|
3de76cb71fc85bc1231bdfa9cd78b5f98a0f14f7
|
[
"BSD-3-Clause"
] | null | null | null |
sklearn-nlp/utils/data_utils.py
|
fmailhot/sklearn-nlp
|
3de76cb71fc85bc1231bdfa9cd78b5f98a0f14f7
|
[
"BSD-3-Clause"
] | null | null | null |
"""Data loading/munging utilities.
This will need to leverage a lot of existing stuff
(e.g. numpy.genfromtxt)...
"""
import logging
class DataLoader(object):
def __init__(self, data_files=None):
pass
| 15.5
| 50
| 0.700461
| 30
| 217
| 4.9
| 0.966667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.18894
| 217
| 13
| 51
| 16.692308
| 0.835227
| 0.506912
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.25
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 3
|
29aa6576959454006572496dfd5c5ae886a2c7c2
| 78
|
py
|
Python
|
Configuration/Eras/python/Modifier_run3_nanoAOD_devel_cff.py
|
malbouis/cmssw
|
16173a30d3f0c9ecc5419c474bb4d272c58b65c8
|
[
"Apache-2.0"
] | 852
|
2015-01-11T21:03:51.000Z
|
2022-03-25T21:14:00.000Z
|
Configuration/Eras/python/Modifier_run3_nanoAOD_devel_cff.py
|
gartung/cmssw
|
3072dde3ce94dcd1791d778988198a44cde02162
|
[
"Apache-2.0"
] | 30,371
|
2015-01-02T00:14:40.000Z
|
2022-03-31T23:26:05.000Z
|
Configuration/Eras/python/Modifier_run3_nanoAOD_devel_cff.py
|
gartung/cmssw
|
3072dde3ce94dcd1791d778988198a44cde02162
|
[
"Apache-2.0"
] | 3,240
|
2015-01-02T05:53:18.000Z
|
2022-03-31T17:24:21.000Z
|
import FWCore.ParameterSet.Config as cms
run3_nanoAOD_devel = cms.Modifier()
| 19.5
| 40
| 0.820513
| 11
| 78
| 5.636364
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014286
| 0.102564
| 78
| 3
| 41
| 26
| 0.871429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
29b245fab6ed28cf6c359207c9c4af61c43d22d1
| 102
|
py
|
Python
|
ch7/exercises/parrot.py
|
hewittaj/python_crash_course
|
52a3341eec79c2eb6c7f9f1cb7f0806c3b2d61aa
|
[
"MIT"
] | null | null | null |
ch7/exercises/parrot.py
|
hewittaj/python_crash_course
|
52a3341eec79c2eb6c7f9f1cb7f0806c3b2d61aa
|
[
"MIT"
] | null | null | null |
ch7/exercises/parrot.py
|
hewittaj/python_crash_course
|
52a3341eec79c2eb6c7f9f1cb7f0806c3b2d61aa
|
[
"MIT"
] | null | null | null |
# using the input() function
message = input("Tell me something, and I'll repeat it!")
print(message)
| 25.5
| 57
| 0.72549
| 16
| 102
| 4.625
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 102
| 4
| 58
| 25.5
| 0.850575
| 0.254902
| 0
| 0
| 0
| 0
| 0.506667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 3
|
29d39c3e482269db7c4ce7b3f24a9b213141989d
| 141
|
py
|
Python
|
api/server/utils/logger.py
|
ktolstikhin/vision-service
|
b87f10f5ec3d22b76c06a0e6c0105fd823e60c39
|
[
"MIT"
] | null | null | null |
api/server/utils/logger.py
|
ktolstikhin/vision-service
|
b87f10f5ec3d22b76c06a0e6c0105fd823e60c39
|
[
"MIT"
] | null | null | null |
api/server/utils/logger.py
|
ktolstikhin/vision-service
|
b87f10f5ec3d22b76c06a0e6c0105fd823e60c39
|
[
"MIT"
] | null | null | null |
import logging
def initialize(app):
level = logging.DEBUG if app.config.get('DEBUG') else logging.INFO
app.logger.setLevel(level)
| 17.625
| 70
| 0.723404
| 20
| 141
| 5.1
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163121
| 141
| 7
| 71
| 20.142857
| 0.864407
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
29ecd75f594b19acd9901238ad242a2ae33df3f6
| 112
|
py
|
Python
|
layers/modules/__init__.py
|
Eralaf/ssd.pytorch
|
acad53fd801f32120ecb3ff57950556e35db3d1c
|
[
"MIT"
] | null | null | null |
layers/modules/__init__.py
|
Eralaf/ssd.pytorch
|
acad53fd801f32120ecb3ff57950556e35db3d1c
|
[
"MIT"
] | null | null | null |
layers/modules/__init__.py
|
Eralaf/ssd.pytorch
|
acad53fd801f32120ecb3ff57950556e35db3d1c
|
[
"MIT"
] | null | null | null |
from .l2norm import L2Norm
from .multibox_loss import MultiBoxLoss
__all__ = ['L2Norm', 'MultiBoxLoss']
| 22.4
| 39
| 0.732143
| 12
| 112
| 6.416667
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032609
| 0.178571
| 112
| 4
| 40
| 28
| 0.804348
| 0
| 0
| 0
| 0
| 0
| 0.160714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 3
|
29f40d1447bb8284816ad9a4024cc926058b38fe
| 29,679
|
py
|
Python
|
serialized_data_converter.py
|
facelessuser/SerializedDataConverter
|
6cd0d59ae6cda98208e60e9c729d0eb047fe93db
|
[
"MIT"
] | 19
|
2015-02-13T08:16:32.000Z
|
2021-07-31T02:55:39.000Z
|
serialized_data_converter.py
|
facelessuser/SerializedDataConverter
|
6cd0d59ae6cda98208e60e9c729d0eb047fe93db
|
[
"MIT"
] | 9
|
2015-03-10T15:34:08.000Z
|
2019-11-18T01:57:20.000Z
|
serialized_data_converter.py
|
facelessuser/SerializedDataConverter
|
6cd0d59ae6cda98208e60e9c729d0eb047fe93db
|
[
"MIT"
] | 4
|
2015-10-01T16:04:52.000Z
|
2019-10-27T00:53:36.000Z
|
"""
Serialized Data Converter.
Licensed under MIT
Copyright (c) 2012 - 2015 Isaac Muse <[email protected]>
"""
import sublime
import sublime_plugin
import codecs
import re
import traceback
import os
from SerializedDataConverter.lib.log import error_msg
from SerializedDataConverter.lib import plist_includes as plist
from SerializedDataConverter.lib import yaml_includes as yaml
from SerializedDataConverter.lib import json_includes as json
PACKAGE_SETTINGS = "serialized_data_converter.sublime-settings"
def to_hex(value):
"""Convert int value to hex string."""
return "%02x" % value
class SerializedDataConverterListener(sublime_plugin.EventListener):
"""Listener to convert certain files on save."""
def on_post_save(self, view):
"""Convert after saves."""
ext2convert = self.get_save_ext()
filename = view.file_name()
command = None
if filename is not None:
for converter in ext2convert:
ext = converter.get("ext", None)
if ext is not None and filename.lower().endswith(ext.lower()):
command = converter.get("command", None)
break
if command is not None:
self.convert(view, command)
def get_save_ext(self):
"""Get the save extension."""
return sublime.load_settings(PACKAGE_SETTINGS).get("convert_on_save", [])
def convert(self, view, command):
"""Call the appropriate convert command."""
binary = False
save_binary = False
if command.startswith('bplist'):
command = command.replace('bplist', 'plist')
binary = True
elif command.endswith('bplist'):
command = command.replace('bplist', 'plist')
save_binary = True
view.run_command(
"serialized_%s" % command, {
"save_to_file": 'True',
"show_file": False,
"force": True,
"binary": binary,
'save_binary': save_binary
}
)
class _LanguageConverter(sublime_plugin.TextCommand):
"""Language converter base class."""
lang = None
default_lang = "Packages/Text/Plain text.tmLanguage"
errors = {
"filewrite": "Could not write file!\n"
"Please see console for more info.",
"bufferwrite": "Could not write view buffer!\n"
"Please see console for more info.",
"view2yaml": "Could not read view buffer as YAML!\n"
"Please see console for more info.",
"view2json": "Could not read view buffer as JSON!\n"
"Please see console for more info.",
"view2plist": "Could not read view buffer as PLIST!\n"
"Please see console for more info.",
"view2bplist": "Could not read view buffer as Binary PLIST!\n"
"Please see console for more info.",
"yaml2json": "Could not convert YAML to JSON!\n"
"Please see console for more info.",
"json2yaml": "Could not convert JSON to YAML!\n"
"Please see console for more info.",
"plist2yaml": "Could not convert PLIST to YAML!\n"
"Please see console for more info.",
"bplist2yaml": "Could not convert Binary PLIST to YAML!\n"
"Please see console for more info.",
"yaml2plist": "Could not convert YAML to PLIST!\n"
"Please see console for more info.",
"yaml2bplist": "Could not convert YAML to Binary PLIST!\n"
"Please see console for more info.",
"json2plist": "Could not convert JSON to PLIST!\n"
"Please see console for more info.",
"json2bplist": "Could not convert JSON to Binary PLIST!\n"
"Please see console for more info.",
"plist2json": "Could not convert PLIST to JSON!\n"
"Please see console for more info.",
"bplist2json": "Could not convert Binary PLIST to JSON!\n"
"Please see console for more info.",
"bplist2plist": "Could not convert Binary PLIST to PLIST!\n"
"Please see console for more info.",
"plist2bplist": "Could not convert PLIST to Binary PLIST!\n"
"Please see console for more info.",
"binwrite": "Source view does not exist on disk, so save name and location cannot be determined.\n"
"You can convert and save to disk as an XML PLIST and then convert it to BPLIST."
}
def __init__(self, *args, **kwargs):
"""General setup."""
self.settings = sublime.load_settings(PACKAGE_SETTINGS)
super().__init__(*args, **kwargs)
def set_syntax(self):
"""Set the view syntax."""
if self.output_view is not None:
# Get syntax language and set it
self.output_view.set_syntax_file(self.syntax)
def write_file(self, edit, show_file):
"""Write data to a file if a location can be acquired else save to a view buffer."""
errors = False
if self.save_filename is not None and os.path.exists(os.path.dirname(self.save_filename)):
# Save content to UTF file
try:
if self.save_binary:
with open(self.save_filename, "wb") as f:
f.write(self.output)
else:
with codecs.open(self.save_filename, "w", "utf-8") as f:
f.write(self.output)
self.output = None
if show_file:
self.output_view = self.view.window().open_file(self.save_filename)
except Exception:
errors = True
error_msg(self.errors["filewrite"], traceback.format_exc())
if not errors and show_file:
self.set_syntax()
else:
# Could not acquire a name that exists on disk
# Fallback to buffer write
self.write_buffer(edit, force_new_buffer=True)
def write_buffer(self, edit, force_new_buffer=False):
"""Write the data to a view buffer."""
errors = False
new_buffer = bool(self.settings.get("open_in_new_buffer", False))
# Save content to view buffer
try:
self.output_view = self.view.window().new_file() if new_buffer or force_new_buffer else self.view
if self.save_binary:
self.output_view.set_encoding('Hexadecimal')
bin_output = []
count = 0
for b in self.output:
if count % 16 == 0 and count != 0:
bin_output += ['\n', to_hex(b)]
else:
if count % 2 == 0 and count != 0:
bin_output += [' ', to_hex(b)]
else:
bin_output.append(to_hex(b))
count += 1
self.output = None
self.output_view.replace(
edit,
sublime.Region(0, self.view.size()),
''.join(bin_output)
)
bin_output = None
else:
self.output_view.set_encoding('UTF-8')
self.output_view.replace(
edit,
sublime.Region(0, self.view.size()),
self.output
)
self.output = None
except Exception:
errors = True
error_msg(self.errors["bufferwrite"], traceback.format_exc())
if not errors:
if new_buffer or force_new_buffer:
# If a name can be acquired from the original view,
# give buffer a modified derivative of the name.
if self.save_filename is not None:
self.output_view.set_name(os.path.basename(self.save_filename))
self.set_syntax()
def is_enabled(self, **kwargs):
"""Determine if the command should be enabled."""
enabled = True
filename = self.view.file_name()
view_okay = True
if (
kwargs.get('binary', False) and
(filename is None or not os.path.exists(filename)) and
self.view.encoding() != 'Hexadecimal'
):
view_okay = False
if not kwargs.get('force', False):
if (
kwargs.get('save_to_file', False) and
not bool(self.settings.get("enable_save_to_file_commands", False))
):
enabled = False
elif (
not kwargs.get('save_to_file', False) and
not bool(self.settings.get("enable_show_in_buffer_commands", False))
):
enabled = False
if not view_okay and enabled:
enabled = False
return enabled
def get_output_file(self, filename):
"""Get output filename to save to."""
return None
def read_source(self):
"""Read the source."""
return False
def convert(self, edit):
"""Convert the read data to the desired format."""
return False
def run(self, edit, **kwargs):
"""Begin conversion."""
self.binary = kwargs.get('binary', False)
self.save_binary = kwargs.get('save_binary', False)
self.syntax = self.settings.get(self.lang, self.default_lang) if self.lang is not None else self.default_lang
filename = self.view.file_name()
self.save_filename = self.get_output_file(filename) if filename is not None else None
if not self.read_source():
if not self.convert(edit):
if kwargs.get('save_to_file', False):
self.write_file(edit, kwargs.get('show_file', True))
else:
self.write_buffer(edit)
##########################
# Plist <-> YAML
##########################
class SerializedPlistToYamlCommand(_LanguageConverter):
"""Convert PLIST to YAML."""
lang = "yaml_language"
default_lang = "Packages/YAML/YAML.tmLanguage"
def get_output_file(self, filename):
"""Get output filename to save to."""
name = None
if self.binary:
setting = 'bplist_yaml_conversion_ext'
src = 'bplist'
else:
setting = 'plist_yaml_conversion_ext'
src = 'plist'
# Try and find file ext in the ext table
for ext in self.settings.get(setting, []):
m = re.match("^(.*)\\." + re.escape(ext[src]) + "$", filename, re.IGNORECASE)
if m is not None:
name = m.group(1) + "." + ext["yaml"]
break
# Could not find ext in table, replace current extension with default
if name is None:
name = os.path.splitext(filename)[0] + ".YAML"
return name
def read_source(self):
"""Read the source."""
errors = False
ext_tbl = self.settings.get("yaml_strip_tabs_from", [])
filename = self.view.file_name()
self.strip_tabs = False
if filename is not None:
for ext in ext_tbl:
m = re.match("^(.*)\\." + re.escape(ext) + "$", filename, re.IGNORECASE)
if m is not None:
self.strip_tabs = True
break
try:
# Ensure view buffer is in a UTF8 format.
# Wrap string in a file structure so it can be accessed by readPlist
# Read view buffer as PLIST and dump to Python dict
if self.binary and self.view.encoding() == 'Hexadecimal':
self.plist = plist.read_plist_from_hex_view(self.view)
elif self.binary and filename is not None and os.path.exists(filename):
self.plist = plist.read_plist_from_file(filename)
else:
self.plist = plist.read_plist_from_view(self.view)
except Exception:
errors = True
error_type = 'view2bplist' if self.binary else 'view2plist'
error_msg(self.errors[error_type], traceback.format_exc())
return errors
def convert(self, edit):
"""Convert the read data to the desired format."""
errors = False
try:
if not errors:
# Convert Python dict to JSON buffer.
default_flow_style = None
flow_setting = self.settings.get("yaml_default_flow_style", None)
if flow_setting == "true":
default_flow_style = True
elif flow_setting == "false":
default_flow_style = False
# Convert Python dict to Yaml buffer.
self.output = yaml.yaml_dumps(
self.plist,
default_flow_style=default_flow_style,
indent=self.settings.get("yaml_indent", 4),
strip_tabs=self.strip_tabs,
detect_timestamp=self.settings.get("yaml_detect_timestamp", True)
)
self.plist = None
except Exception:
errors = True
error_type = 'bplist2yaml' if self.binary else 'plist2yaml'
error_msg(self.errors[error_type], traceback.format_exc())
return errors
class SerializedYamlToPlistCommand(_LanguageConverter):
"""Convert YAML to PLIST."""
lang = "plist_language"
default_lang = "Packages/XML/XML.tmLanguage"
def get_output_file(self, filename):
"""Get output filename to save to."""
name = None
if self.save_binary:
setting = 'bplist_yaml_conversion_ext'
out = 'bplist'
else:
setting = 'plist_yaml_conversion_ext'
out = 'plist'
# Try and find file ext in the ext table
for ext in self.settings.get(setting, []):
m = re.match("^(.*)\\." + re.escape(ext["yaml"]) + "$", filename, re.IGNORECASE)
if m is not None:
name = m.group(1) + "." + ext[out]
break
# Could not find ext in table, replace current extension with default
if name is None:
name = os.path.splitext(filename)[0] + ".plist"
return name
def read_source(self):
"""Read the source."""
errors = False
try:
# Strip comments and dangling commas from view buffer
# Read view buffer as JSON
# Dump data to Python dict
self.yaml = yaml.read_yaml_from_view(self.view)
except Exception:
errors = True
error_msg(self.errors["view2yaml"], traceback.format_exc())
return errors
def convert(self, edit):
"""Convert the read data to the desired format."""
errors = False
try:
# Convert Python dict to PLIST buffer
if self.save_binary:
self.output = plist.plist_binary_dumps(
self.yaml,
detect_timestamp=self.settings.get("plist_detect_timestamp", True),
none_handler=self.settings.get("plist_none_handler", "fail")
)
else:
self.output = plist.plist_dumps(
self.yaml,
detect_timestamp=self.settings.get("plist_detect_timestamp", True),
none_handler=self.settings.get("plist_none_handler", "fail")
)
self.yaml = None
except Exception:
errors = True
error_type = 'yaml2bplist' if self.save_binary else 'yaml2plist'
error_msg(self.errors[error_type], traceback.format_exc())
return errors
def run(self, edit, **kwargs):
"""Begin conversion."""
if kwargs.get('save_binary', False):
self.lang = 'bplist_language'
self.default_lang = 'Packages/Text/Plain text.tmLanguage'
else:
self.lang = 'plist_language'
self.default_lang = 'Packages/XML/XML.tmLanguage'
super().run(edit, **kwargs)
##########################
# Plist <-> JSON
##########################
class SerializedPlistToJsonCommand(_LanguageConverter):
"""Convert PLIST to JSON."""
lang = "json_language"
default_lang = "Packages/JavaScript/JSON.tmLanguage"
def get_output_file(self, filename):
"""Get output filename to save to."""
name = None
if self.binary:
setting = 'bplist_json_conversion_ext'
src = 'bplist'
else:
setting = 'plist_json_conversion_ext'
src = 'plist'
# Try and find file ext in the ext table
for ext in self.settings.get(setting, []):
m = re.match("^(.*)\\." + re.escape(ext[src]) + "$", filename, re.IGNORECASE)
if m is not None:
name = m.group(1) + "." + ext["json"]
break
# Could not find ext in table, replace current extension with default
if name is None:
name = os.path.splitext(filename)[0] + ".JSON"
return name
def read_source(self):
"""Read the source."""
errors = False
try:
# Ensure view buffer is in a UTF8 format.
# Wrap string in a file structure so it can be accessed by readPlist
# Read view buffer as PLIST and dump to Python dict
filename = self.view.file_name()
if self.binary and self.view.encoding() == 'Hexadecimal':
self.plist = plist.read_plist_from_hex_view(self.view)
elif self.binary and filename is not None and os.path.exists(filename):
self.plist = plist.read_plist_from_file(filename)
else:
self.plist = plist.read_plist_from_view(self.view)
except Exception:
errors = True
error_type = 'view2bplist' if self.binary else 'view2plist'
error_msg(self.errors[error_type], traceback.format_exc())
return errors
def convert(self, edit):
"""Convert the read data to the desired format."""
errors = False
try:
if not errors:
self.output = json.json_dumps(
self.plist,
preserve_binary=self.settings.get("json_preserve_binary_data", True)
)
self.plist = None
except Exception:
errors = True
error_type = 'bplist2json' if self.binary else 'plist2json'
error_msg(self.errors[error_type], traceback.format_exc())
return errors
class SerializedJsonToPlistCommand(_LanguageConverter):
"""Convert JSON to PLIST."""
lang = "plist_language"
default_lang = "Packages/XML/XML.tmLanguage"
def get_output_file(self, filename):
"""Get output filename to save to."""
name = None
if self.save_binary:
setting = 'bplist_json_conversion_ext'
out = 'bplist'
else:
setting = 'plist_json_conversion_ext'
out = 'plist'
# Try and find file ext in the ext table
for ext in self.settings.get(setting, []):
m = re.match("^(.*)\\." + re.escape(ext["json"]) + "$", filename, re.IGNORECASE)
if m is not None:
name = m.group(1) + "." + ext[out]
break
# Could not find ext in table, replace current extension with default
if name is None:
name = os.path.splitext(filename)[0] + ".plist"
return name
def read_source(self):
"""Read the source."""
errors = False
try:
# Strip comments and dangling commas from view buffer
# Read view buffer as JSON
# Dump data to Python dict
self.json = json.read_json_from_view(self.view)
except Exception:
errors = True
error_msg(self.errors["view2json"], traceback.format_exc())
return errors
def convert(self, edit):
"""Convert the read data to the desired format."""
errors = False
try:
# Convert Python dict to PLIST buffer
if self.save_binary:
self.output = plist.plist_binary_dumps(
self.json,
detect_timestamp=self.settings.get("plist_detect_timestamp", True),
none_handler=self.settings.get("plist_none_handler", "fail")
)
else:
self.output = plist.plist_dumps(
self.json,
detect_timestamp=self.settings.get("plist_detect_timestamp", True),
none_handler=self.settings.get("plist_none_handler", "fail")
)
self.json = None
except Exception:
errors = True
error_type = 'json2bplist' if self.save_binary else 'json2plist'
error_msg(self.errors[error_type], traceback.format_exc())
return errors
def run(self, edit, **kwargs):
"""Begin conversion."""
if kwargs.get('save_binary', False):
self.lang = 'bplist_language'
self.default_lang = 'Packages/Text/Plain text.tmLanguage'
else:
self.lang = 'plist_language'
self.default_lang = 'Packages/XML/XML.tmLanguage'
super().run(edit, **kwargs)
##########################
# YAML <-> JSON
##########################
class SerializedJsonToYamlCommand(_LanguageConverter):
"""Convert JSON to YAML."""
lang = "yaml_language"
default_lang = "Packages/YAML/YAML.tmLanguage"
def get_output_file(self, filename):
"""Get output filename to save to."""
name = None
# Try and find file ext in the ext table
for ext in self.settings.get("json_yaml_conversion_ext", []):
m = re.match("^(.*)\\." + re.escape(ext["json"]) + "$", filename, re.IGNORECASE)
if m is not None:
name = m.group(1) + "." + ext["yaml"]
break
# Could not find ext in table, replace current extension with default
if name is None:
name = os.path.splitext(filename)[0] + ".YAML"
return name
def read_source(self):
"""Read the source."""
errors = False
ext_tbl = self.settings.get("yaml_strip_tabs_from", [])
filename = self.view.file_name()
self.strip_tabs = False
if filename is not None:
for ext in ext_tbl:
m = re.match("^(.*)\\." + re.escape(ext) + "$", filename, re.IGNORECASE)
if m is not None:
self.strip_tabs = True
break
try:
# Ensure view buffer is in a UTF8 format.
# Wrap string in a file structure so it can be accessed by readPlist
# Read view buffer as PLIST and dump to Python dict
self.json = json.read_json_from_view(self.view)
except Exception:
errors = True
error_msg(self.errors["view2json"], traceback.format_exc())
return errors
def convert(self, edit):
"""Convert the read data to the desired format."""
errors = False
try:
if not errors:
# Convert Python dict to JSON buffer.
default_flow_style = None
flow_setting = self.settings.get("yaml_default_flow_style", None)
if flow_setting == "true":
default_flow_style = True
elif flow_setting == "false":
default_flow_style = False
self.output = yaml.yaml_dumps(
self.json,
default_flow_style=default_flow_style,
indent=self.settings.get("yaml_indent", 4),
strip_tabs=self.strip_tabs,
detect_timestamp=self.settings.get("yaml_detect_timestamp", True)
)
self.json = None
except Exception:
errors = True
error_msg(self.errors["json2yaml"], traceback.format_exc())
return errors
class SerializedYamlToJsonCommand(_LanguageConverter):
"""Convert YAML to JSON."""
lang = "json_language"
default_lang = "Packages/JavaScript/JSON.tmLanguage"
def get_output_file(self, filename):
"""Get output filename to save to."""
name = None
# Try and find file ext in the ext table
for ext in self.settings.get("json_yaml_conversion_ext", []):
m = re.match("^(.*)\\." + re.escape(ext["yaml"]) + "$", filename, re.IGNORECASE)
if m is not None:
name = m.group(1) + "." + ext["json"]
break
# Could not find ext in table, replace current extension with default
if name is None:
name = os.path.splitext(filename)[0] + ".JSON"
return name
def read_source(self):
"""Read the source."""
errors = False
try:
# Strip comments and dangling commas from view buffer
# Read view buffer as JSON
# Dump data to Python dict
self.yaml = yaml.read_yaml_from_view(self.view)
except Exception:
errors = True
error_msg(self.errors["view2yaml"], traceback.format_exc())
return errors
def convert(self, edit):
"""Convert the read data to the desired format."""
errors = False
try:
# Convert Python dict to PLIST buffer
self.output = json.json_dumps(
self.yaml,
preserve_binary=self.settings.get("json_preserve_binary_data", True)
)
self.yaml = None
except Exception:
errors = True
error_msg(self.errors["yaml2json"], traceback.format_exc())
return errors
##########################
# BPLIST <-> PLIST
##########################
class SerializedPlistToPlistCommand(_LanguageConverter):
"""Convert BPLIST <-> PLIST."""
lang = 'plist_language'
default_lang = 'Packages/Text/Plain text.tmLanguage'
def get_output_file(self, filename):
"""Get output filename to save to."""
name = None
# Try and find file ext in the ext table
if self.binary:
src = 'bplist'
out = 'plist'
default_out = '.plist'
else:
src = 'plist'
out = 'bplist'
default_out = '.plist'
for ext in self.settings.get('bplist_plist_conversion_ext', []):
m = re.match("^(.*)\\." + re.escape(ext[src]) + "$", filename, re.IGNORECASE)
if m is not None:
name = m.group(1) + "." + ext[out]
break
# Could not find ext in table, replace current extension with default
if name is None:
name = os.path.splitext(filename)[0] + default_out
return name
def read_source(self):
"""Read the source."""
errors = False
try:
# Ensure view buffer is in a UTF8 format.
# Wrap string in a file structure so it can be accessed by readPlist
# Read view buffer as PLIST and dump to Python dict
filename = self.view.file_name()
if self.binary and self.view.encoding() == 'Hexadecimal':
self.plist = plist.read_plist_from_hex_view(self.view)
elif self.binary and filename is not None and os.path.exists(filename):
self.plist = plist.read_plist_from_file(filename)
else:
self.plist = plist.read_plist_from_view(self.view)
except Exception:
errors = True
error_type = 'view2bplist' if self.binary else 'view2plist'
error_msg(self.errors[error_type], traceback.format_exc())
return errors
def convert(self, edit):
"""Convert the read data to the desired format."""
errors = False
try:
# Convert Python dict to PLIST buffer
if self.save_binary:
self.output = plist.plist_binary_dumps(
self.plist,
detect_timestamp=self.settings.get("plist_detect_timestamp", True),
none_handler=self.settings.get("plist_none_handler", "fail")
)
else:
self.output = plist.plist_dumps(
self.plist,
detect_timestamp=self.settings.get("plist_detect_timestamp", True),
none_handler=self.settings.get("plist_none_handler", "fail")
)
self.plist = None
except Exception:
errors = True
error_type = "bplist2plist" if self.binary else 'plist2bplist'
error_msg(self.errors[error_type], traceback.format_exc())
return errors
def run(self, edit, **kwargs):
"""Begin conversion."""
if kwargs.get('save_binary', False):
self.lang = 'bplist_language'
self.default_lang = 'Packages/Text/Plain text.tmLanguage'
else:
self.lang = 'plist_language'
self.default_lang = 'Packages/XML/XML.tmLanguage'
super().run(edit, **kwargs)
| 36.371324
| 117
| 0.553253
| 3,364
| 29,679
| 4.749405
| 0.0761
| 0.023409
| 0.030982
| 0.019153
| 0.750955
| 0.722852
| 0.675033
| 0.642987
| 0.62615
| 0.601364
| 0
| 0.004264
| 0.344183
| 29,679
| 815
| 118
| 36.415951
| 0.816584
| 0.125375
| 0
| 0.724258
| 0
| 0
| 0.152553
| 0.035821
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064572
| false
| 0
| 0.017452
| 0
| 0.17452
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 3
|
4b1ec72cd59cc0bdeabf8053a9474d679e3c099c
| 963
|
py
|
Python
|
src/main/tools/dbpy/FotechUtils/dbUtils.py
|
inqwell/inq
|
31ce4cd6b9b123b1ec4462905ccbcf7c00d6efc3
|
[
"BSD-3-Clause"
] | 1
|
2016-09-25T16:41:57.000Z
|
2016-09-25T16:41:57.000Z
|
src/main/tools/dbpy/FotechUtils/dbUtils.py
|
inqwell/inq
|
31ce4cd6b9b123b1ec4462905ccbcf7c00d6efc3
|
[
"BSD-3-Clause"
] | null | null | null |
src/main/tools/dbpy/FotechUtils/dbUtils.py
|
inqwell/inq
|
31ce4cd6b9b123b1ec4462905ccbcf7c00d6efc3
|
[
"BSD-3-Clause"
] | 2
|
2016-09-25T16:48:49.000Z
|
2020-05-26T20:00:33.000Z
|
#
# $Header: /home/inqwell/cvsroot/dev/scripts/python/FotechUtils/dbUtils.py,v 1.1 2009/05/22 22:16:32 sanderst Exp $
#
import KBC.fotech
from Util import db
from dbConfig import configurationProvider
def getConnection( confile, system, level, access = "read", site = None, user = None, pwdfile = None ):
"""
Partial replacement for the db.py mess in cbtech/python2.5. You should use /prod/fotech/bin/generateDatabaseXml.py
to generate an xml file containing your system/level config from the old db.py. Then replace any call to db.getConnection
with dbUtils.getConnection and you should get back the same object that you would have got in the old strategy.
"""
config = configurationProvider( confile, pwdfile )
vendor, server, user, password, schema, host, port = config.getConnectionDetails( system, level, access, site, user )
return db._getConnection( vendor.upper(), server, schema, user, password )
| 45.857143
| 129
| 0.726895
| 133
| 963
| 5.255639
| 0.646617
| 0.04721
| 0.048641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022901
| 0.183801
| 963
| 20
| 130
| 48.15
| 0.866412
| 0.480789
| 0
| 0
| 0
| 0
| 0.008658
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0.285714
| 0.428571
| 0
| 0.714286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 3
|
4b27ad25bbde1311e3d80132c3a579efdb94319b
| 431
|
py
|
Python
|
HowloserUare/api/serializers.py
|
HowloserUare/HowloserUare-api
|
c5f8a111f1c4bacba8d0932d8da7ad72dd3ce5c0
|
[
"MIT"
] | null | null | null |
HowloserUare/api/serializers.py
|
HowloserUare/HowloserUare-api
|
c5f8a111f1c4bacba8d0932d8da7ad72dd3ce5c0
|
[
"MIT"
] | null | null | null |
HowloserUare/api/serializers.py
|
HowloserUare/HowloserUare-api
|
c5f8a111f1c4bacba8d0932d8da7ad72dd3ce5c0
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from rest_framework.validators import UniqueValidator
from core.models import User
class UserSerializer(serializers.Serializer):
username = serializers.CharField(
max_length=16,
min_length=5,
validators=[UniqueValidator(User.objects.all()), ])
password = serializers.CharField(
max_length=64, min_length=6)
class Meta:
exclude = ('id',)
| 26.9375
| 59
| 0.712297
| 47
| 431
| 6.404255
| 0.595745
| 0.053156
| 0.112957
| 0.192691
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017391
| 0.199536
| 431
| 15
| 60
| 28.733333
| 0.855072
| 0
| 0
| 0
| 0
| 0
| 0.00464
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.083333
| 0.25
| 0
| 0.583333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 3
|