repository_name
stringlengths
5
67
func_path_in_repository
stringlengths
4
234
func_name
stringlengths
0
314
whole_func_string
stringlengths
52
3.87M
language
stringclasses
6 values
func_code_string
stringlengths
52
3.87M
func_documentation_string
stringlengths
1
47.2k
func_code_url
stringlengths
85
339
sporteasy/python-poeditor
poeditor/client.py
POEditorAPI.add_language_to_project
def add_language_to_project(self, project_id, language_code):
    """Add a new language to a project.

    Args:
        project_id: POEditor project identifier.
        language_code: code of the language to add (e.g. ``"fr"``).

    Returns:
        True on success; API-level failures are raised by ``self._run``.
    """
    payload = {
        'url_path': "languages/add",
        'id': project_id,
        'language': language_code,
    }
    self._run(**payload)
    return True
python
def add_language_to_project(self, project_id, language_code): """ Adds a new language to project """ self._run( url_path="languages/add", id=project_id, language=language_code ) return True
Adds a new language to project
https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L253-L262
sporteasy/python-poeditor
poeditor/client.py
POEditorAPI.update_terms
def update_terms(self, project_id, data, fuzzy_trigger=None):
    """Update project terms: text, context, reference, plural and tags.

    >>> data = [
    ...     {"term": "Add new list", "context": "", "new_term": "Save list",
    ...      "new_context": "", "reference": "\/projects", "plural": "",
    ...      "comment": "", "tags": ["first_tag", "second_tag"]},
    ...     {"term": "Display list", "context": "", "new_term": "Show list",
    ...      "new_context": ""}
    ... ]

    Returns the ``terms`` counters from the API response.
    """
    # Only forward fuzzy_trigger when the caller supplied one.
    extra = {} if fuzzy_trigger is None else {'fuzzy_trigger': fuzzy_trigger}
    response = self._run(
        url_path="terms/update",
        id=project_id,
        data=json.dumps(data),
        **extra
    )
    return response['result']['terms']
python
def update_terms(self, project_id, data, fuzzy_trigger=None): """ Updates project terms. Lets you change the text, context, reference, plural and tags. >>> data = [ { "term": "Add new list", "context": "", "new_term": "Save list", "new_context": "", "reference": "\/projects", "plural": "", "comment": "", "tags": [ "first_tag", "second_tag" ] }, { "term": "Display list", "context": "", "new_term": "Show list", "new_context": "" } ] """ kwargs = {} if fuzzy_trigger is not None: kwargs['fuzzy_trigger'] = fuzzy_trigger data = self._run( url_path="terms/update", id=project_id, data=json.dumps(data), **kwargs ) return data['result']['terms']
Updates project terms. Lets you change the text, context, reference, plural and tags. >>> data = [ { "term": "Add new list", "context": "", "new_term": "Save list", "new_context": "", "reference": "\/projects", "plural": "", "comment": "", "tags": [ "first_tag", "second_tag" ] }, { "term": "Display list", "context": "", "new_term": "Show list", "new_context": "" } ]
https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L344-L380
sporteasy/python-poeditor
poeditor/client.py
POEditorAPI.add_comment
def add_comment(self, project_id, data):
    """Add comments to existing terms.

    >>> data = [
    ...     {"term": "Add new list", "context": "",
    ...      "comment": "This is a button"},
    ...     {"term": "one project found", "context": "",
    ...      "comment": "Make sure you translate the plural forms"},
    ...     {"term": "Show all projects", "context": "",
    ...      "comment": "This is a button"}
    ... ]

    Returns the ``terms`` counters from the API response.
    """
    response = self._run(
        url_path="terms/add_comment",
        id=project_id,
        data=json.dumps(data)
    )
    return response['result']['terms']
python
def add_comment(self, project_id, data): """ Adds comments to existing terms. >>> data = [ { "term": "Add new list", "context": "", "comment": "This is a button" }, { "term": "one project found", "context": "", "comment": "Make sure you translate the plural forms" }, { "term": "Show all projects", "context": "", "comment": "This is a button" } ] """ data = self._run( url_path="terms/add_comment", id=project_id, data=json.dumps(data) ) return data['result']['terms']
Adds comments to existing terms. >>> data = [ { "term": "Add new list", "context": "", "comment": "This is a button" }, { "term": "one project found", "context": "", "comment": "Make sure you translate the plural forms" }, { "term": "Show all projects", "context": "", "comment": "This is a button" } ]
https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L403-L429
sporteasy/python-poeditor
poeditor/client.py
POEditorAPI.update_project_language
def update_project_language(self, project_id, language_code, data,
                            fuzzy_trigger=None):
    """Insert or overwrite translations for one project language.

    >>> data = [
    ...     {"term": "Projects", "context": "project list",
    ...      "translation": {"content": "Des projets", "fuzzy": 0}}
    ... ]

    Returns the ``translations`` counters from the API response.
    """
    # Only forward fuzzy_trigger when the caller supplied one.
    extra = {} if fuzzy_trigger is None else {'fuzzy_trigger': fuzzy_trigger}
    response = self._run(
        url_path="languages/update",
        id=project_id,
        language=language_code,
        data=json.dumps(data),
        **extra
    )
    return response['result']['translations']
python
def update_project_language(self, project_id, language_code, data, fuzzy_trigger=None): """ Inserts / overwrites translations. >>> data = [ { "term": "Projects", "context": "project list", "translation": { "content": "Des projets", "fuzzy": 0 } } ] """ kwargs = {} if fuzzy_trigger is not None: kwargs['fuzzy_trigger'] = fuzzy_trigger data = self._run( url_path="languages/update", id=project_id, language=language_code, data=json.dumps(data), **kwargs ) return data['result']['translations']
Inserts / overwrites translations. >>> data = [ { "term": "Projects", "context": "project list", "translation": { "content": "Des projets", "fuzzy": 0 } } ]
https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L474-L499
sporteasy/python-poeditor
poeditor/client.py
POEditorAPI.export
def export(self, project_id, language_code, file_type='po', filters=None,
           tags=None, local_file=None):
    """Export terms / translations for a language and download the file.

    file_type: one of ``self.FILE_TYPES`` (default 'po').
    filters: a single filter name or a list of names from ``self.FILTER_BY``.
    tags: tag name or list of tag names to filter results by.
    local_file: path to save the content into; when None, a random temp
        file with a matching suffix is created.

    >>> tags = 'name-of-tag'
    >>> tags = ["name-of-tag", "name-of-another-tag"]
    >>> filters = 'translated'
    >>> filters = ["translated", "not_fuzzy"]

    Returns (file_url, local_file). The URL expires after 10 minutes.
    """
    if file_type not in self.FILE_TYPES:
        raise POEditorArgsException(
            'content_type: file format {}'.format(self.FILE_TYPES))
    if filters:
        # BUGFIX: a valid single-string filter (e.g. 'translated') used to
        # fall through to the set() check, which iterated the string's
        # characters and raised incorrectly. Validate strings and
        # iterables along separate paths.
        if isinstance(filters, str):
            if filters not in self.FILTER_BY:
                raise POEditorArgsException(
                    "filters - filter results by {}".format(self.FILTER_BY))
        elif set(filters).difference(set(self.FILTER_BY)):
            raise POEditorArgsException(
                "filters - filter results by {}".format(self.FILTER_BY))
    data = self._run(
        url_path="projects/export",
        id=project_id,
        language=language_code,
        type=file_type,
        filters=filters,
        tags=tags
    )
    # The link of the file (expires after 10 minutes).
    file_url = data['result']['url']
    # Download file content:
    res = requests.get(file_url, stream=True)
    if not local_file:
        tmp_file = tempfile.NamedTemporaryFile(
            delete=False, suffix='.{}'.format(file_type))
        tmp_file.close()
        local_file = tmp_file.name
    with open(local_file, 'w+b') as po_file:
        # use a distinct name: the original reused (shadowed) `data` here
        for chunk in res.iter_content(chunk_size=1024):
            po_file.write(chunk)
    return file_url, local_file
python
def export(self, project_id, language_code, file_type='po', filters=None, tags=None, local_file=None): """ Return terms / translations filters - filter by self._filter_by tags - filter results by tags; local_file - save content into it. If None, save content into random temp file. >>> tags = 'name-of-tag' >>> tags = ["name-of-tag"] >>> tags = ["name-of-tag", "name-of-another-tag"] >>> filters = 'translated' >>> filters = ["translated"] >>> filters = ["translated", "not_fuzzy"] """ if file_type not in self.FILE_TYPES: raise POEditorArgsException( 'content_type: file format {}'.format(self.FILE_TYPES)) if filters and isinstance(filters, str) and filters not in self.FILTER_BY: raise POEditorArgsException( "filters - filter results by {}".format(self.FILTER_BY)) elif filters and set(filters).difference(set(self.FILTER_BY)): raise POEditorArgsException( "filters - filter results by {}".format(self.FILTER_BY)) data = self._run( url_path="projects/export", id=project_id, language=language_code, type=file_type, filters=filters, tags=tags ) # The link of the file (expires after 10 minutes). file_url = data['result']['url'] # Download file content: res = requests.get(file_url, stream=True) if not local_file: tmp_file = tempfile.NamedTemporaryFile( delete=False, suffix='.{}'.format(file_type)) tmp_file.close() local_file = tmp_file.name with open(local_file, 'w+b') as po_file: for data in res.iter_content(chunk_size=1024): po_file.write(data) return file_url, local_file
Return terms / translations filters - filter by self._filter_by tags - filter results by tags; local_file - save content into it. If None, save content into random temp file. >>> tags = 'name-of-tag' >>> tags = ["name-of-tag"] >>> tags = ["name-of-tag", "name-of-another-tag"] >>> filters = 'translated' >>> filters = ["translated"] >>> filters = ["translated", "not_fuzzy"]
https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L501-L552
sporteasy/python-poeditor
poeditor/client.py
POEditorAPI._upload
def _upload(self, project_id, updating, file_path, language_code=None,
            overwrite=False, sync_terms=False, tags=None,
            fuzzy_trigger=None):
    """Internal: upload a file to update terms and/or translations.

    updating: one of UPDATING_TERMS, UPDATING_TERMS_TRANSLATIONS,
        UPDATING_TRANSLATIONS.
    language_code: required when translations are being updated.

    File uploads are limited by the API to one every 30 seconds.

    Returns the ``result`` object from the API response.
    """
    options = [
        self.UPDATING_TERMS,
        self.UPDATING_TERMS_TRANSLATIONS,
        self.UPDATING_TRANSLATIONS
    ]
    if updating not in options:
        raise POEditorArgsException(
            'Updating arg must be in {}'.format(options)
        )
    options = [
        self.UPDATING_TERMS_TRANSLATIONS,
        self.UPDATING_TRANSLATIONS
    ]
    if language_code is None and updating in options:
        # BUGFIX: message used to read "required only if ... translations)"
        # (wrong wording plus a stray parenthesis).
        raise POEditorArgsException(
            'Language code is required when updating is '
            'terms_translations or translations'
        )
    # tags / sync_terms make no sense when only translations change:
    if updating == self.UPDATING_TRANSLATIONS:
        tags = None
        sync_terms = None
    # Special content type: the upload endpoint wants flat strings.
    tags = tags or ''
    language_code = language_code or ''
    sync_terms = '1' if sync_terms else '0'
    overwrite = '1' if overwrite else '0'
    fuzzy_trigger = '1' if fuzzy_trigger else '0'
    project_id = str(project_id)
    with open(file_path, 'r+b') as local_file:
        data = self._run(
            url_path="projects/upload",
            id=project_id,
            language=language_code,
            file=local_file,
            updating=updating,
            tags=tags,
            sync_terms=sync_terms,
            overwrite=overwrite,
            fuzzy_trigger=fuzzy_trigger
        )
    return data['result']
python
def _upload(self, project_id, updating, file_path, language_code=None, overwrite=False, sync_terms=False, tags=None, fuzzy_trigger=None): """ Internal: updates terms / translations File uploads are limited to one every 30 seconds """ options = [ self.UPDATING_TERMS, self.UPDATING_TERMS_TRANSLATIONS, self.UPDATING_TRANSLATIONS ] if updating not in options: raise POEditorArgsException( 'Updating arg must be in {}'.format(options) ) options = [ self.UPDATING_TERMS_TRANSLATIONS, self.UPDATING_TRANSLATIONS ] if language_code is None and updating in options: raise POEditorArgsException( 'Language code is required only if updating is ' 'terms_translations or translations)' ) if updating == self.UPDATING_TRANSLATIONS: tags = None sync_terms = None # Special content type: tags = tags or '' language_code = language_code or '' sync_terms = '1' if sync_terms else '0' overwrite = '1' if overwrite else '0' fuzzy_trigger = '1' if fuzzy_trigger else '0' project_id = str(project_id) with open(file_path, 'r+b') as local_file: data = self._run( url_path="projects/upload", id=project_id, language=language_code, file=local_file, updating=updating, tags=tags, sync_terms=sync_terms, overwrite=overwrite, fuzzy_trigger=fuzzy_trigger ) return data['result']
Internal: updates terms / translations File uploads are limited to one every 30 seconds
https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L554-L605
sporteasy/python-poeditor
poeditor/client.py
POEditorAPI.update_terms
def update_terms(self, project_id, file_path=None, language_code=None,
                 overwrite=False, sync_terms=False, tags=None,
                 fuzzy_trigger=None):
    """Upload a file that updates project terms.

    overwrite: set to True to overwrite translations.
    sync_terms: set to True to sync terms (terms not found in the uploaded
        file are deleted from the project, new ones are added). Ignored
        when updating = translations.
    tags: tag the imported terms; keys: "all" (every imported term),
        "new" (terms not already in the project), "obsolete" (terms in the
        project but not in the file), "overwritten_translations" (terms
        whose translations change).
    fuzzy_trigger: set to True to mark corresponding translations in the
        other languages as fuzzy for the updated values.
    """
    upload_kwargs = dict(
        project_id=project_id,
        updating=self.UPDATING_TERMS,
        file_path=file_path,
        language_code=language_code,
        overwrite=overwrite,
        sync_terms=sync_terms,
        tags=tags,
        fuzzy_trigger=fuzzy_trigger,
    )
    return self._upload(**upload_kwargs)
python
def update_terms(self, project_id, file_path=None, language_code=None, overwrite=False, sync_terms=False, tags=None, fuzzy_trigger=None): """ Updates terms overwrite: set it to True if you want to overwrite translations sync_terms: set it to True if you want to sync your terms (terms that are not found in the uploaded file will be deleted from project and the new ones added). Ignored if updating = translations tags: Add tags to the project terms; available when updating terms or terms_translations; you can use the following keys: "all" - for the all the imported terms, "new" - for the terms which aren't already in the project, "obsolete" - for the terms which are in the project but not in the imported file and "overwritten_translations" - for the terms for which translations change fuzzy_trigger: set it to True to mark corresponding translations from the other languages as fuzzy for the updated values """ return self._upload( project_id=project_id, updating=self.UPDATING_TERMS, file_path=file_path, language_code=language_code, overwrite=overwrite, sync_terms=sync_terms, tags=tags, fuzzy_trigger=fuzzy_trigger )
Updates terms overwrite: set it to True if you want to overwrite translations sync_terms: set it to True if you want to sync your terms (terms that are not found in the uploaded file will be deleted from project and the new ones added). Ignored if updating = translations tags: Add tags to the project terms; available when updating terms or terms_translations; you can use the following keys: "all" - for the all the imported terms, "new" - for the terms which aren't already in the project, "obsolete" - for the terms which are in the project but not in the imported file and "overwritten_translations" - for the terms for which translations change fuzzy_trigger: set it to True to mark corresponding translations from the other languages as fuzzy for the updated values
https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L607-L633
sporteasy/python-poeditor
poeditor/client.py
POEditorAPI.update_terms_translations
def update_terms_translations(self, project_id, file_path=None,
                              language_code=None, overwrite=False,
                              sync_terms=False, tags=None,
                              fuzzy_trigger=None):
    """Upload a file that updates both terms and their translations.

    overwrite: set to True to overwrite translations.
    sync_terms: set to True to sync terms (terms not found in the uploaded
        file are deleted from the project, new ones are added). Ignored
        when updating = translations.
    tags: tag the imported terms; keys: "all", "new", "obsolete",
        "overwritten_translations" (see POEditor upload docs).
    fuzzy_trigger: set to True to mark corresponding translations in the
        other languages as fuzzy for the updated values.
    """
    upload_kwargs = dict(
        project_id=project_id,
        updating=self.UPDATING_TERMS_TRANSLATIONS,
        file_path=file_path,
        language_code=language_code,
        overwrite=overwrite,
        sync_terms=sync_terms,
        tags=tags,
        fuzzy_trigger=fuzzy_trigger,
    )
    return self._upload(**upload_kwargs)
python
def update_terms_translations(self, project_id, file_path=None, language_code=None, overwrite=False, sync_terms=False, tags=None, fuzzy_trigger=None): """ Updates terms translations overwrite: set it to True if you want to overwrite translations sync_terms: set it to True if you want to sync your terms (terms that are not found in the uploaded file will be deleted from project and the new ones added). Ignored if updating = translations tags: Add tags to the project terms; available when updating terms or terms_translations; you can use the following keys: "all" - for the all the imported terms, "new" - for the terms which aren't already in the project, "obsolete" - for the terms which are in the project but not in the imported file and "overwritten_translations" - for the terms for which translations change fuzzy_trigger: set it to True to mark corresponding translations from the other languages as fuzzy for the updated values """ return self._upload( project_id=project_id, updating=self.UPDATING_TERMS_TRANSLATIONS, file_path=file_path, language_code=language_code, overwrite=overwrite, sync_terms=sync_terms, tags=tags, fuzzy_trigger=fuzzy_trigger )
Updates terms translations overwrite: set it to True if you want to overwrite translations sync_terms: set it to True if you want to sync your terms (terms that are not found in the uploaded file will be deleted from project and the new ones added). Ignored if updating = translations tags: Add tags to the project terms; available when updating terms or terms_translations; you can use the following keys: "all" - for the all the imported terms, "new" - for the terms which aren't already in the project, "obsolete" - for the terms which are in the project but not in the imported file and "overwritten_translations" - for the terms for which translations change fuzzy_trigger: set it to True to mark corresponding translations from the other languages as fuzzy for the updated values
https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L652-L679
sporteasy/python-poeditor
poeditor/client.py
POEditorAPI.update_translations
def update_translations(self, project_id, file_path=None,
                        language_code=None, overwrite=False,
                        fuzzy_trigger=None):
    """Upload a file that updates translations only.

    overwrite: set to True to overwrite definitions.
    fuzzy_trigger: set to True to mark corresponding translations in the
        other languages as fuzzy for the updated values.
    """
    upload_kwargs = dict(
        project_id=project_id,
        updating=self.UPDATING_TRANSLATIONS,
        file_path=file_path,
        language_code=language_code,
        overwrite=overwrite,
        fuzzy_trigger=fuzzy_trigger,
    )
    return self._upload(**upload_kwargs)
python
def update_translations(self, project_id, file_path=None, language_code=None, overwrite=False, fuzzy_trigger=None): """ Updates translations overwrite: set it to True if you want to overwrite definitions fuzzy_trigger: set it to True to mark corresponding translations from the other languages as fuzzy for the updated values """ return self._upload( project_id=project_id, updating=self.UPDATING_TRANSLATIONS, file_path=file_path, language_code=language_code, overwrite=overwrite, fuzzy_trigger=fuzzy_trigger )
Updates translations overwrite: set it to True if you want to overwrite definitions fuzzy_trigger: set it to True to mark corresponding translations from the other languages as fuzzy for the updated values
https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L695-L711
sporteasy/python-poeditor
poeditor/client.py
POEditorAPI.list_contributors
def list_contributors(self, project_id=None, language_code=None):
    """Return the list of contributors (empty list when there are none)."""
    response = self._run(
        url_path="contributors/list",
        id=project_id,
        language=language_code
    )
    result = response['result']
    return result.get('contributors', [])
python
def list_contributors(self, project_id=None, language_code=None): """ Returns the list of contributors """ data = self._run( url_path="contributors/list", id=project_id, language=language_code ) return data['result'].get('contributors', [])
Returns the list of contributors
https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L723-L732
sporteasy/python-poeditor
poeditor/client.py
POEditorAPI.add_contributor
def add_contributor(self, project_id, name, email, language_code):
    """Add a contributor to a project language.

    Returns True on success; API errors are raised by ``self._run``.
    """
    payload = {
        'url_path': "contributors/add",
        'id': project_id,
        'name': name,
        'email': email,
        'language': language_code,
    }
    self._run(**payload)
    return True
python
def add_contributor(self, project_id, name, email, language_code): """ Adds a contributor to a project language """ self._run( url_path="contributors/add", id=project_id, name=name, email=email, language=language_code ) return True
Adds a contributor to a project language
https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L734-L745
sporteasy/python-poeditor
poeditor/client.py
POEditorAPI.add_administrator
def add_administrator(self, project_id, name, email):
    """
    Adds an administrator to a project (contributors/add with admin=True).
    Always returns True; API failures surface from self._run.
    """
    self._run(
        url_path="contributors/add",
        id=project_id,
        name=name,
        email=email,
        admin=True
    )
    return True
python
def add_administrator(self, project_id, name, email): """ Adds a contributor to a project language """ self._run( url_path="contributors/add", id=project_id, name=name, email=email, admin=True ) return True
Adds a contributor to a project language
https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L747-L758
sporteasy/python-poeditor
poeditor/client.py
POEditorAPI.remove_contributor
def remove_contributor(self, project_id, email, language):
    """Remove a contributor from a project language.

    Returns True on success; API errors are raised by ``self._run``.
    """
    payload = {
        'url_path': "contributors/remove",
        'id': project_id,
        'email': email,
        'language': language,
    }
    self._run(**payload)
    return True
python
def remove_contributor(self, project_id, email, language): """ Removes a contributor """ self._run( url_path="contributors/remove", id=project_id, email=email, language=language ) return True
Removes a contributor
https://github.com/sporteasy/python-poeditor/blob/e9c0a8ab08816903122f730b73ffaab46601076c/poeditor/client.py#L760-L770
kennell/schiene
schiene/schiene.py
parse_stations
def parse_stations(html):
    """Strip the surrounding JS wrapper and return the suggestion list.

    The endpoint returns ``SLs.sls={...};SLs.showSuggestion();`` — remove
    both pieces of scaffolding, parse the remaining JSON object and return
    its ``suggestions`` entry.
    """
    for scaffolding in ('SLs.sls=', ';SLs.showSuggestion();'):
        html = html.replace(scaffolding, '')
    return json.loads(html)['suggestions']
python
def parse_stations(html): """ Strips JS code, loads JSON """ html = html.replace('SLs.sls=', '').replace(';SLs.showSuggestion();', '') html = json.loads(html) return html['suggestions']
Strips JS code, loads JSON
https://github.com/kennell/schiene/blob/a8f1ba2bd30f9f4a373c7b0ced589bd60121aa1f/schiene/schiene.py#L40-L46
kennell/schiene
schiene/schiene.py
parse_delay
def parse_delay(data):
    """Fetch the connection details page and attach delay information.

    Mutates and returns ``data``: adds 'ontime' (bool), 'delay' (dict with
    'delay_departure' / 'delay_arrival' in minutes) and 'canceled'.
    """
    # parse data from the details view
    details_page = requests.get(data['details'])
    soup = BeautifulSoup(details_page.text, "html.parser")

    # departure delay (0 when no delay node is rendered)
    departure_node = soup.find('div', class_="routeStart").find(
        'span', class_=["delay", "delayOnTime"])
    departure_delay = (
        calculate_delay(data['departure'], departure_node.text)
        if departure_node else 0
    )

    # arrival delay (0 when no delay node is rendered)
    arrival_node = soup.find(
        'div', class_=["routeEnd", "routeEndAdditional"]).find(
        'span', class_=["delay", "delayOnTime"])
    arrival_delay = (
        calculate_delay(data['arrival'], arrival_node.text)
        if arrival_node else 0
    )

    # save the parsed data
    data['ontime'] = (departure_delay + arrival_delay == 0)
    data['delay'] = {
        'delay_departure': int(departure_delay),
        'delay_arrival': int(arrival_delay)
    }
    # TODO: this should not be hardcoded!
    data['canceled'] = False
    return data
python
def parse_delay(data): """ Prase the delay """ # parse data from the details view rsp = requests.get(data['details']) soup = BeautifulSoup(rsp.text, "html.parser") # get departure delay delay_departure_raw = soup.find('div', class_="routeStart").find('span', class_=["delay", "delayOnTime"]) if delay_departure_raw: delay_departure = calculate_delay(data['departure'], delay_departure_raw.text) else: delay_departure = 0 # get arrival delay delay_arrival_raw = soup.find('div', class_=["routeEnd","routeEndAdditional"]).find('span', class_=["delay", "delayOnTime"]) if delay_arrival_raw: delay_arrival = calculate_delay(data['arrival'], delay_arrival_raw.text) else: delay_arrival = 0 # save the parsed data if delay_departure + delay_arrival == 0: data['ontime'] = True else: data['ontime'] = False data['delay'] = { 'delay_departure': int(delay_departure), 'delay_arrival': int(delay_arrival) } # TODO: this should not be hardcoded! data['canceled'] = False return data
Prase the delay
https://github.com/kennell/schiene/blob/a8f1ba2bd30f9f4a373c7b0ced589bd60121aa1f/schiene/schiene.py#L48-L85
kennell/schiene
schiene/schiene.py
calculate_delay
def calculate_delay(original, delay):
    """Return the delay in whole minutes between two 'HH:MM' time strings.

    Args:
        original: scheduled time, e.g. '14:35'.
        delay: actual (delayed) time, e.g. '14:50'.

    Returns:
        Minutes of delay (float, floor division as before).
    """
    scheduled = datetime.strptime(original, '%H:%M')
    actual = datetime.strptime(delay, '%H:%M')
    minutes = (actual - scheduled).total_seconds() // 60
    # BUGFIX: a delay crossing midnight (e.g. 23:55 -> 00:05) used to come
    # out as a large negative number; wrap it into the next day.
    if minutes < 0:
        minutes += 24 * 60
    return minutes
python
def calculate_delay(original, delay): """ Calculate the delay """ original = datetime.strptime(original, '%H:%M') delayed = datetime.strptime(delay, '%H:%M') diff = delayed - original return diff.total_seconds() // 60
Calculate the delay
https://github.com/kennell/schiene/blob/a8f1ba2bd30f9f4a373c7b0ced589bd60121aa1f/schiene/schiene.py#L87-L94
kennell/schiene
schiene/schiene.py
Schiene.stations
def stations(self, station, limit=10):
    """Find stations matching a search query.

    Args:
        station (str): search query
        limit (int): maximum number of results
    """
    params = {
        'start': 1,
        'S': station + '?',
        'REQ0JourneyStopsB': limit
    }
    response = requests.get(
        'http://reiseauskunft.bahn.de/bin/ajax-getstop.exe/dn',
        params=params)
    return parse_stations(response.text)
python
def stations(self, station, limit=10): """ Find stations for given queries Args: station (str): search query limit (int): limit number of results """ query = { 'start': 1, 'S': station + '?', 'REQ0JourneyStopsB': limit } rsp = requests.get('http://reiseauskunft.bahn.de/bin/ajax-getstop.exe/dn', params=query) return parse_stations(rsp.text)
Find stations for given queries Args: station (str): search query limit (int): limit number of results
https://github.com/kennell/schiene/blob/a8f1ba2bd30f9f4a373c7b0ced589bd60121aa1f/schiene/schiene.py#L99-L113
kennell/schiene
schiene/schiene.py
Schiene.connections
def connections(self, origin, destination, dt=None, only_direct=False):
    """Find connections between two stations.

    Args:
        origin (str): origin station
        destination (str): destination station
        dt (datetime): date and time for the query; defaults to the
            current time at call time.
        only_direct (bool): only direct connections
    """
    # BUGFIX: the old signature used ``dt=datetime.now()``, which Python
    # evaluates once at import time, so every default query silently
    # reused the process start time. Resolve "now" per call instead.
    if dt is None:
        dt = datetime.now()
    query = {
        'S': origin,
        'Z': destination,
        'date': dt.strftime("%d.%m.%y"),
        'time': dt.strftime("%H:%M"),
        'start': 1,
        'REQ0JourneyProduct_opt0': 1 if only_direct else 0
    }
    rsp = requests.get(
        'http://mobile.bahn.de/bin/mobil/query.exe/dox?', params=query)
    return parse_connections(rsp.text)
python
def connections(self, origin, destination, dt=datetime.now(), only_direct=False): """ Find connections between two stations Args: origin (str): origin station destination (str): destination station dt (datetime): date and time for query only_direct (bool): only direct connections """ query = { 'S': origin, 'Z': destination, 'date': dt.strftime("%d.%m.%y"), 'time': dt.strftime("%H:%M"), 'start': 1, 'REQ0JourneyProduct_opt0': 1 if only_direct else 0 } rsp = requests.get('http://mobile.bahn.de/bin/mobil/query.exe/dox?', params=query) return parse_connections(rsp.text)
Find connections between two stations Args: origin (str): origin station destination (str): destination station dt (datetime): date and time for query only_direct (bool): only direct connections
https://github.com/kennell/schiene/blob/a8f1ba2bd30f9f4a373c7b0ced589bd60121aa1f/schiene/schiene.py#L116-L135
seung-lab/python-task-queue
taskqueue/taskqueue.py
_scatter
def _scatter(sequence, n): """Scatters elements of ``sequence`` into ``n`` blocks.""" chunklen = int(math.ceil(float(len(sequence)) / float(n))) return [ sequence[ i*chunklen : (i+1)*chunklen ] for i in range(n) ]
python
def _scatter(sequence, n): """Scatters elements of ``sequence`` into ``n`` blocks.""" chunklen = int(math.ceil(float(len(sequence)) / float(n))) return [ sequence[ i*chunklen : (i+1)*chunklen ] for i in range(n) ]
Scatters elements of ``sequence`` into ``n`` blocks.
https://github.com/seung-lab/python-task-queue/blob/e2742d0373e49c2a19badce102a355894924b4fb/taskqueue/taskqueue.py#L634-L640
seung-lab/python-task-queue
taskqueue/taskqueue.py
SuperTaskQueue.lease
def lease(self, seconds=600, num_tasks=1, tag=None):
    """Acquire a lease on the topmost N unowned tasks in this queue.

    Required query parameters: leaseSecs, numTasks.
    Raises QueueEmpty when no task is available.
    """
    tag = tag or None  # normalize falsy tags ('' etc.) to None
    leased = self._api.lease(
        numTasks=num_tasks,
        seconds=seconds,
        groupByTag=(tag is not None),
        tag=tag,
    )
    if not leased:
        raise QueueEmpty
    return totask(leased[0])
python
def lease(self, seconds=600, num_tasks=1, tag=None): """ Acquires a lease on the topmost N unowned tasks in the specified queue. Required query parameters: leaseSecs, numTasks """ tag = tag if tag else None tasks = self._api.lease( numTasks=num_tasks, seconds=seconds, groupByTag=(tag is not None), tag=tag, ) if not len(tasks): raise QueueEmpty task = tasks[0] return totask(task)
Acquires a lease on the topmost N unowned tasks in the specified queue. Required query parameters: leaseSecs, numTasks
https://github.com/seung-lab/python-task-queue/blob/e2742d0373e49c2a19badce102a355894924b4fb/taskqueue/taskqueue.py#L168-L185
seung-lab/python-task-queue
taskqueue/taskqueue.py
SuperTaskQueue.purge
def purge(self):
    """Delete every task in the queue.

    Prefers the backend's native purge; backends without one (no
    ``purge`` attribute) fall back to list-and-delete until drained.
    """
    try:
        return self._api.purge()
    except AttributeError:
        while True:
            remaining = self.list()
            if not remaining:
                break
            for task in remaining:
                self.delete(task)
            self.wait()
        return self
python
def purge(self): """Deletes all tasks in the queue.""" try: return self._api.purge() except AttributeError: while True: lst = self.list() if len(lst) == 0: break for task in lst: self.delete(task) self.wait() return self
Deletes all tasks in the queue.
https://github.com/seung-lab/python-task-queue/blob/e2742d0373e49c2a19badce102a355894924b4fb/taskqueue/taskqueue.py#L194-L207
seung-lab/python-task-queue
taskqueue/taskqueue.py
SuperTaskQueue.poll
def poll( self, lease_seconds=LEASE_SECONDS, tag=None, verbose=False, execute_args=[], execute_kwargs={}, stop_fn=None, backoff_exceptions=[], min_backoff_window=30, max_backoff_window=120, log_fn=None ): """ Poll a queue until a stop condition is reached (default forever). Note that this function is not thread safe as it requires a global variable to intercept SIGINT. lease_seconds: each task should be leased for this many seconds tag: if specified, query for only tasks that match this tag execute_args / execute_kwargs: pass these arguments to task execution backoff_exceptions: A list of exceptions that instead of causing a crash, instead cause the polling to back off for an increasing exponential random window. min_backoff_window: The minimum sized window (in seconds) to select a random backoff time. max_backoff_window: The window doubles each retry. This is the maximum value in seconds. stop_fn: A boolean returning function that accepts no parameters. When it returns True, the task execution loop will terminate. It is evaluated once after every task. log_fn: Feed error messages to this function, default print (when verbose is enabled). verbose: print out the status of each step Return: number of tasks executed """ global LOOP if not callable(stop_fn) and stop_fn is not None: raise ValueError("stop_fn must be a callable. " + str(stop_fn)) elif not callable(stop_fn): stop_fn = lambda: False def random_exponential_window_backoff(n): n = min(n, min_backoff_window) # 120 sec max b/c on avg a request every ~250msec if 500 containers # in contention which seems like a quite reasonable volume of traffic # to handle high = min(2 ** n, max_backoff_window) return random.uniform(0, high) def printv(*args, **kwargs): if verbose: print(*args, **kwargs) LOOP = True def sigint_handler(signum, frame): global LOOP printv("Interrupted. 
Exiting after this task completes...") LOOP = False prev_sigint_handler = signal.getsignal(signal.SIGINT) signal.signal(signal.SIGINT, sigint_handler) if log_fn is None: log_fn = printv tries = 0 executed = 0 backoff = False backoff_exceptions = tuple(list(backoff_exceptions) + [ QueueEmpty ]) while LOOP: task = 'unknown' # for error message prior to leasing try: task = self.lease(seconds=int(lease_seconds)) tries += 1 printv(task) task.execute(*execute_args, **execute_kwargs) executed += 1 printv("Delete enqueued task...") self.delete(task) log_fn('INFO', task , "succesfully executed") tries = 0 except backoff_exceptions: backoff = True except Exception as e: printv('ERROR', task, "raised {}\n {}".format(e , traceback.format_exc())) raise #this will restart the container in kubernetes if stop_fn(): break if backoff: time.sleep(random_exponential_window_backoff(tries)) backoff = False printv("Task execution loop exited.") signal.signal(signal.SIGINT, prev_sigint_handler) return executed
python
def poll( self, lease_seconds=LEASE_SECONDS, tag=None, verbose=False, execute_args=[], execute_kwargs={}, stop_fn=None, backoff_exceptions=[], min_backoff_window=30, max_backoff_window=120, log_fn=None ): """ Poll a queue until a stop condition is reached (default forever). Note that this function is not thread safe as it requires a global variable to intercept SIGINT. lease_seconds: each task should be leased for this many seconds tag: if specified, query for only tasks that match this tag execute_args / execute_kwargs: pass these arguments to task execution backoff_exceptions: A list of exceptions that instead of causing a crash, instead cause the polling to back off for an increasing exponential random window. min_backoff_window: The minimum sized window (in seconds) to select a random backoff time. max_backoff_window: The window doubles each retry. This is the maximum value in seconds. stop_fn: A boolean returning function that accepts no parameters. When it returns True, the task execution loop will terminate. It is evaluated once after every task. log_fn: Feed error messages to this function, default print (when verbose is enabled). verbose: print out the status of each step Return: number of tasks executed """ global LOOP if not callable(stop_fn) and stop_fn is not None: raise ValueError("stop_fn must be a callable. " + str(stop_fn)) elif not callable(stop_fn): stop_fn = lambda: False def random_exponential_window_backoff(n): n = min(n, min_backoff_window) # 120 sec max b/c on avg a request every ~250msec if 500 containers # in contention which seems like a quite reasonable volume of traffic # to handle high = min(2 ** n, max_backoff_window) return random.uniform(0, high) def printv(*args, **kwargs): if verbose: print(*args, **kwargs) LOOP = True def sigint_handler(signum, frame): global LOOP printv("Interrupted. 
Exiting after this task completes...") LOOP = False prev_sigint_handler = signal.getsignal(signal.SIGINT) signal.signal(signal.SIGINT, sigint_handler) if log_fn is None: log_fn = printv tries = 0 executed = 0 backoff = False backoff_exceptions = tuple(list(backoff_exceptions) + [ QueueEmpty ]) while LOOP: task = 'unknown' # for error message prior to leasing try: task = self.lease(seconds=int(lease_seconds)) tries += 1 printv(task) task.execute(*execute_args, **execute_kwargs) executed += 1 printv("Delete enqueued task...") self.delete(task) log_fn('INFO', task , "succesfully executed") tries = 0 except backoff_exceptions: backoff = True except Exception as e: printv('ERROR', task, "raised {}\n {}".format(e , traceback.format_exc())) raise #this will restart the container in kubernetes if stop_fn(): break if backoff: time.sleep(random_exponential_window_backoff(tries)) backoff = False printv("Task execution loop exited.") signal.signal(signal.SIGINT, prev_sigint_handler) return executed
Poll a queue until a stop condition is reached (default forever). Note that this function is not thread safe as it requires a global variable to intercept SIGINT. lease_seconds: each task should be leased for this many seconds tag: if specified, query for only tasks that match this tag execute_args / execute_kwargs: pass these arguments to task execution backoff_exceptions: A list of exceptions that instead of causing a crash, instead cause the polling to back off for an increasing exponential random window. min_backoff_window: The minimum sized window (in seconds) to select a random backoff time. max_backoff_window: The window doubles each retry. This is the maximum value in seconds. stop_fn: A boolean returning function that accepts no parameters. When it returns True, the task execution loop will terminate. It is evaluated once after every task. log_fn: Feed error messages to this function, default print (when verbose is enabled). verbose: print out the status of each step Return: number of tasks executed
https://github.com/seung-lab/python-task-queue/blob/e2742d0373e49c2a19badce102a355894924b4fb/taskqueue/taskqueue.py#L209-L303
seung-lab/python-task-queue
taskqueue/taskqueue.py
TaskQueue.insert
def insert(self, task, args=[], kwargs={}, delay_seconds=0): """ Insert a task into an existing queue. """ body = { "payload": task.payload(), "queueName": self._queue_name, "groupByTag": True, "tag": task.__class__.__name__ } def cloud_insertion(api): api.insert(body, delay_seconds) if len(self._threads): self.put(cloud_insertion) else: cloud_insertion(self._api) return self
python
def insert(self, task, args=[], kwargs={}, delay_seconds=0): """ Insert a task into an existing queue. """ body = { "payload": task.payload(), "queueName": self._queue_name, "groupByTag": True, "tag": task.__class__.__name__ } def cloud_insertion(api): api.insert(body, delay_seconds) if len(self._threads): self.put(cloud_insertion) else: cloud_insertion(self._api) return self
Insert a task into an existing queue.
https://github.com/seung-lab/python-task-queue/blob/e2742d0373e49c2a19badce102a355894924b4fb/taskqueue/taskqueue.py#L332-L351
seung-lab/python-task-queue
taskqueue/taskqueue.py
TaskQueue.delete
def delete(self, task_id): """Deletes a task from a TaskQueue.""" if isinstance(task_id, RegisteredTask): task_id = task_id.id def cloud_delete(api): api.delete(task_id) if len(self._threads): self.put(cloud_delete) else: cloud_delete(self._api) return self
python
def delete(self, task_id): """Deletes a task from a TaskQueue.""" if isinstance(task_id, RegisteredTask): task_id = task_id.id def cloud_delete(api): api.delete(task_id) if len(self._threads): self.put(cloud_delete) else: cloud_delete(self._api) return self
Deletes a task from a TaskQueue.
https://github.com/seung-lab/python-task-queue/blob/e2742d0373e49c2a19badce102a355894924b4fb/taskqueue/taskqueue.py#L381-L394
seung-lab/python-task-queue
taskqueue/taskqueue.py
GreenTaskQueue.insert
def insert(self, task, args=[], kwargs={}, delay_seconds=0): """ Insert a task into an existing queue. """ body = { "payload": task.payload(), "queueName": self._queue_name, "groupByTag": True, "tag": task.__class__.__name__ } def cloud_insertion(): self._api.insert(body, delay_seconds) self._pool.spawn(cloud_insertion) return self
python
def insert(self, task, args=[], kwargs={}, delay_seconds=0): """ Insert a task into an existing queue. """ body = { "payload": task.payload(), "queueName": self._queue_name, "groupByTag": True, "tag": task.__class__.__name__ } def cloud_insertion(): self._api.insert(body, delay_seconds) self._pool.spawn(cloud_insertion) return self
Insert a task into an existing queue.
https://github.com/seung-lab/python-task-queue/blob/e2742d0373e49c2a19badce102a355894924b4fb/taskqueue/taskqueue.py#L439-L455
Chris7/django-djangui
djangui/backend/argparse_specs.py
is_upload
def is_upload(action): """Checks if this should be a user upload :param action: :return: True if this is a file we intend to upload from the user """ return 'r' in action.type._mode and (action.default is None or getattr(action.default, 'name') not in (sys.stderr.name, sys.stdout.name))
python
def is_upload(action): """Checks if this should be a user upload :param action: :return: True if this is a file we intend to upload from the user """ return 'r' in action.type._mode and (action.default is None or getattr(action.default, 'name') not in (sys.stderr.name, sys.stdout.name))
Checks if this should be a user upload :param action: :return: True if this is a file we intend to upload from the user
https://github.com/Chris7/django-djangui/blob/4e9e37ac92e128dbaa0c809e5e33618d2c96808f/djangui/backend/argparse_specs.py#L14-L21
Chris7/django-djangui
djangui/backend/argparse_specs.py
ArgParseNode.to_django
def to_django(self): """ This is a debug function to see what equivalent django models are being generated """ exclude = {'name', 'model'} field_module = 'models' django_kwargs = {} if self.node_attrs['model'] == 'CharField': django_kwargs['max_length'] = 255 django_kwargs['blank'] = not self.node_attrs['required'] try: django_kwargs['default'] = self.node_attrs['value'] except KeyError: pass return u'{0} = {1}.{2}({3})'.format(self.node_attrs['name'], field_module, self.node_attrs['model'], ', '.join(['{0}={1}'.format(i,v) for i,v in six.iteritems(django_kwargs)]),)
python
def to_django(self): """ This is a debug function to see what equivalent django models are being generated """ exclude = {'name', 'model'} field_module = 'models' django_kwargs = {} if self.node_attrs['model'] == 'CharField': django_kwargs['max_length'] = 255 django_kwargs['blank'] = not self.node_attrs['required'] try: django_kwargs['default'] = self.node_attrs['value'] except KeyError: pass return u'{0} = {1}.{2}({3})'.format(self.node_attrs['name'], field_module, self.node_attrs['model'], ', '.join(['{0}={1}'.format(i,v) for i,v in six.iteritems(django_kwargs)]),)
This is a debug function to see what equivalent django models are being generated
https://github.com/Chris7/django-djangui/blob/4e9e37ac92e128dbaa0c809e5e33618d2c96808f/djangui/backend/argparse_specs.py#L152-L167
mozilla/configman
configman/converters.py
str_dict_keys
def str_dict_keys(a_dict): """return a modified dict where all the keys that are anything but str get converted to str. E.g. >>> result = str_dict_keys({u'name': u'Peter', u'age': 99, 1: 2}) >>> # can't compare whole dicts in doctests >>> result['name'] u'Peter' >>> result['age'] 99 >>> result[1] 2 The reason for this is that in Python <= 2.6.4 doing ``MyClass(**{u'name': u'Peter'})`` would raise a TypeError Note that only unicode types are converted to str types. The reason for that is you might have a class that looks like this:: class Option(object): def __init__(self, foo=None, bar=None, **kwargs): ... And it's being used like this:: Option(**{u'foo':1, u'bar':2, 3:4}) Then you don't want to change that {3:4} part which becomes part of `**kwargs` inside the __init__ method. Using integers as parameter keys is a silly example but the point is that due to the python 2.6.4 bug only unicode keys are converted to str. """ new_dict = {} for key in a_dict: if six.PY2 and isinstance(key, six.text_type): new_dict[str(key)] = a_dict[key] else: new_dict[key] = a_dict[key] return new_dict
python
def str_dict_keys(a_dict): """return a modified dict where all the keys that are anything but str get converted to str. E.g. >>> result = str_dict_keys({u'name': u'Peter', u'age': 99, 1: 2}) >>> # can't compare whole dicts in doctests >>> result['name'] u'Peter' >>> result['age'] 99 >>> result[1] 2 The reason for this is that in Python <= 2.6.4 doing ``MyClass(**{u'name': u'Peter'})`` would raise a TypeError Note that only unicode types are converted to str types. The reason for that is you might have a class that looks like this:: class Option(object): def __init__(self, foo=None, bar=None, **kwargs): ... And it's being used like this:: Option(**{u'foo':1, u'bar':2, 3:4}) Then you don't want to change that {3:4} part which becomes part of `**kwargs` inside the __init__ method. Using integers as parameter keys is a silly example but the point is that due to the python 2.6.4 bug only unicode keys are converted to str. """ new_dict = {} for key in a_dict: if six.PY2 and isinstance(key, six.text_type): new_dict[str(key)] = a_dict[key] else: new_dict[key] = a_dict[key] return new_dict
return a modified dict where all the keys that are anything but str get converted to str. E.g. >>> result = str_dict_keys({u'name': u'Peter', u'age': 99, 1: 2}) >>> # can't compare whole dicts in doctests >>> result['name'] u'Peter' >>> result['age'] 99 >>> result[1] 2 The reason for this is that in Python <= 2.6.4 doing ``MyClass(**{u'name': u'Peter'})`` would raise a TypeError Note that only unicode types are converted to str types. The reason for that is you might have a class that looks like this:: class Option(object): def __init__(self, foo=None, bar=None, **kwargs): ... And it's being used like this:: Option(**{u'foo':1, u'bar':2, 3:4}) Then you don't want to change that {3:4} part which becomes part of `**kwargs` inside the __init__ method. Using integers as parameter keys is a silly example but the point is that due to the python 2.6.4 bug only unicode keys are converted to str.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/converters.py#L34-L73
mozilla/configman
configman/converters.py
str_to_boolean
def str_to_boolean(input_str): """ a conversion function for boolean """ if not isinstance(input_str, six.string_types): raise ValueError(input_str) input_str = str_quote_stripper(input_str) return input_str.lower() in ("true", "t", "1", "y", "yes")
python
def str_to_boolean(input_str): """ a conversion function for boolean """ if not isinstance(input_str, six.string_types): raise ValueError(input_str) input_str = str_quote_stripper(input_str) return input_str.lower() in ("true", "t", "1", "y", "yes")
a conversion function for boolean
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/converters.py#L127-L133
mozilla/configman
configman/converters.py
str_to_python_object
def str_to_python_object(input_str): """ a conversion that will import a module and class name """ if not input_str: return None if six.PY3 and isinstance(input_str, six.binary_type): input_str = to_str(input_str) if not isinstance(input_str, six.string_types): # gosh, we didn't get a string, we can't convert anything but strings # we're going to assume that what we got is actually what was wanted # as the output return input_str input_str = str_quote_stripper(input_str) if '.' not in input_str and input_str in known_mapping_str_to_type: return known_mapping_str_to_type[input_str] parts = [x.strip() for x in input_str.split('.') if x.strip()] try: try: # first try as a complete module package = __import__(input_str) except ImportError: # it must be a class from a module if len(parts) == 1: # since it has only one part, it must be a class from __main__ parts = ('__main__', input_str) package = __import__('.'.join(parts[:-1]), globals(), locals(), []) obj = package for name in parts[1:]: obj = getattr(obj, name) return obj except AttributeError as x: raise CannotConvertError("%s cannot be found" % input_str) except ImportError as x: raise CannotConvertError(str(x))
python
def str_to_python_object(input_str): """ a conversion that will import a module and class name """ if not input_str: return None if six.PY3 and isinstance(input_str, six.binary_type): input_str = to_str(input_str) if not isinstance(input_str, six.string_types): # gosh, we didn't get a string, we can't convert anything but strings # we're going to assume that what we got is actually what was wanted # as the output return input_str input_str = str_quote_stripper(input_str) if '.' not in input_str and input_str in known_mapping_str_to_type: return known_mapping_str_to_type[input_str] parts = [x.strip() for x in input_str.split('.') if x.strip()] try: try: # first try as a complete module package = __import__(input_str) except ImportError: # it must be a class from a module if len(parts) == 1: # since it has only one part, it must be a class from __main__ parts = ('__main__', input_str) package = __import__('.'.join(parts[:-1]), globals(), locals(), []) obj = package for name in parts[1:]: obj = getattr(obj, name) return obj except AttributeError as x: raise CannotConvertError("%s cannot be found" % input_str) except ImportError as x: raise CannotConvertError(str(x))
a conversion that will import a module and class name
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/converters.py#L139-L172
mozilla/configman
configman/converters.py
str_to_classes_in_namespaces
def str_to_classes_in_namespaces( template_for_namespace="cls%d", name_of_class_option='cls', instantiate_classes=False ): """take a comma delimited list of class names, convert each class name into an actual class as an option within a numbered namespace. This function creates a closure over a new function. That new function, in turn creates a class derived from RequiredConfig. The inner function, 'class_list_converter', populates the InnerClassList with a Namespace for each of the classes in the class list. In addition, it puts the each class itself into the subordinate Namespace. The requirement discovery mechanism of configman then reads the InnerClassList's requried config, pulling in the namespaces and associated classes within. For example, if we have a class list like this: "Alpha, Beta", then this converter will add the following Namespaces and options to the configuration: "cls0" - the subordinate Namespace for Alpha "cls0.cls" - the option containing the class Alpha itself "cls1" - the subordinate Namespace for Beta "cls1.cls" - the option containing the class Beta itself Optionally, the 'class_list_converter' inner function can embue the InnerClassList's subordinate namespaces with aggregates that will instantiate classes from the class list. This is a convenience to the programmer who would otherwise have to know ahead of time what the namespace names were so that the classes could be instantiated within the context of the correct namespace. Remember the user could completely change the list of classes at run time, so prediction could be difficult. 
"cls0" - the subordinate Namespace for Alpha "cls0.cls" - the option containing the class Alpha itself "cls0.cls_instance" - an instance of the class Alpha "cls1" - the subordinate Namespace for Beta "cls1.cls" - the option containing the class Beta itself "cls1.cls_instance" - an instance of the class Beta parameters: template_for_namespace - a template for the names of the namespaces that will contain the classes and their associated required config options. The namespaces will be numbered sequentially. By default, they will be "cls1", "cls2", etc. class_option_name - the name to be used for the class option within the nested namespace. By default, it will choose: "cls1.cls", "cls2.cls", etc. instantiate_classes - a boolean to determine if there should be an aggregator added to each namespace that instantiates each class. If True, then each Namespace will contain elements for the class, as well as an aggregator that will instantiate the class. """ # these are only used within this method. No need to pollute the module # scope with them and avoid potential circular imports from configman.namespace import Namespace from configman.required_config import RequiredConfig #-------------------------------------------------------------------------- def class_list_converter(class_list_str): """This function becomes the actual converter used by configman to take a string and convert it into the nested sequence of Namespaces, one for each class in the list. It does this by creating a proxy class stuffed with its own 'required_config' that's dynamically generated.""" if isinstance(class_list_str, six.string_types): class_list = [x.strip() for x in class_list_str.split(',')] if class_list == ['']: class_list = [] else: raise TypeError('must be derivative of %s' % six.string_types) #====================================================================== class InnerClassList(RequiredConfig): """This nested class is a proxy list for the classes. 
It collects all the config requirements for the listed classes and places them each into their own Namespace. """ # we're dynamically creating a class here. The following block of # code is actually adding class level attributes to this new class required_config = Namespace() # 1st requirement for configman subordinate_namespace_names = [] # to help the programmer know # what Namespaces we added namespace_template = template_for_namespace # save the template # for future reference class_option_name = name_of_class_option # save the class's option # name for the future # for each class in the class list for namespace_index, a_class in enumerate(class_list): # figure out the Namespace name namespace_name = template_for_namespace % namespace_index subordinate_namespace_names.append(namespace_name) # create the new Namespace required_config[namespace_name] = Namespace() # add the option for the class itself required_config[namespace_name].add_option( name_of_class_option, #doc=a_class.__doc__ # not helpful if too verbose default=a_class, from_string_converter=class_converter ) if instantiate_classes: # add an aggregator to instantiate the class required_config[namespace_name].add_aggregation( "%s_instance" % name_of_class_option, lambda c, lc, a: lc[name_of_class_option](lc) ) @classmethod def to_str(cls): """this method takes this inner class object and turns it back into the original string of classnames. This is used primarily as for the output of the 'help' option""" return ', '.join( py_obj_to_str(v[name_of_class_option].value) for v in cls.get_required_config().values() if isinstance(v, Namespace) ) return InnerClassList # result of class_list_converter return class_list_converter
python
def str_to_classes_in_namespaces( template_for_namespace="cls%d", name_of_class_option='cls', instantiate_classes=False ): """take a comma delimited list of class names, convert each class name into an actual class as an option within a numbered namespace. This function creates a closure over a new function. That new function, in turn creates a class derived from RequiredConfig. The inner function, 'class_list_converter', populates the InnerClassList with a Namespace for each of the classes in the class list. In addition, it puts the each class itself into the subordinate Namespace. The requirement discovery mechanism of configman then reads the InnerClassList's requried config, pulling in the namespaces and associated classes within. For example, if we have a class list like this: "Alpha, Beta", then this converter will add the following Namespaces and options to the configuration: "cls0" - the subordinate Namespace for Alpha "cls0.cls" - the option containing the class Alpha itself "cls1" - the subordinate Namespace for Beta "cls1.cls" - the option containing the class Beta itself Optionally, the 'class_list_converter' inner function can embue the InnerClassList's subordinate namespaces with aggregates that will instantiate classes from the class list. This is a convenience to the programmer who would otherwise have to know ahead of time what the namespace names were so that the classes could be instantiated within the context of the correct namespace. Remember the user could completely change the list of classes at run time, so prediction could be difficult. 
"cls0" - the subordinate Namespace for Alpha "cls0.cls" - the option containing the class Alpha itself "cls0.cls_instance" - an instance of the class Alpha "cls1" - the subordinate Namespace for Beta "cls1.cls" - the option containing the class Beta itself "cls1.cls_instance" - an instance of the class Beta parameters: template_for_namespace - a template for the names of the namespaces that will contain the classes and their associated required config options. The namespaces will be numbered sequentially. By default, they will be "cls1", "cls2", etc. class_option_name - the name to be used for the class option within the nested namespace. By default, it will choose: "cls1.cls", "cls2.cls", etc. instantiate_classes - a boolean to determine if there should be an aggregator added to each namespace that instantiates each class. If True, then each Namespace will contain elements for the class, as well as an aggregator that will instantiate the class. """ # these are only used within this method. No need to pollute the module # scope with them and avoid potential circular imports from configman.namespace import Namespace from configman.required_config import RequiredConfig #-------------------------------------------------------------------------- def class_list_converter(class_list_str): """This function becomes the actual converter used by configman to take a string and convert it into the nested sequence of Namespaces, one for each class in the list. It does this by creating a proxy class stuffed with its own 'required_config' that's dynamically generated.""" if isinstance(class_list_str, six.string_types): class_list = [x.strip() for x in class_list_str.split(',')] if class_list == ['']: class_list = [] else: raise TypeError('must be derivative of %s' % six.string_types) #====================================================================== class InnerClassList(RequiredConfig): """This nested class is a proxy list for the classes. 
It collects all the config requirements for the listed classes and places them each into their own Namespace. """ # we're dynamically creating a class here. The following block of # code is actually adding class level attributes to this new class required_config = Namespace() # 1st requirement for configman subordinate_namespace_names = [] # to help the programmer know # what Namespaces we added namespace_template = template_for_namespace # save the template # for future reference class_option_name = name_of_class_option # save the class's option # name for the future # for each class in the class list for namespace_index, a_class in enumerate(class_list): # figure out the Namespace name namespace_name = template_for_namespace % namespace_index subordinate_namespace_names.append(namespace_name) # create the new Namespace required_config[namespace_name] = Namespace() # add the option for the class itself required_config[namespace_name].add_option( name_of_class_option, #doc=a_class.__doc__ # not helpful if too verbose default=a_class, from_string_converter=class_converter ) if instantiate_classes: # add an aggregator to instantiate the class required_config[namespace_name].add_aggregation( "%s_instance" % name_of_class_option, lambda c, lc, a: lc[name_of_class_option](lc) ) @classmethod def to_str(cls): """this method takes this inner class object and turns it back into the original string of classnames. This is used primarily as for the output of the 'help' option""" return ', '.join( py_obj_to_str(v[name_of_class_option].value) for v in cls.get_required_config().values() if isinstance(v, Namespace) ) return InnerClassList # result of class_list_converter return class_list_converter
take a comma delimited list of class names, convert each class name into an actual class as an option within a numbered namespace. This function creates a closure over a new function. That new function, in turn creates a class derived from RequiredConfig. The inner function, 'class_list_converter', populates the InnerClassList with a Namespace for each of the classes in the class list. In addition, it puts the each class itself into the subordinate Namespace. The requirement discovery mechanism of configman then reads the InnerClassList's requried config, pulling in the namespaces and associated classes within. For example, if we have a class list like this: "Alpha, Beta", then this converter will add the following Namespaces and options to the configuration: "cls0" - the subordinate Namespace for Alpha "cls0.cls" - the option containing the class Alpha itself "cls1" - the subordinate Namespace for Beta "cls1.cls" - the option containing the class Beta itself Optionally, the 'class_list_converter' inner function can embue the InnerClassList's subordinate namespaces with aggregates that will instantiate classes from the class list. This is a convenience to the programmer who would otherwise have to know ahead of time what the namespace names were so that the classes could be instantiated within the context of the correct namespace. Remember the user could completely change the list of classes at run time, so prediction could be difficult. "cls0" - the subordinate Namespace for Alpha "cls0.cls" - the option containing the class Alpha itself "cls0.cls_instance" - an instance of the class Alpha "cls1" - the subordinate Namespace for Beta "cls1.cls" - the option containing the class Beta itself "cls1.cls_instance" - an instance of the class Beta parameters: template_for_namespace - a template for the names of the namespaces that will contain the classes and their associated required config options. The namespaces will be numbered sequentially. 
By default, they will be "cls1", "cls2", etc. class_option_name - the name to be used for the class option within the nested namespace. By default, it will choose: "cls1.cls", "cls2.cls", etc. instantiate_classes - a boolean to determine if there should be an aggregator added to each namespace that instantiates each class. If True, then each Namespace will contain elements for the class, as well as an aggregator that will instantiate the class.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/converters.py#L178-L301
mozilla/configman
configman/converters.py
str_to_list
def str_to_list( input_str, item_converter=lambda x: x, item_separator=',', list_to_collection_converter=None, ): """ a conversion function for list """ if not isinstance(input_str, six.string_types): raise ValueError(input_str) input_str = str_quote_stripper(input_str) result = [ item_converter(x.strip()) for x in input_str.split(item_separator) if x.strip() ] if list_to_collection_converter is not None: return list_to_collection_converter(result) return result
python
def str_to_list( input_str, item_converter=lambda x: x, item_separator=',', list_to_collection_converter=None, ): """ a conversion function for list """ if not isinstance(input_str, six.string_types): raise ValueError(input_str) input_str = str_quote_stripper(input_str) result = [ item_converter(x.strip()) for x in input_str.split(item_separator) if x.strip() ] if list_to_collection_converter is not None: return list_to_collection_converter(result) return result
a conversion function for list
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/converters.py#L317-L334
mozilla/configman
configman/converters.py
arbitrary_object_to_string
def arbitrary_object_to_string(a_thing): """take a python object of some sort, and convert it into a human readable string. this function is used extensively to convert things like "subject" into "subject_key, function -> function_key, etc.""" # is it None? if a_thing is None: return '' # is it already a string? if isinstance(a_thing, six.string_types): return a_thing if six.PY3 and isinstance(a_thing, six.binary_type): try: return a_thing.decode('utf-8') except UnicodeDecodeError: pass # does it have a to_str function? try: return a_thing.to_str() except (AttributeError, KeyError, TypeError): # AttributeError - no to_str function? # KeyError - DotDict has no to_str? # TypeError - problem converting # nope, no to_str function pass # is this a type proxy? try: return arbitrary_object_to_string(a_thing.a_type) except (AttributeError, KeyError, TypeError): # # nope, no a_type property pass # is it a built in? try: return known_mapping_type_to_str[a_thing] except (KeyError, TypeError): # nope, not a builtin pass # is it something from a loaded module? try: if a_thing.__module__ not in ('__builtin__', 'builtins', 'exceptions'): if a_thing.__module__ == "__main__": module_name = ( sys.modules['__main__'] .__file__[:-3] .replace('/', '.') .strip('.') ) else: module_name = a_thing.__module__ return "%s.%s" % (module_name, a_thing.__name__) except AttributeError: # nope, not one of these pass # maybe it has a __name__ attribute? try: return a_thing.__name__ except AttributeError: # nope, not one of these pass # punt and see what happens if we just cast it to string return str(a_thing)
python
def arbitrary_object_to_string(a_thing): """take a python object of some sort, and convert it into a human readable string. this function is used extensively to convert things like "subject" into "subject_key, function -> function_key, etc.""" # is it None? if a_thing is None: return '' # is it already a string? if isinstance(a_thing, six.string_types): return a_thing if six.PY3 and isinstance(a_thing, six.binary_type): try: return a_thing.decode('utf-8') except UnicodeDecodeError: pass # does it have a to_str function? try: return a_thing.to_str() except (AttributeError, KeyError, TypeError): # AttributeError - no to_str function? # KeyError - DotDict has no to_str? # TypeError - problem converting # nope, no to_str function pass # is this a type proxy? try: return arbitrary_object_to_string(a_thing.a_type) except (AttributeError, KeyError, TypeError): # # nope, no a_type property pass # is it a built in? try: return known_mapping_type_to_str[a_thing] except (KeyError, TypeError): # nope, not a builtin pass # is it something from a loaded module? try: if a_thing.__module__ not in ('__builtin__', 'builtins', 'exceptions'): if a_thing.__module__ == "__main__": module_name = ( sys.modules['__main__'] .__file__[:-3] .replace('/', '.') .strip('.') ) else: module_name = a_thing.__module__ return "%s.%s" % (module_name, a_thing.__name__) except AttributeError: # nope, not one of these pass # maybe it has a __name__ attribute? try: return a_thing.__name__ except AttributeError: # nope, not one of these pass # punt and see what happens if we just cast it to string return str(a_thing)
take a python object of some sort, and convert it into a human readable string. this function is used extensively to convert things like "subject" into "subject_key, function -> function_key, etc.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/converters.py#L370-L430
Chris7/django-djangui
djangui/backend/ast/codegen.py
to_source
def to_source(node, indent_with=' ' * 4, add_line_information=False): """This function can convert a node tree back into python sourcecode. This is useful for debugging purposes, especially if you're dealing with custom asts not generated by python itself. It could be that the sourcecode is evaluable when the AST itself is not compilable / evaluable. The reason for this is that the AST contains some more data than regular sourcecode does, which is dropped during conversion. Each level of indentation is replaced with `indent_with`. Per default this parameter is equal to four spaces as suggested by PEP 8, but it might be adjusted to match the application's styleguide. If `add_line_information` is set to `True` comments for the line numbers of the nodes are added to the output. This can be used to spot wrong line number information of statement nodes. """ generator = SourceGenerator(indent_with, add_line_information) generator.visit(node) return ''.join(str(s) for s in generator.result)
python
def to_source(node, indent_with=' ' * 4, add_line_information=False): """This function can convert a node tree back into python sourcecode. This is useful for debugging purposes, especially if you're dealing with custom asts not generated by python itself. It could be that the sourcecode is evaluable when the AST itself is not compilable / evaluable. The reason for this is that the AST contains some more data than regular sourcecode does, which is dropped during conversion. Each level of indentation is replaced with `indent_with`. Per default this parameter is equal to four spaces as suggested by PEP 8, but it might be adjusted to match the application's styleguide. If `add_line_information` is set to `True` comments for the line numbers of the nodes are added to the output. This can be used to spot wrong line number information of statement nodes. """ generator = SourceGenerator(indent_with, add_line_information) generator.visit(node) return ''.join(str(s) for s in generator.result)
This function can convert a node tree back into python sourcecode. This is useful for debugging purposes, especially if you're dealing with custom asts not generated by python itself. It could be that the sourcecode is evaluable when the AST itself is not compilable / evaluable. The reason for this is that the AST contains some more data than regular sourcecode does, which is dropped during conversion. Each level of indentation is replaced with `indent_with`. Per default this parameter is equal to four spaces as suggested by PEP 8, but it might be adjusted to match the application's styleguide. If `add_line_information` is set to `True` comments for the line numbers of the nodes are added to the output. This can be used to spot wrong line number information of statement nodes.
https://github.com/Chris7/django-djangui/blob/4e9e37ac92e128dbaa0c809e5e33618d2c96808f/djangui/backend/ast/codegen.py#L59-L79
mozilla/configman
demo/advanced_demo3.py
PostgresPooled.connection
def connection(self, name=None): """return a named connection. This function will return a named connection by either finding one in its pool by the name or creating a new one. If no name is given, it will use the name of the current executing thread as the name of the connection. parameters: name - a name as a string """ if not name: name = threading.currentThread().getName() if name in self.pool: return self.pool[name] self.pool[name] = FakeDatabaseConnection(self.dsn) return self.pool[name]
python
def connection(self, name=None): """return a named connection. This function will return a named connection by either finding one in its pool by the name or creating a new one. If no name is given, it will use the name of the current executing thread as the name of the connection. parameters: name - a name as a string """ if not name: name = threading.currentThread().getName() if name in self.pool: return self.pool[name] self.pool[name] = FakeDatabaseConnection(self.dsn) return self.pool[name]
return a named connection. This function will return a named connection by either finding one in its pool by the name or creating a new one. If no name is given, it will use the name of the current executing thread as the name of the connection. parameters: name - a name as a string
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/demo/advanced_demo3.py#L242-L258
mozilla/configman
demo/advanced_demo3.py
PostgresPooled.close_connection
def close_connection(self, connection, force=False): """overriding the baseclass function, this routine will decline to close a connection at the end of a transaction context. This allows for reuse of connections.""" if force: print('PostgresPooled - delegating connection closure') try: super(PostgresPooled, self).close_connection(connection, force) except self.operational_exceptions: print('PostgresPooled - failed closing') for name, conn in self.pool.iteritems(): if conn is connection: break del self.pool[name] else: print('PostgresPooled - refusing to close connection')
python
def close_connection(self, connection, force=False): """overriding the baseclass function, this routine will decline to close a connection at the end of a transaction context. This allows for reuse of connections.""" if force: print('PostgresPooled - delegating connection closure') try: super(PostgresPooled, self).close_connection(connection, force) except self.operational_exceptions: print('PostgresPooled - failed closing') for name, conn in self.pool.iteritems(): if conn is connection: break del self.pool[name] else: print('PostgresPooled - refusing to close connection')
overriding the baseclass function, this routine will decline to close a connection at the end of a transaction context. This allows for reuse of connections.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/demo/advanced_demo3.py#L261-L277
mozilla/configman
demo/advanced_demo3.py
TransactionExecutor.do_transaction
def do_transaction(self, function, *args, **kwargs): """execute a function within the context of a transaction""" with self.config.db_transaction() as trans: function(trans, *args, **kwargs)
python
def do_transaction(self, function, *args, **kwargs): """execute a function within the context of a transaction""" with self.config.db_transaction() as trans: function(trans, *args, **kwargs)
execute a function within the context of a transaction
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/demo/advanced_demo3.py#L323-L326
mozilla/configman
demo/advanced_demo3.py
TransactionExecutorWithBackoff.responsive_sleep
def responsive_sleep(self, seconds, wait_reason=''): """Sleep for the specified number of seconds, logging every 'wait_log_interval' seconds with progress info.""" for x in range(int(seconds)): if (self.config.wait_log_interval and not x % self.config.wait_log_interval): print('%s: %dsec of %dsec' % (wait_reason, x, seconds)) time.sleep(1.0)
python
def responsive_sleep(self, seconds, wait_reason=''): """Sleep for the specified number of seconds, logging every 'wait_log_interval' seconds with progress info.""" for x in range(int(seconds)): if (self.config.wait_log_interval and not x % self.config.wait_log_interval): print('%s: %dsec of %dsec' % (wait_reason, x, seconds)) time.sleep(1.0)
Sleep for the specified number of seconds, logging every 'wait_log_interval' seconds with progress info.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/demo/advanced_demo3.py#L354-L363
mozilla/configman
demo/advanced_demo3.py
TransactionExecutorWithBackoff.do_transaction
def do_transaction(self, function, *args, **kwargs): """execute a function within the context of a transaction""" for wait_in_seconds in self.backoff_generator(): try: with self.config.db_transaction() as trans: function(trans, *args, **kwargs) trans.commit() break except self.config.db_transaction.operational_exceptions: pass print(('failure in transaction - retry in %s seconds' % wait_in_seconds)) self.responsive_sleep(wait_in_seconds, "waiting for retry after failure in " "transaction")
python
def do_transaction(self, function, *args, **kwargs): """execute a function within the context of a transaction""" for wait_in_seconds in self.backoff_generator(): try: with self.config.db_transaction() as trans: function(trans, *args, **kwargs) trans.commit() break except self.config.db_transaction.operational_exceptions: pass print(('failure in transaction - retry in %s seconds' % wait_in_seconds)) self.responsive_sleep(wait_in_seconds, "waiting for retry after failure in " "transaction")
execute a function within the context of a transaction
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/demo/advanced_demo3.py#L366-L380
mozilla/configman
configman/value_sources/for_configobj.py
ConfigObjWithIncludes._expand_files
def _expand_files(self, file_name, original_path, indent=""): """This recursive function accepts a file name, opens the file and then spools the contents of the file into a list, examining each line as it does so. If it detects a line beginning with "+include", it assumes the string immediately following is a file name. Recursing, the file new file is openned and its contents are spooled into the accumulating list.""" expanded_file_contents = [] with open(file_name) as f: for a_line in f: match = ConfigObjWithIncludes._include_re.match(a_line) if match: include_file = match.group(2) include_file = os.path.join( original_path, include_file ) new_lines = self._expand_files( include_file, os.path.dirname(include_file), indent + match.group(1) ) expanded_file_contents.extend(new_lines) else: expanded_file_contents.append(indent + a_line.rstrip()) return expanded_file_contents
python
def _expand_files(self, file_name, original_path, indent=""): """This recursive function accepts a file name, opens the file and then spools the contents of the file into a list, examining each line as it does so. If it detects a line beginning with "+include", it assumes the string immediately following is a file name. Recursing, the file new file is openned and its contents are spooled into the accumulating list.""" expanded_file_contents = [] with open(file_name) as f: for a_line in f: match = ConfigObjWithIncludes._include_re.match(a_line) if match: include_file = match.group(2) include_file = os.path.join( original_path, include_file ) new_lines = self._expand_files( include_file, os.path.dirname(include_file), indent + match.group(1) ) expanded_file_contents.extend(new_lines) else: expanded_file_contents.append(indent + a_line.rstrip()) return expanded_file_contents
This recursive function accepts a file name, opens the file and then spools the contents of the file into a list, examining each line as it does so. If it detects a line beginning with "+include", it assumes the string immediately following is a file name. Recursing, the file new file is openned and its contents are spooled into the accumulating list.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/value_sources/for_configobj.py#L73-L98
mozilla/configman
configman/value_sources/for_configobj.py
ConfigObjWithIncludes._load
def _load(self, infile, configspec): """this overrides the original ConfigObj method of the same name. It runs through the input file collecting lines into a list. When completed, this method submits the list of lines to the super class' function of the same name. ConfigObj proceeds, completely unaware that it's input file has been preprocessed.""" if isinstance(infile, (six.binary_type, six.text_type)): infile = to_str(infile) original_path = os.path.dirname(infile) expanded_file_contents = self._expand_files(infile, original_path) super(ConfigObjWithIncludes, self)._load( expanded_file_contents, configspec ) else: super(ConfigObjWithIncludes, self)._load(infile, configspec)
python
def _load(self, infile, configspec): """this overrides the original ConfigObj method of the same name. It runs through the input file collecting lines into a list. When completed, this method submits the list of lines to the super class' function of the same name. ConfigObj proceeds, completely unaware that it's input file has been preprocessed.""" if isinstance(infile, (six.binary_type, six.text_type)): infile = to_str(infile) original_path = os.path.dirname(infile) expanded_file_contents = self._expand_files(infile, original_path) super(ConfigObjWithIncludes, self)._load( expanded_file_contents, configspec ) else: super(ConfigObjWithIncludes, self)._load(infile, configspec)
this overrides the original ConfigObj method of the same name. It runs through the input file collecting lines into a list. When completed, this method submits the list of lines to the super class' function of the same name. ConfigObj proceeds, completely unaware that it's input file has been preprocessed.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/value_sources/for_configobj.py#L101-L116
mozilla/configman
configman/value_sources/for_configobj.py
ValueSource.get_values
def get_values(self, config_manager, ignore_mismatches, obj_hook=DotDict): """Return a nested dictionary representing the values in the ini file. In the case of this ValueSource implementation, both parameters are dummies.""" if self.delayed_parser_instantiation: try: app = config_manager._get_option('admin.application') source = "%s%s" % (app.value.app_name, file_name_extension) self.config_obj = configobj.ConfigObj(source) self.delayed_parser_instantiation = False except AttributeError: # we don't have enough information to get the ini file # yet. we'll ignore the error for now return obj_hook() # return empty dict of the obj_hook type if isinstance(self.config_obj, obj_hook): return self.config_obj return obj_hook(initializer=self.config_obj)
python
def get_values(self, config_manager, ignore_mismatches, obj_hook=DotDict): """Return a nested dictionary representing the values in the ini file. In the case of this ValueSource implementation, both parameters are dummies.""" if self.delayed_parser_instantiation: try: app = config_manager._get_option('admin.application') source = "%s%s" % (app.value.app_name, file_name_extension) self.config_obj = configobj.ConfigObj(source) self.delayed_parser_instantiation = False except AttributeError: # we don't have enough information to get the ini file # yet. we'll ignore the error for now return obj_hook() # return empty dict of the obj_hook type if isinstance(self.config_obj, obj_hook): return self.config_obj return obj_hook(initializer=self.config_obj)
Return a nested dictionary representing the values in the ini file. In the case of this ValueSource implementation, both parameters are dummies.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/value_sources/for_configobj.py#L168-L184
mozilla/configman
configman/value_sources/for_configobj.py
ValueSource._write_ini
def _write_ini(source_dict, namespace_name=None, level=0, indent_size=4, output_stream=sys.stdout): """this function prints the components of a configobj ini file. It is recursive for outputing the nested sections of the ini file.""" options = [ value for value in source_dict.values() if isinstance(value, Option) ] options.sort(key=lambda x: x.name) indent_spacer = " " * (level * indent_size) for an_option in options: print("%s# %s" % (indent_spacer, an_option.doc), file=output_stream) option_value = to_str(an_option) if an_option.reference_value_from: print( '%s# see "%s.%s" for the default or override it here' % ( indent_spacer, an_option.reference_value_from, an_option.name ), file=output_stream ) if an_option.likely_to_be_changed or an_option.has_changed: option_format = '%s%s=%s\n' else: option_format = '%s#%s=%s\n' if isinstance(option_value, six.string_types) and \ ',' in option_value: # quote lists unless they're already quoted if option_value[0] not in '\'"': option_value = '"%s"' % option_value print(option_format % (indent_spacer, an_option.name, option_value), file=output_stream) next_level = level + 1 namespaces = [ (key, value) for key, value in source_dict.items() if isinstance(value, Namespace) ] namespaces.sort(key=ValueSource._namespace_reference_value_from_sort) for key, namespace in namespaces: next_level_spacer = " " * next_level * indent_size print("%s%s%s%s\n" % (indent_spacer, "[" * next_level, key, "]" * next_level), file=output_stream) if namespace._doc: print("%s%s" % (next_level_spacer, namespace._doc), file=output_stream) if namespace._reference_value_from: print("%s#+include ./common_%s.ini\n" % (next_level_spacer, key), file=output_stream) if namespace_name: ValueSource._write_ini( source_dict=namespace, namespace_name="%s.%s" % (namespace_name, key), level=level+1, indent_size=indent_size, output_stream=output_stream ) else: ValueSource._write_ini( source_dict=namespace, namespace_name=key, level=level+1, indent_size=indent_size, 
output_stream=output_stream )
python
def _write_ini(source_dict, namespace_name=None, level=0, indent_size=4, output_stream=sys.stdout): """this function prints the components of a configobj ini file. It is recursive for outputing the nested sections of the ini file.""" options = [ value for value in source_dict.values() if isinstance(value, Option) ] options.sort(key=lambda x: x.name) indent_spacer = " " * (level * indent_size) for an_option in options: print("%s# %s" % (indent_spacer, an_option.doc), file=output_stream) option_value = to_str(an_option) if an_option.reference_value_from: print( '%s# see "%s.%s" for the default or override it here' % ( indent_spacer, an_option.reference_value_from, an_option.name ), file=output_stream ) if an_option.likely_to_be_changed or an_option.has_changed: option_format = '%s%s=%s\n' else: option_format = '%s#%s=%s\n' if isinstance(option_value, six.string_types) and \ ',' in option_value: # quote lists unless they're already quoted if option_value[0] not in '\'"': option_value = '"%s"' % option_value print(option_format % (indent_spacer, an_option.name, option_value), file=output_stream) next_level = level + 1 namespaces = [ (key, value) for key, value in source_dict.items() if isinstance(value, Namespace) ] namespaces.sort(key=ValueSource._namespace_reference_value_from_sort) for key, namespace in namespaces: next_level_spacer = " " * next_level * indent_size print("%s%s%s%s\n" % (indent_spacer, "[" * next_level, key, "]" * next_level), file=output_stream) if namespace._doc: print("%s%s" % (next_level_spacer, namespace._doc), file=output_stream) if namespace._reference_value_from: print("%s#+include ./common_%s.ini\n" % (next_level_spacer, key), file=output_stream) if namespace_name: ValueSource._write_ini( source_dict=namespace, namespace_name="%s.%s" % (namespace_name, key), level=level+1, indent_size=indent_size, output_stream=output_stream ) else: ValueSource._write_ini( source_dict=namespace, namespace_name=key, level=level+1, indent_size=indent_size, 
output_stream=output_stream )
this function prints the components of a configobj ini file. It is recursive for outputing the nested sections of the ini file.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/value_sources/for_configobj.py#L202-L276
mozilla/configman
configman/__init__.py
configuration
def configuration(*args, **kwargs): """this function just instantiates a ConfigurationManager and returns the configuration dictionary. It accepts all the same parameters as the constructor for the ConfigurationManager class.""" try: config_kwargs = {'mapping_class': kwargs.pop('mapping_class')} except KeyError: config_kwargs = {} cm = ConfigurationManager(*args, **kwargs) return cm.get_config(**config_kwargs)
python
def configuration(*args, **kwargs): """this function just instantiates a ConfigurationManager and returns the configuration dictionary. It accepts all the same parameters as the constructor for the ConfigurationManager class.""" try: config_kwargs = {'mapping_class': kwargs.pop('mapping_class')} except KeyError: config_kwargs = {} cm = ConfigurationManager(*args, **kwargs) return cm.get_config(**config_kwargs)
this function just instantiates a ConfigurationManager and returns the configuration dictionary. It accepts all the same parameters as the constructor for the ConfigurationManager class.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/__init__.py#L38-L47
mozilla/configman
configman/value_sources/for_argparse.py
IntermediateConfigmanParser.error
def error(self, message): """we need to suppress errors that might happen in earlier phases of the expansion/overlay process. """ if ( "not allowed" in message or "ignored" in message or "expected" in message or "invalid" in message or self.add_help ): # when we have "help" then we must also have proper error # processing. Without "help", we suppress the errors by # doing nothing here super(IntermediateConfigmanParser, self).error(message)
python
def error(self, message): """we need to suppress errors that might happen in earlier phases of the expansion/overlay process. """ if ( "not allowed" in message or "ignored" in message or "expected" in message or "invalid" in message or self.add_help ): # when we have "help" then we must also have proper error # processing. Without "help", we suppress the errors by # doing nothing here super(IntermediateConfigmanParser, self).error(message)
we need to suppress errors that might happen in earlier phases of the expansion/overlay process.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/value_sources/for_argparse.py#L83-L96
mozilla/configman
configman/dotdict.py
iteritems_breadth_first
def iteritems_breadth_first(a_mapping, include_dicts=False): """a generator that returns all the keys in a set of nested Mapping instances. The keys take the form X.Y.Z""" subordinate_mappings = [] for key, value in six.iteritems(a_mapping): if isinstance(value, collections.Mapping): subordinate_mappings.append((key, value)) if include_dicts: yield key, value else: yield key, value for key, a_map in subordinate_mappings: for sub_key, value in iteritems_breadth_first(a_map, include_dicts): yield '%s.%s' % (key, sub_key), value
python
def iteritems_breadth_first(a_mapping, include_dicts=False): """a generator that returns all the keys in a set of nested Mapping instances. The keys take the form X.Y.Z""" subordinate_mappings = [] for key, value in six.iteritems(a_mapping): if isinstance(value, collections.Mapping): subordinate_mappings.append((key, value)) if include_dicts: yield key, value else: yield key, value for key, a_map in subordinate_mappings: for sub_key, value in iteritems_breadth_first(a_map, include_dicts): yield '%s.%s' % (key, sub_key), value
a generator that returns all the keys in a set of nested Mapping instances. The keys take the form X.Y.Z
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/dotdict.py#L15-L28
mozilla/configman
configman/dotdict.py
configman_keys
def configman_keys(a_mapping): """return a DotDict that is a copy of the provided mapping with keys transformed into a configman compatible form: if the key is not all uppercase then all doubled underscores will be replaced with the '.' character. This has a specific use with the os.environ. Linux shells generally do not allow the dot character in an identifier. Configman relies on the dot character to separate namespaces. If the environment is processed through this function, then doubled underscores will be interpretted as if they were the dot character. """ configmanized_keys_dict = DotDict() for k, v in iteritems_breadth_first(a_mapping): if '__' in k and k != k.upper(): k = k.replace('__', '.') configmanized_keys_dict[k] = v return configmanized_keys_dict
python
def configman_keys(a_mapping): """return a DotDict that is a copy of the provided mapping with keys transformed into a configman compatible form: if the key is not all uppercase then all doubled underscores will be replaced with the '.' character. This has a specific use with the os.environ. Linux shells generally do not allow the dot character in an identifier. Configman relies on the dot character to separate namespaces. If the environment is processed through this function, then doubled underscores will be interpretted as if they were the dot character. """ configmanized_keys_dict = DotDict() for k, v in iteritems_breadth_first(a_mapping): if '__' in k and k != k.upper(): k = k.replace('__', '.') configmanized_keys_dict[k] = v return configmanized_keys_dict
return a DotDict that is a copy of the provided mapping with keys transformed into a configman compatible form: if the key is not all uppercase then all doubled underscores will be replaced with the '.' character. This has a specific use with the os.environ. Linux shells generally do not allow the dot character in an identifier. Configman relies on the dot character to separate namespaces. If the environment is processed through this function, then doubled underscores will be interpretted as if they were the dot character.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/dotdict.py#L32-L50
mozilla/configman
configman/dotdict.py
create_key_translating_dot_dict
def create_key_translating_dot_dict( new_class_name, translation_tuples, base_class=DotDict ): """this function will generate a DotDict derivative class that has key translation built in. If the key is not found, translations (as specified by the translation_tuples) are performed on the key and the lookup is tried again. Only on failure of this second lookup will the KeyError exception be raised. parameters: new_class_name - the name of the returned class translation_tuples - a sequence of 2-tuples of the form: (original_substring, substitution_string) base_class - the baseclass on which this new class is to be based """ #========================================================================== class DotDictWithKeyTranslations(base_class): def __init__(self, *args, **kwargs): self.__dict__['_translation_tuples'] = translation_tuples super(DotDictWithKeyTranslations, self).__init__(*args, **kwargs) #---------------------------------------------------------------------- @memoize() def _translate_key(self, key): for original, replacement in self._translation_tuples: key = key.replace(original, replacement) return key #---------------------------------------------------------------------- def assign(self, key, value): super(DotDictWithKeyTranslations, self).assign( self._translate_key(key), value ) #---------------------------------------------------------------------- def __setattr__(self, key, value): super(DotDictWithKeyTranslations, self).__setattr__( self._translate_key(key), value ) #---------------------------------------------------------------------- def __getattr__(self, key): alt_key = self._translate_key(key) if alt_key == key: return super(DotDictWithKeyTranslations, self).__getattr__(key) try: return getattr(self, alt_key) except KeyError: raise KeyError(key) #---------------------------------------------------------------------- def __delattr__(self, key): super(DotDictWithKeyTranslations, self).__delattr__( self._translate_key(key) ) if six.PY2: 
new_class_name = six.binary_type(new_class_name) DotDictWithKeyTranslations.__name__ = new_class_name return DotDictWithKeyTranslations
python
def create_key_translating_dot_dict( new_class_name, translation_tuples, base_class=DotDict ): """this function will generate a DotDict derivative class that has key translation built in. If the key is not found, translations (as specified by the translation_tuples) are performed on the key and the lookup is tried again. Only on failure of this second lookup will the KeyError exception be raised. parameters: new_class_name - the name of the returned class translation_tuples - a sequence of 2-tuples of the form: (original_substring, substitution_string) base_class - the baseclass on which this new class is to be based """ #========================================================================== class DotDictWithKeyTranslations(base_class): def __init__(self, *args, **kwargs): self.__dict__['_translation_tuples'] = translation_tuples super(DotDictWithKeyTranslations, self).__init__(*args, **kwargs) #---------------------------------------------------------------------- @memoize() def _translate_key(self, key): for original, replacement in self._translation_tuples: key = key.replace(original, replacement) return key #---------------------------------------------------------------------- def assign(self, key, value): super(DotDictWithKeyTranslations, self).assign( self._translate_key(key), value ) #---------------------------------------------------------------------- def __setattr__(self, key, value): super(DotDictWithKeyTranslations, self).__setattr__( self._translate_key(key), value ) #---------------------------------------------------------------------- def __getattr__(self, key): alt_key = self._translate_key(key) if alt_key == key: return super(DotDictWithKeyTranslations, self).__getattr__(key) try: return getattr(self, alt_key) except KeyError: raise KeyError(key) #---------------------------------------------------------------------- def __delattr__(self, key): super(DotDictWithKeyTranslations, self).__delattr__( self._translate_key(key) ) if six.PY2: 
new_class_name = six.binary_type(new_class_name) DotDictWithKeyTranslations.__name__ = new_class_name return DotDictWithKeyTranslations
this function will generate a DotDict derivative class that has key translation built in. If the key is not found, translations (as specified by the translation_tuples) are performed on the key and the lookup is tried again. Only on failure of this second lookup will the KeyError exception be raised. parameters: new_class_name - the name of the returned class translation_tuples - a sequence of 2-tuples of the form: (original_substring, substitution_string) base_class - the baseclass on which this new class is to be based
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/dotdict.py#L367-L431
mozilla/configman
configman/dotdict.py
DotDict.keys_breadth_first
def keys_breadth_first(self, include_dicts=False): """a generator that returns all the keys in a set of nested DotDict instances. The keys take the form X.Y.Z""" namespaces = [] for key in self._key_order: if isinstance(getattr(self, key), DotDict): namespaces.append(key) if include_dicts: yield key else: yield key for a_namespace in namespaces: for key in self[a_namespace].keys_breadth_first(include_dicts): yield '%s.%s' % (a_namespace, key)
python
def keys_breadth_first(self, include_dicts=False): """a generator that returns all the keys in a set of nested DotDict instances. The keys take the form X.Y.Z""" namespaces = [] for key in self._key_order: if isinstance(getattr(self, key), DotDict): namespaces.append(key) if include_dicts: yield key else: yield key for a_namespace in namespaces: for key in self[a_namespace].keys_breadth_first(include_dicts): yield '%s.%s' % (a_namespace, key)
a generator that returns all the keys in a set of nested DotDict instances. The keys take the form X.Y.Z
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/dotdict.py#L207-L220
mozilla/configman
configman/dotdict.py
DotDict.assign
def assign(self, key, value): """an alternative method for assigning values to nested DotDict instances. It accepts keys in the form of X.Y.Z. If any nested DotDict instances don't yet exist, they will be created.""" key_split = key.split('.') cur_dict = self for k in key_split[:-1]: try: cur_dict = cur_dict[k] except KeyError: cur_dict[k] = self.__class__() # so that derived classes # remain true to type cur_dict = cur_dict[k] cur_dict[key_split[-1]] = value
python
def assign(self, key, value): """an alternative method for assigning values to nested DotDict instances. It accepts keys in the form of X.Y.Z. If any nested DotDict instances don't yet exist, they will be created.""" key_split = key.split('.') cur_dict = self for k in key_split[:-1]: try: cur_dict = cur_dict[k] except KeyError: cur_dict[k] = self.__class__() # so that derived classes # remain true to type cur_dict = cur_dict[k] cur_dict[key_split[-1]] = value
an alternative method for assigning values to nested DotDict instances. It accepts keys in the form of X.Y.Z. If any nested DotDict instances don't yet exist, they will be created.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/dotdict.py#L223-L236
mozilla/configman
configman/dotdict.py
DotDict.parent
def parent(self, key): """when given a key of the form X.Y.Z, this method will return the parent DotDict of the 'Z' key.""" parent_key = '.'.join(key.split('.')[:-1]) if not parent_key: return None else: return self[parent_key]
python
def parent(self, key): """when given a key of the form X.Y.Z, this method will return the parent DotDict of the 'Z' key.""" parent_key = '.'.join(key.split('.')[:-1]) if not parent_key: return None else: return self[parent_key]
when given a key of the form X.Y.Z, this method will return the parent DotDict of the 'Z' key.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/dotdict.py#L239-L246
mozilla/configman
configman/memoize.py
memoize
def memoize(max_cache_size=1000): """Python 2.4 compatible memoize decorator. It creates a cache that has a maximum size. If the cache exceeds the max, it is thrown out and a new one made. With such behavior, it is wise to set the cache just a little larger that the maximum expected need. Parameters: max_cache_size - the size to which a cache can grow """ def wrapper(f): @wraps(f) def fn(*args, **kwargs): if kwargs: key = (args, tuple(kwargs.items())) else: key = args try: return fn.cache[key] except KeyError: if fn.count >= max_cache_size: fn.cache = {} fn.count = 0 result = f(*args, **kwargs) fn.cache[key] = result fn.count += 1 return result except TypeError: return f(*args, **kwargs) fn.cache = {} fn.count = 0 return fn return wrapper
python
def memoize(max_cache_size=1000): """Python 2.4 compatible memoize decorator. It creates a cache that has a maximum size. If the cache exceeds the max, it is thrown out and a new one made. With such behavior, it is wise to set the cache just a little larger that the maximum expected need. Parameters: max_cache_size - the size to which a cache can grow """ def wrapper(f): @wraps(f) def fn(*args, **kwargs): if kwargs: key = (args, tuple(kwargs.items())) else: key = args try: return fn.cache[key] except KeyError: if fn.count >= max_cache_size: fn.cache = {} fn.count = 0 result = f(*args, **kwargs) fn.cache[key] = result fn.count += 1 return result except TypeError: return f(*args, **kwargs) fn.cache = {} fn.count = 0 return fn return wrapper
Python 2.4 compatible memoize decorator. It creates a cache that has a maximum size. If the cache exceeds the max, it is thrown out and a new one made. With such behavior, it is wise to set the cache just a little larger that the maximum expected need. Parameters: max_cache_size - the size to which a cache can grow
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/memoize.py#L9-L40
mozilla/configman
configman/option.py
Option.set_default
def set_default(self, val, force=False): """this function allows a default to be set on an option that dosen't have one. It is used when a base class defines an Option for use in derived classes but cannot predict what value would useful to the derived classes. This gives the derived classes the opportunity to set a logical default appropriate for the derived class' context. For example: class A(RequiredConfig): required_config = Namespace() required_config.add_option( 'x', default=None ) class B(A): A.required_config.x.set_default(68) parameters: val - the value for the default force - normally this function only works on Options that have not had a default set (default is None). This boolean allows you to override an existing default. """ if self.default is None or force: self.default = val self.set_value(val) self.has_changed = True else: raise OptionError( "cannot override existing default without using the 'force' " "option" )
python
def set_default(self, val, force=False): """this function allows a default to be set on an option that dosen't have one. It is used when a base class defines an Option for use in derived classes but cannot predict what value would useful to the derived classes. This gives the derived classes the opportunity to set a logical default appropriate for the derived class' context. For example: class A(RequiredConfig): required_config = Namespace() required_config.add_option( 'x', default=None ) class B(A): A.required_config.x.set_default(68) parameters: val - the value for the default force - normally this function only works on Options that have not had a default set (default is None). This boolean allows you to override an existing default. """ if self.default is None or force: self.default = val self.set_value(val) self.has_changed = True else: raise OptionError( "cannot override existing default without using the 'force' " "option" )
this function allows a default to be set on an option that dosen't have one. It is used when a base class defines an Option for use in derived classes but cannot predict what value would useful to the derived classes. This gives the derived classes the opportunity to set a logical default appropriate for the derived class' context. For example: class A(RequiredConfig): required_config = Namespace() required_config.add_option( 'x', default=None ) class B(A): A.required_config.x.set_default(68) parameters: val - the value for the default force - normally this function only works on Options that have not had a default set (default is None). This boolean allows you to override an existing default.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/option.py#L147-L180
mozilla/configman
configman/option.py
Option.copy
def copy(self): """return a copy""" o = Option( name=self.name, default=self.default, doc=self.doc, from_string_converter=self.from_string_converter, to_string_converter=self.to_string_converter, value=self.value, short_form=self.short_form, exclude_from_print_conf=self.exclude_from_print_conf, exclude_from_dump_conf=self.exclude_from_dump_conf, is_argument=self.is_argument, likely_to_be_changed=self.likely_to_be_changed, not_for_definition=self.not_for_definition, reference_value_from=self.reference_value_from, secret=self.secret, has_changed=self.has_changed, foreign_data=self.foreign_data, ) return o
python
def copy(self): """return a copy""" o = Option( name=self.name, default=self.default, doc=self.doc, from_string_converter=self.from_string_converter, to_string_converter=self.to_string_converter, value=self.value, short_form=self.short_form, exclude_from_print_conf=self.exclude_from_print_conf, exclude_from_dump_conf=self.exclude_from_dump_conf, is_argument=self.is_argument, likely_to_be_changed=self.likely_to_be_changed, not_for_definition=self.not_for_definition, reference_value_from=self.reference_value_from, secret=self.secret, has_changed=self.has_changed, foreign_data=self.foreign_data, ) return o
return a copy
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/option.py#L183-L203
mozilla/configman
configman/config_manager.py
ConfigurationManager.context
def context(self, mapping_class=DotDictWithAcquisition): """return a config as a context that calls close on every item when it goes out of scope""" config = None try: config = self.get_config(mapping_class=mapping_class) yield config finally: if config: self._walk_and_close(config)
python
def context(self, mapping_class=DotDictWithAcquisition): """return a config as a context that calls close on every item when it goes out of scope""" config = None try: config = self.get_config(mapping_class=mapping_class) yield config finally: if config: self._walk_and_close(config)
return a config as a context that calls close on every item when it goes out of scope
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/config_manager.py#L320-L329
mozilla/configman
configman/config_manager.py
ConfigurationManager.output_summary
def output_summary(self, output_stream=sys.stdout): """outputs a usage tip and the list of acceptable commands. This is useful as the output of the 'help' option. parameters: output_stream - an open file-like object suitable for use as the target of a print function """ if self.app_name or self.app_description: print('Application: ', end='', file=output_stream) if self.app_name: print(self.app_name, self.app_version, file=output_stream) if self.app_description: print(self.app_description, file=output_stream) if self.app_name or self.app_description: print('', file=output_stream) names_list = self.get_option_names() print( "usage:\n%s [OPTIONS]... " % self.app_invocation_name, end='', file=output_stream ) bracket_count = 0 # this section prints the non-switch command line arguments for key in names_list: an_option = self.option_definitions[key] if an_option.is_argument: if an_option.default is None: # there's no option, assume the user must set this print(an_option.name, end='', file=output_stream) elif ( inspect.isclass(an_option.value) or inspect.ismodule(an_option.value) ): # this is already set and it could have expanded, most # likely this is a case where a sub-command has been # loaded and we're looking to show the help for it. 
# display show it as a constant already provided rather # than as an option the user must provide print(an_option.default, end='', file=output_stream) else: # this is an argument that the user may alternatively # provide print("[ %s" % an_option.name, end='', file=output_stream) bracket_count += 1 print(']' * bracket_count, '\n', file=output_stream) names_list.sort() if names_list: print('OPTIONS:', file=output_stream) pad = ' ' * 4 for name in names_list: if name in self.options_banned_from_help: continue option = self._get_option(name) line = ' ' * 2 # always start with 2 spaces if option.short_form: line += '-%s, ' % option.short_form line += '--%s' % name line += '\n' doc = option.doc if option.doc is not None else '' if doc: line += '%s%s\n' % (pad, doc) try: value = option.value type_of_value = type(value) converter_function = to_string_converters[type_of_value] default = converter_function(value) except KeyError: default = option.value if default is not None: if ( (option.secret or 'password' in name.lower()) and not self.option_definitions.admin.expose_secrets.default ): default = '*********' if name not in ('help',): # don't bother with certain dead obvious ones line += '%s(default: %s)\n' % (pad, default) print(line, file=output_stream)
python
def output_summary(self, output_stream=sys.stdout): """outputs a usage tip and the list of acceptable commands. This is useful as the output of the 'help' option. parameters: output_stream - an open file-like object suitable for use as the target of a print function """ if self.app_name or self.app_description: print('Application: ', end='', file=output_stream) if self.app_name: print(self.app_name, self.app_version, file=output_stream) if self.app_description: print(self.app_description, file=output_stream) if self.app_name or self.app_description: print('', file=output_stream) names_list = self.get_option_names() print( "usage:\n%s [OPTIONS]... " % self.app_invocation_name, end='', file=output_stream ) bracket_count = 0 # this section prints the non-switch command line arguments for key in names_list: an_option = self.option_definitions[key] if an_option.is_argument: if an_option.default is None: # there's no option, assume the user must set this print(an_option.name, end='', file=output_stream) elif ( inspect.isclass(an_option.value) or inspect.ismodule(an_option.value) ): # this is already set and it could have expanded, most # likely this is a case where a sub-command has been # loaded and we're looking to show the help for it. 
# display show it as a constant already provided rather # than as an option the user must provide print(an_option.default, end='', file=output_stream) else: # this is an argument that the user may alternatively # provide print("[ %s" % an_option.name, end='', file=output_stream) bracket_count += 1 print(']' * bracket_count, '\n', file=output_stream) names_list.sort() if names_list: print('OPTIONS:', file=output_stream) pad = ' ' * 4 for name in names_list: if name in self.options_banned_from_help: continue option = self._get_option(name) line = ' ' * 2 # always start with 2 spaces if option.short_form: line += '-%s, ' % option.short_form line += '--%s' % name line += '\n' doc = option.doc if option.doc is not None else '' if doc: line += '%s%s\n' % (pad, doc) try: value = option.value type_of_value = type(value) converter_function = to_string_converters[type_of_value] default = converter_function(value) except KeyError: default = option.value if default is not None: if ( (option.secret or 'password' in name.lower()) and not self.option_definitions.admin.expose_secrets.default ): default = '*********' if name not in ('help',): # don't bother with certain dead obvious ones line += '%s(default: %s)\n' % (pad, default) print(line, file=output_stream)
outputs a usage tip and the list of acceptable commands. This is useful as the output of the 'help' option. parameters: output_stream - an open file-like object suitable for use as the target of a print function
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/config_manager.py#L341-L425
mozilla/configman
configman/config_manager.py
ConfigurationManager.print_conf
def print_conf(self): """write a config file to the pathname specified in the parameter. The file extention determines the type of file written and must match a registered type. parameters: config_pathname - the full path and filename of the target config file.""" config_file_type = self._get_option('admin.print_conf').value @contextlib.contextmanager def stdout_opener(): yield sys.stdout skip_keys = [ k for (k, v) in six.iteritems(self.option_definitions) if isinstance(v, Option) and v.exclude_from_print_conf ] self.write_conf(config_file_type, stdout_opener, skip_keys=skip_keys)
python
def print_conf(self): """write a config file to the pathname specified in the parameter. The file extention determines the type of file written and must match a registered type. parameters: config_pathname - the full path and filename of the target config file.""" config_file_type = self._get_option('admin.print_conf').value @contextlib.contextmanager def stdout_opener(): yield sys.stdout skip_keys = [ k for (k, v) in six.iteritems(self.option_definitions) if isinstance(v, Option) and v.exclude_from_print_conf ] self.write_conf(config_file_type, stdout_opener, skip_keys=skip_keys)
write a config file to the pathname specified in the parameter. The file extention determines the type of file written and must match a registered type. parameters: config_pathname - the full path and filename of the target config file.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/config_manager.py#L428-L448
mozilla/configman
configman/config_manager.py
ConfigurationManager.dump_conf
def dump_conf(self, config_pathname=None): """write a config file to the pathname specified in the parameter. The file extention determines the type of file written and must match a registered type. parameters: config_pathname - the full path and filename of the target config file.""" if not config_pathname: config_pathname = self._get_option('admin.dump_conf').value opener = functools.partial(open, config_pathname, 'w') config_file_type = os.path.splitext(config_pathname)[1][1:] skip_keys = [ k for (k, v) in six.iteritems(self.option_definitions) if isinstance(v, Option) and v.exclude_from_dump_conf ] self.write_conf(config_file_type, opener, skip_keys=skip_keys)
python
def dump_conf(self, config_pathname=None): """write a config file to the pathname specified in the parameter. The file extention determines the type of file written and must match a registered type. parameters: config_pathname - the full path and filename of the target config file.""" if not config_pathname: config_pathname = self._get_option('admin.dump_conf').value opener = functools.partial(open, config_pathname, 'w') config_file_type = os.path.splitext(config_pathname)[1][1:] skip_keys = [ k for (k, v) in six.iteritems(self.option_definitions) if isinstance(v, Option) and v.exclude_from_dump_conf ] self.write_conf(config_file_type, opener, skip_keys=skip_keys)
write a config file to the pathname specified in the parameter. The file extention determines the type of file written and must match a registered type. parameters: config_pathname - the full path and filename of the target config file.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/config_manager.py#L451-L472
mozilla/configman
configman/config_manager.py
ConfigurationManager.write_conf
def write_conf(self, config_file_type, opener, skip_keys=None): """write a configuration file to a file-like object. parameters: config_file_type - a string containing a registered file type OR a for_XXX module from the value_source package. Passing in an string that is unregistered will result in a KeyError opener - a callable object or function that returns a file like object that works as a context in a with statement.""" blocked_keys = self.keys_blocked_from_output if skip_keys: blocked_keys.extend(skip_keys) if blocked_keys: option_defs = self.option_definitions.safe_copy() for a_blocked_key in blocked_keys: try: del option_defs[a_blocked_key] except (AttributeError, KeyError): # okay that key isn't here pass # remove empty namespaces all_keys = [k for k in option_defs.keys_breadth_first(include_dicts=True)] for key in all_keys: candidate = option_defs[key] if (isinstance(candidate, Namespace) and not len(candidate)): del option_defs[key] else: option_defs = self.option_definitions # find all of the secret options and overwrite their values with # '*' * 16 if not self.option_definitions.admin.expose_secrets.default: for a_key in option_defs.keys_breadth_first(): an_option = option_defs[a_key] if ( (not a_key.startswith('admin')) and isinstance(an_option, Option) and an_option.secret ): # force the option to be a string of * option_defs[a_key].value = '*' * 16 option_defs[a_key].from_string_converter = str dispatch_request_to_write(config_file_type, option_defs, opener)
python
def write_conf(self, config_file_type, opener, skip_keys=None): """write a configuration file to a file-like object. parameters: config_file_type - a string containing a registered file type OR a for_XXX module from the value_source package. Passing in an string that is unregistered will result in a KeyError opener - a callable object or function that returns a file like object that works as a context in a with statement.""" blocked_keys = self.keys_blocked_from_output if skip_keys: blocked_keys.extend(skip_keys) if blocked_keys: option_defs = self.option_definitions.safe_copy() for a_blocked_key in blocked_keys: try: del option_defs[a_blocked_key] except (AttributeError, KeyError): # okay that key isn't here pass # remove empty namespaces all_keys = [k for k in option_defs.keys_breadth_first(include_dicts=True)] for key in all_keys: candidate = option_defs[key] if (isinstance(candidate, Namespace) and not len(candidate)): del option_defs[key] else: option_defs = self.option_definitions # find all of the secret options and overwrite their values with # '*' * 16 if not self.option_definitions.admin.expose_secrets.default: for a_key in option_defs.keys_breadth_first(): an_option = option_defs[a_key] if ( (not a_key.startswith('admin')) and isinstance(an_option, Option) and an_option.secret ): # force the option to be a string of * option_defs[a_key].value = '*' * 16 option_defs[a_key].from_string_converter = str dispatch_request_to_write(config_file_type, option_defs, opener)
write a configuration file to a file-like object. parameters: config_file_type - a string containing a registered file type OR a for_XXX module from the value_source package. Passing in an string that is unregistered will result in a KeyError opener - a callable object or function that returns a file like object that works as a context in a with statement.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/config_manager.py#L475-L522
mozilla/configman
configman/config_manager.py
ConfigurationManager.log_config
def log_config(self, logger): """write out the current configuration to a log-like object. parameters: logger - a object that implements a method called 'info' with the same semantics as the call to 'logger.info'""" logger.info("app_name: %s", self.app_name) logger.info("app_version: %s", self.app_version) logger.info("current configuration:") config = [(key, self.option_definitions[key].value) for key in self.option_definitions.keys_breadth_first() if key not in self.keys_blocked_from_output] config.sort() for key, val in config: if ( self.option_definitions[key].secret or 'password' in key.lower() ): logger.info('%s: *********', key) else: try: logger.info('%s: %s', key, to_string_converters[type(key)](val)) except KeyError: logger.info('%s: %s', key, val)
python
def log_config(self, logger): """write out the current configuration to a log-like object. parameters: logger - a object that implements a method called 'info' with the same semantics as the call to 'logger.info'""" logger.info("app_name: %s", self.app_name) logger.info("app_version: %s", self.app_version) logger.info("current configuration:") config = [(key, self.option_definitions[key].value) for key in self.option_definitions.keys_breadth_first() if key not in self.keys_blocked_from_output] config.sort() for key, val in config: if ( self.option_definitions[key].secret or 'password' in key.lower() ): logger.info('%s: *********', key) else: try: logger.info('%s: %s', key, to_string_converters[type(key)](val)) except KeyError: logger.info('%s: %s', key, val)
write out the current configuration to a log-like object. parameters: logger - a object that implements a method called 'info' with the same semantics as the call to 'logger.info
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/config_manager.py#L525-L550
mozilla/configman
configman/config_manager.py
ConfigurationManager.get_option_names
def get_option_names(self): """returns a list of fully qualified option names. returns: a list of strings representing the Options in the source Namespace list. Each item will be fully qualified with dot delimited Namespace names. """ return [x for x in self.option_definitions.keys_breadth_first() if isinstance(self.option_definitions[x], Option)]
python
def get_option_names(self): """returns a list of fully qualified option names. returns: a list of strings representing the Options in the source Namespace list. Each item will be fully qualified with dot delimited Namespace names. """ return [x for x in self.option_definitions.keys_breadth_first() if isinstance(self.option_definitions[x], Option)]
returns a list of fully qualified option names. returns: a list of strings representing the Options in the source Namespace list. Each item will be fully qualified with dot delimited Namespace names.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/config_manager.py#L553-L562
mozilla/configman
configman/config_manager.py
ConfigurationManager._create_reference_value_options
def _create_reference_value_options(self, keys, finished_keys): """this method steps through the option definitions looking for alt paths. On finding one, it creates the 'reference_value_from' links within the option definitions and populates it with copied options.""" # a set of known reference_value_from_links set_of_reference_value_option_names = set() for key in keys: if key in finished_keys: continue an_option = self.option_definitions[key] if an_option.reference_value_from: fully_qualified_reference_name = '.'.join(( an_option.reference_value_from, an_option.name )) if fully_qualified_reference_name in keys: continue # this referenced value has already been defined # no need to repeat it - skip on to the next key reference_option = an_option.copy() reference_option.reference_value_from = None reference_option.name = fully_qualified_reference_name # wait, aren't we setting a fully qualified dotted name into # the name field? Yes, 'add_option' below sees that # full pathname and does the right thing with it to ensure # that the reference_option is created within the # correct namespace set_of_reference_value_option_names.add( fully_qualified_reference_name ) self.option_definitions.add_option(reference_option) for a_reference_value_option_name in set_of_reference_value_option_names: for x in range(a_reference_value_option_name.count('.')): namespace_path = \ a_reference_value_option_name.rsplit('.', x + 1)[0] self.option_definitions[namespace_path].ref_value_namespace() return set_of_reference_value_option_names
python
def _create_reference_value_options(self, keys, finished_keys): """this method steps through the option definitions looking for alt paths. On finding one, it creates the 'reference_value_from' links within the option definitions and populates it with copied options.""" # a set of known reference_value_from_links set_of_reference_value_option_names = set() for key in keys: if key in finished_keys: continue an_option = self.option_definitions[key] if an_option.reference_value_from: fully_qualified_reference_name = '.'.join(( an_option.reference_value_from, an_option.name )) if fully_qualified_reference_name in keys: continue # this referenced value has already been defined # no need to repeat it - skip on to the next key reference_option = an_option.copy() reference_option.reference_value_from = None reference_option.name = fully_qualified_reference_name # wait, aren't we setting a fully qualified dotted name into # the name field? Yes, 'add_option' below sees that # full pathname and does the right thing with it to ensure # that the reference_option is created within the # correct namespace set_of_reference_value_option_names.add( fully_qualified_reference_name ) self.option_definitions.add_option(reference_option) for a_reference_value_option_name in set_of_reference_value_option_names: for x in range(a_reference_value_option_name.count('.')): namespace_path = \ a_reference_value_option_name.rsplit('.', x + 1)[0] self.option_definitions[namespace_path].ref_value_namespace() return set_of_reference_value_option_names
this method steps through the option definitions looking for alt paths. On finding one, it creates the 'reference_value_from' links within the option definitions and populates it with copied options.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/config_manager.py#L565-L603
mozilla/configman
configman/config_manager.py
ConfigurationManager._overlay_expand
def _overlay_expand(self): """This method overlays each of the value sources onto the default in each of the defined options. It does so using a breadth first iteration, overlaying and expanding each level of the tree in turn. As soon as no changes were made to any level, the loop breaks and the work is done. The actual action of the overlay is to take the value from the source and copy into the 'default' member of each Option object. "expansion" means converting an option value into its real type from string. The conversion is accomplished by simply calling the 'set_value' method of the Option object. If the resultant type has its own configuration options, bring those into the current namespace and then proceed to overlay/expand those. """ new_keys_have_been_discovered = True # loop control, False breaks loop finished_keys = set() all_reference_values = {} while new_keys_have_been_discovered: # loop until nothing more is done # names_of_all_exsting_options holds a list of all keys in the # option definitons in breadth first order using this form: # [ 'x', 'y', 'z', 'x.a', 'x.b', 'z.a', 'z.b', 'x.a.j', 'x.a.k', # 'x.b.h'] names_of_all_exsting_options = [ x for x in self.option_definitions.keys_breadth_first() if isinstance(self.option_definitions[x], Option) ] new_keys_have_been_discovered = False # setup to break loop # create alternate paths options set_of_reference_value_option_names = \ self._create_reference_value_options( names_of_all_exsting_options, finished_keys ) for a_ref_option_name in set_of_reference_value_option_names: if a_ref_option_name not in all_reference_values: all_reference_values[a_ref_option_name] = [] all_keys = list(set_of_reference_value_option_names) \ + names_of_all_exsting_options # previous versions of this method pulled the values from the # values sources deeper within the following nested loops. # that was not necessary and caused a lot of redundant work. 
# the 'values_from_all_sources' now holds all the the values # from each of the value sources. values_from_all_sources = [ a_value_source.get_values( self, # pass in the config_manager itself True, # ignore mismatches self.value_source_object_hook # build with this class ) for a_value_source in self.values_source_list ] # overlay process: # fetch all the default values from the value sources before # applying the from string conversions for key in all_keys: if key in finished_keys: continue #if not isinstance(an_option, Option): # continue # aggregations and other types are ignored # loop through all the value sources looking for values # that match this current key. if self.option_definitions[key].reference_value_from: reference_value_from = ( self.option_definitions[key].reference_value_from ) top_key = key.split('.')[-1] self.option_definitions[key].default = ( self.option_definitions[reference_value_from] [top_key].default ) all_reference_values[ '.'.join((reference_value_from, top_key)) ].append( key ) an_option = self.option_definitions[key] if key in all_reference_values: # make sure that this value gets propagated to keys # even if the keys have already been overlaid finished_keys -= set( all_reference_values[key] ) for val_src_dict in values_from_all_sources: try: # overlay the default with the new value from # the value source. This assignment may come # via acquisition, so the key given may not have # been an exact match for what was returned. 
an_option.has_changed = ( an_option.default != val_src_dict[key] ) an_option.default = val_src_dict[key] if key in all_reference_values: # make sure that this value gets propagated to keys # even if the keys have already been overlaid finished_keys -= set( all_reference_values[key] ) except KeyError as x: pass # okay, that source doesn't have this value # expansion process: # step through all the keys converting them to their proper # types and bringing in any new keys in the process for key in all_keys: if key in finished_keys: continue # mark this key as having been seen and processed finished_keys.add(key) an_option = self.option_definitions[key] #if not isinstance(an_option, Option): # continue # aggregations, namespaces are ignored # apply the from string conversion to make the real value an_option.set_value(an_option.default) # new values have been seen, don't let loop break new_keys_have_been_discovered = True try: try: # try to fetch new requirements from this value new_requirements = \ an_option.value.get_required_config() except (AttributeError, KeyError): new_requirements = getattr( an_option.value, 'required_config', None ) # make sure what we got as new_req is actually a # Mapping of some sort if not isinstance(new_requirements, collections.Mapping): # we didn't get a mapping, perhaps the option value # was a Mock object - in any case we can't try to # interpret 'new_req' as a configman requirement # collection. 
We must abandon processing this # option further continue if not isinstance(new_requirements, Namespace): new_requirements = Namespace( initializer=new_requirements ) # get the parent namespace current_namespace = self.option_definitions.parent(key) if current_namespace is None: # we're at the top level, use the base namespace current_namespace = self.option_definitions if current_namespace._reference_value_from: # don't expand things that are in reference value # namespaces, they will be populated by expanding the # targets continue # some new Options to be brought in may have already been # seen and in the finished_keys set. They must be reset # as unfinished so that a new default doesn't permanently # overwrite any of the values already placed by the # overlays. So we've got to remove those keys from the # finished keys list. # Before we can do that however, we need the fully # qualified names for the new keys. qualified_parent_name_list = key.rsplit('.', 1) if len(qualified_parent_name_list) > 1: qualified_parent_name = qualified_parent_name_list[0] else: qualified_parent_name = '' finished_keys = finished_keys.difference( '.'.join((qualified_parent_name, ref_option_name)) for ref_option_name in new_requirements ) # add the new Options to the namespace new_namespace = new_requirements.safe_copy( an_option.reference_value_from ) for new_key in new_namespace.keys_breadth_first(): if new_key not in current_namespace: current_namespace[new_key] = new_namespace[new_key] except AttributeError as x: # there are apparently no new Options to bring in from # this option's value pass return finished_keys
python
def _overlay_expand(self): """This method overlays each of the value sources onto the default in each of the defined options. It does so using a breadth first iteration, overlaying and expanding each level of the tree in turn. As soon as no changes were made to any level, the loop breaks and the work is done. The actual action of the overlay is to take the value from the source and copy into the 'default' member of each Option object. "expansion" means converting an option value into its real type from string. The conversion is accomplished by simply calling the 'set_value' method of the Option object. If the resultant type has its own configuration options, bring those into the current namespace and then proceed to overlay/expand those. """ new_keys_have_been_discovered = True # loop control, False breaks loop finished_keys = set() all_reference_values = {} while new_keys_have_been_discovered: # loop until nothing more is done # names_of_all_exsting_options holds a list of all keys in the # option definitons in breadth first order using this form: # [ 'x', 'y', 'z', 'x.a', 'x.b', 'z.a', 'z.b', 'x.a.j', 'x.a.k', # 'x.b.h'] names_of_all_exsting_options = [ x for x in self.option_definitions.keys_breadth_first() if isinstance(self.option_definitions[x], Option) ] new_keys_have_been_discovered = False # setup to break loop # create alternate paths options set_of_reference_value_option_names = \ self._create_reference_value_options( names_of_all_exsting_options, finished_keys ) for a_ref_option_name in set_of_reference_value_option_names: if a_ref_option_name not in all_reference_values: all_reference_values[a_ref_option_name] = [] all_keys = list(set_of_reference_value_option_names) \ + names_of_all_exsting_options # previous versions of this method pulled the values from the # values sources deeper within the following nested loops. # that was not necessary and caused a lot of redundant work. 
# the 'values_from_all_sources' now holds all the the values # from each of the value sources. values_from_all_sources = [ a_value_source.get_values( self, # pass in the config_manager itself True, # ignore mismatches self.value_source_object_hook # build with this class ) for a_value_source in self.values_source_list ] # overlay process: # fetch all the default values from the value sources before # applying the from string conversions for key in all_keys: if key in finished_keys: continue #if not isinstance(an_option, Option): # continue # aggregations and other types are ignored # loop through all the value sources looking for values # that match this current key. if self.option_definitions[key].reference_value_from: reference_value_from = ( self.option_definitions[key].reference_value_from ) top_key = key.split('.')[-1] self.option_definitions[key].default = ( self.option_definitions[reference_value_from] [top_key].default ) all_reference_values[ '.'.join((reference_value_from, top_key)) ].append( key ) an_option = self.option_definitions[key] if key in all_reference_values: # make sure that this value gets propagated to keys # even if the keys have already been overlaid finished_keys -= set( all_reference_values[key] ) for val_src_dict in values_from_all_sources: try: # overlay the default with the new value from # the value source. This assignment may come # via acquisition, so the key given may not have # been an exact match for what was returned. 
an_option.has_changed = ( an_option.default != val_src_dict[key] ) an_option.default = val_src_dict[key] if key in all_reference_values: # make sure that this value gets propagated to keys # even if the keys have already been overlaid finished_keys -= set( all_reference_values[key] ) except KeyError as x: pass # okay, that source doesn't have this value # expansion process: # step through all the keys converting them to their proper # types and bringing in any new keys in the process for key in all_keys: if key in finished_keys: continue # mark this key as having been seen and processed finished_keys.add(key) an_option = self.option_definitions[key] #if not isinstance(an_option, Option): # continue # aggregations, namespaces are ignored # apply the from string conversion to make the real value an_option.set_value(an_option.default) # new values have been seen, don't let loop break new_keys_have_been_discovered = True try: try: # try to fetch new requirements from this value new_requirements = \ an_option.value.get_required_config() except (AttributeError, KeyError): new_requirements = getattr( an_option.value, 'required_config', None ) # make sure what we got as new_req is actually a # Mapping of some sort if not isinstance(new_requirements, collections.Mapping): # we didn't get a mapping, perhaps the option value # was a Mock object - in any case we can't try to # interpret 'new_req' as a configman requirement # collection. 
We must abandon processing this # option further continue if not isinstance(new_requirements, Namespace): new_requirements = Namespace( initializer=new_requirements ) # get the parent namespace current_namespace = self.option_definitions.parent(key) if current_namespace is None: # we're at the top level, use the base namespace current_namespace = self.option_definitions if current_namespace._reference_value_from: # don't expand things that are in reference value # namespaces, they will be populated by expanding the # targets continue # some new Options to be brought in may have already been # seen and in the finished_keys set. They must be reset # as unfinished so that a new default doesn't permanently # overwrite any of the values already placed by the # overlays. So we've got to remove those keys from the # finished keys list. # Before we can do that however, we need the fully # qualified names for the new keys. qualified_parent_name_list = key.rsplit('.', 1) if len(qualified_parent_name_list) > 1: qualified_parent_name = qualified_parent_name_list[0] else: qualified_parent_name = '' finished_keys = finished_keys.difference( '.'.join((qualified_parent_name, ref_option_name)) for ref_option_name in new_requirements ) # add the new Options to the namespace new_namespace = new_requirements.safe_copy( an_option.reference_value_from ) for new_key in new_namespace.keys_breadth_first(): if new_key not in current_namespace: current_namespace[new_key] = new_namespace[new_key] except AttributeError as x: # there are apparently no new Options to bring in from # this option's value pass return finished_keys
This method overlays each of the value sources onto the default in each of the defined options. It does so using a breadth first iteration, overlaying and expanding each level of the tree in turn. As soon as no changes were made to any level, the loop breaks and the work is done. The actual action of the overlay is to take the value from the source and copy into the 'default' member of each Option object. "expansion" means converting an option value into its real type from string. The conversion is accomplished by simply calling the 'set_value' method of the Option object. If the resultant type has its own configuration options, bring those into the current namespace and then proceed to overlay/expand those.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/config_manager.py#L606-L798
mozilla/configman
configman/config_manager.py
ConfigurationManager._check_for_mismatches
def _check_for_mismatches(self, known_keys): """check for bad options from value sources""" for a_value_source in self.values_source_list: try: if a_value_source.always_ignore_mismatches: continue except AttributeError: # ok, this values source doesn't have the concept # always igoring mismatches, we won't tolerate mismatches pass # we want to fetch the keys from the value sources so that we can # check for mismatches. Commandline value sources, are different, # we never want to allow unmatched keys from the command line. # By detecting if this value source is a command line source, we # can employ the command line's own mismatch detection. The # boolean 'allow_mismatches' controls application of the tollerance # for mismatches. if hasattr(a_value_source, 'command_line_value_source'): allow_mismatches = False else: allow_mismatches = True # make a set of all the keys from a value source in the form # of strings like this: 'x.y.z' value_source_mapping = a_value_source.get_values( self, allow_mismatches, self.value_source_object_hook ) value_source_keys_set = set([ k for k in DotDict(value_source_mapping).keys_breadth_first() ]) # make a set of the keys that didn't match any of the known # keys in the requirements unmatched_keys = value_source_keys_set.difference(known_keys) # some of the unmatched keys may actually be ok because the were # used during acquisition. # remove keys of the form 'y.z' if they match a known key of the # form 'x.y.z' for key in unmatched_keys.copy(): key_is_okay = six.moves.reduce( lambda x, y: x or y, (known_key.endswith(key) for known_key in known_keys) ) if key_is_okay: unmatched_keys.remove(key) # anything left in the unmatched_key set is a badly formed key. # issue a warning if unmatched_keys: if self.option_definitions.admin.strict.default: # raise hell... 
if len(unmatched_keys) > 1: raise NotAnOptionError( "%s are not valid Options" % unmatched_keys ) elif len(unmatched_keys) == 1: raise NotAnOptionError( "%s is not a valid Option" % unmatched_keys.pop() ) else: warnings.warn( 'Invalid options: %s' % ', '.join(sorted(unmatched_keys)) )
python
def _check_for_mismatches(self, known_keys): """check for bad options from value sources""" for a_value_source in self.values_source_list: try: if a_value_source.always_ignore_mismatches: continue except AttributeError: # ok, this values source doesn't have the concept # always igoring mismatches, we won't tolerate mismatches pass # we want to fetch the keys from the value sources so that we can # check for mismatches. Commandline value sources, are different, # we never want to allow unmatched keys from the command line. # By detecting if this value source is a command line source, we # can employ the command line's own mismatch detection. The # boolean 'allow_mismatches' controls application of the tollerance # for mismatches. if hasattr(a_value_source, 'command_line_value_source'): allow_mismatches = False else: allow_mismatches = True # make a set of all the keys from a value source in the form # of strings like this: 'x.y.z' value_source_mapping = a_value_source.get_values( self, allow_mismatches, self.value_source_object_hook ) value_source_keys_set = set([ k for k in DotDict(value_source_mapping).keys_breadth_first() ]) # make a set of the keys that didn't match any of the known # keys in the requirements unmatched_keys = value_source_keys_set.difference(known_keys) # some of the unmatched keys may actually be ok because the were # used during acquisition. # remove keys of the form 'y.z' if they match a known key of the # form 'x.y.z' for key in unmatched_keys.copy(): key_is_okay = six.moves.reduce( lambda x, y: x or y, (known_key.endswith(key) for known_key in known_keys) ) if key_is_okay: unmatched_keys.remove(key) # anything left in the unmatched_key set is a badly formed key. # issue a warning if unmatched_keys: if self.option_definitions.admin.strict.default: # raise hell... 
if len(unmatched_keys) > 1: raise NotAnOptionError( "%s are not valid Options" % unmatched_keys ) elif len(unmatched_keys) == 1: raise NotAnOptionError( "%s is not a valid Option" % unmatched_keys.pop() ) else: warnings.warn( 'Invalid options: %s' % ', '.join(sorted(unmatched_keys)) )
check for bad options from value sources
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/config_manager.py#L801-L863
mozilla/configman
configman/config_manager.py
ConfigurationManager._generate_config
def _generate_config(self, mapping_class): """This routine generates a copy of the DotDict based config""" config = mapping_class() self._walk_config_copy_values( self.option_definitions, config, mapping_class ) return config
python
def _generate_config(self, mapping_class): """This routine generates a copy of the DotDict based config""" config = mapping_class() self._walk_config_copy_values( self.option_definitions, config, mapping_class ) return config
This routine generates a copy of the DotDict based config
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/config_manager.py#L879-L887
mozilla/configman
demo/advanced_demo2.py
PGPooledTransaction.close
def close(self): """close all pooled connections""" print("PGPooledTransaction - shutting down connection pool") for name, conn in self.pool.iteritems(): conn.close() print("PGPooledTransaction - connection %s closed" % name)
python
def close(self): """close all pooled connections""" print("PGPooledTransaction - shutting down connection pool") for name, conn in self.pool.iteritems(): conn.close() print("PGPooledTransaction - connection %s closed" % name)
close all pooled connections
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/demo/advanced_demo2.py#L182-L187
mozilla/configman
configman/def_sources/for_argparse.py
find_action_name_by_value
def find_action_name_by_value(registry, target_action_instance): """the association of a name of an action class with a human readable string is exposed externally only at the time of argument definitions. This routine, when given a reference to argparse's internal action registry and an action, will find that action and return the name under which it was registered. """ target_type = type(target_action_instance) for key, value in six.iteritems(registry['action']): if value is target_type: if key is None: return 'store' return key return None
python
def find_action_name_by_value(registry, target_action_instance): """the association of a name of an action class with a human readable string is exposed externally only at the time of argument definitions. This routine, when given a reference to argparse's internal action registry and an action, will find that action and return the name under which it was registered. """ target_type = type(target_action_instance) for key, value in six.iteritems(registry['action']): if value is target_type: if key is None: return 'store' return key return None
the association of a name of an action class with a human readable string is exposed externally only at the time of argument definitions. This routine, when given a reference to argparse's internal action registry and an action, will find that action and return the name under which it was registered.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/def_sources/for_argparse.py#L39-L52
mozilla/configman
configman/def_sources/for_argparse.py
get_args_and_values
def get_args_and_values(parser, an_action): """this rountine attempts to reconstruct the kwargs that were used in the creation of an action object""" args = inspect.getargspec(an_action.__class__.__init__).args kwargs = dict( (an_attr, getattr(an_action, an_attr)) for an_attr in args if ( an_attr not in ('self', 'required') and getattr(an_action, an_attr) is not None ) ) action_name = find_action_name_by_value( parser._optionals._registries, an_action ) if 'required' in kwargs: del kwargs['required'] kwargs['action'] = action_name if 'option_strings' in kwargs: args = tuple(kwargs['option_strings']) del kwargs['option_strings'] else: args = () return args, kwargs
python
def get_args_and_values(parser, an_action): """this rountine attempts to reconstruct the kwargs that were used in the creation of an action object""" args = inspect.getargspec(an_action.__class__.__init__).args kwargs = dict( (an_attr, getattr(an_action, an_attr)) for an_attr in args if ( an_attr not in ('self', 'required') and getattr(an_action, an_attr) is not None ) ) action_name = find_action_name_by_value( parser._optionals._registries, an_action ) if 'required' in kwargs: del kwargs['required'] kwargs['action'] = action_name if 'option_strings' in kwargs: args = tuple(kwargs['option_strings']) del kwargs['option_strings'] else: args = () return args, kwargs
this rountine attempts to reconstruct the kwargs that were used in the creation of an action object
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/def_sources/for_argparse.py#L56-L80
mozilla/configman
configman/def_sources/for_argparse.py
setup_definitions
def setup_definitions(source, destination): """this method stars the process of configman reading and using an argparse instance as a source of configuration definitions.""" #"""assume that source is of type argparse try: destination.update(source.get_required_config()) except AttributeError: # looks like the user passed in a real arpgapse parser rather than our # bastardized version of one. No problem, we can work with it, # though the translation won't be as perfect. our_parser = ArgumentParser() for i, an_action in enumerate(source._actions): args, kwargs = get_args_and_values(source, an_action) dest = kwargs.get('dest', '') if dest in ('help', 'version'): continue our_parser.add_argument(*args, **kwargs) destination.update(our_parser.get_required_config())
python
def setup_definitions(source, destination): """this method stars the process of configman reading and using an argparse instance as a source of configuration definitions.""" #"""assume that source is of type argparse try: destination.update(source.get_required_config()) except AttributeError: # looks like the user passed in a real arpgapse parser rather than our # bastardized version of one. No problem, we can work with it, # though the translation won't be as perfect. our_parser = ArgumentParser() for i, an_action in enumerate(source._actions): args, kwargs = get_args_and_values(source, an_action) dest = kwargs.get('dest', '') if dest in ('help', 'version'): continue our_parser.add_argument(*args, **kwargs) destination.update(our_parser.get_required_config())
this method stars the process of configman reading and using an argparse instance as a source of configuration definitions.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/def_sources/for_argparse.py#L662-L679
mozilla/configman
configman/def_sources/for_argparse.py
SubparserFromStringConverter.add_namespace
def add_namespace(self, name, a_namespace): """as we build up argparse, the actions that define a subparser are translated into configman options. Each of those options must be tagged with the value of the subparse to which they correspond.""" # save a local copy of the namespace self.namespaces[name] = a_namespace # iterate through the namespace branding each of the options with the # name of the subparser to which they belong for k in a_namespace.keys_breadth_first(): an_option = a_namespace[k] if not an_option.foreign_data: an_option.foreign_data = DotDict() an_option.foreign_data['argparse.owning_subparser_name'] = name
python
def add_namespace(self, name, a_namespace): """as we build up argparse, the actions that define a subparser are translated into configman options. Each of those options must be tagged with the value of the subparse to which they correspond.""" # save a local copy of the namespace self.namespaces[name] = a_namespace # iterate through the namespace branding each of the options with the # name of the subparser to which they belong for k in a_namespace.keys_breadth_first(): an_option = a_namespace[k] if not an_option.foreign_data: an_option.foreign_data = DotDict() an_option.foreign_data['argparse.owning_subparser_name'] = name
as we build up argparse, the actions that define a subparser are translated into configman options. Each of those options must be tagged with the value of the subparse to which they correspond.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/def_sources/for_argparse.py#L98-L110
mozilla/configman
configman/def_sources/for_argparse.py
ConfigmanSubParsersAction.add_parser
def add_parser(self, *args, **kwargs): """each time a subparser action is used to create a new parser object we must save the original args & kwargs. In a later phase of configman, we'll need to reproduce the subparsers exactly without resorting to copying. We save the args & kwargs in the 'foreign_data' section of the configman option that corresponds with the subparser action.""" command_name = args[0] new_kwargs = kwargs.copy() new_kwargs['configman_subparsers_option'] = self._configman_option new_kwargs['subparser_name'] = command_name subparsers = self._configman_option.foreign_data.argparse.subparsers a_subparser = super(ConfigmanSubParsersAction, self).add_parser( *args, **new_kwargs ) subparsers[command_name] = DotDict({ "args": args, "kwargs": new_kwargs, "subparser": a_subparser }) return a_subparser
python
def add_parser(self, *args, **kwargs):
    """each time a subparser action is used to create a new parser object
    we must save the original args & kwargs.  In a later phase of
    configman, we'll need to reproduce the subparsers exactly without
    resorting to copying.  We save the args & kwargs in the
    'foreign_data' section of the configman option that corresponds with
    the subparser action."""
    # by argparse convention, the first positional arg is the subcommand
    # name under which this parser is registered
    command_name = args[0]
    new_kwargs = kwargs.copy()
    # tag the new subparser with the configman option that owns the whole
    # subparser action and with the subcommand's own name
    new_kwargs['configman_subparsers_option'] = self._configman_option
    new_kwargs['subparser_name'] = command_name
    subparsers = self._configman_option.foreign_data.argparse.subparsers
    # let the base class build the actual parser object
    a_subparser = super(ConfigmanSubParsersAction, self).add_parser(
        *args,
        **new_kwargs
    )
    # record everything needed to recreate this subparser later during
    # the configman overlay process
    subparsers[command_name] = DotDict({
        "args": args,
        "kwargs": new_kwargs,
        "subparser": a_subparser
    })
    return a_subparser
each time a subparser action is used to create a new parser object we must save the original args & kwargs. In a later phase of configman, we'll need to reproduce the subparsers exactly without resorting to copying. We save the args & kwargs in the 'foreign_data' section of the configman option that corresponds with the subparser action.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/def_sources/for_argparse.py#L171-L192
mozilla/configman
configman/def_sources/for_argparse.py
ArgumentParser.get_required_config
def get_required_config(self):
    """because of the existence of subparsers, the configman options
    that correspond with argparse arguments are not a constant.  We need
    to produce a copy of the namespace rather than the actual embedded
    namespace."""
    required_config = Namespace()
    # add current options to a copy of required config
    for k, v in iteritems_breadth_first(self.required_config):
        required_config[k] = v
    # get any option found in any subparsers
    try:
        # NOTE(review): these chained attribute lookups are what the
        # except clause below guards - any missing link means "no
        # subparsers have been defined"
        subparser_namespaces = (
            self.configman_subparsers_option.foreign_data
            .argparse.subprocessor_from_string_converter
        )
        subparsers = (
            self._argparse_subparsers._configman_option.foreign_data
            .argparse.subparsers
        )
        # each subparser needs to have its configman options set up
        # in the subparser's configman option.  This routine copies
        # the required_config of each subparser into the
        # SubparserFromStringConverter defined above.
        for subparser_name, subparser_data in six.iteritems(subparsers):
            subparser_namespaces.add_namespace(
                subparser_name,
                subparser_data.subparser.get_required_config()
            )
    except AttributeError:
        # there is no subparser
        pass
    return required_config
python
def get_required_config(self):
    """because of the existence of subparsers, the configman options
    that correspond with argparse arguments are not a constant.  We need
    to produce a copy of the namespace rather than the actual embedded
    namespace."""
    required_config = Namespace()
    # add current options to a copy of required config
    for k, v in iteritems_breadth_first(self.required_config):
        required_config[k] = v
    # get any option found in any subparsers
    try:
        # NOTE(review): these chained attribute lookups are what the
        # except clause below guards - any missing link means "no
        # subparsers have been defined"
        subparser_namespaces = (
            self.configman_subparsers_option.foreign_data
            .argparse.subprocessor_from_string_converter
        )
        subparsers = (
            self._argparse_subparsers._configman_option.foreign_data
            .argparse.subparsers
        )
        # each subparser needs to have its configman options set up
        # in the subparser's configman option.  This routine copies
        # the required_config of each subparser into the
        # SubparserFromStringConverter defined above.
        for subparser_name, subparser_data in six.iteritems(subparsers):
            subparser_namespaces.add_namespace(
                subparser_name,
                subparser_data.subparser.get_required_config()
            )
    except AttributeError:
        # there is no subparser
        pass
    return required_config
because of the existence of subparsers, the configman options that correspond with argparse arguments are not a constant. We need to produce a copy of the namespace rather than the actual embedded namespace.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/def_sources/for_argparse.py#L226-L257
mozilla/configman
configman/def_sources/for_argparse.py
ArgumentParser.add_argument
def add_argument(self, *args, **kwargs):
    """this method overrides the standard in order to create a parallel
    argument system in both the argparse and configman worlds.  Each call
    to this method returns a standard argparse Action object as well as
    adding an equivalent configman Option object to the required_config
    for this object.  The original args & kwargs that defined an argparse
    argument are preserved in the 'foreign_data' section of the
    corresponding configman Option."""
    # pull out each of the argument definition components from the args
    # so that we can deal with them one at a time in a well labeled
    # manner.  In this section, variables beginning with the prefix
    # "argparse" are values that define the Action object.  Variables
    # that begin with "configman" are the arguments to create configman
    # Options.
    argparse_action_name = kwargs.get('action', None)
    argparse_dest = kwargs.get('dest', None)
    argparse_const = kwargs.get('const', None)
    argparse_default = kwargs.get('default', None)
    if argparse_default is argparse.SUPPRESS:
        # we'll be forcing all options to have the attribute of
        # argparse.SUPPRESS later.  It's our way of making sure that
        # argparse returns only values that the user explicitly added to
        # the command line.
        argparse_default = None
    argparse_nargs = kwargs.get('nargs', None)
    argparse_type = kwargs.get('type', None)
    argparse_suppress_help = kwargs.pop('suppress_help', False)
    if argparse_suppress_help:
        # keep the help text for configman's own documentation while
        # hiding the argument from argparse's --help output
        configman_doc = kwargs.get('help', '')
        kwargs['help'] = argparse.SUPPRESS
    else:
        argparse_help = kwargs.get('help', '')
        if argparse_help == argparse.SUPPRESS:
            configman_doc = ''
        else:
            configman_doc = argparse_help
    # we need to make sure that all arguments that the user has not
    # explicitly set on the command line have this attribute.  This means
    # that when the argparse parser returns the command line values, it
    # will not return values that the user did not mention on the command
    # line.  The defaults that otherwise would have been returned will be
    # handled by configman.
    kwargs['default'] = argparse.SUPPRESS
    # forward all parameters to the underlying base class to create a
    # normal argparse action object.
    an_action = super(ArgumentParser, self).add_argument(
        *args,
        **kwargs
    )
    argparse_option_strings = an_action.option_strings
    # get a human readable string that identifies the type of the argparse
    # action class that was created
    if argparse_action_name is None:
        argparse_action_name = find_action_name_by_value(
            self._optionals._registries,
            an_action
        )
    configman_is_argument = False
    # each of argparse's Action types must be handled separately.
    #--------------------------------------------------------------------
    # STORE
    if argparse_action_name == 'store':
        if argparse_dest is None:
            configman_name, configman_is_argument = self._get_option_name(
                args
            )
            if not configman_name:
                configman_name = args[0]
        else:
            configman_name = argparse_dest
            configman_is_argument = not argparse_option_strings
        configman_default = argparse_default
        # pick a from-string converter based on nargs / type / default
        if argparse_nargs and argparse_nargs in "1?":
            if argparse_type:
                configman_from_string = argparse_type
            elif argparse_default:
                configman_from_string = (
                    str_to_instance_of_type_converters.get(
                        type(argparse_default),
                        str
                    )
                )
            else:
                configman_from_string = str
        elif argparse_nargs and argparse_type:
            configman_from_string = partial(
                str_to_list,
                item_converter=argparse_type,
                item_separator=' ',
            )
        elif argparse_nargs and argparse_default:
            configman_from_string = partial(
                str_to_list,
                item_converter=str_to_instance_of_type_converters.get(
                    type(argparse_default),
                    str
                ),
                item_separator=' ',
            )
        elif argparse_nargs:
            configman_from_string = partial(
                str_to_list,
                item_converter=str,
                item_separator=' ',
            )
        elif argparse_type:
            configman_from_string = argparse_type
        elif argparse_default:
            configman_from_string = str_to_instance_of_type_converters.get(
                type(argparse_default),
                str
            )
        else:
            configman_from_string = str
        configman_to_string = to_str
    #--------------------------------------------------------------------
    # STORE_CONST
    elif (
        argparse_action_name == 'store_const'
        or argparse_action_name == 'count'
    ):
        if argparse_dest is None:
            configman_name, configman_is_argument = self._get_option_name(
                args
            )
            if not configman_name:
                configman_name = args[0]
        else:
            configman_name = argparse_dest
        configman_default = argparse_default
        if argparse_type:
            configman_from_string = argparse_type
        else:
            configman_from_string = str_to_instance_of_type_converters.get(
                type(argparse_const),
                str
            )
        configman_to_string = to_str
    #--------------------------------------------------------------------
    # STORE_TRUE / STORE_FALSE
    elif (
        argparse_action_name == 'store_true'
        or argparse_action_name == 'store_false'
    ):
        if argparse_dest is None:
            configman_name, configman_is_argument = self._get_option_name(
                args
            )
            if not configman_name:
                configman_name = args[0]
        else:
            configman_name = argparse_dest
        configman_default = argparse_default
        configman_from_string = boolean_converter
        configman_to_string = to_str
    #--------------------------------------------------------------------
    # APPEND
    elif argparse_action_name == 'append':
        if argparse_dest is None:
            configman_name, configman_is_argument = self._get_option_name(
                args
            )
            if not configman_name:
                configman_name = args[0]
        else:
            configman_name = argparse_dest
        configman_default = argparse_default
        if argparse_type:
            configman_from_string = argparse_type
        else:
            configman_from_string = str
        configman_to_string = to_str
    #--------------------------------------------------------------------
    # APPEND_CONST
    elif argparse_action_name == 'append_const':
        if argparse_dest is None:
            configman_name, configman_is_argument = self._get_option_name(
                args
            )
            if not configman_name:
                configman_name = args[0]
        else:
            configman_name = argparse_dest
        configman_default = argparse_default
        if argparse_type:
            configman_from_string = argparse_type
        else:
            configman_from_string = str_to_instance_of_type_converters.get(
                type(argparse_const),
                str
            )
        configman_to_string = to_str
    #--------------------------------------------------------------------
    # VERSION
    elif argparse_action_name == 'version':
        # version actions need no configman counterpart
        return an_action
    #--------------------------------------------------------------------
    # OTHER
    else:
        configman_name = argparse_dest
    # configman uses the switch name as the name of the key in which to
    # store values.  argparse is able to use different names for each.
    # this means that configman may encounter repeated targets.  Rather
    # than overwriting Options with new ones with the same name, configman
    # renames them by appending the '$' character.
    while configman_name in self.required_config:
        configman_name = "%s$" % configman_name
    configman_not_for_definition = configman_name.endswith('$')
    # it's finally time to create the configman Option object and add it
    # to the required_config.
    self.required_config.add_option(
        name=configman_name,
        default=configman_default,
        doc=configman_doc,
        from_string_converter=configman_from_string,
        to_string_converter=configman_to_string,
        #short_form=configman_short_form,
        is_argument=configman_is_argument,
        not_for_definition=configman_not_for_definition,
        # we're going to save the args & kwargs that created the
        # argparse Action.  This enables us to perfectly reproduce the
        # original Action object later during the configman overlay
        # process.
        foreign_data=DotDict({
            'argparse.flags.subcommand': False,
            'argparse.args': args,
            'argparse.kwargs': kwargs,
            'argparse.owning_subparser_name': self.subparser_name,
        })
    )
    return an_action
python
def add_argument(self, *args, **kwargs):
    """this method overrides the standard in order to create a parallel
    argument system in both the argparse and configman worlds.  Each call
    to this method returns a standard argparse Action object as well as
    adding an equivalent configman Option object to the required_config
    for this object.  The original args & kwargs that defined an argparse
    argument are preserved in the 'foreign_data' section of the
    corresponding configman Option."""
    # pull out each of the argument definition components from the args
    # so that we can deal with them one at a time in a well labeled
    # manner.  In this section, variables beginning with the prefix
    # "argparse" are values that define the Action object.  Variables
    # that begin with "configman" are the arguments to create configman
    # Options.
    argparse_action_name = kwargs.get('action', None)
    argparse_dest = kwargs.get('dest', None)
    argparse_const = kwargs.get('const', None)
    argparse_default = kwargs.get('default', None)
    if argparse_default is argparse.SUPPRESS:
        # we'll be forcing all options to have the attribute of
        # argparse.SUPPRESS later.  It's our way of making sure that
        # argparse returns only values that the user explicitly added to
        # the command line.
        argparse_default = None
    argparse_nargs = kwargs.get('nargs', None)
    argparse_type = kwargs.get('type', None)
    argparse_suppress_help = kwargs.pop('suppress_help', False)
    if argparse_suppress_help:
        # keep the help text for configman's own documentation while
        # hiding the argument from argparse's --help output
        configman_doc = kwargs.get('help', '')
        kwargs['help'] = argparse.SUPPRESS
    else:
        argparse_help = kwargs.get('help', '')
        if argparse_help == argparse.SUPPRESS:
            configman_doc = ''
        else:
            configman_doc = argparse_help
    # we need to make sure that all arguments that the user has not
    # explicitly set on the command line have this attribute.  This means
    # that when the argparse parser returns the command line values, it
    # will not return values that the user did not mention on the command
    # line.  The defaults that otherwise would have been returned will be
    # handled by configman.
    kwargs['default'] = argparse.SUPPRESS
    # forward all parameters to the underlying base class to create a
    # normal argparse action object.
    an_action = super(ArgumentParser, self).add_argument(
        *args,
        **kwargs
    )
    argparse_option_strings = an_action.option_strings
    # get a human readable string that identifies the type of the argparse
    # action class that was created
    if argparse_action_name is None:
        argparse_action_name = find_action_name_by_value(
            self._optionals._registries,
            an_action
        )
    configman_is_argument = False
    # each of argparse's Action types must be handled separately.
    #--------------------------------------------------------------------
    # STORE
    if argparse_action_name == 'store':
        if argparse_dest is None:
            configman_name, configman_is_argument = self._get_option_name(
                args
            )
            if not configman_name:
                configman_name = args[0]
        else:
            configman_name = argparse_dest
            configman_is_argument = not argparse_option_strings
        configman_default = argparse_default
        # pick a from-string converter based on nargs / type / default
        if argparse_nargs and argparse_nargs in "1?":
            if argparse_type:
                configman_from_string = argparse_type
            elif argparse_default:
                configman_from_string = (
                    str_to_instance_of_type_converters.get(
                        type(argparse_default),
                        str
                    )
                )
            else:
                configman_from_string = str
        elif argparse_nargs and argparse_type:
            configman_from_string = partial(
                str_to_list,
                item_converter=argparse_type,
                item_separator=' ',
            )
        elif argparse_nargs and argparse_default:
            configman_from_string = partial(
                str_to_list,
                item_converter=str_to_instance_of_type_converters.get(
                    type(argparse_default),
                    str
                ),
                item_separator=' ',
            )
        elif argparse_nargs:
            configman_from_string = partial(
                str_to_list,
                item_converter=str,
                item_separator=' ',
            )
        elif argparse_type:
            configman_from_string = argparse_type
        elif argparse_default:
            configman_from_string = str_to_instance_of_type_converters.get(
                type(argparse_default),
                str
            )
        else:
            configman_from_string = str
        configman_to_string = to_str
    #--------------------------------------------------------------------
    # STORE_CONST
    elif (
        argparse_action_name == 'store_const'
        or argparse_action_name == 'count'
    ):
        if argparse_dest is None:
            configman_name, configman_is_argument = self._get_option_name(
                args
            )
            if not configman_name:
                configman_name = args[0]
        else:
            configman_name = argparse_dest
        configman_default = argparse_default
        if argparse_type:
            configman_from_string = argparse_type
        else:
            configman_from_string = str_to_instance_of_type_converters.get(
                type(argparse_const),
                str
            )
        configman_to_string = to_str
    #--------------------------------------------------------------------
    # STORE_TRUE / STORE_FALSE
    elif (
        argparse_action_name == 'store_true'
        or argparse_action_name == 'store_false'
    ):
        if argparse_dest is None:
            configman_name, configman_is_argument = self._get_option_name(
                args
            )
            if not configman_name:
                configman_name = args[0]
        else:
            configman_name = argparse_dest
        configman_default = argparse_default
        configman_from_string = boolean_converter
        configman_to_string = to_str
    #--------------------------------------------------------------------
    # APPEND
    elif argparse_action_name == 'append':
        if argparse_dest is None:
            configman_name, configman_is_argument = self._get_option_name(
                args
            )
            if not configman_name:
                configman_name = args[0]
        else:
            configman_name = argparse_dest
        configman_default = argparse_default
        if argparse_type:
            configman_from_string = argparse_type
        else:
            configman_from_string = str
        configman_to_string = to_str
    #--------------------------------------------------------------------
    # APPEND_CONST
    elif argparse_action_name == 'append_const':
        if argparse_dest is None:
            configman_name, configman_is_argument = self._get_option_name(
                args
            )
            if not configman_name:
                configman_name = args[0]
        else:
            configman_name = argparse_dest
        configman_default = argparse_default
        if argparse_type:
            configman_from_string = argparse_type
        else:
            configman_from_string = str_to_instance_of_type_converters.get(
                type(argparse_const),
                str
            )
        configman_to_string = to_str
    #--------------------------------------------------------------------
    # VERSION
    elif argparse_action_name == 'version':
        # version actions need no configman counterpart
        return an_action
    #--------------------------------------------------------------------
    # OTHER
    else:
        configman_name = argparse_dest
    # configman uses the switch name as the name of the key in which to
    # store values.  argparse is able to use different names for each.
    # this means that configman may encounter repeated targets.  Rather
    # than overwriting Options with new ones with the same name, configman
    # renames them by appending the '$' character.
    while configman_name in self.required_config:
        configman_name = "%s$" % configman_name
    configman_not_for_definition = configman_name.endswith('$')
    # it's finally time to create the configman Option object and add it
    # to the required_config.
    self.required_config.add_option(
        name=configman_name,
        default=configman_default,
        doc=configman_doc,
        from_string_converter=configman_from_string,
        to_string_converter=configman_to_string,
        #short_form=configman_short_form,
        is_argument=configman_is_argument,
        not_for_definition=configman_not_for_definition,
        # we're going to save the args & kwargs that created the
        # argparse Action.  This enables us to perfectly reproduce the
        # original Action object later during the configman overlay
        # process.
        foreign_data=DotDict({
            'argparse.flags.subcommand': False,
            'argparse.args': args,
            'argparse.kwargs': kwargs,
            'argparse.owning_subparser_name': self.subparser_name,
        })
    )
    return an_action
this method overrides the standard in order to create a parallel argument system in both the argparse and configman worlds. Each call to this method returns a standard argparse Action object as well as adding an equivalent configman Option object to the required_config for this object. The original args & kwargs that defined an argparse argument are preserved in the 'foreign_data' section of the corresponding configman Option.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/def_sources/for_argparse.py#L260-L501
mozilla/configman
configman/def_sources/for_argparse.py
ArgumentParser.add_subparsers
def add_subparsers(self, *args, **kwargs):
    """When adding a subparser, we need to ensure that our version of the
    SubparserAction object is returned.  We also need to create the
    corresponding configman Option object for the subparser and pack its
    foreign data section with the original args & kwargs."""
    # force the base class to build subparsers with our parser class and
    # our configman-aware subparser action
    kwargs['parser_class'] = self.__class__
    kwargs['action'] = ConfigmanSubParsersAction
    subparser_action = super(ArgumentParser, self).add_subparsers(
        *args,
        **kwargs
    )
    self._argparse_subparsers = subparser_action
    # default the destination key used to hold the chosen subcommand
    if "dest" not in kwargs or kwargs['dest'] is None:
        kwargs['dest'] = 'subcommand'
    configman_name = kwargs['dest']
    configman_default = None
    configman_doc = kwargs.get('help', '')
    subprocessor_from_string_converter = SubparserFromStringConverter()
    configman_to_string = str
    configman_is_argument = True
    configman_not_for_definition = True
    # it's finally time to create the configman Option object and add it
    # to the required_config.
    self.required_config.add_option(
        name=configman_name,
        default=configman_default,
        doc=configman_doc,
        from_string_converter=subprocessor_from_string_converter,
        to_string_converter=configman_to_string,
        is_argument=configman_is_argument,
        not_for_definition=configman_not_for_definition,
        # we're going to save the input parameters that created the
        # argparse Action.  This enables us to perfectly reproduce the
        # original Action object later during the configman overlay
        # process.
        foreign_data=DotDict({
            'argparse.flags.subcommand': subparser_action,
            'argparse.args': args,
            'argparse.kwargs': kwargs,
            'argparse.subparsers': DotDict(),
            'argparse.subprocessor_from_string_converter':
                subprocessor_from_string_converter
        })
    )
    self.configman_subparsers_option = self.required_config[configman_name]
    subparser_action.add_configman_option(self.configman_subparsers_option)
    return subparser_action
python
def add_subparsers(self, *args, **kwargs):
    """When adding a subparser, we need to ensure that our version of the
    SubparserAction object is returned.  We also need to create the
    corresponding configman Option object for the subparser and pack its
    foreign data section with the original args & kwargs."""
    # force the base class to build subparsers with our parser class and
    # our configman-aware subparser action
    kwargs['parser_class'] = self.__class__
    kwargs['action'] = ConfigmanSubParsersAction
    subparser_action = super(ArgumentParser, self).add_subparsers(
        *args,
        **kwargs
    )
    self._argparse_subparsers = subparser_action
    # default the destination key used to hold the chosen subcommand
    if "dest" not in kwargs or kwargs['dest'] is None:
        kwargs['dest'] = 'subcommand'
    configman_name = kwargs['dest']
    configman_default = None
    configman_doc = kwargs.get('help', '')
    subprocessor_from_string_converter = SubparserFromStringConverter()
    configman_to_string = str
    configman_is_argument = True
    configman_not_for_definition = True
    # it's finally time to create the configman Option object and add it
    # to the required_config.
    self.required_config.add_option(
        name=configman_name,
        default=configman_default,
        doc=configman_doc,
        from_string_converter=subprocessor_from_string_converter,
        to_string_converter=configman_to_string,
        is_argument=configman_is_argument,
        not_for_definition=configman_not_for_definition,
        # we're going to save the input parameters that created the
        # argparse Action.  This enables us to perfectly reproduce the
        # original Action object later during the configman overlay
        # process.
        foreign_data=DotDict({
            'argparse.flags.subcommand': subparser_action,
            'argparse.args': args,
            'argparse.kwargs': kwargs,
            'argparse.subparsers': DotDict(),
            'argparse.subprocessor_from_string_converter':
                subprocessor_from_string_converter
        })
    )
    self.configman_subparsers_option = self.required_config[configman_name]
    subparser_action.add_configman_option(self.configman_subparsers_option)
    return subparser_action
When adding a subparser, we need to ensure that our version of the SubparserAction object is returned. We also need to create the corresponding configman Option object for the subparser and pack its foreign data section with the original args & kwargs.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/def_sources/for_argparse.py#L504-L556
mozilla/configman
configman/def_sources/for_argparse.py
ArgumentParser.parse_args
def parse_args(self, args=None, namespace=None): """this method hijacks the normal argparse Namespace generation, shimming configman into the process. The return value will be a configman DotDict rather than an argparse Namespace.""" # load the config_manager within the scope of the method that uses it # so that we avoid circular references in the outer scope from configman.config_manager import ConfigurationManager configuration_manager = ConfigurationManager( definition_source=[self.get_required_config()], values_source_list=self.value_source_list, argv_source=args, app_name=self.prog, app_version=self.version, app_description=self.description, use_auto_help=False, ) # it is apparent a common idiom that commandline options may have # embedded '-' characters in them. Configman requires that option # follow the Python Identifier rules. Fortunately, Configman has a # class that will perform dynamic translation of keys. In this # code fragment, we fetch the final configuration from configman # using a Mapping that will translate keys with '-' into keys with # '_' instead. conf = configuration_manager.get_config( mapping_class=create_key_translating_dot_dict( "HyphenUnderscoreDict", (('-', '_'),) ) ) # here is where we add the values given to "set_defaults" method # of argparse. if self.configman_subparsers_option: subparser_name = conf[self.configman_subparsers_option.name] try: conf.update( self.configman_subparsers_option.foreign_data.argparse .subparsers[subparser_name].subparser .extra_defaults ) except (AttributeError, KeyError): # no extra_defaults skip on pass if hasattr(self, 'extra_defaults'): conf.update(self.extra_defaults) return conf
python
def parse_args(self, args=None, namespace=None): """this method hijacks the normal argparse Namespace generation, shimming configman into the process. The return value will be a configman DotDict rather than an argparse Namespace.""" # load the config_manager within the scope of the method that uses it # so that we avoid circular references in the outer scope from configman.config_manager import ConfigurationManager configuration_manager = ConfigurationManager( definition_source=[self.get_required_config()], values_source_list=self.value_source_list, argv_source=args, app_name=self.prog, app_version=self.version, app_description=self.description, use_auto_help=False, ) # it is apparent a common idiom that commandline options may have # embedded '-' characters in them. Configman requires that option # follow the Python Identifier rules. Fortunately, Configman has a # class that will perform dynamic translation of keys. In this # code fragment, we fetch the final configuration from configman # using a Mapping that will translate keys with '-' into keys with # '_' instead. conf = configuration_manager.get_config( mapping_class=create_key_translating_dot_dict( "HyphenUnderscoreDict", (('-', '_'),) ) ) # here is where we add the values given to "set_defaults" method # of argparse. if self.configman_subparsers_option: subparser_name = conf[self.configman_subparsers_option.name] try: conf.update( self.configman_subparsers_option.foreign_data.argparse .subparsers[subparser_name].subparser .extra_defaults ) except (AttributeError, KeyError): # no extra_defaults skip on pass if hasattr(self, 'extra_defaults'): conf.update(self.extra_defaults) return conf
this method hijacks the normal argparse Namespace generation, shimming configman into the process. The return value will be a configman DotDict rather than an argparse Namespace.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/def_sources/for_argparse.py#L566-L615
mozilla/configman
configman/def_sources/for_argparse.py
ArgumentParser.parse_known_args
def parse_known_args(self, args=None, namespace=None):
    """Shim configman into argparse's parsing step.

    Instead of an argparse Namespace, the return value is a configman
    DotDict produced by a ConfigurationManager built from this parser's
    option definitions."""
    # the import lives here rather than at module scope to avoid a
    # circular reference between configman and this module
    from configman.config_manager import ConfigurationManager
    manager = ConfigurationManager(
        definition_source=[self.get_required_config()],
        values_source_list=self.value_source_list,
        argv_source=args,
        app_name=self.prog,
        app_version=self.version,
        app_description=self.description,
        use_auto_help=False,
    )
    # fetch the final configuration through a mapping that translates
    # '-' in keys to '_' so option names stay valid Python identifiers
    translating_mapping = create_key_translating_dot_dict(
        "HyphenUnderscoreDict",
        (('-', '_'),)
    )
    return manager.get_config(mapping_class=translating_mapping)
python
def parse_known_args(self, args=None, namespace=None):
    """Shim configman into argparse's parsing step.

    Instead of an argparse Namespace, the return value is a configman
    DotDict produced by a ConfigurationManager built from this parser's
    option definitions."""
    # the import lives here rather than at module scope to avoid a
    # circular reference between configman and this module
    from configman.config_manager import ConfigurationManager
    manager = ConfigurationManager(
        definition_source=[self.get_required_config()],
        values_source_list=self.value_source_list,
        argv_source=args,
        app_name=self.prog,
        app_version=self.version,
        app_description=self.description,
        use_auto_help=False,
    )
    # fetch the final configuration through a mapping that translates
    # '-' in keys to '_' so option names stay valid Python identifiers
    translating_mapping = create_key_translating_dot_dict(
        "HyphenUnderscoreDict",
        (('-', '_'),)
    )
    return manager.get_config(mapping_class=translating_mapping)
this method hijacks the normal argparse Namespace generation, shimming configman into the process. The return value will be a configman DotDict rather than an argparse Namespace.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/def_sources/for_argparse.py#L618-L640
mozilla/configman
configman/value_sources/for_modules.py
sequence_to_string
def sequence_to_string(
    a_list,
    open_bracket_char='[',
    close_bracket_char=']',
    delimiter=", "
):
    """Render *a_list* as a delimited string of converted items wrapped
    in the given bracket characters.  Items are converted with
    local_to_str."""
    rendered_items = delimiter.join(local_to_str(item) for item in a_list)
    return "{0}{1}{2}".format(
        open_bracket_char,
        rendered_items,
        close_bracket_char
    )
python
def sequence_to_string(
    a_list,
    open_bracket_char='[',
    close_bracket_char=']',
    delimiter=", "
):
    """Render *a_list* as a delimited string of converted items wrapped
    in the given bracket characters.  Items are converted with
    local_to_str."""
    rendered_items = delimiter.join(local_to_str(item) for item in a_list)
    return "{0}{1}{2}".format(
        open_bracket_char,
        rendered_items,
        close_bracket_char
    )
a dedicated function that turns a list into a comma delimited string of items converted. This method will flatten nested lists.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/value_sources/for_modules.py#L61-L76
mozilla/configman
configman/value_sources/for_modules.py
get_import_for_type
def get_import_for_type(t):
    """Split a type's printable name into a (module-path, type-name)
    pair suitable for building an import statement.  Builtins and
    non-identifier expansions yield (None, None)."""
    name = to_str(t)
    if not is_identifier(name):
        # the string form expanded into something other than a single
        # identifier.  This happens for values like the
        # configman.converter.str_to_classes_in_namespaces
        # InnerClassList - such values are safely ignored here.
        return (None, None)
    if '.' not in name:
        # an undotted name: either a builtin we already know about, or a
        # bare type name with no module path
        if name in known_mapping_str_to_type:
            return (None, None)
        return (None, name)
    module_path, _, type_name = name.rpartition('.')
    return (module_path, type_name)
python
def get_import_for_type(t): """given a type, return a tuple of the (module-path, type_name) or (None, None) if it is a built in.""" t_as_string = to_str(t) if not is_identifier(t_as_string): # this class expanded into something other than a single identifier # we can ignore it. This is the case when we encounter something # like the configman.converter.str_to_classes_in_namespaces # InnerClassList. We can safely ignore these things here. return (None, None) if '.' in t_as_string: parts = t_as_string.split('.') return ('.'.join(parts[:-1]), parts[-1]) else: if t_as_string in known_mapping_str_to_type: return (None, None) return (None, t_as_string)
given a type, return a tuple of the (module-path, type_name) or (None, None) if it is a built in.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/value_sources/for_modules.py#L145-L161
mozilla/configman
configman/value_sources/for_modules.py
ValueSource.write
def write(source_mapping, output_stream=sys.stdout): """This method writes a Python module respresenting all the keys and values known to configman. """ # a set of classes, modules and/or functions that are values in # configman options. These values will have to be imported in the # module that this method is writing. set_of_classes_needing_imports = set() # once symbols are imported, they are in the namespace of the module, # but that's not where we want them. We only want them to be values # in configman Options. This set will be used to make a list of # these symbols, to forewarn a future configman that reads this # module, that it can ignore these symbols. This will prevent that # future configman from issuing a "mismatced symbols" warinng. symbols_to_ignore = set() # look ahead to see what sort of imports we're going to have to do for key in source_mapping.keys_breadth_first(): value = source_mapping[key] if isinstance(value, Aggregation): # Aggregations don't get included, skip on continue if '.' in key: # this indicates that there are things in nested namespaces, # we will use the DotDict class to represent namespaces set_of_classes_needing_imports.add(DotDict) option = None if isinstance(value, Option): # it's the value inside the option, not the option itself # that is of interest to us option = value value = option.value if value is None: # we don't need in import anything having to do with None continue if isclass(value) or ismodule(value) or isfunction(value): # we know we need to import any of these types set_of_classes_needing_imports.add(value) else: try: # perhaps the value is an instance of a class? 
If so, # we'll likely need to import that class, but only if # we don't have a way to convert a string to that class set_of_classes_needing_imports.add(value.__class__) except AttributeError: # it's not a class instance, we can skip on pass # for everyone of the imports that we're going to have to create # we need to know the dotted module pathname and the name of the # of the class/module/function. This routine make a list of 3-tuples # class, dotted_module_path, class_name class_and_module_path_and_class_name = [] for a_class in set_of_classes_needing_imports: module_path, class_name = get_import_for_type(a_class) if (not module_path) and (not class_name): continue class_and_module_path_and_class_name.append( (a_class, module_path, class_name) ) # using the collection of 3-tuples, create a lookup mapping where a # class is the key to a 2-tuple of the dotted_module_path & class_name. # This is also the appropriate time to detect any name collisions # and create a mapping of aliases, so we can resolve name collisions. 
class_name_by_module_path_list = defaultdict(list) alias_by_class = {} previously_used_names = set() for ( a_class, a_module_path, class_name ) in class_and_module_path_and_class_name: if class_name: if class_name in previously_used_names: new_class_name_alias = "%s_%s" % ( a_module_path.replace('.', '_'), class_name ) alias_by_class[a_class] = new_class_name_alias previously_used_names.add(new_class_name_alias) else: previously_used_names.add(class_name) class_name_by_module_path_list[a_module_path].append( (a_class, class_name) ) # start writing the output module print("# generated Python configman file\n", file=output_stream) # the first section that we're going to write is imports of the form: # from X import Y # and # from X import ( # A, # B, # ) sorted_list = [x.value for x in sorted([OrderableObj(x) for x in class_name_by_module_path_list.keys()])] for a_module_path in sorted_list: print(a_module_path) # if there is no module path, then it is something that we don't # need to import. If the module path begins with underscore then # it is private and we ought not step into that mire. If that # causes the output module to fail, it is up to the implementer # of the configman option to have created an approprate # "from_string" & "to_string" configman Option function references. 
if a_module_path is None or a_module_path.startswith('_'): continue list_of_class_names = \ class_name_by_module_path_list[a_module_path] if len(list_of_class_names) > 1: output_line = "from %s import (\n" % a_module_path sorted_list = [x.value for x in sorted([OrderableTuple(x) for x in list_of_class_names])] for a_class, a_class_name in sorted_list: if a_class in alias_by_class: output_line = "%s\n %s as %s," % ( output_line, a_class_name, alias_by_class[a_class] ) symbols_to_ignore.add(alias_by_class[a_class]) else: output_line = "%s %s,\n" % ( output_line, a_class_name ) symbols_to_ignore.add(a_class_name) output_line = output_line + ')' print(output_line.strip(), file=output_stream) else: a_class, a_class_name = list_of_class_names[0] output_line = "from %s import %s" % ( a_module_path, a_class_name ) if a_class in alias_by_class: output_line = "%s as %s" % ( output_line, alias_by_class[a_class] ) symbols_to_ignore.add(alias_by_class[a_class]) else: symbols_to_ignore.add(a_class_name) print(output_line.strip(), file=output_stream) print('', file=output_stream) # The next section to write will be the imports of the form: # import X sorted_list = [x.value for x in sorted([OrderableObj(x) for x in class_name_by_module_path_list.keys()])] for a_module_path in sorted_list: list_of_class_names = \ class_name_by_module_path_list[a_module_path] a_class, a_class_name = list_of_class_names[0] if a_module_path: continue import_str = ("import %s" % a_class_name).strip() symbols_to_ignore.add(a_class_name) print(import_str, file=output_stream) # See the explanation of 'symbols_to_ignore' above if symbols_to_ignore: print( "\n" \ "# the following symbols will be ignored by configman when\n" \ "# this module is used as a value source. 
This will\n" \ "# suppress the mismatch warning since these symbols are\n" \ "# values for options, not option names themselves.", file=output_stream ) print("ignore_symbol_list = [", file=output_stream) for a_symbol in sorted(symbols_to_ignore): print(' "%s",' % a_symbol, file=output_stream) print(']\n', file=output_stream) # finally, as the last step, we need to write out the keys and values # will be used by a future configman as Options and values. sorted_keys = sorted( source_mapping.keys_breadth_first(include_dicts=True) ) for key in sorted_keys: value = source_mapping[key] if isinstance(value, Namespace): ValueSource.write_namespace(key, value, output_stream) elif isinstance(value, Option): ValueSource.write_option( key, value, alias_by_class, output_stream ) elif isinstance(value, Aggregation): # skip Aggregations continue else: ValueSource.write_bare_value(key, value, output_stream)
python
def write(source_mapping, output_stream=sys.stdout): """This method writes a Python module respresenting all the keys and values known to configman. """ # a set of classes, modules and/or functions that are values in # configman options. These values will have to be imported in the # module that this method is writing. set_of_classes_needing_imports = set() # once symbols are imported, they are in the namespace of the module, # but that's not where we want them. We only want them to be values # in configman Options. This set will be used to make a list of # these symbols, to forewarn a future configman that reads this # module, that it can ignore these symbols. This will prevent that # future configman from issuing a "mismatced symbols" warinng. symbols_to_ignore = set() # look ahead to see what sort of imports we're going to have to do for key in source_mapping.keys_breadth_first(): value = source_mapping[key] if isinstance(value, Aggregation): # Aggregations don't get included, skip on continue if '.' in key: # this indicates that there are things in nested namespaces, # we will use the DotDict class to represent namespaces set_of_classes_needing_imports.add(DotDict) option = None if isinstance(value, Option): # it's the value inside the option, not the option itself # that is of interest to us option = value value = option.value if value is None: # we don't need in import anything having to do with None continue if isclass(value) or ismodule(value) or isfunction(value): # we know we need to import any of these types set_of_classes_needing_imports.add(value) else: try: # perhaps the value is an instance of a class? 
If so, # we'll likely need to import that class, but only if # we don't have a way to convert a string to that class set_of_classes_needing_imports.add(value.__class__) except AttributeError: # it's not a class instance, we can skip on pass # for everyone of the imports that we're going to have to create # we need to know the dotted module pathname and the name of the # of the class/module/function. This routine make a list of 3-tuples # class, dotted_module_path, class_name class_and_module_path_and_class_name = [] for a_class in set_of_classes_needing_imports: module_path, class_name = get_import_for_type(a_class) if (not module_path) and (not class_name): continue class_and_module_path_and_class_name.append( (a_class, module_path, class_name) ) # using the collection of 3-tuples, create a lookup mapping where a # class is the key to a 2-tuple of the dotted_module_path & class_name. # This is also the appropriate time to detect any name collisions # and create a mapping of aliases, so we can resolve name collisions. 
class_name_by_module_path_list = defaultdict(list) alias_by_class = {} previously_used_names = set() for ( a_class, a_module_path, class_name ) in class_and_module_path_and_class_name: if class_name: if class_name in previously_used_names: new_class_name_alias = "%s_%s" % ( a_module_path.replace('.', '_'), class_name ) alias_by_class[a_class] = new_class_name_alias previously_used_names.add(new_class_name_alias) else: previously_used_names.add(class_name) class_name_by_module_path_list[a_module_path].append( (a_class, class_name) ) # start writing the output module print("# generated Python configman file\n", file=output_stream) # the first section that we're going to write is imports of the form: # from X import Y # and # from X import ( # A, # B, # ) sorted_list = [x.value for x in sorted([OrderableObj(x) for x in class_name_by_module_path_list.keys()])] for a_module_path in sorted_list: print(a_module_path) # if there is no module path, then it is something that we don't # need to import. If the module path begins with underscore then # it is private and we ought not step into that mire. If that # causes the output module to fail, it is up to the implementer # of the configman option to have created an approprate # "from_string" & "to_string" configman Option function references. 
if a_module_path is None or a_module_path.startswith('_'): continue list_of_class_names = \ class_name_by_module_path_list[a_module_path] if len(list_of_class_names) > 1: output_line = "from %s import (\n" % a_module_path sorted_list = [x.value for x in sorted([OrderableTuple(x) for x in list_of_class_names])] for a_class, a_class_name in sorted_list: if a_class in alias_by_class: output_line = "%s\n %s as %s," % ( output_line, a_class_name, alias_by_class[a_class] ) symbols_to_ignore.add(alias_by_class[a_class]) else: output_line = "%s %s,\n" % ( output_line, a_class_name ) symbols_to_ignore.add(a_class_name) output_line = output_line + ')' print(output_line.strip(), file=output_stream) else: a_class, a_class_name = list_of_class_names[0] output_line = "from %s import %s" % ( a_module_path, a_class_name ) if a_class in alias_by_class: output_line = "%s as %s" % ( output_line, alias_by_class[a_class] ) symbols_to_ignore.add(alias_by_class[a_class]) else: symbols_to_ignore.add(a_class_name) print(output_line.strip(), file=output_stream) print('', file=output_stream) # The next section to write will be the imports of the form: # import X sorted_list = [x.value for x in sorted([OrderableObj(x) for x in class_name_by_module_path_list.keys()])] for a_module_path in sorted_list: list_of_class_names = \ class_name_by_module_path_list[a_module_path] a_class, a_class_name = list_of_class_names[0] if a_module_path: continue import_str = ("import %s" % a_class_name).strip() symbols_to_ignore.add(a_class_name) print(import_str, file=output_stream) # See the explanation of 'symbols_to_ignore' above if symbols_to_ignore: print( "\n" \ "# the following symbols will be ignored by configman when\n" \ "# this module is used as a value source. 
This will\n" \ "# suppress the mismatch warning since these symbols are\n" \ "# values for options, not option names themselves.", file=output_stream ) print("ignore_symbol_list = [", file=output_stream) for a_symbol in sorted(symbols_to_ignore): print(' "%s",' % a_symbol, file=output_stream) print(']\n', file=output_stream) # finally, as the last step, we need to write out the keys and values # will be used by a future configman as Options and values. sorted_keys = sorted( source_mapping.keys_breadth_first(include_dicts=True) ) for key in sorted_keys: value = source_mapping[key] if isinstance(value, Namespace): ValueSource.write_namespace(key, value, output_stream) elif isinstance(value, Option): ValueSource.write_option( key, value, alias_by_class, output_stream ) elif isinstance(value, Aggregation): # skip Aggregations continue else: ValueSource.write_bare_value(key, value, output_stream)
This method writes a Python module respresenting all the keys and values known to configman.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/value_sources/for_modules.py#L329-L532
mozilla/configman
configman/datetime_util.py
datetime_from_ISO_string
def datetime_from_ISO_string(s): """ Take an ISO date string of the form YYYY-MM-DDTHH:MM:SS.S and convert it into an instance of datetime.datetime """ try: return datetime.datetime.strptime(s, '%Y-%m-%dT%H:%M:%S') except ValueError: try: return datetime.datetime.strptime(s, '%Y-%m-%d') except ValueError: return datetime.datetime.strptime(s, '%Y-%m-%dT%H:%M:%S.%f')
python
def datetime_from_ISO_string(s): """ Take an ISO date string of the form YYYY-MM-DDTHH:MM:SS.S and convert it into an instance of datetime.datetime """ try: return datetime.datetime.strptime(s, '%Y-%m-%dT%H:%M:%S') except ValueError: try: return datetime.datetime.strptime(s, '%Y-%m-%d') except ValueError: return datetime.datetime.strptime(s, '%Y-%m-%dT%H:%M:%S.%f')
Take an ISO date string of the form YYYY-MM-DDTHH:MM:SS.S and convert it into an instance of datetime.datetime
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/datetime_util.py#L9-L19
mozilla/configman
configman/datetime_util.py
str_to_timedelta
def str_to_timedelta(input_str): """ a string conversion function for timedelta for strings in the format DD:HH:MM:SS or D HH:MM:SS """ try: input_str = input_str.replace(' ', ':') except (TypeError, AttributeError): from configman.converters import to_str raise TypeError('%s should have been a string' % to_str(input_str)) days, hours, minutes, seconds = 0, 0, 0, 0 details = input_str.split(':') if len(details) >= 4: days = int(details[-4]) if len(details) >= 3: hours = int(details[-3]) if len(details) >= 2: minutes = int(details[-2]) if len(details) >= 1: seconds = int(details[-1]) return datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)
python
def str_to_timedelta(input_str): """ a string conversion function for timedelta for strings in the format DD:HH:MM:SS or D HH:MM:SS """ try: input_str = input_str.replace(' ', ':') except (TypeError, AttributeError): from configman.converters import to_str raise TypeError('%s should have been a string' % to_str(input_str)) days, hours, minutes, seconds = 0, 0, 0, 0 details = input_str.split(':') if len(details) >= 4: days = int(details[-4]) if len(details) >= 3: hours = int(details[-3]) if len(details) >= 2: minutes = int(details[-2]) if len(details) >= 1: seconds = int(details[-1]) return datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds)
a string conversion function for timedelta for strings in the format DD:HH:MM:SS or D HH:MM:SS
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/datetime_util.py#L49-L71
mozilla/configman
configman/datetime_util.py
timedelta_to_str
def timedelta_to_str(aTimedelta): """ a conversion function for time deltas to string in the form DD:HH:MM:SS """ days = aTimedelta.days temp_seconds = aTimedelta.seconds hours = int(temp_seconds / 3600) minutes = int((temp_seconds - hours * 3600) / 60) seconds = temp_seconds - hours * 3600 - minutes * 60 return '%d %02d:%02d:%02d' % (days, hours, minutes, seconds)
python
def timedelta_to_str(aTimedelta): """ a conversion function for time deltas to string in the form DD:HH:MM:SS """ days = aTimedelta.days temp_seconds = aTimedelta.seconds hours = int(temp_seconds / 3600) minutes = int((temp_seconds - hours * 3600) / 60) seconds = temp_seconds - hours * 3600 - minutes * 60 return '%d %02d:%02d:%02d' % (days, hours, minutes, seconds)
a conversion function for time deltas to string in the form DD:HH:MM:SS
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/datetime_util.py#L74-L83
Chris7/django-djangui
djangui/backend/ast/source_parser.py
parse_source_file
def parse_source_file(file_name): """ Parses the AST of Python file for lines containing references to the argparse module. returns the collection of ast objects found. Example client code: 1. parser = ArgumentParser(desc="My help Message") 2. parser.add_argument('filename', help="Name of the file to load") 3. parser.add_argument('-f', '--format', help='Format of output \nOptions: ['md', 'html'] 4. args = parser.parse_args() Variables: * nodes Primary syntax tree object * argparse_assignments The assignment of the ArgumentParser (line 1 in example code) * add_arg_assignments Calls to add_argument() (lines 2-3 in example code) * parser_var_name The instance variable of the ArgumentParser (line 1 in example code) * ast_source The curated collection of all parser related nodes in the client code """ with open(file_name, 'r') as f: s = f.read() nodes = ast.parse(s) module_imports = get_nodes_by_instance_type(nodes, _ast.Import) specific_imports = get_nodes_by_instance_type(nodes, _ast.ImportFrom) assignment_objs = get_nodes_by_instance_type(nodes, _ast.Assign) call_objects = get_nodes_by_instance_type(nodes, _ast.Call) argparse_assignments = get_nodes_by_containing_attr(assignment_objs, 'ArgumentParser') group_arg_assignments = get_nodes_by_containing_attr(assignment_objs, 'add_argument_group') add_arg_assignments = get_nodes_by_containing_attr(call_objects, 'add_argument') parse_args_assignment = get_nodes_by_containing_attr(call_objects, 'parse_args') # there are cases where we have custom argparsers, such as subclassing ArgumentParser. The above # will fail on this. 
However, we can use the methods known to ArgumentParser to do a duck-type like # approach to finding what is the arg parser if not argparse_assignments: aa_references = set([i.func.value.id for i in chain(add_arg_assignments, parse_args_assignment)]) argparse_like_objects = [getattr(i.value.func, 'id', None) for p_ref in aa_references for i in get_nodes_by_containing_attr(assignment_objs, p_ref)] argparse_like_objects = filter(None, argparse_like_objects) argparse_assignments = [get_nodes_by_containing_attr(assignment_objs, i) for i in argparse_like_objects] # for now, we just choose one try: argparse_assignments = argparse_assignments[0] except IndexError: pass # get things that are assigned inside ArgumentParser or its methods argparse_assigned_variables = get_node_args_and_keywords(assignment_objs, argparse_assignments, 'ArgumentParser') add_arg_assigned_variables = get_node_args_and_keywords(assignment_objs, add_arg_assignments, 'add_argument') parse_args_assigned_variables = get_node_args_and_keywords(assignment_objs, parse_args_assignment, 'parse_args') ast_argparse_source = chain( module_imports, specific_imports, argparse_assigned_variables, add_arg_assigned_variables, parse_args_assigned_variables, argparse_assignments, group_arg_assignments, add_arg_assignments, ) return ast_argparse_source
python
def parse_source_file(file_name): """ Parses the AST of Python file for lines containing references to the argparse module. returns the collection of ast objects found. Example client code: 1. parser = ArgumentParser(desc="My help Message") 2. parser.add_argument('filename', help="Name of the file to load") 3. parser.add_argument('-f', '--format', help='Format of output \nOptions: ['md', 'html'] 4. args = parser.parse_args() Variables: * nodes Primary syntax tree object * argparse_assignments The assignment of the ArgumentParser (line 1 in example code) * add_arg_assignments Calls to add_argument() (lines 2-3 in example code) * parser_var_name The instance variable of the ArgumentParser (line 1 in example code) * ast_source The curated collection of all parser related nodes in the client code """ with open(file_name, 'r') as f: s = f.read() nodes = ast.parse(s) module_imports = get_nodes_by_instance_type(nodes, _ast.Import) specific_imports = get_nodes_by_instance_type(nodes, _ast.ImportFrom) assignment_objs = get_nodes_by_instance_type(nodes, _ast.Assign) call_objects = get_nodes_by_instance_type(nodes, _ast.Call) argparse_assignments = get_nodes_by_containing_attr(assignment_objs, 'ArgumentParser') group_arg_assignments = get_nodes_by_containing_attr(assignment_objs, 'add_argument_group') add_arg_assignments = get_nodes_by_containing_attr(call_objects, 'add_argument') parse_args_assignment = get_nodes_by_containing_attr(call_objects, 'parse_args') # there are cases where we have custom argparsers, such as subclassing ArgumentParser. The above # will fail on this. 
However, we can use the methods known to ArgumentParser to do a duck-type like # approach to finding what is the arg parser if not argparse_assignments: aa_references = set([i.func.value.id for i in chain(add_arg_assignments, parse_args_assignment)]) argparse_like_objects = [getattr(i.value.func, 'id', None) for p_ref in aa_references for i in get_nodes_by_containing_attr(assignment_objs, p_ref)] argparse_like_objects = filter(None, argparse_like_objects) argparse_assignments = [get_nodes_by_containing_attr(assignment_objs, i) for i in argparse_like_objects] # for now, we just choose one try: argparse_assignments = argparse_assignments[0] except IndexError: pass # get things that are assigned inside ArgumentParser or its methods argparse_assigned_variables = get_node_args_and_keywords(assignment_objs, argparse_assignments, 'ArgumentParser') add_arg_assigned_variables = get_node_args_and_keywords(assignment_objs, add_arg_assignments, 'add_argument') parse_args_assigned_variables = get_node_args_and_keywords(assignment_objs, parse_args_assignment, 'parse_args') ast_argparse_source = chain( module_imports, specific_imports, argparse_assigned_variables, add_arg_assigned_variables, parse_args_assigned_variables, argparse_assignments, group_arg_assignments, add_arg_assignments, ) return ast_argparse_source
Parses the AST of Python file for lines containing references to the argparse module. returns the collection of ast objects found. Example client code: 1. parser = ArgumentParser(desc="My help Message") 2. parser.add_argument('filename', help="Name of the file to load") 3. parser.add_argument('-f', '--format', help='Format of output \nOptions: ['md', 'html'] 4. args = parser.parse_args() Variables: * nodes Primary syntax tree object * argparse_assignments The assignment of the ArgumentParser (line 1 in example code) * add_arg_assignments Calls to add_argument() (lines 2-3 in example code) * parser_var_name The instance variable of the ArgumentParser (line 1 in example code) * ast_source The curated collection of all parser related nodes in the client code
https://github.com/Chris7/django-djangui/blob/4e9e37ac92e128dbaa0c809e5e33618d2c96808f/djangui/backend/ast/source_parser.py#L17-L83
mozilla/configman
configman/value_sources/for_getopt.py
ValueSource.get_values
def get_values(self, config_manager, ignore_mismatches, obj_hook=DotDict): """This is the black sheep of the crowd of ValueSource implementations. It needs to know ahead of time all of the parameters that it will need, but we cannot give it. We may not know all the parameters because not all classes may have been expanded yet. The two parameters allow this ValueSource implementation to know what the parameters have already been defined. The 'ignore_mismatches' parameter tells the implementation if it can or cannot ignore extraneous commandline options. The last time this function is called, it will be required to test for illegal commandline options and respond accordingly. Unlike many of the Value sources, this method cannot be "memoized". The return result depends on an internal state within the parameter 'config_manager'. Any memoize decorator for this method would requrire capturing that internal state in the memoize cache key. """ short_options_str, long_options_list = self.getopt_create_opts( config_manager.option_definitions ) try: if ignore_mismatches: fn = ValueSource.getopt_with_ignore else: fn = getopt.gnu_getopt # here getopt looks through the command line arguments and # consumes the defined switches. 
The things that are not # consumed are then offered as the 'args' variable of the # parent configuration_manager getopt_options, config_manager.args = fn(self.argv_source, short_options_str, long_options_list) except getopt.GetoptError as x: raise NotAnOptionError(str(x)) command_line_values = obj_hook() for opt_name, opt_val in getopt_options: if opt_name.startswith('--'): name = opt_name[2:] else: name = self.find_name_with_short_form( opt_name[1:], config_manager.option_definitions, '' ) if not name: raise NotAnOptionError( '%s is not a valid short form option' % opt_name[1:] ) option_ = config_manager._get_option(name) if option_.from_string_converter == boolean_converter: command_line_values[name] = not option_.default else: command_line_values[name] = opt_val for name, value in zip( self._get_arguments( config_manager.option_definitions, command_line_values ), config_manager.args ): command_line_values[name] = value return command_line_values
python
def get_values(self, config_manager, ignore_mismatches, obj_hook=DotDict): """This is the black sheep of the crowd of ValueSource implementations. It needs to know ahead of time all of the parameters that it will need, but we cannot give it. We may not know all the parameters because not all classes may have been expanded yet. The two parameters allow this ValueSource implementation to know what the parameters have already been defined. The 'ignore_mismatches' parameter tells the implementation if it can or cannot ignore extraneous commandline options. The last time this function is called, it will be required to test for illegal commandline options and respond accordingly. Unlike many of the Value sources, this method cannot be "memoized". The return result depends on an internal state within the parameter 'config_manager'. Any memoize decorator for this method would requrire capturing that internal state in the memoize cache key. """ short_options_str, long_options_list = self.getopt_create_opts( config_manager.option_definitions ) try: if ignore_mismatches: fn = ValueSource.getopt_with_ignore else: fn = getopt.gnu_getopt # here getopt looks through the command line arguments and # consumes the defined switches. 
The things that are not # consumed are then offered as the 'args' variable of the # parent configuration_manager getopt_options, config_manager.args = fn(self.argv_source, short_options_str, long_options_list) except getopt.GetoptError as x: raise NotAnOptionError(str(x)) command_line_values = obj_hook() for opt_name, opt_val in getopt_options: if opt_name.startswith('--'): name = opt_name[2:] else: name = self.find_name_with_short_form( opt_name[1:], config_manager.option_definitions, '' ) if not name: raise NotAnOptionError( '%s is not a valid short form option' % opt_name[1:] ) option_ = config_manager._get_option(name) if option_.from_string_converter == boolean_converter: command_line_values[name] = not option_.default else: command_line_values[name] = opt_val for name, value in zip( self._get_arguments( config_manager.option_definitions, command_line_values ), config_manager.args ): command_line_values[name] = value return command_line_values
This is the black sheep of the crowd of ValueSource implementations. It needs to know ahead of time all of the parameters that it will need, but we cannot give it. We may not know all the parameters because not all classes may have been expanded yet. The two parameters allow this ValueSource implementation to know what the parameters have already been defined. The 'ignore_mismatches' parameter tells the implementation if it can or cannot ignore extraneous commandline options. The last time this function is called, it will be required to test for illegal commandline options and respond accordingly. Unlike many of the Value sources, this method cannot be "memoized". The return result depends on an internal state within the parameter 'config_manager'. Any memoize decorator for this method would requrire capturing that internal state in the memoize cache key.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/value_sources/for_getopt.py#L64-L124
mozilla/configman
configman/value_sources/for_getopt.py
ValueSource.getopt_with_ignore
def getopt_with_ignore(args, shortopts, longopts=None):
    """my_getopt(args, options[, long_options]) -> opts, args

    This function works like gnu_getopt(), except that unknown
    parameters are ignored rather than raising an error.

    An unrecognized option token is dropped; the token that follows it is
    still processed normally (so a value intended for an unknown option
    will be treated as a positional argument).

    :param args: argument list to parse (e.g. sys.argv[1:])
    :param shortopts: short-option specification string, as for getopt.getopt
    :param longopts: long option names — a list, a single string, or None
                     (None means no long options; avoids the mutable-default
                     pitfall of the original ``longopts=[]`` signature)
    :return: (opts, prog_args) — parsed (option, value) pairs and the
             remaining positional arguments
    """
    opts = []
    prog_args = []
    # Normalize longopts without mutating the caller's object.
    if longopts is None:
        longopts = []
    elif isinstance(longopts, str):
        longopts = [longopts]
    else:
        longopts = list(longopts)
    while args:
        if args[0] == '--':
            # Bare '--' terminates option parsing: the rest is positional.
            prog_args += args[1:]
            break
        if args[0].startswith('--'):
            try:
                opts, args = getopt.do_longs(
                    opts,
                    args[0][2:],
                    longopts,
                    args[1:]
                )
            except getopt.GetoptError:
                # Unknown long option: skip it instead of failing.
                args = args[1:]
        elif args[0][0] == '-':
            try:
                opts, args = getopt.do_shorts(
                    opts,
                    args[0][1:],
                    shortopts,
                    args[1:]
                )
            except getopt.GetoptError:
                # Unknown short option: skip it instead of failing.
                args = args[1:]
        else:
            prog_args.append(args[0])
            args = args[1:]
    return opts, prog_args
my_getopt(args, options[, long_options]) -> opts, args This function works like gnu_getopt(), except that unknown parameters are ignored rather than raising an error.
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/value_sources/for_getopt.py#L171-L210
mozilla/configman
configman/namespace.py
Namespace.add_option
def add_option(self, name, *args, **kwargs):
    """add an option to the namespace.

    Accepts either a string option name (remaining arguments are forwarded
    to the Option constructor) or a ready-made Option instance. Dotted
    names create the intermediate Namespace objects on demand, and the
    option is stored under the final path component.
    """
    if isinstance(name, Option):
        an_option = name
        name = an_option.name
    else:
        an_option = Option(name, *args, **kwargs)

    # Walk the dotted path, creating intermediate namespaces as needed.
    path = name.split('.')
    target = self
    for component in path[:-1]:
        if component not in target:
            target[component] = Namespace()
        target = target[component]

    an_option.name = path[-1]
    setattr(target, an_option.name, an_option)
    return an_option
add an option to the namespace. This can take two forms: 'name' is a string representing the name of an option and the kwargs are its parameters, or 'name' is an instance of an Option object
https://github.com/mozilla/configman/blob/83159fed61cc4cbbe5a4a6a00d3acad8a0c39c96/configman/namespace.py#L31-L52
Gscorreia89/pyChemometrics
pyChemometrics/ChemometricsPLS_LDA.py
ChemometricsPLS_LDA._cummulativefit
def _cummulativefit(self, x, y):
    """
    Measure the cumulative Regression sum of Squares for each individual component.

    :param x: Data matrix to fit the PLS model.
    :type x: numpy.ndarray, shape [n_samples, n_features]
    :param y: Data matrix to fit the PLS model.
    :type y: numpy.ndarray, shape [n_samples, n_features]
    :return: dictionary object containing the total Regression Sum of Squares and the Sum of Squares
    per components, for both the X and Y data blocks.
    :rtype: dict
    """
    # Coerce 1D vectors to column matrices so the scalers receive 2D input.
    if y.ndim == 1:
        y = y.reshape(-1, 1)
    if x.ndim == 1:
        x = x.reshape(-1, 1)
    # NOTE(review): fit_transform re-fits the scalers here — presumably
    # intentional so SS is measured in the scaled space of (x, y); confirm.
    xscaled = self.x_scaler.fit_transform(x)
    yscaled = self.y_scaler.fit_transform(y)

    # Total sum of squares for the whole (scaled) data blocks.
    SSX = np.sum(xscaled ** 2)
    SSY = np.sum(yscaled ** 2)

    # Residual sum of squares after fitting models with 1..ncomps components.
    # (The original initialized these lists twice; the duplicates were dead code.)
    ssx_comp = list()
    ssy_comp = list()
    for curr_comp in range(1, self.ncomps + 1):
        model = self._reduce_ncomps(curr_comp)

        # Predictions are compared in scaled space, matching SSX/SSY above.
        ypred = self.y_scaler.transform(model.predict(x, y=None))
        xpred = self.x_scaler.transform(model.predict(x=None, y=y))

        rssy = np.sum((yscaled - ypred) ** 2)
        rssx = np.sum((xscaled - xpred) ** 2)
        ssx_comp.append(rssx)
        ssy_comp.append(rssy)

    cumulative_fit = {'SSX': SSX, 'SSY': SSY, 'SSXcomp': np.array(ssx_comp),
                      'SSYcomp': np.array(ssy_comp)}

    return cumulative_fit
Measure the cumulative Regression sum of Squares for each individual component. :param x: Data matrix to fit the PLS model. :type x: numpy.ndarray, shape [n_samples, n_features] :param y: Data matrix to fit the PLS model. :type y: numpy.ndarray, shape [n_samples, n_features] :return: dictionary object containing the total Regression Sum of Squares and the Sum of Squares per components, for both the X and Y data blocks. :rtype: dict
https://github.com/Gscorreia89/pyChemometrics/blob/539f5cd719795685271faa7fb1c6d53d7dd4de19/pyChemometrics/ChemometricsPLS_LDA.py#L955-L996
Gscorreia89/pyChemometrics
pyChemometrics/PCAPlotMixin.py
PCAPlotMixin.plot_leverages
def plot_leverages(self):
    """
    Leverage (h) per observation, with a red dashed line plotted at
    y = 1/Number of samples (the expected leverage if every observation
    contributed equally).

    :return: None. Displays the bar plot of observation leverages (h).
    """
    plt.figure()
    lev = self.leverages()
    plt.xlabel('Sample Index')
    plt.ylabel('Leverage')
    # The `left=` keyword was removed from matplotlib's bar API (2.2+);
    # pass the x positions positionally instead.
    plt.bar(range(lev.size), lev)
    plt.hlines(y=1/lev.size, xmin=0, xmax=lev.size, colors='r', linestyles='--')
    plt.show()
    return None
Leverage (h) per observation, with a red line plotted at y = 1/Number of samples (expected :return: Plot with observation leverages (h)
https://github.com/Gscorreia89/pyChemometrics/blob/539f5cd719795685271faa7fb1c6d53d7dd4de19/pyChemometrics/PCAPlotMixin.py#L213-L225
Gscorreia89/pyChemometrics
pyChemometrics/ChemometricsScaler.py
_handle_zeros_in_scale
def _handle_zeros_in_scale(scale, copy=True):
    """
    Replace zero scale factors with 1.0 so later division is safe.

    Constant features yield a zero standard deviation; dividing by it
    would blow up, so those entries become 1.0 (a no-op scaling).
    Inputs that are neither scalars nor numpy arrays fall through and
    return None, matching the original behavior.
    """
    # Fitting on a 1D array can produce a single scalar scale value.
    if numpy.isscalar(scale):
        return 1. if scale == .0 else scale
    if isinstance(scale, numpy.ndarray):
        if copy:
            # Work on a copy so the caller's array is not mutated.
            scale = scale.copy()
        scale[scale == 0.0] = 1.0
        return scale
Makes sure that whenever scale is zero, we handle it correctly. This happens in most scalers when we have constant features.
https://github.com/Gscorreia89/pyChemometrics/blob/539f5cd719795685271faa7fb1c6d53d7dd4de19/pyChemometrics/ChemometricsScaler.py#L216-L232
Gscorreia89/pyChemometrics
pyChemometrics/ChemometricsScaler.py
ChemometricsScaler._reset
def _reset(self):
    """
    Drop all fitted, data-dependent attributes of the scaler.

    Parameters given to __init__ are left untouched. The four attributes
    are always assigned together in partial_fit, so testing a single one
    is enough to know whether the scaler has been fitted.
    """
    if not hasattr(self, 'scale_'):
        return
    del self.scale_
    del self.n_samples_seen_
    del self.mean_
    del self.var_
Reset internal data-dependent state of the scaler, if necessary. __init__ parameters are not touched.
https://github.com/Gscorreia89/pyChemometrics/blob/539f5cd719795685271faa7fb1c6d53d7dd4de19/pyChemometrics/ChemometricsScaler.py#L37-L50
Gscorreia89/pyChemometrics
pyChemometrics/ChemometricsScaler.py
ChemometricsScaler.fit
def fit(self, X, y=None):
    """
    Compute the mean and standard deviation from a dataset to use in future scaling operations.

    :param X: Data matrix to scale.
    :type X: numpy.ndarray, shape [n_samples, n_features]
    :param y: Passthrough for Scikit-learn ``Pipeline`` compatibility.
    :type y: None
    :return: Fitted object.
    :rtype: pyChemometrics.ChemometricsScaler
    """
    # Discard any previously fitted statistics, then fit X as one batch.
    self._reset()
    return self.partial_fit(X, y)
Compute the mean and standard deviation from a dataset to use in future scaling operations. :param X: Data matrix to scale. :type X: numpy.ndarray, shape [n_samples, n_features] :param y: Passthrough for Scikit-learn ``Pipeline`` compatibility. :type y: None :return: Fitted object. :rtype: pyChemometrics.ChemometricsScaler
https://github.com/Gscorreia89/pyChemometrics/blob/539f5cd719795685271faa7fb1c6d53d7dd4de19/pyChemometrics/ChemometricsScaler.py#L52-L66
Gscorreia89/pyChemometrics
pyChemometrics/ChemometricsScaler.py
ChemometricsScaler.partial_fit
def partial_fit(self, X, y=None):
    """
    Performs online computation of mean and standard deviation on X for later scaling.
    All of X is processed as a single batch.
    This is intended for cases when `fit` is not feasible due to very large number of `n_samples`
    or because X is read from a continuous stream.
    The algorithm for incremental mean and std is given in Equation 1.5a,b
    in Chan, Tony F., Gene H. Golub, and Randall J. LeVeque. "Algorithms for computing
    the sample variance: Analysis and recommendations."
    The American Statistician 37.3 (1983): 242-247

    :param X: Data matrix to scale.
    :type X: numpy.ndarray, shape [n_samples, n_features]
    :param y: Passthrough for Scikit-learn ``Pipeline`` compatibility.
    :type y: None
    :return: Fitted object.
    :rtype: pyChemometrics.ChemometricsScaler
    """
    X = check_array(X, accept_sparse=('csr', 'csc'), copy=self.copy,
                    warn_on_dtype=True, estimator=self, dtype=FLOAT_DTYPES)

    # Even in the case of `with_mean=False`, we update the mean anyway
    # This is needed for the incremental computation of the var
    # See incr_mean_variance_axis and _incremental_mean_variance_axis
    if sparse.issparse(X):
        # Centering a sparse matrix would densify it, so it is disallowed.
        if self.with_mean:
            raise ValueError(
                "Cannot center sparse matrices: pass `with_mean=False` "
                "instead. See docstring for motivation and alternatives.")
        if self.with_std:
            # First pass: no running statistics exist yet.
            if not hasattr(self, 'n_samples_seen_'):
                self.mean_, self.var_ = mean_variance_axis(X, axis=0)
                self.n_samples_seen_ = X.shape[0]
            # Next passes: fold the new batch into the running statistics.
            else:
                self.mean_, self.var_, self.n_samples_seen_ = \
                    incr_mean_variance_axis(X, axis=0,
                                            last_mean=self.mean_,
                                            last_var=self.var_,
                                            last_n=self.n_samples_seen_)
        else:
            # Sparse without std: no statistics are tracked at all.
            self.mean_ = None
            self.var_ = None
    else:
        # Dense input. First pass: initialize the running accumulators.
        if not hasattr(self, 'n_samples_seen_'):
            self.mean_ = .0
            self.n_samples_seen_ = 0
            if self.with_std:
                self.var_ = .0
            else:
                self.var_ = None

        # Incrementally merge this batch (Chan/Golub/LeVeque update).
        self.mean_, self.var_, self.n_samples_seen_ = \
            _incremental_mean_and_var(X, self.mean_, self.var_,
                                      self.n_samples_seen_)

    if self.with_std:
        # Zero variances are mapped to 1 before the exponent is applied.
        # NOTE(review): scale_power appears to generalize UV scaling
        # (e.g. 0.5 would give Pareto scaling) — confirm against __init__.
        self.scale_ = _handle_zeros_in_scale(numpy.sqrt(self.var_)) ** self.scale_power
    else:
        self.scale_ = None

    return self
Performs online computation of mean and standard deviation on X for later scaling. All of X is processed as a single batch. This is intended for cases when `fit` is not feasible due to very large number of `n_samples` or because X is read from a continuous stream. The algorithm for incremental mean and std is given in Equation 1.5a,b in Chan, Tony F., Gene H. Golub, and Randall J. LeVeque. "Algorithms for computing the sample variance: Analysis and recommendations." The American Statistician 37.3 (1983): 242-247 :param X: Data matrix to scale. :type X: numpy.ndarray, shape [n_samples, n_features] :param y: Passthrough for Scikit-learn ``Pipeline`` compatibility. :type y: None :return: Fitted object. :rtype: pyChemometrics.ChemometricsScaler
https://github.com/Gscorreia89/pyChemometrics/blob/539f5cd719795685271faa7fb1c6d53d7dd4de19/pyChemometrics/ChemometricsScaler.py#L68-L136
Gscorreia89/pyChemometrics
pyChemometrics/ChemometricsScaler.py
ChemometricsScaler.transform
def transform(self, X, y=None, copy=None):
    """
    Perform standardization by centering and scaling with the fitted parameters.

    :param X: Data matrix to scale.
    :type X: numpy.ndarray, shape [n_samples, n_features]
    :param y: Passthrough for scikit-learn ``Pipeline`` compatibility.
    :type y: None
    :param bool copy: Copy the X matrix; defaults to ``self.copy`` when None.
    :return: Scaled version of the X data matrix.
    :rtype: numpy.ndarray, shape [n_samples, n_features]
    """
    check_is_fitted(self, 'scale_')

    if copy is None:
        copy = self.copy
    X = check_array(X, accept_sparse='csr', copy=copy,
                    warn_on_dtype=True, estimator=self, dtype=FLOAT_DTYPES)

    if sparse.issparse(X):
        # Centering a sparse matrix would densify it, so it is disallowed.
        if self.with_mean:
            raise ValueError(
                "Cannot center sparse matrices: pass `with_mean=False` "
                "instead. See docstring for motivation and alternatives.")
        if self.scale_ is not None:
            # Divide each column by its scale factor, in place.
            inplace_column_scale(X, 1 / self.scale_)
        return X

    # Dense path: center first, then scale.
    if self.with_mean:
        X -= self.mean_
    if self.with_std:
        X /= self.scale_
    return X
Perform standardization by centering and scaling using the parameters. :param X: Data matrix to scale. :type X: numpy.ndarray, shape [n_samples, n_features] :param y: Passthrough for scikit-learn ``Pipeline`` compatibility. :type y: None :param bool copy: Copy the X matrix. :return: Scaled version of the X data matrix. :rtype: numpy.ndarray, shape [n_samples, n_features]
https://github.com/Gscorreia89/pyChemometrics/blob/539f5cd719795685271faa7fb1c6d53d7dd4de19/pyChemometrics/ChemometricsScaler.py#L138-L169
Gscorreia89/pyChemometrics
pyChemometrics/ChemometricsScaler.py
ChemometricsScaler.inverse_transform
def inverse_transform(self, X, copy=None):
    """
    Scale back the data to the original representation.

    Inverts the centering/scaling applied by ``transform`` using the
    fitted ``scale_`` and ``mean_`` attributes.

    :param X: Scaled data matrix.
    :type X: numpy.ndarray, shape [n_samples, n_features]
    :param bool copy: Copy the X data matrix; defaults to ``self.copy`` when None.
    :return: X data matrix with the scaling operation reverted.
    :rtype: numpy.ndarray, shape [n_samples, n_features]
    """
    # A fitted scaler is required before the transform can be inverted.
    check_is_fitted(self, 'scale_')
    copy = copy if copy is not None else self.copy
    if sparse.issparse(X):
        # Un-centering a sparse matrix would densify it, so it is disallowed.
        if self.with_mean:
            raise ValueError(
                "Cannot uncenter sparse matrices: pass `with_mean=False` "
                "instead See docstring for motivation and alternatives.")
        if not sparse.isspmatrix_csr(X):
            # tocsr() already allocated a new matrix, so a further copy
            # would be redundant.
            X = X.tocsr()
            copy = False
        if copy:
            X = X.copy()
        if self.scale_ is not None:
            # Multiply columns in place by scale_ (inverse of transform's 1/scale_).
            inplace_column_scale(X, self.scale_)
    else:
        X = numpy.asarray(X)
        if copy:
            X = X.copy()
        # Reverse order of transform(): undo the scaling, then the centering.
        if self.with_std:
            X *= self.scale_
        if self.with_mean:
            X += self.mean_
    return X
Scale back the data to the original representation. :param X: Scaled data matrix. :type X: numpy.ndarray, shape [n_samples, n_features] :param bool copy: Copy the X data matrix. :return: X data matrix with the scaling operation reverted. :rtype: numpy.ndarray, shape [n_samples, n_features]
https://github.com/Gscorreia89/pyChemometrics/blob/539f5cd719795685271faa7fb1c6d53d7dd4de19/pyChemometrics/ChemometricsScaler.py#L171-L205
Gscorreia89/pyChemometrics
pyChemometrics/ChemometricsPLS.py
ChemometricsPLS.leverages
def leverages(self, block='X'):
    """
    Calculate the leverages for each observation.

    Computes the hat-matrix projector H = S (S^T S)^-1 S^T, where S is the
    score matrix of the chosen block (T for X, U for Y).

    :param str block: 'X' to use the X-block scores (scores_t), 'Y' for the
        Y-block scores (scores_u).
    :return: Leverage matrix for the chosen block's scores.
    :rtype: numpy.ndarray
    :raise ValueError: If block is not 'X' or 'Y'.
    """
    # TODO check with matlab and simca
    if block == 'X':
        scores = self.scores_t
    elif block == 'Y':
        scores = self.scores_u
    else:
        raise ValueError('block option must be either X or Y')
    # H = S (S^T S)^-1 S^T. The original nesting passed two arguments to
    # np.linalg.inv (which takes one), raising a TypeError at runtime.
    gram_inv = np.linalg.inv(np.dot(scores.T, scores))
    return np.dot(scores, np.dot(gram_inv, scores.T))
Calculate the leverages for each observation :return: :rtype:
https://github.com/Gscorreia89/pyChemometrics/blob/539f5cd719795685271faa7fb1c6d53d7dd4de19/pyChemometrics/ChemometricsPLS.py#L683-L698
CitrineInformatics/pypif-sdk
pypif_sdk/func/replace_funcs.py
_recurse_replace
def _recurse_replace(obj, key, new_key, sub, remove):
    """Recursive helper for `replace_by_key`"""
    if isinstance(obj, list):
        # Apply the replacement to every element of a list.
        return [_recurse_replace(element, key, new_key, sub, remove)
                for element in obj]
    if isinstance(obj, dict):
        # Snapshot the items so in-loop mutation of obj is safe.
        for k, v in list(obj.items()):
            if k == key and v in sub:
                obj[new_key] = sub[v]
                if remove:
                    del obj[key]
            else:
                obj[k] = _recurse_replace(v, key, new_key, sub, remove)
    # Scalars (and mutated dicts) are returned as-is.
    return obj
Recursive helper for `replace_by_key`
https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/func/replace_funcs.py#L6-L18
CitrineInformatics/pypif-sdk
pypif_sdk/func/replace_funcs.py
replace_by_key
def replace_by_key(pif, key, subs, new_key=None, remove=False):
    """Replace values that match a key

    Deeply traverses the pif object, looking for `key` and replacing
    values in accordance with `subs`. If `new_key` is set, the replaced
    values are assigned to that key. If `remove` is `True`, the old
    `key` pairs are removed. `remove` is ignored unless a distinct
    `new_key` is given.
    """
    if not new_key:
        # Without a distinct target key there is nothing to remove.
        new_key = key
        remove = False
    as_dict = pif.as_dictionary()
    replaced = _recurse_replace(as_dict, to_camel_case(key),
                                to_camel_case(new_key), subs, remove)
    return pypif.pif.loads(json.dumps(replaced))
Replace values that match a key Deeply traverses the pif object, looking for `key` and replacing values in accordance with `subs`. If `new_key` is set, the replaced values are assigned to that key. If `remove` is `True`, the old `key` pairs are removed.
https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/func/replace_funcs.py#L21-L34
CitrineInformatics/pypif-sdk
pypif_sdk/accessor.py
get_propety_by_name
def get_propety_by_name(pif, name):
    """Get a property by name"""
    # Misspelled legacy alias; kept for backward compatibility.
    warn("This method has been deprecated in favor of get_property_by_name")
    for prop in pif.properties:
        if prop.name == name:
            return prop
    return None
Get a property by name
https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/accessor.py#L3-L6
CitrineInformatics/pypif-sdk
pypif_sdk/accessor.py
get_property_by_name
def get_property_by_name(pif, name):
    """Get a property by name"""
    # First matching property wins; None when nothing matches.
    matches = (prop for prop in pif.properties if prop.name == name)
    return next(matches, None)
Get a property by name
https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/accessor.py#L8-L10
CitrineInformatics/pypif-sdk
pypif_sdk/readview/core.py
new_keypair
def new_keypair(key, value, ambig, unambig):
    """
    Check new keypair against existing unambiguous dict

    :param key: of pair
    :param value: of pair
    :param ambig: set of keys with ambig decoding
    :param unambig: set of keys with unambig decoding
    :return:
    """
    # Keys already flagged ambiguous stay ambiguous.
    if key in ambig:
        return
    if key in unambig and unambig[key] != value:
        # Conflicting decoding: demote the key to ambiguous.
        del unambig[key]
        ambig.add(key)
        return
    unambig[key] = value
Check new keypair against existing unambiguous dict :param key: of pair :param value: of pair :param ambig: set of keys with ambig decoding :param unambig: set of keys with unambig decoding :return:
https://github.com/CitrineInformatics/pypif-sdk/blob/8b01d10d9a1426d5eef12e4b2f31c4657aa0fe59/pypif_sdk/readview/core.py#L17-L36