query: stringlengths (9 to 9.05k)
document: stringlengths (10 to 222k)
metadata: dict
negatives: listlengths (30 to 30)
negative_scores: listlengths (30 to 30)
document_score: stringlengths (4 to 10)
document_rank: stringclasses (2 values)
Download all snaps that haven't already been downloaded.
def download_snaps(s):
    existing = get_downloaded()
    snaps = s.get_snaps()
    for snap in snaps:
        id = snap['id']
        if id[-1] == 's' or id in existing:
            print 'Skipping:', id
            continue
        result = download(s, snap)
        if not result:
            print 'FAILED:', id
        else:
            print 'Downloaded:', id
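A minimal usage sketch for the function above, assuming a hypothetical Session class that exposes get_snaps() and get_media(), plus the get_downloaded() and download() helpers referenced in the body; none of these names are defined here and the constructor arguments are placeholders.

# Hypothetical driver: Session, USERNAME and PASSWORD are assumed names.
if __name__ == '__main__':
    s = Session(USERNAME, PASSWORD)
    download_snaps(s)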
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def downloadAll(self, force=False):\n if self.minutesSinceLastUpdate() == 0 and force == False:\n self.log(\"TOO SOON SINCE LAST DOWNLOAD!\")\n return\n for grabber in self.grabbers:\n self.downloadGrab(grabber[\"url\"], grabber[\"ID\"])+\"\\n\"", "def _download_all(update_path=True, verbose=None):\n\n # iterate over dataset\n for ds in dataset_list:\n # call download\n ds().download(update_path=True, verbose=verbose, accept=True)", "def download_all(): #@save\n for name in DATA_HUB:\n download(name)", "def download_data(self):\r\n \r\n for file in self.__files:\r\n file_to_download = os.path.join(self.__folder, os.path.basename(file))\r\n if not os.path.isfile(file_to_download):\r\n self.__download_file(file)", "def fetch_all_snapshots(archive_dir, wayback_filename, target_url):\n # Read the list of snapshots.\n with open(wayback_filename) as f:\n data = f.read()\n\n url_template = \"http://web.archive.org/web/{timestamp}/{target_url}\"\n snapshots = data.split(\"\\n\")\n pages_downloaded = 0\n pages_failed = 0\n pages_skipped = 0\n for snapshot in snapshots:\n fields = snapshot.split()\n if len(fields) < 1:\n print(\"Bad fields. End of data?\")\n break\n date_string = fields[1]\n assert 14 == len(date_string)\n ymd = date_string[:8]\n year = int(date_string[:4])\n month = int(date_string[4:6])\n day = int(date_string[6:8])\n assert 1900 < year < 2100 and 1 <= month <= 12 and 1 <= day <=31\n date_of_fire = datetime.date(year,month, day)\n filename = F\"firedata_{year}_{month:02}_{day:02}.html\"\n path = os.path.join(archive_dir, filename)\n if os.path.exists(path):\n print(\"Not replacing \", path)\n pages_skipped += 1\n continue\n else:\n print(\"Downloading for \", path)\n url = url_template.format(timestamp=date_string, target_url=target_url)\n print(url)\n\n page = fetch(url)\n if page is None:\n print(\"Fetching above url failed.\")\n pages_failed +=1\n continue\n\n pages_downloaded += 1\n with open(path, \"wb\") as f:\n f.write(page)\n print(\"Page saved\")\n sleep(2)\n return pages_downloaded, pages_failed, pages_skipped", "def download_files(self) -> None:\n\n for name, url in self.files.items():\n print(f\"Download {name.split('/')[-1]}\")\n wget.download(url, os.path.join(\"data\", name))", "def download(all):\n print(\"Downloading\")", "def download_all_datasets():\n print(\"Downloading all datasets ...\")\n for dataset in get_available_datasets():\n download_dataset(dataset)", "def download_files(self):", "def downloadImages(self):\n\t\ti = 0\n\t\tfor im in self.images:\n\t\t\t# Let's get the file extension and file name and make the final file path. 
\n\t\t\t# We need to do this to slugify the file name and avoid errors when loading images\n\t\t\tfile_name, file_extension = os.path.splitext(im['url'])\n\t\t\tfile_name = file_name.split(\"/\")[-1]\n\n\t\t\tfile_path = self.data_path + self.dataset + \"/\" + im['slug'] + '/' + str(im['id']) + '_' + slugify(file_name) + file_extension\n\n\t\t\t# If file is not in the file path, then download from the url\n\t\t\tif not os.path.exists(file_path):\n\t\t\t\ttry:\n\t\t\t\t\turllib.urlretrieve(im['url'], file_path )\n\t\t\t\t\tprint \"i:{} url:{}\".format(i,im['url'])\n\t\t\t\texcept Exception, e:\n\t\t\t\t\tprint e\n\t\t\ti += 1", "def download_all(self):\r\n # Fetch website list\r\n self.fetch_website_list()\r\n\r\n for website in self.website_list:\r\n self.download(website['id'])", "def run(self):\n urls_to_download = self._get_links()\n results = ThreadPool(8).imap_unordered(self._download_url, urls_to_download)\n for path in results:\n print(path)", "def download_artifacts(self):\n for artifact_set in self._sets:\n for artifact in artifact_set.artifacts:\n artifact.download(self._cache_dir)", "def download_all_files(self):\n self.server_handler.get_sensor_data_from_server()", "def download_all_maps(self):\n return self._download_all_maps_recur()", "def pullall():\n\tprint(red('\\t\\tStarting download of QNIBTerminal images\\t\\t'))\n\t# pull all the needed images\n\tdocker_images={'fd20','terminal','helixdns','elk','slurm','compute'}\n\tfor image in docker_images:\n\t\tprint 'docker pull qnib/'+image\n\t\t# run('docker pull qnib/'+image)", "def get_files(self):\n # self.folder= +str(int(time.time()))\n if not os.path.exists(self.folder):\n os.mkdir(self.folder)\n while len(self.url_queue): # If we have URLs to crawl - we crawl\n href = self.url_queue.popleft() # We grab a URL from the left of the list\n filename = href.rsplit('/', 1)[-1]\n print(\"Downloading %s to %s...\" % (href, filename))\n fullname = os.path.join(self.folder, filename)\n urlretrieve(href, fullname)\n self.xlfnames.append(filename)", "def download_images(image_urls):\n fetched = []\n count = 0\n for img_url in image_urls:\n if not db.is_image_in_db(img_url):\n filename = os.path.basename(img_url)\n if not os.path.exists(cfg.PHOTO_DIR + filename):\n referer_string = web.get_referrer_string(img_url) # to trick 4walled.org\n cmd = \"wget -t {retry_count} -T {timeout} {ref} {url} -O {save}\".format(url=img_url,\n save=os.path.join(cfg.PHOTO_DIR, filename),\n ref=referer_string,\n retry_count=cfg.WGET_RET,\n timeout=cfg.WGET_TIMEOUT)\n print cmd\n os.system(cmd)\n fetched.append(img_url)\n count += 1\n else:\n print(\"# {0} was already fetched once...\".format(img_url))\n\n print(\"# new imgage(s): {0}\".format(count))\n return fetched", "def download_all_avail(self, n_cores):\n if not os.path.exists(self.baseDownloadPath):\n raise EODataDownException(\"The download path does not exist, please create and run again.\")\n\n logger.debug(\"Creating Database Engine and Session.\")\n db_engine = sqlalchemy.create_engine(self.db_info_obj.dbConn)\n session_sqlalc = sqlalchemy.orm.sessionmaker(bind=db_engine)\n ses = session_sqlalc()\n\n query_result = ses.query(EDDSentinel1ASF).filter(EDDSentinel1ASF.Downloaded == False).filter(\n EDDSentinel1ASF.Remote_URL is not None).all()\n dwnld_params = list()\n downloaded_new_scns = False\n if query_result is not None:\n for record in query_result:\n scn_lcl_dwnld_path = os.path.join(self.baseDownloadPath, \"{}_{}\".format(record.Product_File_ID, record.PID))\n if not 
os.path.exists(scn_lcl_dwnld_path):\n os.mkdir(scn_lcl_dwnld_path)\n out_filename = record.Remote_FileName\n downloaded_new_scns = True\n dwnld_params.append([record.PID, record.Product_File_ID, record.Remote_URL, self.db_info_obj,\n os.path.join(scn_lcl_dwnld_path, out_filename), self.asfUser, self.asfPass])\n else:\n downloaded_new_scns = False\n logger.info(\"There are no scenes to be downloaded.\")\n\n ses.close()\n logger.debug(\"Closed the database session.\")\n\n logger.info(\"Start downloading the scenes.\")\n with multiprocessing.Pool(processes=n_cores) as pool:\n pool.map(_download_scn_asf, dwnld_params)\n logger.info(\"Finished downloading the scenes.\")\n edd_usage_db = EODataDownUpdateUsageLogDB(self.db_info_obj)\n edd_usage_db.add_entry(description_val=\"Checked downloaded new scenes.\", sensor_val=self.sensor_name,\n updated_lcl_db=True, downloaded_new_scns=downloaded_new_scns)", "def download_datasets():\n if not os.path.exists(\"__data__/cornell/movie_conversations.txt\") \\\n or not os.path.exists(\"__data__/cornell/movie_lines.txt\"):\n subprocess.call(['scripts/download_cornell.sh'])\n if not os.path.isdir('__data__/opensubs'):\n subprocess.call(['scripts/download_opensubs.sh'])", "def download(s, snap):\n\n id = snap['id']\n name = snap['sender']\n ts = str(snap['sent']).replace(':', '-')\n\n result = s.get_media(id)\n\n if not result:\n return False\n\n ext = s.is_media(result)\n filename = '{}+{}+{}.{}'.format(ts, name, id, ext)\n path = PATH + filename\n with open(path, 'wb') as fout:\n fout.write(result)\n return True", "def download_goes_data(folder, start, end, product, satellite=\"G17\"):\n \n assert isinstance(start, dt.datetime)\n assert isinstance(end, dt.datetime)\n assert end > start\n assert satellite == \"G17\" or satellite == \"G16\"\n \n if not isinstance(folder, Path):\n folder = Path(folder)\n \n assert folder.is_dir()\n \n start, bucket = _validate_satellite_dates(satellite, start, end)\n if start is None:\n return []\n \n # Get a list of files we already have downloaded.\n current_files = tuple(\n f for f in folder.iterdir()\n if \"ABI-L2\" in f.name and product in f.name and satellite in f.name and f.suffix == \".nc\"\n )\n \n # Files older than this are too old to be missing, and must be\n # permanently missing. 
So we shouldn't check for them again, just\n # remember that htey are missing so we can skip them.\n too_old_to_be_missing = dt.datetime.now(dt.timezone.utc) - dt.timedelta(days=1)\n \n # The list of hours with missing data.\n missing_data_path = folder / \"missing_data.txt\"\n if missing_data_path.exists():\n with open(missing_data_path, \"r\") as mdf:\n missing_data = list(l.strip() for l in mdf if l.strip() != \"\")\n else:\n missing_data = []\n \n current_time = start\n result_list = []\n while current_time < end:\n \n # Check to see how many matching files we have\n time_prefix = current_time.strftime(\"_s%Y%j%H\")\n missing_key = \"{}{}_{}\".format(satellite, time_prefix, product)\n local_files_this_hour = tuple(f for f in current_files if time_prefix in f.name)\n \n result_list.extend(local_files_this_hour)\n\n # Should be 12 per hour for CONUS\n if \"FDCC\" in missing_key and len(local_files_this_hour) >= 12:\n pass\n # Should be 6 per hour for Full Disk\n elif \"FDCF\" in missing_key and len(local_files_this_hour) >= 6:\n pass\n elif missing_key not in missing_data:\n \n result_list.extend(\n _download_files(\n current_time, bucket, product, folder, too_old_to_be_missing, missing_data,\n missing_key\n )\n )\n \n # Move ahead an hour\n current_time += dt.timedelta(hours=1)\n \n # Remember the missing!\n with open(missing_data_path, \"w\") as mdf:\n for line in missing_data:\n mdf.write(line)\n mdf.write(\"\\n\")\n \n return result_list", "def regular_download(self) -> NoReturn:\n\n if not path.isdir(self.name):\n mkdir(self.name)\n\n for chapter in self.chapters.keys():\n\n chapter_folder = f\"{self.name}/{chapter}/\"\n curr_chapter = self.chapters[chapter]\n base_url = f\"{curr_chapter['server']}{curr_chapter['hash']}/\"\n\n if not path.isdir(chapter_folder):\n mkdir(chapter_folder)\n\n for image in curr_chapter[\"images\"]:\n\n image_url = f\"{base_url}{image}\"\n image_file = f\"{chapter_folder}{image}\"\n response = requests.get(image_url, headers={\"Connection\":\"close\"})\n\n if response and response.status_code == 200:\n with open(image_file, \"wb\") as img_file:\n img_file.write(response.content)\n else:\n print(f\"Error downloading chapter: {curr_chapter['num']} Image: {image}\")", "def download_all(self):\r\n download_path = os.path.join(self.download_path, self.username)\r\n already_downloaded = []\r\n successful_downloads = []\r\n failed_downloads = []\r\n if not os.path.exists(download_path):\r\n os.makedirs(download_path)\r\n elif not os.path.isdir(download_path):\r\n raise NotADirectoryError(\"Download path is not a directory: \" + download_path)\r\n elif self.skip_downloaded:\r\n for item in os.listdir(download_path):\r\n file_path = str(os.path.join(download_path, item))\r\n if os.path.isfile(file_path):\r\n parsed_file = self._parse_file_name(os.path.basename(file_path))\r\n if parsed_file is not None:\r\n already_downloaded.append(parsed_file[\"id\"])\r\n for index, item in enumerate(self.videos):\r\n # Don't download it if the user has set that option, and the tiktok already exists on the disk\r\n if item[\"id\"] in already_downloaded:\r\n logger.info(\"Already downloaded video with id: \" + item[\"id\"])\r\n continue\r\n file_name = self._format_file_name(item[\"createTime\"], item[\"id\"])\r\n file_path = os.path.join(download_path, file_name)\r\n logger.info(\"Downloading video: \" + file_name + \" (\" + str(index + 1) + \"/\" + str(len(self.videos)) + \")\")\r\n video_url = self._format_video_url(item)\r\n success = self.download_video(file_path, video_url, 
item[\"createTime\"])\r\n if success:\r\n successful_downloads.append(video_url)\r\n else:\r\n failed_downloads.append(video_url)\r\n sleep_time = random.uniform(self.sleep_min, self.sleep_max)\r\n logger.info(\"Sleeping for: \" + str(sleep_time) + \" seconds\")\r\n sleep(sleep_time)\r\n logger.info(\"Processed all {} videos\".format(self.video_count))\r\n logger.debug(\"Fallback counter: \" + str(self.fallback_counter))\r\n logger.debug(\"YouTube-dl DownloadError counter: \" + str(self.fallback_counter))\r\n logger.debug(\"Other error counter: \" + str(self.other_error_counter))\r\n return {\"successful_downloads\": successful_downloads,\r\n \"failed_downloads\": failed_downloads,\r\n \"skipped_downloads\": already_downloaded}", "def init_script():\n for media_type in SNAPCHAT_MEDIA_URLS.keys():\n if SNAPCHAT_MEDIA_URLS[media_type] is None:\n parse_and_decode_urls(media_type)\n download_snapchat_memories(SNAPCHAT_MEDIA_URLS[media_type], media_type)\n LOGGER.success(f\"Completed downloading all Snapchat memories.\")", "def download(self) -> None:\n os.makedirs(self.root, exist_ok=True)\n\n for subset in self.subsets:\n if self._check_subset_integrity(subset):\n print(f\"{subset} already downloaded and verified\")\n continue\n path = os.path.join(self.root, subset + \".tar.gz\")\n\n already_present = os.path.isfile(path)\n if not already_present:\n subset_url = self.openslr_url + subset + \".tar.gz\"\n with requests.get(subset_url, stream=True) as r:\n r.raise_for_status()\n with open(path, \"wb\") as f:\n shutil.copyfileobj(r.raw, f)\n\n archive_md5 = self.data_files[subset][\"archive_md5\"]\n if utils.checksum_file(path, \"md5\") != archive_md5:\n raise utils.DownloadError(f\"invalid checksum for {path}\")\n\n with tarfile.open(path, mode=\"r|gz\") as tar:\n tar.extractall(self.root)\n\n if not already_present:\n os.remove(path)", "def get_snapshots(self):\r\n ec2 = self.get_ec2_connection()\r\n rs = ec2.get_all_snapshots()\r\n all_vols = [self.volume_id] + self.past_volume_ids\r\n snaps = []\r\n for snapshot in rs:\r\n if snapshot.volume_id in all_vols:\r\n if snapshot.progress == '100%':\r\n snapshot.date = dateutil.parser.parse(snapshot.start_time)\r\n snapshot.keep = True\r\n snaps.append(snapshot)\r\n snaps.sort(cmp=lambda x,y: cmp(x.date, y.date))\r\n return snaps", "def get_all_images(self):\n self.roses.save_image()\n all_images = Images.get_all_images()\n self.assertTrue(len(all_images)<1)", "def download_images(links):\n\n for link in links:\n print(\"Processing\", link)\n try:\n response = requests.get(link,\n timeout=METADATA_REQUEST_TIMEOUT, stream=True)\n except requests.exceptions.RequestException as e:\n print(e)\n sys.exit(1)\n\n artist_name = link.rsplit('/', 2)[1]\n image_name = link.rsplit('/', 2)[2]\n image_name = artist_name + image_name\n\n file_location = ASSET_PATH.joinpath(image_name)\n\n with open(str(file_location), 'wb') as outfile:\n shutil.copyfileobj(response.raw, outfile)", "def fetch_the_data():\n subprocess.run([\"wget\", \"https://storage.googleapis.com/recipe-box/recipes_raw.zip\"])\n subprocess.run([\"unzip\", \"recipes_raw.zip\", \"-d\", RECIPES_DIRPATH])\n subprocess.run([\"rm\", \"recipes_raw.zip\"])" ]
[ "0.66632015", "0.60274625", "0.60035586", "0.5957198", "0.5934118", "0.5919938", "0.5897004", "0.5831177", "0.58302766", "0.5794643", "0.5774174", "0.57472575", "0.57161885", "0.566392", "0.55329263", "0.5514549", "0.54934376", "0.5476704", "0.5452481", "0.543913", "0.54030526", "0.53951734", "0.53908074", "0.53704053", "0.53532696", "0.5352168", "0.5325482", "0.53129476", "0.52941567", "0.52925086" ]
0.7774038
0
Encodes a native Python value in a way that the API expects. Encodes lists and dicts to JSON and boolean values to 'true' or 'false'.
def api_encode(value):
    if type(value) in (dict, list):
        return json_encode(value)
    elif type(value) == bool:
        return str(value).lower()
    return value
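A short illustration of the encodings this produces, under the assumption that json_encode is a thin wrapper around json.dumps:

# Illustrative calls (assuming json_encode behaves like json.dumps):
api_encode({'ids': [1, 2]})  # -> '{"ids": [1, 2]}' (JSON string)
api_encode(True)             # -> 'true'
api_encode(False)            # -> 'false'
api_encode('plain text')     # -> 'plain text' (returned unchanged)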
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def JsonEncode(py_value):\n return JSON_ENCODER.encode(py_value)", "def _encode_value(data):\n\n if type(data) is bool:\n return f'{_TYPE_BOOL}{str(data).lower()}'\n elif type(data) is float:\n return f'{_TYPE_DOUBLE}{str(data)}'\n elif type(data) is int:\n return f'{_TYPE_INT}{str(data)}'\n elif type(data) is str:\n return f'{_TYPE_STRING}{data}'\n elif type(data) is list:\n return _encode_list(data)\n elif type(data) is dict:\n return _encode_dictionary(data)\n else:\n raise Exception(\n f'Data to encode should be of type bool, float, int, str, list, or dict.')", "def encode(self, value):\r\n pass", "def json_encode(value):\n return json.dumps(value)", "def jsonify(value):\n return (\n json.JSONEncoder().encode(value) if type(value) is not str else value\n )", "def encode(self, value):\r\n return value", "def json_encode(py_value, pretty=True):\n encoder = PRETTY_JSON_ENCODER if pretty else COMPACT_JSON_ENCODER\n return encoder.encode(py_value)", "def _encode(self, name, value, cond=1):\n if not cond: return value\n klass = types_factory.for_property(name)\n obj = klass(value)\n if hasattr(value, 'params') and len(value.params.keys()) > 0:\n # TODO: How can a python native value have params?\n obj.params = value.params\n return obj", "def jsonencode(val):\n return json.dumps(val)", "def encode(self, value):\n raise NotImplementedError()", "def encode(value: CLValue) -> bytes:\n encoder = ENCODERS[value.cl_type.typeof]\n if value.cl_type.typeof in {CLTypeKey.LIST, CLTypeKey.OPTION}:\n return encoder(\n value.parsed,\n ENCODERS[value.cl_type.inner_type.typeof]\n )\n else:\n return encoder(value.parsed)", "def _bool_encode(self, d):\n for k, v in d.items():\n if isinstance(v, bool):\n d[k] = str(v).lower()\n \n return d", "def encode_any(value: object) -> bytes:\n raise NotImplementedError()", "def marshal_value(self, value):\n\n return value", "def serialize_field(value):\r\n if isinstance(value, basestring):\r\n return value\r\n\r\n return json.dumps(value, cls=EdxJSONEncoder)", "def _encode_value(self, value):\n return pickle.dumps(value)", "def _encode(self, value):\n if value is None:\n return value\n if isinstance(value, six.binary_type):\n return value\n return value.encode(\"utf-8\")", "def to_python(self, value):\n return force_bool(value)", "def prepare_value(self, value, dumps=True):\n if dumps:\n value = json.dumps(value)\n\n if PY2:\n value = value.decode('utf-8').encode(self.options.get('encoding'))\n\n return value", "def serialize_to_python(cls, value):\n raise NotImplementedError", "def urlify(e):\n if isinstance(e, dict):\n try:\n iteritems = e.iteritems()\n except AttributeError:\n iteritems = e.items()\n\n return dict((k, urlify(v)) for k, v in iteritems)\n elif isinstance(e, (list, tuple)):\n return json.dumps(e, cls=CustomJSONEncoder)\n elif isinstance(e, bool):\n return 'true' if e else 'false'\n else:\n return encode(e)", "def encode_result(value: object) -> bytes:\n raise NotImplementedError()", "def json_serialize(value):\n if value is None or isinstance(value, (int, long, float, basestring, bool)):\n return value\n elif isinstance(value, (list, tuple, set)):\n return [json_serialize(v) for v in value]\n elif isinstance(value, dict):\n for k, v in value.items():\n value[k] = json_serialize(v)\n return value\n # return date/time in isoformat\n elif isinstance(value, (dt.datetime, dt.date, dt.time)):\n return value.isoformat()\n elif isinstance(value, ActiveRecordMixin):\n return _model_to_dict(value)\n else:\n return unicode(value)", "def bool_to_python(self, 
value):\r\n if value == 'true':\r\n return True\r\n elif value == 'false':\r\n return False", "def json_dumps(value, **kwargs):\n kwargs.setdefault('ensure_ascii', False)\n return json.dumps(value, **kwargs)", "def prepare_value(self, value):\n return json.dumps(value, indent=4)", "def encode_value(x):\n for serializer in string_serializers:\n if isinstance(x, serializer.type):\n return {\"$type\": serializer.name, \"$value\": serializer.to_json(x)}\n\n raise TypeError(type(x)) # pragma: no cover", "def bencode(data, f):\n\tif isinstance(data, int):\n\t\t_encode_int(data, f)\n\telif isinstance(data, (str, bytes)):\n\t\t_encode_buffer(data, f)\n\telif isinstance(data, Mapping):\n\t\t_encode_mapping(data, f)\n\telif isinstance(data, Iterable):\n\t\t_encode_iterable(data, f)", "def db_value(self, value):\n return value if value is None else json.dumps(value)", "def _encode(values, uniques=None, encode=False, check_unknown=True):\n try:\n res = _encode_python(values, uniques, encode)\n except TypeError:\n raise TypeError(\"argument must be a string or number\")\n return res" ]
[ "0.7380409", "0.73028606", "0.6974219", "0.6965896", "0.6948547", "0.69058067", "0.68838495", "0.67391557", "0.67146885", "0.67039376", "0.65755284", "0.64773065", "0.64079785", "0.64079565", "0.6391889", "0.6390388", "0.6380185", "0.63579655", "0.6314916", "0.62741226", "0.6187923", "0.61653537", "0.61582756", "0.6157682", "0.61523354", "0.61116403", "0.609928", "0.60819393", "0.608176", "0.6049848" ]
0.8075695
0
Low-level method for making API calls. It handles encoding the parameters, constructing authentication headers, decoding the response, and converting API error responses into Python exceptions.
def call(self, api_call, **kwargs):
    # Encode values for the API (JSON, bools, nulls)
    params = dict((key, api_encode(value)) for key, value in kwargs.iteritems() if value is not None)
    params.update(self.defaults)
    if api_call[0] != "/":
        api_call = "/" + api_call
    url = self.api_url + api_call
    self.logger.debug(url)
    # Signing the request modifies the request object and params in-place.
    # Sign the request *before* encoding and passing the params.
    request = Request(url)
    if self.sign_requests:
        self.sign_request(request, api_call, params)
    print_params = params.copy()
    if 'client_secret' in print_params:
        print_params['client_secret'] = "CLIENT_SECRET_REMOVED"
    self.logger.debug(urlencode(print_params))
    request.add_data(urlencode(params))
    if self.compress:
        request.add_header('Accept-encoding', 'gzip')
    try:
        with closing(urlopen(request)) as response:
            if response.info().get('Content-Encoding') == 'gzip':
                buf = StringIO(response.read())
                f = gzip.GzipFile(fileobj=buf)
                body = f.read()
            else:
                body = response.read()
    except HTTPError as error:
        if error.code in (400, 401):
            # /oauth/token returns 400 or 401
            body = error.fp.read()
        elif error.code == 404:
            raise InvalidApiCallError(api_call, error.code)
        else:
            raise error
    return self.parse_response(body)
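A hedged example of how this method might be invoked; the endpoint path and keyword arguments are illustrative only, and client is assumed to be an instance of the class that owns call():

# Hypothetical invocation: None values are dropped, the remaining keyword
# arguments are passed through api_encode(), the request is signed when
# sign_requests is set, and the parsed response is returned.
result = client.call('users/show', user_id=42, include_stats=True, cursor=None)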
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _do_api_call(\n self,\n endpoint_info: tuple[str, str],\n json: dict[str, Any] | None = None,\n wrap_http_errors: bool = True,\n ):\n method, endpoint = endpoint_info\n\n # TODO: get rid of explicit 'api/' in the endpoint specification\n url = f\"https://{self.host}/{endpoint}\"\n\n aad_headers = self._get_aad_headers()\n headers = {**self.user_agent_header, **aad_headers}\n\n auth: AuthBase\n token = self._get_token()\n if token:\n auth = _TokenAuth(token)\n else:\n self.log.info(\"Using basic auth.\")\n auth = HTTPBasicAuth(self.databricks_conn.login, self.databricks_conn.password)\n\n request_func: Any\n if method == \"GET\":\n request_func = requests.get\n elif method == \"POST\":\n request_func = requests.post\n elif method == \"PATCH\":\n request_func = requests.patch\n elif method == \"DELETE\":\n request_func = requests.delete\n else:\n raise AirflowException(\"Unexpected HTTP Method: \" + method)\n\n try:\n for attempt in self._get_retry_object():\n with attempt:\n response = request_func(\n url,\n json=json if method in (\"POST\", \"PATCH\") else None,\n params=json if method == \"GET\" else None,\n auth=auth,\n headers=headers,\n timeout=self.timeout_seconds,\n )\n response.raise_for_status()\n return response.json()\n except RetryError:\n raise AirflowException(f\"API requests to Databricks failed {self.retry_limit} times. Giving up.\")\n except requests_exceptions.HTTPError as e:\n if wrap_http_errors:\n raise AirflowException(\n f\"Response: {e.response.content}, Status Code: {e.response.status_code}\"\n )\n else:\n raise e", "def call(self, http_method, api_path, params=None, raw_data=False):\n\n # get function of requests package\n requests_func = getattr(requests, http_method.lower())\n\n # parse parameters\n req_params = {}\n file_params = {}\n\n if params is not None:\n for key, value in six.iteritems(params):\n if isinstance(value, (datetime.date,\n datetime.datetime,\n float,\n int)):\n req_params[key] = six.text_type(value)\n elif isinstance(value, six.string_types):\n req_params[key] = six.text_type(value)\n elif hasattr(value, \"read\"):\n filename = os.path.split(value.name)[1]\n if not _is_string_ascii_encodeable(filename):\n b64_key = key + \"name_b64enc\"\n byte_value = filename.encode(\"utf-8\")\n b64_value = base64.b64encode(byte_value).decode(\"utf-8\")\n\n filename = \"@param=%s\" % b64_key\n req_params[b64_key] = b64_value\n file_params[key] = (filename, value, \"application/octet-stream\")\n else:\n raise VMRayRESTAPIError(\"Parameter \\\"{}\\\" has unknown type \\\"{}\\\"\".format(key, type(value)))\n\n # construct request\n if file_params:\n files = file_params\n else:\n files = None\n\n # we need to adjust some stuff for POST requests\n if http_method.lower() == \"post\":\n req_data = req_params\n req_params = None\n else:\n req_data = None\n\n # do request\n result = requests_func(self.server + api_path,\n data=req_data,\n params=req_params,\n headers={\"Authorization\": \"api_key {}\".format(self.api_key)},\n files=files,\n verify=self.verify_cert,\n stream=raw_data)\n handle_rest_api_result(result)\n\n if raw_data:\n return result.raw\n\n # parse result\n try:\n json_result = result.json()\n except ValueError:\n raise ValueError(\"API returned invalid JSON: {}\".format(result.text))\n\n # if there are no cached elements then return the data\n if \"continuation_id\" not in json_result:\n return json_result.get(\"data\", None)\n\n data = json_result[\"data\"]\n\n # get cached results\n while \"continuation_id\" in json_result:\n # send request 
to server\n result = requests.get(\"{}/rest/continuation/{}\".format(self.server, json_result[\"continuation_id\"]),\n headers={\"Authorization\": \"api_key {}\".format(self.api_key)},\n verify=self.verify_cert)\n handle_rest_api_result(result)\n\n # parse result\n try:\n json_result = result.json()\n except ValueError:\n raise ValueError(\"API returned invalid JSON: {}\".format(result.text))\n\n data.extend(json_result[\"data\"])\n\n return data", "def _execApiCall(headers, params, method_name,\r\n domain='ma.gnolia.com',\r\n urlhead='/api/rest/1/'):\r\n \r\n if 'api_key' not in params and method_name not in ['echo', 'get_key']:\r\n raise MagnoliaException('Required API Key parameter missing')\r\n conn = httplib.HTTPConnection(domain)\r\n conn.request('POST', urlhead + method_name, params, headers)\r\n return conn.getresponse()", "def _api_request(self, endpoint, params=None):\n \n if params:\n response = requests.get(url=f\"{self.api_url}/{endpoint}\", headers={\"Authorization\":self.auth_header},\n params=params)\n else:\n response = requests.get(url=f\"{self.api_url}/{endpoint}\", headers={\"Authorization\":self.auth_header})\n code = response.status_code\n if 200 <= code < 300:\n logging.debug(f\"API call: {self.api_url}/{endpoint} | {code}\")\n encoding = response.encoding\n raw = response.content\n return json.loads(raw.decode(encoding))\n elif code > 500:\n raise APIAuthException\n else:\n logging.error(f\"ERROR: Bad API call: {self.api_url}/{endpoint} | {code}\")", "def _request(self, method, url, body=None, headers=None, serialize=True):\n headers = headers or {}\n headers['Accept'] = 'application/json'\n headers['User-Agent'] = 'paxes-httpclient'\n if body and not 'Content-Type' in headers:\n headers['Content-Type'] = 'application/json'\n if self.auth_token:\n headers['X-Auth-Token'] = self.auth_token\n LOG.debug('>> %s %s, %s, %s' % (method, url, headers, body))\n conn = self._create_connection(url)\n if body and serialize:\n body = json.dumps(body)\n conn.request(method, url, body, headers)\n res = conn.getresponse()\n header_list = res.getheaders()\n header_dict = {}\n for ituple in header_list:\n header_dict[ituple[0].lower()] = ituple[1]\n response_info = {\n 'status': res.status,\n 'reason': res.reason,\n 'headers': header_dict,\n 'body': res.read()\n }\n LOG.debug('<< %d %s, %s, %s' % (response_info['status'],\n response_info['reason'],\n response_info['headers'],\n response_info['body']))\n conn.close()\n return response_info", "def _api_request(*args, **kwargs):\n response = requests.request(*args, **kwargs)\n return APIResponse(response)", "def _api_call(self, api_call, method=\"GET\", payload=None):\n # type: (str, str, Dict[str, str]) -> requests.Response\n\n headers = {\n \"accept\" : \"application/json\",\n \"Authorization\" : f\"Bearer {self.access_token}\",\n \"x-ibm-client-id\" : self.client_id,\n }\n self.__log.debug(headers)\n api_url = f\"{self.base_url}/{api_call}\"\n\n self.__log.debug(f\"Calling {api_url} with method {method}\")\n if method == \"GET\":\n resp = requests.get(api_url, headers=headers)\n elif method == \"POST\":\n resp = requests.post(api_url, headers=header, data=payload)\n elif method == \"PUT\":\n resp = requests.put(api_url, headers=header, data=payload)\n elif method == \"DELETE\":\n resp = requests.delete(api_url, headers=headers)\n elif method == \"HEAD\":\n resp = requests.head(api_url, headers=headers)\n elif method == \"OPTIONS\":\n resp = requests.options(api_url, headers=headers)\n else:\n raise Exception(f\"The method {method} is 
unsupported\")\n \n if (resp.ok):\n return resp\n else:\n self.__log.debug(resp.status_code)\n self.__log.debug(resp.text)\n return resp", "def _do_call(\n self,\n method: str = None,\n url: str = None,\n headers: t.Dict[str, str] = None,\n fields: t.Dict[str, t.Any] = None,\n body: JSONEncodable = None,\n **kwargs: t.Dict[str, t.Any],\n ) -> HTTPResponse:\n r = HTTPResponse()\n headers = self._add_auth_header(headers)\n if body is not None and method in ['POST', 'PUT', 'PATCH']:\n if 'Content-Type' not in headers:\n headers['Content-Type'] = 'application/json'\n r = self.http.request(\n method=method,\n url=url,\n body=self._encode(body),\n headers=headers\n )\n else:\n if headers['Content-Type'] == 'application/x-www-form-urlencoded':\n r = self.http.urlopen(\n method,\n url,\n body=self._encode(body, 'url'),\n headers=headers\n )\n elif headers['Content-Type'] == 'application/json':\n r = self.http.request(\n method=method,\n url=url,\n body=self._encode(body),\n headers=headers\n )\n else:\n msg = f''' The Content-Type header was set to {headers['Content-Type']}\\n\n However, anything else than 'application/json' or 'application/x-www-form-urlencoded'\\n\n is not accounted for in the client.\\n If you would like to add it, look for:\\n\\n\n \"_do_call\" to hook the logic\\n\n client_encoding_decoding_point_f for handling encoding\\n\\n\n '''\n raise NotImplementedError(msg)\n else:\n r = self.http.request_encode_url(\n method=method,\n url=url,\n headers=headers,\n fields=fields\n )\n return r", "async def _api_request(self,\n method: str,\n path_url: str,\n params: Dict[str, Any] = {}) -> Dict[str, Any]:\n base_url = f\"https://{global_config_map['gateway_api_host'].value}:\" \\\n f\"{global_config_map['gateway_api_port'].value}\"\n url = f\"{base_url}/{path_url}\"\n client = await self._http_client()\n if method == \"get\":\n if len(params) > 0:\n response = await client.get(url, params=params)\n else:\n response = await client.get(url)\n elif method == \"post\":\n response = await client.post(url, data=params)\n\n parsed_response = json.loads(await response.text())\n if response.status != 200:\n err_msg = \"\"\n if \"error\" in parsed_response:\n err_msg = f\" Message: {parsed_response['error']}\"\n raise IOError(f\"Error fetching data from {url}. 
HTTP status is {response.status}.{err_msg}\")\n if \"error\" in parsed_response:\n raise Exception(f\"Error: {parsed_response['error']}\")\n\n return parsed_response", "def api_call(endpoint, params, headers):\n\n api_response = get(BASE_URL.format(endpoint=endpoint), params=params,\n headers=headers)\n\n api_response.raise_for_status()\n json_resp = api_response.json()\n\n api_response.close()\n return json_resp", "def _make_request(self, url: str, parameters: dict = None,\n method: str = 'GET', *args, **kwargs):\n response = requests.request(\n method=method,\n url=build_url(\n self.BASE_API_URL, url, parameters\n ),\n headers={\n 'Authorization': 'Bearer {}'.format(self._access_token)\n }, **kwargs\n )\n if response.ok:\n return response.json()\n raise MondoApiException(response.json()['message'])", "def _api_call(self, method: str, endpoint: str, data: dict = None) -> requests.Response:\n\n headers = {\n \"Content-Type\": \"text/plain; charset=uft-8\"\n }\n\n auth = {\n \"usr\": self.user,\n \"key\": self.api_key\n }\n\n payload = {\"auth\": auth, \"data\": data}\n\n response = requests.request(method=method,\n url=self.api_url + endpoint,\n headers=headers,\n data=json.dumps(payload))\n return response", "def make_api_call(action, parameters = {}, method = 'get', data = {}):\n headers = {\n 'Content-type': 'application/json',\n 'Accept-Encoding': 'gzip',\n 'Authorization': 'Bearer %s' % ACCESS_TOKEN\n }\n if method == 'get':\n r = s.request(method, API_BASE_URL+action, headers=headers, params=parameters, timeout=30)\n elif method == 'post':\n r = s.request(method, API_BASE_URL+action, headers=headers, data=data, params=parameters, timeout=10)\n else:\n raise ValueError('Method should be get or post.')\n log('API %s call: %s' % (method, r.url) )\n if ((r.status_code == 200 and method == 'get') or (r.status_code == 201 and method == 'post')):\n return r.json()\n else:\n raise ValueError('API error when calling %s : %s' % (r.url, r.content))", "def _api_call(self, **kwargs):\n params = {\n 'format': 'json',\n }\n params.update(kwargs)\n r = requests.get(self.api_base_url, params=params)\n return r.json()", "def _api_request(self, path, method, data=None, query=None):\n\n url = request_url(\n self.config['secure'],\n self.config['hostname'],\n self.config['port'],\n path,\n query,\n )\n\n try:\n resp = request(\n url,\n method,\n self._headers(),\n data,\n self.config['timeout'],\n )\n\n return Response(\n resp.get('meta', {}),\n # Response info may have 'object' or 'objects' key, depending\n # on whether there are 1 or multiple results.\n resp.get('object', resp.get('objects'))\n )\n except HTTPError as e:\n response = e.read()\n fallback = '{0} {1}'.format(e.code, e.msg)\n\n if isinstance(response, bytes):\n data = response.decode('utf8')\n else:\n data = response\n\n error = json.loads(data).get('error', {})\n message = error.get('message', fallback)\n raise HTTPResponseError(message, status_code=e.code, cause=e)", "def http_request(endpoint, data, method='POST'):\n url = BASE_API + endpoint\n data['authkey'] = AUTH_KEY\n\n response = requests.request(method, url=url, data=data, timeout=300, verify=VERIFY)\n if response.status_code == 200:\n try:\n return response.json()\n except Exception as e:\n return_error('Response JSON decoding failed due to {}'.format(str(e)))\n\n else:\n return_error('API Returned, {}:{}'.format(response.status_code, response.reason))", "def _request(self, endpoint: str = \"/api/\", params: object = {}) -> dict:\n ret: dict = {}\n try:\n if not self.api_key:\n 
ret[\"error\"] = \"API key is empty\"\n raise APIError(ret['error'])\n\n r = requests.get(f\"{self.apibase}{endpoint}\",\n params=params,\n headers=self.headers,\n verify=self.verify_ssl)\n response_data = orjson.loads(r.text)\n except orjson.JSONDecodeError:\n ret[\"error\"] = \"Failed to parse response data to JSON\"\n if self.debug:\n ret[\"error\"] += \"\\nDescription: \" + r.reason\n ret[\"error\"] += \"\\nData: \" + r.text\n except requests.HTTPError:\n ret[\"error\"] = f\"{r.status_code}: {r.reason}\"\n if self.debug:\n ret[\"error\"] += \"\\nDescription: \" + r.reason\n ret[\"error\"] += \"\\nData: \" + r.text\n\n if ret.get('error', None):\n raise APIError(ret['error'])\n check_status_code(request=r, debug=self.debug, ret=ret)\n\n ret = response_data\n return ret", "def _call(self, method, url, params):\n if not url.startswith('http'):\n url = self.root + url\n headers = self._auth_headers()\n headers['Content-Type'] = 'application/json'\n\n r = self._session.request(method, url,\n headers=headers,\n proxies=self.proxies,\n params=params,\n timeout=self.requests_timeout)\n r.raise_for_status() # Check for error\n return r.json()", "async def request(\r\n self, method: str, url: str, params: dict = None, data: dict = None\r\n ):\r\n async with self._session.request(\r\n method,\r\n url,\r\n params=params,\r\n json=data,\r\n headers={\"Authorization\": \"Bearer \" + self._token},\r\n ) as resp:\r\n if resp.status == 200:\r\n return await resp.json()\r\n if resp.status in (400, 422, 429, 500):\r\n data = None\r\n try:\r\n data = await resp.json()\r\n except Exception: # pylint: disable=broad-except\r\n pass\r\n raise APIResponseError(\r\n resp.request_info,\r\n resp.history,\r\n status=resp.status,\r\n message=resp.reason,\r\n headers=resp.headers,\r\n data=data,\r\n )\r\n resp.raise_for_status()", "def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,\n post_params=None, response_type=None, response_headers=None, auth_settings=None,\n collection_formats=None, request_type=None):\n return self.do_http_request(\n method=method,\n resource_path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body,\n post_params=post_params,\n response_type=response_type,\n response_headers=response_headers,\n collection_formats=collection_formats,\n request_type=request_type,\n\t async_request=True)", "def http_call(\n self,\n method,\n url,\n data=None,\n json_data=None,\n headers=None,\n verify=False,\n params=None,\n ):\n if data:\n _response = getattr(self.session, method.lower())(\n url, data=data, headers=headers, params=params, verify=verify\n )\n\n elif json_data:\n _response = getattr(self.session, method.lower())(\n url, json=json_data, headers=headers, params=params, verify=verify\n )\n\n else:\n _response = getattr(self.session, method.lower())(\n url, headers=headers, params=params, verify=verify\n )\n self.api_calls += 1\n\n try:\n _response.raise_for_status()\n except HTTPError:\n raise HTTPError(\n f\"{_response.json()['status']}: {_response.json()['message']}\"\n )\n\n return _response", "def _method_call(self, method, **kwargs):\n try:\n connection = httplib.HTTPConnection(self._api_address)\n except:\n raise FantasyDataError('Error: Cannot connect to the FantasyData API')\n\n try:\n method = method.format(format=self._response_format, **kwargs)\n request_url = \"/standard/{format}/{method}?{get_params}\".format(format=self._response_format, method=method,\n 
get_params=self._get_params)\n connection.request(\"GET\", request_url, \"\", self._headers)\n response = connection.getresponse()\n\n result = json.loads(response.read())\n\n if isinstance(result, dict) and \"statusCode\" in result:\n if (result['statusCode']) == 401:\n raise FantasyDataError('Error: Invalid API key')\n else:\n raise FantasyDataError('Error: Failed to get response')\n\n return result\n # except:\n # pass\n finally:\n connection.close()", "def call_api(\n self,\n url,\n data,\n token,\n status_code,\n api_type,\n data_format='json',\n response_check=None,\n debug=True):\n\n if JWT_AUTH:\n auth_string = 'JWT {}'.format(token)\n else:\n auth_string = 'Token {}'.format(token)\n\n rest_fn = None\n if api_type == 'get':\n rest_fn = self.client.get\n elif api_type == 'post':\n rest_fn = self.client.post\n elif api_type == 'put':\n rest_fn = self.client.put\n elif api_type == 'patch':\n rest_fn = self.client.patch\n elif api_type == 'delete':\n rest_fn = self.client.delete\n else:\n print(\n \"Invalid API function type provided. Allowed types are: \"\n \"[get, post, put, patch, delete]\")\n\n if data:\n response = rest_fn(\n url,\n data=data,\n format=data_format,\n HTTP_AUTHORIZATION=auth_string)\n else:\n response = rest_fn(\n url,\n HTTP_AUTHORIZATION=auth_string)\n if debug:\n if response.status_code != status_code:\n print('response:', response.data)\n if response_check:\n response_check(self, response.data)\n self.assertEqual(response.status_code, status_code)\n return response", "def call_api(axobjectinstance, **args):\n\n if AXAPI_LOGIN == 1:\n args[\"session_id\"] = AXAPI_SESSION_ID\n\n if args.has_key(\"post_data\"):\n data = args[\"post_data\"]\n del args[\"post_data\"]\n url_str = _get_request_url()+\"?\"+urllib.urlencode(args)\n else:\n data = urllib.urlencode(args)\n url_str = _get_request_url()\n print data\n print url_str\n \n resp = _send_request(url_str, data)\n print resp\n if args.has_key(\"format\"):\n fmt = args[\"format\"]\n if fmt == \"json\":\n # handle the json response to build the dict\n resp = json.loads(resp)\n else:\n # handle the xml/url response into the dict\n resp = _XmlDict(XML(resp), axobjectinstance.__xml_convrt__)\n print resp\n \n return resp", "def api_request(self, api_url: str, params: dict = None, headers: dict = None) -> APIResponse:\n if headers is None:\n headers = {}\n if params is None:\n params = {}\n\n link = self._session.get(url=api_url, params=params, headers=headers)\n\n return APIResponse(link.text)", "def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,\n post_params=None, response_type=None, response_headers=None, auth_settings=None,\n collection_formats=None, request_type=None):\n return self.do_http_request(\n method=method,\n resource_path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body,\n post_params=post_params,\n response_type=response_type,\n response_headers=response_headers,\n collection_formats=collection_formats,\n request_type=request_type)", "def _call_api(self, verb, url, **request_kwargs):\n api = 'https://api.github.com{}'.format(url)\n auth_headers = {'Authorization': 'token {}'.format(self.api_token)}\n headers = {**auth_headers, **request_kwargs.pop('headers', {})}\n return getattr(requests, verb)(api, headers=headers, **request_kwargs)", "def api_call(self, client_http_method, path, data=None,\n follow_redirects=False, expected_status=200,\n expected_redirects=[], 
expected_headers={},\n expected_mimetype=None, expected_num_queries=None,\n expected_json=True, return_http_response=True, **extra):\n def _call_api():\n return client_http_method(path=path,\n data=data,\n follow=follow_redirects,\n HTTP_X_REQUESTED_WITH='XMLHttpRequest',\n **extra)\n\n # Normalize the API path so that the base URL containing the hostname\n # is stripped.\n if path.startswith(self.base_url):\n path = path[len(self.base_url):]\n\n # If the caller is explicitly requested multipart content, ensure we've\n # encoded the data.\n if extra.get('content_type') == MULTIPART_CONTENT:\n data = encode_multipart(BOUNDARY, data)\n\n # Log some details about the API request that's about to be performed.\n print('Performing HTTP %s for API %s'\n % (client_http_method.__name__.upper(), path))\n\n if data is not None:\n print('Request data = %r' % data)\n\n if expected_num_queries is None:\n response = _call_api()\n else:\n with self.assertNumQueries(expected_num_queries):\n response = _call_api()\n\n print('Raw API response: %r' % response.content)\n\n rsp = response.content\n\n self.assertEqual(response.status_code, expected_status)\n\n if expected_status in (204, 405):\n self.assertEqual(response.content, b'')\n rsp = None\n else:\n if expected_status != 302 and expected_json:\n rsp = json.loads(force_str(response.content))\n else:\n rsp = response.content\n\n print('Parsed API response:')\n pprint.pprint(rsp)\n\n if expected_status >= 400:\n # Error responses should be using the test's error mimetype\n # and not some valid response mimetype.\n self.assertIsNone(expected_mimetype)\n\n if expected_status != 405:\n self.assertEqual(response['Content-Type'],\n self.error_mimetype)\n elif expected_status != 302:\n # All status codes other than the few above should have a\n # response payload matching the expected mimetype.\n self.assertIsNotNone(expected_mimetype)\n self.assertEqual(response['Content-Type'], expected_mimetype)\n\n # Check if the response redirected the way the caller expected.\n if expected_redirects:\n self.assertEqual(len(response.redirect_chain),\n len(expected_redirects))\n\n for redirect in expected_redirects:\n self.assertEqual(response.redirect_chain[0][0],\n self.base_url + expected_redirects[0])\n\n # Check that all the expected headers are present in the response.\n for header, value in expected_headers.items():\n self.assertIn(header, response)\n self.assertEqual(response[header], value)\n\n if return_http_response:\n return rsp, response\n else:\n return rsp", "def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,\n post_params=None, cname=None, response_type=None, response_headers=None, auth_settings=None,\n collection_formats=None, request_type=None):\n return self.do_http_request(\n method=method,\n resource_path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body,\n post_params=post_params,\n cname=cname,\n response_type=response_type,\n response_headers=response_headers,\n collection_formats=collection_formats,\n request_type=request_type)", "def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,\n post_params=None, cname=None, response_type=None, response_headers=None, auth_settings=None,\n collection_formats=None, request_type=None):\n return self.do_http_request(\n method=method,\n resource_path=resource_path,\n path_params=path_params,\n query_params=query_params,\n 
header_params=header_params,\n body=body,\n post_params=post_params,\n cname=cname,\n response_type=response_type,\n response_headers=response_headers,\n collection_formats=collection_formats,\n request_type=request_type)" ]
[ "0.67149675", "0.6664818", "0.66358113", "0.65830725", "0.6512704", "0.6511796", "0.6474514", "0.64474", "0.643962", "0.63296205", "0.6321636", "0.6317781", "0.62910086", "0.62778836", "0.6247308", "0.62412024", "0.6229673", "0.62191224", "0.6215646", "0.6215193", "0.6207017", "0.6200565", "0.61958456", "0.6175045", "0.61265814", "0.61131644", "0.6073264", "0.605827", "0.60552526", "0.60552526" ]
0.6754091
0
Parse the response from the API, decoding the JSON and converting errors into exceptions.
def parse_response(self, response):
    data = json_decode(response)
    if data['stat'] == 'error':
        self.logger.debug("Response:\n" + json_encode(data, indent=4))
        try:
            message = data['error_description']
        except KeyError:
            message = data['message']
        raise ApiResponseError(data['code'], data['error'], message, data)
    return data
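A small sketch of the error path, assuming a payload shaped like the fields this method reads (stat, code, error, error_description or message); the body and the client object are illustrative:

error_body = ('{"stat": "error", "code": 401, "error": "invalid_token",'
              ' "error_description": "The access token has expired"}')
try:
    client.parse_response(error_body)
except ApiResponseError as exc:
    # exc carries the code, the error identifier and the human-readable message
    print(exc)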
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _parse_response(self, response, all_ops):\n try:\n parsed_response = json.loads(response)\n except Exception, e:\n raise ApiError(e)\n if 'error' in parsed_response: # needed anymore?\n raise ApiError(parsed_response['error'])\n # Return the true API return value.\n return parsed_response", "def _handle_response(self, resp):\n\n try:\n resp.raise_for_status()\n results = json.loads(resp.text)\n except requests.RequestException:\n raise Exception(resp.text)\n except JSONDecodeError:\n raise Exception(\"Error in parsing: {}\".format(resp.text))\n return results", "def parse_response(self, response):\n try:\n response = json.loads(response)\n if 'error' in response:\n if 'message' in response['error']:\n raise self.CMoreError(response['error']['message'])\n elif 'description' in response['error']:\n raise self.CMoreError(response['error']['description'])\n elif 'code' in response['error']:\n raise self.CMoreError(response['error']['error'])\n\n except ValueError: # when response is not in json\n pass\n\n return response", "def _parse_content(response):\n if response.status_code != 200:\n raise ApiError(f'unknown error: {response.content.decode()}')\n result = json.loads(response.content)\n if not result['ok']:\n raise ApiError(f'{result[\"error\"]}: {result.get(\"detail\")}')\n return result", "def parse(response):\n if isinstance(response, dict):\n json = response\n else:\n json = response.json()\n\n if json.get('Error'):\n raise Exception('Error in retrieval: ' + self.json['error'])\n\n return json", "def _json_parser(self, json_response):\n response = json_response.json()\n print(response)\n status = response.get('status', None)\n message = response.get('message', None)\n data = response.get('data', None)\n\n return json_response.status_code, status, data", "def _parse_json_response(self, res: requests.Response, token: str = None) -> dict:\n try:\n self._check_response(res, token)\n self._check_status_error(res)\n return res.json()\n except (json.JSONDecodeError, ValueError):\n return {\"error\": {\"message\": res.text, \"code\": 999}}", "def parse_response(self, response):\n\n return json.loads(response.text)", "def parse_response(response):\n return json.loads(response.text)", "async def _handle_response(response: ClientResponse) -> Dict:\n content = await response.json(encoding='utf-8', loads=loads)\n if response.status != 200:\n for member in JmRpcErrorType:\n if content['message'] != member.value:\n continue\n raise JmRpcError(response.status, content)\n response.raise_for_status()\n return content", "def process_json(self, data):\r\n rsp = json.loads(data)\r\n\r\n if rsp['stat'] == 'fail':\r\n raise APIError, rsp\r\n\r\n return rsp", "def _handle_api_call(self, url):\n response = urlopen(url)\n url_response = response.read()\n json_response = loads(url_response)\n \n if not json_response:\n raise ValueError('Error getting data from the api, no return was given.')\n elif \"Error Message\" in json_response:\n raise ValueError(json_response[\"Error Message\"])\n elif \"Information\" in json_response and self.treat_info_as_error:\n raise ValueError(json_response[\"Information\"])\n \n return json_response", "def parse_response(self, response, **kw):\n data = super().parse_response(response, **kw)\n error = data.get('error')\n if error is None:\n return data['result']\n else:\n # assume error object follows json-rpc 2.0 spec formatting\n self.handle_error(code=error['code'], msg=error['message'])", "def handle_response_json(self, http: Http, response, **kwargs) -> dict:\n try:\n 
data = response.json()\n except Exception as exc:\n raise JsonInvalid(msg=\"Response has invalid JSON\", response=response, exc=exc)\n return data", "def validate_response(response):\n\n r = response\n try:\n r.raise_for_status()\n except HTTPError as e:\n message = dict(status_code=r.status_code, exception=e)\n\n try:\n response = r.json()\n message['response'] = response\n except JSONDecodeError as e:\n message['response'] = r.content\n\n raise HTTPError(message)", "def _parse_response(self, callback, response):\n if response.error:\n logging.warning(\"HTTP error from Github: %s\", response.error)\n callback(None)\n return\n try:\n json = tornado.escape.json_decode(response.body)\n except Exception:\n logging.warning(\"Invalid JSON from Github: %r\", response.body)\n callback(None)\n return\n if isinstance(json, dict) and json.get(\"error_code\"):\n logging.warning(\"Facebook error: %d: %r\", json[\"error_code\"],\n json.get(\"error_msg\"))\n callback(None)\n return\n callback(json)", "def decode_response(self, response):\n url = response.url\n if response.status_code not in (200, 201, 202, 304):\n http_status_code = response.status_code\n raise HomityHubClientError('Got HTTP response code %d - %s for %s, %s' %\n (http_status_code,\n self.RESPONSE_CODES.get(http_status_code,\n 'Unknown!'),\n url,\n response.text))\n\n try:\n json_data = json.loads(response.text)\n return json_data\n except:\n return \"\"", "def parse_json(response):\r\n return json.loads(response.content)", "def _deserialize_response(self, response):\n text = response.content.decode(errors='replace')\n text = _remove_control_characters(text)\n doc = json.loads(text, cls=_TransmissionJSONDecoder)\n\n if doc['result'] != 'success':\n raise TransmissionError(\"Request failed: '%s'\" % doc['result'])\n\n if doc['tag'] != self.tag:\n raise TransmissionError(\"Tag mismatch: (got %d, expected %d)\" % (doc['tag'], self.tag))\n else:\n self.tag += 1\n\n if 'arguments' in doc:\n return doc['arguments'] or None\n\n return None", "def handle_api_error(resp):\n content = yield resp.json()\n\n headers = HeaderWrapper(resp.headers)\n\n try:\n err = content['error']\n except (KeyError, TypeError):\n raise error.APIError(\n \"Invalid response object from API: %r (HTTP response code \"\n \"was %d)\" % (content, resp.code),\n resp, resp.code, content, headers)\n\n if resp.code in [400, 404]:\n raise error.InvalidRequestError(\n err.get('message'), err.get('param'),\n resp, resp.code, content, headers)\n elif resp.code == 401:\n raise error.AuthenticationError(\n err.get('message'),\n resp, resp.code, content, headers)\n elif resp.code == 402:\n raise error.CardError(\n err.get('message'), err.get('param'), err.get('code'),\n content, resp.code, resp, headers)\n else:\n raise error.APIError(\n err.get('message'), content, resp.code, resp, headers)", "def _load_from_json(self, data):\n if \"errors\" in data:\n # TODO: handle responses with more than one error\n data = data[\"errors\"][0]\n self.code = data[\"code\"]\n if \"message\" in data:\n self.message = data[\"message\"]\n else:\n self.message = data[\"detail\"]", "def handle_error_response(resp):\n error_message = ''\n error_message_with_reason = ''\n try:\n error_message = (\n resp.json()\n .get('fireeyeapis', {})\n .get('description', '')\n .strip()\n )\n error_message = error_message.replace('\\n', '')\n if error_message:\n error_message_with_reason = f'Reason: {error_message}'\n except ValueError: # ignoring json parsing errors\n pass\n if resp.headers.get('Content-Type', '') == 
CONTENT_TYPE_ZIP:\n error_message = error_message_with_reason = resp.text\n\n status_code_messages = {\n 400: f\"{MESSAGES['BAD_REQUEST_ERROR']} {error_message_with_reason}\",\n 401: MESSAGES['AUTHENTICATION_ERROR'],\n 403: error_message,\n 404: error_message,\n 406: error_message,\n 407: MESSAGES['PROXY_ERROR'],\n 500: MESSAGES['INTERNAL_SERVER_ERROR'],\n 503: MESSAGES['INTERNAL_SERVER_ERROR'],\n }\n\n if resp.status_code in status_code_messages:\n demisto.debug(\n f'Response Code: {resp.status_code}, Reason: {status_code_messages[resp.status_code]}'\n )\n raise DemistoException(status_code_messages[resp.status_code])\n else:\n raise DemistoException(resp.raise_for_status())", "def parse(self, payload):\n payload = json.loads(payload)\n \n if payload['response'] in self.possible_responses:\n return self.possible_responses[payload['response']](payload)\n else:\n print 'Response not valid'", "def _parse_response(self, future, response):\n if response.error:\n logging.warning(\"HTTP error from Github get user: %s\", response.error)\n future.set_exception(AuthError('Github auth get user info error: %s' % str(response)))\n return\n try:\n json = tornado.escape.json_decode(response.body)\n except Exception:\n logging.warning(\"Invalid JSON from Github: %r\", response.body)\n future.set_exception(AuthError('Invalid JSON from Github: %s' % str(response)))\n return\n\n if isinstance(json, dict) and json.get(\"error_code\"):\n logging.warning(\"Github error: %d: %r\", json[\"error_code\"],\n json.get(\"error_msg\"))\n future.set_exception(AuthError(\"Github error: %d: %r\" % ( json[\"error_code\"],\n json.get(\"error_msg\")) ) )\n return\n future.set_result(json)", "def handle_rest_api_result(result):\n\n if (result.status_code < 200) or (result.status_code > 299):\n try:\n json_result = result.json()\n except ValueError:\n raise VMRayRESTAPIError(\"API returned error {}: {}\".format(result.status_code, result.text),\n status_code=result.status_code)\n\n raise VMRayRESTAPIError(json_result.get(\"error_msg\", \"Unknown error\"), status_code=result.status_code)", "def _process_url(self, url):\n response = requests.get(url, timeout=self.TIMEOUT)\n try:\n ret = response.json()\n except JSONDecodeError:\n self.log.exception(\"JSONDecodeError, response: %r, response.text: %r\", response, response.text)\n ret = {\"error\": \"The api broke.\"}\n return ret", "def json_or_error(response):\n if 200 <= response.status_code < 300:\n if response.content:\n return response.json()\n else:\n # Response has no body. 
Return a status in a way that is consistent with other requests\n return {\n 'status': 'SUCCESS',\n 'httpStatusCode': response.status_code,\n 'httpStatus': httplib.responses[response.status_code],\n }\n else:\n raise JsonApiError('API request to {} failed with HTTP status {}: {}'.format(\n response.url, response.status_code, response.text))", "def __handle_response(self, response: requests.Response) -> Union[Dict, List]:\n status = response.status_code\n content = response.content.decode(\"utf-8\")\n\n if 200 <= status <= 299:\n return json.loads(content) if content else {}\n elif status == 400:\n raise exceptions.BadRequest(response, content)\n elif status == 401:\n raise exceptions.UnauthorizedAccess(response, content)\n elif status == 403:\n raise exceptions.ForbiddenAccess(response, content)\n elif status == 404:\n raise exceptions.ResourceNotFound(response, content)\n elif status == 405:\n raise exceptions.MethodNotAllowed(response, content)\n elif 500 <= status <= 599:\n raise exceptions.ServerError(response, content)\n else:\n raise exceptions.ConnectionError(response, content)", "def _parse_json_response(self, json_response, expect_errors=False):\n if not expect_errors:\n self.assertEqual(json_response.status_int, 200)\n\n self.assertEqual(\n json_response.content_type, 'application/javascript')\n self.assertTrue(json_response.body.startswith(feconf.XSSI_PREFIX))\n\n return json.loads(json_response.body[len(feconf.XSSI_PREFIX):])", "def handle_api_error(self, response):\n code = response.status_code\n self.__log(f'Handling API error with status code {code}.', 'error')\n if code == 401:\n self.__log(f'Invalid credentials. Please make sure your token is correct.', 'error')\n raise InvalidCredentialsError\n if code == 404:\n self.__log(f'File not found on query. Make sure query URL is correct and retry.', 'error')\n raise FileNotFoundError\n if code == 422:\n content = json.loads(response.content)\n for error in content['errors']:\n self.__log(f'API could not process the request. Message: {error[\"message\"]}.', 'error')\n raise UnprocessableRequestError(f'Issue with field {error[\"field\"]}: {error[\"message\"]}')\n if code == 429:\n self.__log(f'Monthly request limits exceeded. Upgrade billing or change token.', 'error')\n raise MonthlyRequestLimitExceededError\n self.__log(f'Response for code: \"{code}\" was unhandled by wrapper. Sorry to not be more helpful.', 'error')\n raise UnknownApiError(\"An unhandled API exception occurred\")" ]
[ "0.8096695", "0.78386664", "0.77561957", "0.77350914", "0.7730008", "0.7284812", "0.7064459", "0.6913496", "0.68599725", "0.68185", "0.6777948", "0.6726493", "0.6713444", "0.6688297", "0.6655507", "0.66511434", "0.6546489", "0.6541335", "0.6493854", "0.6456989", "0.64395887", "0.6422299", "0.641193", "0.6404785", "0.64020723", "0.63876605", "0.6322361", "0.6309869", "0.62865573", "0.6278071" ]
0.8244902
0
Sign the API call by generating an "Authentication" header. This method will add headers to the request object and remove auth_token, client_id, and client_secret from the parameters if they exist.
def sign_request(self, request, api_call, params):
    for key, value in params.items():
        params[key] = value.encode('utf-8')

    # Do not POST authentication parameters. Use them to create an
    # authentication header instead.
    access_token = params.pop('access_token', None)
    client_id = params.pop('client_id', None)
    client_secret = params.pop('client_secret', None)

    # create the authorization header
    if access_token:
        request.add_header("Authorization", "OAuth {}".format(access_token))
    else:
        timestamp = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
        data = "{}\n{}\n".format(api_call, timestamp)
        if params:
            kv_str = ["{}={}".format(k, v) for k, v in params.iteritems()]
            kv_str.sort()
            data = data + "\n".join(kv_str) + "\n"
        sha1_str = hmac.new(client_secret, data, sha1).digest()
        hash_str = b64encode(sha1_str)
        request.add_header("Date", timestamp)
        request.add_header("Authorization", "Signature {}:{}".format(client_id, hash_str))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _addAuthenticationToRequestHeader(request, client):\n request.addAuthorization(client.id, client.secret)", "def authenticate(self):\n\n headers = {\n 'Authorization': 'Bearer ' + self.access_token,\n 'ClientId': self.client_id,\n }\n self.headers.update(headers)", "def apply(cls, http_request, Configuration):\n # If this is API Key based authentication, we add the apiKey header\n if Configuration.api_key is not None:\n http_request.headers['apikey'] = Configuration.api_key\n return\n\n # If this is SessionId based authentication, we add the session-id header\n if Configuration.session_id is not None:\n http_request.headers['session-id'] = Configuration.session_id\n return\n\n # If this is Open-Id based authentication, we add the open-id-token header\n if Configuration.open_id_token is not None:\n http_request.headers['open-id-token'] = Configuration.open_id_token\n return\n\n cls.check_auth(Configuration)\n token = Configuration.auth_token.access_token\n token_type = Configuration.auth_token.token_type\n http_request.headers['Authorization'] = token_type+\" \"+token", "def mocked_sign(self, *args, **kwargs):\r\n # self is <oauthlib.oauth1.rfc5849.Client object> here:\r\n __, headers, __ = saved_sign(self, *args, **kwargs)\r\n # we should replace nonce, timestamp and signed_signature in headers:\r\n old = headers[u'Authorization']\r\n old_parsed = OrderedDict([param.strip().replace('\"', '').split('=') for param in old.split(',')])\r\n old_parsed[u'OAuth oauth_nonce'] = mocked_nonce\r\n old_parsed[u'oauth_timestamp'] = mocked_timestamp\r\n old_parsed[u'oauth_signature'] = mocked_signature_after_sign\r\n headers[u'Authorization'] = ', '.join([k+'=\"'+v+'\"' for k, v in old_parsed.items()])\r\n return None, headers, None", "def __call__(self, request):\n request.headers['Authorization'] = f'Token {self.token}'\n return request", "def auth_headers(self, path, payload=\"\"):\n rand = hexlify(Random.new().read(16))\n auth = self.souma.sign(\"\".join([self.souma.id, rand, path, payload]))\n return [(\"Glia-Rand\", rand), (\"Glia-Auth\", auth), (\"Glia-Souma\", self.souma.id)]", "def __call__(self, r):\n r.headers['Authorization'] = 'OAuth ' + self._access_token\n return r", "def __call__(self, request):\r\n # These checkings are necessary because type inconsisntecy of requests library\r\n # See request Github issue #230 https://github.com/kennethreitz/requests/pull/230\r\n if not request.params:\r\n request.params = {}\r\n if not request.data:\r\n request.data = {}\r\n if isinstance(request.params, list):\r\n request.params = dict(request.params)\r\n if isinstance(request.data, list):\r\n request.data = dict(request.data)\r\n\r\n # Dictionary to OAuth1 signing params\r\n request.oauth_params = {}\r\n\r\n # Adding OAuth params\r\n request.oauth_params['oauth_consumer_key'] = self.consumer.key\r\n request.oauth_params['oauth_timestamp'] = str(int(time.time()))\r\n request.oauth_params['oauth_nonce'] = str(random.randint(0, 100000000))\r\n request.oauth_params['oauth_version'] = self.OAUTH_VERSION\r\n if self.token:\r\n request.oauth_params['oauth_token'] = self.token.key\r\n if 'oauth_verifier' in request.data:\r\n request.oauth_params['oauth_verifier'] = request.data.pop('oauth_verifier')\r\n request.oauth_params['oauth_signature_method'] = self.signature.name\r\n\r\n # oauth_callback is an special parameter, we remove it out of the body\r\n # If it needs to go in the body, it will be overwritten later, otherwise not\r\n if 'oauth_callback' in request.data:\r\n 
request.oauth_params['oauth_callback'] = request.data.pop('oauth_callback')\r\n if 'oauth_callback' in request.params:\r\n request.oauth_params['oauth_callback'] = request.params.pop('oauth_callback')\r\n\r\n request.data_and_params = request.oauth_params.copy()\r\n request.oauth_params['oauth_signature'] = self.signature.sign(request, self.consumer, self.token)\r\n request.data_and_params['oauth_signature'] = request.oauth_params['oauth_signature']\r\n\r\n if self.header_auth:\r\n request.headers['Authorization'] = self.authorization_header(request.oauth_params)\r\n elif request.method in (\"GET\", \"DELETE\"):\r\n request.url = self.to_url(request)\r\n elif ('Content-Type' not in request.headers or \\\r\n request.headers['Content-Type'] != 'application/x-www-form-urlencoded') \\\r\n and not isinstance(request.data, basestring):\r\n # You can pass a string as data. See issues #10 and #12\r\n request.url = self.to_url(request)\r\n request.data = {}\r\n else:\r\n request.data = request.data_and_params\r\n\r\n return request", "def add_auth_to_headers(self):\n if not hasattr(self, \"headers\"):\n self.headers = {\"Content-Type\": \"application/json\"}\n\n login = {\"account_number\": self.account[\"account_number\"],\n \"pin\": self.account[\"pin\"]}\n token = json.loads(self.client.post(\n \"/accounts/login\",\n data=json.dumps(login),\n headers=self.headers).get_data())[\"token\"]\n self.headers[\"Authorization\"] = \"Bearer \" + token", "def _request_token(self):\n params = {\n 'grant_type': 'client_credentials',\n 'client_id': self.client_id,\n 'client_secret': self.client_secret\n }\n\n response = self._http_request(\n method='POST',\n headers={'Content-Type': 'application/x-www-form-urlencoded'},\n full_url=self.auth_url,\n data=params\n )\n access_token = response.get('access_token')\n auth_header = {'Authorization': f'Bearer {access_token}'}\n return auth_header", "def _sign(self, oauth_payload, request):\n\t\t# merge params\n\t\t# use oauth_payload to update request params might avoid \n\t\t# some oauth params's accidental overriding\n\t\tpayload = dict( request.params )\n\t\tpayload.update( oauth_payload )\n\n\t\t# here I assume that all keys contain only 'a-zA-Z_.-'\n\t\t# thus there is no necessity to percent-encode them\n\t\t# will now sort them according to their original value\n\n\t\tkeylist = sorted( payload.keys() )\n\t\trawlist = []\n\t\tfor k in keylist:\n\t\t\tencoded_value = percent_encode( payload[k] )\n\t\t\trawlist.append( \"%s=%s\" % (k, encoded_value) )\n\n\t\t# craft base string\n\t\tbase_string = request.method.upper()\n\t\tbase_string += '&'\n\t\tbase_string += percent_encode(request.base_url)\n\t\tbase_string += '&'\n\t\tbase_string += percent_encode( '&'.join( rawlist ) )\n\n\t\tself._print( \"Base string:\\n\" + base_string )\n\t\t# craft signing key\n\t\tif self.has_user():\n\t\t\tsigning_key = \"%s&%s\" % ( percent_encode(self.secret), percent_encode(self.a_secret) )\n\t\telse:\n\t\t\tsigning_key = \"%s&%s\" % ( percent_encode(self.secret), percent_encode(self.token_secret) )\n\n\t\t# sign base_string\n\t\thashed = hmac.new(signing_key, base_string, hashlib.sha1)\n\t\tsignature = binascii.b2a_base64(hashed.digest())[:-1]\n\t\t\n\t\t# append signature field\n\t\toauth_payload[\"oauth_signature\"] = signature\n\n\t\t# prepare relevant oauth values\n\t\toauth_entry = []\n\t\tfor k in oauth_payload.keys():\n\t\t\tencoded_value = percent_encode( oauth_payload[k] )\n\t\t\toauth_entry.append( '%s=\"%s\"' % (k, encoded_value) )\n\n\t\toauth_str = 'OAuth ' + 
','.join(oauth_entry)\n\t\tself._print( \"OAuth header:\\n\" + oauth_str )\n\t\t# field crafted\n\t\treturn { \"Authorization\" : oauth_str }", "def sign(self, cred):\n desc = self.descriptor()\n key = cred.secret_key.encode(\"utf-8\")\n hasher = hmac.new(key, desc.encode(\"utf-8\"), hashlib.sha1)\n sign = b64encode(hasher.digest()).decode()\n self.headers[\"Authorization\"] = \"AWS %s:%s\" % (cred.access_key, sign)\n return sign", "def build_header(self):\n authstring = \"Bearer \" + self.auth_token\n header = {\n \"Authorization\": authstring,\n \"Content-Type\": \"application/json\",\n \"User-Agent\": self.user_agent,\n \"Accept-Encoding\": \"gzip\"\n }\n return header", "def __call__(self, r):\n r.headers['Authorization'] = 'Bearer %s' % self.get_access_token()\n return r", "def request_http_header( self ) -> dict:\n return {'content-type': 'application/json','Authorization':f'NLAuth nlauth_account={self._acct_number},nlauth_email={self._auth_email},nlauth_signature={self._acct_signature},nlauth_role=1090'}", "def buildHeader(self):\n if self.key:\n userString = self.user+b\":\"+self.key\n else:\n userString = self.user+b\":\"\n \n encodedUserString = b64encode(userString)\n decodedUserString = encodedUserString.decode(\"ascii\")\n self.basicAuthHeader = {\"Authorization\": \"Basic \" + decodedUserString}", "def __header_base64(self):\n header_base64 = base64.b64encode(f'{self.client_id}:{self.client_secret}'.encode('ascii'))\n header_base64 = str(header_base64).split(\"'\")[1]\n return {'Authorization': f'Basic {header_base64}'}", "def authenticate(self, api_key):\n self.headers['x-rapidapi-key'] = api_key", "def create_auth_header(api_token):\n return {'Authorization': f'token {api_token}'}", "def requestToken(self):\n #################\n # BEGIN ROUTINE #\n #################\n # clear everything\n self.clear()\n # initialization\n self.request_oauth_nonce = self._generate_nonce()\n self.request_oauth_timestamp = self._generate_timestamp()\n # create Signature Base String\n method = \"POST\"\n url = self.getRequestTokenURL()\n query_dict = {\"oauth_callback\": self.CALLBACK_URL,\n \"oauth_consumer_key\": self.API_KEY,\n \"oauth_nonce\": self.request_oauth_nonce,\n \"oauth_signature_method\": self.signature_method,\n \"oauth_timestamp\": self.request_oauth_timestamp,\n \"oauth_version\": self.version,\n }\n query_string = self._quote(self._urlencode(query_dict))\n signature_base_string = \"&\".join([self._quote(method), self._quote(url), query_string])\n # create actual signature\n hashed = hmac.new(self._quote(self.API_SECRET) + \"&\", signature_base_string, sha)\n signature = binascii.b2a_base64(hashed.digest())[:-1]\n # it is time to create the heaader of the http request that will be sent\n header = 'OAuth realm=\"https://rightsignature.com\", '\n header += 'oauth_nonce=\"%s\", '\n header += 'oauth_callback=\"%s\", '\n header += 'oauth_signature_method=\"%s\", '\n header += 'oauth_timestamp=\"%d\", '\n header += 'oauth_consumer_key=\"%s\", '\n header += 'oauth_signature=\"%s\", '\n header += 'oauth_version=\"%s\"'\n header = header % (self.request_oauth_nonce, self._quote(self.CALLBACK_URL),\n self.signature_method, self.request_oauth_timestamp,\n self._quote(self.API_KEY), self._quote(signature), self.version)\n\n\n # next step is to establish an HTTPS connection through the LinkedIn API\n # and fetch the request token.\n connection = httplib.HTTPSConnection(self.API_ENDPOINT)\n connection.request(method, self.REQUEST_TOKEN_URL, body = self._urlencode(query_dict), headers = 
{'Authorization': header})\n response = connection.getresponse()\n if response is None:\n self.request_oauth_error = \"No HTTP response received.\"\n connection.close()\n return False\n\n response = response.read()\n connection.close()\n\n oauth_problem = self._get_value_from_raw_qs(\"oauth_problem\", response)\n if oauth_problem:\n self.request_oauth_error = oauth_problem\n return False\n\n self.request_token = self._get_value_from_raw_qs(\"oauth_token\", response)\n self.request_token_secret = self._get_value_from_raw_qs(\"oauth_token_secret\", response)\n return True", "def create_authorization_header(self, **kwargs):\n return {\"Authorization\": \"Bearer {}\".format(self.create_jwt(**kwargs))}", "def get_headers(self):\r\n return {\r\n 'authenticate': {\r\n 'username': self.username,\r\n 'apiKey': self.api_key,\r\n }\r\n }", "def __call__(self, request):\n self._logger.debug(f'__call__, {request.url} adding Authorization header')\n request.headers[\"Authorization\"] = self._get_auth_value()\n request.register_hook(\"response\", self._handle_401)\n return request", "def _oauth_sign(self, url, body, content_type=u'application/x-www-form-urlencoded', method=u'POST'):\r\n client_key = self.server.config.get('client_key', self.DEFAULT_CLIENT_KEY)\r\n client_secret = self.server.config.get('client_secret', self.DEFAULT_CLIENT_SECRET)\r\n client = oauthlib.oauth1.Client(\r\n client_key=unicode(client_key),\r\n client_secret=unicode(client_secret)\r\n )\r\n headers = {\r\n # This is needed for body encoding:\r\n 'Content-Type': content_type,\r\n }\r\n\r\n # Calculate and encode body hash. See http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html\r\n sha1 = hashlib.sha1()\r\n sha1.update(body)\r\n oauth_body_hash = unicode(base64.b64encode(sha1.digest())) # pylint: disable=too-many-function-args\r\n params = client.get_oauth_params()\r\n params.append((u'oauth_body_hash', oauth_body_hash))\r\n mock_request = mock.Mock(\r\n uri=unicode(urllib.unquote(url)),\r\n headers=headers,\r\n body=u\"\",\r\n decoded_body=u\"\",\r\n oauth_params=params,\r\n http_method=unicode(method),\r\n )\r\n sig = client.get_oauth_signature(mock_request)\r\n mock_request.oauth_params.append((u'oauth_signature', sig))\r\n new_headers = parameters.prepare_headers(mock_request.oauth_params, headers, realm=None)\r\n return new_headers['Authorization']", "def add_auth(self, http_request):\r\n pass", "def sign_request(request, token, secret):\n if isinstance(token, unicode):\n token = token.encode(\"ascii\")\n if isinstance(secret, unicode):\n secret = secret.encode(\"ascii\")\n # Use MAC parameters from the request if present.\n # Otherwise generate some fresh ones.\n params = parse_authz_header(request, {})\n if params and params.pop(\"scheme\") != \"MAC\":\n params.clear()\n params[\"id\"] = token\n if \"ts\" not in params:\n params[\"ts\"] = str(int(time.time()))\n if \"nonce\" not in params:\n params[\"nonce\"] = os.urandom(5).encode(\"hex\")\n # Calculate the signature and add it to the parameters.\n params[\"mac\"] = get_mac_signature(request, secret, params)\n # Serialize the parameters back into the authz header.\n # WebOb has logic to do this that's not perfect, but good enough for us.\n request.authorization = (\"MAC\", params)", "def add_headers():\n # the actual access token -\n g.x_tapis_token = request.headers.get('X-Tapis-Token')\n\n # the tenant associated with the subject of the request; used, for instance, when the subject is different\n # from the subject in the actual access_token 
(for example, when the access_token represents a service account).\n g.x_tapis_tenant = request.headers.get('X-Tapis-Tenant')\n\n # the user associated with the subject of the request. Similar to x_tapis_tenant, this is used, for instance, when\n # the subject is different from the subject in the actual access_token (for example, when the access_token\n # represents a service account).\n g.x_tapis_user = request.headers.get('X-Tapis-User')\n\n # a hash of the original user's access token. this can be used, for instance, to check if the original user's\n # access token has been revoked.\n g.x_tapis_user_token_hash = request.headers.get('X-Tapis-User-Token-Hash')", "def _create_auth_headers(self):\n auth_headers = {**self.get_headers()}\n auth_headers['Authorization'] = 'Bearer ' + self.get_access_token()\n return auth_headers", "def apply(self, headers):\n headers['Authorization'] = 'Bearer ' + self._metadata_service.auth_token", "def __call__(self, r):\n # modify and return the request\n nonce = ExchBitmexRestApiConnector.generate_nonce()\n r.headers['api-nonce'] = str(nonce)\n r.headers['api-key'] = self.apiKey\n r.headers['api-signature'] = ExchBitmexRestApiConnector.generate_signature(\n self.apiSecret, r.method, r.url, nonce, r.body or '')\n return r" ]
[ "0.68053204", "0.666631", "0.62823254", "0.6111792", "0.6090263", "0.6044804", "0.60245746", "0.6016039", "0.5999951", "0.59911007", "0.5989135", "0.59618855", "0.5948747", "0.5922021", "0.59038025", "0.5902988", "0.5885357", "0.5798895", "0.57671404", "0.5763989", "0.5737275", "0.5731331", "0.5729528", "0.56987965", "0.5685042", "0.5679649", "0.56733185", "0.56397164", "0.56256324", "0.562066" ]
0.7390419
0
Calculates softmax across a desired axis. Arguments
def softmax(x: jnp.DeviceArray, *, axis: int = 0) -> jnp.DeviceArray:
    return jnp.exp(x) / jnp.expand_dims(jnp.sum(jnp.exp(x), axis=axis), axis)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def softmax(x):\r\n e_x = np.exp(x - np.expand_dims(np.max(x, axis=-1), axis=-1))\r\n return e_x / np.expand_dims(e_x.sum(axis=-1), axis=-1) # only difference\r", "def softmax(x):\n \"\"\"\"\"\"\n return exp(x) / sum(exp(x), axis=0)", "def softmax(x):\n return np.exp(x)/np.sum(np.exp(x),axis=0)", "def softmax(x):\n return np.exp(x)/np.sum(np.exp(x),axis=0)", "def softmax(X, axis):\n y = np.atleast_2d(X)\n # subtract the max for numerical stability\n y = y - np.expand_dims(np.max(y, axis = axis), axis)\n # exponentiate y\n y = np.exp(y)\n # take the sum along the specified axis\n ax_sum = np.expand_dims(np.sum(y, axis = axis), axis)\n # finally: divide elementwise\n p = y / ax_sum\n return p", "def softmax(x):\n #pass # TODO: Compute and return softmax(x)\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x): \n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=1, keepdims=True)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)\n # return ( x / np.sum(x, axis=0) )", "def softmax(x: npt.NDArray) -> npt.NDArray:\n row_wise_max = np.max(x, axis=1).reshape(-1, 1)\n exp_x = np.exp(x - row_wise_max)\n return exp_x / np.sum(exp_x, axis=1).reshape(-1, 1)", "def softmax_my(x):\n dim = 1\n try:\n dim = x.ndim\n except AttributeError:\n return cal_1D_softmax(x)\n if dim == 1:\n return cal_1D_softmax(x)\n elif dim == 2:\n return cal_2D_softmax(x)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=1, keepdims=True)", "def softmax(x):\n # (n_samples, n_classes)\n if len(x.shape) == 2:\n row_max = np.max(x, axis=1)\n x -= row_max.reshape((x.shape[0], 1))\n x = np.exp(x)\n row_sum = np.sum(x, axis=1)\n x /= row_sum.reshape((x.shape[0], 1))\n # (n_samples, n_tasks, n_classes)\n elif len(x.shape) == 3:\n row_max = np.max(x, axis=2)\n x -= row_max.reshape(x.shape[:2] + (1,))\n x = np.exp(x)\n row_sum = np.sum(x, axis=2)\n x /= row_sum.reshape(x.shape[:2] + (1,))\n return x", "def softmax(x):\n xx = x\n x = x.reshape((-1, x.shape[-1]))\n e_x = np.exp(x - np.max(x, 1).reshape(-1, 1))\n res = e_x / e_x.sum(axis=1).reshape(-1, 1)\n return res.reshape(xx.shape)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(self, x):\n if x.ndim == 1:\n x = x.reshape((1, -1))\n max_x = np.max(x, axis=1).reshape((-1, 1))\n exp_x = np.exp(x - max_x)\n return exp_x / np.sum(exp_x, axis=1).reshape((-1, 1))", "def softmax(x):\n orig_shape = x.shape\n\n if len(x.shape) > 1:\n # Matrix\n tmp = np.max(x, axis=1)\n x -= tmp.reshape((x.shape[0], 1))\n x = np.exp(x)\n tmp = np.sum(x, axis=1)\n x /= tmp.reshape((x.shape[0], 1))\n else:\n # Vector\n tmp = np.max(x)\n x -= tmp\n x = np.exp(x)\n tmp = np.sum(x)\n x /= tmp\n\n assert x.shape == orig_shape\n return x", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=1)", "def softmax(x, axis=1):\n sf = np.exp(x)\n sf = sf/np.sum(sf, axis=axis)[:,np.newaxis]\n return sf", "def softmax(x):\n e_x = np.exp((x.transpose()-x.max(axis=1)).transpose())\n return e_x / np.sum(e_x,axis=1)[:,None]", "def 
softmax(x):\n e_x = np.exp(x - np.max(x))\n return e_x / e_x.sum(axis=0)", "def softmax(val, axis=-1):\n exp = np.exp(val - np.amax(val, axis=axis, keepdims=True))\n return exp / np.sum(exp, axis=axis, keepdims=True)", "def softmax(x):\n e_x = np.exp(x - np.max(x, axis=1).reshape(-1, 1))\n return e_x / np.sum(e_x, axis=1).reshape(-1, 1)" ]
[ "0.82091117", "0.8203848", "0.819648", "0.819648", "0.81928647", "0.81735945", "0.81680465", "0.8144589", "0.8144145", "0.813783", "0.8125608", "0.8100958", "0.80981773", "0.80939096", "0.80836487", "0.80836487", "0.80836487", "0.80836487", "0.80836487", "0.80836487", "0.80836487", "0.80836487", "0.80833846", "0.8066945", "0.8065284", "0.8054192", "0.80532527", "0.80067277", "0.80038804", "0.8001597" ]
0.8248852
0
Calculates logsoftmax across a desired axis. Arguments
def log_softmax(x: jnp.DeviceArray, *, axis: int = 0) -> jnp.DeviceArray:
    return x - jnp.expand_dims(jnp.log(jnp.sum(jnp.exp(x), axis=axis)), axis)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def log_softmax(input, dim, inplace=False):\n return FunctionLib.apply(\n 'LogSoftmax', input.device, [input],\n outputs=[input if inplace else None], axis=dim)", "def log_softmax_nd(logits, axes=(-1,)):\n logits -= tf.reduce_max(logits, axis=axes, keepdims=True)\n return logits - tf.reduce_logsumexp(logits, axis=axes, keepdims=True)", "def log_softmax_v2(logits, axis=None, name=None):\n if axis is None:\n axis = -1\n return _wrap_2d_function(logits, gen_nn_ops.log_softmax, axis, name)", "def log_softmax(logits, axis=None, name=None, dim=None):\n axis = deprecation.deprecated_argument_lookup(\"axis\", axis, \"dim\", dim)\n if axis is None:\n axis = -1\n return _wrap_2d_function(logits, gen_nn_ops.log_softmax, axis, name)", "def forward(self, x):\n return F.log_softmax(self.proj(x), dim=-1)", "def convert_logsoftmax(g, op, block):\n\n x = g.get_node(op.input(\"X\")[0])\n axis = op.attr(\"axis\")\n ndim = len(infer_shape(x))\n if axis < 0:\n axis += ndim\n m = _op.max(x, [axis], keepdims=True)\n e = _op.exp(x - m)\n s = _op.sum(e, [axis], keepdims=True)\n out = x - m - _op.log(s)\n g.add_node(op.output(\"Out\")[0], out)", "def softmax_ndarray(logits: jnp.DeviceArray) -> jnp.DeviceArray:\n assert len(logits.shape) == 2\n # Normalise for better stability.\n s = jnp.max(logits, axis=1, keepdims=True)\n e_x = jnp.exp(logits - s)\n return e_x / jnp.sum(e_x, axis=1, keepdims=True)", "def softmax(x):\n e_x = np.exp(x)# - np.max(x))\n print(e_x.sum())\n print(e_x)\n print(e_x / e_x.sum())\n return np.log(e_x / e_x.sum())", "def logsumexp(x, axis=None):\n xmax = K.max(x, axis=axis, keepdims=True)\n xmax_ = K.max(x, axis=axis)\n return xmax_ + K.log(K.sum(K.exp(x - xmax), axis=axis))", "def log_prob_from_logits(x):\n axis = len(x.get_shape())-1\n m = tf.reduce_max(x, axis, keep_dims=True)\n return x - m - tf.log(tf.reduce_sum(tf.exp(x-m), axis, keep_dims=True))", "def log_prob_from_logits(x):\n axis = len(x.get_shape()) - 1\n m = tf.reduce_max(x, axis, keep_dims=True)\n return x - m - tf.log(tf.reduce_sum(tf.exp(x - m), axis, keep_dims=True))", "def softmax(x):\n \"\"\"\"\"\"\n return exp(x) / sum(exp(x), axis=0)", "def softmax_v2(logits, axis=None, name=None):\n if axis is None:\n axis = -1\n return _wrap_2d_function(logits, gen_nn_ops.softmax, axis, name)", "def forward(self, x):\n out = self.net(x)\n out = self.avg(out)\n out = out.view(out.size(0), -1)\n out = self.fc1(out)\n\n return func.log_softmax(out, dim=-1)", "def softmax_my(x):\r\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x): \n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax_loss(x, y):\n\n eps = 1e-5\n \n N,C = x.shape\n p = softmax(x)\n llikelihood = -np.log(p[range(N),y] + eps)\n# print(llikelihood)\n loss = np.sum(llikelihood) / N\n\n dx = p\n dx[range(N),y] -= 1\n dx = dx/N\n \n return loss, dx", "def softmax(x):\n return np.exp(x)/np.sum(np.exp(x),axis=0)", "def softmax(x):\n return np.exp(x)/np.sum(np.exp(x),axis=0)", "def _softmax(self,x):\n e_x = np.exp(x - np.max(x))\n return np.nan_to_num(e_x / np.nan_to_num(e_x.sum(axis=0)))", "def _softmax(x):\n e = K.exp(x - K.max(x, axis=-1, keepdims=True))\n s = K.sum(e, axis=-1, keepdims=True)\n return e / s", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=1, keepdims=True)", "def convert_logsoftmax(node, **kwargs):\n name, input_nodes, attrs = get_inputs(node, kwargs)\n\n # Converting to int\n axis = int(attrs.get(\"axis\", -1))\n temp = attrs.get(\"temperature\", 'None')\n if temp != 'None':\n raise AttributeError(\"LogSoftMax: ONNX 
supports only temperature=None\")\n\n node = onnx.helper.make_node(\n 'LogSoftmax',\n input_nodes,\n [name],\n axis=axis,\n name=name\n )\n return [node]", "def softmax(x):\n #pass # TODO: Compute and return softmax(x)\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=1, keepdims=True)", "def softmax(x):\r\n output = np.exp(x)\r\n return output / np.sum(output, axis=1, keepdims=True)", "def softmax_loss1(x, y):\n # tmp = np.max(x, axis=1, keepdims=True)\n shifted_logits = x - np.max(x, axis=1, keepdims=True)\n Z = np.sum(np.exp(shifted_logits), axis=1, keepdims=True)\n log_probs = shifted_logits - np.log(Z)\n probs = np.exp(log_probs)\n N = x.shape[0]\n # tmp2 = np.arange(N)\n tmp3 = log_probs[np.arange(N), y]\n # tmp4 = log_probs[[0,1,2],[2,5,0]]\n loss = -np.sum(log_probs[np.arange(N), y]) / N\n dx = probs.copy()\n dx[np.arange(N), y] -= 1\n dx /= N\n return loss, dx", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)" ]
[ "0.79187274", "0.78436774", "0.78026515", "0.7745838", "0.7555003", "0.7547153", "0.7151152", "0.7116799", "0.70571184", "0.70393515", "0.7036909", "0.6993901", "0.69822043", "0.6971128", "0.6970583", "0.69682556", "0.69494355", "0.6942103", "0.6942103", "0.6929219", "0.69118255", "0.6904405", "0.6900669", "0.6900594", "0.6873833", "0.6871728", "0.68715304", "0.68538356", "0.68538356", "0.68538356" ]
0.8242526
0
Copy contents of one stream into another.
def copyStreamToStream(streamFrom, streamTo, input_length=sys.maxint, offset=0, buffer=2 ** 2 ** 2 ** 2):
    streamFrom.seek(offset, 0)
    nbytes = 0
    while nbytes < input_length:
        chunk = streamFrom.read(min(input_length - nbytes, buffer))
        if not chunk:
            break
        streamTo.write(chunk)
        nbytes += len(chunk)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def copy_to(self, stream, bufsize=None):\n bufsize = bufsize or PRETZEL_BUFSIZE\n if isinstance(stream.write(b''), int):\n # destination stream is synchronous python stream\n try:\n while True:\n stream.write((yield self.read(bufsize)))\n except BrokenPipeError:\n pass\n stream.flush()\n else:\n try:\n while True:\n yield stream.write((yield self.read(bufsize)))\n except BrokenPipeError:\n pass\n yield stream.flush()", "def streamCopy(is_: java.io.InputStream, os: java.io.OutputStream, monitor: ghidra.util.task.TaskMonitor) -> ghidra.formats.gfilesystem.FSUtilities.StreamCopyResult:\n ...", "def copy(self, data, fobject_factory=tempfile.TemporaryFile):\n datastream = fobject_factory()\n self.writestream(data, datastream)\n datastream.seek(0)\n self.copystream(datastream)\n datastream.close()", "def stream_compress(src, dst, blocksize=_STREAM_TO_STREAM_BLOCK_SIZE):\r\n compressor = StreamCompressor()\r\n while True:\r\n buf = src.read(blocksize)\r\n if not buf: break\r\n buf = compressor.add_chunk(buf)\r\n if buf: dst.write(buf)", "def threading_copy(self, data):\n r_fd, w_fd = os.pipe()\n rstream = os.fdopen(r_fd, \"rb\")\n wstream = os.fdopen(w_fd, \"wb\")\n copy_thread = threading.Thread(target=self.copystream, args=(rstream,))\n copy_thread.start()\n self.writestream(data, wstream)\n wstream.close()\n copy_thread.join()", "def copy_file(input, output):\n for f in input:\n while True:\n chunk = f.read(1024)\n if not chunk:\n break\n output.write(chunk)\n output.flush()", "def decompress_stream(src, dst):\n with gzip.GzipFile(fileobj=src, mode='rb') as gz:\n for block in iterfile(gz):\n dst.write(block)", "def stream_decompress(src, dst, blocksize=_STREAM_TO_STREAM_BLOCK_SIZE):\r\n decompressor = StreamDecompressor()\r\n while True:\r\n buf = src.read(blocksize)\r\n if not buf: break\r\n buf = decompressor.decompress(buf)\r\n if buf: dst.write(buf)\r\n decompressor.flush() # makes sure the stream ended well\r", "def copy(self):\r\n copy = StreamDecompressor()\r\n copy._buf, copy._header_found = self._buf, self._header_found\r\n return copy", "def compress_stream(src, dst):\n with gzip.GzipFile(fileobj=dst, mode='wb') as gz:\n for block in iterfile(src):\n gz.write(block)", "def copyTransactionsFrom(self, other, verbose=0):\n ZODB.BaseStorage.copy(other, self, verbose)", "def copy_from_other(self, other):\n self.data = other.data\n self.url = other.url\n self.container_factory = other.container_factory", "def copy_(self, other):\n self.share.copy_(other.share)\n self.encoder = other.encoder", "def inout(input_, output_):\n while True:\n chunk = input_.read(1024)\n if not chunk:\n break\n output_.write(chunk)", "def _copy(self):\n for d in self._current_chunk:\n self.out.write(d)", "def copyFile( src, dest ):\n\tinFile = open( src, 'r' )\n\toutFile = open( dest, 'w' )\n\tfor line in inFile:\n\t\toutFile.write( line )\n\toutFile.close()\n\tinFile.close()", "def set_contents_from_stream(self, fp, headers=None, replace=True,\r\n cb=None, num_cb=10, policy=None,\r\n reduced_redundancy=False, query_args=None):\r\n\r\n provider = self.bucket.connection.provider\r\n if not provider.supports_chunked_transfer():\r\n raise BotoClientError('%s does not support chunked transfer'\r\n % provider.get_provider_name())\r\n\r\n # Name of the Object should be specified explicitly for Streams.\r\n if not self.name or self.name == '':\r\n raise BotoClientError('Cannot determine the destination '\r\n 'object name for the given stream')\r\n\r\n if headers is None:\r\n headers = {}\r\n if policy:\r\n 
headers[provider.acl_header] = policy\r\n\r\n # Set the Transfer Encoding for Streams.\r\n headers['Transfer-Encoding'] = 'chunked'\r\n\r\n if reduced_redundancy:\r\n self.storage_class = 'REDUCED_REDUNDANCY'\r\n if provider.storage_class_header:\r\n headers[provider.storage_class_header] = self.storage_class\r\n\r\n if self.bucket != None:\r\n if not replace:\r\n k = self.bucket.lookup(self.name)\r\n if k:\r\n return\r\n self.send_file(fp, headers, cb, num_cb, query_args,\r\n chunked_transfer=True)", "def copyDataFrom (self, other):\n\n self.outErrorPackets=other.outErrorPackets\n self._myHasOutErrorPackets=other._myHasOutErrorPackets\n \n self.inErrorPackets=other.inErrorPackets\n self._myHasInErrorPackets=other._myHasInErrorPackets\n \n self.inDiscardPackets=other.inDiscardPackets\n self._myHasInDiscardPackets=other._myHasInDiscardPackets\n \n self.outUnicastPackets=other.outUnicastPackets\n self._myHasOutUnicastPackets=other._myHasOutUnicastPackets\n \n self.inMulticastPackets=other.inMulticastPackets\n self._myHasInMulticastPackets=other._myHasInMulticastPackets\n \n self.outBroadcastPackets=other.outBroadcastPackets\n self._myHasOutBroadcastPackets=other._myHasOutBroadcastPackets\n \n self.inBroadcastPackets=other.inBroadcastPackets\n self._myHasInBroadcastPackets=other._myHasInBroadcastPackets\n \n self.outMulticastPackets=other.outMulticastPackets\n self._myHasOutMulticastPackets=other._myHasOutMulticastPackets\n \n self.inUnknownProtocolPackets=other.inUnknownProtocolPackets\n self._myHasInUnknownProtocolPackets=other._myHasInUnknownProtocolPackets\n \n self.outDiscardPackets=other.outDiscardPackets\n self._myHasOutDiscardPackets=other._myHasOutDiscardPackets\n \n self.inUnicastPackets=other.inUnicastPackets\n self._myHasInUnicastPackets=other._myHasInUnicastPackets\n \n self.outOctets=other.outOctets\n self._myHasOutOctets=other._myHasOutOctets\n \n self.inOctets=other.inOctets\n self._myHasInOctets=other._myHasInOctets", "async def _stream_redirect(\n stream: asyncio.StreamReader, file_like_obj, write_str=False\n):\n while not stream.at_eof():\n data = await stream.readline()\n file_like_obj.write(data.decode(\"ascii\") if write_str else data)", "def copy(self, src_path: str, tgt_path: str) -> None:", "def duplicate(self):\n return _StreamIterator(self.stream)", "def cp(src, dest):\n _shutil.copy2(native(src), native(dest))", "def _read_to_buffer(cls, buf, stream):\n # We could read it in one step, but instead we'll read it in chunks to avoid big temporaries.\n # (See below.)\n # buf[:] = stream.read( len(buf) )\n\n # Read data from the stream in chunks\n remaining_bytes = len(buf)\n while remaining_bytes > 0:\n next_chunk_bytes = min( remaining_bytes, VoxelsNddataCodec.STREAM_CHUNK_SIZE )\n chunk_start = len(buf)-remaining_bytes\n chunk_stop = len(buf)-(remaining_bytes-next_chunk_bytes)\n buf[chunk_start:chunk_stop] = stream.read( next_chunk_bytes )\n remaining_bytes -= next_chunk_bytes", "def copyfile(source, dest, buffer_size=1024*1024):\n if not hasattr(source, 'read'):\n source = open(source, 'rb')\n if not hasattr(dest, 'write'):\n dest = open(dest, 'wb')\n while 1:\n copy_buffer = source.read(buffer_size)\n if copy_buffer:\n dest.write(copy_buffer)\n else:\n break\n source.close()\n dest.close()\n return True", "def copyDataFrom (self, other):\n\n self.localTimeString=other.localTimeString\n self._myHasLocalTimeString=other._myHasLocalTimeString\n \n self.utcTimeString=other.utcTimeString\n self._myHasUtcTimeString=other._myHasUtcTimeString\n \n 
self.daylightSavingTime=other.daylightSavingTime\n self._myHasDaylightSavingTime=other._myHasDaylightSavingTime\n \n self.epoch=other.epoch\n self._myHasEpoch=other._myHasEpoch\n \n self.utcOffsetMinutes=other.utcOffsetMinutes\n self._myHasUtcOffsetMinutes=other._myHasUtcOffsetMinutes", "def copyfileobj(fsrc, fdst, length=0):\r\n # Localize variable access to minimize overhead.\r\n if not length:\r\n length = COPY_BUFSIZE\r\n fsrc_read = fsrc.read\r\n fdst_write = fdst.write\r\n total_buf = 0\r\n while True: \r\n buf = fsrc_read(length)\r\n if not buf:\r\n break\r\n fdst_write(buf)\r\n total_buf += len(buf)\r\n ee.emit('onfilecopy', total_buf, size)", "def _copy_stream(self):\n return DataStream(\n data_type=self.data_type,\n name=self.name,\n labels=self.labels.copy(),\n callbacks=self.callbacks.copy(),\n uid=self.uid)", "def readfile(input_stream, offset, size):\n input_stream.seek(offset)\n dest = input_stream.read(size)\n if dest:\n return dest", "def copy(source: str, dest: str):\n source_auth = credentials.authenticate(source)\n dest_auth = credentials.authenticate(dest)\n copier = COPIER_REGISTRY.get_handler(source_auth.scheme + \"+\" + dest_auth.scheme)\n copier.copy(source_auth, dest_auth)", "def run_copy(self, src, dst):\n pass" ]
[ "0.6626283", "0.6452199", "0.61475044", "0.6086651", "0.5963692", "0.57964855", "0.5794431", "0.5788323", "0.5749928", "0.56529135", "0.5546919", "0.55270535", "0.5521073", "0.54890835", "0.5436751", "0.543479", "0.54131347", "0.5392881", "0.53848237", "0.53318536", "0.52919626", "0.52902603", "0.5254756", "0.52496415", "0.5206562", "0.5202671", "0.5199749", "0.5190385", "0.5135456", "0.5119295" ]
0.7215272
0
Print out memory usage statistics.
def print_memory_stats(location_tag="undef"):
    try:
        import psutil
        p = psutil.Process(os.getpid())
        rm, vm = p.get_memory_info()
        print "MEM_STAT (%s) rm=%s, vm=%s" % (location_tag, rm, vm)
    except ImportError:
        print "psutil module not available"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def print_memory_diags(disable_print=False):\n process = psutil.Process(os.getpid())\n memory = process.memory_info().rss/1000000000.0\n if not disable_print:\n logging.info('\\tMemory usage: {:.3f} GB'.format(memory))\n return memory", "def print_current_mem_usage():\n mem = get_current_mem_usage()\n output = \"# Mem usage = {} MiB #\".format(mem)\n print(\"\\n\" + \"-\" * len(output))\n print(output)\n print(\"-\" * len(output) + \"\\n\")", "def gather_info_and_display():\n # Obtain total rss displayed in memory.stat for each group,\n # container and service.\n try:\n output_mem = pipe_command(GREP_CMD, AWK_CMD, cwd=MEMPATH)\n LOG.debug(\n 'command: %s\\n%s',\n \"grep -rs total_rss '/sys/fs/cgroup/memory/' \"\n \"| awk '$2>0{print$0}' \",\n output_mem)\n except subprocess.CalledProcessError as error:\n LOG.error('Could not get total_rss memory, error=%s', error)\n return 1\n\n mem_info = get_meminfo()\n pt_groups = gather_groups_memory(output_mem)\n pt_cont = gather_containers_memory(output_mem)\n pt_serv = sys_service_memory()\n\n # Dump the tables out\n print('\\nPer groups memory usage:')\n\n # Get string to be printed and create list of elements separated by \\n\n list_of_table_lines = pt_groups.get_string().split('\\n')\n\n # Use the first line (+---+-- ...) as horizontal rule to insert later\n horizontal_line = list_of_table_lines[0]\n\n # Print the table, except last two lines ( \"Total\" row + final separator).\n print(\"\\n\".join(list_of_table_lines[:-2]))\n # Print separator, and finally the \"Total\" row.\n print(horizontal_line)\n print(\"\\n\".join(list_of_table_lines[-2:]))\n\n pt_namespc = prettytable.PrettyTable(\n ['Namespace',\n 'Resident Set Size (MiB)',\n ], caching=False)\n pt_namespc.align = 'l'\n pt_namespc.align['Resident Set Size (MiB)'] = 'r'\n\n print('\\nPer namespace memory usage:')\n for n_s in MEMORY['namespaces']:\n pt_namespc.add_row(\n [n_s,\n MEMORY['namespaces'][n_s],\n ])\n print(pt_namespc)\n\n print('\\nPer container memory usage:')\n print(pt_cont)\n\n print('\\nPer service memory usage:')\n print(pt_serv)\n\n base_mebib = 0.0\n k8s_system = 0.0\n k8s_addon = 0.0\n platform_memory_percent = 0.0\n\n # Calculate base memory usage (i.e., normal memory, exclude K8S and VMs)\n # e.g., docker, system.slice, user.slice\n for group in MEMORY['cgroups']:\n if group in BASE_GROUPS:\n base_mebib += float(MEMORY['cgroups'][group])\n\n # K8S platform system usage (essential) and addons usage (non-essential)\n for n_s in MEMORY['namespaces']:\n if n_s in K8S_NAMESPACE_SYSTEM:\n k8s_system += MEMORY['namespaces'][n_s]\n elif n_s in K8S_NAMESPACE_ADDON:\n k8s_addon += MEMORY['namespaces'][n_s]\n\n # Calculate platform memory usage\n platform_mebib = base_mebib + k8s_system\n\n anon_mebib = float(mem_to_mebibytes(\n mem_info['Active(anon)'] + mem_info['Inactive(anon)'])) * KBYTE\n avail_mebib = float(mem_to_mebibytes(\n mem_info['MemAvailable'])) * KBYTE\n total_mebib = float(anon_mebib + avail_mebib)\n\n anon_percent = py2_round(100 * anon_mebib / total_mebib, DECIMAL_DIGITS) # pylint: disable=W1619\n\n reserved_mebib = get_platform_reserved_memory()\n # Calculate platform memory in terms of percent reserved\n if reserved_mebib > 0.0:\n platform_memory_percent = py2_round(\n 100 * platform_mebib / reserved_mebib, DECIMAL_DIGITS) # pylint: disable=W1619\n\n pt_platf = prettytable.PrettyTable(\n ['Reserved',\n 'Platform',\n 'Base',\n 'K8s Platform system',\n 'k8s-addon'\n ], caching=False)\n pt_platf.align = 'l'\n\n pt_platf.add_row(\n [reserved_mebib,\n '{} 
({}%)'.format(platform_mebib, platform_memory_percent),\n base_mebib,\n k8s_system,\n k8s_addon\n ])\n print('\\nPlatform memory usage in MiB:')\n print(pt_platf)\n\n pt_4k = prettytable.PrettyTable(\n ['Anon',\n 'Cgroup-rss',\n 'Available',\n 'Total'\n ], caching=False)\n pt_4k.align = 'l'\n\n pt_4k.add_row(\n ['{} ({}%)'.format(anon_mebib, anon_percent),\n MEMORY['cgroups']['total_rss'],\n avail_mebib,\n total_mebib\n ])\n\n print('\\n4K memory usage in MiB:')\n print(pt_4k)\n\n return 0", "def show_mem_usage():\n gl = sys._getframe(1).f_globals\n vars = {}\n for k, v in list(gl.items()):\n # for pandas dataframes\n if hasattr(v, 'memory_usage'):\n mem = v.memory_usage(deep=True)\n if not np.isscalar(mem):\n mem = mem.sum()\n vars.setdefault(id(v), [mem]).append(k)\n # work around for a bug\n elif isinstance(v, pd.Panel):\n v = v.values\n vars.setdefault(id(v), [sys.getsizeof(v)]).append(k)\n total = 0\n for k, (value, *names) in vars.items():\n if value > 1e6:\n print(names, \"%.3fMB\" % (value / 1e6))\n total += value\n print(\"%.3fMB\" % (total / 1e6))", "def print_mem_usage(usage):\n for region in usage.keys():\n used = usage[region][\"used\"]\n free = usage[region][\"free\"]\n usage_msg = \"{region}:\\n used: {used} bytes\\n free: {free} bytes\"\n usage_msg = usage_msg.format(region=region, used=used, free=free)\n print(usage_msg)", "def show_process_memory( cls, call_msg = \"\", log_level = None, print_it = False ):\n process = psutil.Process(os.getpid()) # import psutil\n mem = process.memory_info().rss\n # convert to mega and format\n mem_mega = mem/( 1e6 )\n msg = f\"{call_msg}process memory = {mem_mega:10,.2f} mega bytes \"\n if print_it:\n print( msg )\n if not ( log_level is None ):\n cls.__logger.log( log_level, msg )\n msg = f\"{mem_mega:10,.2f} mega bytes \"\n return ( mem, msg )", "def test00(self):\n\n # Obtain memory info (only for Linux 2.6.x)\n for line in Path(\"/proc/self/status\").read_text().splitlines():\n if line.startswith(\"VmSize:\"):\n vmsize = int(line.split()[1])\n elif line.startswith(\"VmRSS:\"):\n vmrss = int(line.split()[1])\n elif line.startswith(\"VmData:\"):\n vmdata = int(line.split()[1])\n elif line.startswith(\"VmStk:\"):\n vmstk = int(line.split()[1])\n elif line.startswith(\"VmExe:\"):\n vmexe = int(line.split()[1])\n elif line.startswith(\"VmLib:\"):\n vmlib = int(line.split()[1])\n print(\"\\nWallClock time:\", clock() - self.tref)\n print(\"Memory usage: ******* %s *******\" % self._getName())\n print(f\"VmSize: {vmsize:>7} kB\\tVmRSS: {vmrss:>7} kB\")\n print(f\"VmData: {vmdata:>7} kB\\tVmStk: {vmstk:>7} kB\")\n print(f\"VmExe: {vmexe:>7} kB\\tVmLib: {vmlib:>7} kB\")", "def show_mem(cmd, cnt, args):\n if cpu is None:\n log(\"Load program first\") \n return\n elif len(cpu.memory) == 0:\n log(\"Load program first\") \n return \n chunk = 0\n chunk_count = len(cpu.memory)\n while chunk < chunk_count: \n chunk_start = cpu.memory[chunk][MEMADDR]\n chunk_end = chunk_start + cpu.memory[chunk][MEMSIZE] \n log(\"{:d} {:#x}..{:#x}\".format(chunk, chunk_start, chunk_end)) \n chunk += 1\n if machine == \"ARM\":\n if len(cpu.high_memory) != 0:\n log(\"High memory\")\n for addr in sorted(cpu.high_memory):\n log(\"{:#x}\".format(addr))", "def _mem_report(tensors: Iterable, mem_type: str) -> None:\n print(f\"Storage on {mem_type}\")\n print(\"-\" * LEN)\n total_numel = 0\n total_mem = 0\n visited_data: List[Any] = []\n for tensor in tensors:\n if tensor.is_sparse:\n continue\n # a data_ptr indicates a memory block allocated\n data_ptr = 
tensor.storage().data_ptr()\n if data_ptr in visited_data:\n continue\n visited_data.append(data_ptr)\n\n numel = tensor.storage().size()\n total_numel += numel\n element_size = tensor.storage().element_size()\n mem = numel * element_size / 1024 / 1024 # 32bit=4Byte, MByte\n total_mem += mem\n element_type = type(tensor).__name__\n size = tuple(tensor.size())\n\n if print_all:\n print(f\"{element_type}\\t\\t{size}\\t\\t{mem}\")\n print(\"-\" * LEN)\n print(f\"Total Tensors: {total_numel} \\tUsed Memory Space: {total_mem}\")\n print(\"-\" * LEN)", "def print_stats():\n if spritegroup_stats[0] > 0:\n generic.print_info(\"Concurrent spritegroups: {}/{} ({})\".format(spritegroup_stats[0], total_action2_ids, str(spritegroup_stats[1])))\n if a2register_stats[0] > 0:\n generic.print_info(\"Concurrent Action2 registers: {}/{} ({})\".format(a2register_stats[0], total_tmp_locations, str(a2register_stats[1])))", "def mem_info(self):\n\t\t\tavailable, total = cuda.mem_get_info() #Note: pycuda._driver.LogicError: cuMemGetInfo failed: context is destroyed\n\t\t\tprint(\"Available: %.2f GB\\nTotal: %.2f GB\"%(available/1e9, total/1e9))", "def logMemoryStats():\n class MemoryStatusEx(ctypes.Structure):\n \"\"\" MEMORYSTATUSEX \"\"\"\n kaFields = [\n ( 'dwLength', ctypes.c_ulong ),\n ( 'dwMemoryLoad', ctypes.c_ulong ),\n ( 'ullTotalPhys', ctypes.c_ulonglong ),\n ( 'ullAvailPhys', ctypes.c_ulonglong ),\n ( 'ullTotalPageFile', ctypes.c_ulonglong ),\n ( 'ullAvailPageFile', ctypes.c_ulonglong ),\n ( 'ullTotalVirtual', ctypes.c_ulonglong ),\n ( 'ullAvailVirtual', ctypes.c_ulonglong ),\n ( 'ullAvailExtendedVirtual', ctypes.c_ulonglong ),\n ];\n _fields_ = kaFields; # pylint: disable=invalid-name\n\n def __init__(self):\n super(MemoryStatusEx, self).__init__();\n self.dwLength = ctypes.sizeof(self);\n\n try:\n oStats = MemoryStatusEx();\n ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(oStats));\n except:\n reporter.logXcpt();\n return False;\n\n reporter.log('Memory statistics:');\n for sField, _ in MemoryStatusEx.kaFields:\n reporter.log(' %32s: %s' % (sField, getattr(oStats, sField)));\n return True;", "def memory(self):\n # Run 'free -m' command and make a list from output.\n mem_data = self.execCMD('free', '-m').split()\n total_mem = int(mem_data[7]) / 1024.\n used_mem = int(mem_data[15]) / 1024.\n # Caculate percentage\n used_mem_percent = int(used_mem / (total_mem / 100))\n\n # Results are in kilobyte.\n return total_mem, used_mem, used_mem_percent", "def memory():\n\n mem_info = {}\n\n if platform.linux_distribution()[0]:\n with open('/proc/meminfo') as file:\n c = 0\n for line in file:\n lst = line.split()\n if str(lst[0]) == 'MemTotal:':\n mem_info['total'] = int(lst[1])\n elif str(lst[0]) in ('MemFree:', 'Buffers:', 'Cached:'):\n c += int(lst[1])\n mem_info['free'] = c\n mem_info['used'] = (mem_info['total']) - c\n elif platform.mac_ver()[0]:\n ps = subprocess.Popen(['ps', '-caxm', '-orss,comm'], stdout=subprocess.PIPE).communicate()[0]\n vm = subprocess.Popen(['vm_stat'], stdout=subprocess.PIPE).communicate()[0]\n\n # Iterate processes\n process_lines = ps.split('\\n')\n sep = re.compile('[\\s]+')\n rss_total = 0 # kB\n for row in range(1, len(process_lines)):\n row_text = process_lines[row].strip()\n row_elements = sep.split(row_text)\n try:\n rss = float(row_elements[0]) * 1024\n except:\n rss = 0 # ignore...\n rss_total += rss\n\n # Process vm_stat\n vm_lines = vm.split('\\n')\n sep = re.compile(':[\\s]+')\n vm_stats = {}\n for row in range(1, len(vm_lines) - 2):\n row_text = 
vm_lines[row].strip()\n row_elements = sep.split(row_text)\n vm_stats[(row_elements[0])] = int(row_elements[1].strip('\\.')) * 4096\n\n mem_info['total'] = rss_total\n mem_info['used'] = vm_stats[\"Pages active\"]\n mem_info['free'] = vm_stats[\"Pages free\"]\n else:\n raise('Unsupported Operating System.\\n')\n exit(1)\n\n return mem_info", "def get_memory_usage():\n\n memory_usage = {'total' : 0, 'used' : 0}\n meminfo = subprocess.Popen(['free', '-m'], shell=False, stdout=subprocess.PIPE)\n meminfo.stdout.readline()\n total_used = meminfo.stdout.readline()\n memory_usage['total'] = total_used.split()[1]\n memory_usage['used'] = total_used.split()[2]\n return memory_usage", "def print_numa_stats(numafiles):\n for numafile in numafiles:\n numafile.seek(0)\n node_id = int(numafile.name[numafile.name.find(\"/node/node\")+10:-9])\n ts = int(time.time())\n stats = dict(line.split() for line in numafile.read().splitlines())\n for stat, tag in (# hit: process wanted memory from this node and got it\n (\"numa_hit\", \"hit\"),\n # miss: process wanted another node and got it from\n # this one instead.\n (\"numa_miss\", \"miss\")):\n print (\"sys.numa.zoneallocs %d %s node=%d type=%s\"\n % (ts, stats[stat], node_id, tag))\n # Count this one as a separate metric because we can't sum up hit +\n # miss + foreign, this would result in double-counting of all misses.\n # See `zone_statistics' in the code of the kernel.\n # foreign: process wanted memory from this node but got it from\n # another node. So maybe this node is out of free pages.\n print (\"sys.numa.foreign_allocs %d %s node=%d\"\n % (ts, stats[\"numa_foreign\"], node_id))\n # When is memory allocated to a node that's local or remote to where\n # the process is running.\n for stat, tag in ((\"local_node\", \"local\"),\n (\"other_node\", \"remote\")):\n print (\"sys.numa.allocation %d %s node=%d type=%s\"\n % (ts, stats[stat], node_id, tag))\n # Pages successfully allocated with the interleave policy.\n print (\"sys.numa.interleave %d %s node=%d type=hit\"\n % (ts, stats[\"interleave_hit\"], node_id))", "def get_memory_info(dut):\n command = \"top -n 1 b | grep 'KiB Mem' \"\n output = st.show(dut, command)\n include_keys = ['total', 'used', 'free', 'buff_cache']\n rv = {each_key: ast.literal_eval(output[0][each_key]) for each_key in output[0] if each_key in include_keys}\n return rv", "def dump(self):\n fmt='%20s:%10.4fs%6.1f%%'\n print('\\n----------------TIME MANAGER PROFILE----------------\\n\\n')\n total_t=time.time()-self.tic0\n for rec in self.record:\n print(fmt % (rec[0],rec[1],100.0*rec[1]/total_t))\n print(fmt % ('TOTAL ELAPSED TIME', total_t, 100.0))\n print('\\n----------------TIME MANAGER PROFILE----------------\\n\\n')", "def get_memory_usage(cls):\n\n mem_stats = psutil.virtual_memory()\n\n mem_stats_dict = { StatsKeys.MEMORY :\n {\n StatsKeys.TOTAL : mem_stats.total,\n StatsKeys.AVAILABLE : mem_stats.available,\n StatsKeys.USED : mem_stats.used\n }\n }\n logger.debug(\"Memory stats: {}\".format(mem_stats_dict))\n\n return mem_stats_dict", "def get_memory():\n with open('/proc/meminfo', 'r') as mem:\n free_memory = 0\n for i in mem:\n sline = i.split()\n if str(sline[0]) in ('MemFree:', 'Buffers:', 'Cached:'):\n free_memory += int(sline[1])\n print(\"____________________ \" + str(free_memory) + \"____________________\")\n return free_memory", "def ShowPipeStats(cmd_args=None):\n print \"Number of pipes: {: d}\".format(kern.globals.amountpipes)\n print \"Memory used by pipes: 
{:s}\".format(sizeof_fmt(int(kern.globals.amountpipekva)))\n print \"Max memory allowed for pipes: {:s}\".format(sizeof_fmt(int(kern.globals.maxpipekva)))", "def memory():\n\twith open('/proc/meminfo','r') as mem:\n\t\tret = {}\n\t\ttmp = 0\n\t\tfor i in mem:\n\t\t\tsline = i.split()\n\t\t\tif str(sline[0])=='MemTotal:':\n\t\t\t\tret['total'] = int(sline[1]*1.0e-6)\n\treturn ret", "def _get_mem_info(self):\n memory_usage_pct = None\n try:\n memory_usage = self._get_cgroups_current_memory_usage()\n if self._max_memory_usage and memory_usage:\n memory_usage_pct = round((memory_usage / self._max_memory_usage) * 100, 1)\n except BaseException:\n self._log.warning(f'Unable to determine memory usage', exc_info=True)\n return memory_usage_pct", "def print_performance_info(self):\n pass", "def mem_report(print_all: bool = False) -> None:\n\n def _mem_report(tensors: Iterable, mem_type: str) -> None:\n \"\"\"Print the selected tensors of type\n\n There are two major storage types in our major concern:\n - GPU: tensors transferred to CUDA devices\n - CPU: tensors remaining on the system memory (usually unimportant)\n\n Args:\n - tensors: the tensors of specified type\n - mem_type: 'CPU' or 'GPU' in current implementation \"\"\"\n print(f\"Storage on {mem_type}\")\n print(\"-\" * LEN)\n total_numel = 0\n total_mem = 0\n visited_data: List[Any] = []\n for tensor in tensors:\n if tensor.is_sparse:\n continue\n # a data_ptr indicates a memory block allocated\n data_ptr = tensor.storage().data_ptr()\n if data_ptr in visited_data:\n continue\n visited_data.append(data_ptr)\n\n numel = tensor.storage().size()\n total_numel += numel\n element_size = tensor.storage().element_size()\n mem = numel * element_size / 1024 / 1024 # 32bit=4Byte, MByte\n total_mem += mem\n element_type = type(tensor).__name__\n size = tuple(tensor.size())\n\n if print_all:\n print(f\"{element_type}\\t\\t{size}\\t\\t{mem}\")\n print(\"-\" * LEN)\n print(f\"Total Tensors: {total_numel} \\tUsed Memory Space: {total_mem}\")\n print(\"-\" * LEN)\n\n LEN = 65\n if print_all:\n print(\"=\" * LEN)\n print(\"Element type\\tSize\\t\\t\\tUsed MEM(MBytes)\")\n tensors = []\n for obj in gc.get_objects():\n try:\n if t.is_tensor(obj) or (hasattr(obj, \"data\") and t.is_tensor(obj.data)):\n tensors.append(obj)\n except Exception:\n pass\n cuda_tensors = [tensor for tensor in tensors if tensor.is_cuda]\n host_tensors = [tensor for tensor in tensors if not tensor.is_cuda]\n _mem_report(cuda_tensors, \"GPU\")\n _mem_report(host_tensors, \"CPU\")\n if print_all:\n print(\"=\" * LEN)", "def print_usage(self):\n print('Total Usage: %f compute seconds' % self.box_usage)\n cost = self.box_usage * 0.14\n print('Approximate Cost: $%f' % cost)", "def get_mem_info():\n import psutil\n vm = psutil.virtual_memory()\n return {\n \"memtotal\": vm.total,\n \"memavailable\": vm.available,\n }", "def print_usage(self):\r\n print 'Total Usage: %f compute seconds' % self.box_usage\r\n cost = self.box_usage * 0.14\r\n print 'Approximate Cost: $%f' % cost", "def ufree(verbose=False):\n import gc\n import os\n F = gc.mem_free()\n A = gc.mem_alloc()\n T = F+A\n P = '{0:.2f}%'.format(F/T*100)\n if not verbose:\n return P\n return ('Total: {} Free: {} ({})'.format(T ,F, P))", "def get_mem_usage():\n \n with open('/proc/meminfo') as f:\n for line in f:\n if line.startswith('MemTotal:'):\n mem_total = int(line.split()[1])\n elif line.startswith('MemFree:'):\n mem_free = int(line.split()[1])\n elif line.startswith('VmallocTotal:'):\n vm_total = int(line.split()[1])\n elif 
line.startswith('Cached:'):\n mem_cached = int(line.split()[1])\n \n return {\n 'total': mem_total,\n 'res': mem_total - mem_free,\n 'virt': vm_total,\n 'cached': mem_cached\n }" ]
[ "0.76514274", "0.7634883", "0.7584728", "0.7564569", "0.736413", "0.7283178", "0.6955679", "0.6937322", "0.6876623", "0.6779077", "0.671467", "0.6708236", "0.66627175", "0.6650455", "0.6588355", "0.6583832", "0.6578967", "0.65692943", "0.6550809", "0.65389204", "0.6524489", "0.65236104", "0.6523278", "0.64516133", "0.645109", "0.6436865", "0.64298445", "0.6420272", "0.64022094", "0.6399331" ]
0.7896127
0
Cambiamos la potencia de disparo
def cambiar_potencia(self, potencia): self.potencia += potencia self.partida.actualizar_marcador()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mover_bm_derecha(self):\n self.nueva_posicion_posible_parte_superior = self.mapa.consultar_casilla_por_movimiento([self.casilla[0] + 1,\n self.casilla[1]],\n [self.vertice_2[0] + self.velocidad ,\n self.vertice_2[1]],\n [self.vertice_1[0] + 5, self.vertice_1[1]])\n self.nueva_posicion_posible_parte_inferior = self.mapa.consultar_casilla_por_movimiento([self.casilla[0] + 1,\n self.casilla[1] + 1],\n [self.vertice_4[0] + self.velocidad,\n self.vertice_4[1]],\n self.vertice_1)\n if self.nueva_posicion_posible_parte_superior[0] != 1 and self.nueva_posicion_posible_parte_inferior[0] != 1:\n self.x += self.velocidad * (self.x <= 655)\n self.posicion = [self.x,self.posicion[1]]\n self.casilla = [self.casilla[0] + self.nueva_posicion_posible_parte_superior[1], self.casilla[1]]\n self.redefinir_vertices()", "def disarm(self):\n pass", "def mostrar_promedio_disparo(self):\n participantes = self.__disparos.copy()\n promedios = self.__calcular_promedio_disparo(participantes)\n for promedio in promedios:\n print(\n f\"\"\"\n =================================\n ====== PARTICIPANTE Nº: {promedio['nroParticipante']} ======\n =================================\n Disparos: {promedio['disparos']},\n Nombre: {promedio['nombre']},\n Apellido: {promedio['apellido']},\n Promedio: {promedio['promedio']}\n =================================\n =================================\n \"\"\"\n )", "def parar():\n pass", "def cambiar_puntaje(self):\r\n self.puntaje_maximo = self.puntos_maximos.value()\r\n sleep(0.1)\r\n puntaje = {\"status\": \"cambio_puntaje\",\r\n \"data\": self.puntaje_maximo}\r\n self.server_signal_2.emit(puntaje)", "def preberi_pot(ukazi):", "def preberi_pot(ukazi):", "def preberi_pot(ukazi):", "def preberi_pot(ukazi):", "def preberi_pot(ukazi):", "def get_mvts(self, plateau):\n if self.type == \"p\": #Pion\n if self.color == \"w\":\n diags = [[self.x-1, self.y+1],[self.x+1, self.y+1]] #Mouvements possibles de diagonales\n faces = [[self.x, self.y+1]] #Mouvements possibles de face\n if not self.moved: #Si le pion n'a pas encore bougé de la partie\n faces.append([self.x, self.y+2])\n else:\n diags = [[self.x-1, self.y-1], [self.x+1, self.y-1]]\n faces = [[self.x, self.y-1]] #Mouvements possibles de \n if not self.moved:\n faces.append([self.x, self.y-2])\n pos = [] #Position de déplacement validées\n for d in diags:\n if verif_case(d[0], d[1]): #Si la case est sur le plateau \n pion = plateau.get_pion(d[0],d[1])\n if pion != None and pion.color != self.color: #Si il y a un pion ennemi\n pos.append(d)\n for f in faces: \n if verif_case(f[0],f[1]):\n pion = plateau.get_pion(f[0], f[1])\n if pion == None: #Si il n'y a pas de pion\n pos.append(f)\n return pos\n elif self.type == \"t\": #Tour\n pos = []\n dir = [[1,0],[-1,0],[0,1],[0,-1]] #4 directions possibles\n for d in dir:\n x,y = self.x+d[0],self.y+d[1] #Projection de position\n while verif_case(x,y): #Tant que (x, y) est sur le plateau\n pion = plateau.get_pion(x, y)\n if pion != None: #Si il y a un pion\n if pion.color != self.color: #Si il n'est pas allié\n pos.append([x,y])\n break\n pos.append([x,y])\n x += d[0]\n y += d[1]\n return pos\n elif self.type == \"c\": #Cavalier\n l = [-2,-1,1,2]\n mvts = [[x,y] for x in l for y in l if abs(x)!=abs(y)]\n pos = []\n for m in mvts:\n x = self.x + m[0]\n y = self.y + m[1]\n if verif_case(x,y):\n pion = plateau.get_pion(x, y)\n if pion == None or pion.color != self.color:\n pos.append([x, y])\n return pos\n elif self.type == \"f\": #Fou\n dir = [[1,1],[-1,1],[-1,-1],[1,-1]]\n pos = []\n for d in 
dir:\n x,y = self.x+d[0],self.y+d[1]\n while verif_case(x,y):\n pion = plateau.get_pion(x, y)\n if pion != None:\n if pion.color != self.color:\n pos.append([x,y])\n break\n pos.append([x,y])\n x += d[0]\n y += d[1]\n return pos\n elif self.type == \"k\": #Roi\n mvts = [[1,0],[-1,1],[0,-1],[-1,-1],[-1,0],[-1,1],[0,1],[1,1]] #4 mouvements possibles\n pos = []\n for m in mvts:\n x = self.x + m[0]\n y = self.y + m[1]\n if verif_case(x, y):\n pion = plateau.get_pion(x, y)\n if pion == None or pion.color != self.color:\n pos.append([self.x + m[0], self.y + m[1]])\n return pos\n elif self.type == \"q\": #Dame\n pos = []\n dir = [[1,0],[1,-1],[0,-1],[-1,-1],[-1,0],[-1,1],[0,1],[1,1]]\n for d in dir:\n x,y = self.x+d[0],self.y+d[1]\n while verif_case(x,y):\n pion = plateau.get_pion(x, y)\n if pion != None:\n if pion.color != joueur:\n pos.append([x,y])\n break\n pos.append([x,y])\n x += d[0]\n y += d[1]\n return pos", "def manipular_carga_general(self, camion):\r\n operaciones = self.operaciones[\"Operaciones manipuleo\"]\r\n\r\n # Manipuleo de camiones por cargar/descargar\r\n # Espera camion para realizar transbordo o interrumpe la espera de otro\r\n ejecucion_espera_o_interrumpe = yield self.process(\r\n camion.espera_transbordo_o_interrumpe(self))\r\n\r\n # Si el camion espera procede con un tranbordo\r\n if ejecucion_espera_o_interrumpe[\"Resultado\"] != \"Interrumpio espera\":\r\n transbordo = operaciones[\"Transbordo con grua\"]\r\n\r\n yield self.process(self.transbordar_o_cargar_descargar(\r\n camion, ejecucion_espera_o_interrumpe,\r\n transbordo, 500))", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def zapisi_pot(pot):", "def desapilar(pila):\n dato = pila.datos[pila.tope]\n pila.tope -= 1\n return dato", "def restar(self):\n self.resultado = self.valor_1 - self.valor_2", "def postepy(self,przedmiot:str)->float:\n pass", "def mover_bm_izquierda(self):\n self.nueva_posicion_posible_parte_superior = self.mapa.consultar_casilla_por_movimiento([self.casilla[0] - 1,\n self.casilla[1]],\n [self.vertice_1[0] - self.velocidad,self.vertice_1[1]], \n [self.vertice_1[0] - 5 - 5, self.vertice_1[1]])\n self.nueva_posicion_posible_parte_inferior = self.mapa.consultar_casilla_por_movimiento([self.casilla[0] - 1,\n self.casilla[1] + 1],\n [self.vertice_3[0] - self.velocidad,self.vertice_3[1]],\n [self.vertice_3[0] - 5,self.vertice_3[1]]) \n if self.nueva_posicion_posible_parte_superior[0] != 1 and self.nueva_posicion_posible_parte_inferior[0] != 1:\n self.x -= self.velocidad * (self.x >= 15)\n self.posicion = [self.x,self.posicion[1]]\n self.casilla = [self.casilla[0] - self.nueva_posicion_posible_parte_superior[1] *(self.nueva_posicion_posible_parte_inferior[0] != 1) * (self.nueva_posicion_posible_parte_superior[0] != 1), self.casilla[1]]\n self.redefinir_vertices()", "def manipular_sacos(self, camion):\r\n operaciones = self.operaciones[\"Operaciones manipuleo\"]\r\n\r\n # Manipuleo de camion por cargar\r\n if camion.tipo == \"Carga\":\r\n\r\n # Espera camion para realizar transbordo o interrumpe la espera de otro\r\n ejecucion_espera_o_interrumpe = yield self.process(\r\n camion.espera_transbordo_o_interrumpe(self, 100))\r\n\r\n # Si el camion espera procede con un tranbordo o carga a pulso\r\n if ejecucion_espera_o_interrumpe[\"Resultado\"] != \"Interrumpio espera\":\r\n transbordo = operaciones[\"Transbordo a pulso - Sacos\"]\r\n carga = operaciones[\"Carga a pulso - Sacos\"]\r\n\r\n yield 
self.process(self.transbordar_o_cargar_descargar(\r\n camion, ejecucion_espera_o_interrumpe,\r\n transbordo, 200,\r\n carga, self.medios_almacenamiento[\"Almacen 2\"], 200))\r\n\r\n # Manipuleo de camion por descargar\r\n elif camion.tipo == \"Descarga\":\r\n\r\n # Espera camion para realizar transbordo o interrumpe la espera de otro\r\n ejecucion_espera_o_interrumpe = yield self.process(\r\n camion.espera_transbordo_o_interrumpe(self, 100))\r\n\r\n # Si el camion espera procede con un tranbordo o descarga a pulso\r\n if ejecucion_espera_o_interrumpe[\"Resultado\"] != \"Interrumpio espera\":\r\n transbordo = operaciones[\"Transbordo a pulso - Sacos\"]\r\n descarga = operaciones[\"Descarga a pulso - Sacos\"]\r\n\r\n yield self.process(self.transbordar_o_cargar_descargar(\r\n camion, ejecucion_espera_o_interrumpe,\r\n transbordo, 200,\r\n descarga, self.medios_almacenamiento[\"Almacen 2\"], 200))", "def atencion_ingreso(self, camion):\r\n\r\n operaciones = self.operaciones[\"Operaciones complementarias\"]\r\n\r\n if camion.tipo == \"Descarga\":\r\n yield self.process(operaciones[\"Atencion recepcion 1\"]\r\n .ejecutar(self, camion))\r\n else:\r\n yield self.process(operaciones[\"Atencion despacho 1\"]\r\n .ejecutar(self, camion))\r\n\r\n if camion.carga not in [\"Contenedor 20\", \"Contenedor 40\"] and \\\r\n not (camion.tipo == \"Carga\" and camion.carga == \"Harina de Soya - Hi Pro/Pellet de Soya\"):\r\n yield self.process(operaciones[\"Primer pesaje\"]\r\n .ejecutar(self, camion))\r\n self.exit(camion.nombre)", "def mezclar_bolsa(self):", "def ptsigmav(self,prof_pt): #getter que halla el esfuerzo vertical en un punto\r\n sigmav=self.pext\r\n for capa in self.capas:\r\n capaini=capa.prof_init\r\n capafin=capa.prof_final\r\n if capa.prof_init >= prof_pt:\r\n break #se alcanzo una capa mas profunda a la del punto\r\n elif capa.prof_final <= prof_pt and capa.prof_final <= self.n_fret:\r\n #la capa esta encima y el nivel freatico esta debajo de la capa\r\n sigmav=sigmav + capa.gamma*(capa.prof_final - capa.prof_init)\r\n pass\r\n elif capa.prof_final <= prof_pt and capa.prof_init >= self.n_fret:\r\n #la capa esta encima y el nivel freatico en toda la capa\r\n sigmav=sigmav + capa.gamma_sat*(capa.prof_final-capa.prof_init) \r\n pass\r\n elif capa.prof_final<prof_pt and capa.prof_init < self.n_fret and capa.prof_final>self.n_fret:\r\n #la capa esta encima, pero el nivel freatico esta al interior de esa capa\r\n sigmav=sigmav + capa.gamma*(self.n_fret-capa.prof_init)\r\n sigmav=sigmav + capa.gamma_sat*(capa.prof_final-self.n_fret)\r\n pass\r\n else: #el punto se encuentra en la capa actual \r\n if capa.prof_init >= self.n_fret: #el nivel freatico esta por encima de la capa del punto\r\n sigmav=sigmav + capa.gamma_sat*(prof_pt-capa.prof_init) \r\n pass\r\n elif capa.prof_final <= self.n_fret or prof_pt <= self.n_fret: #el nivel freatico esta debajo de la capa del punto o del punto\r\n sigmav=sigmav + capa.gamma*(prof_pt-capa.prof_init)\r\n pass\r\n elif capa.prof_final>self.n_fret and capa.prof_init<self.n_fret: \r\n ##el nivel freatico esta dentro de la capa del punto y ademas esta entre el inicio de la capa y el punto\r\n sigmav=sigmav + capa.gamma*(self.n_fret-capa.prof_init)\r\n sigmav=sigmav + capa.gamma_sat*(prof_pt-self.n_fret)\r\n pass\r\n else:\r\n print('error en sigma v del punto')\r\n pass\r\n pass\r\n pass\r\n return round(sigmav,3)", "def revise():", "def promedio_disparos():\n with open(\"Basedatos.txt\", \"r\") as bd:\n datos = bd.readlines()\n disparos_totales = 0\n lista_disparos 
= []\n #se obtienen todos los puntajes\n for x in datos:\n lista = x[:-1].split(\",\")\n lista_disparos.append(lista)\n disparos_totales += int(lista[5]) #se van sumando cada uno de los puntajes\n disparos_totales /= len(lista_disparos) #se divide el puntaje entre el numero de usuarios\n disparos_totales = (round(disparos_totales, 2)) #se redondea el resultado a dos decimales\n print(\"Los disparos totales en promedio para ganar fueron: {}\".format(disparos_totales))", "def adelanta_camion(self, entorno, operacion, medio_de_origen_o_destino, camion, tipo):\r\n # TODO mejorar implementacion\r\n\r\n if tipo == \"Operacion\":\r\n\r\n operacion.recurso.cola.remove(camion)\r\n operacion.recurso.cola = \\\r\n operacion.recurso.cola[0:operacion.recurso.cola.index(self)] \\\r\n + [camion] + operacion.recurso.cola[operacion.recurso.cola.index(self):]\r\n\r\n print str(camion) + \" adelantado bajo criterio de \" + str(self) + \" \" + str(entorno.now)\r\n print \"\\tEn sistema: \" + str(operacion.recurso.cola) + \" Hora: \" + str(entorno.now)\r\n\r\n elif tipo == \"Almacen\":\r\n\r\n medio_de_origen_o_destino.cola.remove(camion)\r\n medio_de_origen_o_destino.cola = \\\r\n medio_de_origen_o_destino.cola[0:medio_de_origen_o_destino.cola.index(self)] \\\r\n + [camion] + medio_de_origen_o_destino.cola[medio_de_origen_o_destino.cola.index(self):]\r\n\r\n print str(camion) + \" adelantado bajo criterio de \" + str(self) + \" \" + str(entorno.now)\r\n print \"\\t\" + medio_de_origen_o_destino.nombre + \":\" \\\r\n + str(medio_de_origen_o_destino.cola) + \" Hora: \" + str(entorno.now)", "def postepy(self,przedmiot:str)-> float:\n return self.przedmioty[przedmiot].srednia()" ]
[ "0.5895572", "0.5794777", "0.56389797", "0.5613253", "0.5611132", "0.55967414", "0.55967414", "0.55967414", "0.55967414", "0.55967414", "0.54503614", "0.5439653", "0.53975016", "0.53975016", "0.53975016", "0.53975016", "0.53975016", "0.53975016", "0.5377089", "0.5350562", "0.5332245", "0.53197825", "0.5261364", "0.5259924", "0.52577496", "0.5222728", "0.5210798", "0.5196581", "0.5190575", "0.5187043" ]
0.6105102
0
Walk Animation walk_images = os.path.join(_RESFOLDERS, 'Walk', '.gif') walk_list = glob.glob(walk_images)
def iniciar_sprites(self): res_gifs = os.path.join(_RESFOLDERS, '**', '*.gif') gifs_list = glob.glob(res_gifs, recursive=True) for gif in gifs_list: self.guardar_sprite(gif)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_images(self):\r\n self.standing_frame = [load_image(\"cat1.png\")]\r\n self.walk_frames_r = [load_image(\"cat2.png\"), load_image(\"cat3.png\"),\r\n load_image(\"cat4.png\")]", "def load_images(self, folder):\n cwd = os.getcwd()\n dir = cwd + '/' + folder\n files = os.listdir(dir)\n for file in files:\n img = pygame.image.load(dir + '/' + file)\n self.images.append(img)", "def readPlayerImageFiles(self):\n currentPath = os.path.dirname(os.path.abspath(__file__))\n listOfFileNames=[]\n for i in os.listdir(currentPath):\n if re.match(\"player\\_\\d+\",i): #i.endswith(\".gif\")\n listOfFileNames.append(currentPath+'/'+i)\n return listOfFileNames", "def getimagelist(folder):\n imagefolder = Path(folder) \n imagelist = imagefolder.glob(\"**/*.png\") \n return list(imagelist)", "def get_imlist_png(path):\n \n return [os.path.join(path,f) for f in os.listdir(path) if f.endswith('.png')]", "def load_images(path):\n images = []\n images_names = []\n \n for file_name in os.listdir(path):\n image_name = file_name\n images_names.append(image_name)\n images_names = sorted(images_names) #use sort to insure linux file sys behaves\n print(images_names) #check for proper order\n\n for file_name in images_names:\n image = pygame.image.load(path + os.sep + file_name).convert()\n images.append(image)\n return images", "def get_image_list(folder):\n image_list = []\n for each_file in os.listdir(folder):\n filename, ext = os.path.splitext(each_file)\n if ext == '.gif':\n image_list.append(each_file)\n return image_list", "def animate(directory,gifname,n_t,step=2,duration=0.2):\n\t# create list of filenames\n\tfnames = dir_fname(directory,\"*\")\n\t# create list of plots\n\timages=[] \n\tfor k in range(0,n_t):\n\t\tk = k*step\n\t\tprint('Mounting Im '+ str(k))\n\t\tFIG_NAME=fnames[k]\n\t\timages.append(imageio.imread(FIG_NAME)) # read\n\t# Now we can assemble the video\n\timageio.mimsave(gifname, images,duration=duration) # create gif\n\tprint('Animation'+gifname+'Ready')\n\treturn True", "def create_gif(base_folder):\n img_list = []\n search_path = glob.glob(os.path.join(base_folder, '*.png'))\n search_path.sort()\n for f in search_path:\n im = Image.open(f)\n img_list.append(im)\n save_file = os.path.join(base_folder, 'animated_gif.gif')\n img_list[0].save(save_file,\n save_all=True, append_images=img_list[1:], optimize=False, duration=180, loop=0)", "def getImages(self,Project=\"\"):\n #images = [\"image1.jpg\",\"image2.jpg\",\"image3.jpg\"]\n \n os.chdir(self.dataDir)\n images = glob.glob(\"*.png\")\n \n return images", "def create_gifs(folder, time_per_image=0.1):\n # Retrieve images paths\n images_dict = defaultdict(list)\n folders_sorting_key = lambda s: int(s.split(\"_\")[-1])\n obs_folders = [f for f in os.listdir(folder) if f.split(\"_\")[0] == \"observations\"]\n obs_folders = sorted(obs_folders, key=folders_sorting_key)\n for obs_folder in obs_folders:\n for f in os.listdir(os.path.join(folder, obs_folder)):\n image_name = \"_\".join(f.split(\"_\")[:-1])\n images_dict[image_name].append(os.path.join(folder, obs_folder, f))\n # Create gifs\n for name in images_dict:\n target = os.path.join(folder, name + \".gif\")\n LungsLoader._create_gif(images_dict[name], target, time_per_image)", "def get_imlist(path):\n\treturn [os.path.join( path, f) for f in os.listdir(path) if f.endswith('.jpg')]", "def get_images(fish):\n fish_dir = TRAIN_DIR+'{}'.format(fish)\n images = [fish+'/'+im for im in os.listdir(fish_dir)]\n return images", "def get_imlist(path):\n return [\n os.path.join(path, f) for f 
in os.listdir(path) if f.endswith('.bmp')\n ]", "def preload_pathimgs(self, pathimgs):\n self.pathimgs = pathimgs\n print('build list images :' + self.pathimgs)\n listfiles = self.get_list_files()\n listfiles.sort(key=lambda v: v.upper())\n for imgpath in listfiles:\n if imgpath.endswith('gif'):\n listgif = self.build_list_gif(imgpath)\n self.listimages += listgif * self.passgif\n self.tempo += [self.durationgif] * len(listgif) * self.passgif\n else:\n img = Image.open(imgpath)\n img = img.resize((self.matrix.width, self.matrix.height), Image.ANTIALIAS)\n self.listimages.append(img.convert('RGB'))\n self.tempo += [self.durationimg]\n print(\" duration: {}s, {} Images\".format(int(sum(self.tempo, 0)), len(self.listimages)))", "def load_sprites(dir=\"/home/robin/workspace/python/ipt/chess/sprites\"):\n arr = []\n chdir(dir)\n for i in range(12):\n img = mimg.imread(\"sprite_\"+\"{:0>2d}\".format(i)+\".png\")\n arr.append(img)\n return arr", "def create_gif():\n anim_file = 'sample/training.gif'\n\n with imageio.get_writer(anim_file, mode='I') as writer:\n filenames = glob.glob('sample/*.jpg')\n filenames = sorted(filenames, key=lambda filename: int(filename[11:-4]))\n for filename in filenames:\n image = imageio.imread(filename)\n writer.append_data(image)\n image = imageio.imread(filename)\n writer.append_data(image)", "def get_imlist(path):\n return [os.path.join(path, f) for f in os.listdir(path) if f.endswith('.jpg')]", "def GetGifFrames(self, path):\n gif = imageio.mimread(path)\n return [\n (image.meta[\"duration\"], cv2.resize(\n cv2.cvtColor(image, cv2.COLOR_RGB2BGR), (64, 64))\n ) for image in gif\n ]", "def process_images():\n create_dirs()\n for root, dirs, files in os.walk(IN):\n for name in files:\n if name[0] == '.':\n continue\n process_image(name)", "def get_imgs(paths_list: list) -> list:\n \n imgs_list = [Image.open(project_path + data_path + paths_list[i]) for i in range(len(paths_list))]\n \n return imgs_list", "def make_gif():\n if MIGRATION:\n import imageio\n for n, JPG_DIR in enumerate(JPG_DIRS):\n images, image_file_names = [], []\n for file_name in os.listdir(JPG_DIR):\n if file_name.endswith('.jpg'):\n image_file_names.append(file_name) \n sorted_files = sorted(image_file_names, key=lambda y: int(y.split('_')[1]))\n for i in range(len(sorted_files)): \n file_path = os.path.join(JPG_DIR, sorted_files[i])\n images.append(imageio.imread(file_path))\n imageio.mimsave(FNAME.rsplit('.', 1)[0] + '_migration' + str(n) + '.gif', images, 'GIF', loop=1, fps=FPS)", "def loadimages(root):\n imgs = []\n\n def add_json_files(path,):\n for imgpath in glob.glob(path+\"/*.png\"):\n if exists(imgpath) and exists(imgpath.replace('png',\"json\")):\n imgs.append((imgpath,imgpath.replace(path,\"\").replace(\"/\",\"\"),\n imgpath.replace('png',\"json\")))\n for imgpath in glob.glob(path+\"/*.jpg\"):\n if exists(imgpath) and exists(imgpath.replace('jpg',\"json\")):\n imgs.append((imgpath,imgpath.replace(path,\"\").replace(\"/\",\"\"),\n imgpath.replace('jpg',\"json\")))\n\n def explore(path):\n if not os.path.isdir(path):\n return\n folders = [os.path.join(path, o) for o in os.listdir(path) \n if os.path.isdir(os.path.join(path,o))]\n if len(folders)>0:\n for path_entry in folders: \n explore(path_entry)\n else:\n add_json_files(path)\n\n explore(root)\n\n return imgs", "def findpaths(path):\n print('[INFO] Searching for .png images in ', path)\n frame_paths = []\n frame_to_path_dict = {}\n path_to_frame_dict = {}\n for root, dirs, files in os.walk(path, topdown=False):\n for name in 
files:\n if name.find('.png') != -1:\n frame_path = os.path.join(root, name)\n # NOTE: may want to change to deal with generic file names\n match = re.search(r'(?P<video_id>\\d+)_(?P<frame_id>\\d+).png', name)\n # video_id = int(match.group('video_id'))\n frame_id = int(match.group('frame_id'))\n frame_paths.append(frame_path)\n frame_to_path_dict[frame_id] = frame_path\n path_to_frame_dict[frame_path] = frame_id\n frame_paths_sorted = sorted(frame_paths, key=lambda x: int(path_to_frame_dict[x]))\n print('[INFO] %i frames located ' % (len(frame_paths)))\n return frame_paths_sorted, frame_to_path_dict, path_to_frame_dict", "def saveGIFBatch(directory, path, name=''):\n # for each frame in batch\n images = []\n for filename in directory:\n print(filename)\n images.append(imageio.imread(filename))\n\n name_gif = path + '/' + name + '.gif'\n imageio.mimsave(name_gif, images)", "def load_all_frames(self, path, convert_alpha=False):\n to_load = []\n for name in listdir(self.get_path(path)):\n as_list = name.split('.')\n if len(as_list) <= 2 and ImageLoader.is_decimal(as_list[0]) and \\\n isfile(self.get_path(path + [name])):\n to_load.append(name)\n to_load.sort(key=lambda name: name.split('.')[0])\n return [self.load_image(path + [x], convert_alpha)\n for x in to_load]", "def make_gifs_test(title, sort, path):\n images = os.listdir(path)\n generated_images = []\n\n for i in range(len(images)):\n file = os.path.join(path, '%s_%s_Results_%03d.png' % (title, sort, i+1))\n generated_images.append(imageio.imread(file))\n\n imageio.mimsave(path + '{}_{}_Test_Results.gif'.format(sort, title), generated_images, fps=2)\n print(\"{} gif file is generated.\".format(title))", "def animated_gif(file):\n\n\timport os.path\n\timport Image\n\tfrom conf import *\n\tfrom util_errors import gen_error\n\tANIMGIF_TAGID = 2\n\n\tfilepath = os.path.join(PROBATION_DIR, file[\"filename\"])\n\ttry:\n\t\timg = Image.open(filepath)\n\t\ttry:\n\t\t\timg.seek(1)\n\t\texcept:\n\t\t\tpass\n\t\telse:\n\t\t\tdel(img)\n\t\t\treturn [ANIMGIF_TAGID]\n\texcept Exception, data:\n\t\tgen_error('GENERIC', \"File couldn't be operated on, check perms -- \" + str(data))\n\n\tdel(img)\n\treturn []", "def load_from_folder(path):\n images = []\n files = os.listdir(path)\n files.sort()\n for file in tqdm(files):\n images.append(io.imread(path + file))\n return images", "def _fetch_all_images(self, path) -> List[str]:\n files_all = []\n\n for ext in self.exts:\n files_all.extend(glob.glob(join(path, ext)))\n\n return files_all" ]
[ "0.6584038", "0.64543307", "0.6450545", "0.64224446", "0.6325395", "0.6270912", "0.6255214", "0.62539303", "0.61220104", "0.61161965", "0.60567886", "0.59772885", "0.59587044", "0.58857423", "0.5868233", "0.58653075", "0.58591115", "0.5856427", "0.5824385", "0.581767", "0.57975805", "0.5783951", "0.577526", "0.5775185", "0.5775123", "0.57747364", "0.5760337", "0.5726206", "0.56794274", "0.56564033" ]
0.65915805
0
Populate choices using installed apps names.
def _get_target_choices(): apps = [('public', _("Public website"))] for model, entity in registry.registry.items(): if entity.menu: appname = model._meta.app_label.lower() apps.append((appname, unicode(entity.label))) return tuple(apps)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _load_installed_applications(self):\n for application in self.settings.get('apps', None) or []:\n path = None\n if isinstance(application, six.string_types):\n application_name = application\n if application.startswith('gordon.contrib.'):\n app_parts = application.split('.')\n path = os.path.join(self.root, 'contrib', app_parts[-1])\n application_name = '_'.join(app_parts[1:])\n settings = {}\n elif isinstance(application, dict):\n application_name = application.keys()[0]\n settings = application.values()[0]\n else:\n raise exceptions.InvalidAppFormatError(application)\n\n with indent(2):\n self.puts(colored.cyan(\"{}:\".format(application_name)))\n\n self.add_application(\n App(\n name=application_name,\n settings=settings,\n project=self,\n path=path\n )\n )", "def get_app_list(self):\n return self.get_setting('applications', 'installed_apps')", "def installed_appnames():\n appnames = set()\n for finder in sys.meta_path:\n if hasattr(finder, 'appname'):\n appnames.add(finder.appname)\n return appnames", "def installed_apps_fixture(installed_app, locations, app):\n return [installed_app,\n _create_installed_app(locations[1].location_id, app.app_id)]", "def list_apps(self) -> list:\n apps = self.app.list_apps()\n app_list = [app[\"title\"] for app in apps]\n return app_list", "def get_app_list(self, request):\n app_dict = self._build_app_dict(request)\n\n # Sort the apps alphabetically.\n app_list = sorted(app_dict.values(), key=lambda x: x['order'])\n\n # Sort the models alphabetically within each app.\n for app in app_list:\n app['models'].sort(key=lambda x: x['order'])\n\n return app_list", "def initDefaultChoices(self):\n return []", "def get_apps(self) -> List[str]:\n return list(self.config[\"apps\"].keys())", "def get_apps(self):\n return self.apps", "def get_app_list(self, request):\n ordering = {\n \"Sujets\":1,\n \"Secteurs\":2,\n \"Pages\":3,\n \"Liens\":4,\n \"Illustrations\":5,\n \"Pictures\":6,\n \"Picture dims\":7,\n \"Icons\":8,\n }\n app_dict = self._build_app_dict(request)\n # a.sort(key=lambda x: b.index(x[0]))\n # Sort the apps alphabetically.\n app_list = sorted(app_dict.values(), key=lambda x: x['name'].lower())\n\n # Sort the models alphabetically within each app.\n for app in app_list:\n app['models'].sort(key=lambda x: ordering[x['name']])\n\n return app_list", "def all_registered_appnames():\n yield from sorted(Registry.monomers.keys())", "def sync_apps(self):\n pass", "def add_app(self, app_name):\n self.add_list_setting('applications', 'installed_apps', app_name)", "def app_names(self):\n return self.get_app_names()", "async def get_installed_apps(self, params: Optional = None) -> dict:\r\n return await self.get_items(API_INSTALLEDAPPS, params=params)", "def ListApps(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "def initDefaultChoices(self):\n return [entry[0] for entry in self.getEditChoices()]", "def get_apps(provider, query):\n\n workdir = os.path.dirname(os.path.realpath(__file__))\n with open(os.path.join(workdir, '..', 'config.yml')) as f:\n config = yaml.load(f)\n ex = Explorer()\n logging.info('Read bucket: %s', config['SCOOP_BUCKET'])\n apps = ex.get_apps(os.path.expandvars(config['SCOOP_BUCKET']), query)\n logging.info(\"Apps count = %d\", len(apps))\n installed = provider.get_installed()\n\n # check if already installed\n for app in apps:\n app['installed'] = app['name'] in installed\n\n return apps", "def selected_applications(self) -> Optional[pulumi.Input['NamespacedNamesArgs']]:\n return 
pulumi.get(self, \"selected_applications\")", "def selected_applications(self) -> Optional[pulumi.Input['NamespacedNamesArgs']]:\n return pulumi.get(self, \"selected_applications\")", "def get_app_names(self):\n groups = self['__store']\n lookup = {\n g.group_id: g.name[2:]\n for g in groups\n if (g.name.startswith('a_'))\n }\n return set(map(lookup.get, self.get_app_ids()))", "def get_chosen_options(user):\n user_profile = user.get_profile()\n user_application = user_profile.application\n np = user_application.np\n ocl = eval(user_application.options_selected)\n chosen_options = []\n for oc in ocl:\n chosen_options.append(Option.objects.get(opt_code=int(oc))) \n return chosen_options", "def get_all_apps(self):\n return list(self.apps.values())", "def get_local_app_list():\n\t\tapp_list = [\n\t\t\t{\n\t\t\t\t'name': app,\n\t\t\t\t'dir': os.path.dirname(os.path.abspath(import_module(app).__file__)),\n\t\t\t}\n\t\t\tfor app in settings.INSTALLED_APPS\n\t\t]\n\t\treturn [app for app in app_list if settings.BASE_DIR in app['dir']]", "def list_apps(request, pk=0):\n context = {'items': [], 'resource_type': 'App'}\n\n if pk == 0:\n context['h2'] = \"Managed Applications\"\n context['header_1'] = \"Developer\"\n context['header_2'] = \"Version\"\n refresh_managed_software_status()\n apps = MacOSApp.objects.filter(merged_into__isnull=True).reverse()\n if not request.user.has_perm('devices.manage_apps'):\n apps = apps.filter(managed=True).exclude(installed__isnull=True, pending_install__isnull=True)\n for app in apps:\n assignment_count = app.pending_install.count()\n installed_on = app.installed.all()\n data = {'meta': app, 'assignment_count': assignment_count, 'installed': installed_on}\n context['items'].append(data)\n else:\n if not request.user.has_perm('devices.manage_apps'):\n raise PermissionDenied\n\n device = get_object_or_404(Laptop, pk=pk)\n context['h2'] = \"Applications on {}\".format(device.name)\n context['header_1'] = \"Developer\"\n context['header_2'] = \"Version\"\n context['device_view'] = True\n context['device_id'] = pk\n apps = MacOSApp.objects.filter(pending_install__in=[device])\n apps |= MacOSApp.objects.filter(installed__in=[device])\n for app in apps:\n status = 'Not assigned'\n for entry in app.installed.all():\n if entry == device:\n status = 'Installed'\n for entry in app.pending_install.all():\n if entry == device:\n status = 'Assigned'\n data = {'meta': app, 'status': status}\n context['items'].append(data)\n\n return render(request, 'mdm/resource_list.html', context)", "def __init__(self):\n super().__init__(interface.Apps, DEFAULT_PRIORITIES)", "def discover_glitter_apps(self):\n for app_name in settings.INSTALLED_APPS:\n module_name = '{app_name}.glitter_apps'.format(app_name=app_name)\n try:\n glitter_apps_module = import_module(module_name)\n if hasattr(glitter_apps_module, 'apps'):\n self.glitter_apps.update(glitter_apps_module.apps)\n except ImportError:\n pass\n\n self.discovered = True", "async def app_list(self) -> List[interface.App]:\n return await self.relay(\"app_list\")()", "def test_installed_apps(self):\n self.assertIn(__package__, settings.INSTALLED_APPS)", "def handle(self, *args, **options):\n app_labels = [app.split('.')[-1] for app in settings.INSTALLED_APPS]\n if not args:\n args = app_labels\n for app in args:\n if app not in app_labels:\n print \"%s is not a valid application\" % app\n continue\n\n app_module = get_app(app_label=app, emptyOK=True)\n if app_module is None:\n continue\n\n print \"Models of %s:\" % app\n for model in 
get_models(app_module):\n print \" - %s has %d entries\" % (\n model.__name__,\n model.objects.count()\n )" ]
[ "0.63182974", "0.6118882", "0.61001414", "0.5881627", "0.5712084", "0.56903887", "0.56577164", "0.56141806", "0.55990887", "0.5588868", "0.5577193", "0.5558418", "0.555771", "0.55525905", "0.5540721", "0.5527112", "0.55160326", "0.5498196", "0.5469074", "0.5469074", "0.54687816", "0.5466761", "0.5440372", "0.54402584", "0.5429919", "0.54135245", "0.5412395", "0.53766793", "0.53566784", "0.5296665" ]
0.67037904
0
Computes the difference between nuclear luminosity and stellar luminosity. Arguments radius (scaled units) mass (scaled units) delta_m, eta, xi convergence parameters mue mean molecular weight pp_factor multiplicative factor for rate Returns Lnuc(R) 4piR2sigmaTeff4
def lum_difference(radius,mass,delta_m,eta,xi,mue,pp_factor): m,r,p,Lnuc = integrate(mass,radius,delta_m,eta,xi,mue,pp_factor,max_steps=10000) return Lnuc[-1]-surface_luminosity(Teff_for_main(m[-1]),r[-1])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_radius(mass,delta_m,eta,xi,mue,pp_factor):\n\n #range of radii; reason in detail under step 9 of report\n r_low = 0.01*Rsun # MKS\n r_high = 3*Rsun # MKS\n \n radius = brentq(lum_difference, r_low, r_high, xtol=1.0e-4, args = (mass,delta_m,eta,xi,mue,pp_factor))\n return radius", "def compute_luminosity(sed_star, types, verbose=False):\n lam = sed_star['lam']\n fstar1 = 1e23 * sed_star[\"fstar1\"]\n\n sed_star1_si = fluxToJy(fstar1, lam * 1e-6, 1, True)\n f_sed_star1 = interp1d(lam, fstar1)\n f_star1_vis = f_sed_star1(0.5)\n D = 1 * pc\n L1 = (4 * np.pi * (D * 1e-2) ** 2) * \\\n ip.trapz(sed_star1_si, lam) / const.L_sun.value\n L2 = ratio_vis = None\n\n if len(types) == 2:\n fstar2 = 1e23 * sed_star[\"fstar2\"]\n sed_star2_si = fluxToJy(fstar2, lam * 1e-6, 1, True)\n L2 = (4 * np.pi * (D * 1e-2) ** 2) * \\\n ip.trapz(sed_star2_si, lam) / const.L_sun.value\n f_sed_star2 = interp1d(lam, fstar2)\n f_star2_vis = f_sed_star2(0.5)\n ratio_vis = f_star2_vis / f_star1_vis\n\n if len(types) == 1:\n if verbose:\n cprint(\"\\nLtot = %2.1f Lsun\" % (L1), color=\"magenta\")\n return L1\n elif len(types) == 2:\n if verbose:\n cprint(\"\\nLtot = %2.1f + %2.1f = %2.1f Lsun\" %\n (L1, L2, L1 + L2), color=\"magenta\",)\n cprint(\"ratio[0.5mu] = %2.2f\" % ratio_vis, color=\"magenta\")\n return L1 + L2", "def LJ(epsilon,sigma,r):\n P1=(sigma/r)**12\n P2=(sigma/r)**6\n return 4*epsilon*(P1-P2)", "def Luminosity(self, z, f=1., dnu=1000.):\n ld = self.Luminosity_Distance(z)\n ld2 = ld * ld\n lum = f * self.Jy2CGS * dnu * self.MHz2Hz * 4 * np.pi * ld2\n return lum", "def calc_Lr(rho,mld,f,g=9.8,po=1027.):\n n2ml=np.ndarray(len(rho[1,:-1]))\n for i in range(len(rho[1,:-1])):\n n2ml[i]=-(g/po)*((rho[15,i]-rho[np.int8(mld[i])+15,i])/mld[i])\n Lr=(np.sqrt(n2ml)*mld[:-1])/f\n\n return Lr", "def luminosity_distance(self, z):\n da = self.angular_diameter_distance(z)\n dl = da*(1.+z)**2.\n return(dl)", "def lorentz_deriv(coord, sigma=10., beta=8./3, rho=28.0):\n x, y, z = coord # unpack coordinates\n return np.array([sigma * (y - x), x * (rho - z) - y, x * y - beta * z])", "def LJ(r, epsilon, sigma, x, y):\n A=((x/y)**(x/(x-y))/((x/y)-1))\n\n\n V=A*epsilon*((sigma/r)**x-(sigma/r)**y) #-4*Epsilon*((Sigma/Rc)**12-(Sigma/Rc)**6)\n\n return V", "def radio_lumfn(L, _params):\n print _params\n # Number density as a function of sfr, dn/dlog(sfr)\n sfr = L * 5.52e-29 # erg/s/Hz, Bell (2003), Eq. 
6\n dndlogsfr_sfms, dndlogsfr_pass = g.sfr_fn(hm, sfr, z=0., params=_params)\n #phi = dndlogsfr_sfms #+ dndlogsfr_pass\n return dndlogsfr_sfms, dndlogsfr_pass", "def luminosity(r,T,autoDebug=True):\n\t#-----------BEGIN ERROR CHECKING----------\n\tif autoDebug:\n\t\tsam.type_check(r, sam.TYPES_math, \"r\")\n\t\tsam.type_check(T, sam.TYPES_math, \"T\")\n\t\tsam.value_check(r,.0,\">\",\"r\")\n\t\tsam.value_check(T,.0,\">\",\"T\")\n\t#-----------END ERROR CHECKING----------\n\n\tL = 4 * sam.CONSTANT_pi * r**2 * sam.CONSTANT_SB* T**4\n\treturn L", "def infrared_luminosity(template, norm=1.):\n\n waves, L_nu = read_K15_template(template)\n\n wavelength_range = np.logical_and((waves >= 8), (waves <= 1000))\n\n # integrate L_nu over dnu\n freqs = SPEED_OF_LIGHT.to(u.micron / u.s).value / waves[wavelength_range] # Hz\n delta_freqs= freqs[:-1] - freqs[1:]\n L_IR = np.sum(dnu* l_nu for dnu, l_nu in zip(delta_freqs, L_nu[wavelength_range])) * norm\n\n return L_IR * (u.W).to(u.Lsun)", "def ee_radius_diffraction(self, energy=FIRST_AIRY_ENCIRCLED):\n return _inverse_analytic_encircled_energy(self.fno, self.wavelength, energy)", "def dLJverlet(x,r2,R1,R2):\r\n rc = (2**(1/6))*((R1+R2)/(2))\r\n sig_int = (R1+R2)/(2) #JV: This is the sigma of the interaction (in the system units). We don't need to divide by sigma because we are already working with reduced units\r\n\r\n #JV: Because we are working on reduced units (from the values of the Argon gas)\r\n # we want need to divide our radius by the radius of the Argon gas\r\n\r\n #JV: See LJverlet() for more explanation on the truncation\r\n if((r2**(1/2))>rc):\r\n value = 0\r\n else:\r\n value = ((48.*x)/(r2))*(((((sig_int**2)*1.)/r2)**6) - ((((sig_int**2)*0.5)/r2)**3))\r\n\r\n return value", "def particle_LJV(R,N,D):\n b = np.zeros(N)\n for i in range(N):\n x = R[0,np.arange(N)!=i]-R[0,i]\n y = R[1,np.arange(N)!=i]-R[1,i]\n z = R[2,np.arange(N)!=i]-R[2,i]\n [x,y,z] = minimal_image(x,y,z,D)\n c = np.stack((x,y,z))\n r = np.sqrt(np.sum(c**2,0))\n b[i] = np.sum(4*((1/r)**12-(1/r)**6))\n Uv = np.sum(b)\n return Uv", "def calculate_luminosity(\n spec_fname, distance, wavelength_column=0,\n wavelength_unit=u.angstrom, flux_column=1,\n flux_unit=u.Unit('erg / (Angstrom cm2 s)')):\n #BAD STYLE change to parse quantity\n distance = u.Unit(distance)\n\n wavelength, flux = np.loadtxt(spec_fname, usecols=(wavelength_column, flux_column), unpack=True)\n\n flux_density = np.trapz(flux, wavelength) * (flux_unit * wavelength_unit)\n luminosity = (flux_density * 4 * np.pi * distance**2).to('erg/s')\n\n return luminosity.value, wavelength.min(), wavelength.max()", "def e_x_unpolarized(rho, sigma, **kwargs):\n x = get_reduced_density_gradient(rho, sigma)\n return lda.e_x_lda_unpolarized(rho) * f_x(x, **kwargs)", "def get_luminosity(self):\n\n if self.no_dist is False and self.no_flux is False:\n\n dist = self.distance\n snu = self.snu_at_1GHz\n lum = lumin(dist, snu)\n\n self.lum = lum\n else:\n self.lum = -1 # use -1 to indicate unknown luminosity\n return self.lum", "def Mass_sch19(Radius, eRadius):\n a = -0.024048024\n b = 1.0552427\n a_err = 0.007592668\n b_err = 0.017044148\n M = a + b * Radius\n eM = np.sqrt((a_err)**2 + (Radius * b_err)**2 + (b * eRadius)**2)\n return(M, eM)", "def LorentzFactor(self):\n # Use of abs() and x ** 0.5 provides a more stable calculation of lorentz\n # factor than math.sqrt() at high velocities.\n return 1 / abs( 1 - Particle.BetaVelocity(self) * Particle.BetaVelocity(self))**0.5", "def test_zernike_radial(self): # noqa: C901\n # 
https://en.wikipedia.org/wiki/Zernike_polynomials#Radial_polynomials\n\n def Z3_1(x, dx=0):\n if dx == 0:\n return 3 * x**3 - 2 * x\n if dx == 1:\n return 9 * x**2 - 2\n if dx == 2:\n return 18 * x\n if dx == 3:\n return np.full_like(x, 18)\n if dx >= 4:\n return np.zeros_like(x)\n\n def Z4_2(x, dx=0):\n if dx == 0:\n return 4 * x**4 - 3 * x**2\n if dx == 1:\n return 16 * x**3 - 6 * x\n if dx == 2:\n return 48 * x**2 - 6\n if dx == 3:\n return 96 * x\n if dx == 4:\n return np.full_like(x, 96)\n if dx >= 5:\n return np.zeros_like(x)\n\n def Z6_2(x, dx=0):\n if dx == 0:\n return 15 * x**6 - 20 * x**4 + 6 * x**2\n if dx == 1:\n return 90 * x**5 - 80 * x**3 + 12 * x\n if dx == 2:\n return 450 * x**4 - 240 * x**2 + 12\n if dx == 3:\n return 1800 * x**3 - 480 * x\n if dx == 4:\n return 5400 * x**2 - 480\n if dx == 5:\n return 10800 * x\n if dx == 6:\n return np.full_like(x, 10800)\n if dx >= 7:\n return np.zeros_like(x)\n\n l = np.array([3, 4, 6])\n m = np.array([1, 2, 2])\n r = np.linspace(0, 1, 11) # rho coordinates\n max_dr = 4\n desired = {\n dr: np.array([Z3_1(r, dr), Z4_2(r, dr), Z6_2(r, dr)]).T\n for dr in range(max_dr + 1)\n }\n radial = {\n dr: zernike_radial(r[:, np.newaxis], l, m, dr) for dr in range(max_dr + 1)\n }\n radial_poly = {\n dr: zernike_radial_poly(r[:, np.newaxis], l, m, dr)\n for dr in range(max_dr + 1)\n }\n for dr in range(max_dr + 1):\n np.testing.assert_allclose(radial[dr], desired[dr], err_msg=dr)\n np.testing.assert_allclose(radial_poly[dr], desired[dr], err_msg=dr)", "def delta(self, day, F_min, sigma, lmda_center):\n lmdas, flux = self.data[day]\n dev = flux - self.flux_model(lmdas, F_min, sigma, lmda_center)\n return np.sum(np.power(dev, 2))", "def calculate_DELTA(epoch):\n\n # Calculate solar coordinates\n O, beta, R = Sun.geometric_geocentric_position(epoch)\n\n # Compute distance Earth - Jupiter (DELTA) by iteration (start value:\n # DELTA = 5 AU)\n DELTA_old = -1.0\n DELTA = 5.0\n x = 0.0\n y = 0.0\n z = 0.0\n tau = 0.0\n l, b, r = 0, 0, 0\n iterations = 0\n\n while DELTA != DELTA_old and iterations < 5:\n # Calculate light-time delay\n tau = 0.0057755183 * DELTA\n\n l, b, r = Jupiter.geometric_heliocentric_position(epoch - tau)\n\n x = r * cos(b.rad()) * cos(l.rad()) + R * cos(O.rad())\n y = r * cos(b.rad()) * sin(l.rad()) + R * sin(O.rad())\n z = r * sin(b.rad()) + R * sin(beta.rad())\n\n DELTA_old = DELTA\n DELTA = sqrt(x ** 2 + y ** 2 + z ** 2)\n iterations += 1\n\n return DELTA, tau, l, b, r", "def _microstrip_LC_per_meter(wire_width, dielectric_thickness, eps_r):\n # Use the fact that v = 1/sqrt(L_m*C_m) = 1/sqrt(eps*mu) and\n # Z = sqrt(L_m/C_m) [Where L_m is inductance per meter]\n Z, eps_eff = _microstrip_Z(wire_width, dielectric_thickness, eps_r)\n eps0 = 8.854e-12\n mu0 = 4*pi*1e-7\n\n eps = eps_eff*eps0\n mu = mu0\n L_m = sqrt(eps*mu) * Z\n C_m = sqrt(eps*mu) / Z\n return L_m, C_m", "def approximate_nonlinear_vector_field_radial(dataset_path, L, epsilon):\n\n file_X0 = \"nonlinear_vectorfield_data_x0.txt\"\n names_X0 = ['X0_x', 'X0_y']\n data_X0 = pd.read_csv(dataset_path / file_X0, sep=' ', names=names_X0).to_numpy()\n\n names_X1 = ['X1_x', 'X1_y']\n file_X1 = \"nonlinear_vectorfield_data_x1.txt\"\n data_X1 = pd.read_csv(dataset_path / file_X1, sep=' ', names=names_X1).to_numpy()\n\n \"\"\"\n Following block calculates the values of phi_l's for each point in dataset of X0\n and form the corresponding phi_X matrix with the given value of L.\n \"\"\"\n phi = np.empty([2000, L])\n for l in range(L):\n phi_l = np.exp(-np.square(np.linalg.norm(data_X0 
- data_X0[l],\n axis=1)) / epsilon ** 2)\n phi[:, l] = phi_l\n\n \"\"\"\n The following block performs the approximation of the vector field.\n \"\"\"\n V = (data_X1 - data_X0) / 0.1\n approx_func_Ct = np.linalg.inv(phi.T @ phi) @ phi.T @ V\n final = phi @ approx_func_Ct\n plt.scatter(final[:, 0], final[:, 1], c='green',\n label='approximated f(x)_hat values')\n plt.show()\n\n \"\"\"\n The following code plots the approximated vector field and the phase portrait.\n \"\"\"\n x, y = np.meshgrid(np.linspace(-5, 5, 10), np.linspace(-5, 5, 10))\n u, v = np.zeros((10, 10)), np.zeros((10, 10))\n for i in range(0, 10):\n for j in range(0, 10):\n u[i, j] = final.T[0, i]\n v[i, j] = final.T[1, j]\n plt.quiver(x, y, u, v)\n plt.streamplot(x, y, u, v)\n plt.title(\"Approximated Vector field\")\n plt.show()\n\n \"\"\"\n The following code calculates the MSE for the dataset X1 and the final values.\n \"\"\"\n MSE = np.square(data_X1 - final).mean()\n print(MSE)", "def TipLoss(lambdaInit, ThetaDist):\n if tipLoss == 1:\n iter = 0\n err = np.ones(len(r))\n while np.any(err > 0.005):\n # froot = 0.5*Nb*(r/((1 - r)*lam/r))\n f = 0.5 * Nb * ((1 - r) / lambdaInit)\n F = (2 / np.pi) * np.arccos(np.e ** (-f))\n lam = np.sqrt(1/4*(solDist*XsecPolarExp['Lift Slope']/(8*F)-lam_c)**2+solDist*XsecPolarExp['Lift Slope']*ThetaDist*r/(8*F))-(solDist*XsecPolarExp['Lift Slope']/(16*F)-lam_c/2)\n err = np.abs((lam - lambdaInit) / lam)\n err[np.where(np.isnan(err) == 1)] = 0\n lambdaInit = lam\n iter = iter + 1\n else:\n F = 1\n lam = np.sqrt(1/4*(solDist*XsecPolarExp['Lift Slope']/(8*F)-lam_c)**2+solDist*XsecPolarExp['Lift Slope']*ThetaDist*r/(8*F))-(solDist*XsecPolarExp['Lift Slope']/(16*F)-lam_c/2)\n\n lam[np.where(np.isnan(lam) == 1)] = 0\n # lam[0] = lam[1]\n # lam[-1] = lam[-2]\n return lam", "def radial(*args, attenuation: Union[float, bool]=0.0, magnitude: Union[float, bool]=0.0,\n maxDistance: Union[float, bool]=0.0, name: Union[AnyStr, bool]=\"\", perVertex:\n bool=True, position: Union[List[float, float, float], List[List[float, float,\n float]], bool]=None, torusSectionRadius: Union[float, bool]=0.0, type: Union[float,\n bool]=0.0, volumeExclusion: bool=True, volumeOffset: Union[List[float, float, float],\n bool]=None, volumeShape: Union[AnyStr, bool]=\"\", volumeSweep: Union[float, bool]=0.0,\n q=True, query=True, e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def distance_from_torus(self, points, params, sqrt=False):\n axis, center, major_radius, minor_radius = params\n axis = axis.reshape((3, 1)) / torch.norm(axis, p=2)\n center = center.reshape((1, 3))\n\n center2points = points - center\n z_new = center2points @ axis # N x 1\n\n x_new = guard_sqrt(torch.sum(center2points ** 2, 1, keepdim=True) - z_new ** 2) # N x 1\n\n # min distance for right circle\n right_dst = (guard_sqrt((x_new - major_radius) ** 2 + z_new ** 2) - minor_radius) ** 2\n\n # min distance for left circle\n left_dst = (guard_sqrt((x_new + major_radius) ** 2 + z_new ** 2) - minor_radius) ** 2\n\n distance = torch.min(right_dst, left_dst)\n distance = distance.squeeze()\n\n if sqrt:\n distance = guard_sqrt(distance)\n\n if self.reduce:\n distance = torch.mean(distance)\n return distance", "def uniform_laplacian(image, radius=1):\n height, width = image.shape[:2]\n window_size = 2 * radius + 1\n\n W = sparse_conv_matrix(width, height, np.ones((window_size, window_size)))\n\n return weights_to_laplacian(W)", "def luminosity_distance(self, z):\n return self.proper_distance(z) * (1 + z)", "def distance_modulus(self, z):\n 
return(5.0*np.log10(self.luminosity_distance(z))+25.0)" ]
[ "0.6105835", "0.59289014", "0.543074", "0.5385494", "0.5364295", "0.52827114", "0.52727747", "0.5263927", "0.5261019", "0.5210305", "0.5124193", "0.50878245", "0.5032547", "0.4996151", "0.49636453", "0.49624547", "0.49145013", "0.49066216", "0.48998904", "0.48876116", "0.484519", "0.48445717", "0.48209718", "0.48143697", "0.48048723", "0.48021176", "0.48016164", "0.47925758", "0.47584116", "0.47333813" ]
0.7697479
0
For a given mass calls rootfind over some range of radii, integrates over the function until the difference in luminosity is zero (nuclear luminosity = surface luminosity) Arguments mass (scaled units) delta_m, eta, xi convergence parameters mue mean molecular weight pp_factor multiplicative factor for rate Returns radius radius that satisfies the luminosity difference (scaled units)
def find_radius(mass,delta_m,eta,xi,mue,pp_factor): #range of radii; reason in detail under step 9 of report r_low = 0.01*Rsun # MKS r_high = 3*Rsun # MKS radius = brentq(lum_difference, r_low, r_high, xtol=1.0e-4, args = (mass,delta_m,eta,xi,mue,pp_factor)) return radius
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def lum_difference(radius,mass,delta_m,eta,xi,mue,pp_factor):\n m,r,p,Lnuc = integrate(mass,radius,delta_m,eta,xi,mue,pp_factor,max_steps=10000)\n return Lnuc[-1]-surface_luminosity(Teff_for_main(m[-1]),r[-1])", "def omega_min(m,mu,R,epsilon):\r\n num = e**2*mu*m\r\n den = (1 + epsilon)*np.pi*epsilon_0*hbar**2*R\r\n return np.sqrt(num/den)", "def integrateMomentum(p, dt, fluid_v, fvolpp, mu_g, rfluid):\n\n #integration constants\n beta = 2.0/3.0 \n alpha1 = -4.0/3.0 \n alpha2 = 1.0/3.0 \n dtbeta = dt * beta \n\n vel1 = p.vel[0] \n pos1 = dtbeta * vel1 - alpha1 * p.pos[0] - alpha2 * p.pos[1] \n rp = p.get_density() \n D = p.get_diameter() \n mdot = (p.mass[0] - p.mass[1]) / dt \n \n mfluid = rfluid * fvolpp + 1e-30 # mass of fluid around particle\n fixedsrc = -alpha1 * p.vel[0] - alpha2 * p.vel[1] \n volp = math.pi * D * D * D / 6.0 \n volpp = fvolpp \n # enhance drag function for large volume fraction\n alphav = min(2.0, volp / max(volpp, 1e-30)) \n \n fp_vf = max((8.0 * alphav) ** 6.0 - 0.001, 0.0) \n\n #Integration loop\n max_iterations = 20\n for i in range(max_iterations): \n #Update fluid velocity based on delta particle momentum\n if i > 0: #Past first iteration\n fluid_v = fluid_v - ((vel1 - p.vel[0]) * p.mass[0] / mfluid ) \n\n dv = abs(fluid_v - vel1) \n Re = rfluid * D * dv / mu_g \n # blowing Reynolds number\n Re_b = abs(mdot / (D * mu_g * math.pi)) \n a = 0.09 + 0.077 * math.exp(-0.4 * Re) \n b = 0.4 + 0.77 * math.exp(-0.04 * Re) \n denom = 1.0 + a * Re_b **b \n\n fpblow = (1. + 0.0545 * Re + 0.1 * math.sqrt(Re) * (1.0 - 0.03 * Re)) / denom + fp_vf \n # Clift-Gauvin drag function (Crowe, 1998)\n fpcg = 1.0 + 0.15 * Re ** 0.687 + 0.0175 * Re / (1.0 + 4.25e4 * (Re+1e-20) **-1.16) + fp_vf \n # Choose drag function based on reynolds number. 
For high reynolds\n # number use Clift Gauvin, otherwise use blowing reynolds number \n if Re < 100:\n fp = fpblow\n else:\n fp = fpcg\n taup = rp * D ** 2 / (18.0 * mu_g * fp) \n vcoef = dtbeta / taup \n\n # vel1 = (vcoef*fluid_v + fixedsrc)/(1.+vcoef) \n f = (vcoef * fluid_v + fixedsrc) / (1.0 + vcoef) - vel1 \n df = -vcoef * p.mass[0] / (mfluid * (1.0 + vcoef)) - 1.0 \n vel1 -= -f/df \n pos1 = dtbeta * vel1 - alpha1 * p.pos[0] - alpha2 * p.pos[1] \n\n # If iterated at least 2 times, check for convergence\n if i > 1 and abs(f) / (abs(df) * (0.1 + abs(vel1))) < 1.0e-5 : \n break \n \n # Now advance the particle momentum in time\n p.vel[2] = p.vel[1] \n p.vel[1] = p.vel[0] \n p.vel[0] = vel1 \n p.pos[1] = p.pos[0] \n p.pos[0] = pos1", "def eps_from_mu(mu, delta):\n\n def f(x):\n \"\"\"Reversely solve dual by matching delta.\"\"\"\n return delta_eps_mu(x, mu) - delta\n\n return optimize.root_scalar(f, bracket=[0, 500], method=\"brentq\").root", "def F(n, m, sigma, omega, mu, tau, T, R, range, theta, epsilon = 1):\r\n\r\n phase = np.exp(1j*theta)\r\n k = (1 + 0.99*phase)*range*omega/c\r\n dk = 0.99j*range*phase*omega/c\r\n deltam = 1j*sigma*(m**2 + (k*R)**2)/(epsilon_0*omega*R)\r\n besselI = special.iv(m,k*R)\r\n term_epsilon_den = (epsilon*special.kv(m,k*R)*special.ivp(m,k*R) - besselI*special.kvp(m,k*R))*k*R\r\n return dk*k**n/(term_epsilon_den + besselI*special.kv(m,k*R)*deltam)", "def mt(P_1,V0_1,meanF_1,rho): \n psi = np.arctan2(V0_1[2],-V0_1[0])\n \n # Find swept ares\n idx_zmax = np.argmax(P_1[:,-1,2])\n idx_ymax = np.argmax(P_1[:,-1,1])\n idx_zmin = np.argmin(P_1[:,-1,2])\n \n Ad = np.linalg.norm(P_1[idx_zmax,-1,2]-P_1[idx_zmin,-1,2])*P_1[idx_ymax,-1,1]\n print P_1[idx_zmax,-1,2]\n V0 = np.linalg.norm(V0_1)\n \n Vi_1new = np.zeros_like(V0_1,dtype=float)\n\n while True:\n Vi_1 = Vi_1new\n \n Vi_1new[0] = meanF_1[0] / (2 * rho * Ad * np.sqrt( (V0*np.cos(psi)+Vi_1[0])**2 + (-V0*np.sin(psi)+Vi_1[2])**2 )) \n Vi_1new[2] = meanF_1[2] / (2 * rho * Ad * np.sqrt( (V0*np.cos(psi)+Vi_1[0])**2 + (-V0*np.sin(psi)+Vi_1[2])**2 )) \n \n if np.linalg.norm(Vi_1-Vi_1new) < 0.001:\n break\n\n return -Vi_1", "def F_calcDMradius(i, t, st, dm, t1, tth):\n mr = st.mn*dm.mxkg_v[i]/(st.mn+dm.mxkg_v[i]) # reduced mass, kg\n # before thermalization (cooling), rx changes with time:\n rxco = np.array([ F_rxco2(tim,t1,mr,(st.nb*1.e+6),dm.sigx_m,st.Rs,dm.mxkg_v[i],pF) for tim in t.time ]) # cm\n print \"-- Radius: rxco at t1 = \",F_rxco2(t1+0.1,t1,mr,(st.nb*1.e+6),dm.sigx_m,st.Rs,dm.mxkg_v[i],pF)\n # after thermalization:\n rxth1 = F_rxth(dm.mx_v[i],st.rhoc,st.Temp) # cm (formula)\n rxth2 = np.interp(tth,t.time,rxco) \t# cm (rxco(tth))\n rxth = rxth1\n print \" rxth=%.2e , rxth1=%.2e , rxth2=%.2e\" % (rxth,rxth1,rxth2)\n for k in xrange(len(t.time)):\n if t.time[k]<t1:\n t.rxtag[k] = 'Rs '\n t.rx[k] = st.Rs*1.e+2\n elif t.time[k]<tth:\n t.rxtag[k] = 'rxco'\n t.rx[k] = rxco[k]\n elif t.time[k]>=tth:\n t.rxtag[k] = 'rxth'\n t.rx[k] = rxth\n return rxco, rxth", "def kp_res(m, sigma, omega, mu, tau, T, R, range, epsilon = 1):\r\n\r\n I0 = IF(0, m, sigma, omega, mu, tau, T, R, range, epsilon)\r\n I1 = IF(1, m, sigma, omega, mu, tau, T, R, range, epsilon)\r\n I2 = IF(2, m, sigma, omega, mu, tau, T, R, range, epsilon)\r\n\r\n if (I0 == 0)|(I1 == 0)|(I2 == 0):\r\n return 0\r\n\r\n result1 = I1/I0\r\n result2 = I2/I1\r\n\r\n pct_error = 100*np.abs((result2 - result1)/result1)\r\n if pct_error > 1:\r\n return 0\r\n else:\r\n return result1", "def jam_axi_rms(surf_lum, sigma_lum, qobs_lum, surf_pot, sigma_pot, qobs_pot,\n 
inc, mbh, distance, xbin, ybin, ml=None, normpsf=1., pixang=0.,\n pixsize=0., plot=True, rms=None, erms=None, sigmapsf=0.,\n goodbins=None, quiet=False, beta=None, step=0., nrad=20,\n nang=10, rbh=0.01, tensor='zz', vmin=None, vmax=None, **kwargs):\n if beta is None:\n beta = np.zeros_like(surf_lum) # Anisotropy parameter beta = 1 - (sig_z/sig_R)**2\n if not (surf_lum.size == sigma_lum.size == qobs_lum.size == beta.size):\n raise ValueError(\"The luminous MGE components do not match\")\n if not (surf_pot.size == sigma_pot.size == qobs_pot.size):\n raise ValueError(\"The total mass MGE components do not match\")\n if xbin.size != ybin.size:\n raise ValueError(\"xbin and ybin do not match\")\n if rms is not None:\n if erms is None:\n erms = np.full_like(rms, np.median(rms)*0.05) # Constant ~5% errors\n if goodbins is None:\n goodbins = np.ones_like(rms, dtype=bool)\n elif goodbins.dtype != bool:\n raise ValueError(\"goodbins must be a boolean vector\")\n if not (xbin.size == rms.size == erms.size == goodbins.size):\n raise ValueError(\"(rms, erms, goodbins) and (xbin, ybin) do not match\")\n\n sigmapsf = np.atleast_1d(sigmapsf)\n normpsf = np.atleast_1d(normpsf)\n if sigmapsf.size != normpsf.size:\n raise ValueError(\"sigmaPSF and normPSF do not match\")\n\n pc = distance*np.pi/0.648 # Constant factor to convert arcsec --> pc\n\n surf_lum_pc = surf_lum\n surf_pot_pc = surf_pot\n sigma_lum_pc = sigma_lum*pc # Convert from arcsec to pc\n sigma_pot_pc = sigma_pot*pc # Convert from arcsec to pc\n xbin_pc = xbin*pc # Convert all distances to pc\n ybin_pc = ybin*pc\n pixSize_pc = pixsize*pc\n sigmaPsf_pc = sigmapsf*pc\n step_pc = step*pc\n\n # Add a Gaussian with small sigma and the same total mass as the BH.\n # The Gaussian provides an excellent representation of the second moments\n # of a point-like mass, to 1% accuracy out to a radius 2*sigmaBH.\n # The error increses to 14% at 1*sigmaBH, independently of the BH mass.\n #\n if mbh > 0:\n sigmaBH_pc = rbh*pc # Adopt for the BH just a very small size\n surfBH_pc = mbh/(2*np.pi*sigmaBH_pc**2)\n surf_pot_pc = np.append(surfBH_pc, surf_pot_pc) # Add Gaussian to potential only!\n sigma_pot_pc = np.append(sigmaBH_pc, sigma_pot_pc)\n qobs_pot = np.append(1., qobs_pot) # Make sure vectors do not have extra dimensions\n\n qobs_lum = qobs_lum.clip(0, 0.999)\n qobs_pot = qobs_pot.clip(0, 0.999)\n\n t = clock()\n rmsModel = _vrms2(xbin_pc, ybin_pc, inc, surf_lum_pc, sigma_lum_pc,\n qobs_lum, surf_pot_pc, sigma_pot_pc, qobs_pot, beta,\n tensor, sigmaPsf_pc, normpsf, pixSize_pc, pixang,\n step_pc, nrad, nang)\n if not quiet:\n print('jam_axi_rms elapsed time sec: %.2f' % (clock() - t))\n\n if tensor in ('xx', 'yy', 'zz'):\n rmsModel = np.sqrt(rmsModel.clip(0)) # Return SQRT and fix possible rounding errors\n if tensor in ('xy', 'xz'):\n rmsModel *= np.sign(xbin*ybin) # Calculation was done in positive quadrant\n\n # Analytic convolution of the MGE model with an MGE circular PSF\n # using Equations (4,5) of Cappellari (2002, MNRAS, 333, 400)\n #\n lum = surf_lum_pc*qobs_lum*sigma_lum**2 # Luminosity/(2np.pi) of each Gaussian\n flux = np.zeros_like(xbin) # Total MGE surface brightness for plotting\n for sigp, norp in zip(sigmapsf, normpsf): # loop over the PSF Gaussians\n sigmaX = np.sqrt(sigma_lum**2 + sigp**2)\n sigmaY = np.sqrt((sigma_lum*qobs_lum)**2 + sigp**2)\n surfConv = lum / (sigmaX*sigmaY) # PSF-convolved in Lsun/pc**2\n for srf, sx, sy in zip(surfConv, sigmaX, sigmaY): # loop over the galaxy MGE Gaussians\n flux += 
norp*srf*np.exp(-0.5*((xbin/sx)**2 + (ybin/sy)**2))\n\n if rms is None:\n\n chi2 = None\n if ml is None:\n ml = 1.\n else:\n rmsModel *= np.sqrt(ml)\n\n else:\n\n if (ml is None) or (ml <= 0):\n\n # y1, dy1 = rms, erms # (y1 are the data, y2 the model)\n # scale = sum(y1*y2/dy1**2)/sum(y2**2/dy1**2) # (equation 51)\n #\n ml = (np.sum(rms[goodbins]*rmsModel[goodbins]/erms[goodbins]**2)\n / np.sum((rmsModel[goodbins]/erms[goodbins])**2))**2\n\n rmsModel *= np.sqrt(ml)\n chi2 = np.sum(((rms[goodbins]-rmsModel[goodbins])/erms[goodbins])**2) / goodbins.sum()\n\n if not quiet:\n print('inc=%.1f beta_z=%.2f M/L=%.3g BH=%.2e chi2/DOF=%.3g' % (inc, beta[0], ml, mbh*ml, chi2))\n mass = 2*np.pi*surf_pot_pc*qobs_pot*sigma_pot_pc**2\n print('Total mass MGE: %.4g' % np.sum(mass*ml))\n\n if plot:\n\n rms1 = rms.copy() # Only symmetrize good bins\n rms1[goodbins] = symmetrize_velfield(xbin[goodbins], ybin[goodbins], rms[goodbins])\n\n if (vmin is None) or (vmax is None):\n vmin, vmax = stats.scoreatpercentile(rms1[goodbins], [0.5, 99.5]) # Could use np.percentile in Numpy 1.10\n\n plt.clf()\n plt.subplot(121)\n plot_velfield(xbin, ybin, rms1, vmin=vmin, vmax=vmax, flux=flux, **kwargs)\n plt.title(r\"Input $V_{\\rm rms}$\")\n\n plt.subplot(122)\n plot_velfield(xbin, ybin, rmsModel, vmin=vmin, vmax=vmax, flux=flux, **kwargs)\n plt.plot(xbin[~goodbins], ybin[~goodbins], 'ok', mec='white')\n plt.title(r\"Model $V_{\\rm rms}$\")\n plt.tick_params(labelleft='off')\n plt.subplots_adjust(wspace=0.03)\n\n return rmsModel, ml, chi2, flux", "def update_weights(self,tol = 1e-6,maxIter = 5e2, verbose = False):\n # Armijo parameter\n alphaA = 0.01\n\n self.update_boundaries()\n \n i = 0\n tau = .5\n F = -self.masses.copy()\n if self.intp_rho is None:\n F[self.indices] += self.compute_integrals(self.rho)\n else: \n F[self.indices] += self.compute_integrals_ipp(self.intp_rho,p=0) \n #F[self.indices] += self.compute_integrals(self.rho)\n\n error = np.linalg.norm(F) \n #cost_old = self.compute_ot_cost() \n if self.intp_rho is None :\n cost_old = self.compute_ot_cost()\n else: \n cost_old = self.compute_ot_cost_ipp()\n \n while error>tol and i<maxIter:\n \n Hess = self.compute_integrals_gradient(self.rho) \n #print(self.indices)\n #if tau<1e-9: theta=1. 
\n \n theta = 0.\n deltaw = -theta*F\n deltaw[self.indices] -= (1-theta)*spsolve(Hess,F[self.indices])\n \n weights_old = self.weights.copy()\n k=0\n \n # Linesearch\n while True:\n self.weights = weights_old +tau*deltaw\n self.update_boundaries()\n #cost = self.compute_ot_cost()\n\n if self.intp_rho is None :\n cost = self.compute_ot_cost()\n else: \n cost = (np.sum(self.masses*self.weights)\n +np.sum(self.compute_integrals_ipp(self.intp_rho,p=2)\n -self.compute_integrals_ipp(self.intp_rho,p=0)*self.weights[self.indices]))\n \n if (cost >= cost_old + tau*alphaA*np.dot(F,deltaw)\n and len(self.indices)==len(self.X)) or tau<1e-10: break\n \n else: \n k += 1\n tau = tau*.8 \n \n #print(deltaw)\n #if i>200: tau = np.min((1., tau*1.01))\n \n cost_old = cost\n \n #self.weights = weights_new.copy() \n #self.update_boundaries()\n #print(cost,tau)\n i+=1\n F = -self.masses.copy()\n if self.intp_rho is None:\n F[self.indices] += self.compute_integrals(self.rho)\n else:\n F[self.indices] += self.compute_integrals_ipp(self.intp_rho,p=0)\n #F[self.indices] += self.compute_integrals(self.rho) \n error = np.linalg.norm(F) \n \n if verbose: print(\"Newton step: {}, cost: {}, tau: {}, error: {}, active particles: {}\".format(i,cost,tau,error,len(self.indices))) \n tau = np.min((tau*1.1,1.))\n\n if i< maxIter and verbose: print(\"Optimization success!\")", "def sigma_mean_delta(R, m_x, **kwargs):\n return m_x / (np.pi * R**2)", "def _c_numeric(self, rij):\n radial_fun = np.zeros((self.lmax+1, self.nmax))\n radial_fun[0,1] = 1.0\n\n #Get local references to these variables so that we don't need `self`\n #all over in the overbasis calculation below.\n alpha = self.alpha\n rb = self.rb \n for n in range(1, self.nmax+1):\n argbess = 2*alpha*rb[n-1]*rij\n ep = np.exp(-alpha*(rij + rb[n-1])**2)\n em = np.exp(-alpha*(rij - rb[n-1])**2)\n #In the loops below, msb prefix refers to modified spherical bessel.\n for l in range(self.lmax+1):\n if l == 0:\n if argbess == 0.0:\n msb_fi_ki_l = np.exp(-alpha*(rb[n-1]**2 + rij**2))\n else:\n #msb_fi_ki_lm = cosh(arg_bess)/arg_bess\n #msb_fi_ki_l = sinh(arg_bess)/arg_bess\n msb_fi_ki_lm = 0.5 * (em + ep) / argbess\n msb_fi_ki_l = 0.5 * (em - ep) / argbess\n else:\n if argbess == 0.0:\n msb_fi_ki_l = 0.0\n else:\n msb_fi_ki_lmm = msb_fi_ki_lm\n msb_fi_ki_lm = msb_fi_ki_l\n msb_fi_ki_l = msb_fi_ki_lmm-(2*l-1)*msb_fi_ki_lm/argbess\n\n radial_fun[l,n-1] = msb_fi_ki_l #* rb[n-1]\n fc = fcut(rij, self.rcut, self.trans_width)\n return np.dot(radial_fun, self.transformbasis)*fc", "def helper_search(mass_min, mass_max, formula_mass, formula, last_index, delta):\n for index, element in enumerate(formula):\n if index >= last_index:\n new_formula_mass = formula_mass + element['freqisotope']['mass']\n if mass_max - new_formula_mass >= -delta:\n new_formula = formula.copy()\n new_formula[element] += 1\n if new_formula_mass - mass_min >= -delta:\n # formula in tolerance interval, add it to solutions\n formulas.append(new_formula)\n else:\n # still some mass left, keep searching\n helper_search(mass_min, mass_max, new_formula_mass, new_formula, index, delta)", "def Solve(self):\n if self.verbose:\n print(\"{0:>6s}{1:>16s}{2:>16s}{3:>16s}\".format(\"iter\", \"energy\", \"rms error\", \"max error\"))\n guess = self.polarization_matrix.dot(self.F)\n\n for k in range(1, self.max_iterations):\n F = self.F + self.interaction_matrix.dot(guess)\n\n (guess, difference) = self.Step(guess, F, k)\n\n # error is RMS in the difference\n rms_err = numpy.std(difference)\n max_err = 
numpy.max(difference)\n\n energy = -0.5 * self.F.dot(guess)\n if self.verbose:\n print(\"{0:6d}{1:16.8f}{2:16.8f}{3:16.8f}\".format(k, energy, rms_err, max_err))\n\n # according to:\n # Scalmani G et al. Theo. Chem. Account. 2004, (111), 90 - 100, DOI: 10.1007/s00214-003-0527-2\n #\n # we use a convergence threshold on both the RMS and MAX of the residual\n if rms_err < self.threshold and max_err < self.threshold:\n break\n\n # destroy matrices to conserve memory\n self.polarization_matrix = None\n self.interaction_matrix = None\n return guess", "def psi_xm(E_val,lec,lam):\n x = np.linspace(0, xm, n+1) # grid in the x-direction\n y = np.zeros(n+1) # wave-function in individual points\n # initial conditions\n y[0] = 0\n y[1] = 1.0\n #\n for i in range(1,n):\n y[i + 1] = (2 - 5 * dx2 * f(i, E_val,lec,lam) / 6) * y[i] - (1 + dx2 * f(i-1, E_val,lec,lam) / 12) * y[i - 1]\n y[i + 1] /= (1 + dx2 * f(i+1, E_val,lec,lam) / 12)\n return y[n]-asymptotic_boundary(-E_val)", "def calculate_muscl_fluxes(densities, pressures, velocities, gamma,\n mass_ratios, specific_heats, molar_masses, dt_over_dx):\n # Get half step densities\n limiter = UltraBeeLimiter()\n half_step_densities_L = np.zeros(len(densities) - 2)\n half_step_velocities_L = np.zeros(half_step_densities_L.shape)\n half_step_pressures_L = np.zeros(half_step_densities_L.shape)\n half_step_mass_ratios_L = np.zeros((len(densities) - 2, len(specific_heats)))\n half_step_densities_R = np.zeros(half_step_densities_L.shape)\n half_step_velocities_R = np.zeros(half_step_densities_L.shape)\n half_step_pressures_R = np.zeros(half_step_densities_L.shape)\n half_step_mass_ratios_R = np.zeros(half_step_mass_ratios_L.shape)\n for i, dens in enumerate(half_step_densities_L):\n idx = i + 1\n\n # Calculate slopes\n left_slopes = dict()\n left_slopes[\"rho\"] = (densities[idx] - densities[idx - 1]) / 2\n left_slopes[\"mom\"] = (densities[idx] * velocities[idx] - densities[idx - 1] * velocities[idx - 1]) / 2\n cell_energy = 0.5 * densities[idx] * velocities[idx] * velocities[idx] + pressures[idx] / (gamma[idx] - 1)\n behind_energy = 0.5 * densities[idx - 1] * velocities[idx - 1] * velocities[idx - 1] + pressures[idx - 1] / (gamma[idx - 1] - 1)\n left_slopes[\"energy\"] = (cell_energy - behind_energy) / 2\n\n right_slopes = dict()\n right_slopes[\"rho\"] = (densities[idx + 1] - densities[idx]) / 2\n right_slopes[\"mom\"] = (densities[idx + 1] * velocities[idx + 1] - densities[idx] * velocities[idx]) / 2\n forward_energy = 0.5 * densities[idx + 1] * velocities[idx + 1] * velocities[idx + 1] + pressures[idx + 1] / (gamma[idx + 1] - 1)\n right_slopes[\"energy\"] = (forward_energy - cell_energy) / 2\n\n average_density_slope, average_momentum_slope, average_energy_slope = limiter.calculate_limited_slopes(left_slopes, right_slopes)\n\n # Interpolate left and right densities\n left_density = densities[idx] - average_density_slope\n left_momentum = densities[idx] * velocities[idx] - average_momentum_slope\n left_energy = cell_energy - average_energy_slope\n left_mass_ratios = mass_ratios[idx, :]\n assert left_density > 0, left_density\n assert left_energy > 0, left_energy\n assert np.isclose(1.0, left_mass_ratios.sum(), 1e-14)\n\n right_density = densities[idx] + average_density_slope\n right_momentum = densities[idx] * velocities[idx] + average_momentum_slope\n right_energy = cell_energy + average_energy_slope\n right_mass_ratios = mass_ratios[idx, :]\n assert right_density > 0, right_density\n assert right_energy > 0, right_energy\n assert np.isclose(1.0, 
right_mass_ratios.sum(), 1e-14)\n\n # Perform half step flux\n left_velocity = left_momentum / left_density\n left_density_flux = left_momentum\n left_internal_energy = left_energy - 0.5 * left_momentum * left_velocity\n left_pressure = left_internal_energy * (gamma[idx] - 1)\n left_momentum_flux = left_momentum * left_velocity + left_pressure\n left_energy_flux = (left_energy + left_pressure) * left_velocity\n\n right_velocity = right_momentum / right_density\n right_density_flux = right_momentum\n right_internal_energy = right_energy - 0.5 * right_momentum * right_velocity\n right_pressure = right_internal_energy * (gamma[idx] - 1)\n right_momentum_flux = right_momentum * right_velocity + right_pressure\n right_energy_flux = (right_energy + right_pressure) * right_velocity\n\n half_step_density_flux = (left_density_flux - right_density_flux) * dt_over_dx * 0.5\n half_step_momentum_flux = (left_momentum_flux - right_momentum_flux) * dt_over_dx * 0.5\n half_step_energy_flux = (left_energy_flux - right_energy_flux) * dt_over_dx * 0.5\n\n state = ThermodynamicState1D(left_pressure, left_density, left_velocity, gamma[idx], left_mass_ratios)\n state.update_states(half_step_density_flux,\n half_step_momentum_flux,\n half_step_energy_flux,\n specific_heats, molar_masses)\n half_step_densities_L[i] = state.rho\n half_step_velocities_L[i] = state.u\n half_step_pressures_L[i] = state.p\n half_step_mass_ratios_L[i, :] = state.mass_ratios\n\n state = ThermodynamicState1D(right_pressure, right_density, right_velocity, gamma[idx], right_mass_ratios)\n state.update_states(half_step_density_flux,\n half_step_momentum_flux,\n half_step_energy_flux,\n specific_heats, molar_masses)\n half_step_densities_R[i] = state.rho\n half_step_velocities_R[i] = state.u\n half_step_pressures_R[i] = state.p\n half_step_mass_ratios_R[i, :] = state.mass_ratios\n\n # Calculate final fluxes\n density_fluxes = np.zeros(len(half_step_densities_R) - 1)\n momentum_fluxes = np.zeros(len(half_step_densities_R) - 1)\n total_energy_fluxes = np.zeros(len(half_step_densities_R) - 1)\n mass_ratio_fluxes = np.zeros((len(half_step_densities_R) - 1, mass_ratios.shape[1]))\n\n for i, dens_flux in enumerate(density_fluxes):\n solver = IterativeRiemannSolver()\n\n # Generate left and right states from cell averaged values\n left_state = ThermodynamicState1D(half_step_pressures_R[i],\n half_step_densities_R[i],\n half_step_velocities_R[i],\n gamma[i],\n half_step_mass_ratios_L[i, :])\n right_state = ThermodynamicState1D(half_step_pressures_L[i + 1],\n half_step_densities_L[i + 1],\n half_step_velocities_L[i + 1],\n gamma[i + 1],\n half_step_mass_ratios_R[i + 1, :])\n\n # Solve Riemann problem for star states\n p_star, u_star = solver.get_star_states(left_state, right_state)\n\n # Calculate fluxes using solver sample function\n p_flux, u_flux, rho_flux, is_left = solver.sample(0.0, left_state, right_state, p_star, u_star)\n\n # Store fluxes in array\n mass_ratio_fluxes[i, :] = left_state.mass_ratios if is_left else right_state.mass_ratios\n flux_gamma = left_state.gamma if is_left else right_state.gamma\n density_fluxes[i] = rho_flux * u_flux\n momentum_fluxes[i] = rho_flux * u_flux * u_flux + p_flux\n e_tot = p_flux / (flux_gamma - 1) + 0.5 * rho_flux * u_flux * u_flux\n total_energy_fluxes[i] = (p_flux + e_tot) * u_flux\n\n return density_fluxes, momentum_fluxes, total_energy_fluxes, mass_ratio_fluxes", "def compute_projmass(args):\n radius = args.radius/3600.0\n\n k_map = pyfits.open(args.kappa_map)\n k_data = k_map[0].data\n k_data_tmp = 
k_data\n\n pix_dim = math.fabs(k_map[0].header[\"CDELT1\"])\n pix_unit = k_map[0].header[\"CUNIT1\"]\n shape = k_map[0].data.shape\n\n x_axis = np.linspace(-(shape[0] - 1.0)/2.0*pix_dim , \\\n (shape[0] - 1.0)/2.0*pix_dim, shape[0])\n y_axis = np.linspace(-(shape[1] - 1.0)/2.0*pix_dim , \\\n (shape[1] - 1.0)/2.0*pix_dim, shape[1])\n\n if pix_unit != \"deg\":\n print \"Error, pixel unit not in deg\"\n if (x_axis.max() - x_axis.min())/2.0 < radius:\n print \"Error, the radius is larger than the image limits\"\n\n\n proj_mass = 0.0\n for i_x in range(shape[0]):\n for i_y in range(shape[1]):\n if x_axis[i_x]**2.0 + y_axis[i_y]**2.0 <= radius**2.0:\n #k_data_tmp[i_x][i_y] = 0.0\n proj_mass += k_data_tmp[i_x][i_y]\n\n print \"%e M_sol\" % (proj_mass*1E12)\n\n if args.plot_cont:\n circ = fc.make_circunference(radius*3600, 0, 0)\n plt.plot(circ[0], circ[1], \"k--\", linewidth = 2)\n plt.contour(x_axis*3600.0, y_axis*3600.0, k_data)\n plt.show()\n\n return proj_mass", "def linear_monotonicity_radius(self,acc=1.e-10,tol=1.e-15,tol2=1e-8):\n\n p,q=self.stability_function()\n for i in range(len(p)+1):\n if abs(p[i])<=tol2: p[i]=0.0\n for i in range(len(q)+1):\n if abs(q[i])<=tol2: q[i]=0.0\n #First check extreme cases\n if p.order>q.order: return 0\n phi = lambda z: p(z)/q(z)\n #Get the negative real zeroes of the derivative of p/q:\n phip=p.deriv()*q-q.deriv()*p\n zeroes=[z for z in phip.r if np.isreal(z) and z<0]\n #Find the extremum of phi on (-inf,0):\n xmax=-10000\n if phip(0)<0: return 0\n if len(zeroes)>0:\n for i in range(len(zeroes)):\n if p(zeroes[i])/q(zeroes[i])<p(xmax)/q(xmax) and zeroes[i]>xmax:\n xmax=zeroes[i]\n zmax=max(abs(phi(zeroes)))\n rlo=max(zeroes)\n if p.order==q.order:\n zmax=max(zmax, abs(p[len(p)]/q[len(q)]))\n else:\n if p.order<q.order: return -np.inf\n if p.order==q.order:\n zmax=abs(p[len(p)]/q[len(q)])\n if p[len(p)]/q[len(q)]>=-tol: return -np.inf\n rlo=-10000\n s=p-zmax*q\n zeroes2=[z for z in s.r if np.isreal(z) and z<0 and z>=xmax]\n if len(zeroes2)>0:\n r=max(zeroes2)\n else: r=0\n return float(np.real(r))", "def getAbsNormalizationFactor(deltaE_wkspace,min,max):\n global reducer\n van_mass=reducer.get_default_parameter('vanadium-mass') \n \n Integration(InputWorkspace=deltaE_wkspace,OutputWorkspace='van_int',RangeLower=min,RangeUpper=max,IncludePartialBins='1')\n input_ws = mtd[deltaE_wkspace]\n ei_monovan = input_ws.getRun().getLogData(\"Ei\").value\n data_ws=mtd['van_int']\n nhist = data_ws.getNumberHistograms()\n #print nhist\n\n signal1_sum = 0.0\n weight1_sum = 0.0 \n signal2_sum = 0.0\n weight2_sum = 0.0 \n signal3_sum = 0.0\n weight3_sum = 0.0 \n signal4_sum = 0.0\n weight4_sum = 0.0 \n\n \n ic=0;\n izerc=0;\n for i in range(nhist):\n try:\n det = data_ws.getDetector(i)\n except Exception:\n continue\n if det.isMasked():\n continue\n\n signal = data_ws.readY(i)[0]\n error = data_ws.readE(i)[0]\n \n if signal != signal: #ignore NaN\n continue\n if ((error<=0) or (signal<=0)): # ignore Inf (0 in error are probably 0 in sign\n izerc+=1\n continue\n # Guess which minimizes the value sum(n_i-n)^2/Sigma_i -- this what Libisis had\n weight = 1.0/error\n signal1_sum += signal * weight\n weight1_sum += weight \n # Guess which minimizes the value sum(n_i-n)^2/Sigma_i^2\n weight2 = 1.0/(error*error)\n signal2_sum += signal * weight2\n weight2_sum += weight2 \n # Guess which assumes puassonian distribution with Err=Sqrt(signal) and calculates \n # the function: N_avrg = 1/(DetEfficiency_avrg^-1)*sum(n_i*DetEfficiency_i^-1)\n # where the DetEfficiency = 
WB_signal_i/WB_average WB_signal_i is the White Beam Vanadium \n # signal on i-th detector and the WB_average -- average WB vanadium signal. \n # n_i is the modified signal \n err_sq = error*error\n weight = err_sq/signal\n signal3_sum += err_sq\n weight3_sum += weight\n # Guess which estimatnes value sum(n_i^2/Sigma_i^2)/sum(n_i/Sigma_i^2) TGP suggestion from 12-2012\n signal4_sum += signal*signal/err_sq\n weight4_sum += signal/err_sq\n \n ic += 1 \n #print 'signal value =' ,signal\n #print 'error value =' ,error \n #print 'average ',signal_sum \n #---------------- Loop finished\n \n if( weight1_sum==0.0 or weight2_sum == 0.0 or weight3_sum == 0.0 or weight4_sum == 0.0) :\n print \"WB integral has been calculated incorrectrly, look at van_int workspace and input workspace: \",deltaE_wkspace\n raise IOError(\" divided by 0 weight\")\n \n integral_monovanLibISIS=signal1_sum / weight1_sum\n integral_monovanSigSq =signal2_sum / weight2_sum \n integral_monovanPuason =signal3_sum / weight3_sum \n integral_monovanTGP =signal4_sum / weight4_sum\n #integral_monovan=signal_sum /(wbVan_sum)\n van_multiplier = (float(reducer.van_rmm)/float(van_mass))\n absnorm_factorLibISIS = integral_monovanLibISIS * van_multiplier\n absnorm_factorSigSq = integral_monovanSigSq * van_multiplier \n absnorm_factorPuason = integral_monovanPuason * van_multiplier \n absnorm_factorTGP = integral_monovanTGP * van_multiplier \n #print 'Monovan integral :' ,integral_monovan \n \n if ei_monovan >= 210.0: \n xsection = 421 # vanadium cross-section in mBarn/sR (402 mBarn/Sr) (!!!modified to fit high energy limit?!!!)\n else: # old textbook cross-section for vanadium for ei=20mEv\n xsection = 400 + (ei_monovan/10) \n\n absnorm_factorLibISIS /= xsection\n absnorm_factorSigSq /= xsection \n absnorm_factorPuason /= xsection \n absnorm_factorTGP /= xsection \n \n sample_multiplier = (float(reducer.sample_mass)/float(reducer.sample_rmm))\n absnorm_factorLibISIS= absnorm_factorLibISIS *sample_multiplier\n absnorm_factorSigSq = absnorm_factorSigSq *sample_multiplier\n absnorm_factorPuason = absnorm_factorPuason *sample_multiplier\n absnorm_factorTGP = absnorm_factorTGP *sample_multiplier\n \n if (absnorm_factorLibISIS !=absnorm_factorLibISIS)|(izerc!=0): # It is an error, print diagnostics:\n if (absnorm_factorLibISIS !=absnorm_factorLibISIS):\n print '--------> Absolute normalization factor is NaN <----------------------------------------------'\n else:\n print '--------> Warning, Monovanadium has zero spectra <--------------------------------------------' \n print '--------> Processing workspace: ',deltaE_wkspace\n print '--------> Monovan Integration range : min=',min,' max=',max\n print '--------> Summarized: ',ic,' spectra with total value: ',signal2_sum, 'and total weight: ',weight2_sum\n print '--------> Dropped: ',izerc,' empty spectra'\n print '--------> Van multiplier: ',van_multiplier,' sample multiplier: ',sample_multiplier, 'and xsection: ',xsection \n print '--------> Abs norm factors: LibISIS: ',absnorm_factorLibISIS,' Sigma^2: ',absnorm_factorSigSq\n print '--------> Abs norm factors: Puasonian: ',absnorm_factorPuason, ' TGP: ',absnorm_factorTGP\n print '----------------------------------------------------------------------------------------------' \n else:\n DeleteWorkspace(Workspace=deltaE_wkspace)\n DeleteWorkspace(Workspace=data_ws)\n return (absnorm_factorLibISIS,absnorm_factorSigSq,absnorm_factorPuason,absnorm_factorTGP)", "def _vmomentsurfacemass(self,R,n,m,romberg=False,nsigma=None,\n 
relative=False,phi=0.,deriv=None):\n #odd moments of vR are zero\n if isinstance(n,int) and n%2 == 1:\n return 0.\n if nsigma == None:\n nsigma= _NSIGMA\n logSigmaR= self.targetSurfacemass(R,log=True,use_physical=False)\n sigmaR2= self.targetSigma2(R,use_physical=False)\n sigmaR1= sc.sqrt(sigmaR2)\n logsigmaR2= sc.log(sigmaR2)\n if relative:\n norm= 1.\n else:\n norm= sc.exp(logSigmaR+logsigmaR2*(n+m)/2.)/self._gamma**m\n #Use the asymmetric drift equation to estimate va\n va= sigmaR2/2./R**self._beta*(1./self._gamma**2.-1.\n -R*self._surfaceSigmaProfile.surfacemassDerivative(R,log=True)\n -R*self._surfaceSigmaProfile.sigma2Derivative(R,log=True))\n if math.fabs(va) > sigmaR1: va = 0. #To avoid craziness near the center\n if deriv is None:\n if romberg:\n return sc.real(bovy_dblquad(_vmomentsurfaceIntegrand,\n self._gamma*(R**self._beta-va)/sigmaR1-nsigma,\n self._gamma*(R**self._beta-va)/sigmaR1+nsigma,\n lambda x: -nsigma, lambda x: nsigma,\n [R,self,logSigmaR,logsigmaR2,sigmaR1,\n self._gamma,n,m],\n tol=10.**-8)/sc.pi*norm/2.)\n else:\n return integrate.dblquad(_vmomentsurfaceIntegrand,\n self._gamma*(R**self._beta-va)/sigmaR1-nsigma,\n self._gamma*(R**self._beta-va)/sigmaR1+nsigma,\n lambda x: -nsigma, lambda x: nsigma,\n (R,self,logSigmaR,logsigmaR2,sigmaR1,\n self._gamma,n,m),\n epsrel=_EPSREL)[0]/sc.pi*norm/2.\n else:\n if romberg:\n return sc.real(bovy_dblquad(_vmomentderivsurfaceIntegrand,\n self._gamma*(R**self._beta-va)/sigmaR1-nsigma,\n self._gamma*(R**self._beta-va)/sigmaR1+nsigma,\n lambda x: -nsigma, lambda x: nsigma,\n [R,self,logSigmaR,logsigmaR2,sigmaR1,\n self._gamma,n,m,deriv],\n tol=10.**-8)/sc.pi*norm/2.)\n else:\n return integrate.dblquad(_vmomentderivsurfaceIntegrand,\n self._gamma*(R**self._beta-va)/sigmaR1-nsigma,\n self._gamma*(R**self._beta-va)/sigmaR1+nsigma,\n lambda x: -nsigma, lambda x: nsigma,\n (R,self,logSigmaR,logsigmaR2,sigmaR1,\n self._gamma,n,m,deriv),\n epsrel=_EPSREL)[0]/sc.pi*norm/2.", "def omega_plasma(number_density, mass):\n return np.sqrt(4 * np.pi * number_density * cgs.e**2 / mass)", "def sigma0(ptem, psal):\n zrau0 = 1000.0\n\n # compute volumic mass pure water at atm pressure\n zr1 = ( ( ( ( ( 6.536332e-9 * ptem - 1.120083e-6 ) * ptem + 1.001685e-4) * ptem \n -9.095290e-3 ) * ptem + 6.793952e-2 ) * ptem + 999.842594\n )\n # seawater volumic mass atm pressure\n zr2 = ( ( ( ( 5.3875e-9 * ptem - 8.2467e-7 ) * ptem + 7.6438e-5 ) * ptem\n -4.0899e-3 ) * ptem + 0.824493\n )\n zr3 = ( -1.6546e-6 * ptem + 1.0227e-4 ) * ptem - 5.72466e-3\n zr4 = 4.8314e-4\n \n # potential volumic mass (reference to the surface)\n sigma0_out = ( zr4 * psal + zr3 * zws + zr2 ) * psal + zr1 - zrau0\n\n return sigma0_out", "def calc_radius_from_mass(self, Mp):\n\n mass = Mp.to(\"earthMass\").value\n assert (\n np.min(mass) > 3e-4 and np.max(mass) < 3e5\n ), \"Mass range out of model expectation. 
Returning None.\"\n\n sample_size = len(mass)\n logm = np.log10(mass)\n prob = np.random.random(sample_size)\n logr = np.ones_like(logm)\n hyper_ind = np.random.randint(\n low=0, high=np.shape(self.all_hyper)[0], size=sample_size\n )\n hyper = self.all_hyper[hyper_ind, :]\n\n for i in range(sample_size):\n logr[i] = self.piece_linear(hyper[i], logm[i], prob[i])\n\n Rp = 10.0**logr * u.earthRad\n\n return Rp", "def _encircled_energy_core(mtf_data, radius, nu_p, dx, dy):\n integration_fourier = special.j1(2 * np.pi * radius * nu_p) / nu_p\n dat = mtf_data * integration_fourier\n return radius * dat.sum() * dx * dy", "def _second_moment(R, sig_l, sig_m, lum, mass, Mbh, beta, tensor,\n sigmaPsf, normPsf, step, nrad, surf_l, pixSize):\n if (max(sigmaPsf) > 0) and (pixSize > 0): # PSF convolution\n\n # Kernel step is 1/4 of largest value between sigma(min) and 1/2 pixel side.\n # Kernel half size is the sum of 3*sigma(max) and 1/2 pixel diagonal.\n #\n if step == 0:\n step = max(pixSize/2., np.min(sigmaPsf))/4.\n mx = 3*np.max(sigmaPsf) + pixSize/np.sqrt(2)\n\n # Make grid linear in log of radius RR\n #\n rmax = np.max(R) + mx # Radius of circle containing all data + convolution\n logRad = np.linspace(np.log(step), np.log(rmax), nrad) # Linear grid in log(RR)\n rr = np.exp(logRad)\n\n # The model Vrms computation is only performed on the radial grid\n # which is then used to interpolate the values at any other location\n #\n wm2Pol = np.empty_like(rr)\n mgePol = np.empty_like(rr)\n rup = 3*np.max(sig_l)\n for j in range(rr.size): # Integration of equation (50)\n wm2Pol[j] = quadva(_integrand, [rr[j], rup],\n args=(sig_l, sig_m, lum, mass, Mbh, rr[j], beta, tensor))[0]\n mgePol[j] = np.sum(surf_l * np.exp(-0.5*(rr[j]/sig_l)**2))\n\n nx = np.ceil(rmax/step)\n x1 = np.linspace(-nx, nx, 2*nx)*step\n xCar, yCar = np.meshgrid(x1, x1) # Cartesian grid for convolution\n\n # Interpolate MGE model and Vrms over cartesian grid\n #\n r1 = 0.5*np.log(xCar**2 + yCar**2) # Log radius of cartesian grid\n wm2Car = np.interp(r1, logRad, wm2Pol)\n mgeCar = np.interp(r1, logRad, mgePol)\n\n nk = np.ceil(mx/step)\n kgrid = np.linspace(-nk, nk, 2*nk)*step\n xgrid, ygrid = np.meshgrid(kgrid, kgrid) # Kernel is square\n\n # Compute kernel with equation (A6) of Cappellari (2008).\n # Normalization is irrelevant here as it cancels out.\n #\n kernel = np.zeros_like(xgrid)\n dx = pixSize/2\n sp = np.sqrt(2)*sigmaPsf\n for j in range(len(sigmaPsf)):\n kernel += normPsf[j] \\\n * (special.erf((dx-xgrid)/sp[j]) + special.erf((dx+xgrid)/sp[j])) \\\n * (special.erf((dx-ygrid)/sp[j]) + special.erf((dx+ygrid)/sp[j]))\n kernel /= np.sum(kernel)\n\n # Seeing and aperture convolution with equation (A3)\n #\n muCar = np.sqrt(signal.fftconvolve(wm2Car, kernel, mode='same')\n / signal.fftconvolve(mgeCar, kernel, mode='same'))\n\n # Interpolate convolved image at observed apertures.\n # Aperture integration was already included in the kernel.\n #\n mu = bilinear_interpolate(x1, x1, muCar, R/np.sqrt(2), R/np.sqrt(2))\n\n else: # No PSF convolution: just compute values\n\n mu = np.empty_like(R)\n rmax = 3*np.max(sig_l)\n for j in range(R.size):\n wm2Pol = quadva(_integrand, [R[j], rmax],\n args=(sig_l, sig_m, lum, mass, Mbh, R[j], beta, tensor))[0]\n mgePol = np.sum( surf_l * np.exp(-0.5*(R[j]/sig_l)**2) )\n mu[j] = np.sqrt(wm2Pol/mgePol)\n\n return mu", "def calculate_fuel_recursively(mass):\n fuel = calculate_fuel_from_mass(mass)\n if fuel < 0:\n return 0\n return calculate_fuel_recursively(fuel) + fuel", "def R(alpha, beta, gamma, tol 
= 1e-16):\n \n ca, cb, cg = np.cos(alpha), np.cos(beta), np.cos(gamma)\n sa, sb, sg = np.sin(alpha), np.sin(beta), np.sin(gamma)\n\n m = np.array([[ca*cb*cg - sa*sg, -sa*cg - ca*cb*sg, ca*sb],\n [sa*cb*cg + ca*sg, ca*cg - sa*cb*sg, sa*sb],\n [-sb*cg, sb*sg, cb]])\n\n #m[np.abs(m) < tol] = 0\n return m", "def boltzmann_radial_potential_linear_density_ebeam(\n r, current, r_e, e_kin, nl, kT, q, first_guess=None, ldu=None, max_step=500, rel_diff=1e-3\n ):\n # Solves the nonlinear radial poisson equation for a dynamic charge distribution following\n # the Boltzmann law\n # A * phi = b0 + bx (where b0 and bx are the static and dynamic terms)\n # Define cost function f = A * phi - b0 - bx\n # Compute jacobian J = A - diag(d bx_i / d phi_i)\n # Solve J y = f\n # Next guess: phi = phi - y\n # Iterate until adjustment is small\n cden = np.zeros(r.size)\n cden[r <= r_e] = -current/PI/r_e**2\n\n if ldu is None:\n ldu = fd_system_nonuniform_grid(r) # Set up tridiagonal system\n l, d, u = ldu\n\n nl = np.atleast_2d(np.asarray(nl))\n kT = np.atleast_2d(np.asarray(kT))\n q = np.atleast_2d(np.asarray(q))\n\n if first_guess is None:\n irho = np.zeros(r.size)\n irho[r <= r_e] = np.sum(q * Q_E * nl / (PI*r_e**2), axis=0)\n erho = cden/np.sqrt(2 * Q_E * e_kin/M_E)\n irho[r <= r_e] = np.minimum(-.95 * erho[r <= r_e], irho[r <= r_e])\n # if irho[0] < -erho[0]:\n phi = radial_potential_nonuniform_grid(r, erho + irho)\n # else:\n # phi = radial_potential_nonuniform_grid(r, erho)\n else:\n phi = first_guess\n\n for _ in range(max_step):\n # ion dist\n shape = np.exp(-q * (phi - phi.min())/kT)\n i_sr = np.atleast_2d(np.trapz(r*shape, r)).T\n nax = nl / 2 / PI / i_sr * np.atleast_2d(shape[:, 0]).T\n\n # dynamic rhs term\n _bx_a = - nax * q * shape * Q_E / EPS_0 # dynamic rhs term\n _bx_b = - cden/np.sqrt(2 * Q_E * (e_kin+phi)/M_E) / EPS_0\n _bx_a[:, -1] = 0 # boundary condition\n bx = np.sum(_bx_a, axis=0) + _bx_b\n\n # F = A.dot(phi) - (b0 + bx)\n f = _tridiag_targetfun(ldu, phi, bx)\n\n # Diagonal of the Jacobian df/dphi_i\n _c = np.zeros_like(shape)\n _c[:, :-1] = r[:-1] * (r[1:]-r[:-1]) * shape[:, :-1]\n j_d = -(np.sum(_bx_a * q/kT * (i_sr-_c)/i_sr, axis=0)\n + Q_E/M_E*_bx_b/(2 * Q_E * (e_kin+phi)/M_E)) # Diagonal of the Jacobian df/dphi_i\n\n y = tridiagonal_matrix_algorithm(l, d - j_d, u, f)\n res = np.max(np.abs(y[:-1]/phi[:-1]))\n phi = phi - y\n if res < rel_diff:\n break\n return phi, nax, shape", "def evaluate(self, radius, mtot, m0, alpha1, alpha2):\n model = mtot + m0 * (1 - np.exp(-alpha1*(radius/self.r0)**(-alpha2)))\n return model", "def newton(f, x0, dx, eps=1e-10):\n # Initialization\n globvar.ncalls = 0\n x = np.copy(x0)\n n = len(x)\n J = np.zeros((n, n), dtype='float64')\n fx = f(x)\n\n # Begin root search\n while True:\n globvar.ncalls += 1\n\n # Fill the Jacobian matrix\n for j in range(n):\n x[j] += dx[j]\n df = f(x) - fx\n\n for i in range(n):\n J[i, j] = df[i] / dx[j]\n\n x[j] -= dx[j]\n\n # Decompose and solve using Given's rotations\n decomp(J)\n Dx = -fx\n solve(J, Dx)\n\n # Begin backtracking linesearch\n lamb = 2.0\n while True: \n lamb /= 2\n y = x + Dx * lamb\n fy = f(y)\n\n fynorm = np.linalg.norm(fy)\n fxnorm = np.linalg.norm(fx)\n\n if (fynorm < (1 - lamb / 2) * fxnorm) or (lamb < (1 / 128.0)):\n break\n\n # Save latest approximation\n x = y\n fx = fy\n\n Dxnorm = np.linalg.norm(Dx)\n fxnorm = np.linalg.norm(fx)\n dxnorm = np.linalg.norm(dx)\n if Dxnorm < dxnorm or fxnorm < eps:\n break\n\n return x" ]
[ "0.63628334", "0.5616132", "0.55902416", "0.5445173", "0.54239476", "0.54030454", "0.53585684", "0.5299843", "0.5293189", "0.5257939", "0.52509886", "0.52108425", "0.5198325", "0.5182825", "0.51755184", "0.5121687", "0.51188475", "0.5108264", "0.5085689", "0.5066682", "0.50654644", "0.50527734", "0.5044296", "0.50338197", "0.5029758", "0.5029402", "0.50281596", "0.50273585", "0.5015065", "0.50139415" ]
0.7381081
0
Simple function to create or load an existing label encoder. If mode is train, always create a new label_encoder.
def get_or_make_label_encoder(params, problem, mode, label_list=None, zero_class=None): problem_path = params.ckpt_dir create_path(problem_path) le_path = os.path.join(problem_path, '%s_label_encoder.pkl' % problem) if mode == 'train' and not os.path.exists(le_path): label_encoder = LabelEncoder() label_encoder.fit(label_list, zero_class=zero_class) label_encoder.dump(le_path) else: label_encoder = LabelEncoder() label_encoder.load(le_path) return label_encoder
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getLabelEncoder():\n classes = list(string.letters + string.digits)\n classes.append('')\n le = LabelEncoder()\n le.fit(classes)\n\n return le", "def load_encoder(checkpoint, encoder_cls,\n HIDDEN_SIZE, embedding, ENCODER_N_LAYERS, DROPOUT, encoder_name, bidirectional):\n model = encoder_cls(HIDDEN_SIZE, embedding, ENCODER_N_LAYERS, DROPOUT,\n gate=encoder_name, bidirectional=bidirectional)\n model.load_state_dict(checkpoint['en'])\n model.eval()\n return model", "def build_label_transform():\n\n return NALabelEncoder()", "def _label_encoding(self):\n for feat in self.cat_feats:\n if self.train:\n lbl = preprocessing.LabelEncoder()\n lbl.fit(self.dataframe[feat].values)\n self.dataframe_d_copy.loc[:,feat] = lbl.transform(self.dataframe[feat].values)\n self.label_encoders[feat] = lbl\n else:\n lbl = self.encoders[feat]\n self.dataframe_d_copy.loc[:,feat] = lbl.transform(self.dataframe[feat].values)\n \n if self.train:\n encoder_path = f\"{self.output_path}/_label_encoder.pkl\"\n self.cat_feats_cfg['encoder_path'] = encoder_path\n joblib.dump(self.label_encoders, encoder_path)\n \n return self.dataframe_d_copy", "def build_encoder(\n self,\n build_encoder: dict,\n target_dir: str,\n cache_dir: str,\n train_csv_path: str,\n valid_csv_path: str,\n test_csv_paths: list,\n get_path_only: bool = False,\n ):\n encoder_path = Path(target_dir) / \"encoder.pkl\"\n if get_path_only:\n return encoder_path\n\n train_csv = pd.read_csv(train_csv_path)\n valid_csv = pd.read_csv(valid_csv_path)\n test_csvs = [pd.read_csv(path) for path in test_csv_paths]\n all_csv = pd.concat([train_csv, valid_csv, *test_csvs])\n\n multilabels = [\n [label.strip() for label in multilabel.split(\";\")]\n for multilabel in all_csv[\"labels\"].tolist()\n ]\n encoder = CategoryEncoders(\n [single_category_labels for single_category_labels in zip(*multilabels)]\n )\n with open(encoder_path, \"wb\") as f:\n pickle.dump(encoder, f)\n\n return encoder", "def abstract_encoder(label):\n global dictionary, wv, table\n model = torch.load('model/description_encoder')\n # label = label.lower()\n try:\n abstract = dictionary[label]\n d = abstract.translate(table).lower()\n d = d.replace('resource/', '').split()\n r = np.array(list(map(lambda x: wv.get_vector(x), d)),\n dtype=np.float32)\n hidden = model.init_hidden()\n except KeyError:\n return np.random.randn(100)\n try:\n for word in r:\n p, hidden = model(Variable(torch.tensor([[word]])),\n hidden)\n p = p[0][0].detach().numpy()\n return p\n except (KeyError, IndexError, TypeError) as _:\n return np.random.randn(100)", "def __init__(self, params, model, name=\"ds2_encoder\", mode='train'):\n super(DeepSpeech2Encoder, self).__init__(params, model, name, mode)", "def _decode_train(self, decoder, _encoder_output, _features, labels):\r\n target_embedded = tf.nn.embedding_lookup(decoder.target_embedding,\r\n labels[\"target_ids\"])\r\n\r\n return decoder(_encoder_output, labels=target_embedded[:,:-1], sequence_length=labels[\"target_len\"]-1)", "def main(\n lang='deu', n=900, epochs=50, batch_size=64, num_neurons=256,\n encoder_input_data=None,\n decoder_input_data=None,\n decoder_target_data=None,\n checkpoint_dir=os.path.join(BIGDATA_PATH, 'checkpoints'),\n ):\n mkdir_p(checkpoint_dir)\n encoder_input_path = os.path.join(\n checkpoint_dir,\n 'nlpia-ch10-translate-input-{}.npy'.format(lang))\n decoder_input_path = os.path.join(\n checkpoint_dir,\n 'nlpia-ch10-translate-decoder-input-{}.npy'.format(lang))\n decoder_target_path = os.path.join(\n checkpoint_dir,\n 
'nlpia-ch10-translate-target-{}.npy'.format('eng'))\n data_paths = (encoder_input_path, decoder_input_path, decoder_target_path)\n\n encoder_input_data = []\n if all([os.path.isfile(p) for p in data_paths]):\n encoder_input_data = np.load(encoder_input_path)\n decoder_input_data = np.load(decoder_input_path)\n decoder_target_data = np.load(decoder_target_path)\n if len(encoder_input_data) < n:\n encoder_input_data, decoder_input_data, decoder_target_data = onehot_char_training_data(\n lang=lang, n=n, data_paths=data_paths)\n encoder_input_data = encoder_input_data[:n]\n decoder_input_data = decoder_input_data[:n] \n decoder_target_data = decoder_target_data[:n]\n model = fit(data_paths=data_paths, epochs=epochs, batch_size=batch_size, num_neurons=num_neurons)\n return model", "def load_encoders():\n\n encoders = {}\n\n # Pclass\n pclass_encoder = LabelBinarizer()\n\n with open(os.path.join('encoders', 'pclass_encoder.json'),\n 'r', encoding='utf8', errors='ignore') as infile:\n pclass_encoder.classes_ = json.load(infile)\n encoders['pclass_encoder'] = pclass_encoder\n\n # Sex\n sex_encoder = LabelBinarizer()\n\n with open(os.path.join('encoders', 'sex_encoder.json'),\n 'r', encoding='utf8', errors='ignore') as infile:\n sex_encoder.classes_ = json.load(infile)\n encoders['sex_encoder'] = sex_encoder\n\n # Age\n age_encoder = LabelBinarizer()\n age_encoder.classes_ = list(range(10))\n\n with open(os.path.join('encoders', 'age_bins.json'),\n 'r', encoding='utf8', errors='ignore') as infile:\n age_bins = json.load(infile)\n encoders['age_bins'] = age_bins\n encoders['age_encoder'] = age_encoder\n\n # Siblings/Spouses Aboard\n siblings_spouses_aboard_encoder = LabelBinarizer()\n\n with open(os.path.join('encoders', 'siblings_spouses_aboard_encoder.json'),\n 'r', encoding='utf8', errors='ignore') as infile:\n siblings_spouses_aboard_encoder.classes_ = json.load(infile)\n encoders['siblings_spouses_aboard_encoder'] = siblings_spouses_aboard_encoder\n\n # Parents/Children Aboard\n parents_children_aboard_encoder = LabelBinarizer()\n\n with open(os.path.join('encoders', 'parents_children_aboard_encoder.json'),\n 'r', encoding='utf8', errors='ignore') as infile:\n parents_children_aboard_encoder.classes_ = json.load(infile)\n encoders['parents_children_aboard_encoder'] = parents_children_aboard_encoder\n\n # Fare\n fare_encoder = LabelBinarizer()\n fare_encoder.classes_ = list(range(10))\n\n with open(os.path.join('encoders', 'fare_bins.json'),\n 'r', encoding='utf8', errors='ignore') as infile:\n fare_bins = json.load(infile)\n encoders['fare_bins'] = fare_bins\n encoders['fare_encoder'] = fare_encoder\n\n # Target Field: Survived\n survived_encoder = LabelEncoder()\n\n with open(os.path.join('encoders', 'survived_encoder.json'),\n 'r', encoding='utf8', errors='ignore') as infile:\n survived_encoder.classes_ = np.array(json.load(infile))\n encoders['survived_encoder'] = survived_encoder\n\n return encoders", "def train_src_encoder(encoder, classifier, data_loader):\n ####################\n # 1. setup network #\n ####################\n\n # set train state for Dropout and BN layers\n encoder.train()\n classifier.train()\n\n # setup criterion and optimizer\n optimizer = optim.Adam(\n list(encoder.parameters()) + list(classifier.parameters()),\n lr=params.c_learning_rate,\n betas=(params.beta1, params.beta2))\n criterion = nn.CrossEntropyLoss()\n\n ####################\n # 2. 
train network #\n ####################\n\n for epoch in range(params.num_epochs_pre):\n for step, (images, labels) in enumerate(data_loader):\n # make images and labels variable\n images = make_variable(images)\n labels = make_variable(labels.squeeze_())\n\n # zero gradients for optimizer\n optimizer.zero_grad()\n\n # compute loss for critic\n preds = classifier(encoder(images))\n loss = criterion(preds, labels)\n\n # optimize source classifier\n loss.backward()\n optimizer.step()\n\n # print step info\n if ((step + 1) % params.log_step_pre == 0):\n print(\"Epoch [{}/{}] Step [{}/{}]: loss={}\"\n .format(epoch + 1,\n params.num_epochs_pre,\n step + 1,\n len(data_loader),\n loss.data))\n\n # eval model on test set\n if ((epoch + 1) % params.eval_step_pre == 0):\n eval_src(encoder, classifier, data_loader)\n\n # save model parameters\n if ((epoch + 1) % params.save_step_pre == 0):\n save_model(encoder, \"ADDA-source-encoder-{}.pt\".format(epoch + 1))\n save_model(\n classifier, \"ADDA-source-classifier-{}.pt\".format(epoch + 1))\n\n # # save final model\n save_model(encoder, \"ADDA-source-encoder-final.pt\")\n save_model(classifier, \"ADDA-source-classifier-final.pt\")\n\n return encoder, classifier", "def get_target_encoder(self, train: NumpyOrPandas) -> Optional[type]:\n target_encoder = None\n if train.folds is not None:\n if train.task.name in [\"binary\", \"reg\"]:\n target_encoder = TargetEncoder\n else:\n n_classes = train.target.max() + 1\n if n_classes <= self.multiclass_te_co:\n target_encoder = MultiClassTargetEncoder\n\n return target_encoder", "def encode_labels(y_train, y_test):\n le = LabelEncoder()\n le.fit(y_train)\n y_train = le.transform(y_train)\n y_test = le.transform(y_test)\n return y_train, y_test", "def create_encoding(df):\n vocab = []\n vocab_df = df[\"company\"] + df[\"address\"] + df[\"date\"] + df[\"total\"]\n [vocab.extend(row) for row in vocab_df]\n enc = LabelEncoder()\n enc.fit(vocab)\n return enc", "def create_voc_label(is_training):\n voc_dir = config.voc_dir\n cls_map = {name: i for i, name in enumerate(config.coco_classes)}\n sub_dir = 'train' if is_training else 'eval'\n voc_dir = os.path.join(voc_dir, sub_dir)\n if not os.path.isdir(voc_dir):\n raise ValueError(f'Cannot find {sub_dir} dataset path.')\n\n image_dir = anno_dir = voc_dir\n if os.path.isdir(os.path.join(voc_dir, 'Images')):\n image_dir = os.path.join(voc_dir, 'Images')\n if os.path.isdir(os.path.join(voc_dir, 'Annotations')):\n anno_dir = os.path.join(voc_dir, 'Annotations')\n\n if not is_training:\n data_dir = config.voc_root\n json_file = os.path.join(data_dir, config.instances_set.format(sub_dir))\n file_dir = os.path.split(json_file)[0]\n if not os.path.isdir(file_dir):\n os.makedirs(file_dir)\n json_dict = {\"images\": [], \"type\": \"instances\", \"annotations\": [],\n \"categories\": []}\n bnd_id = 1\n\n image_files_dict = {}\n image_anno_dict = {}\n images = []\n for anno_file in os.listdir(anno_dir):\n print(anno_file)\n if not anno_file.endswith('xml'):\n continue\n tree = et.parse(os.path.join(anno_dir, anno_file))\n root_node = tree.getroot()\n file_name = root_node.find('filename').text\n img_id = get_imageId_from_fileName(file_name)\n image_path = os.path.join(image_dir, file_name)\n print(image_path)\n if not os.path.isfile(image_path):\n print(f'Cannot find image {file_name} according to annotations.')\n continue\n\n labels = []\n for obj in root_node.iter('object'):\n cls_name = obj.find('name').text\n if cls_name not in cls_map:\n print(f'Label \"{cls_name}\" not in 
\"{config.coco_classes}\"')\n continue\n bnd_box = obj.find('bndbox')\n x_min = int(float(bnd_box.find('xmin').text)) - 1\n y_min = int(float(bnd_box.find('ymin').text)) - 1\n x_max = int(float(bnd_box.find('xmax').text)) - 1\n y_max = int(float(bnd_box.find('ymax').text)) - 1\n labels.append([y_min, x_min, y_max, x_max, cls_map[cls_name]])\n\n if not is_training:\n o_width = abs(x_max - x_min)\n o_height = abs(y_max - y_min)\n ann = {'area': o_width * o_height, 'iscrowd': 0, 'image_id': \\\n img_id, 'bbox': [x_min, y_min, o_width, o_height], \\\n 'category_id': cls_map[cls_name], 'id': bnd_id, \\\n 'ignore': 0, \\\n 'segmentation': []}\n json_dict['annotations'].append(ann)\n bnd_id = bnd_id + 1\n\n if labels:\n images.append(img_id)\n image_files_dict[img_id] = image_path\n image_anno_dict[img_id] = np.array(labels)\n\n if not is_training:\n size = root_node.find(\"size\")\n width = int(size.find('width').text)\n height = int(size.find('height').text)\n image = {'file_name': file_name, 'height': height, 'width': width,\n 'id': img_id}\n json_dict['images'].append(image)\n\n if not is_training:\n for cls_name, cid in cls_map.items():\n cat = {'supercategory': 'none', 'id': cid, 'name': cls_name}\n json_dict['categories'].append(cat)\n json_fp = open(json_file, 'w')\n json_str = json.dumps(json_dict)\n json_fp.write(json_str)\n json_fp.close()\n\n return images, image_files_dict, image_anno_dict", "def load(self, encoder_path, decoder_path=None):\n if encoder_path:\n enc = torch.load(encoder_path, \"cpu\")\n # if the model was initialised in DataParallel\n # we'll have to revert that.\n if list(enc['model'].keys())[0][:7] == \"module.\":\n enc['model'] = OrderedDict([(k[7:], v) for k, v in enc['model'].items()])\n\n # copy encoder weights\n opts_e = enc['settings']\n # replace parameters in opts.\n blacklist = {\n \"checkpoint_encoder\",\n \"checkpoint_decoder\",\n \"cuda\",\n \"directory\",\n \"directory_name\",\n \"data\",\n \"log\",\n \"model\",\n \"save_mode\",\n \"telegram\",\n \"save_model\",\n \"train_from_state_dict\",\n \"batch_size\",\n \"epochs\",\n \"epoch\",\n \"device\"\n }\n for arg in dir(opts_e):\n if arg[0] == \"_\":\n continue\n if arg in blacklist:\n continue\n setattr(self.opt, arg, getattr(opts_e, arg))\n # initiate a new model\n self.initiate()\n # replace encoder weights\n self.model.encoder.load_state_dict(enc['model'])\n if self.opt.verbose:\n print(\"[Info] Loaded encoder model.\")\n if decoder_path:\n\n dec = torch.load(decoder_path, \"cpu\")\n\n if list(dec['model'].keys())[0][:7] == \"module.\":\n dec['model'] = OrderedDict([(k[7:], v) for k, v in dec['model'].items()])\n \n # Note that the decoder file contains both\n # the decoder and the target_word_projection.\n opts_d = enc['settings']\n self.model.decoder.load_state_dict(dec['model'])\n\n\n try:\n self.model.generator.load_state_dict(dec['generator'])\n except:\n generator = nn.Sequential(\n nn.Linear(self.model.generator.in_features, self.model.generator.out_features),\n nn.LogSoftmax(dim=1)).cuda()\n generator.load_state_dict(dec['generator'])\n del self.model.generator\n self.model.generator = generator\n \n if self.opt.verbose:\n print(\"[Info] Loaded decoder model.\")\n\n self.model.to(self.device)", "def __loadTokenizerAndEncoder(fileName):\n\n y, x = __prepareDataSet(fileName)\n\n tokenizer = Tokenizer(num_words=max_words)\n tokenizer.fit_on_texts(x)\n\n encoder = LabelEncoder()\n encoder.fit(y)\n\n return tokenizer, encoder", "def register_train(key, module):\n register(key, module, train_dict)", 
"def train_start(self):\n self.module.img_enc.train()\n self.module.txt_enc.train()", "def registerEncoder (encoder):\n assert False, \"TODO:\"", "def _load_encoder(self, encoder_file, encoder_id):\n assert isinstance(encoder_file, str),\\\n \"encoder_file not entered as string.\"\n assert isinstance(encoder_id, int),\\\n \"encoder_id not entered as integer.\"\n loaded_encoder = joblib.load(file_path(encoder_file))\n encoder_dict = {}\n encoder_classes = np.array(loaded_encoder.classes_)\n for cl in encoder_classes:\n encoder_dict[cl] = [float(x) for x in cl == encoder_classes]\n self.encoder[encoder_id] = encoder_dict\n return", "def encode_labels(Y, le=None, enc=None):\n\n # initialize encoders\n N = Y.shape[0]\n\n # Encode the labels\n if le is None:\n le = LabelEncoder()\n Y_le = le.fit_transform(Y).reshape(N, 1)\n else:\n Y_le = le.transform(Y).reshape(N, 1)\n\n # convert into one hot encoding\n if enc is None:\n enc = OneHotEncoder()\n Y_enc = enc.fit_transform(Y_le).toarray()\n else:\n Y_enc = enc.transform(Y_le).toarray()\n\n return Y_enc, le, enc", "def train(self, mode=True):\n super(Encoder, self).train(mode)\n self.apply(freeze_batchnorm)", "def encoder(self, features=[8], name=\"encoder\") -> KM.Model:\n input_tensor = KL.Input(\n shape=(32, 32, 3)\n ) # shape of images for cifar10 dataset\n encoded = KL.Conv2D(\n features[0],\n 3,\n strides=(2, 2),\n padding=\"same\",\n use_bias=False,\n name=name + f\"_conv_{1}\",\n )(input_tensor)\n encoded = KL.Activation(\"relu\")(KL.BatchNormalization()(encoded))\n encoded_list = [encoded]\n\n # Prepare the skip tensor from input\n skip_input_tensor = KL.Activation(\"relu\")(\n KL.BatchNormalization()(\n KL.Conv2D(features[0], 1, strides=1, use_bias=False)(input_tensor)\n )\n )\n skip_input_tensor = KL.SpatialDropout2D(rate=0.2)(skip_input_tensor)\n skip_input_tensor = KL.AveragePooling2D(pool_size=(2, 2), strides=2)(\n skip_input_tensor\n )\n skip_tensors = tf.concat(\n [\n skip_input_tensor, # Routing info from input tensor to next levels\n encoded, # Routes info from second level to next levels\n ],\n axis=-1,\n )\n for i, feature_num in enumerate(features[1:], start=2):\n encoded, skip_tensors = conv_block(\n encoded,\n skip_tensors,\n features_in=features[i - 2],\n features_out=feature_num,\n name=name + f\"_conv_{i}\",\n )\n encoded_list.append(encoded)\n return KM.Model(inputs=input_tensor, outputs=encoded_list, name=name)", "def add_label_encoding(df, categoricals_to_encode, line_name):\n label_encode_dir = '/Users/davidodwyer/Documents/studyCS/Semester_3/models_label_encodings'\n line_name = line_name\n\n # iterate through features list\n # label-encode their values\n\n try:\n \n for feature in categoricals_to_encode:\n le = LabelEncoder()\n name = feature + '_label'\n df[name] = le.fit_transform(df[feature])\n \n # create a mapping of original to labelled\n # values\n\n encoding = df[name].tolist()\n original = le.inverse_transform(df[name]).tolist()\n \n dictionary = dict(zip(original, encoding))\n\n\n # save the mapping to disk, as json\n\n line_encode_dir = label_encode_dir + '/' + line_name\n \n # line_encode_dir = os.path.join(label_encode_dir, line_name)\n\n if not os.path.isdir(line_encode_dir):\n os.mkdir(line_encode_dir)\n\n\n dict_name = feature + '_encode_map.json'\n save_target = os.path.join(line_encode_dir, dict_name)\n \n with open(save_target, 'w') as json_file:\n json.dump(dictionary, json_file)\n \n\n\n # drop the original, unencoded features \n \n for feature in categoricals_to_encode:\n df.drop(feature, 
axis=1, inplace=True)\n\n print('— features label encoded')\n \n return df\n\n except:\n\n print(\"Problem with label encoding function\")", "def load_data(encoder, name, loc=DATA_DIR, seed=1234):\n z = {}\n if name == 'MR':\n pos, neg = load_rt(loc=loc)\n elif name == 'SUBJ':\n pos, neg = load_subj(loc=loc)\n elif name == 'CR':\n pos, neg = load_cr(loc=loc)\n elif name == 'MPQA':\n pos, neg = load_mpqa(loc=loc)\n else:\n raise ValueError(name)\n\n labels = compute_labels(pos, neg)\n text, labels = shuffle_data(pos+neg, labels, seed=seed)\n z['text'] = text\n z['labels'] = labels\n print 'Computing skip-thought vectors...'\n features = encoder.encode(text, verbose=False)\n return z, features", "def create_label2id(cfg):\n label2id_model_path = join(\n cfg.model_dir, 'labels.json')\n\n # label2id is stored in the data dir and model dir\n if not exists(label2id_model_path):\n label2id_data_path = join(\n cfg.data_dir, 'labels.json')\n\n if not exists(label2id_data_path):\n label2id = {}\n\n for label in generate_labels(\n cfg, ['train.jsonl', 'valid.jsonl']):\n if label not in label2id:\n label2id[label] = len(label2id)\n\n with open(label2id_data_path, 'w') as fh:\n json.dump(label2id, fh)\n\n else:\n with open(label2id_data_path, 'r') as fh:\n label2id = json.load(fh)\n\n with open(label2id_model_path, 'w') as fh:\n json.dump(label2id, fh)\n\n with open(label2id_model_path, 'r') as fh:\n label2id = json.load(fh)\n\n return label2id", "def train(self, src, labels): # real signature unknown; restored from __doc__\n pass", "def encode(y):\n le = LabelEncoder()\n le.fit(y)\n print(list(le.classes_))\n y = le.transform(y)\n return y", "def get_encoder_class(self,label):\n return len(self.encodeDict[label].classes_)" ]
[ "0.6811553", "0.6775981", "0.6624491", "0.62879103", "0.6285247", "0.6068988", "0.5997439", "0.5965343", "0.5961353", "0.59140944", "0.583923", "0.57692605", "0.576683", "0.57488394", "0.5745894", "0.57296175", "0.56971973", "0.5688705", "0.56653464", "0.5649765", "0.56185657", "0.5601357", "0.55982935", "0.55933917", "0.55698353", "0.5557988", "0.5526229", "0.5524922", "0.55024916", "0.5498448" ]
0.76493424
0
Given an input string, returns it as the body of an HTML document. Adapted from example.py included in the docutils distribution.
def rst_to_html(input_string, source_path=None, destination_path=None, input_encoding='unicode', doctitle=1, initial_header_level=1): overrides = {'input_encoding': input_encoding, 'doctitle_xform': doctitle, 'initial_header_level': initial_header_level, # the next two are for security reasons, to prevent malicious # insertion of raw html code. 'file_insertion_enabled': False, 'raw_enabled': False, } parts = core.publish_parts( source=input_string, source_path=source_path, destination_path=destination_path, writer_name='html', settings_overrides=overrides) return parts['html_body']
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rest2html(s):\n return core.publish_string(s, writer=html_fragment_writer)", "def htmlFormat( body = 'No text supplied', title = 'CS 5 project page' ):\n startString = \"\"\"\\\nContent-Type: text/html;\n\n<html>\n<head>\n<title>\n\"\"\"\n afterTitle = \"\"\"\\\n</title>\n</head>\n\n<body>\n\"\"\"\n afterBody = \"\"\"\\\n</body>\n</html>\n\"\"\"\n return startString + title + afterTitle + body + afterBody", "def get_html_from_rst(rst):\n\n compiler = nikola.plugins.compile.rest.CompileRest()\n compiler.set_site(FakeSite())\n return compiler.compile_string(rst)[0]", "def html_wrapper(content):\n\n header = '''<!DOCTYPE html>\n<html lang=\"en\">\n<head>\n <meta charset=\"utf-8\">\n <title>''' + SITE_NAME + '''</title>\n</head>\n<body>\n'''\n\n footer = '''\n</body>\n</html>'''\n return header + content + footer", "def safeHTML(s):\n parser = StrippingParser()\n parser.feed(s)\n parser.close()\n parser.cleanup()\n return parser.result", "def htmlize(text):\n htmlized = markdown.markdown(\n text,\n output_format=\"xhtml5\", safe_mode=\"escape\",\n )\n htmlversion = htmltemplate.format(body=htmlized)\n return htmlversion", "def html_builder(s, meta, template_path):\n s = convert_text(s)\n \n # create right navigation panel: \n right = toc_panel(s)\n \n with open(template_path, 'r') as html:\n contents = html.read()\n\n soup = BeautifulSoup(contents, 'lxml')\n\n \n right_div = soup.find(id='sidebar-wrapper')\n book_main = soup.find(id='content')\n metadata = soup.find(id='metadata-content')\n\n for key, value in meta.items():\n\n new_p = soup.new_tag(\"label\")\n value = key + \": \" + value\n new_p.append((value)) \n metadata.insert(0, new_p)\n \n soup.new_tag(\"div\", right_div.append(BeautifulSoup(right, 'lxml')))\n soup.new_tag(\"div\", book_main.insert(1, BeautifulSoup(s, 'html.parser')))\n \n # format main text as html:\n \n full_html = soup\n return str(full_html)", "def outputHtml(s):\n htmlFile.write(s + \"\\n\")", "def html_report(string):\n return html_div(string, \"report\")", "def get_html(html: str):\r\n WRAPPER = \"\"\"<div style=\"overflow-x: auto; border: 1px solid #e6e9ef; border-radius: 0.25rem; padding: 1rem; margin-bottom: 2.5rem\">{}</div>\"\"\"\r\n # Newlines seem to mess with the rendering\r\n html = html.replace(\"\\n\", \" \")\r\n return WRAPPER.format(html)", "def output_to_html(string_data):\n raise NotImplementedError(\"This function is not yet Implemented!\")", "def html(input):\n output=atpic.cleaner_alex.clean(input)\n return output", "def markdown_to_html(s):\n return markdown(s)", "def restructuredtext(text, **kwargs):\n from docutils import core\n parts = core.publish_parts(source=text,\n writer_name='html4css1',\n **kwargs)\n return parts['html_body']", "def get_content(self, default=None):\n\n tree = parse_string(self.book.get_template(self._template_name))\n tree_root = tree.getroot()\n\n tree_root.set('lang', self.lang or self.book.language)\n tree_root.attrib['{%s}lang' % NAMESPACES['XML']] = self.lang or self.book.language\n\n # add to the head also\n # <meta charset=\"utf-8\" />\n\n try:\n html_tree = parse_html_string(self.content)\n except:\n return ''\n\n html_root = html_tree.getroottree()\n\n # create and populate head\n\n _head = etree.SubElement(tree_root, 'head')\n\n if self.title != '':\n _title = etree.SubElement(_head, 'title')\n _title.text = self.title\n\n for lnk in self.links:\n if lnk.get(\"type\") == \"text/javascript\":\n _lnk = etree.SubElement(_head, 'script', lnk)\n # force <script></script>\n _lnk.text = ''\n else:\n _lnk = 
etree.SubElement(_head, 'link', lnk)\n\n # this should not be like this\n # head = html_root.find('head')\n # if head is not None:\n # for i in head.getchildren():\n # if i.tag == 'title' and self.title != '':\n # continue\n # _head.append(i)\n\n # create and populate body\n\n _body = etree.SubElement(tree_root, 'body')\n if self.direction:\n _body.set('dir', self.direction)\n\n body = html_tree.find('body')\n if body is not None:\n for i in body.getchildren():\n _body.append(i)\n\n tree_str = etree.tostring(tree, pretty_print=True, encoding='utf-8', xml_declaration=True)\n\n return tree_str", "def open_body(self) -> str:\n self.html_doc = self.html_doc + \"\"\"<body>\\n\n \"\"\"\n return self.html_doc", "def htmlformat(title, version=None, description=None, error_msg=None,\n error_tb=None):\n html = \"\"\"\n <html>\n <head>\n <meta charset=\"utf-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n <style type=\"text/css\">\n body{{\n font-family: sans-serif;\n }}\n .container{{\n width: 100%;\n font-family: sans-serif;\n display: block;\n }}\n </style>\n </head>\n <body>\n {body}\n </body>\n </html>\n \"\"\"\n\n body = [\n \"<h1>{title}</h1>\".format(title=title)\n ]\n\n if version is not None:\n body.append(\"<p>Version: {version}</p>\".format(version=version))\n\n if description:\n body.append(\"<p>{description}</p>\".format(description=description))\n\n if error_msg:\n body.append(\"<h4>Error: {error_msg}</h4>\".format(error_msg=error_msg))\n\n if error_tb:\n body.append(\"<h4>Traceback</h4>\")\n body.append(\"<pre>{error_tb}</pre>\".format(error_tb=error_tb))\n\n return html.format(body=\"\".join(body))", "def parse(self, input):\n root = etree.Element(\"html\")\n h = etree.SubElement(root, \"head\")\n t = etree.SubElement(h, \"title\")\n t.text = self.document_title\n body = etree.SubElement(root, \"body\")\n\n for line in input:\n p = etree.SubElement(body, \"p\")\n p.text = line[:-1]\n\n return root", "def html(self, encoding=None, errors=None):\n if encoding and errors:\n string = self.content.decode(encoding=encoding, errors=errors)\n parser = _get_html_parser(None)\n else:\n ASSERT.none(errors)\n string = self.content\n parser = _get_html_parser(\n encoding or ASSERT.not_none(self.encoding)\n )\n document = lxml.etree.fromstring(string, parser)\n # Check whether fromstring returns None because apparently\n # HTMLParser is more lenient than XMLParser and may cause\n # fromstring to return None on some malformed HTML input.\n if document is None:\n raise AssertionError(\n 'lxml.etree.fromstring error: %s content=%r' %\n (self.url, self.content)\n )\n return document", "def new(doctype: str = \"\", spaces: int = 4) -> \"PyHTML\":\n return PyHTML(doctype, spaces)", "def to_html(docstring: str) -> str:\n # careful: markdown2 returns a subclass of str with an extra\n # .toc_html attribute. 
don't further process the result,\n # otherwise this attribute will be lost.\n return pdoc.markdown2.markdown( # type: ignore\n docstring,\n extras=markdown_extensions,\n link_patterns=markdown_link_patterns,\n )", "def html_content(data, title):\r\n # add html header into text\r\n html_text = \"\"\"\r\n <!DOCTYPE html>\r\n <html>\r\n <head>\r\n <style>\r\n table {\r\n width: 25%;\r\n font-family: arial, sans-serif;\r\n border-collapse: collapse;\r\n }\r\n\r\n tr:nth-child(odd) {\r\n background-color: #dddddd;\r\n }\r\n\r\n td, th {\r\n border: 1px solid #dddddd;\r\n text-align: left;\r\n padding: 8px;\r\n }\r\n </style>\r\n </head>\r\n \"\"\"\r\n\r\n # Starting body of html\r\n html_text += \"<body>\"\r\n\r\n # adding title\r\n html_text += \"<h2> {} </h2>\".format(title)\r\n\r\n # Adding table content\r\n html_text += \"<table>\"\r\n # Each row in table\r\n for index, row in enumerate(data):\r\n #Each collumn in table\r\n html_text += \"<tr>\"\r\n if index == 0:\r\n # Table Head\r\n for name in row:\r\n html_text += \"<th>{}</th>\".format(name)\r\n else:\r\n # Table body\r\n for element in row:\r\n html_text += \"<td>{}</td>\".format(element)\r\n # Exit collumn\r\n html_text += \"</tr>\"\r\n # Exit row\r\n html_text += \"</table>\"\r\n # End of html\r\n html_text += \"\"\"</body>\r\n </html>\"\"\"\r\n # Return\r\n return html_text", "def mdhtml_to_html(data_str):\n mdrenderer = mistune.Renderer()\n markdown = mistune.Markdown(renderer=mdrenderer)\n return markdown(data_str)", "def get_body_content(self):\n\n try:\n html_tree = parse_html_string(self.content)\n except:\n return ''\n\n html_root = html_tree.getroottree()\n\n if len(html_root.find('body')) != 0:\n body = html_tree.find('body')\n\n tree_str = etree.tostring(body, pretty_print=True, encoding='utf-8', xml_declaration=False)\n\n # this is so stupid\n if tree_str.startswith(six.b('<body>')):\n n = tree_str.rindex(six.b('</body>'))\n\n return tree_str[7:n]\n\n return tree_str\n\n return ''", "def md2html(template,filepath):\n content=''\n s = string.Template(template) \n try:\n content=markdown2.markdown_path(filepath)\n except:\n logger.warning('md2html:markdown convertion failed... Trying safe mode ')\n try:\n content=markdown2.markdown_path(filepath,safe_mode=True)\n except:\n logger.error('md2html:markdown convertion failed for %s. Use raw text.' %filepath)\n import codecs\n try:\n content=codecs.open(filepath,'r','utf-8').read()\n except:\n logger.error('md2html:invalid file? %s ' %filepath)\n # print 'error processing markdown. Read raw file...' \n html=''\n try:\n html=s.substitute(content=content)\n except:\n logger.warning('md2html()::string.Template substitute failed... Trying safe mode ')\n try:\n html=s.safe_substitute(content=content) \n except:\n logger.error('md2html()::string.Template conversion failed for : %s ' %filepath)\n return html", "def html_manual_format(string):\n return html_div(string, \"manualfmt\")", "def provide_html_template():\n get_content = str(input(\"Paste the content you want to see displayed in the browser here. \\n\"))\n get_name = input(\"I am going to create an html file with your content. What do you want to call your file? 
\\n\")\n \n new_html_file = open(str(get_name) + '.html', 'w')\n \n page_content = '<html><head></head><body><p>' + get_content + '</p></body></html>'\n \n new_html_file.write(page_content)\n new_html_file.close()", "def html(template, **data):\n tmpl = template_loader.load(template)\n context = {}\n context_setup.dispatch(context)\n context.update(data)\n stream = tmpl.generate(**context)\n return stream", "def get_documentation(path=\"\"):\n return \"\"\"<HTML><head><title>Python Minidoc for \"\"\"+path+\"\"\"</title></head>\n <body>\n \"\"\"+get_documentation_body(path)+\"\"\"\n </body></html>\"\"\"", "def create_html(self):\n # Add html content to the self.doc\n self.doc.asis('<!DOCTYPE html>')\n with self.tag('html'):\n self.design_header()\n self.design_body()\n # Write html content from self.doc\n with codecs.open(self.filestream.name, 'w', 'utf-8') as f:\n html_content = indent(\n self.doc.getvalue(),\n indentation=' ',\n newline='\\r\\n'\n )\n f.write(html_content)" ]
[ "0.66641665", "0.6609217", "0.6305159", "0.6242836", "0.6172251", "0.60931087", "0.6054476", "0.6012877", "0.60044134", "0.59778404", "0.59670067", "0.59541255", "0.5941216", "0.5902097", "0.5884714", "0.588471", "0.583694", "0.57947314", "0.5752858", "0.57214975", "0.57137644", "0.5683702", "0.5681692", "0.56791854", "0.56742495", "0.56283873", "0.56119657", "0.5582164", "0.557769", "0.55709064" ]
0.7353787
0
Runs this transform over the given content. We return a new string that is the result of this transform.
def run(self, content): parts = [] offset = 0 for match in self.regexp.finditer(content): parts.append(content[offset:match.start(0)]) parts.append(self.replace(match)) offset = match.end(0) parts.append(content[offset:]) return ''.join(parts)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def transform(self, data, input_content_type, output_content_type):\n return self.transform_fn(data, input_content_type, output_content_type)", "def transform(self, stdout):\n return stdout", "def postprocess(self, text):\r\n return text", "def render(*content, **context):\n return u''.join((e for c in content for e in render_content(c, **context)))", "def process(self, data):\n return self.transformer.transform(data)", "def prepare(self, content):\n return content", "def __transform(self):\n try:\n self.tokenized_document, self.stack = None, []\n\n InlineProcessor.initialize()\n LinkHelper.initialize()\n\n POGGER.debug(\"\\n\\n>>>>>>>parse_blocks_pass>>>>>>\")\n first_pass_results = self.__parse_blocks_pass()\n\n POGGER.debug(\"\\n\\n>>>>>>>coalesce_text_blocks>>>>>>\")\n coalesced_results = CoalesceProcessor.coalesce_text_blocks(\n first_pass_results\n )\n\n POGGER.debug(\"\\n\\n>>>>>>>parse_inline>>>>>>\")\n final_pass_results = InlineProcessor.parse_inline(coalesced_results)\n\n POGGER.debug(\"\\n\\n>>>>>>>final_pass_results>>>>>>\")\n return final_pass_results\n except Exception as this_exception:\n raise BadTokenizationError(\n \"An unhandled error occurred processing the document.\"\n ) from this_exception", "def transform():", "def preprocess(self, text):\r\n return text", "def apply(self, text):", "def transform(self, your_text_string, show_debug=False):\n\n logging.getLogger().setLevel(logging.DEBUG if show_debug else logging.WARNING)\n ParserLogger.sync_on_next_call()\n self.source_provider = InMemorySourceProvider(your_text_string)\n return self.__transform()", "def transform():\n pass", "def convert(content, input_format, output_format):\r\n assert input_format in ('srt', 'sjson')\r\n assert output_format in ('txt', 'srt', 'sjson')\r\n\r\n if input_format == output_format:\r\n return content\r\n\r\n if input_format == 'srt':\r\n\r\n if output_format == 'txt':\r\n text = SubRipFile.from_string(content.decode('utf8')).text\r\n return HTMLParser().unescape(text)\r\n\r\n elif output_format == 'sjson':\r\n raise NotImplementedError\r\n\r\n if input_format == 'sjson':\r\n\r\n if output_format == 'txt':\r\n text = json.loads(content)['text']\r\n return HTMLParser().unescape(\"\\n\".join(text))\r\n\r\n elif output_format == 'srt':\r\n return generate_srt_from_sjson(json.loads(content), speed=1.0)", "def generate(self, content):\n\n tmp = self._create_tmp_image(content)\n rendered = self._render(tmp)\n return self._create_content_file(rendered)", "def apply(self):\n if self.applied:\n raise RuntimeError(\"Transform applied more than once\")\n \n self._apply()\n \n self.applied = True\n \n return self.template", "def transform(self, data):", "def transform(self, X):\n return self.transformer.transform(X)", "def render_content(self):\n return mark_safe(markdown(self.content))", "def process(self):\n try:\n f = StringIO.StringIO(self.content)\n dom = XTree.parse(f)\n xslt = XTree.parse(self.stylesheet)\n transform = XTree.XSLT(xslt)\n newdom = transform(dom)\n except IOError:\n print \"Xml or Xsl file not found!\"\n return False\n return XTree.tostring(newdom, pretty_print=True)", "def transform(self):", "def transform(self, **kwargs):\n\n parser = etree.HTMLParser()\n stripped = self.html.strip()\n tree = etree.fromstring(stripped, parser).getroottree()\n page = tree.getroot()\n\n if page is None:\n raise PremailerError(\"Could not parse the html\")\n\n rules = self._parse_selectors(page)\n first_time_styles = self._apply_classes(page, rules)\n 
self._reapply_initial_styles(first_time_styles)\n if self.remove_classes:\n self._remove_classes(page)\n if self.base_url:\n self._rewrite_urls(page)\n\n return self._output(stripped, tree, page, **kwargs)", "def content(self):\n return \"\".join(self.lines)", "def emit(self, content):\n self.output_stream.write(tostring(content).strip())", "def run(self, text1: str, action: Cryptography) -> str:\n\n chunks, chunk_size = len(text1), 8\n return \"\".join([self.run_block(text1[i:i + chunk_size], action)\n for i in range(0, chunks, chunk_size)])", "def render(self):\n self._render_text = self.content.replace('\\n', '<br>') # deal with new line\n return render_str(\"post.html\", p = self)", "def __PerformSubstitutions(self, text):\n\n for substitution in self.substitutions:\n pattern, replacement = self.SplitValue(substitution)\n text = re.compile(pattern,re.M).sub(replacement, text)\n return text", "def output(self): #1 这是在 BaseHTMLProcessor 中的一个方法,它永远不会被父类 SGMLParser 所调用。因为其它的处理器方法将它们重构的 HTML 保存在 self.pieces 中,这个函数需要将所有这些片段连接成一个字符串。正如前面提到的,Python 在处理列表方面非常出色,但对于字符串处理就逊色了。所以我们只有在某人确实需要它时才创建完整的字符串。\n return \"\".join(self.pieces) #2 如果您愿意,也可以换成使用 string 模块的 join 方法:string.join(self.pieces, \"\")。 ", "def to_content(cls, data: Mapping) -> str:", "def _transform(func_name):\n\n def wrapped(self, *args, **kwargs):\n replacement_string = _query_super(func_name)(self, *args, **kwargs)\n to_string = []\n char_counter = 0\n for index in range(0, len(self._raw_string)):\n if index in self._code_indexes:\n to_string.append(self._raw_string[index])\n elif index in self._char_indexes:\n to_string.append(replacement_string[char_counter])\n char_counter += 1\n return ANSIString(\n \"\".join(to_string),\n decoded=True,\n code_indexes=self._code_indexes,\n char_indexes=self._char_indexes,\n clean_string=replacement_string,\n )\n\n return wrapped", "def transform(self, actual_tokens): # noqa: C901\n transformed_data = \"\"\n avoid_processing = False\n previous_token = None\n\n for next_token in actual_tokens:\n # pre_transform = transformed_data\n if next_token.token_name == MarkdownToken.token_thematic_break:\n transformed_data += self.rehydrate_thematic_break(next_token)\n elif next_token.token_name == MarkdownToken.token_paragraph:\n transformed_data += self.rehydrate_paragraph(next_token)\n elif next_token.token_name == MarkdownToken.token_indented_code_block:\n transformed_data += self.rehydrate_indented_code_block(next_token)\n elif next_token.token_name == MarkdownToken.token_html_block:\n transformed_data += self.rehydrate_html_block(next_token)\n elif next_token.token_name == MarkdownToken.token_fenced_code_block:\n transformed_data += self.rehydrate_fenced_code_block(next_token)\n elif next_token.token_name == MarkdownToken.token_text:\n transformed_data += self.rehydrate_text(next_token)\n elif next_token.token_name == MarkdownToken.token_setext_heading:\n transformed_data += self.rehydrate_setext_heading(next_token)\n elif next_token.token_name == MarkdownToken.token_atx_heading:\n transformed_data += self.rehydrate_atx_heading(next_token)\n elif next_token.token_name == MarkdownToken.token_blank_line:\n transformed_data += self.rehydrate_blank_line(next_token)\n\n elif (\n next_token.token_name == MarkdownToken.token_unordered_list_start\n or next_token.token_name == MarkdownToken.token_ordered_list_start\n or next_token.token_name == MarkdownToken.token_block_quote\n or next_token.token_name\n == MarkdownToken.token_link_reference_definition\n or next_token.token_name == 
MarkdownToken.token_inline_link\n or next_token.token_name == MarkdownToken.token_inline_image\n ):\n avoid_processing = True\n break\n elif next_token.token_name == MarkdownToken.token_inline_hard_break:\n transformed_data += self.rehydrate_hard_break(next_token)\n elif next_token.token_name == MarkdownToken.token_inline_emphasis:\n transformed_data += self.rehydrate_inline_emphaisis(next_token)\n elif next_token.token_name == MarkdownToken.token_inline_uri_autolink:\n transformed_data += self.rehydrate_inline_uri_autolink(next_token)\n elif next_token.token_name == MarkdownToken.token_inline_email_autolink:\n transformed_data += self.rehydrate_inline_email_autolink(next_token)\n elif next_token.token_name == MarkdownToken.token_inline_raw_html:\n transformed_data += self.rehydrate_inline_raw_html(next_token)\n elif next_token.token_name == MarkdownToken.token_inline_code_span:\n transformed_data += self.rehydrate_inline_code_span(next_token)\n elif next_token.token_name.startswith(EndMarkdownToken.type_name_prefix):\n\n adjusted_token_name = next_token.token_name[\n len(EndMarkdownToken.type_name_prefix) :\n ]\n if adjusted_token_name == MarkdownToken.token_paragraph:\n transformed_data += self.rehydrate_paragraph_end(next_token)\n elif adjusted_token_name == MarkdownToken.token_indented_code_block:\n transformed_data += self.rehydrate_indented_code_block_end(\n next_token\n )\n elif adjusted_token_name == MarkdownToken.token_fenced_code_block:\n transformed_data += self.rehydrate_fenced_code_block_end(\n next_token, previous_token\n )\n elif adjusted_token_name == MarkdownToken.token_html_block:\n transformed_data += self.rehydrate_html_block_end(next_token)\n elif adjusted_token_name == MarkdownToken.token_setext_heading:\n transformed_data += self.rehydrate_setext_heading_end(next_token)\n elif adjusted_token_name == MarkdownToken.token_atx_heading:\n transformed_data += self.rehydrate_atx_heading_end(next_token)\n elif adjusted_token_name == MarkdownToken.token_inline_emphasis:\n transformed_data += self.rehydrate_inline_emphaisis_end(next_token)\n else:\n assert False, \"end_next_token>>\" + str(adjusted_token_name)\n else:\n assert False, \"next_token>>\" + str(next_token)\n\n print(\n \">>>>\"\n + str(next_token)\n + \"\\n---\\n\"\n + transformed_data.replace(\"\\n\", \"\\\\n\").replace(\"\\t\", \"\\\\t\")\n + \"\\n---\"\n )\n previous_token = next_token\n\n if transformed_data and transformed_data[-1] == \"\\n\":\n transformed_data = transformed_data[0:-1]\n return transformed_data, avoid_processing" ]
[ "0.63454944", "0.6209116", "0.6048773", "0.5973691", "0.5906884", "0.5887378", "0.5827559", "0.58250016", "0.57623434", "0.5721566", "0.56885874", "0.5667268", "0.56144154", "0.5599922", "0.5597909", "0.5460976", "0.5453108", "0.5448828", "0.54400074", "0.5394049", "0.5382979", "0.53631693", "0.5361662", "0.5330545", "0.5303634", "0.52755505", "0.52650774", "0.5254048", "0.5247314", "0.5247084" ]
0.69993293
0
Returns a list of roots of a linear polynomial.
def roots_linear(f): r = -f.nth(0)/f.nth(1) dom = f.get_domain() if not dom.is_Numerical: if dom.is_Composite: r = factor(r) else: from sympy.simplify.simplify import simplify r = simplify(r) return [r]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def realpolyroots(*cs):\n if not cs:\n return [0]\n try:\n f = 1.0/cs[0]\n cs = [f*c for c in cs[1:]]\n except ArithmeticError:\n return realpolyroots(*cs[1:])\n else:\n n = len(cs)\n if n == 0:\n return []\n elif n == 1:\n return [-cs[0]]\n elif n == 2:\n return _realroots_quadratic(*cs)\n elif n == 3:\n return _realroots_cubic(*cs)\n elif n == 4:\n return _realroots_quartic(*cs)\n else:\n raise RuntimeError(\"realpolyroots solver currently limited to polynoms up to the power of 4\")", "def roots(self):\n return list(self.positive_roots()) + list(self.negative_roots())", "def getRoots(self):\n # This part is for exercise 11\n # return []\n \n # This part is for exercise 12\n if self.getDegree() == 0:\n return []\n if self.getDegree() == 1:\n return LinearPolynomial(self.getCoefficients()).getRoots()\n if self.getDegree() == 2:\n return QuadraticPolynomial(self.getCoefficients()).getRoots()\n else:\n current_polynomial = Polynomial(self.getCoefficients())\n roots = []\n \n while current_polynomial.__coefficients[0] == 0:\n roots.append(0)\n current_polynomial.__coefficients = current_polynomial.__coefficients[1:]\n \n while current_polynomial.getDegree() > 2:\n\n #Initialization\n x = (random.random(), random.random(), random.random())\n while abs(current_polynomial.evaluate(x[2])) > EPSILON:\n x = (random.random(), random.random(), random.random())\n nb_iters = 0\n while (abs(current_polynomial.evaluate(x[2])) > EPSILON or abs(x[2] - x[1]) > TOLERANCE) and nb_iters < MAX_ITERATIONS:\n x = getNextApproximations(current_polynomial.evaluate, x)\n nb_iters += 1\n\n roots.append(x[2])\n \n if abs(x[2].imag) < TOLERANCE:\n current_polynomial = current_polynomial.divide(Polynomial([-x[2].real, 1]))[0]\n else:\n roots.append(x[2].conjugate())\n current_polynomial = current_polynomial.divide(Polynomial([abs(x[2])**2, -2*x[2].real, 1]))[0]\n \n roots += current_polynomial.getRoots()\n \n for i in range(len(roots)):\n if roots[i].imag == 0:\n roots[i] = roots[i].real\n \n return sorted(roots, key = lambda x: (x.real, x.imag))", "def arroots(self):\n return self.arpoly.roots()", "def find_poly_roots(poly, initial_guess = 0.0, limit = 0.00001, max_iterations = 1000):\n solutions = []\n # Find solutions numerically for n > 0, split them off until n = 2\n for q in range(polynomial.order(poly) - 2):\n x = find_poly_root(poly, initial_guess, limit, max_iterations)\n if not x:\n break\n poly = polynomial.div(poly, polynomial.make_poly([-x, 1]))\n solutions.append(x)\n # Find the rest of the roots analytically\n if polynomial.order(poly) == 1:\n solutions.append(- polynomial.coeff(poly, 1) / polynomial.coeff(poly, 0))\n elif polynomial.order(poly) == 2:\n a = polynomial.coeff(poly, 2)\n b = polynomial.coeff(poly, 1)\n c = polynomial.coeff(poly, 0)\n d = b ** 2 - 4 * a * c\n if d == 0:\n solutions.append(-b / (2 * a))\n elif d > 0:\n solutions.append((- b + sqrt(d)) / (2 * a))\n solutions.append((- b - sqrt(d)) / (2 * a))\n return solutions", "def roots(self) -> List[str]:\n return [node for node, degree in self.graph.in_degree() if degree == 0]", "def roots_cyclotomic(f, factor=False):\n L, U = _inv_totient_estimate(f.degree())\n\n for n in range(L, U + 1):\n g = cyclotomic_poly(n, f.gen, polys=True)\n\n if f.expr == g.expr:\n break\n else: # pragma: no cover\n raise RuntimeError(\"failed to find index of a cyclotomic polynomial\")\n\n roots = []\n\n if not factor:\n # get the indices in the right order so the computed\n # roots will be sorted\n h = n//2\n ks = [i for i in range(1, n + 1) if igcd(i, n) 
== 1]\n ks.sort(key=lambda x: (x, -1) if x <= h else (abs(x - n), 1))\n d = 2*I*pi/n\n for k in reversed(ks):\n roots.append(exp(k*d).expand(complex=True))\n else:\n g = Poly(f, extension=root(-1, n))\n\n for h, _ in ordered(g.factor_list()[1]):\n roots.append(-h.TC())\n\n return roots", "def roots(self):\n roots = []\n u = self.left_root\n while u != NULL:\n roots.append(u)\n u = self.right_sib(u)\n return roots", "def preprocess_roots(poly):\n coeff = S.One\n\n poly_func = poly.func\n try:\n _, poly = poly.clear_denoms(convert=True)\n except DomainError:\n return coeff, poly\n\n poly = poly.primitive()[1]\n poly = poly.retract()\n\n # TODO: This is fragile. Figure out how to make this independent of construct_domain().\n if poly.get_domain().is_Poly and all(c.is_term for c in poly.rep.coeffs()):\n poly = poly.inject()\n\n strips = list(zip(*poly.monoms()))\n gens = list(poly.gens[1:])\n\n base, strips = strips[0], strips[1:]\n\n for gen, strip in zip(list(gens), strips):\n reverse = False\n\n if strip[0] < strip[-1]:\n strip = reversed(strip)\n reverse = True\n\n ratio = None\n\n for a, b in zip(base, strip):\n if not a and not b:\n continue\n elif not a or not b:\n break\n elif b % a != 0:\n break\n else:\n _ratio = b // a\n\n if ratio is None:\n ratio = _ratio\n elif ratio != _ratio:\n break\n else:\n if reverse:\n ratio = -ratio\n\n poly = poly.eval(gen, 1)\n coeff *= gen**(-ratio)\n gens.remove(gen)\n\n if gens:\n poly = poly.eject(*gens)\n\n if poly.is_univariate and poly.get_domain().is_ZZ:\n basis = _integer_basis(poly)\n\n if basis is not None:\n n = poly.degree()\n\n def func(k, coeff):\n return coeff//basis**(n - k[0])\n\n poly = poly.termwise(func)\n coeff *= basis\n\n if not isinstance(poly, poly_func):\n poly = poly_func(poly)\n return coeff, poly", "def get_roots(self):\n roots = []\n for symbol in self.GlobalSymbolDict.values():\n if symbol.isRoot():\n roots += [symbol]\n return roots", "def getRoots(self):\n a, b, c = self.getCoefficients()[2], self.getCoefficients()[1], self.getCoefficients()[0]\n delta = b**2 - 4*a*c\n if delta >= 0:\n roots = sorted([(-b - math.sqrt(delta))/(2*a), (-b + math.sqrt(delta))/(2*a)])\n else:\n roots = sorted([(-b - math.sqrt(-delta)*1j)/(2*a), (-b + math.sqrt(-delta)*1j)/(2*a)], key=lambda x: (x.real, x.imag))\n return roots", "def get_roots(self):\n raise NotImplementedError()", "def solve(n=5000,C=-6*10**11,a=900,b=3):\n coeffs = np.zeros(n+2)\n coeffs[0] = a-b*n\n coeffs[1] = b*(n+1) - a\n coeffs[-3] = -C\n coeffs[-2] = 2*C - a\n coeffs[-1] = a+b-C\n mp.dps = 27\n roots = polyroots(coeffs)\n for root in roots:\n print root", "def roots_binomial(f):\n n = f.degree()\n\n a, b = f.nth(n), f.nth(0)\n base = -cancel(b/a)\n alpha = root(base, n)\n\n if alpha.is_number:\n alpha = alpha.expand(complex=True)\n\n # define some parameters that will allow us to order the roots.\n # If the domain is ZZ this is guaranteed to return roots sorted\n # with reals before non-real roots and non-real sorted according\n # to real part and imaginary part, e.g. 
-1, 1, -1 + I, 2 - I\n neg = base.is_negative\n even = n % 2 == 0\n if neg:\n if even == True and (base + 1).is_positive:\n big = True\n else:\n big = False\n\n # get the indices in the right order so the computed\n # roots will be sorted when the domain is ZZ\n ks = []\n imax = n//2\n if even:\n ks.append(imax)\n imax -= 1\n if not neg:\n ks.append(0)\n for i in range(imax, 0, -1):\n if neg:\n ks.extend([i, -i])\n else:\n ks.extend([-i, i])\n if neg:\n ks.append(0)\n if big:\n for i in range(0, len(ks), 2):\n pair = ks[i: i + 2]\n pair = list(reversed(pair))\n\n # compute the roots\n roots, d = [], 2*I*pi/n\n for k in ks:\n zeta = exp(k*d).expand(complex=True)\n roots.append((alpha*zeta).expand(power_base=False))\n\n return roots", "def evaluate_polynomial(tropical_matrix, coefficient_list):\n identity_matrix = get_identity_matrix(tropical_matrix.get_dimension())\n sum_list = []\n sum_list.append(identity_matrix.mult_scalar(coefficient_list[0]))\n for i in range(1, len(coefficient_list)):\n sum_list.append(tropical_matrix.mult_scalar(coefficient_list[i]))\n return get_minimum_sum(sum_list)", "def evaluate_polynomial(f,x):\n degree = len(f)-1\n ans = 0\n for i in f:\n ans += i*x**degree\n degree -= 1\n return(ans)", "def getRoots(self):\n return [float(-self.getCoefficients()[0])/self.getCoefficients()[1]]", "def _realroots_quadratic(a1, a0):\n D = a1*a1 - 4*a0\n if D < 0:\n return []\n SD = math.sqrt(D)\n return [0.5 * (-a1 + SD), 0.5 * (-a1 - SD)]", "def polynomial(a, x):\n\n sum = 0\n\n for i in range(len(a)):\n sum += a[i] * x**i\n return sum", "def increasing_roots(self):\n size = self.size()\n if size == 0:\n return []\n roots = [size]\n root = size\n for i in range(size - 1, 0, -1):\n if not self.le(i, root):\n roots.append(i)\n root = i\n return roots", "def root_factors(f, *gens, filter=None, **args):\n args = dict(args)\n\n F = Poly(f, *gens, **args)\n\n if not F.is_Poly:\n return [f]\n\n if F.is_multivariate:\n raise ValueError('multivariate polynomials are not supported')\n\n x = F.gens[0]\n\n zeros = roots(F, filter=filter)\n\n if not zeros:\n factors = [F]\n else:\n factors, N = [], 0\n\n for r, n in ordered(zeros.items()):\n factors, N = factors + [Poly(x - r, x)]*n, N + n\n\n if N < F.degree():\n G = reduce(lambda p, q: p*q, factors)\n factors.append(F.quo(G))\n\n if not isinstance(f, Poly):\n factors = [ f.as_expr() for f in factors ]\n\n return factors", "def roots(self):\n if not self.__roots:\n self.__roots = set()\n for n in self.__nodes:\n if n not in self.__reverse_map:\n self.__roots.add(n)\n return self.__roots", "def find_poly_root(poly, initial_guess = 0.0, limit = 0.00001, max_iterations = 1000):\n # Calculate the polynomial derivatives\n dpoly = polynomial.derivative(poly)\n ddpoly = polynomial.derivative(dpoly)\n # Closures !!!\n f = lambda x: polynomial.eval(poly, x)\n df = lambda x: polynomial.eval(dpoly, x)\n ddf = lambda x: polynomial.eval(ddpoly, x)\n # Call the generic root finder\n return find_root(f, df, ddf, initial_guess, limit, max_iterations)", "def find_roots(table_of_nodes: np.ndarray, table_of_edges: np.ndarray):\n rts = []\n for j in range(table_of_nodes.shape[0]):\n flag = True\n for k in range(table_of_edges.shape[0]):\n if int(table_of_edges[k, 1]) == int(table_of_nodes[j, 0]):\n flag = False\n if flag:\n if j == 0:\n rts = table_of_nodes[j, :]\n else:\n rts = np.concatenate((rts, table_of_nodes[j, :]))\n rts = np.reshape(rts, (int(rts.shape[0] / table_of_nodes.shape[1]), table_of_nodes.shape[1]))\n return rts", "def linear_polynomial(self, 
e: 'PFElement') -> Polynomial:\n poly = self.polynomial(-e)\n poly += poly.monic(1)\n return poly", "def general_poly (L):\n def inside(x):\n result = 0\n pwr = len(L) - 1\n for l in L:\n result = result + l * x ** pwr\n pwr -= 1\n return result\n return inside", "def PrimitiveRoots(self, modulo):\n modRange = range(1, modulo)\n required = {x for x in modRange if fractions.gcd(x, modulo)}\n return [g for g in modRange if required == {pow(g, powers, modulo) for powers in modRange}]", "def test_solve_polynomial_cv_1a():\n assert solveset_real(sqrt(x) - 1, x) == FiniteSet(1)\n assert solveset_real(sqrt(x) - 2, x) == FiniteSet(4)\n assert solveset_real(x**Rational(1, 4) - 2, x) == FiniteSet(16)\n assert solveset_real(x**Rational(1, 3) - 3, x) == FiniteSet(27)\n assert solveset_real(x*(x**(S(1) / 3) - 3), x) == \\\n FiniteSet(S(0), S(27))", "def roots_quintic(f):\n result = []\n\n coeff_5, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n if not all(coeff.is_Rational for coeff in (coeff_5, coeff_4, p_, q_, r_, s_)):\n return result\n\n if coeff_5 != 1:\n f = Poly(f / coeff_5)\n _, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n # Cancel coeff_4 to form x^5 + px^3 + qx^2 + rx + s\n if coeff_4:\n p = p_ - 2*coeff_4*coeff_4/5\n q = q_ - 3*coeff_4*p_/5 + 4*coeff_4**3/25\n r = r_ - 2*coeff_4*q_/5 + 3*coeff_4**2*p_/25 - 3*coeff_4**4/125\n s = s_ - coeff_4*r_/5 + coeff_4**2*q_/25 - coeff_4**3*p_/125 + 4*coeff_4**5/3125\n x = f.gen\n f = Poly(x**5 + p*x**3 + q*x**2 + r*x + s)\n else:\n p, q, r, s = p_, q_, r_, s_\n\n quintic = PolyQuintic(f)\n\n # Eqn standardized. Algo for solving starts here\n if not f.is_irreducible:\n return result\n f20 = quintic.f20\n # Check if f20 has linear factors over domain Z\n if f20.is_irreducible:\n return result\n # Now, we know that f is solvable\n for _factor in f20.factor_list()[1]:\n if _factor[0].is_linear:\n theta = _factor[0].root(0)\n break\n d = discriminant(f)\n delta = sqrt(d)\n # zeta = a fifth root of unity\n zeta1, zeta2, zeta3, zeta4 = quintic.zeta\n T = quintic.T(theta, d)\n tol = S(1e-10)\n alpha = T[1] + T[2]*delta\n alpha_bar = T[1] - T[2]*delta\n beta = T[3] + T[4]*delta\n beta_bar = T[3] - T[4]*delta\n\n disc = alpha**2 - 4*beta\n disc_bar = alpha_bar**2 - 4*beta_bar\n\n l0 = quintic.l0(theta)\n Stwo = S(2)\n l1 = _quintic_simplify((-alpha + sqrt(disc)) / Stwo)\n l4 = _quintic_simplify((-alpha - sqrt(disc)) / Stwo)\n\n l2 = _quintic_simplify((-alpha_bar + sqrt(disc_bar)) / Stwo)\n l3 = _quintic_simplify((-alpha_bar - sqrt(disc_bar)) / Stwo)\n\n order = quintic.order(theta, d)\n test = (order*delta.n()) - ( (l1.n() - l4.n())*(l2.n() - l3.n()) )\n # Comparing floats\n if not comp(test, 0, tol):\n l2, l3 = l3, l2\n\n # Now we have correct order of l's\n R1 = l0 + l1*zeta1 + l2*zeta2 + l3*zeta3 + l4*zeta4\n R2 = l0 + l3*zeta1 + l1*zeta2 + l4*zeta3 + l2*zeta4\n R3 = l0 + l2*zeta1 + l4*zeta2 + l1*zeta3 + l3*zeta4\n R4 = l0 + l4*zeta1 + l3*zeta2 + l2*zeta3 + l1*zeta4\n\n Res = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n Res_n = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n\n # Simplifying improves performance a lot for exact expressions\n R1 = _quintic_simplify(R1)\n R2 = _quintic_simplify(R2)\n R3 = _quintic_simplify(R3)\n R4 = _quintic_simplify(R4)\n\n # hard-coded results for [factor(i) for i in _vsolve(x**5 - a - I*b, x)]\n x0 = z**(S(1)/5)\n x1 = sqrt(2)\n x2 = sqrt(5)\n x3 = sqrt(5 - x2)\n x4 = I*x2\n x5 = x4 + I\n x6 = I*x0/4\n x7 = x1*sqrt(x2 + 5)\n sol = [x0, -x6*(x1*x3 - x5), x6*(x1*x3 + x5), -x6*(x4 + x7 - I), x6*(-x4 + x7 + I)]\n\n R1 = 
R1.as_real_imag()\n R2 = R2.as_real_imag()\n R3 = R3.as_real_imag()\n R4 = R4.as_real_imag()\n\n for i, s in enumerate(sol):\n Res[1][i] = _quintic_simplify(s.xreplace({z: R1[0] + I*R1[1]}))\n Res[2][i] = _quintic_simplify(s.xreplace({z: R2[0] + I*R2[1]}))\n Res[3][i] = _quintic_simplify(s.xreplace({z: R3[0] + I*R3[1]}))\n Res[4][i] = _quintic_simplify(s.xreplace({z: R4[0] + I*R4[1]}))\n\n for i in range(1, 5):\n for j in range(5):\n Res_n[i][j] = Res[i][j].n()\n Res[i][j] = _quintic_simplify(Res[i][j])\n r1 = Res[1][0]\n r1_n = Res_n[1][0]\n\n for i in range(5):\n if comp(im(r1_n*Res_n[4][i]), 0, tol):\n r4 = Res[4][i]\n break\n\n # Now we have various Res values. Each will be a list of five\n # values. We have to pick one r value from those five for each Res\n u, v = quintic.uv(theta, d)\n testplus = (u + v*delta*sqrt(5)).n()\n testminus = (u - v*delta*sqrt(5)).n()\n\n # Evaluated numbers suffixed with _n\n # We will use evaluated numbers for calculation. Much faster.\n r4_n = r4.n()\n r2 = r3 = None\n\n for i in range(5):\n r2temp_n = Res_n[2][i]\n for j in range(5):\n # Again storing away the exact number and using\n # evaluated numbers in computations\n r3temp_n = Res_n[3][j]\n if (comp((r1_n*r2temp_n**2 + r4_n*r3temp_n**2 - testplus).n(), 0, tol) and\n comp((r3temp_n*r1_n**2 + r2temp_n*r4_n**2 - testminus).n(), 0, tol)):\n r2 = Res[2][i]\n r3 = Res[3][j]\n break\n if r2 is not None:\n break\n else:\n return [] # fall back to normal solve\n\n # Now, we have r's so we can get roots\n x1 = (r1 + r2 + r3 + r4)/5\n x2 = (r1*zeta4 + r2*zeta3 + r3*zeta2 + r4*zeta1)/5\n x3 = (r1*zeta3 + r2*zeta1 + r3*zeta4 + r4*zeta2)/5\n x4 = (r1*zeta2 + r2*zeta4 + r3*zeta1 + r4*zeta3)/5\n x5 = (r1*zeta1 + r2*zeta2 + r3*zeta3 + r4*zeta4)/5\n result = [x1, x2, x3, x4, x5]\n\n # Now check if solutions are distinct\n\n saw = set()\n for r in result:\n r = r.n(2)\n if r in saw:\n # Roots were identical. Abort, return []\n # and fall back to usual solve\n return []\n saw.add(r)\n\n # Restore to original equation where coeff_4 is nonzero\n if coeff_4:\n result = [x - coeff_4 / 5 for x in result]\n return result", "def decreasing_roots(self):\n if self.size() == 0:\n return []\n roots = [1]\n root = 1\n for i in range(2, self.size() + 1):\n if not self.le(i, root):\n roots.append(i)\n root = i\n return roots" ]
[ "0.699301", "0.68372566", "0.6812553", "0.6778328", "0.6580493", "0.6448303", "0.64083934", "0.62958246", "0.6221394", "0.6214463", "0.6213689", "0.620138", "0.60809135", "0.6045944", "0.59933275", "0.59385794", "0.5924114", "0.5923547", "0.5893801", "0.58751374", "0.58578867", "0.5853305", "0.5834194", "0.5788073", "0.57687837", "0.5757184", "0.5733034", "0.5654485", "0.56385845", "0.5635474" ]
0.70028156
0
r""" Returns a list of roots of a quartic polynomial. There are many references for solving quartic expressions available [15]. This reviewer has found that many of them require one to select from among 2 or more possible sets of solutions and that some solutions work when one is searching for real roots but do not work when searching for complex roots (though this is not always stated clearly). The following routine has been tested and found to be correct for 0, 2 or 4 complex roots. The quasisymmetric case solution [6] looks for quartics that have the form `x4 + Ax3 + Bx2 + Cx + D = 0` where `(C/A)2 = D`. Although no general solution that is always applicable for all coefficients is known to this reviewer, certain conditions are tested
def roots_quartic(f): _, a, b, c, d = f.monic().all_coeffs() if not d: return [S.Zero] + roots([1, a, b, c], multiple=True) elif (c/a)**2 == d: x, m = f.gen, c/a g = Poly(x**2 + a*x + b - 2*m, x) z1, z2 = roots_quadratic(g) h1 = Poly(x**2 - z1*x + m, x) h2 = Poly(x**2 - z2*x + m, x) r1 = roots_quadratic(h1) r2 = roots_quadratic(h2) return r1 + r2 else: a2 = a**2 e = b - 3*a2/8 f = _mexpand(c + a*(a2/8 - b/2)) aon4 = a/4 g = _mexpand(d - aon4*(a*(3*a2/64 - b/4) + c)) if f.is_zero: y1, y2 = [sqrt(tmp) for tmp in roots([1, e, g], multiple=True)] return [tmp - aon4 for tmp in [-y1, -y2, y1, y2]] if g.is_zero: y = [S.Zero] + roots([1, 0, e, f], multiple=True) return [tmp - aon4 for tmp in y] else: # Descartes-Euler method, see [7] sols = _roots_quartic_euler(e, f, g, aon4) if sols: return sols # Ferrari method, see [1, 2] p = -e**2/12 - g q = -e**3/108 + e*g/3 - f**2/8 TH = Rational(1, 3) def _ans(y): w = sqrt(e + 2*y) arg1 = 3*e + 2*y arg2 = 2*f/w ans = [] for s in [-1, 1]: root = sqrt(-(arg1 + s*arg2)) for t in [-1, 1]: ans.append((s*w - t*root)/2 - aon4) return ans # whether a Piecewise is returned or not # depends on knowing p, so try to put # in a simple form p = _mexpand(p) # p == 0 case y1 = e*Rational(-5, 6) - q**TH if p.is_zero: return _ans(y1) # if p != 0 then u below is not 0 root = sqrt(q**2/4 + p**3/27) r = -q/2 + root # or -q/2 - root u = r**TH # primary root of solve(x**3 - r, x) y2 = e*Rational(-5, 6) + u - p/u/3 if fuzzy_not(p.is_zero): return _ans(y2) # sort it out once they know the values of the coefficients return [Piecewise((a1, Eq(p, 0)), (a2, True)) for a1, a2 in zip(_ans(y1), _ans(y2))]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _realroots_quartic(a3, a2, a1, a0):\n # see http://mathworld.wolfram.com/QuarticEquation.html for details\n ys = _realroots_cubic(-a2, a1*a3 - 4*a0, 4*a0*a2 - a1*a1 - a0*a3*a3)\n ys = [y for y in ys if a3*a3-4*a2+4*y >= 0 and y*y-4*a0 >= 0]\n if not ys:\n return []\n y1 = min(ys)\n if a3*y1-2*a1 < 0:\n return (_realroots_quadratic(0.5*(a3+math.sqrt(a3*a3-4*a2+4*y1)), 0.5*(y1-math.sqrt(y1*y1-4*a0))) +\n _realroots_quadratic(0.5*(a3-math.sqrt(a3*a3-4*a2+4*y1)), 0.5*(y1+math.sqrt(y1*y1-4*a0))))\n else:\n return (_realroots_quadratic(0.5*(a3+math.sqrt(a3*a3-4*a2+4*y1)), 0.5*(y1+math.sqrt(y1*y1-4*a0))) +\n _realroots_quadratic(0.5*(a3-math.sqrt(a3*a3-4*a2+4*y1)), 0.5*(y1-math.sqrt(y1*y1-4*a0))))", "def roots_quintic(f):\n result = []\n\n coeff_5, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n if not all(coeff.is_Rational for coeff in (coeff_5, coeff_4, p_, q_, r_, s_)):\n return result\n\n if coeff_5 != 1:\n f = Poly(f / coeff_5)\n _, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n # Cancel coeff_4 to form x^5 + px^3 + qx^2 + rx + s\n if coeff_4:\n p = p_ - 2*coeff_4*coeff_4/5\n q = q_ - 3*coeff_4*p_/5 + 4*coeff_4**3/25\n r = r_ - 2*coeff_4*q_/5 + 3*coeff_4**2*p_/25 - 3*coeff_4**4/125\n s = s_ - coeff_4*r_/5 + coeff_4**2*q_/25 - coeff_4**3*p_/125 + 4*coeff_4**5/3125\n x = f.gen\n f = Poly(x**5 + p*x**3 + q*x**2 + r*x + s)\n else:\n p, q, r, s = p_, q_, r_, s_\n\n quintic = PolyQuintic(f)\n\n # Eqn standardized. Algo for solving starts here\n if not f.is_irreducible:\n return result\n f20 = quintic.f20\n # Check if f20 has linear factors over domain Z\n if f20.is_irreducible:\n return result\n # Now, we know that f is solvable\n for _factor in f20.factor_list()[1]:\n if _factor[0].is_linear:\n theta = _factor[0].root(0)\n break\n d = discriminant(f)\n delta = sqrt(d)\n # zeta = a fifth root of unity\n zeta1, zeta2, zeta3, zeta4 = quintic.zeta\n T = quintic.T(theta, d)\n tol = S(1e-10)\n alpha = T[1] + T[2]*delta\n alpha_bar = T[1] - T[2]*delta\n beta = T[3] + T[4]*delta\n beta_bar = T[3] - T[4]*delta\n\n disc = alpha**2 - 4*beta\n disc_bar = alpha_bar**2 - 4*beta_bar\n\n l0 = quintic.l0(theta)\n Stwo = S(2)\n l1 = _quintic_simplify((-alpha + sqrt(disc)) / Stwo)\n l4 = _quintic_simplify((-alpha - sqrt(disc)) / Stwo)\n\n l2 = _quintic_simplify((-alpha_bar + sqrt(disc_bar)) / Stwo)\n l3 = _quintic_simplify((-alpha_bar - sqrt(disc_bar)) / Stwo)\n\n order = quintic.order(theta, d)\n test = (order*delta.n()) - ( (l1.n() - l4.n())*(l2.n() - l3.n()) )\n # Comparing floats\n if not comp(test, 0, tol):\n l2, l3 = l3, l2\n\n # Now we have correct order of l's\n R1 = l0 + l1*zeta1 + l2*zeta2 + l3*zeta3 + l4*zeta4\n R2 = l0 + l3*zeta1 + l1*zeta2 + l4*zeta3 + l2*zeta4\n R3 = l0 + l2*zeta1 + l4*zeta2 + l1*zeta3 + l3*zeta4\n R4 = l0 + l4*zeta1 + l3*zeta2 + l2*zeta3 + l1*zeta4\n\n Res = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n Res_n = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n\n # Simplifying improves performance a lot for exact expressions\n R1 = _quintic_simplify(R1)\n R2 = _quintic_simplify(R2)\n R3 = _quintic_simplify(R3)\n R4 = _quintic_simplify(R4)\n\n # hard-coded results for [factor(i) for i in _vsolve(x**5 - a - I*b, x)]\n x0 = z**(S(1)/5)\n x1 = sqrt(2)\n x2 = sqrt(5)\n x3 = sqrt(5 - x2)\n x4 = I*x2\n x5 = x4 + I\n x6 = I*x0/4\n x7 = x1*sqrt(x2 + 5)\n sol = [x0, -x6*(x1*x3 - x5), x6*(x1*x3 + x5), -x6*(x4 + x7 - I), x6*(-x4 + x7 + I)]\n\n R1 = R1.as_real_imag()\n R2 = R2.as_real_imag()\n R3 = R3.as_real_imag()\n R4 = R4.as_real_imag()\n\n for i, s in enumerate(sol):\n Res[1][i] = 
_quintic_simplify(s.xreplace({z: R1[0] + I*R1[1]}))\n Res[2][i] = _quintic_simplify(s.xreplace({z: R2[0] + I*R2[1]}))\n Res[3][i] = _quintic_simplify(s.xreplace({z: R3[0] + I*R3[1]}))\n Res[4][i] = _quintic_simplify(s.xreplace({z: R4[0] + I*R4[1]}))\n\n for i in range(1, 5):\n for j in range(5):\n Res_n[i][j] = Res[i][j].n()\n Res[i][j] = _quintic_simplify(Res[i][j])\n r1 = Res[1][0]\n r1_n = Res_n[1][0]\n\n for i in range(5):\n if comp(im(r1_n*Res_n[4][i]), 0, tol):\n r4 = Res[4][i]\n break\n\n # Now we have various Res values. Each will be a list of five\n # values. We have to pick one r value from those five for each Res\n u, v = quintic.uv(theta, d)\n testplus = (u + v*delta*sqrt(5)).n()\n testminus = (u - v*delta*sqrt(5)).n()\n\n # Evaluated numbers suffixed with _n\n # We will use evaluated numbers for calculation. Much faster.\n r4_n = r4.n()\n r2 = r3 = None\n\n for i in range(5):\n r2temp_n = Res_n[2][i]\n for j in range(5):\n # Again storing away the exact number and using\n # evaluated numbers in computations\n r3temp_n = Res_n[3][j]\n if (comp((r1_n*r2temp_n**2 + r4_n*r3temp_n**2 - testplus).n(), 0, tol) and\n comp((r3temp_n*r1_n**2 + r2temp_n*r4_n**2 - testminus).n(), 0, tol)):\n r2 = Res[2][i]\n r3 = Res[3][j]\n break\n if r2 is not None:\n break\n else:\n return [] # fall back to normal solve\n\n # Now, we have r's so we can get roots\n x1 = (r1 + r2 + r3 + r4)/5\n x2 = (r1*zeta4 + r2*zeta3 + r3*zeta2 + r4*zeta1)/5\n x3 = (r1*zeta3 + r2*zeta1 + r3*zeta4 + r4*zeta2)/5\n x4 = (r1*zeta2 + r2*zeta4 + r3*zeta1 + r4*zeta3)/5\n x5 = (r1*zeta1 + r2*zeta2 + r3*zeta3 + r4*zeta4)/5\n result = [x1, x2, x3, x4, x5]\n\n # Now check if solutions are distinct\n\n saw = set()\n for r in result:\n r = r.n(2)\n if r in saw:\n # Roots were identical. Abort, return []\n # and fall back to usual solve\n return []\n saw.add(r)\n\n # Restore to original equation where coeff_4 is nonzero\n if coeff_4:\n result = [x - coeff_4 / 5 for x in result]\n return result", "def roots_cubic(f, trig=False):\n if trig:\n a, b, c, d = f.all_coeffs()\n p = (3*a*c - b**2)/(3*a**2)\n q = (2*b**3 - 9*a*b*c + 27*a**2*d)/(27*a**3)\n D = 18*a*b*c*d - 4*b**3*d + b**2*c**2 - 4*a*c**3 - 27*a**2*d**2\n if (D > 0) == True:\n rv = []\n for k in range(3):\n rv.append(2*sqrt(-p/3)*cos(acos(q/p*sqrt(-3/p)*Rational(3, 2))/3 - k*pi*Rational(2, 3)))\n return [i - b/3/a for i in rv]\n\n # a*x**3 + b*x**2 + c*x + d -> x**3 + a*x**2 + b*x + c\n _, a, b, c = f.monic().all_coeffs()\n\n if c is S.Zero:\n x1, x2 = roots([1, a, b], multiple=True)\n return [x1, S.Zero, x2]\n\n # x**3 + a*x**2 + b*x + c -> u**3 + p*u + q\n p = b - a**2/3\n q = c - a*b/3 + 2*a**3/27\n\n pon3 = p/3\n aon3 = a/3\n\n u1 = None\n if p is S.Zero:\n if q is S.Zero:\n return [-aon3]*3\n u1 = -root(q, 3) if q.is_positive else root(-q, 3)\n elif q is S.Zero:\n y1, y2 = roots([1, 0, p], multiple=True)\n return [tmp - aon3 for tmp in [y1, S.Zero, y2]]\n elif q.is_real and q.is_negative:\n u1 = -root(-q/2 + sqrt(q**2/4 + pon3**3), 3)\n\n coeff = I*sqrt(3)/2\n if u1 is None:\n u1 = S.One\n u2 = Rational(-1, 2) + coeff\n u3 = Rational(-1, 2) - coeff\n b, c, d = a, b, c # a, b, c, d = S.One, a, b, c\n D0 = b**2 - 3*c # b**2 - 3*a*c\n D1 = 2*b**3 - 9*b*c + 27*d # 2*b**3 - 9*a*b*c + 27*a**2*d\n C = root((D1 + sqrt(D1**2 - 4*D0**3))/2, 3)\n return [-(b + uk*C + D0/C/uk)/3 for uk in [u1, u2, u3]] # -(b + uk*C + D0/C/uk)/3/a\n\n u2 = u1*(Rational(-1, 2) + coeff)\n u3 = u1*(Rational(-1, 2) - coeff)\n\n if p is S.Zero:\n return [u1 - aon3, u2 - aon3, u3 - aon3]\n\n soln = [\n -u1 
+ pon3/u1 - aon3,\n -u2 + pon3/u2 - aon3,\n -u3 + pon3/u3 - aon3\n ]\n\n return soln", "def _roots_quartic_euler(p, q, r, a):\n # solve the resolvent equation\n x = Dummy('x')\n eq = 64*x**3 + 32*p*x**2 + (4*p**2 - 16*r)*x - q**2\n xsols = list(roots(Poly(eq, x), cubics=False).keys())\n xsols = [sol for sol in xsols if sol.is_rational and sol.is_nonzero]\n if not xsols:\n return None\n R = max(xsols)\n c1 = sqrt(R)\n B = -q*c1/(4*R)\n A = -R - p/2\n c2 = sqrt(A + B)\n c3 = sqrt(A - B)\n return [c1 - c2 - a, -c1 - c3 - a, -c1 + c3 - a, c1 + c2 - a]", "def roots_quadratic(f):\n\n a, b, c = f.all_coeffs()\n dom = f.get_domain()\n\n def _sqrt(d):\n # remove squares from square root since both will be represented\n # in the results; a similar thing is happening in roots() but\n # must be duplicated here because not all quadratics are binomials\n co = []\n other = []\n for di in Mul.make_args(d):\n if di.is_Pow and di.exp.is_Integer and di.exp % 2 == 0:\n co.append(Pow(di.base, di.exp//2))\n else:\n other.append(di)\n if co:\n d = Mul(*other)\n co = Mul(*co)\n return co*sqrt(d)\n return sqrt(d)\n\n def _simplify(expr):\n if dom.is_Composite:\n return factor(expr)\n else:\n from sympy.simplify.simplify import simplify\n return simplify(expr)\n\n if c is S.Zero:\n r0, r1 = S.Zero, -b/a\n\n if not dom.is_Numerical:\n r1 = _simplify(r1)\n elif r1.is_negative:\n r0, r1 = r1, r0\n elif b is S.Zero:\n r = -c/a\n if not dom.is_Numerical:\n r = _simplify(r)\n\n R = _sqrt(r)\n r0 = -R\n r1 = R\n else:\n d = b**2 - 4*a*c\n A = 2*a\n B = -b/A\n\n if not dom.is_Numerical:\n d = _simplify(d)\n B = _simplify(B)\n\n D = factor_terms(_sqrt(d)/A)\n r0 = B - D\n r1 = B + D\n if a.is_negative:\n r0, r1 = r1, r0\n elif not dom.is_Numerical:\n r0, r1 = [expand_2arg(i) for i in (r0, r1)]\n\n return [r0, r1]", "def realpolyroots(*cs):\n if not cs:\n return [0]\n try:\n f = 1.0/cs[0]\n cs = [f*c for c in cs[1:]]\n except ArithmeticError:\n return realpolyroots(*cs[1:])\n else:\n n = len(cs)\n if n == 0:\n return []\n elif n == 1:\n return [-cs[0]]\n elif n == 2:\n return _realroots_quadratic(*cs)\n elif n == 3:\n return _realroots_cubic(*cs)\n elif n == 4:\n return _realroots_quartic(*cs)\n else:\n raise RuntimeError(\"realpolyroots solver currently limited to polynoms up to the power of 4\")", "def test_cubic_roots(roots, a0, a1, a2, a3=None, tol=1.0e-12):\n\n N = len(a0)\n for n in range(N):\n c0 = a0[n]\n c1 = a1[n]\n c2 = a2[n]\n c3 = a3[n]\n\n print(f\"Polynomial {n}: a = {(c0,c1,c2,c3)}\")\n\n rts = np.unique(roots[n])\n rts = rts[~np.isnan(rts)]\n\n for x in rts:\n f = c0 + c1 * x + c2 * x**2 + c3 * x**3\n ok = np.abs(f) <= tol\n\n print(f\" root x = {x}: f(x) = {f} {'OK' if ok else 'FAILED'}\")\n\n if not ok:\n raise Exception(\"NOT OK!\")\n\n if len(rts) == 0:\n print(\" no real roots.\")", "def _realroots_quadratic(a1, a0):\n D = a1*a1 - 4*a0\n if D < 0:\n return []\n SD = math.sqrt(D)\n return [0.5 * (-a1 + SD), 0.5 * (-a1 - SD)]", "def cubic_roots(a0, a1, a2, a3=None):\n\n N = len(a0)\n out = np.full([N, 3], np.nan)\n\n # Calculate the normalized form x^3 + a2 * x^2 + a1 * x + a0 = 0\n b_a = a2 if a3 is None else a2 / a3\n b_a2 = b_a * b_a\n c_a = a1 if a3 is None else a1 / a3\n d_a = a0 if a3 is None else a0 / a3\n\n # Solve the cubic equation\n Q = (3 * c_a - b_a2) / 9\n R = (9 * b_a * c_a - 27 * d_a - 2 * b_a * b_a2) / 54\n Q3 = Q * Q * Q\n D = Q3 + R * R\n b_a_3 = (1.0 / 3.0) * b_a\n\n sel = Q == 0.0\n if np.any(sel):\n o = out[sel, 0]\n\n sel2 = R == 0.0\n if np.any(sel2):\n o[sel2] = 
-b_a_3[sel][sel2]\n\n if np.any(~sel2):\n o[~sel2] = np.pow(2 * R[sel][~sel2], 1 / 3.0) - b_a_3[sel][~sel2]\n\n out[sel, 0] = o\n\n sel = D <= 0.0\n if np.any(sel):\n # Three real roots\n theta = np.arccos(R[sel] / np.sqrt(-Q3[sel]))\n sqrt_Q = np.sqrt(-Q[sel])\n\n out[sel, 0] = 2 * sqrt_Q * np.cos(theta / 3.0) - b_a_3[sel]\n out[sel, 1] = 2 * sqrt_Q * np.cos((theta + 2 * np.pi) / 3.0) - b_a_3[sel]\n out[sel, 2] = 2 * sqrt_Q * np.cos((theta + 4 * np.pi) / 3.0) - b_a_3[sel]\n\n return out", "def test_cases():\r\n quadratic_roots(1,3,-21)\r\n quadratic_roots(2,-4,-6)\r\n quadratic_roots(1,4,-12)\r\n quadratic_roots(4,12,9)\r\n quadratic_roots(-2,-11,-21)\r\n quadratic_roots(4,1,4)\r\n quadratic_roots(1,1,0)\r\n quadratic_roots(1,0,-16)\r\n quadratic_roots(1,-14,-49)\r\n quadratic_roots(1,10,25)", "def getRoots(self):\n a, b, c = self.getCoefficients()[2], self.getCoefficients()[1], self.getCoefficients()[0]\n delta = b**2 - 4*a*c\n if delta >= 0:\n roots = sorted([(-b - math.sqrt(delta))/(2*a), (-b + math.sqrt(delta))/(2*a)])\n else:\n roots = sorted([(-b - math.sqrt(-delta)*1j)/(2*a), (-b + math.sqrt(-delta)*1j)/(2*a)], key=lambda x: (x.real, x.imag))\n return roots", "def _realroots_cubic(a2, a1, a0):\n # see http://mathworld.wolfram.com/CubicFormula.html for details\n\n Q = (3*a1 - a2*a2) / 9.0\n R = (9*a2*a1 - 27*a0 - 2*a2*a2*a2) / 54.0\n D = Q*Q*Q + R*R\n\n if D > 0: # one real and two complex roots\n SD = math.sqrt(D)\n if R + SD >= 0:\n S = (R + SD)**(1/3.0)\n else:\n S = -(-R - SD)**(1/3.0)\n if R - SD >= 0:\n T = (R - SD)**(1/3.0)\n else:\n T = -(SD - R)**(1/3.0)\n return [S + T - a2/3.0]\n elif D == 0:\n if Q == 0: # one real root (R==0)\n return [-a2/3.0]\n else: # two real roots (R>0, Q<0)\n S = -math.sqrt(-Q)\n return [2*S - a2/3.0, -S - a2/3.0]\n else: # three real roots (Q<0)\n SQ = math.sqrt(-Q)\n arg = R / (SQ**3)\n if arg >= 1:\n theta = 0\n elif arg <= -1:\n theta = math.pi\n else:\n theta = math.acos(R/(SQ**3))\n return [2 * SQ * math.cos((theta + 2*2*i*math.pi)/3.0) - a2/3.0 for i in range(3)]", "def test_roots_slow():\n a, b, c, d, x = symbols(\"a,b,c,d,x\")\n\n f1 = x ** 2 * c + (a / b) + x * c * d - a\n f2 = x ** 2 * (a + b * (c - d) * a) + x * a * b * c / (b * d - d) + (a * d - c / d)\n\n assert list(roots(f1, x).values()) == [1, 1]\n assert list(roots(f2, x).values()) == [1, 1]\n\n (zz, yy, xx, zy, zx, yx, k) = symbols(\"zz,yy,xx,zy,zx,yx,k\")\n\n e1 = (zz - k) * (yy - k) * (xx - k) + zy * yx * zx + zx - zy - yx\n e2 = (zz - k) * yx * yx + zx * (yy - k) * zx + zy * zy * (xx - k)\n\n assert list(roots(e1 - e2, k).values()) == [1, 1, 1]\n\n f = x ** 3 + 2 * x ** 2 + 8\n R = list(roots(f).keys())\n\n assert not any(i for i in [f.subs(x, ri).n(chop=True) for ri in R])", "def root_3(a, b, c, d):\n abc = a * b * c\n bbb = b * b * b\n aad = a * a * d\n\n dd = (18. * abc * d - 4. * bbb * d\n + b * b * c * c - 4. * a * c * c * c\n - 27. * aad * d)\n d0 = b * b - 3. * a * c\n\n # second and third cubic unity roots (first is just 1)\n cu2 = -0.5 + 0.86602540378443864676j\n cu3 = -0.5 - 0.86602540378443864676j\n\n if not dd and not d0: # all real roots\n x1 = x2 = x3 = -b / (3. * a)\n elif not dd and d0: # double root, simple root\n x1 = x2 = ((9. * a * d - b * c) / (2. * d0))\n x3 = (4. * abc - 9. * aad - bbb) / (a * d0)\n else:\n d1 = 2. * bbb - 9. * abc\n d1 = d1 + 27. * aad\n\n if not d0: cin = d1 + 0j # inner terms cancel\n else: cin = (d1 - sqrt(-27.0 * a * a * dd)) / 2.\n\n cc = cin ** (1. / 3.)\n p = (-1. / (3. 
* a))\n\n x1 = p * (b + cc + d0 / cc)\n x2 = p * (b + cu2 * cc + d0 / (cu2 * cc))\n x3 = p * (b + cu3 * cc + d0 / (cu3 * cc))\n\n return x1, x2, x3", "def getRoots(self):\n # This part is for exercise 11\n # return []\n \n # This part is for exercise 12\n if self.getDegree() == 0:\n return []\n if self.getDegree() == 1:\n return LinearPolynomial(self.getCoefficients()).getRoots()\n if self.getDegree() == 2:\n return QuadraticPolynomial(self.getCoefficients()).getRoots()\n else:\n current_polynomial = Polynomial(self.getCoefficients())\n roots = []\n \n while current_polynomial.__coefficients[0] == 0:\n roots.append(0)\n current_polynomial.__coefficients = current_polynomial.__coefficients[1:]\n \n while current_polynomial.getDegree() > 2:\n\n #Initialization\n x = (random.random(), random.random(), random.random())\n while abs(current_polynomial.evaluate(x[2])) > EPSILON:\n x = (random.random(), random.random(), random.random())\n nb_iters = 0\n while (abs(current_polynomial.evaluate(x[2])) > EPSILON or abs(x[2] - x[1]) > TOLERANCE) and nb_iters < MAX_ITERATIONS:\n x = getNextApproximations(current_polynomial.evaluate, x)\n nb_iters += 1\n\n roots.append(x[2])\n \n if abs(x[2].imag) < TOLERANCE:\n current_polynomial = current_polynomial.divide(Polynomial([-x[2].real, 1]))[0]\n else:\n roots.append(x[2].conjugate())\n current_polynomial = current_polynomial.divide(Polynomial([abs(x[2])**2, -2*x[2].real, 1]))[0]\n \n roots += current_polynomial.getRoots()\n \n for i in range(len(roots)):\n if roots[i].imag == 0:\n roots[i] = roots[i].real\n \n return sorted(roots, key = lambda x: (x.real, x.imag))", "def find_poly_roots(poly, initial_guess = 0.0, limit = 0.00001, max_iterations = 1000):\n solutions = []\n # Find solutions numerically for n > 0, split them off until n = 2\n for q in range(polynomial.order(poly) - 2):\n x = find_poly_root(poly, initial_guess, limit, max_iterations)\n if not x:\n break\n poly = polynomial.div(poly, polynomial.make_poly([-x, 1]))\n solutions.append(x)\n # Find the rest of the roots analytically\n if polynomial.order(poly) == 1:\n solutions.append(- polynomial.coeff(poly, 1) / polynomial.coeff(poly, 0))\n elif polynomial.order(poly) == 2:\n a = polynomial.coeff(poly, 2)\n b = polynomial.coeff(poly, 1)\n c = polynomial.coeff(poly, 0)\n d = b ** 2 - 4 * a * c\n if d == 0:\n solutions.append(-b / (2 * a))\n elif d > 0:\n solutions.append((- b + sqrt(d)) / (2 * a))\n solutions.append((- b - sqrt(d)) / (2 * a))\n return solutions", "def root_4(a, b, c, d, e):\n aa, bb, cc, dd = b / a, c / a, d / a, e / a\n a2, b2 = aa * aa, bb * bb\n\n bq = (- (2. * b2 * bb) + 9. * aa * bb * cc\n - 27. * (cc * cc + a2 * dd)\n + 72. * bb * dd)\n c1 = (b2 - 3. * aa * cc + 12. * dd)\n cu2 = -0.5 + 0.86602540378443864676j\n\n p1 = sqrt(bq * bq - (4. * c1 * c1 * c1))\n v = (bq - p1) / -2.\n if not v: v = (bq + p1) / -2. # choose non zero quad root\n\n u = a2 / 4. - (2. * bb) / 3.\n if not v: uu = u # both quad roots zero, uu simplifies to u\n else:\n v3 = (v ** (1. / 3.)) * cu2\n uu = u + (1. / 3.) * (v3 + c1 / v3)\n\n p1 = - aa / 4.\n if not uu: # degenerate, quadruple root\n x1 = x2 = x3 = x4 = p1\n else:\n p2 = 3. * a2 - 8. * bb - 4. * uu\n p3 = -(a2 * aa) + 4. * aa * bb - 8. 
* cc\n\n usq = sqrt(uu)\n usq2 = usq / 2.\n u4 = uu / 4.\n\n blkp = .25 * sqrt(p2 + p3 / usq)\n blkm = .25 * sqrt(p2 + p3 / -usq)\n\n x1 = p1 + usq2 + blkp\n x2 = p1 - usq2 + blkm\n x3 = p1 + usq2 - blkp\n x4 = p1 - usq2 - blkm\n return x1, x2, x3, x4", "def roots(f, *gens,\n auto=True,\n cubics=True,\n trig=False,\n quartics=True,\n quintics=False,\n multiple=False,\n filter=None,\n predicate=None,\n strict=False,\n **flags):\n from sympy.polys.polytools import to_rational_coeffs\n flags = dict(flags)\n\n if isinstance(f, list):\n if gens:\n raise ValueError('redundant generators given')\n\n x = Dummy('x')\n\n poly, i = {}, len(f) - 1\n\n for coeff in f:\n poly[i], i = sympify(coeff), i - 1\n\n f = Poly(poly, x, field=True)\n else:\n try:\n F = Poly(f, *gens, **flags)\n if not isinstance(f, Poly) and not F.gen.is_Symbol:\n raise PolynomialError(\"generator must be a Symbol\")\n f = F\n except GeneratorsNeeded:\n if multiple:\n return []\n else:\n return {}\n else:\n n = f.degree()\n if f.length() == 2 and n > 2:\n # check for foo**n in constant if dep is c*gen**m\n con, dep = f.as_expr().as_independent(*f.gens)\n fcon = -(-con).factor()\n if fcon != con:\n con = fcon\n bases = []\n for i in Mul.make_args(con):\n if i.is_Pow:\n b, e = i.as_base_exp()\n if e.is_Integer and b.is_Add:\n bases.append((b, Dummy(positive=True)))\n if bases:\n rv = roots(Poly((dep + con).xreplace(dict(bases)),\n *f.gens), *F.gens,\n auto=auto,\n cubics=cubics,\n trig=trig,\n quartics=quartics,\n quintics=quintics,\n multiple=multiple,\n filter=filter,\n predicate=predicate,\n **flags)\n return {factor_terms(k.xreplace(\n {v: k for k, v in bases})\n ): v for k, v in rv.items()}\n\n if f.is_multivariate:\n raise PolynomialError('multivariate polynomials are not supported')\n\n def _update_dict(result, zeros, currentroot, k):\n if currentroot == S.Zero:\n if S.Zero in zeros:\n zeros[S.Zero] += k\n else:\n zeros[S.Zero] = k\n if currentroot in result:\n result[currentroot] += k\n else:\n result[currentroot] = k\n\n def _try_decompose(f):\n \"\"\"Find roots using functional decomposition. \"\"\"\n factors, roots = f.decompose(), []\n\n for currentroot in _try_heuristics(factors[0]):\n roots.append(currentroot)\n\n for currentfactor in factors[1:]:\n previous, roots = list(roots), []\n\n for currentroot in previous:\n g = currentfactor - Poly(currentroot, f.gen)\n\n for currentroot in _try_heuristics(g):\n roots.append(currentroot)\n\n return roots\n\n def _try_heuristics(f):\n \"\"\"Find roots using formulas and some tricks. 
\"\"\"\n if f.is_ground:\n return []\n if f.is_monomial:\n return [S.Zero]*f.degree()\n\n if f.length() == 2:\n if f.degree() == 1:\n return list(map(cancel, roots_linear(f)))\n else:\n return roots_binomial(f)\n\n result = []\n\n for i in [-1, 1]:\n if not f.eval(i):\n f = f.quo(Poly(f.gen - i, f.gen))\n result.append(i)\n break\n\n n = f.degree()\n\n if n == 1:\n result += list(map(cancel, roots_linear(f)))\n elif n == 2:\n result += list(map(cancel, roots_quadratic(f)))\n elif f.is_cyclotomic:\n result += roots_cyclotomic(f)\n elif n == 3 and cubics:\n result += roots_cubic(f, trig=trig)\n elif n == 4 and quartics:\n result += roots_quartic(f)\n elif n == 5 and quintics:\n result += roots_quintic(f)\n\n return result\n\n # Convert the generators to symbols\n dumgens = symbols('x:%d' % len(f.gens), cls=Dummy)\n f = f.per(f.rep, dumgens)\n\n (k,), f = f.terms_gcd()\n\n if not k:\n zeros = {}\n else:\n zeros = {S.Zero: k}\n\n coeff, f = preprocess_roots(f)\n\n if auto and f.get_domain().is_Ring:\n f = f.to_field()\n\n # Use EX instead of ZZ_I or QQ_I\n if f.get_domain().is_QQ_I:\n f = f.per(f.rep.convert(EX))\n\n rescale_x = None\n translate_x = None\n\n result = {}\n\n if not f.is_ground:\n dom = f.get_domain()\n if not dom.is_Exact and dom.is_Numerical:\n for r in f.nroots():\n _update_dict(result, zeros, r, 1)\n elif f.degree() == 1:\n _update_dict(result, zeros, roots_linear(f)[0], 1)\n elif f.length() == 2:\n roots_fun = roots_quadratic if f.degree() == 2 else roots_binomial\n for r in roots_fun(f):\n _update_dict(result, zeros, r, 1)\n else:\n _, factors = Poly(f.as_expr()).factor_list()\n if len(factors) == 1 and f.degree() == 2:\n for r in roots_quadratic(f):\n _update_dict(result, zeros, r, 1)\n else:\n if len(factors) == 1 and factors[0][1] == 1:\n if f.get_domain().is_EX:\n res = to_rational_coeffs(f)\n if res:\n if res[0] is None:\n translate_x, f = res[2:]\n else:\n rescale_x, f = res[1], res[-1]\n result = roots(f)\n if not result:\n for currentroot in _try_decompose(f):\n _update_dict(result, zeros, currentroot, 1)\n else:\n for r in _try_heuristics(f):\n _update_dict(result, zeros, r, 1)\n else:\n for currentroot in _try_decompose(f):\n _update_dict(result, zeros, currentroot, 1)\n else:\n for currentfactor, k in factors:\n for r in _try_heuristics(Poly(currentfactor, f.gen, field=True)):\n _update_dict(result, zeros, r, k)\n\n if coeff is not S.One:\n _result, result, = result, {}\n\n for currentroot, k in _result.items():\n result[coeff*currentroot] = k\n\n if filter not in [None, 'C']:\n handlers = {\n 'Z': lambda r: r.is_Integer,\n 'Q': lambda r: r.is_Rational,\n 'R': lambda r: all(a.is_real for a in r.as_numer_denom()),\n 'I': lambda r: r.is_imaginary,\n }\n\n try:\n query = handlers[filter]\n except KeyError:\n raise ValueError(\"Invalid filter: %s\" % filter)\n\n for zero in dict(result).keys():\n if not query(zero):\n del result[zero]\n\n if predicate is not None:\n for zero in dict(result).keys():\n if not predicate(zero):\n del result[zero]\n if rescale_x:\n result1 = {}\n for k, v in result.items():\n result1[k*rescale_x] = v\n result = result1\n if translate_x:\n result1 = {}\n for k, v in result.items():\n result1[k + translate_x] = v\n result = result1\n\n # adding zero roots after non-trivial roots have been translated\n result.update(zeros)\n\n if strict and sum(result.values()) < f.degree():\n raise UnsolvableFactorError(filldedent('''\n Strict mode: some factors cannot be solved in radicals, so\n a complete list of solutions cannot be returned. 
Call\n roots with strict=False to get solutions expressible in\n radicals (if there are any).\n '''))\n\n if not multiple:\n return result\n else:\n zeros = []\n\n for zero in ordered(result):\n zeros.extend([zero]*result[zero])\n\n return zeros", "def quadratic_roots(a, b, c):\r\n if (((math.pow(b,2))-(4*a*c))> 0):\r\n print(\"Equation: \",str(a),\"x^2 + \",str(b),\"x + \",str(c), sep=\"\")\r\n print(\"Two roots.\")\r\n print(\"x =\",((-b + math.sqrt(math.pow(b,2)-4*a*c))/(2*a)))\r\n print(\"x =\",((-b - math.sqrt(math.pow(b,2)-4*a*c))/(2*a)))\r\n \r\n elif (((math.pow(b,2))-4*a*c) == 0):\r\n print(\"Equation: \",str(a),\"x^2 + \",str(b),\"x + \",str(c), sep=\"\")\r\n print(\"One root.\")\r\n print(\"x =\",((-b + math.sqrt(math.pow(b,2)-4*a*c))/(2*a)))\r\n \r\n else :\r\n print(\"Equation: \",str(a),\"x^2 + \",str(b),\"x + \",str(c), sep=\"\")\r\n print(\"No roots.\")", "def solve(n=5000,C=-6*10**11,a=900,b=3):\n coeffs = np.zeros(n+2)\n coeffs[0] = a-b*n\n coeffs[1] = b*(n+1) - a\n coeffs[-3] = -C\n coeffs[-2] = 2*C - a\n coeffs[-1] = a+b-C\n mp.dps = 27\n roots = polyroots(coeffs)\n for root in roots:\n print root", "def roots_cyclotomic(f, factor=False):\n L, U = _inv_totient_estimate(f.degree())\n\n for n in range(L, U + 1):\n g = cyclotomic_poly(n, f.gen, polys=True)\n\n if f.expr == g.expr:\n break\n else: # pragma: no cover\n raise RuntimeError(\"failed to find index of a cyclotomic polynomial\")\n\n roots = []\n\n if not factor:\n # get the indices in the right order so the computed\n # roots will be sorted\n h = n//2\n ks = [i for i in range(1, n + 1) if igcd(i, n) == 1]\n ks.sort(key=lambda x: (x, -1) if x <= h else (abs(x - n), 1))\n d = 2*I*pi/n\n for k in reversed(ks):\n roots.append(exp(k*d).expand(complex=True))\n else:\n g = Poly(f, extension=root(-1, n))\n\n for h, _ in ordered(g.factor_list()[1]):\n roots.append(-h.TC())\n\n return roots", "def solve_quadratic(a, b, c):\n assert a != 0\n d = b * b - 4 * a * c\n if d < 0:\n return []\n elif d == 0:\n return -b / (2 * a)\n else:\n return [\n (-b - math.sqrt(d)) / (2 * a),\n (-b + math.sqrt(d)) / (2 * a)\n ]", "def quadratic_root_goes_to_infinity():\n for dt in 1E-7, 1E-12, 1E-16:\n a = dt\n b = 1 - dt\n c = -0.1\n print((dt, quadratic_roots(a, b, c)))", "def find_roots(a: int, b: int, c: int):\n if a == 0:\n x = -c / b\n print(f\"Solution is {x}\")\n return x\n\n d = b ** 2 - (4 * a * c)\n\n if d < 0:\n print(\"No solutions\")\n return None\n elif d == 0:\n x = (-b + sqrt(d)) / (2 * a)\n print(f\"Solution is {x}\")\n return x\n\n x1 = (-b + sqrt(d)) / (2 * a)\n x2 = (-b - sqrt(d)) / (2 * a)\n\n print(f\"Solutions are {x1} and {x2}\")\n return x1, x2", "def test_solve_polynomial_cv_1a():\n assert solveset_real(sqrt(x) - 1, x) == FiniteSet(1)\n assert solveset_real(sqrt(x) - 2, x) == FiniteSet(4)\n assert solveset_real(x**Rational(1, 4) - 2, x) == FiniteSet(16)\n assert solveset_real(x**Rational(1, 3) - 3, x) == FiniteSet(27)\n assert solveset_real(x*(x**(S(1) / 3) - 3), x) == \\\n FiniteSet(S(0), S(27))", "def test_solve_polynomial_cv_1a():\n assert solveset_real(sqrt(x) - 1, x) == FiniteSet(1)\n assert solveset_real(sqrt(x) - 2, x) == FiniteSet(4)\n assert solveset_real(x**Rational(1, 4) - 2, x) == FiniteSet(16)\n assert solveset_real(x**Rational(1, 3) - 3, x) == FiniteSet(27)\n assert solveset_real(x*(x**(S.One / 3) - 3), x) == \\\n FiniteSet(S.Zero, S(27))", "def quartic_potential(x):\n k1=1\n k2=10\n return (k1*x**4)-(k2*x**2)", "def iroots(a, b, c):\n discriminant = b*b-4*a*c\n if discriminant == 0:\n return -b/(2*a), 
None\n else:\n return (-b+isqrt(discriminant))/(2*a), (-b-isqrt(discriminant))/(2*a)", "def qsolve(self, options=''):\n for x in self._rhs:\n if x != 0:\n raise NotImplementedError, \"qsolve is currently only implemented for homogeneous systems (i.e., with rhs=0)\"\n out, err = self.call_4ti2('qsolve', options=options)\n qhom = ExtremalRays(self._read_file('qhom'), self)\n qfree = self._read_file('qfree')\n return (qhom, qfree)", "def _try_heuristics(f):\n if f.is_ground:\n return []\n if f.is_monomial:\n return [S.Zero]*f.degree()\n\n if f.length() == 2:\n if f.degree() == 1:\n return list(map(cancel, roots_linear(f)))\n else:\n return roots_binomial(f)\n\n result = []\n\n for i in [-1, 1]:\n if not f.eval(i):\n f = f.quo(Poly(f.gen - i, f.gen))\n result.append(i)\n break\n\n n = f.degree()\n\n if n == 1:\n result += list(map(cancel, roots_linear(f)))\n elif n == 2:\n result += list(map(cancel, roots_quadratic(f)))\n elif f.is_cyclotomic:\n result += roots_cyclotomic(f)\n elif n == 3 and cubics:\n result += roots_cubic(f, trig=trig)\n elif n == 4 and quartics:\n result += roots_quartic(f)\n elif n == 5 and quintics:\n result += roots_quintic(f)\n\n return result" ]
[ "0.7691381", "0.70223194", "0.67854947", "0.6726414", "0.6687224", "0.66422635", "0.66170025", "0.6552937", "0.63935107", "0.63915175", "0.6389726", "0.6380986", "0.62318456", "0.6211318", "0.61140424", "0.60838556", "0.60170877", "0.5920878", "0.58423185", "0.57521677", "0.57482815", "0.57407016", "0.5727141", "0.5722642", "0.5719739", "0.56606394", "0.56146795", "0.56029856", "0.5549739", "0.55233824" ]
0.7383764
1
Returns a list of roots of a binomial polynomial. If the domain is ZZ then the roots will be sorted with negatives coming before positives. The ordering will be the same for any numerical coefficients as long as the assumptions tested are correct, otherwise the ordering will not be sorted (but will be canonical).
def roots_binomial(f): n = f.degree() a, b = f.nth(n), f.nth(0) base = -cancel(b/a) alpha = root(base, n) if alpha.is_number: alpha = alpha.expand(complex=True) # define some parameters that will allow us to order the roots. # If the domain is ZZ this is guaranteed to return roots sorted # with reals before non-real roots and non-real sorted according # to real part and imaginary part, e.g. -1, 1, -1 + I, 2 - I neg = base.is_negative even = n % 2 == 0 if neg: if even == True and (base + 1).is_positive: big = True else: big = False # get the indices in the right order so the computed # roots will be sorted when the domain is ZZ ks = [] imax = n//2 if even: ks.append(imax) imax -= 1 if not neg: ks.append(0) for i in range(imax, 0, -1): if neg: ks.extend([i, -i]) else: ks.extend([-i, i]) if neg: ks.append(0) if big: for i in range(0, len(ks), 2): pair = ks[i: i + 2] pair = list(reversed(pair)) # compute the roots roots, d = [], 2*I*pi/n for k in ks: zeta = exp(k*d).expand(complex=True) roots.append((alpha*zeta).expand(power_base=False)) return roots
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def almost_positive_roots(self):\n assert self.cartan_type().is_finite()\n return sorted([ -beta for beta in self.simple_roots() ] + list(self.positive_roots()))", "def realpolyroots(*cs):\n if not cs:\n return [0]\n try:\n f = 1.0/cs[0]\n cs = [f*c for c in cs[1:]]\n except ArithmeticError:\n return realpolyroots(*cs[1:])\n else:\n n = len(cs)\n if n == 0:\n return []\n elif n == 1:\n return [-cs[0]]\n elif n == 2:\n return _realroots_quadratic(*cs)\n elif n == 3:\n return _realroots_cubic(*cs)\n elif n == 4:\n return _realroots_quartic(*cs)\n else:\n raise RuntimeError(\"realpolyroots solver currently limited to polynoms up to the power of 4\")", "def getRoots(self):\n # This part is for exercise 11\n # return []\n \n # This part is for exercise 12\n if self.getDegree() == 0:\n return []\n if self.getDegree() == 1:\n return LinearPolynomial(self.getCoefficients()).getRoots()\n if self.getDegree() == 2:\n return QuadraticPolynomial(self.getCoefficients()).getRoots()\n else:\n current_polynomial = Polynomial(self.getCoefficients())\n roots = []\n \n while current_polynomial.__coefficients[0] == 0:\n roots.append(0)\n current_polynomial.__coefficients = current_polynomial.__coefficients[1:]\n \n while current_polynomial.getDegree() > 2:\n\n #Initialization\n x = (random.random(), random.random(), random.random())\n while abs(current_polynomial.evaluate(x[2])) > EPSILON:\n x = (random.random(), random.random(), random.random())\n nb_iters = 0\n while (abs(current_polynomial.evaluate(x[2])) > EPSILON or abs(x[2] - x[1]) > TOLERANCE) and nb_iters < MAX_ITERATIONS:\n x = getNextApproximations(current_polynomial.evaluate, x)\n nb_iters += 1\n\n roots.append(x[2])\n \n if abs(x[2].imag) < TOLERANCE:\n current_polynomial = current_polynomial.divide(Polynomial([-x[2].real, 1]))[0]\n else:\n roots.append(x[2].conjugate())\n current_polynomial = current_polynomial.divide(Polynomial([abs(x[2])**2, -2*x[2].real, 1]))[0]\n \n roots += current_polynomial.getRoots()\n \n for i in range(len(roots)):\n if roots[i].imag == 0:\n roots[i] = roots[i].real\n \n return sorted(roots, key = lambda x: (x.real, x.imag))", "def unit_lobatto_nodes(order, tol=1e-15, output=True):\n\n\troots=[]\n\n\n\t# The polynomials are alternately even and odd functions\n\t# so evaluate only half the number of roots.\n\t# lobatto polynomial is derivative of legendre polynomial of order n-1\n\torder = order-1\n\tfor i in range(1,int(order/2) +1):\n\n\t\t# initial guess, x0, for ith root \n\t\t# the approximate values of the abscissas.\n\t\t# these are good initial guesses\n\t\t#x0=np.cos(np.pi*(i-0.25)/(order+0.5)) \n\t\tx0=np.cos(np.pi*(i+0.1)/(order+0.5)) # not sure why this inital guess is better\n\n\t\t# call newton to find the roots of the lobatto polynomial\n\t\tFfun, Jfun = dL(order), ddL(order) \n\t\tri, _ = newton( Ffun, Jfun, x0 )\n\n\t\troots.append(ri)\n\n\t# remove roots close to zero\n\tcleaned_roots = []\n\ttol = 1e-08\n\tfor r in roots:\n\t\tif abs(r) >= tol:\n\t\t\tcleaned_roots += [r]\n\troots = cleaned_roots\n\n\t# use symetric properties to find remmaining roots\n\t# the nodal abscissas are computed by finding the \n\t# nonnegative zeros of the Legendre polynomial pm(x) \n\t# with Newton’s method (the negative zeros are obtained from symmetry).\n\troots = np.array(roots)\n\t\n\t# add -1 and 1 to tail ends\n\t# check parity of order + 1\n\t# even. no center \n\tif (order + 1) % 2==0:\n\t\troots = np.concatenate( ([-1.0], -1.0*roots, roots[::-1], [1.0]) )\n\n\t# odd. 
center root is 0.0\n\telse:\n\t\troots = np.concatenate( ([-1.0], -1.0*roots, [0.0], roots[::-1], [1.0] ) )\n\n\treturn roots", "def roots(self):\n return list(self.positive_roots()) + list(self.negative_roots())", "def decreasing_roots(self):\n if self.size() == 0:\n return []\n roots = [1]\n root = 1\n for i in range(2, self.size() + 1):\n if not self.le(i, root):\n roots.append(i)\n root = i\n return roots", "def getRoots(self):\n a, b, c = self.getCoefficients()[2], self.getCoefficients()[1], self.getCoefficients()[0]\n delta = b**2 - 4*a*c\n if delta >= 0:\n roots = sorted([(-b - math.sqrt(delta))/(2*a), (-b + math.sqrt(delta))/(2*a)])\n else:\n roots = sorted([(-b - math.sqrt(-delta)*1j)/(2*a), (-b + math.sqrt(-delta)*1j)/(2*a)], key=lambda x: (x.real, x.imag))\n return roots", "def find_poly_roots(poly, initial_guess = 0.0, limit = 0.00001, max_iterations = 1000):\n solutions = []\n # Find solutions numerically for n > 0, split them off until n = 2\n for q in range(polynomial.order(poly) - 2):\n x = find_poly_root(poly, initial_guess, limit, max_iterations)\n if not x:\n break\n poly = polynomial.div(poly, polynomial.make_poly([-x, 1]))\n solutions.append(x)\n # Find the rest of the roots analytically\n if polynomial.order(poly) == 1:\n solutions.append(- polynomial.coeff(poly, 1) / polynomial.coeff(poly, 0))\n elif polynomial.order(poly) == 2:\n a = polynomial.coeff(poly, 2)\n b = polynomial.coeff(poly, 1)\n c = polynomial.coeff(poly, 0)\n d = b ** 2 - 4 * a * c\n if d == 0:\n solutions.append(-b / (2 * a))\n elif d > 0:\n solutions.append((- b + sqrt(d)) / (2 * a))\n solutions.append((- b - sqrt(d)) / (2 * a))\n return solutions", "def roots_cyclotomic(f, factor=False):\n L, U = _inv_totient_estimate(f.degree())\n\n for n in range(L, U + 1):\n g = cyclotomic_poly(n, f.gen, polys=True)\n\n if f.expr == g.expr:\n break\n else: # pragma: no cover\n raise RuntimeError(\"failed to find index of a cyclotomic polynomial\")\n\n roots = []\n\n if not factor:\n # get the indices in the right order so the computed\n # roots will be sorted\n h = n//2\n ks = [i for i in range(1, n + 1) if igcd(i, n) == 1]\n ks.sort(key=lambda x: (x, -1) if x <= h else (abs(x - n), 1))\n d = 2*I*pi/n\n for k in reversed(ks):\n roots.append(exp(k*d).expand(complex=True))\n else:\n g = Poly(f, extension=root(-1, n))\n\n for h, _ in ordered(g.factor_list()[1]):\n roots.append(-h.TC())\n\n return roots", "def almost_positive_roots_decomposition(self):\n # TODO: this should use a generic function for computing\n # orbits under the action of a group:\n # def orbits(seeds, operators)\n # INPUT:\n # - seeds: a list of elements\n # - operators: a list of functions\n #\n # Returns the orbits generated by seeds under the action of the operators\n tau_plus, tau_minus = self.tau_plus_minus()\n\n I = set(self.index_set())\n Delta = self.simple_roots()\n L, R = self.cartan_type().index_set_bipartition()\n\n orbits = []\n while I:\n i = I.pop()\n alpha = -self.simple_root(i)\n orbit = [alpha]\n if i in L:\n plus = False\n beta = tau_plus(alpha)\n else:\n plus = True\n beta = tau_minus(alpha)\n while -beta not in Delta and beta not in orbit:\n orbit.append(beta)\n if beta in Delta:\n j = beta.leading_support()\n I.discard(j)\n if plus:\n beta = tau_plus(beta)\n else:\n beta = tau_minus(beta)\n plus = not plus\n if -beta in Delta:\n orbit.append(beta)\n orbits.append(orbit)\n return orbits", "def increasing_roots(self):\n size = self.size()\n if size == 0:\n return []\n roots = [size]\n root = size\n for i in range(size - 1, 0, 
-1):\n if not self.le(i, root):\n roots.append(i)\n root = i\n return roots", "def roots(self) -> List[str]:\n return [node for node, degree in self.graph.in_degree() if degree == 0]", "def solve(n=5000,C=-6*10**11,a=900,b=3):\n coeffs = np.zeros(n+2)\n coeffs[0] = a-b*n\n coeffs[1] = b*(n+1) - a\n coeffs[-3] = -C\n coeffs[-2] = 2*C - a\n coeffs[-1] = a+b-C\n mp.dps = 27\n roots = polyroots(coeffs)\n for root in roots:\n print root", "def PrimitiveRoots(self, modulo):\n modRange = range(1, modulo)\n required = {x for x in modRange if fractions.gcd(x, modulo)}\n return [g for g in modRange if required == {pow(g, powers, modulo) for powers in modRange}]", "def preprocess_roots(poly):\n coeff = S.One\n\n poly_func = poly.func\n try:\n _, poly = poly.clear_denoms(convert=True)\n except DomainError:\n return coeff, poly\n\n poly = poly.primitive()[1]\n poly = poly.retract()\n\n # TODO: This is fragile. Figure out how to make this independent of construct_domain().\n if poly.get_domain().is_Poly and all(c.is_term for c in poly.rep.coeffs()):\n poly = poly.inject()\n\n strips = list(zip(*poly.monoms()))\n gens = list(poly.gens[1:])\n\n base, strips = strips[0], strips[1:]\n\n for gen, strip in zip(list(gens), strips):\n reverse = False\n\n if strip[0] < strip[-1]:\n strip = reversed(strip)\n reverse = True\n\n ratio = None\n\n for a, b in zip(base, strip):\n if not a and not b:\n continue\n elif not a or not b:\n break\n elif b % a != 0:\n break\n else:\n _ratio = b // a\n\n if ratio is None:\n ratio = _ratio\n elif ratio != _ratio:\n break\n else:\n if reverse:\n ratio = -ratio\n\n poly = poly.eval(gen, 1)\n coeff *= gen**(-ratio)\n gens.remove(gen)\n\n if gens:\n poly = poly.eject(*gens)\n\n if poly.is_univariate and poly.get_domain().is_ZZ:\n basis = _integer_basis(poly)\n\n if basis is not None:\n n = poly.degree()\n\n def func(k, coeff):\n return coeff//basis**(n - k[0])\n\n poly = poly.termwise(func)\n coeff *= basis\n\n if not isinstance(poly, poly_func):\n poly = poly_func(poly)\n return coeff, poly", "def arroots(self):\n return self.arpoly.roots()", "def base_polynome(numbers):\n\n monomes = [ x**n for n in numbers ]\n polynome = sum(monomes)\n\n return poly(polynome, x)", "def get_roots(self):\n roots = []\n for symbol in self.GlobalSymbolDict.values():\n if symbol.isRoot():\n roots += [symbol]\n return roots", "def Bpoly(n, x):\n n = int(n)\n out = 0\n for k in xrange(0, n+1):\n out += comb(n,k)*Bnum(n-k)*x**float(k)\n return out", "def find_roots(func, a, b, step=1e-3, roots=None):\n if roots is None:\n roots = []\n if len(roots) > 100:\n return roots\n if func(a) == 0:\n roots.append(a)\n find_roots(func, a+1e-3, b, roots)\n if func(b) == 0:\n roots.append(b)\n find_roots(func, a, b-1e-3, roots)\n # n_steps = 1000\n # step = 1./n_steps\n n_steps = int(1./step)\n win_size = b - a\n for s in np.linspace(a+step, b-step, n_steps):\n if func(s) * func(s+step*win_size) >= 0:\n continue\n r = brentq(func, s, s+step*win_size)\n roots.append(r)\n\n return roots", "def get_roots(self):\n raise NotImplementedError()", "def roots(self):\n roots = []\n u = self.left_root\n while u != NULL:\n roots.append(u)\n u = self.right_sib(u)\n return roots", "def _rootsFinder(self, fun, jac, bounds, npoints, method):\n if method == \"regular\":\n step = (bounds[1] - bounds[0]) / (npoints + 1)\n try:\n X0 = np.arange(bounds[0] + step, bounds[1], step)\n except:\n X0 = np.random.uniform(bounds[0], bounds[1], npoints)\n elif method == \"random\":\n X0 = np.random.uniform(bounds[0], bounds[1], npoints)\n\n def 
objFun(X, f, jac):\n g = 0\n j = np.zeros(X.shape)\n i = 0\n for x in X:\n fx = f(x)\n g = g + fx**2\n j[i] = 2 * fx * jac(x)\n i = i + 1\n return g, j\n\n opt = minimize(\n lambda X: objFun(X, fun, jac),\n X0,\n method=\"L-BFGS-B\",\n jac=True,\n bounds=[bounds] * len(X0),\n )\n\n X = opt.x\n np.round(X, decimals=5)\n return np.unique(X)", "def roots(self):\n if not self.__roots:\n self.__roots = set()\n for n in self.__nodes:\n if n not in self.__reverse_map:\n self.__roots.add(n)\n return self.__roots", "def root_factors(f, *gens, filter=None, **args):\n args = dict(args)\n\n F = Poly(f, *gens, **args)\n\n if not F.is_Poly:\n return [f]\n\n if F.is_multivariate:\n raise ValueError('multivariate polynomials are not supported')\n\n x = F.gens[0]\n\n zeros = roots(F, filter=filter)\n\n if not zeros:\n factors = [F]\n else:\n factors, N = [], 0\n\n for r, n in ordered(zeros.items()):\n factors, N = factors + [Poly(x - r, x)]*n, N + n\n\n if N < F.degree():\n G = reduce(lambda p, q: p*q, factors)\n factors.append(F.quo(G))\n\n if not isinstance(f, Poly):\n factors = [ f.as_expr() for f in factors ]\n\n return factors", "def PBpoly(n, x):\n n = int(n)\n return Bpoly(n, x-math.floor(x))", "def _realroots_quadratic(a1, a0):\n D = a1*a1 - 4*a0\n if D < 0:\n return []\n SD = math.sqrt(D)\n return [0.5 * (-a1 + SD), 0.5 * (-a1 - SD)]", "def test_find_all_roots(self):\n\n print(\"\\nFalse Position Method: All Roots\")\n\n # input\n xmin = -1e2 # minimum x-value\n xmax = 1e2 # maximum x-value\n step = 1e0 # step-size of interval\n et = 1e-6 # relative error threshold\n\n # function\n roots, ic = FalsePosition.find_all_roots(self.fn, xmin, xmax, step, et)\n\n if len(roots) != 0:\n print(\"\\tAll Roots: {X}\\n\\tIterations: {ic}\".format(X=roots, ic=ic))\n else:\n print(\"\\tNo root found with given initial values.\")", "def getRoots(self):\n return [float(-self.getCoefficients()[0])/self.getCoefficients()[1]]", "def abelian_invariants(self):\n if self.is_trivial:\n return []\n gns = self.generators\n inv = []\n G = self\n H = G.derived_subgroup()\n Hgens = H.generators\n for p in primefactors(G.order()):\n ranks = []\n while True:\n pows = []\n for g in gns:\n elm = g**p\n if not H.contains(elm):\n pows.append(elm)\n K = PermutationGroup(Hgens + pows) if pows else H\n r = G.order()//K.order()\n G = K\n gns = pows\n if r == 1:\n break\n ranks.append(multiplicity(p, r))\n\n if ranks:\n pows = [1]*ranks[0]\n for i in ranks:\n for j in range(i):\n pows[j] = pows[j]*p\n inv.extend(pows)\n inv.sort()\n return inv" ]
[ "0.63752747", "0.6269281", "0.6176525", "0.6108229", "0.6071052", "0.6057403", "0.5983822", "0.5921927", "0.5852689", "0.58482456", "0.57848656", "0.5704563", "0.5699721", "0.56636614", "0.56186897", "0.54831684", "0.54757977", "0.54500794", "0.54273564", "0.5352519", "0.5346999", "0.5332656", "0.5303764", "0.5290045", "0.52771896", "0.5242895", "0.52273047", "0.5193123", "0.51808536", "0.5174949" ]
0.7264747
0
Find ``(L, U)`` such that ``L <= phi^-1(m) <= U``. Examples ======== >>> from sympy.polys.polyroots import _inv_totient_estimate >>> _inv_totient_estimate(192) (192, 840) >>> _inv_totient_estimate(400) (400, 1750)
def _inv_totient_estimate(m): primes = [ d + 1 for d in divisors(m) if isprime(d + 1) ] a, b = 1, 1 for p in primes: a *= p b *= p - 1 L = m U = int(math.ceil(m*(float(a)/b))) P = p = 2 primes = [] while P <= U: p = nextprime(p) primes.append(p) P *= p P //= p b = 1 for p in primes[:-1]: b *= p - 1 U = int(math.ceil(m*(float(P)/b))) return L, U
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def findPotential(L, boundaryConditions, Minv = None):\n\tX = findStableState(L, boundaryConditions, Minv)\n\treturn np.trace(X.T.dot(L).dot(X))", "def Findlt(l,sp,rhs):\n m = sp.M(l)\n return (m / l**3) - rhs", "def linear_inv_state_estimate(results: List[ExperimentResult],\n qubits: List[int]) -> np.ndarray:\n measurement_matrix = np.vstack([\n vec(lifted_pauli(result.setting.out_operator, qubits=qubits)).T.conj()\n for result in results\n ])\n expectations = np.array([result.expectation for result in results])\n rho = pinv(measurement_matrix) @ expectations\n return unvec(rho)", "def coordLookup_ijk(l, I, J):\n k = (l // (I*J)) + 1\n j = (l - k*J*I) // I + 1\n i = l - (j*I + k*J*I)-1\n return i,j,k", "def calcul_travail_ext(x,modU):\n\tr = np.sqrt(x[:,0]*x[:,0] + x[:,1]*x[:,1])\n\tf = r[:]*modU[:]*modU[:]\n\tW = PointMilieu(r,f)\n\treturn W", "def get_estimated_inverse_slope(temp, temp_err, visc, visc_err, rad, rad_error):\n inv_slope = (1/get_slope(temp, visc, rad))\n rel_error = get_relative_error(temp, temp_err, visc, visc_err, rad, rad_error)\n error = rel_error * inv_slope\n return inv_slope, error", "def find_invpow(x,n):\n high = 1\n while high ** n <= x:\n high *= 2\n low = high//2\n while low < high:\n mid = (low + high) // 2\n if low < mid and mid**n < x:\n low = mid\n elif high > mid and mid**n > x:\n high = mid\n else:\n return mid\n return mid + 1", "def inv(P):\n L = cho_factor(P)\n return cho_solve(L, np.eye(P.shape[0]))", "def get_inverse(a):\n if len(a) == len(a[0]):\n i = get_identity(len(a))\n inverse = gaussian_solve(a, i)\n return inverse", "def compute_inverse(in1, in2):\n aL = [in1]\n bL = [in2]\n tL = [0]\n t = 1\n sL = [1]\n s = 0\n q = math.floor((aL[0] / bL[0]))\n r = (aL[0] - (q * bL[0]))\n\n while r > 0:\n temp = (tL[0] - (q * bL[0]))\n tL[0] = t\n t = temp\n temp = (sL[0] - (q * s))\n sL[0] = s\n s = temp\n aL[0] = bL[0]\n bL[0] = r\n q = math.floor(aL[0] / bL[0])\n r = (aL[0] - (q * bL[0]))\n\n inverse = s % in2\n return inverse", "def enthalpy_equality_func(self):\n residual = []\n for i in range(self.num_i):\n residual += [self.inl[i].h.val_SI - self.outl[i].h.val_SI]\n return residual", "def _solveX(L, U, b):\n m, n = L.shape\n # Forward Substitution\n y = list()\n y.insert(0, b[0]/L[0][0])\n for i in range(1, m):\n summ = 0\n for k in range(0, i):\n summ += L[i][k]*y[k]\n y.insert(i, (b[i]-summ)/(L[i][i]))\n\n # Backwards Substitution\n x = [0]*m\n x[m-1] = y[m-1] / U[m-1][m-1]\n for i in range(m - 2, -1, -1):\n summ = 0\n for k in range(i+1, n):\n summ += U[i][k]*x[k]\n x[i] = (y[i] - summ)/U[i][i]\n\n return x", "def epipoles_location(f_mat):\r\n u, s, vh = np.linalg.svd(f_mat)\r\n e_l = vh[-1, :]\r\n e_r = u[:, -1]\r\n # get x, y by dividing by w\r\n e_l = (e_l[0] / e_l[2], e_l[1] / e_l[2])\r\n e_r = (e_r[0] / e_r[2], e_r[1] / e_r[2])\r\n return e_l, e_r", "def linfit(x, y):\n n = len(x)\n if (n != len(y)):\n raise ValueError(\"Lengths of x, y arrays must be equal.\")\n\n sx = sum(x)\n\tsy = sum(y)\n\tsxx = sum(x * x)\n\tsyy = sum(y * y)\n\tsxy = sum(x * y)\n\n m = (n * sxy - sx * sy) / (n * sxx - sx * sx)\n\tb = sy / n - m * sx / n\n\te = y - b - m * x\n\n chi = sum(e * e)\n\ts = (n * syy - sy * sy - m * m * (n * sxx - sx * sx)) / n / (n - 2)\n\tdm = n * s / (n * sxx - sx * sx)\n\tdb = dm * sxx / n\n\n return (m, dm, b, db, chi)", "def findStableState(L, boundaryConditions, Minv = None):\n\tn = L.shape[0]\n\tm = len(boundaryConditions)\n\tVb = np.zeros(m)\n\tpositions = {}\n\tfor i in range(m):\n\t\tcondition = boundaryConditions[i]\n\t\tVb[i] = 
condition[0]\n\t\tpositions[condition[0]] = condition[1]\n\tVb = np.sort(Vb)\n\tBPrime = np.zeros((m, n))\n\tYPrime = np.zeros((m, 3))\n\tfor i in range(m):\n\t\tBPrime[i][int(Vb[i])] = 1\n\t\tYPrime[i] = positions[Vb[i]]\n\n\tif Minv is None:\n\t\tzeroCorner = np.zeros((m, m))\n\t\tM = np.array(np.bmat([[L, -BPrime.T], [BPrime, zeroCorner]]))\n\t\tMinv = np.linalg.inv(M)\n\n\tXT = np.zeros((3, n))\n\t# find x coordinates\n\ty = np.zeros(n + m)\n\ty[n:] = YPrime.T[0]\n\tx = np.dot(Minv, y)\n\tXT[0] = x[:n]\n\t# find y coordinates\n\ty = np.zeros(n + m)\n\ty[n:] = YPrime.T[1]\n\tx = np.dot(Minv, y)\n\tXT[1] = x[:n]\n\t# find z coordinates\n\ty = np.zeros(n + m)\n\ty[n:] = YPrime.T[2]\n\tx = np.dot(Minv, y)\n\tXT[2] = x[:n]\n\n\treturn XT.T", "def inductorenergy(L, I):\n return 1 / 2 * L * I ** 2", "def osmotic_equilibrium(L, nu, mu) :\n N = L**2 / mu\n return N", "def venus_heuristic(self, coeffs, const, l, u):\n if abs(l) < u:\n return self.parallel(coeffs, const, l, u, 'lower')\n else:\n return 0, 0", "def approximate_Psi(L,N_scales,m): \r\n l_max = rough_l_max(L)\r\n (g, _, t) = filter_design(l_max, N_scales)\r\n arange = (0.0, l_max)\r\n \r\n c=[]\r\n for kernel in g:\r\n c.append(cheby_coeff(kernel, m, m+1, arange))\r\n\r\n # c2=[]\r\n # for s in range(N_scales+1):\r\n # c2.append(cheby_coeff2(m,s+1))\r\n\r\n psi=cheby_op2(L, c, arange)\r\n \r\n \r\n \r\n psi_inv=[]\r\n for i in range(N_scales+1):\r\n psi[i]=np.float32(psi[i]) # convert psi to float 32\r\n psi_inv.append(np.linalg.inv(psi[i]))\r\n \r\n return psi,psi_inv", "def general_poly (L):\n def inside(x):\n result = 0\n pwr = len(L) - 1\n for l in L:\n result = result + l * x ** pwr\n pwr -= 1\n return result\n return inside", "def get_l_n_u_inegral(ppc, lower_bound, upper_bound, Nhrs=2):\n \"\"\"either lower and upper bound must be positive\"\"\"\n gens_hrs = ppc['gen'][:, 0]\n gens_hrs = np.sort(gens_hrs)\n \n n_buses = set_n_buses(ppc, Nhrs)\n n_gens = len(gens_hrs) // 2 \n l = np.zeros(n_buses)\n u = np.zeros(n_buses)\n for i in range(len(l)):\n if (i+1) in gens_hrs:\n l[i] = lower_bound\n u[i] = upper_bound\n else:\n l[i] = -np.inf\n u[i] = np.inf\n return l, u", "def find_h_inv(picture_pixels, decal_pixels):\n # Transform the points into RP2 points\n p_pixels = [[x,y,1] for (x,y) in picture_pixels]\n d_pixels = [[x,y,1] for (x,y) in decal_pixels]\n\n # Build the right side of linear system\n vector_d = [0]*12+[1]\n # Build 13x13 matrix\n matrix_u = []\n for idx, pixel in enumerate(d_pixels):\n for i in range(3):\n # coefficients for lambdas to complete each row \n lambdas = [0,0,0,0]\n lambdas[idx] = -p_pixels[idx][i]\n # Build each row by adding the RP2 points, and the lambdas coefficients and filling with 0's\n matrix_u.append([0]*(i*3)+pixel+[0]*(6-i*3)+lambdas)\n # Append lambda1=1 equation\n matrix_u.append([0]*9+[1]+[0]*3)\n # Solve the linear system\n sol = solve_lin(matrix_u, vector_d)\n # The Homography is built with the first 9 elements\n # of the solution, the remaining 3 are the lambdas\n hom = [sol[i*3:(i+1)*3] for i in range(3)]\n h_inv = np.linalg.inv(hom)\n \n return h_inv", "def test_diff_inu(self):\n l = 1\n x = np.array([1e-9])\n n = 10\n bessel_i = bessel_sk.lniknu(x, l)[0]\n bessel_i_n = bessel_sk.lniknu(n*x, l)[0]\n result = (-np.exp(bessel_i_n[:, -1] + bessel_i[:, -2])\n + np.exp(bessel_i[:, -1] + bessel_i_n[:, -2]))\n expected = ( (-n+1)*x/3+\n (-n/3 - n**3/5 + n**2/3 + 1/5)*x**3/6)\n assert_almost_equal(result/expected, 1, decimal=7)", "def _get_inv(self):\n m,d = self.B.shape\n Im = np.eye(m)\n Id 
= np.eye(d)\n BBt = [email protected]\n I_BBt_inv = np.linalg.pinv(Im + BBt)\n \n return (1/self.alpha)*(Id - self.B.T@( [email protected]/self.alpha))", "def chol_inv(L):\r\n\r\n return lapack.dtrtri(L, lower=True)[0]", "def get_sol(self):", "def phi2_coefficient(L):\r\n\r\n if 0 < L < 120:\r\n return L / 120\r\n if L >= 120:\r\n return 1", "def getL2Error(self,exactSolution):\n value = 0\n error = np.array(self.solution)-np.array([exactSolution(x) for x in self.triangulation.points])\n for ele,triPoints in enumerate(self.triangulation.simplices):\n transformMatrix,translateVector = self.calculateTransform(ele)\n determinant = abs(np.linalg.det(transformMatrix))\n #Last vector is the precalculated integral of the basisfunctions over a reference element\n value+=determinant*np.dot(error[triPoints]**2,np.array([1/6.,1/3.,1/3.]))\n return(math.sqrt(value))", "def _solve(self, mu=None):\n pass", "def solve(grid):\n return search(grid_values(grid))" ]
[ "0.5654112", "0.5446574", "0.5408425", "0.5286101", "0.5229726", "0.5152003", "0.51416683", "0.5085175", "0.5065494", "0.5054303", "0.5050599", "0.50233173", "0.5016458", "0.5014176", "0.49979034", "0.49948668", "0.49875477", "0.49864754", "0.49744186", "0.4971717", "0.4959693", "0.4946224", "0.4943046", "0.49348006", "0.49071288", "0.48958656", "0.48950967", "0.48905146", "0.4887918", "0.48824245" ]
0.6683084
0
Calculate exact roots of a solvable irreducible quintic with rational coefficients. Return an empty list if the quintic is reducible or not solvable.
def roots_quintic(f): result = [] coeff_5, coeff_4, p_, q_, r_, s_ = f.all_coeffs() if not all(coeff.is_Rational for coeff in (coeff_5, coeff_4, p_, q_, r_, s_)): return result if coeff_5 != 1: f = Poly(f / coeff_5) _, coeff_4, p_, q_, r_, s_ = f.all_coeffs() # Cancel coeff_4 to form x^5 + px^3 + qx^2 + rx + s if coeff_4: p = p_ - 2*coeff_4*coeff_4/5 q = q_ - 3*coeff_4*p_/5 + 4*coeff_4**3/25 r = r_ - 2*coeff_4*q_/5 + 3*coeff_4**2*p_/25 - 3*coeff_4**4/125 s = s_ - coeff_4*r_/5 + coeff_4**2*q_/25 - coeff_4**3*p_/125 + 4*coeff_4**5/3125 x = f.gen f = Poly(x**5 + p*x**3 + q*x**2 + r*x + s) else: p, q, r, s = p_, q_, r_, s_ quintic = PolyQuintic(f) # Eqn standardized. Algo for solving starts here if not f.is_irreducible: return result f20 = quintic.f20 # Check if f20 has linear factors over domain Z if f20.is_irreducible: return result # Now, we know that f is solvable for _factor in f20.factor_list()[1]: if _factor[0].is_linear: theta = _factor[0].root(0) break d = discriminant(f) delta = sqrt(d) # zeta = a fifth root of unity zeta1, zeta2, zeta3, zeta4 = quintic.zeta T = quintic.T(theta, d) tol = S(1e-10) alpha = T[1] + T[2]*delta alpha_bar = T[1] - T[2]*delta beta = T[3] + T[4]*delta beta_bar = T[3] - T[4]*delta disc = alpha**2 - 4*beta disc_bar = alpha_bar**2 - 4*beta_bar l0 = quintic.l0(theta) Stwo = S(2) l1 = _quintic_simplify((-alpha + sqrt(disc)) / Stwo) l4 = _quintic_simplify((-alpha - sqrt(disc)) / Stwo) l2 = _quintic_simplify((-alpha_bar + sqrt(disc_bar)) / Stwo) l3 = _quintic_simplify((-alpha_bar - sqrt(disc_bar)) / Stwo) order = quintic.order(theta, d) test = (order*delta.n()) - ( (l1.n() - l4.n())*(l2.n() - l3.n()) ) # Comparing floats if not comp(test, 0, tol): l2, l3 = l3, l2 # Now we have correct order of l's R1 = l0 + l1*zeta1 + l2*zeta2 + l3*zeta3 + l4*zeta4 R2 = l0 + l3*zeta1 + l1*zeta2 + l4*zeta3 + l2*zeta4 R3 = l0 + l2*zeta1 + l4*zeta2 + l1*zeta3 + l3*zeta4 R4 = l0 + l4*zeta1 + l3*zeta2 + l2*zeta3 + l1*zeta4 Res = [None, [None]*5, [None]*5, [None]*5, [None]*5] Res_n = [None, [None]*5, [None]*5, [None]*5, [None]*5] # Simplifying improves performance a lot for exact expressions R1 = _quintic_simplify(R1) R2 = _quintic_simplify(R2) R3 = _quintic_simplify(R3) R4 = _quintic_simplify(R4) # hard-coded results for [factor(i) for i in _vsolve(x**5 - a - I*b, x)] x0 = z**(S(1)/5) x1 = sqrt(2) x2 = sqrt(5) x3 = sqrt(5 - x2) x4 = I*x2 x5 = x4 + I x6 = I*x0/4 x7 = x1*sqrt(x2 + 5) sol = [x0, -x6*(x1*x3 - x5), x6*(x1*x3 + x5), -x6*(x4 + x7 - I), x6*(-x4 + x7 + I)] R1 = R1.as_real_imag() R2 = R2.as_real_imag() R3 = R3.as_real_imag() R4 = R4.as_real_imag() for i, s in enumerate(sol): Res[1][i] = _quintic_simplify(s.xreplace({z: R1[0] + I*R1[1]})) Res[2][i] = _quintic_simplify(s.xreplace({z: R2[0] + I*R2[1]})) Res[3][i] = _quintic_simplify(s.xreplace({z: R3[0] + I*R3[1]})) Res[4][i] = _quintic_simplify(s.xreplace({z: R4[0] + I*R4[1]})) for i in range(1, 5): for j in range(5): Res_n[i][j] = Res[i][j].n() Res[i][j] = _quintic_simplify(Res[i][j]) r1 = Res[1][0] r1_n = Res_n[1][0] for i in range(5): if comp(im(r1_n*Res_n[4][i]), 0, tol): r4 = Res[4][i] break # Now we have various Res values. Each will be a list of five # values. We have to pick one r value from those five for each Res u, v = quintic.uv(theta, d) testplus = (u + v*delta*sqrt(5)).n() testminus = (u - v*delta*sqrt(5)).n() # Evaluated numbers suffixed with _n # We will use evaluated numbers for calculation. Much faster. 
r4_n = r4.n() r2 = r3 = None for i in range(5): r2temp_n = Res_n[2][i] for j in range(5): # Again storing away the exact number and using # evaluated numbers in computations r3temp_n = Res_n[3][j] if (comp((r1_n*r2temp_n**2 + r4_n*r3temp_n**2 - testplus).n(), 0, tol) and comp((r3temp_n*r1_n**2 + r2temp_n*r4_n**2 - testminus).n(), 0, tol)): r2 = Res[2][i] r3 = Res[3][j] break if r2 is not None: break else: return [] # fall back to normal solve # Now, we have r's so we can get roots x1 = (r1 + r2 + r3 + r4)/5 x2 = (r1*zeta4 + r2*zeta3 + r3*zeta2 + r4*zeta1)/5 x3 = (r1*zeta3 + r2*zeta1 + r3*zeta4 + r4*zeta2)/5 x4 = (r1*zeta2 + r2*zeta4 + r3*zeta1 + r4*zeta3)/5 x5 = (r1*zeta1 + r2*zeta2 + r3*zeta3 + r4*zeta4)/5 result = [x1, x2, x3, x4, x5] # Now check if solutions are distinct saw = set() for r in result: r = r.n(2) if r in saw: # Roots were identical. Abort, return [] # and fall back to usual solve return [] saw.add(r) # Restore to original equation where coeff_4 is nonzero if coeff_4: result = [x - coeff_4 / 5 for x in result] return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _realroots_quartic(a3, a2, a1, a0):\n # see http://mathworld.wolfram.com/QuarticEquation.html for details\n ys = _realroots_cubic(-a2, a1*a3 - 4*a0, 4*a0*a2 - a1*a1 - a0*a3*a3)\n ys = [y for y in ys if a3*a3-4*a2+4*y >= 0 and y*y-4*a0 >= 0]\n if not ys:\n return []\n y1 = min(ys)\n if a3*y1-2*a1 < 0:\n return (_realroots_quadratic(0.5*(a3+math.sqrt(a3*a3-4*a2+4*y1)), 0.5*(y1-math.sqrt(y1*y1-4*a0))) +\n _realroots_quadratic(0.5*(a3-math.sqrt(a3*a3-4*a2+4*y1)), 0.5*(y1+math.sqrt(y1*y1-4*a0))))\n else:\n return (_realroots_quadratic(0.5*(a3+math.sqrt(a3*a3-4*a2+4*y1)), 0.5*(y1+math.sqrt(y1*y1-4*a0))) +\n _realroots_quadratic(0.5*(a3-math.sqrt(a3*a3-4*a2+4*y1)), 0.5*(y1-math.sqrt(y1*y1-4*a0))))", "def _realroots_quadratic(a1, a0):\n D = a1*a1 - 4*a0\n if D < 0:\n return []\n SD = math.sqrt(D)\n return [0.5 * (-a1 + SD), 0.5 * (-a1 - SD)]", "def getRoots(self):\n # This part is for exercise 11\n # return []\n \n # This part is for exercise 12\n if self.getDegree() == 0:\n return []\n if self.getDegree() == 1:\n return LinearPolynomial(self.getCoefficients()).getRoots()\n if self.getDegree() == 2:\n return QuadraticPolynomial(self.getCoefficients()).getRoots()\n else:\n current_polynomial = Polynomial(self.getCoefficients())\n roots = []\n \n while current_polynomial.__coefficients[0] == 0:\n roots.append(0)\n current_polynomial.__coefficients = current_polynomial.__coefficients[1:]\n \n while current_polynomial.getDegree() > 2:\n\n #Initialization\n x = (random.random(), random.random(), random.random())\n while abs(current_polynomial.evaluate(x[2])) > EPSILON:\n x = (random.random(), random.random(), random.random())\n nb_iters = 0\n while (abs(current_polynomial.evaluate(x[2])) > EPSILON or abs(x[2] - x[1]) > TOLERANCE) and nb_iters < MAX_ITERATIONS:\n x = getNextApproximations(current_polynomial.evaluate, x)\n nb_iters += 1\n\n roots.append(x[2])\n \n if abs(x[2].imag) < TOLERANCE:\n current_polynomial = current_polynomial.divide(Polynomial([-x[2].real, 1]))[0]\n else:\n roots.append(x[2].conjugate())\n current_polynomial = current_polynomial.divide(Polynomial([abs(x[2])**2, -2*x[2].real, 1]))[0]\n \n roots += current_polynomial.getRoots()\n \n for i in range(len(roots)):\n if roots[i].imag == 0:\n roots[i] = roots[i].real\n \n return sorted(roots, key = lambda x: (x.real, x.imag))", "def realpolyroots(*cs):\n if not cs:\n return [0]\n try:\n f = 1.0/cs[0]\n cs = [f*c for c in cs[1:]]\n except ArithmeticError:\n return realpolyroots(*cs[1:])\n else:\n n = len(cs)\n if n == 0:\n return []\n elif n == 1:\n return [-cs[0]]\n elif n == 2:\n return _realroots_quadratic(*cs)\n elif n == 3:\n return _realroots_cubic(*cs)\n elif n == 4:\n return _realroots_quartic(*cs)\n else:\n raise RuntimeError(\"realpolyroots solver currently limited to polynoms up to the power of 4\")", "def roots_quartic(f):\n _, a, b, c, d = f.monic().all_coeffs()\n\n if not d:\n return [S.Zero] + roots([1, a, b, c], multiple=True)\n elif (c/a)**2 == d:\n x, m = f.gen, c/a\n\n g = Poly(x**2 + a*x + b - 2*m, x)\n\n z1, z2 = roots_quadratic(g)\n\n h1 = Poly(x**2 - z1*x + m, x)\n h2 = Poly(x**2 - z2*x + m, x)\n\n r1 = roots_quadratic(h1)\n r2 = roots_quadratic(h2)\n\n return r1 + r2\n else:\n a2 = a**2\n e = b - 3*a2/8\n f = _mexpand(c + a*(a2/8 - b/2))\n aon4 = a/4\n g = _mexpand(d - aon4*(a*(3*a2/64 - b/4) + c))\n\n if f.is_zero:\n y1, y2 = [sqrt(tmp) for tmp in\n roots([1, e, g], multiple=True)]\n return [tmp - aon4 for tmp in [-y1, -y2, y1, y2]]\n if g.is_zero:\n y = [S.Zero] + roots([1, 0, e, f], multiple=True)\n 
return [tmp - aon4 for tmp in y]\n else:\n # Descartes-Euler method, see [7]\n sols = _roots_quartic_euler(e, f, g, aon4)\n if sols:\n return sols\n # Ferrari method, see [1, 2]\n p = -e**2/12 - g\n q = -e**3/108 + e*g/3 - f**2/8\n TH = Rational(1, 3)\n\n def _ans(y):\n w = sqrt(e + 2*y)\n arg1 = 3*e + 2*y\n arg2 = 2*f/w\n ans = []\n for s in [-1, 1]:\n root = sqrt(-(arg1 + s*arg2))\n for t in [-1, 1]:\n ans.append((s*w - t*root)/2 - aon4)\n return ans\n\n # whether a Piecewise is returned or not\n # depends on knowing p, so try to put\n # in a simple form\n p = _mexpand(p)\n\n\n # p == 0 case\n y1 = e*Rational(-5, 6) - q**TH\n if p.is_zero:\n return _ans(y1)\n\n # if p != 0 then u below is not 0\n root = sqrt(q**2/4 + p**3/27)\n r = -q/2 + root # or -q/2 - root\n u = r**TH # primary root of solve(x**3 - r, x)\n y2 = e*Rational(-5, 6) + u - p/u/3\n if fuzzy_not(p.is_zero):\n return _ans(y2)\n\n # sort it out once they know the values of the coefficients\n return [Piecewise((a1, Eq(p, 0)), (a2, True))\n for a1, a2 in zip(_ans(y1), _ans(y2))]", "def test_solveset_real_rational():\n x = Symbol('x', real=True)\n y = Symbol('y', real=True)\n assert solveset_real((x - y**3) / ((y**2)*sqrt(1 - y**2)), x) \\\n == FiniteSet(y**3)\n # issue 4486\n assert solveset_real(2*x/(x + 2) - 1, x) == FiniteSet(2)", "def test_solveset_real_rational():\n assert solveset_real((x - y**3) / ((y**2)*sqrt(1 - y**2)), x) \\\n == FiniteSet(y**3)\n # issue 4486\n assert solveset_real(2*x/(x + 2) - 1, x) == FiniteSet(2)", "def roots_quadratic(f):\n\n a, b, c = f.all_coeffs()\n dom = f.get_domain()\n\n def _sqrt(d):\n # remove squares from square root since both will be represented\n # in the results; a similar thing is happening in roots() but\n # must be duplicated here because not all quadratics are binomials\n co = []\n other = []\n for di in Mul.make_args(d):\n if di.is_Pow and di.exp.is_Integer and di.exp % 2 == 0:\n co.append(Pow(di.base, di.exp//2))\n else:\n other.append(di)\n if co:\n d = Mul(*other)\n co = Mul(*co)\n return co*sqrt(d)\n return sqrt(d)\n\n def _simplify(expr):\n if dom.is_Composite:\n return factor(expr)\n else:\n from sympy.simplify.simplify import simplify\n return simplify(expr)\n\n if c is S.Zero:\n r0, r1 = S.Zero, -b/a\n\n if not dom.is_Numerical:\n r1 = _simplify(r1)\n elif r1.is_negative:\n r0, r1 = r1, r0\n elif b is S.Zero:\n r = -c/a\n if not dom.is_Numerical:\n r = _simplify(r)\n\n R = _sqrt(r)\n r0 = -R\n r1 = R\n else:\n d = b**2 - 4*a*c\n A = 2*a\n B = -b/A\n\n if not dom.is_Numerical:\n d = _simplify(d)\n B = _simplify(B)\n\n D = factor_terms(_sqrt(d)/A)\n r0 = B - D\n r1 = B + D\n if a.is_negative:\n r0, r1 = r1, r0\n elif not dom.is_Numerical:\n r0, r1 = [expand_2arg(i) for i in (r0, r1)]\n\n return [r0, r1]", "def _roots_quartic_euler(p, q, r, a):\n # solve the resolvent equation\n x = Dummy('x')\n eq = 64*x**3 + 32*p*x**2 + (4*p**2 - 16*r)*x - q**2\n xsols = list(roots(Poly(eq, x), cubics=False).keys())\n xsols = [sol for sol in xsols if sol.is_rational and sol.is_nonzero]\n if not xsols:\n return None\n R = max(xsols)\n c1 = sqrt(R)\n B = -q*c1/(4*R)\n A = -R - p/2\n c2 = sqrt(A + B)\n c3 = sqrt(A - B)\n return [c1 - c2 - a, -c1 - c3 - a, -c1 + c3 - a, c1 + c2 - a]", "def getRoots(self):\n a, b, c = self.getCoefficients()[2], self.getCoefficients()[1], self.getCoefficients()[0]\n delta = b**2 - 4*a*c\n if delta >= 0:\n roots = sorted([(-b - math.sqrt(delta))/(2*a), (-b + math.sqrt(delta))/(2*a)])\n else:\n roots = sorted([(-b - math.sqrt(-delta)*1j)/(2*a), (-b + 
math.sqrt(-delta)*1j)/(2*a)], key=lambda x: (x.real, x.imag))\n return roots", "def find_poly_roots(poly, initial_guess = 0.0, limit = 0.00001, max_iterations = 1000):\n solutions = []\n # Find solutions numerically for n > 0, split them off until n = 2\n for q in range(polynomial.order(poly) - 2):\n x = find_poly_root(poly, initial_guess, limit, max_iterations)\n if not x:\n break\n poly = polynomial.div(poly, polynomial.make_poly([-x, 1]))\n solutions.append(x)\n # Find the rest of the roots analytically\n if polynomial.order(poly) == 1:\n solutions.append(- polynomial.coeff(poly, 1) / polynomial.coeff(poly, 0))\n elif polynomial.order(poly) == 2:\n a = polynomial.coeff(poly, 2)\n b = polynomial.coeff(poly, 1)\n c = polynomial.coeff(poly, 0)\n d = b ** 2 - 4 * a * c\n if d == 0:\n solutions.append(-b / (2 * a))\n elif d > 0:\n solutions.append((- b + sqrt(d)) / (2 * a))\n solutions.append((- b - sqrt(d)) / (2 * a))\n return solutions", "def roots_cubic(f, trig=False):\n if trig:\n a, b, c, d = f.all_coeffs()\n p = (3*a*c - b**2)/(3*a**2)\n q = (2*b**3 - 9*a*b*c + 27*a**2*d)/(27*a**3)\n D = 18*a*b*c*d - 4*b**3*d + b**2*c**2 - 4*a*c**3 - 27*a**2*d**2\n if (D > 0) == True:\n rv = []\n for k in range(3):\n rv.append(2*sqrt(-p/3)*cos(acos(q/p*sqrt(-3/p)*Rational(3, 2))/3 - k*pi*Rational(2, 3)))\n return [i - b/3/a for i in rv]\n\n # a*x**3 + b*x**2 + c*x + d -> x**3 + a*x**2 + b*x + c\n _, a, b, c = f.monic().all_coeffs()\n\n if c is S.Zero:\n x1, x2 = roots([1, a, b], multiple=True)\n return [x1, S.Zero, x2]\n\n # x**3 + a*x**2 + b*x + c -> u**3 + p*u + q\n p = b - a**2/3\n q = c - a*b/3 + 2*a**3/27\n\n pon3 = p/3\n aon3 = a/3\n\n u1 = None\n if p is S.Zero:\n if q is S.Zero:\n return [-aon3]*3\n u1 = -root(q, 3) if q.is_positive else root(-q, 3)\n elif q is S.Zero:\n y1, y2 = roots([1, 0, p], multiple=True)\n return [tmp - aon3 for tmp in [y1, S.Zero, y2]]\n elif q.is_real and q.is_negative:\n u1 = -root(-q/2 + sqrt(q**2/4 + pon3**3), 3)\n\n coeff = I*sqrt(3)/2\n if u1 is None:\n u1 = S.One\n u2 = Rational(-1, 2) + coeff\n u3 = Rational(-1, 2) - coeff\n b, c, d = a, b, c # a, b, c, d = S.One, a, b, c\n D0 = b**2 - 3*c # b**2 - 3*a*c\n D1 = 2*b**3 - 9*b*c + 27*d # 2*b**3 - 9*a*b*c + 27*a**2*d\n C = root((D1 + sqrt(D1**2 - 4*D0**3))/2, 3)\n return [-(b + uk*C + D0/C/uk)/3 for uk in [u1, u2, u3]] # -(b + uk*C + D0/C/uk)/3/a\n\n u2 = u1*(Rational(-1, 2) + coeff)\n u3 = u1*(Rational(-1, 2) - coeff)\n\n if p is S.Zero:\n return [u1 - aon3, u2 - aon3, u3 - aon3]\n\n soln = [\n -u1 + pon3/u1 - aon3,\n -u2 + pon3/u2 - aon3,\n -u3 + pon3/u3 - aon3\n ]\n\n return soln", "def _try_heuristics(f):\n if f.is_ground:\n return []\n if f.is_monomial:\n return [S.Zero]*f.degree()\n\n if f.length() == 2:\n if f.degree() == 1:\n return list(map(cancel, roots_linear(f)))\n else:\n return roots_binomial(f)\n\n result = []\n\n for i in [-1, 1]:\n if not f.eval(i):\n f = f.quo(Poly(f.gen - i, f.gen))\n result.append(i)\n break\n\n n = f.degree()\n\n if n == 1:\n result += list(map(cancel, roots_linear(f)))\n elif n == 2:\n result += list(map(cancel, roots_quadratic(f)))\n elif f.is_cyclotomic:\n result += roots_cyclotomic(f)\n elif n == 3 and cubics:\n result += roots_cubic(f, trig=trig)\n elif n == 4 and quartics:\n result += roots_quartic(f)\n elif n == 5 and quintics:\n result += roots_quintic(f)\n\n return result", "def test_solve_polynomial_cv_1a():\n assert solveset_real(sqrt(x) - 1, x) == FiniteSet(1)\n assert solveset_real(sqrt(x) - 2, x) == FiniteSet(4)\n assert solveset_real(x**Rational(1, 4) - 2, x) == 
FiniteSet(16)\n assert solveset_real(x**Rational(1, 3) - 3, x) == FiniteSet(27)\n assert solveset_real(x*(x**(S(1) / 3) - 3), x) == \\\n FiniteSet(S(0), S(27))", "def _realroots_cubic(a2, a1, a0):\n # see http://mathworld.wolfram.com/CubicFormula.html for details\n\n Q = (3*a1 - a2*a2) / 9.0\n R = (9*a2*a1 - 27*a0 - 2*a2*a2*a2) / 54.0\n D = Q*Q*Q + R*R\n\n if D > 0: # one real and two complex roots\n SD = math.sqrt(D)\n if R + SD >= 0:\n S = (R + SD)**(1/3.0)\n else:\n S = -(-R - SD)**(1/3.0)\n if R - SD >= 0:\n T = (R - SD)**(1/3.0)\n else:\n T = -(SD - R)**(1/3.0)\n return [S + T - a2/3.0]\n elif D == 0:\n if Q == 0: # one real root (R==0)\n return [-a2/3.0]\n else: # two real roots (R>0, Q<0)\n S = -math.sqrt(-Q)\n return [2*S - a2/3.0, -S - a2/3.0]\n else: # three real roots (Q<0)\n SQ = math.sqrt(-Q)\n arg = R / (SQ**3)\n if arg >= 1:\n theta = 0\n elif arg <= -1:\n theta = math.pi\n else:\n theta = math.acos(R/(SQ**3))\n return [2 * SQ * math.cos((theta + 2*2*i*math.pi)/3.0) - a2/3.0 for i in range(3)]", "def test_solve_polynomial_cv_1a():\n assert solveset_real(sqrt(x) - 1, x) == FiniteSet(1)\n assert solveset_real(sqrt(x) - 2, x) == FiniteSet(4)\n assert solveset_real(x**Rational(1, 4) - 2, x) == FiniteSet(16)\n assert solveset_real(x**Rational(1, 3) - 3, x) == FiniteSet(27)\n assert solveset_real(x*(x**(S.One / 3) - 3), x) == \\\n FiniteSet(S.Zero, S(27))", "def test_roots_slow():\n a, b, c, d, x = symbols(\"a,b,c,d,x\")\n\n f1 = x ** 2 * c + (a / b) + x * c * d - a\n f2 = x ** 2 * (a + b * (c - d) * a) + x * a * b * c / (b * d - d) + (a * d - c / d)\n\n assert list(roots(f1, x).values()) == [1, 1]\n assert list(roots(f2, x).values()) == [1, 1]\n\n (zz, yy, xx, zy, zx, yx, k) = symbols(\"zz,yy,xx,zy,zx,yx,k\")\n\n e1 = (zz - k) * (yy - k) * (xx - k) + zy * yx * zx + zx - zy - yx\n e2 = (zz - k) * yx * yx + zx * (yy - k) * zx + zy * zy * (xx - k)\n\n assert list(roots(e1 - e2, k).values()) == [1, 1, 1]\n\n f = x ** 3 + 2 * x ** 2 + 8\n R = list(roots(f).keys())\n\n assert not any(i for i in [f.subs(x, ri).n(chop=True) for ri in R])", "def roots_linear(f):\n r = -f.nth(0)/f.nth(1)\n dom = f.get_domain()\n\n if not dom.is_Numerical:\n if dom.is_Composite:\n r = factor(r)\n else:\n from sympy.simplify.simplify import simplify\n r = simplify(r)\n\n return [r]", "def solve(n=5000,C=-6*10**11,a=900,b=3):\n coeffs = np.zeros(n+2)\n coeffs[0] = a-b*n\n coeffs[1] = b*(n+1) - a\n coeffs[-3] = -C\n coeffs[-2] = 2*C - a\n coeffs[-1] = a+b-C\n mp.dps = 27\n roots = polyroots(coeffs)\n for root in roots:\n print root", "def iroots(a, b, c):\n discriminant = b*b-4*a*c\n if discriminant == 0:\n return -b/(2*a), None\n else:\n return (-b+isqrt(discriminant))/(2*a), (-b-isqrt(discriminant))/(2*a)", "def test_cases():\r\n quadratic_roots(1,3,-21)\r\n quadratic_roots(2,-4,-6)\r\n quadratic_roots(1,4,-12)\r\n quadratic_roots(4,12,9)\r\n quadratic_roots(-2,-11,-21)\r\n quadratic_roots(4,1,4)\r\n quadratic_roots(1,1,0)\r\n quadratic_roots(1,0,-16)\r\n quadratic_roots(1,-14,-49)\r\n quadratic_roots(1,10,25)", "def resolve(self):\n results = []\n for sym in self._symbols:\n lfrac = self._getFrac(str(self._left_operand))\n rfrac = self._getFrac(str(self._right_operand))\n frac = lfrac + ['-'] + rfrac\n \n expr = self._unfraction(frac)\n den = self._getDenom(str(self._left_operand)+'-'+str(self._right_operand))\n\n polyExpr = Poly(expr, sym)\n if den!='' : polyDen = Poly(den, sym)\n else : polyDen = Poly('1', sym)\n\n posiCase = solve_poly_inequality(polyDen, '>')\n negaCase = solve_poly_inequality(polyDen, 
'<')\n posCase = posiCase[0]\n for cas in posiCase : posCase=posCase.union(cas)\n negCase = negaCase[0]\n for cas in negaCase : negCase=negCase.union(cas)\n \n posiSol = solve_poly_inequality( polyExpr, self._operator)\n negaSol = solve_poly_inequality(-polyExpr, self._operator)\n posSol = posiSol[0]\n for cas in posiSol : posSol=posSol.union(cas)\n negSol = negaSol[0]\n for cas in negaSol : negSol=negSol.union(cas)\n\n \n result = (posCase.intersect(posSol)).union(negCase.intersect(negSol))\n results.append(result)\n return results", "def test_cubic_roots(roots, a0, a1, a2, a3=None, tol=1.0e-12):\n\n N = len(a0)\n for n in range(N):\n c0 = a0[n]\n c1 = a1[n]\n c2 = a2[n]\n c3 = a3[n]\n\n print(f\"Polynomial {n}: a = {(c0,c1,c2,c3)}\")\n\n rts = np.unique(roots[n])\n rts = rts[~np.isnan(rts)]\n\n for x in rts:\n f = c0 + c1 * x + c2 * x**2 + c3 * x**3\n ok = np.abs(f) <= tol\n\n print(f\" root x = {x}: f(x) = {f} {'OK' if ok else 'FAILED'}\")\n\n if not ok:\n raise Exception(\"NOT OK!\")\n\n if len(rts) == 0:\n print(\" no real roots.\")", "def almost_positive_roots_decomposition(self):\n # TODO: this should use a generic function for computing\n # orbits under the action of a group:\n # def orbits(seeds, operators)\n # INPUT:\n # - seeds: a list of elements\n # - operators: a list of functions\n #\n # Returns the orbits generated by seeds under the action of the operators\n tau_plus, tau_minus = self.tau_plus_minus()\n\n I = set(self.index_set())\n Delta = self.simple_roots()\n L, R = self.cartan_type().index_set_bipartition()\n\n orbits = []\n while I:\n i = I.pop()\n alpha = -self.simple_root(i)\n orbit = [alpha]\n if i in L:\n plus = False\n beta = tau_plus(alpha)\n else:\n plus = True\n beta = tau_minus(alpha)\n while -beta not in Delta and beta not in orbit:\n orbit.append(beta)\n if beta in Delta:\n j = beta.leading_support()\n I.discard(j)\n if plus:\n beta = tau_plus(beta)\n else:\n beta = tau_minus(beta)\n plus = not plus\n if -beta in Delta:\n orbit.append(beta)\n orbits.append(orbit)\n return orbits", "def roots(self):\n return list(self.positive_roots()) + list(self.negative_roots())", "def arroots(self):\n return self.arpoly.roots()", "def roots_cyclotomic(f, factor=False):\n L, U = _inv_totient_estimate(f.degree())\n\n for n in range(L, U + 1):\n g = cyclotomic_poly(n, f.gen, polys=True)\n\n if f.expr == g.expr:\n break\n else: # pragma: no cover\n raise RuntimeError(\"failed to find index of a cyclotomic polynomial\")\n\n roots = []\n\n if not factor:\n # get the indices in the right order so the computed\n # roots will be sorted\n h = n//2\n ks = [i for i in range(1, n + 1) if igcd(i, n) == 1]\n ks.sort(key=lambda x: (x, -1) if x <= h else (abs(x - n), 1))\n d = 2*I*pi/n\n for k in reversed(ks):\n roots.append(exp(k*d).expand(complex=True))\n else:\n g = Poly(f, extension=root(-1, n))\n\n for h, _ in ordered(g.factor_list()[1]):\n roots.append(-h.TC())\n\n return roots", "def test_find_all_roots(self):\n\n print(\"\\nFalse Position Method: All Roots\")\n\n # input\n xmin = -1e2 # minimum x-value\n xmax = 1e2 # maximum x-value\n step = 1e0 # step-size of interval\n et = 1e-6 # relative error threshold\n\n # function\n roots, ic = FalsePosition.find_all_roots(self.fn, xmin, xmax, step, et)\n\n if len(roots) != 0:\n print(\"\\tAll Roots: {X}\\n\\tIterations: {ic}\".format(X=roots, ic=ic))\n else:\n print(\"\\tNo root found with given initial values.\")", "def almost_positive_roots(self):\n assert self.cartan_type().is_finite()\n return sorted([ -beta for beta in 
self.simple_roots() ] + list(self.positive_roots()))", "def test_find_least_rational_surfaces():\n # simple test, linear iota going from 1 to 3\n iota = PowerSeriesProfile([1, 2])\n rhor, ior = find_most_rational_surfaces(iota, 10)\n rho, io = find_least_rational_surfaces(iota, 10)\n # to compare how rational/irrational things are, we use the length of the\n # continued fraction. Not a great test due to rounding errors, but seems to work\n lio = [len(dec_to_cf(i)) for i in io]\n lior = [len(dec_to_cf(i)) for i in ior]\n max_rational = max(lior)\n\n assert np.all(np.array(lio) > max_rational)" ]
[ "0.6811202", "0.6796312", "0.6786293", "0.6716183", "0.66416746", "0.65845186", "0.65421104", "0.65183496", "0.65000635", "0.6442644", "0.64239186", "0.63015795", "0.6251539", "0.62388915", "0.62045044", "0.6190044", "0.61316377", "0.60060287", "0.5985993", "0.58836967", "0.5878759", "0.5810418", "0.58023614", "0.5771291", "0.5748768", "0.57339907", "0.57324994", "0.56746197", "0.56509835", "0.56491137" ]
0.7315998
0
Compute coefficient basis for a polynomial over integers. Returns the integer ``div`` such that substituting ``x = div*y`` ``p(x) = m*q(y)`` where the coefficients of ``q`` are smaller than those of ``p``. For example ``x**5 + 512*x + 1024 = 0`` with ``div = 4`` becomes ``y**5 + 2*y + 1 = 0`` Returns the integer ``div`` or ``None`` if there is no possible scaling. Examples ======== >>> from sympy.polys import Poly >>> from sympy.abc import x >>> from sympy.polys.polyroots import _integer_basis >>> p = Poly(x**5 + 512*x + 1024, x, domain='ZZ') >>> _integer_basis(p) 4
def _integer_basis(poly): monoms, coeffs = list(zip(*poly.terms())) monoms, = list(zip(*monoms)) coeffs = list(map(abs, coeffs)) if coeffs[0] < coeffs[-1]: coeffs = list(reversed(coeffs)) n = monoms[0] monoms = [n - i for i in reversed(monoms)] else: return None monoms = monoms[:-1] coeffs = coeffs[:-1] # Special case for two-term polynominals if len(monoms) == 1: r = Pow(coeffs[0], S.One/monoms[0]) if r.is_Integer: return int(r) else: return None divs = reversed(divisors(gcd_list(coeffs))[1:]) try: div = next(divs) except StopIteration: return None while True: for monom, coeff in zip(monoms, coeffs): if coeff % div**monom != 0: try: div = next(divs) except StopIteration: return None else: break else: return div
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def divisor_num(x):\n factor_pow = map(lambda y: y + 1, factorint(x).values())\n div_num = reduce(mul, factor_pow)\n return div_num", "def integerpolynomialfactorization(f):\n cont = f.content()\n prim = f.primitive_part()\n F = [prim]\n G = prim\n c = 0\n one = G.getRing().one\n while (G.differentiate() and F[c] != one):\n deriv = G.differentiate()\n F.append(F[c].subresultant_gcd(deriv))\n c = c + 1\n G = F[c]\n sqfree_part = F[0].pseudo_floordiv(F[0].subresultant_gcd(F[1])).primitive_part()\n N = zassenhaus(sqfree_part)\n\n if cont != 1:\n result = [(one.scalar_mul(cont) ,1)]\n else:\n result = []\n\n F.reverse()\n e = len(F)\n for factor in N:\n for deg, deriv in enumerate(F):\n if not (deriv.pseudo_mod(factor)):\n result.append((factor, (e-deg)))\n break\n return result", "def height_bound(polynomial):\n # first check that polynomial is over QQ or ZZ\n K=polynomial.parent()\n\n if K.is_field():\n R = K.ring()\n else:\n R = K\n F = R.base_ring()\n\n if F != QQ and F!= ZZ:\n raise TypeError(\"coefficient ring is not the rational numbers or the integers\")\n\n # scale polynomial so that it has integer coefficients with gcd 1\n # this ensures that H(f) = H_infinity(f)\n f = R(polynomial)\n f = f*f.denominator()\n f = f/(gcd(f.coefficients()))\n\n # compute the infinite height\n L2norm_sq = sum([a**2 for a in f.coefficients()])\n\n return (6*(L2norm_sq)**3)", "def poly_integral(poly, C=0):\n if poly == [] or type(poly) is not list or type(C) is not int:\n return None\n if len(poly) == 1:\n return [C]\n result = [C]\n\n for i in range(len(poly)):\n integ = poly[i] / (i+1)\n if inte.is_integer():\n integ = int(r)\n result.append(integ)\n\n return result", "def pow2(x: int, p: int) -> int:\n while p > 0:\n x = x * x % q\n p -= 1\n return x", "def poly_integral(poly, C=0):\n try:\n if len(poly) < 1:\n return None\n except TypeError:\n return None\n if not (isinstance(C, int) or isinstance(C, float)):\n return None\n lastidx = 0\n for idx, coef in enumerate(poly):\n if not (type(coef) is int or type(coef) is float):\n return None\n if coef != 0:\n lastidx = idx + 1\n newlist = [C] + [int_if_whole(coef / (exp + 1))\n for exp, coef in enumerate(poly)]\n return newlist[:lastidx + 1]", "def poly_integral(poly, C=0):\n if not isinstance(poly, list) or poly == []:\n return None\n if not isinstance(C, (int, float)):\n return None\n result = [C]\n for degree, coef in enumerate(poly):\n val = coef * (1 / (degree + 1))\n if val.is_integer():\n result.append(int(val))\n else:\n result.append(val)\n for _ in range(len(result)):\n if result[-1] == 0:\n result.pop()\n return result", "def divide(self, dividend, divisor):\n MAX_INT = 0x7FFFFFFF\n MIN_INT = 0x80000000\n\n if divisor == 0:\n return MAX_INT\n\n sign = 1 if dividend > 0 and divisor > 0 or dividend < 0 and divisor < 0 else -1\n dividend, divisor = abs(dividend), abs(divisor)\n res = 0\n while dividend >= divisor:\n pow2 = 1\n tmp = divisor\n while dividend >= tmp:\n tmp <<= 1\n pow2 <<= 1\n tmp >>= 1\n pow2 >>= 1\n dividend -= tmp\n res += pow2\n \n res = sign * res\n return res if res <= MAX_INT else MAX_INT", "def PBpoly(n, x):\n n = int(n)\n return Bpoly(n, x-math.floor(x))", "def floor_int(i: int, base: int) -> int:\n return (i // base) * base if i > 0 else (i + 1) // base * base - base", "def _create_divisor(x):\n return x if x != 0 else 1", "def divide(x, y):\n assert y != 0\n if x == 1: return 0, 1\n q, r = divide(x >> 1, y)\n q *= 2\n r *= 2\n if x & 1: r += 1\n if r >= y:\n q += 1\n r -= y\n return q, r", "def Bpoly(n, x):\n n = 
int(n)\n out = 0\n for k in xrange(0, n+1):\n out += comb(n,k)*Bnum(n-k)*x**float(k)\n return out", "def divider_ref(dividend, divisor):\n rom_size = 2**8\n rom = [0 for _ in range(rom_size)]\n rom = [0] + [int(round(((2**16)-1)/float(ii)))\n for ii in range(1, rom_size)]\n rom = tuple(rom)\n divisor_reciprocal = rom[divisor]\n if dividend < 0:\n dividend_d1 = -dividend\n else:\n dividend_d1 = dividend\n mult = (dividend_d1 * divisor_reciprocal)\n mult_s = mult/(2**16)\n if dividend < 0:\n mult_s = -mult_s\n round_ = int((mult/(2**15)) % 2)\n if round_ == 1:\n if dividend >= 0:\n mult_s = mult_s + 1\n else:\n mult_s = int(mult_s - 1)\n return int(mult_s)", "def _split_factors(integer):\n assert integer.is_Integer\n if integer == 0:\n return [1, 0]\n # Gives dict of form {factor: multiplicity}\n factors = sympy.factorint(integer)\n left = sympy.Integer(1)\n right = sympy.Integer(1)\n for factor, mult in six.iteritems(factors):\n left_mult = random.randint(0, mult)\n right_mult = mult - left_mult\n left *= factor ** left_mult\n right *= factor ** right_mult\n return left, right", "def _Schoof_mod_l(self, l):\n if l == 2:\n return self._Schoof_mod2()\n E = self.cubic\n D = self.division_polynomials\n lth_div = self.division_polynomials[l]\n field = self.basefield\n bfsize = card(field)\n x = UniVarPolynomial({1:field.one}, field)\n k = bfsize % l\n x_frob = PolyPow(x, bfsize, lth_div) #x_frob=x^q\n x_frobfrob = PolyPow(x_frob, bfsize, lth_div) #x_frobfrob=x^{q^2}\n\n # test for x^{q^2} - x\n f, P = self._sub1(k, x_frobfrob - x, lth_div)\n f0, f3 = f[0], f[3]\n\n if GCD(lth_div, P).degree() > 0:\n if arith1.legendre(k, l) == -1:\n _log.debug(\"%s $\" % str((0, l)))\n return (0, l)\n\n # arith1.legendre(k, l) == 1 <=> k is QR\n w = arith1.modsqrt(k, l)\n f, P = self._sub1(w, x_frob - x, lth_div)\n\n if GCD(lth_div, P).degree() == 0: # coprime\n _log.debug(\"%s $$$$\" % str((0, l)))\n return (0, l)\n\n # there exist non trivial common divisors\n g0 = PolyPow(E, (bfsize - 1) // 2, lth_div) #y^(q-1)\n P = self._sub2(w, g0, f[3], lth_div)\n\n if GCD(lth_div, P).degree() > 0:\n _log.debug(\"%s $$\" % str((2*w % l, l)))\n return (2*w % l, l)\n else:\n _log.debug(\"%s $$$\" % str((-2*w % l, l)))\n return (-2*w % l, l)\n\n else: # coprime (GCD(P, lth_div).degree() == 0)\n Y = x - x_frobfrob\n g0 = PolyPow(E, (bfsize - 1) // 2, lth_div) #y^(q-1)\n g1 = PolyPow(g0, bfsize + 1, lth_div) #y^(q^2-1)\n f = -self._sub2(k, g1, f3, lth_div)\n h1 = PolyMulRed([f, f], lth_div)\n if k % 2 == 0:\n g = (PolyMulRed([Y, E, f3], lth_div) - f0) * 4\n h0 = PolyMulRed([g, g], lth_div)\n aux1 = PolyMulRed([f0, h0], lth_div) + h1\n X_d = PolyMulRed([E, f3, h0], lth_div)\n else:\n g = (PolyMulRed([Y, f3], lth_div) - PolyMulRed([E, f0], lth_div)) * 4\n h0 = PolyMulRed([g, g], lth_div)\n aux1 = PolyMulRed([E, PolyMulRed([f0, h0], lth_div) + h1], lth_div)\n X_d = PolyMulRed([f3, h0], lth_div)\n X_n = PolyMulRed([X_d, x_frobfrob + x_frob + x], lth_div) - aux1\n\n # loop of t\n e_q = PolyPow(self.cubic, bfsize, lth_div)\n for t in range(1, (l - 1)//2 + 1):\n Z_d_x, Z_n_x = self._Z_x(t, D, e_q, bfsize, lth_div)\n # X_n * Z_d_x == X_d * Z_n_x (mod lth_div)?\n if not PolyMod(X_n * Z_d_x - X_d * Z_n_x, lth_div):\n break\n else: # loop of t exhausted\n _log.debug(\"%s @@@\" % str((0, l)))\n return (0, l)\n\n # found: X_n * Z_d_x == X_d * Z_n_x (mod lth_div)\n y0 = PolyMulRed([-2*x_frobfrob - x, X_d], lth_div) + aux1\n if k % 2 == 0:\n Y_d = PolyMulRed([E, D[k], g, X_d], lth_div)\n else:\n Y_d = PolyMulRed([D[k], g, X_d], lth_div)\n 
Y_n = -PolyMulRed([g1, Y_d], lth_div) - PolyMulRed([f, y0], lth_div)\n Z_d_y, Z_n_y = self._Z_y(t, D, g0, bfsize, lth_div)\n\n # Y_n * Z_d_y == Y_d * Z_n_y (mod lth_div)?\n if PolyMod(Y_n * Z_d_y - Y_d * Z_n_y, lth_div):\n _log.debug(\"%s @@\" % str((l-t, l)))\n return (l-t, l)\n else:\n _log.debug(\"%s @\" % str((t, l)))\n return (t, l)", "def divide(self, dividend: int, divisor: int) -> int:\n sig = (dividend < 0) == (divisor < 0)\n a, b, res = abs(dividend), abs(divisor), 0\n while a >= b:\n shift = 0\n while a >= b << (shift + 1):\n print(a, res)\n shift += 1\n res += 1 << shift\n a -= b << shift\n return min(res if sig else -res, (1 << 31) - 1)", "def basis(n, *, algebra):\r\n if algebra.generic:\r\n return basis_generic(n, p=algebra.p)\r\n else:\r\n return basis_2(n)", "def div_power(a,b):\r\n if (base(a)==base(b)):\r\n return make_power(base(a), power(a)-power(b))\r\n else:\r\n return calc_power(a)*calc_power(b)", "def divisor_sum(x):\n factors = factorint(x)\n primes = factors.keys()\n powers = factors.values()\n sums = list()\n for i, p in enumerate(list(primes)):\n sums.append(sum([math.pow(p, x) for x in range(list(powers)[i] + 1)]))\n return int(reduce_mul(sums)) - x", "def solve(num_divisor):\n\n ordinal = 1\n triangle_number = 1\n num_factors = 1\n\n while num_factors <= num_divisor:\n ordinal += 1\n triangle_number += ordinal\n num_factors = len(get_factors(triangle_number))\n\n return triangle_number", "def Iq(q, lorentz_scale, porod_scale, cor_length, porod_exp, lorentz_exp):\n with errstate(divide='ignore'):\n porod = porod_scale / q**porod_exp\n lorentz = lorentz_scale / (1.0 + (q * cor_length)**lorentz_exp)\n inten = porod + lorentz\n return inten", "def _cmplx_div_ ( s , o ) :\n return ( 1.0 / o ) * complex ( s )", "def __floordiv__(self, other: 'SInt') -> 'SInt':\r\n return self.__divmod__(other)[0]", "def __mod__(A, B):\n if isinstance(B, Polynomial):\n return A.euclidean_division(B)[1]\n else:\n assert isinstance(B, int)\n assert all(isinstance(c, int) for c in A)\n return A.reduceP(B)", "def as_int(self):\n number = 0\n n = 1\n for i in reversed(self.qubit_values):\n number += n*i\n n = n << 1\n return number", "def __ifloordiv__(self, obj):\n # calls __floordiv__\n tmp = self // obj\n self.data = tmp.data\n return self", "def find_smallest_evenly_divisible_integer_of_range(range_i, range_f):\n max_power_primes = find_max_power_prime_in_range(range_i, range_f)\n sum = 1\n for x in max_power_primes:\n sum *= math.pow(x,max_power_primes[x])\n \n return sum", "def ceil_int(i: int, base: int) -> int:\n return ((i - 1) // base) * base + base if i >= 0 else (i // base) * base", "def automorphism_group_QQ_CRT(rational_function, prime_lower_bound=4, return_functions=True, iso_type=False):\n if rational_function.parent().is_field():\n K = rational_function.parent()\n R = K.ring()\n else:\n R = rational_function.parent()\n K = R.fraction_field()\n\n F = R.base_ring()\n\n if F != QQ and F!= ZZ:\n raise TypeError(\"coefficient ring is not the rational numbers or the integers\")\n\n z = R.gen(0)\n phi = K(rational_function)\n\n f = phi.numerator()\n g = phi.denominator()\n\n #scale f,g so both have integer coefficients\n N = lcm(f.denominator(),g.denominator())\n f = f*N\n g = g*N\n N = gcd(gcd(f.coefficients()), gcd(g.coefficients()))\n f = f/N\n g = g/N\n\n d = max(f.degree(), g.degree())\n\n if d == 1:\n raise ValueError(\"rational function has degree 1\")\n\n #badprimes is an integer divisible by every prime p such that either\n # 1) phi has bad reduction at p or\n # 2) the 
reduction map fails to be injective\n badprimes = (gcd(f[d],g[d])*f.resultant(g)*6)\n #6 is because over Q, Aut(phi) has order dividing 12\n #when generalizing to a number field K, 6 should be replaced with\n # 2*gcd(2*[K:Q] + 1, d^3 - d)\n\n #Determining the set that is used to obtain the height bound\n h = R(prod(x[0] for x in (R(f - g*z)).factor()))# take minimal polynomial of fixed points\n if h.degree() == 2: #if there are only 2 finite fixed points, take preimage of fixed points\n h = h[2]*f**2 + h[1]*f*g + h[0]*g**2\n elif h.degree() == 1: #if there is just 1 finite fixed point, take preimages under phi^2\n psi = phi(phi(z))\n f2 = psi.numerator()\n g2 = psi.denominator()\n N = lcm(f2.denominator(),g2.denominator())\n f2 = f2*N\n g2 = g2*N\n N = gcd(gcd(f2.coefficients()), gcd(g2.coefficients()))\n f2 = f2/N\n g2 = g2/N\n h = h[1]*f2 + h[0]*g2\n\n MaxH = height_bound(h)\n congruence = 1\n primes = Primes();\n p = primes.next(ZZ(prime_lower_bound))\n primepowers = []\n automorphisms = []\n orderaut = []\n orderelts = []\n\n if return_functions:\n elements = [z]\n else:\n elements = [matrix(ZZ, 2, [1,0,0,1])]\n\n badorders = [1, 12]# order 12 not possible over Q, even though 4 and 6 are\n\n #over QQ, elts of PGL_2 of finite order can only have order dividing 6 or 4,\n # and the finite subgroups can only be cyclic or dihedral (Beauville) so\n # the only possible groups are C_n, D_2n for n|6 or n|4\n # all of these groups have order dividing 24\n while (congruence < (2*MaxH**2)) and len(elements) < gcd(orderaut + [24]):\n if badprimes%p != 0: #prime of good reduction\n # compute automorphisms mod p\n phi_p = f.change_ring(GF(p))/g.change_ring(GF(p))\n sorted_automorphisms = automorphism_group_FF(phi_p)\n sorted_automorphisms.sort(key = PGL_order)\n orders = [PGL_order(A) for A in sorted_automorphisms]\n\n automorphisms.append(sorted_automorphisms)\n orderaut.append(len(automorphisms[-1]))\n orderelts.append(orders)\n primepowers.append(p)\n\n # check if we already found 8 or 12 automorphisms\n # and the gcd of orders over Fp and 24 is 24\n # or if the gcd is equal to the the number of automorphisms we have\n if (len(elements) == gcd(orderaut + [24])) or \\\n (gcd(orderaut + [24]) == 24 and \\\n (len(elements) == 12 or len(elements) == 8)):\n if iso_type:\n return(elements, which_group(elements))\n return elements\n else:\n N = gcd(orderaut + [12]) #all orders of elements divide N\n for order in [O for O in divisors(N) \\\n if not O in badorders]: #range over all orders\n # that are possible over QQ such that we haven't already\n # found all elements of that order\n\n # First count number of elements of particular order\n numeltsoffixedorder = []\n for L in orderelts:\n numeltsoffixedorder.append(L.count(order))\n numelts = min(numeltsoffixedorder)\n # Have some elts of fixed order mod p for each p\n if numelts != 0:\n #CRT order d elements together and check if\n # they are an automorphism\n autos, M = CRT_automorphisms(automorphisms,\n orderelts, order, primepowers)\n temp = valid_automorphisms(autos, phi, MaxH, M,\n return_functions)\n elements.extend(temp)\n\n if (len(elements) == gcd(orderaut + [24])):\n #found enough automorphisms\n if iso_type:\n return(elements, which_group(elements))\n return elements\n elif numelts <= (len(temp)):\n badorders.append(order)\n # found all elements of order 'order;\n elif len(temp) != 0:\n # found some elements of order 'order'\n # if an element of Aut_{F_p} has been lifted to QQ\n # remove that element from Aut_{F_p} so we don't\n # attempt to 
lift that element again unnecessarily\n automorphisms=remove_redundant_automorphisms(automorphisms,\n orderelts, primepowers, temp)\n if order == 4: #have some elements of order 4\n # so possible aut group is Z/4 or D_4\n badorders.extend([3, 6])\n elif order == 3 or order == 6:#have some elements of\n # order 3 or 6 so possible aut groups are Z/3,\n # D_3, Z/6, or D_6\n badorders.append(4)\n else: #no elements of order d in some F_v\n for m in divisors(N):\n if m%order == 0:\n badorders.append(m)\n #no elements of that order or any order that\n # is a multiple of it\n if len([order for order in divisors(N) \\\n if not order in badorders]) == 0:\n #found all elements of every possible order\n if iso_type:\n return(elements, which_group(elements))\n return elements\n congruence = congruence*p\n\n p = primes.next(p)\n\n if iso_type:\n return(elements, which_group(elements))\n return(elements)" ]
[ "0.5803568", "0.5495862", "0.53540903", "0.5195812", "0.5137353", "0.50611347", "0.5049097", "0.49525103", "0.48945084", "0.4893453", "0.48900345", "0.48405665", "0.48327824", "0.47568256", "0.47512928", "0.4697377", "0.46737888", "0.46663266", "0.46649405", "0.4663487", "0.46520147", "0.46307933", "0.46263018", "0.46009082", "0.45972726", "0.45957664", "0.4592651", "0.45864025", "0.45622504", "0.45582214" ]
0.73283625
0
Try to get rid of symbolic coefficients from ``poly``.
def preprocess_roots(poly): coeff = S.One poly_func = poly.func try: _, poly = poly.clear_denoms(convert=True) except DomainError: return coeff, poly poly = poly.primitive()[1] poly = poly.retract() # TODO: This is fragile. Figure out how to make this independent of construct_domain(). if poly.get_domain().is_Poly and all(c.is_term for c in poly.rep.coeffs()): poly = poly.inject() strips = list(zip(*poly.monoms())) gens = list(poly.gens[1:]) base, strips = strips[0], strips[1:] for gen, strip in zip(list(gens), strips): reverse = False if strip[0] < strip[-1]: strip = reversed(strip) reverse = True ratio = None for a, b in zip(base, strip): if not a and not b: continue elif not a or not b: break elif b % a != 0: break else: _ratio = b // a if ratio is None: ratio = _ratio elif ratio != _ratio: break else: if reverse: ratio = -ratio poly = poly.eval(gen, 1) coeff *= gen**(-ratio) gens.remove(gen) if gens: poly = poly.eject(*gens) if poly.is_univariate and poly.get_domain().is_ZZ: basis = _integer_basis(poly) if basis is not None: n = poly.degree() def func(k, coeff): return coeff//basis**(n - k[0]) poly = poly.termwise(func) coeff *= basis if not isinstance(poly, poly_func): poly = poly_func(poly) return coeff, poly
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def linear_simplify_poly(poly):\n if len(poly) < 4:\n return poly\n\n q = Queue()\n for v in poly:\n q.put(v)\n\n new_poly = []\n a = q.get()\n b = q.get()\n while True:\n if q.empty():\n new_poly += [a,b]\n break\n c = q.get()\n e1 = (b-a).normalized()\n e2 = (c-b).normalized()\n if abs(1.0 - e1.dot(e2)) < 1e-2:\n # colinear. skip b.\n a = a\n b = c\n else:\n # a,b needed.\n new_poly += [a]\n a = b\n b = c\n return new_poly", "def poly_derivative(poly):\n if type(poly) is not list or len(poly) < 1:\n return None\n if len(poly) == 1:\n return [0]\n\n derivated_coefficients = []\n\n for power, coefficient in enumerate(poly):\n if power == 0:\n pass\n\n else:\n new_coefficient = coefficient * power\n derivated_coefficients.append(new_coefficient)\n\n return(derivated_coefficients)", "def _filtered_gens(poly, symbol):\n gens = {g for g in poly.gens if symbol in g.free_symbols}\n for g in list(gens):\n ag = 1/g\n if g in gens and ag in gens:\n if ag.as_numer_denom()[1] != 1:\n g = ag\n gens.remove(g)\n return gens", "def charpoly(self):\n # FLINT polynomial coefficients are in reverse order compared to SymPy.\n return self.rep.charpoly().coeffs()[::-1]", "def _reform_poly_coefficients(fit_poly_x, fit_poly_y):\n # Extract values for CD matrix and recombining\n c11 = fit_poly_x.c1_0.value\n c12 = fit_poly_x.c0_1.value\n c21 = fit_poly_y.c1_0.value\n c22 = fit_poly_y.c0_1.value\n sip_poly_x = fit_poly_x.copy()\n sip_poly_y = fit_poly_y.copy()\n # Force low order coefficients to be 0 as defined in SIP\n sip_poly_x.c0_0 = 0\n sip_poly_y.c0_0 = 0\n sip_poly_x.c1_0 = 0\n sip_poly_x.c0_1 = 0\n sip_poly_y.c1_0 = 0\n sip_poly_y.c0_1 = 0\n\n cdmat = ((c11, c12), (c21, c22))\n invcdmat = npla.inv(np.array(cdmat))\n degree = fit_poly_x.degree\n # Now loop through all remaining coefficients\n for i in range(0, degree + 1):\n for j in range(0, degree + 1):\n if (i + j > 1) and (i + j < degree + 1):\n old_x = getattr(fit_poly_x, f'c{i}_{j}').value\n old_y = getattr(fit_poly_y, f'c{i}_{j}').value\n newcoeff = np.dot(invcdmat, np.array([[old_x], [old_y]]))\n setattr(sip_poly_x, f'c{i}_{j}', newcoeff[0, 0])\n setattr(sip_poly_y, f'c{i}_{j}', newcoeff[1, 0])\n\n return cdmat, sip_poly_x, sip_poly_y", "def recf_standard_poly(basetensor, ind, x):\n return basetensor[ind.all+ind.getPreceding(1)] * \\\n basetensor[ind.all+ind.getSpecial()]", "def rm_poly(self, order=1, dim=\"time\", nan_policy=\"none\"):\n return rm_poly(self._obj, order=order, dim=dim, nan_policy=nan_policy)", "def zzX_from_poly(f):\n if f.is_univariate:\n return zzx_from_poly(f)\n else:\n return zzX_from_dict(dict(zip(f.monoms, f.coeffs)), len(f.symbols))", "def zzX_to_poly(f, *symbols):\n from sympy.polys import Poly\n\n terms = {}\n\n for monom, coeff in zzX_to_dict(f).iteritems():\n terms[monom] = Integer(int(coeff))\n\n return Poly(terms, *symbols)", "def parse_poly(self, expr: str) -> Polynomial:\n return symbolic_polynomial(expr, self)", "def simplify(self): # TODO too complex, refactor\n simplified = self.__simplifyRecurse()\n # distributed out the polynomial. Now need to collect like terms\n simplified.vars = self.vars.copy()\n orderedVars = sorted(list(self.vars))\n\n powers = {} # will have keys of tuples. The tuples will represent the power of a variable. 
Values will be the\n for monomial in simplified.poly[1:]:\n power = [0] * len(orderedVars)\n total = 0\n\n if monomial.isSimple():\n monomial.poly = [\"*\", monomial.poly]\n\n for term in monomial.poly[1:]:\n term = ensurePoly(term)\n if isinstance(term.poly, (int, float)):\n total += term.poly\n elif isinstance(term.poly, (Variable.Variable)):\n power[orderedVars.index(term.poly)] += 1\n\n power = tuple(power)\n if power not in powers:\n powers[power] = total\n else:\n powers[power] = min(total, powers[power])\n\n finalPoly = Polynomial()\n finalPoly.poly = [\"+\"]\n finalPoly.vars = self.vars.copy()\n for power in sorted(list(powers.keys())):\n monomial = Polynomial()\n monomial.poly = [\"*\"]\n if powers[power] != 0:\n monomial.poly.append(powers[power])\n\n for pow, ind in zip(power, range(len(power))):\n if pow == 0:\n continue\n elif pow == 1:\n monomial.poly.append(orderedVars[ind])\n else:\n monomial.poly.append(orderedVars[ind]**pow)\n finalPoly.poly.append(monomial)\n return finalPoly", "def anti_deriv(self):\n poly_anti_deriv = [0]\n for i, val in enumerate(self.coeff):\n poly_anti_deriv.append(round(val/(i+1.0), 2))\n return Poly(poly_anti_deriv)", "def function_poly(coeffs, t, x, y, lc, coeffs_dict, coeffs_tuple, fix_coeffs, batman_params, poly_params, eclipse = False):\n new_flux = model_poly(coeffs, t, x, y, coeffs_dict, coeffs_tuple, fix_coeffs, batman_params, poly_params, eclipse = eclipse)\n return lc - new_flux", "def minpoly(self):\n\n field = self.field\n one = Constructible.lift_rational_field(1, field)\n poly = (-self, one)\n\n while field:\n field = field[1]\n if all(c.b == 0 for c in poly):\n # `poly` is its own conjugate, so just unlift it\n poly = tuple(c.a for c in poly)\n continue\n\n # calculate unlifted `poly * conj(poly)`\n conj_poly = tuple(c._conjugate() for c in poly)\n new_poly = []\n deg = len(poly) - 1\n zero = Constructible.lift_rational_field(0, field)\n\n for m in range(deg * 2 + 1):\n if m % 2 == 0:\n x = poly[m // 2] * conj_poly[m // 2]\n assert x.b == 0\n coef = x.a\n else:\n coef = zero\n\n # iterating i such that 0 <= i < m - i <= deg:\n for i in range(max(0, m - deg), (m + 1) // 2):\n x = poly[i] * conj_poly[m - i]\n coef += x.a * 2\n\n new_poly.append(coef)\n\n poly = new_poly\n\n return tuple(c.a for c in poly) # a tuple of `Rational`s", "def strUniPoly(poly, symbol=\"X\", asc=True):\n return termorder.UnivarTermOrder(cmp).format(poly, symbol, asc)", "def _cleanup_coefficients(self, coefficients = None, in_place = True) :\n if coefficients is None :\n coefficients = self.__coefficients\n \n if in_place :\n for s in coefficients.keys() :\n if not s in self.precision() or coefficients[s].is_zero() :\n del coefficients[s]\n else :\n ncoefficients = dict()\n \n for s in coefficients :\n if not s in self.precision() : continue\n\n v = coefficients[s]\n if v.is_zero() : continue\n\n ncoefficients[s] = v\n\n if in_place :\n return coefficients\n else :\n return ncoefficients", "def __neg__(self) -> Polynomial:\n return Polynomial(self.degree, [-c for c in self.coefficients])", "def poly_derivative(poly):\n try:\n iter(poly)\n except TypeError:\n return None\n if poly == [] or any(not isinstance(expo, (int, float)) for expo in poly):\n return None\n if len(poly) == 1:\n return [0]\n return [i*expo for i, expo in enumerate(poly)][1:]", "def zzx_to_poly(f, *symbols):\n from sympy.polys import Poly\n\n terms = {}\n\n for monom, coeff in zzx_to_dict(f).iteritems():\n terms[(monom,)] = Integer(int(coeff))\n\n return Poly(terms, *symbols)", "def 
poly_derivative(poly):\n if not poly or type(poly) is not list:\n return None\n\n response = []\n\n for order in range(1, len(poly)):\n response.append(order * poly[order])\n\n if not response:\n response.append(0)\n\n return response", "def zzX_strip(f):\n if poly_univariate_p(f):\n return zzx_strip(f)\n\n if zzX_zero_p(f):\n return f\n\n k = 0\n\n for coeff in f:\n if not zzX_zero_p(coeff):\n break\n else:\n k += 1\n\n if k == len(f):\n return zzX_zero_of(f)\n else:\n return f[k:]", "def poly_derivative(poly):\n result = []\n\n if poly is None or type(poly) != list or poly == []:\n return None\n\n for i in range(len(poly)):\n if type(poly[i]) not in (int, float):\n return None\n elif len(poly) == 1:\n result.append(0)\n else:\n if i == 0:\n continue\n result.append(i * poly[i])\n\n return result", "def poly_derivative(poly):\n if not type(poly) is list or len(poly) == 0 or type(poly[0]) is not int:\n return None\n\n derivative = []\n for i in range(1, len(poly)):\n derivative.append(poly[i] * i)\n\n if derivative == []:\n derivative = [0]\n\n return derivative", "def _cleanup_coefficients(self, coefficients = None, in_place = True) :\n if coefficients is None :\n coefficients = self.__coefficients\n \n if not in_place :\n ncoefficients = dict()\n \n for ch in coefficients.keys() :\n d = coefficients[ch]\n \n if in_place :\n for s in d.keys() :\n if not s in self.precision() or d[s].is_zero() :\n del d[s]\n \n if len(d) == 0 :\n del coefficients[ch]\n else :\n nd = dict()\n \n for s in d :\n if not s in self.precision() : continue\n\n v = d[s]\n if v.is_zero() : continue\n\n nd[s] = v\n \n if len(nd) != 0 :\n ncoefficients[ch] = nd\n\n if in_place :\n return coefficients\n else :\n return ncoefficients", "def poly_derivative(poly):\n res = []\n if type(poly) is not list or len(poly) == 0:\n return None\n if len(poly) == 1:\n return([0])\n for i in range(1, len(poly)):\n if type(poly[i]) is not int:\n return None\n res.append(poly[i] * i)\n return(res)", "def get_poly(kwargs):\n from sklearn.preprocessing import PolynomialFeatures\n return PolynomialFeatures(**kwargs)", "def getOpposite(self):\n return Polynomial([-x for x in self.__coefficients])", "def nc_coeffs(poly, var, max_deg=10, order='increasing'):\r\n\r\n # TODO: elegant way to find out the degree\r\n # workarround: pass the maximum expected degree as kwarg\r\n\r\n D0 = sp.Dummy('D0')\r\n poly = poly.expand() + D0 # ensure class add\r\n\r\n assert isinstance(poly, sp.Add)\r\n res = []\r\n # special case: 0-th power of var\r\n coeff = 0\r\n for a in poly.args:\r\n if not a.has(var):\r\n coeff += a\r\n res.append(coeff.subs(D0, 0))\r\n\r\n # special case: first power of var\r\n coeff = poly.diff(var).subs(var, 0)\r\n res.append(coeff)\r\n\r\n # powers > 1:\r\n for i in xrange(1, max_deg):\r\n coeff = 0\r\n for a in poly.args:\r\n if a.has(var**(i + 1)):\r\n term = a.subs(var, 1)\r\n coeff += term\r\n res.append(coeff)\r\n\r\n if order == \"decreasing\":\r\n res.reverse()\r\n\r\n return res", "def _poly_func(x, a, b, c, d, e):\n return a * x ** 6 + b * x ** 5 + c * x ** 4 + d * x ** 3 + e * x ** 2", "def poly(x, degree=2):\n x = np.array(x)\n X_trans = np.transpose(np.vstack((x**k for k in range(degree + 1))))\n return np.linalg.qr(X_trans)[0][:, 1:]" ]
[ "0.62805736", "0.6251077", "0.61265767", "0.6063948", "0.59309864", "0.58919007", "0.5861226", "0.58583367", "0.58181924", "0.58060616", "0.5801948", "0.5798098", "0.5743815", "0.5664095", "0.5663453", "0.56335896", "0.5619114", "0.56125027", "0.56103873", "0.55965537", "0.55514795", "0.554949", "0.5481182", "0.5475667", "0.5471092", "0.5469713", "0.5469283", "0.5462925", "0.545745", "0.5444112" ]
0.6638182
0
Computes symbolic roots of a univariate polynomial. Given a univariate polynomial f with symbolic coefficients (or a list of the polynomial's coefficients), returns a dictionary with its roots and their multiplicities. Only roots expressible via radicals will be returned. To get a complete set of roots use RootOf class or numerical methods instead. By default cubic and quartic formulas are used in the algorithm. To disable them because of unreadable output set ``cubics=False`` or ``quartics=False`` respectively. If cubic roots are real but are expressed in terms of complex numbers (casus irreducibilis [1]) the ``trig`` flag can be set to True to have the solutions returned in terms of cosine and inverse cosine functions. To get roots from a specific domain set the ``filter`` flag with one of the following specifiers: Z, Q, R, I, C.
def roots(f, *gens, auto=True, cubics=True, trig=False, quartics=True, quintics=False, multiple=False, filter=None, predicate=None, strict=False, **flags): from sympy.polys.polytools import to_rational_coeffs flags = dict(flags) if isinstance(f, list): if gens: raise ValueError('redundant generators given') x = Dummy('x') poly, i = {}, len(f) - 1 for coeff in f: poly[i], i = sympify(coeff), i - 1 f = Poly(poly, x, field=True) else: try: F = Poly(f, *gens, **flags) if not isinstance(f, Poly) and not F.gen.is_Symbol: raise PolynomialError("generator must be a Symbol") f = F except GeneratorsNeeded: if multiple: return [] else: return {} else: n = f.degree() if f.length() == 2 and n > 2: # check for foo**n in constant if dep is c*gen**m con, dep = f.as_expr().as_independent(*f.gens) fcon = -(-con).factor() if fcon != con: con = fcon bases = [] for i in Mul.make_args(con): if i.is_Pow: b, e = i.as_base_exp() if e.is_Integer and b.is_Add: bases.append((b, Dummy(positive=True))) if bases: rv = roots(Poly((dep + con).xreplace(dict(bases)), *f.gens), *F.gens, auto=auto, cubics=cubics, trig=trig, quartics=quartics, quintics=quintics, multiple=multiple, filter=filter, predicate=predicate, **flags) return {factor_terms(k.xreplace( {v: k for k, v in bases}) ): v for k, v in rv.items()} if f.is_multivariate: raise PolynomialError('multivariate polynomials are not supported') def _update_dict(result, zeros, currentroot, k): if currentroot == S.Zero: if S.Zero in zeros: zeros[S.Zero] += k else: zeros[S.Zero] = k if currentroot in result: result[currentroot] += k else: result[currentroot] = k def _try_decompose(f): """Find roots using functional decomposition. """ factors, roots = f.decompose(), [] for currentroot in _try_heuristics(factors[0]): roots.append(currentroot) for currentfactor in factors[1:]: previous, roots = list(roots), [] for currentroot in previous: g = currentfactor - Poly(currentroot, f.gen) for currentroot in _try_heuristics(g): roots.append(currentroot) return roots def _try_heuristics(f): """Find roots using formulas and some tricks. 
""" if f.is_ground: return [] if f.is_monomial: return [S.Zero]*f.degree() if f.length() == 2: if f.degree() == 1: return list(map(cancel, roots_linear(f))) else: return roots_binomial(f) result = [] for i in [-1, 1]: if not f.eval(i): f = f.quo(Poly(f.gen - i, f.gen)) result.append(i) break n = f.degree() if n == 1: result += list(map(cancel, roots_linear(f))) elif n == 2: result += list(map(cancel, roots_quadratic(f))) elif f.is_cyclotomic: result += roots_cyclotomic(f) elif n == 3 and cubics: result += roots_cubic(f, trig=trig) elif n == 4 and quartics: result += roots_quartic(f) elif n == 5 and quintics: result += roots_quintic(f) return result # Convert the generators to symbols dumgens = symbols('x:%d' % len(f.gens), cls=Dummy) f = f.per(f.rep, dumgens) (k,), f = f.terms_gcd() if not k: zeros = {} else: zeros = {S.Zero: k} coeff, f = preprocess_roots(f) if auto and f.get_domain().is_Ring: f = f.to_field() # Use EX instead of ZZ_I or QQ_I if f.get_domain().is_QQ_I: f = f.per(f.rep.convert(EX)) rescale_x = None translate_x = None result = {} if not f.is_ground: dom = f.get_domain() if not dom.is_Exact and dom.is_Numerical: for r in f.nroots(): _update_dict(result, zeros, r, 1) elif f.degree() == 1: _update_dict(result, zeros, roots_linear(f)[0], 1) elif f.length() == 2: roots_fun = roots_quadratic if f.degree() == 2 else roots_binomial for r in roots_fun(f): _update_dict(result, zeros, r, 1) else: _, factors = Poly(f.as_expr()).factor_list() if len(factors) == 1 and f.degree() == 2: for r in roots_quadratic(f): _update_dict(result, zeros, r, 1) else: if len(factors) == 1 and factors[0][1] == 1: if f.get_domain().is_EX: res = to_rational_coeffs(f) if res: if res[0] is None: translate_x, f = res[2:] else: rescale_x, f = res[1], res[-1] result = roots(f) if not result: for currentroot in _try_decompose(f): _update_dict(result, zeros, currentroot, 1) else: for r in _try_heuristics(f): _update_dict(result, zeros, r, 1) else: for currentroot in _try_decompose(f): _update_dict(result, zeros, currentroot, 1) else: for currentfactor, k in factors: for r in _try_heuristics(Poly(currentfactor, f.gen, field=True)): _update_dict(result, zeros, r, k) if coeff is not S.One: _result, result, = result, {} for currentroot, k in _result.items(): result[coeff*currentroot] = k if filter not in [None, 'C']: handlers = { 'Z': lambda r: r.is_Integer, 'Q': lambda r: r.is_Rational, 'R': lambda r: all(a.is_real for a in r.as_numer_denom()), 'I': lambda r: r.is_imaginary, } try: query = handlers[filter] except KeyError: raise ValueError("Invalid filter: %s" % filter) for zero in dict(result).keys(): if not query(zero): del result[zero] if predicate is not None: for zero in dict(result).keys(): if not predicate(zero): del result[zero] if rescale_x: result1 = {} for k, v in result.items(): result1[k*rescale_x] = v result = result1 if translate_x: result1 = {} for k, v in result.items(): result1[k + translate_x] = v result = result1 # adding zero roots after non-trivial roots have been translated result.update(zeros) if strict and sum(result.values()) < f.degree(): raise UnsolvableFactorError(filldedent(''' Strict mode: some factors cannot be solved in radicals, so a complete list of solutions cannot be returned. Call roots with strict=False to get solutions expressible in radicals (if there are any). ''')) if not multiple: return result else: zeros = [] for zero in ordered(result): zeros.extend([zero]*result[zero]) return zeros
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def roots_cubic(f, trig=False):\n if trig:\n a, b, c, d = f.all_coeffs()\n p = (3*a*c - b**2)/(3*a**2)\n q = (2*b**3 - 9*a*b*c + 27*a**2*d)/(27*a**3)\n D = 18*a*b*c*d - 4*b**3*d + b**2*c**2 - 4*a*c**3 - 27*a**2*d**2\n if (D > 0) == True:\n rv = []\n for k in range(3):\n rv.append(2*sqrt(-p/3)*cos(acos(q/p*sqrt(-3/p)*Rational(3, 2))/3 - k*pi*Rational(2, 3)))\n return [i - b/3/a for i in rv]\n\n # a*x**3 + b*x**2 + c*x + d -> x**3 + a*x**2 + b*x + c\n _, a, b, c = f.monic().all_coeffs()\n\n if c is S.Zero:\n x1, x2 = roots([1, a, b], multiple=True)\n return [x1, S.Zero, x2]\n\n # x**3 + a*x**2 + b*x + c -> u**3 + p*u + q\n p = b - a**2/3\n q = c - a*b/3 + 2*a**3/27\n\n pon3 = p/3\n aon3 = a/3\n\n u1 = None\n if p is S.Zero:\n if q is S.Zero:\n return [-aon3]*3\n u1 = -root(q, 3) if q.is_positive else root(-q, 3)\n elif q is S.Zero:\n y1, y2 = roots([1, 0, p], multiple=True)\n return [tmp - aon3 for tmp in [y1, S.Zero, y2]]\n elif q.is_real and q.is_negative:\n u1 = -root(-q/2 + sqrt(q**2/4 + pon3**3), 3)\n\n coeff = I*sqrt(3)/2\n if u1 is None:\n u1 = S.One\n u2 = Rational(-1, 2) + coeff\n u3 = Rational(-1, 2) - coeff\n b, c, d = a, b, c # a, b, c, d = S.One, a, b, c\n D0 = b**2 - 3*c # b**2 - 3*a*c\n D1 = 2*b**3 - 9*b*c + 27*d # 2*b**3 - 9*a*b*c + 27*a**2*d\n C = root((D1 + sqrt(D1**2 - 4*D0**3))/2, 3)\n return [-(b + uk*C + D0/C/uk)/3 for uk in [u1, u2, u3]] # -(b + uk*C + D0/C/uk)/3/a\n\n u2 = u1*(Rational(-1, 2) + coeff)\n u3 = u1*(Rational(-1, 2) - coeff)\n\n if p is S.Zero:\n return [u1 - aon3, u2 - aon3, u3 - aon3]\n\n soln = [\n -u1 + pon3/u1 - aon3,\n -u2 + pon3/u2 - aon3,\n -u3 + pon3/u3 - aon3\n ]\n\n return soln", "def roots_quadratic(f):\n\n a, b, c = f.all_coeffs()\n dom = f.get_domain()\n\n def _sqrt(d):\n # remove squares from square root since both will be represented\n # in the results; a similar thing is happening in roots() but\n # must be duplicated here because not all quadratics are binomials\n co = []\n other = []\n for di in Mul.make_args(d):\n if di.is_Pow and di.exp.is_Integer and di.exp % 2 == 0:\n co.append(Pow(di.base, di.exp//2))\n else:\n other.append(di)\n if co:\n d = Mul(*other)\n co = Mul(*co)\n return co*sqrt(d)\n return sqrt(d)\n\n def _simplify(expr):\n if dom.is_Composite:\n return factor(expr)\n else:\n from sympy.simplify.simplify import simplify\n return simplify(expr)\n\n if c is S.Zero:\n r0, r1 = S.Zero, -b/a\n\n if not dom.is_Numerical:\n r1 = _simplify(r1)\n elif r1.is_negative:\n r0, r1 = r1, r0\n elif b is S.Zero:\n r = -c/a\n if not dom.is_Numerical:\n r = _simplify(r)\n\n R = _sqrt(r)\n r0 = -R\n r1 = R\n else:\n d = b**2 - 4*a*c\n A = 2*a\n B = -b/A\n\n if not dom.is_Numerical:\n d = _simplify(d)\n B = _simplify(B)\n\n D = factor_terms(_sqrt(d)/A)\n r0 = B - D\n r1 = B + D\n if a.is_negative:\n r0, r1 = r1, r0\n elif not dom.is_Numerical:\n r0, r1 = [expand_2arg(i) for i in (r0, r1)]\n\n return [r0, r1]", "def test_roots_slow():\n a, b, c, d, x = symbols(\"a,b,c,d,x\")\n\n f1 = x ** 2 * c + (a / b) + x * c * d - a\n f2 = x ** 2 * (a + b * (c - d) * a) + x * a * b * c / (b * d - d) + (a * d - c / d)\n\n assert list(roots(f1, x).values()) == [1, 1]\n assert list(roots(f2, x).values()) == [1, 1]\n\n (zz, yy, xx, zy, zx, yx, k) = symbols(\"zz,yy,xx,zy,zx,yx,k\")\n\n e1 = (zz - k) * (yy - k) * (xx - k) + zy * yx * zx + zx - zy - yx\n e2 = (zz - k) * yx * yx + zx * (yy - k) * zx + zy * zy * (xx - k)\n\n assert list(roots(e1 - e2, k).values()) == [1, 1, 1]\n\n f = x ** 3 + 2 * x ** 2 + 8\n R = list(roots(f).keys())\n\n assert not any(i for i 
in [f.subs(x, ri).n(chop=True) for ri in R])", "def roots_cyclotomic(f, factor=False):\n L, U = _inv_totient_estimate(f.degree())\n\n for n in range(L, U + 1):\n g = cyclotomic_poly(n, f.gen, polys=True)\n\n if f.expr == g.expr:\n break\n else: # pragma: no cover\n raise RuntimeError(\"failed to find index of a cyclotomic polynomial\")\n\n roots = []\n\n if not factor:\n # get the indices in the right order so the computed\n # roots will be sorted\n h = n//2\n ks = [i for i in range(1, n + 1) if igcd(i, n) == 1]\n ks.sort(key=lambda x: (x, -1) if x <= h else (abs(x - n), 1))\n d = 2*I*pi/n\n for k in reversed(ks):\n roots.append(exp(k*d).expand(complex=True))\n else:\n g = Poly(f, extension=root(-1, n))\n\n for h, _ in ordered(g.factor_list()[1]):\n roots.append(-h.TC())\n\n return roots", "def roots_quintic(f):\n result = []\n\n coeff_5, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n if not all(coeff.is_Rational for coeff in (coeff_5, coeff_4, p_, q_, r_, s_)):\n return result\n\n if coeff_5 != 1:\n f = Poly(f / coeff_5)\n _, coeff_4, p_, q_, r_, s_ = f.all_coeffs()\n\n # Cancel coeff_4 to form x^5 + px^3 + qx^2 + rx + s\n if coeff_4:\n p = p_ - 2*coeff_4*coeff_4/5\n q = q_ - 3*coeff_4*p_/5 + 4*coeff_4**3/25\n r = r_ - 2*coeff_4*q_/5 + 3*coeff_4**2*p_/25 - 3*coeff_4**4/125\n s = s_ - coeff_4*r_/5 + coeff_4**2*q_/25 - coeff_4**3*p_/125 + 4*coeff_4**5/3125\n x = f.gen\n f = Poly(x**5 + p*x**3 + q*x**2 + r*x + s)\n else:\n p, q, r, s = p_, q_, r_, s_\n\n quintic = PolyQuintic(f)\n\n # Eqn standardized. Algo for solving starts here\n if not f.is_irreducible:\n return result\n f20 = quintic.f20\n # Check if f20 has linear factors over domain Z\n if f20.is_irreducible:\n return result\n # Now, we know that f is solvable\n for _factor in f20.factor_list()[1]:\n if _factor[0].is_linear:\n theta = _factor[0].root(0)\n break\n d = discriminant(f)\n delta = sqrt(d)\n # zeta = a fifth root of unity\n zeta1, zeta2, zeta3, zeta4 = quintic.zeta\n T = quintic.T(theta, d)\n tol = S(1e-10)\n alpha = T[1] + T[2]*delta\n alpha_bar = T[1] - T[2]*delta\n beta = T[3] + T[4]*delta\n beta_bar = T[3] - T[4]*delta\n\n disc = alpha**2 - 4*beta\n disc_bar = alpha_bar**2 - 4*beta_bar\n\n l0 = quintic.l0(theta)\n Stwo = S(2)\n l1 = _quintic_simplify((-alpha + sqrt(disc)) / Stwo)\n l4 = _quintic_simplify((-alpha - sqrt(disc)) / Stwo)\n\n l2 = _quintic_simplify((-alpha_bar + sqrt(disc_bar)) / Stwo)\n l3 = _quintic_simplify((-alpha_bar - sqrt(disc_bar)) / Stwo)\n\n order = quintic.order(theta, d)\n test = (order*delta.n()) - ( (l1.n() - l4.n())*(l2.n() - l3.n()) )\n # Comparing floats\n if not comp(test, 0, tol):\n l2, l3 = l3, l2\n\n # Now we have correct order of l's\n R1 = l0 + l1*zeta1 + l2*zeta2 + l3*zeta3 + l4*zeta4\n R2 = l0 + l3*zeta1 + l1*zeta2 + l4*zeta3 + l2*zeta4\n R3 = l0 + l2*zeta1 + l4*zeta2 + l1*zeta3 + l3*zeta4\n R4 = l0 + l4*zeta1 + l3*zeta2 + l2*zeta3 + l1*zeta4\n\n Res = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n Res_n = [None, [None]*5, [None]*5, [None]*5, [None]*5]\n\n # Simplifying improves performance a lot for exact expressions\n R1 = _quintic_simplify(R1)\n R2 = _quintic_simplify(R2)\n R3 = _quintic_simplify(R3)\n R4 = _quintic_simplify(R4)\n\n # hard-coded results for [factor(i) for i in _vsolve(x**5 - a - I*b, x)]\n x0 = z**(S(1)/5)\n x1 = sqrt(2)\n x2 = sqrt(5)\n x3 = sqrt(5 - x2)\n x4 = I*x2\n x5 = x4 + I\n x6 = I*x0/4\n x7 = x1*sqrt(x2 + 5)\n sol = [x0, -x6*(x1*x3 - x5), x6*(x1*x3 + x5), -x6*(x4 + x7 - I), x6*(-x4 + x7 + I)]\n\n R1 = R1.as_real_imag()\n R2 = R2.as_real_imag()\n R3 = 
R3.as_real_imag()\n R4 = R4.as_real_imag()\n\n for i, s in enumerate(sol):\n Res[1][i] = _quintic_simplify(s.xreplace({z: R1[0] + I*R1[1]}))\n Res[2][i] = _quintic_simplify(s.xreplace({z: R2[0] + I*R2[1]}))\n Res[3][i] = _quintic_simplify(s.xreplace({z: R3[0] + I*R3[1]}))\n Res[4][i] = _quintic_simplify(s.xreplace({z: R4[0] + I*R4[1]}))\n\n for i in range(1, 5):\n for j in range(5):\n Res_n[i][j] = Res[i][j].n()\n Res[i][j] = _quintic_simplify(Res[i][j])\n r1 = Res[1][0]\n r1_n = Res_n[1][0]\n\n for i in range(5):\n if comp(im(r1_n*Res_n[4][i]), 0, tol):\n r4 = Res[4][i]\n break\n\n # Now we have various Res values. Each will be a list of five\n # values. We have to pick one r value from those five for each Res\n u, v = quintic.uv(theta, d)\n testplus = (u + v*delta*sqrt(5)).n()\n testminus = (u - v*delta*sqrt(5)).n()\n\n # Evaluated numbers suffixed with _n\n # We will use evaluated numbers for calculation. Much faster.\n r4_n = r4.n()\n r2 = r3 = None\n\n for i in range(5):\n r2temp_n = Res_n[2][i]\n for j in range(5):\n # Again storing away the exact number and using\n # evaluated numbers in computations\n r3temp_n = Res_n[3][j]\n if (comp((r1_n*r2temp_n**2 + r4_n*r3temp_n**2 - testplus).n(), 0, tol) and\n comp((r3temp_n*r1_n**2 + r2temp_n*r4_n**2 - testminus).n(), 0, tol)):\n r2 = Res[2][i]\n r3 = Res[3][j]\n break\n if r2 is not None:\n break\n else:\n return [] # fall back to normal solve\n\n # Now, we have r's so we can get roots\n x1 = (r1 + r2 + r3 + r4)/5\n x2 = (r1*zeta4 + r2*zeta3 + r3*zeta2 + r4*zeta1)/5\n x3 = (r1*zeta3 + r2*zeta1 + r3*zeta4 + r4*zeta2)/5\n x4 = (r1*zeta2 + r2*zeta4 + r3*zeta1 + r4*zeta3)/5\n x5 = (r1*zeta1 + r2*zeta2 + r3*zeta3 + r4*zeta4)/5\n result = [x1, x2, x3, x4, x5]\n\n # Now check if solutions are distinct\n\n saw = set()\n for r in result:\n r = r.n(2)\n if r in saw:\n # Roots were identical. Abort, return []\n # and fall back to usual solve\n return []\n saw.add(r)\n\n # Restore to original equation where coeff_4 is nonzero\n if coeff_4:\n result = [x - coeff_4 / 5 for x in result]\n return result", "def preprocess_roots(poly):\n coeff = S.One\n\n poly_func = poly.func\n try:\n _, poly = poly.clear_denoms(convert=True)\n except DomainError:\n return coeff, poly\n\n poly = poly.primitive()[1]\n poly = poly.retract()\n\n # TODO: This is fragile. 
Figure out how to make this independent of construct_domain().\n if poly.get_domain().is_Poly and all(c.is_term for c in poly.rep.coeffs()):\n poly = poly.inject()\n\n strips = list(zip(*poly.monoms()))\n gens = list(poly.gens[1:])\n\n base, strips = strips[0], strips[1:]\n\n for gen, strip in zip(list(gens), strips):\n reverse = False\n\n if strip[0] < strip[-1]:\n strip = reversed(strip)\n reverse = True\n\n ratio = None\n\n for a, b in zip(base, strip):\n if not a and not b:\n continue\n elif not a or not b:\n break\n elif b % a != 0:\n break\n else:\n _ratio = b // a\n\n if ratio is None:\n ratio = _ratio\n elif ratio != _ratio:\n break\n else:\n if reverse:\n ratio = -ratio\n\n poly = poly.eval(gen, 1)\n coeff *= gen**(-ratio)\n gens.remove(gen)\n\n if gens:\n poly = poly.eject(*gens)\n\n if poly.is_univariate and poly.get_domain().is_ZZ:\n basis = _integer_basis(poly)\n\n if basis is not None:\n n = poly.degree()\n\n def func(k, coeff):\n return coeff//basis**(n - k[0])\n\n poly = poly.termwise(func)\n coeff *= basis\n\n if not isinstance(poly, poly_func):\n poly = poly_func(poly)\n return coeff, poly", "def roots_binomial(f):\n n = f.degree()\n\n a, b = f.nth(n), f.nth(0)\n base = -cancel(b/a)\n alpha = root(base, n)\n\n if alpha.is_number:\n alpha = alpha.expand(complex=True)\n\n # define some parameters that will allow us to order the roots.\n # If the domain is ZZ this is guaranteed to return roots sorted\n # with reals before non-real roots and non-real sorted according\n # to real part and imaginary part, e.g. -1, 1, -1 + I, 2 - I\n neg = base.is_negative\n even = n % 2 == 0\n if neg:\n if even == True and (base + 1).is_positive:\n big = True\n else:\n big = False\n\n # get the indices in the right order so the computed\n # roots will be sorted when the domain is ZZ\n ks = []\n imax = n//2\n if even:\n ks.append(imax)\n imax -= 1\n if not neg:\n ks.append(0)\n for i in range(imax, 0, -1):\n if neg:\n ks.extend([i, -i])\n else:\n ks.extend([-i, i])\n if neg:\n ks.append(0)\n if big:\n for i in range(0, len(ks), 2):\n pair = ks[i: i + 2]\n pair = list(reversed(pair))\n\n # compute the roots\n roots, d = [], 2*I*pi/n\n for k in ks:\n zeta = exp(k*d).expand(complex=True)\n roots.append((alpha*zeta).expand(power_base=False))\n\n return roots", "def realpolyroots(*cs):\n if not cs:\n return [0]\n try:\n f = 1.0/cs[0]\n cs = [f*c for c in cs[1:]]\n except ArithmeticError:\n return realpolyroots(*cs[1:])\n else:\n n = len(cs)\n if n == 0:\n return []\n elif n == 1:\n return [-cs[0]]\n elif n == 2:\n return _realroots_quadratic(*cs)\n elif n == 3:\n return _realroots_cubic(*cs)\n elif n == 4:\n return _realroots_quartic(*cs)\n else:\n raise RuntimeError(\"realpolyroots solver currently limited to polynoms up to the power of 4\")", "def root_factors(f, *gens, filter=None, **args):\n args = dict(args)\n\n F = Poly(f, *gens, **args)\n\n if not F.is_Poly:\n return [f]\n\n if F.is_multivariate:\n raise ValueError('multivariate polynomials are not supported')\n\n x = F.gens[0]\n\n zeros = roots(F, filter=filter)\n\n if not zeros:\n factors = [F]\n else:\n factors, N = [], 0\n\n for r, n in ordered(zeros.items()):\n factors, N = factors + [Poly(x - r, x)]*n, N + n\n\n if N < F.degree():\n G = reduce(lambda p, q: p*q, factors)\n factors.append(F.quo(G))\n\n if not isinstance(f, Poly):\n factors = [ f.as_expr() for f in factors ]\n\n return factors", "def roots_linear(f):\n r = -f.nth(0)/f.nth(1)\n dom = f.get_domain()\n\n if not dom.is_Numerical:\n if dom.is_Composite:\n r = factor(r)\n else:\n 
from sympy.simplify.simplify import simplify\n r = simplify(r)\n\n return [r]", "def viete(f, roots=None, *gens, **args):\n allowed_flags(args, [])\n\n if isinstance(roots, Basic):\n gens, roots = (roots,) + gens, None\n\n try:\n f, opt = poly_from_expr(f, *gens, **args)\n except PolificationFailed as exc:\n raise ComputationFailed('viete', 1, exc)\n\n if f.is_multivariate:\n raise MultivariatePolynomialError(\n \"multivariate polynomials are not allowed\")\n\n n = f.degree()\n\n if n < 1:\n raise ValueError(\n \"Cannot derive Viete's formulas for a constant polynomial\")\n\n if roots is None:\n roots = numbered_symbols('r', start=1)\n\n roots = take(roots, n)\n\n if n != len(roots):\n raise ValueError(\"required %s roots, got %s\" % (n, len(roots)))\n\n lc, coeffs = f.LC(), f.all_coeffs()\n result, sign = [], -1\n\n for i, coeff in enumerate(coeffs[1:]):\n poly = symmetric_poly(i + 1, roots)\n coeff = sign*(coeff/lc)\n result.append((poly, coeff))\n sign = -sign\n\n return result", "def roots_quartic(f):\n _, a, b, c, d = f.monic().all_coeffs()\n\n if not d:\n return [S.Zero] + roots([1, a, b, c], multiple=True)\n elif (c/a)**2 == d:\n x, m = f.gen, c/a\n\n g = Poly(x**2 + a*x + b - 2*m, x)\n\n z1, z2 = roots_quadratic(g)\n\n h1 = Poly(x**2 - z1*x + m, x)\n h2 = Poly(x**2 - z2*x + m, x)\n\n r1 = roots_quadratic(h1)\n r2 = roots_quadratic(h2)\n\n return r1 + r2\n else:\n a2 = a**2\n e = b - 3*a2/8\n f = _mexpand(c + a*(a2/8 - b/2))\n aon4 = a/4\n g = _mexpand(d - aon4*(a*(3*a2/64 - b/4) + c))\n\n if f.is_zero:\n y1, y2 = [sqrt(tmp) for tmp in\n roots([1, e, g], multiple=True)]\n return [tmp - aon4 for tmp in [-y1, -y2, y1, y2]]\n if g.is_zero:\n y = [S.Zero] + roots([1, 0, e, f], multiple=True)\n return [tmp - aon4 for tmp in y]\n else:\n # Descartes-Euler method, see [7]\n sols = _roots_quartic_euler(e, f, g, aon4)\n if sols:\n return sols\n # Ferrari method, see [1, 2]\n p = -e**2/12 - g\n q = -e**3/108 + e*g/3 - f**2/8\n TH = Rational(1, 3)\n\n def _ans(y):\n w = sqrt(e + 2*y)\n arg1 = 3*e + 2*y\n arg2 = 2*f/w\n ans = []\n for s in [-1, 1]:\n root = sqrt(-(arg1 + s*arg2))\n for t in [-1, 1]:\n ans.append((s*w - t*root)/2 - aon4)\n return ans\n\n # whether a Piecewise is returned or not\n # depends on knowing p, so try to put\n # in a simple form\n p = _mexpand(p)\n\n\n # p == 0 case\n y1 = e*Rational(-5, 6) - q**TH\n if p.is_zero:\n return _ans(y1)\n\n # if p != 0 then u below is not 0\n root = sqrt(q**2/4 + p**3/27)\n r = -q/2 + root # or -q/2 - root\n u = r**TH # primary root of solve(x**3 - r, x)\n y2 = e*Rational(-5, 6) + u - p/u/3\n if fuzzy_not(p.is_zero):\n return _ans(y2)\n\n # sort it out once they know the values of the coefficients\n return [Piecewise((a1, Eq(p, 0)), (a2, True))\n for a1, a2 in zip(_ans(y1), _ans(y2))]", "def _try_heuristics(f):\n if f.is_ground:\n return []\n if f.is_monomial:\n return [S.Zero]*f.degree()\n\n if f.length() == 2:\n if f.degree() == 1:\n return list(map(cancel, roots_linear(f)))\n else:\n return roots_binomial(f)\n\n result = []\n\n for i in [-1, 1]:\n if not f.eval(i):\n f = f.quo(Poly(f.gen - i, f.gen))\n result.append(i)\n break\n\n n = f.degree()\n\n if n == 1:\n result += list(map(cancel, roots_linear(f)))\n elif n == 2:\n result += list(map(cancel, roots_quadratic(f)))\n elif f.is_cyclotomic:\n result += roots_cyclotomic(f)\n elif n == 3 and cubics:\n result += roots_cubic(f, trig=trig)\n elif n == 4 and quartics:\n result += roots_quartic(f)\n elif n == 5 and quintics:\n result += roots_quintic(f)\n\n return result", "def 
test_cubic_roots(roots, a0, a1, a2, a3=None, tol=1.0e-12):\n\n N = len(a0)\n for n in range(N):\n c0 = a0[n]\n c1 = a1[n]\n c2 = a2[n]\n c3 = a3[n]\n\n print(f\"Polynomial {n}: a = {(c0,c1,c2,c3)}\")\n\n rts = np.unique(roots[n])\n rts = rts[~np.isnan(rts)]\n\n for x in rts:\n f = c0 + c1 * x + c2 * x**2 + c3 * x**3\n ok = np.abs(f) <= tol\n\n print(f\" root x = {x}: f(x) = {f} {'OK' if ok else 'FAILED'}\")\n\n if not ok:\n raise Exception(\"NOT OK!\")\n\n if len(rts) == 0:\n print(\" no real roots.\")", "def find_poly_roots(poly, initial_guess = 0.0, limit = 0.00001, max_iterations = 1000):\n solutions = []\n # Find solutions numerically for n > 0, split them off until n = 2\n for q in range(polynomial.order(poly) - 2):\n x = find_poly_root(poly, initial_guess, limit, max_iterations)\n if not x:\n break\n poly = polynomial.div(poly, polynomial.make_poly([-x, 1]))\n solutions.append(x)\n # Find the rest of the roots analytically\n if polynomial.order(poly) == 1:\n solutions.append(- polynomial.coeff(poly, 1) / polynomial.coeff(poly, 0))\n elif polynomial.order(poly) == 2:\n a = polynomial.coeff(poly, 2)\n b = polynomial.coeff(poly, 1)\n c = polynomial.coeff(poly, 0)\n d = b ** 2 - 4 * a * c\n if d == 0:\n solutions.append(-b / (2 * a))\n elif d > 0:\n solutions.append((- b + sqrt(d)) / (2 * a))\n solutions.append((- b - sqrt(d)) / (2 * a))\n return solutions", "def find_poly_root(poly, initial_guess = 0.0, limit = 0.00001, max_iterations = 1000):\n # Calculate the polynomial derivatives\n dpoly = polynomial.derivative(poly)\n ddpoly = polynomial.derivative(dpoly)\n # Closures !!!\n f = lambda x: polynomial.eval(poly, x)\n df = lambda x: polynomial.eval(dpoly, x)\n ddf = lambda x: polynomial.eval(ddpoly, x)\n # Call the generic root finder\n return find_root(f, df, ddf, initial_guess, limit, max_iterations)", "def solve(n=5000,C=-6*10**11,a=900,b=3):\n coeffs = np.zeros(n+2)\n coeffs[0] = a-b*n\n coeffs[1] = b*(n+1) - a\n coeffs[-3] = -C\n coeffs[-2] = 2*C - a\n coeffs[-1] = a+b-C\n mp.dps = 27\n roots = polyroots(coeffs)\n for root in roots:\n print root", "def getRoots(self):\n # This part is for exercise 11\n # return []\n \n # This part is for exercise 12\n if self.getDegree() == 0:\n return []\n if self.getDegree() == 1:\n return LinearPolynomial(self.getCoefficients()).getRoots()\n if self.getDegree() == 2:\n return QuadraticPolynomial(self.getCoefficients()).getRoots()\n else:\n current_polynomial = Polynomial(self.getCoefficients())\n roots = []\n \n while current_polynomial.__coefficients[0] == 0:\n roots.append(0)\n current_polynomial.__coefficients = current_polynomial.__coefficients[1:]\n \n while current_polynomial.getDegree() > 2:\n\n #Initialization\n x = (random.random(), random.random(), random.random())\n while abs(current_polynomial.evaluate(x[2])) > EPSILON:\n x = (random.random(), random.random(), random.random())\n nb_iters = 0\n while (abs(current_polynomial.evaluate(x[2])) > EPSILON or abs(x[2] - x[1]) > TOLERANCE) and nb_iters < MAX_ITERATIONS:\n x = getNextApproximations(current_polynomial.evaluate, x)\n nb_iters += 1\n\n roots.append(x[2])\n \n if abs(x[2].imag) < TOLERANCE:\n current_polynomial = current_polynomial.divide(Polynomial([-x[2].real, 1]))[0]\n else:\n roots.append(x[2].conjugate())\n current_polynomial = current_polynomial.divide(Polynomial([abs(x[2])**2, -2*x[2].real, 1]))[0]\n \n roots += current_polynomial.getRoots()\n \n for i in range(len(roots)):\n if roots[i].imag == 0:\n roots[i] = roots[i].real\n \n return sorted(roots, key = lambda x: 
(x.real, x.imag))", "def getRoots(self):\n a, b, c = self.getCoefficients()[2], self.getCoefficients()[1], self.getCoefficients()[0]\n delta = b**2 - 4*a*c\n if delta >= 0:\n roots = sorted([(-b - math.sqrt(delta))/(2*a), (-b + math.sqrt(delta))/(2*a)])\n else:\n roots = sorted([(-b - math.sqrt(-delta)*1j)/(2*a), (-b + math.sqrt(-delta)*1j)/(2*a)], key=lambda x: (x.real, x.imag))\n return roots", "def zzX_to_poly(f, *symbols):\n from sympy.polys import Poly\n\n terms = {}\n\n for monom, coeff in zzX_to_dict(f).iteritems():\n terms[monom] = Integer(int(coeff))\n\n return Poly(terms, *symbols)", "def zzx_to_poly(f, *symbols):\n from sympy.polys import Poly\n\n terms = {}\n\n for monom, coeff in zzx_to_dict(f).iteritems():\n terms[(monom,)] = Integer(int(coeff))\n\n return Poly(terms, *symbols)", "def test_solveset_real_rational():\n x = Symbol('x', real=True)\n y = Symbol('y', real=True)\n assert solveset_real((x - y**3) / ((y**2)*sqrt(1 - y**2)), x) \\\n == FiniteSet(y**3)\n # issue 4486\n assert solveset_real(2*x/(x + 2) - 1, x) == FiniteSet(2)", "def count_real_roots(f, inf=None, sup=None):\n return dmp_count_real_roots(f.rep, f.lev, f.dom, inf=inf, sup=sup)", "def _realroots_cubic(a2, a1, a0):\n # see http://mathworld.wolfram.com/CubicFormula.html for details\n\n Q = (3*a1 - a2*a2) / 9.0\n R = (9*a2*a1 - 27*a0 - 2*a2*a2*a2) / 54.0\n D = Q*Q*Q + R*R\n\n if D > 0: # one real and two complex roots\n SD = math.sqrt(D)\n if R + SD >= 0:\n S = (R + SD)**(1/3.0)\n else:\n S = -(-R - SD)**(1/3.0)\n if R - SD >= 0:\n T = (R - SD)**(1/3.0)\n else:\n T = -(SD - R)**(1/3.0)\n return [S + T - a2/3.0]\n elif D == 0:\n if Q == 0: # one real root (R==0)\n return [-a2/3.0]\n else: # two real roots (R>0, Q<0)\n S = -math.sqrt(-Q)\n return [2*S - a2/3.0, -S - a2/3.0]\n else: # three real roots (Q<0)\n SQ = math.sqrt(-Q)\n arg = R / (SQ**3)\n if arg >= 1:\n theta = 0\n elif arg <= -1:\n theta = math.pi\n else:\n theta = math.acos(R/(SQ**3))\n return [2 * SQ * math.cos((theta + 2*2*i*math.pi)/3.0) - a2/3.0 for i in range(3)]", "def refine_real_root(f, s, t, eps=None, steps=None, fast=False):\n return dmp_refine_real_root(f.rep, s, t, f.lev, f.dom, eps=eps, steps=steps, fast=fast)", "def get_roots(self):\n roots = []\n for symbol in self.GlobalSymbolDict.values():\n if symbol.isRoot():\n roots += [symbol]\n return roots", "def _rootsFinder(self, fun, jac, bounds, npoints, method):\n if method == \"regular\":\n step = (bounds[1] - bounds[0]) / (npoints + 1)\n try:\n X0 = np.arange(bounds[0] + step, bounds[1], step)\n except:\n X0 = np.random.uniform(bounds[0], bounds[1], npoints)\n elif method == \"random\":\n X0 = np.random.uniform(bounds[0], bounds[1], npoints)\n\n def objFun(X, f, jac):\n g = 0\n j = np.zeros(X.shape)\n i = 0\n for x in X:\n fx = f(x)\n g = g + fx**2\n j[i] = 2 * fx * jac(x)\n i = i + 1\n return g, j\n\n opt = minimize(\n lambda X: objFun(X, fun, jac),\n X0,\n method=\"L-BFGS-B\",\n jac=True,\n bounds=[bounds] * len(X0),\n )\n\n X = opt.x\n np.round(X, decimals=5)\n return np.unique(X)", "def arroots(self):\n return self.arpoly.roots()", "def refine_complex_root(f, s, t, eps=None, steps=None, fast=False):\n return dmp_refine_complex_root(f.rep, s, t, f.lev, f.dom, eps=eps, steps=steps, fast=fast)", "def root_case1(a,b,c):\n root = {'roots':'All complex numbers are roots'}\n return (\"%(roots)s\" % root)" ]
[ "0.6896479", "0.6696928", "0.65651536", "0.6392125", "0.6328955", "0.6318705", "0.63019294", "0.623209", "0.6114012", "0.5993156", "0.5949037", "0.5923371", "0.5701059", "0.5666536", "0.5619273", "0.5569309", "0.54927284", "0.5336779", "0.52657783", "0.52145165", "0.5202273", "0.51930773", "0.5185924", "0.5172841", "0.51668054", "0.51577866", "0.5142078", "0.5132687", "0.5115469", "0.5107529" ]
0.70961726
0
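For illustration, a short usage sketch of the ``roots`` function documented in the record above, assuming SymPy is installed; the inputs are illustrative and the expected outputs are noted as comments.

```python
from sympy import Poly, roots
from sympy.abc import x

# Default: a dict mapping each root to its multiplicity.
print(roots(Poly(x**3 - 3*x**2 + 3*x - 1, x)))   # {1: 3}

# multiple=True returns a flat, ordered list instead of a dict.
print(roots(x**2 - 1, x, multiple=True))          # [-1, 1]

# filter='Q' keeps only rational roots; +/-sqrt(2) are dropped here.
print(roots(x**2 - 2, x, filter='Q'))             # {}
```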
Returns all factors of a univariate polynomial. Examples ======== >>> from sympy.abc import x, y >>> from sympy.polys.polyroots import root_factors >>> root_factors(x**2 - y, x) [x - sqrt(y), x + sqrt(y)]
def root_factors(f, *gens, filter=None, **args): args = dict(args) F = Poly(f, *gens, **args) if not F.is_Poly: return [f] if F.is_multivariate: raise ValueError('multivariate polynomials are not supported') x = F.gens[0] zeros = roots(F, filter=filter) if not zeros: factors = [F] else: factors, N = [], 0 for r, n in ordered(zeros.items()): factors, N = factors + [Poly(x - r, x)]*n, N + n if N < F.degree(): G = reduce(lambda p, q: p*q, factors) factors.append(F.quo(G)) if not isinstance(f, Poly): factors = [ f.as_expr() for f in factors ] return factors
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _compute_factors(roots, multiplicity, include_powers=False):\n current = cupy.array([1])\n suffixes = [current]\n for pole, mult in zip(roots[-1:0:-1], multiplicity[-1:0:-1]):\n monomial = cupy.r_[1, -pole]\n for _ in range(int(mult)):\n current = cupy.polymul(current, monomial)\n suffixes.append(current)\n suffixes = suffixes[::-1]\n\n factors = []\n current = cupy.array([1])\n for pole, mult, suffix in zip(roots, multiplicity, suffixes):\n monomial = cupy.r_[1, -pole]\n block = []\n for i in range(int(mult)):\n if i == 0 or include_powers:\n block.append(cupy.polymul(current, suffix))\n current = cupy.polymul(current, monomial)\n factors.extend(reversed(block))\n\n return factors, current", "def get_factors():", "def get_factors(number):\n\n factors = [1, number]\n\n for i in range(2, int(math.sqrt(number))):\n if number % i == 0:\n factors.extend([i, number / i])\n\n return(factors)", "def primefactors(n):\n factors = []\n primes = prime_sieve(n)\n\n for p in primes:\n while n % p == 0:\n factors.append(p)\n n /= p\n if n == 1:\n return(factors)\n return([n])", "def factors(self):\n X = [Var(i,2) for i in range(self.nvar)]\n factors = [Factor([],np.exp(self.c))] \n # TODO: exclude if zero? or exclude if inf/-inf, or if in \"assigned\", or?\n factors = factors + [Factor([X[i]],[-th,th]).exp() for i,th in enumerate(self.h) if self.dims[i]>1]\n L = coo(self.L)\n factors = factors + [Factor([X[i],X[j]],[[th,-th],[-th,th]]).exp() for i,j,th in zip(L.row,L.col,L.data) if i<j]\n return factors\n # TODO: should we exponentiate if isLog not True? ", "def factors2(n):\n\tfactors = []\n\ti = 1\n\twhile i <= math.sqrt(n):\n\t\tif n%i == 0:\n\t\t\tfactors.append(i)\n\t\t\tfactors.append(n/i)\n\t\ti += 1\n\treturn factors", "def factors(self, X):\r\n return (lambda fd: [X] if not fd else fd + self.factors(X // fd[0])) (self.firstdiv(X))", "def charpoly_factor_list(self):\n M = self\n K = M.domain\n\n # It is more efficient to start from the partial factorization provided\n # for free by M.charpoly_factor_blocks than the expanded M.charpoly.\n factors = M.charpoly_factor_blocks()\n\n factors_irreducible = []\n\n for factor_i, mult_i in factors:\n\n _, factors_list = dup_factor_list(factor_i, K)\n\n for factor_j, mult_j in factors_list:\n factors_irreducible.append((factor_j, mult_i * mult_j))\n\n return _collect_factors(factors_irreducible)", "def factors(n):\n factors = []\n for x in range(1, int(sqrt(n)+1)):\n if (n % x) == 0:\n factors += [x, n/x]\n \n return sorted(set(factors))", "def _factors(n):\n gen = ([i, n // i] for i in range(1, int(n ** 0.5) + 1) if n % i == 0)\n return set(sum(gen, []))", "def factors(n):\n _factors = []\n p = 1\n\n # Loop until half of n\n while p <= n // 2:\n p += 1\n if div_by(p, _factors):\n continue\n if not n % p:\n _factors.append(p)\n\n # Number given is a prime\n if not _factors:\n _factors.append(n)\n\n return _factors", "def prime_factors(n) -> []:\n i = 2\n factors = []\n while i * i <= n:\n if n % i:\n i += 1\n else:\n n //= i\n factors.append(i)\n if n > 1:\n factors.append(n)\n return factors", "def getallprimefactors(n):\n factors = []\n d = 2\n while n > 1:\n while n % d == 0:\n factors.append(d)\n print(n)\n n /= d\n d += 1\n return factors", "def factorset(self,x):\r\n return set(self.factors(x))", "def find_factors(number):\n \n i = 2\n prod = 1\n factors = []\n sqrt = math.sqrt(number)\n num = number\n \n while i < num:\n div = check_divisbility(number, i)\n if div == 'divisible':\n factors.append(i)\n number /= i\n prod *= i\n recurse = 
find_factors(number)\n \n #I recurse here because it prevents us wasting time playing with large numbers\n for fac in recurse:\n factors.append(fac)\n number /= fac\n prod *= fac\n #stop if we find a factor greater tha sqrt(number)\n if i >= sqrt:\n break\n #make sure we're not looking once we find all the factors \n if prod == num:\n break\n else:\n if i> sqrt:\n if len(factors)==0:\n factors.append(num)\n prod *= num\n else: \n print i\n recurse = find_factors(number)\n for fac in recurse:\n factors.append(fac)\n prod *= fac\n if prod == num:\n break\n i = i+1\n if prod != num:\n raise ValueError (\"This isn't right\")\n return factors", "def factors(n):\r\n\tif n<0: n=-n # Only deal with positive integers\r\n\tif (is_prime(n)):\r\n\t\treturn [n]\r\n\tfact = factorone(n)\r\n\tif (fact == 1): return \"Unable to factor \"+str(n) # Can't deal with units\r\n\tfacts = factors(n/fact) + factors(fact)\r\n\tfacts.sort()\r\n\treturn facts", "def prime_factors(n):\n factors = []\n lastresult = n\n c = 2\n while lastresult != 1:\n if lastresult % c == 0 and c % 2 > 0:\n factors.append(c)\n lastresult /= c\n c += 1\n else:\n c += 1\n return factors[0], factors[1]", "def prime_factors(number):\n factors = []\n\n if number == 0 : return factors\n\n # first round factors by two\n while number % 2 == 0:\n factors.append(2)\n number /= 2\n\n # other rounds goes by odd numbers only (no other even is prime)\n divisor = 3\n while divisor <= number:\n while number % divisor == 0:\n factors.append(divisor)\n number /= divisor\n divisor += 2\n\n return factors", "def get_factors(self, triples):\n pass", "def calculate_factors(x):\n print(\"The factors of\", x, \"are:\")\n for i in range(1, x + 1):\n if x % i == 0:\n print(i)", "def prime_factors(n):\n\n factors = []\n lastresult = n\n c = 2\n while lastresult != 1:\n if lastresult % c == 0 and c % 2 > 0:\n factors.append(c)\n lastresult /= c\n c += 1\n else:\n c += 1\n return factors[0], factors[1]", "def get_factors(val):\n N = np.sqrt(val)\n N = np.floor(N)\n M = val/N\n\n while (val % N != 0):\n N = N-1\n M = val/N\n\n return int(M), int(N)", "def factors(num):\n\tif is_prime(num) == True:\n\t\tfactors = [1, num]\n\t\treturn factors\n\telse:\n\t\tfactors = [1]\n\t\tsquare_root = int(math.ceil(math.sqrt(num)))\n\t\t\n\t\tfor n in range(2, square_root+1):\n\t\t\tif num % n == 0:\n\t\t\t\tfactors.append(n)\n\n\t\tfor n in range(1, len(factors)):\n\t\t\tnew_n = num / factors[n]\n\t\t\tif new_n not in factors:\n\t\t\t\tfactors.append(num / factors[n])\n\n\t\tfactors.append(num)\n\t\treturn factors", "def factors(self):\n self.assert_sampled()\n return self._factors", "def factors(n):\n\tif n<0: n=-n # Only deal with positive integers\n\tif (is_prime(n)):\n\t\treturn [n]\n\tfact = factorone(n)\n\tif ((abs(n) == 1) or (n == 0)): raise ValueError('Unable to factor \\\"{0}\\\"'.format(n))\n\tfacts = factors(n//fact) + factors(fact)\n\tfacts.sort()\n\treturn facts", "def prime_factors(n):\n if n < 2 or n - round(n) != 0:\n print('Numbers smaller than 2 and non-integers do not have prime',\n 'factors')\n L = []\n while n >= 2:\n i = low_prime(n)\n L.append(i)\n n //= i\n return L", "def find_factors(num):\n factors = set()\n i = 1\n while i*i < num:\n if num % i == 0:\n factors.add(i)\n factors.add(int(num/i))\n i+=1\n factors = list(factors)\n factors.sort()\n return factors", "def prime_factors(n):\n\n prime_set = primes(n)\n factors = []\n for prime in prime_set:\n if n % prime == 0:\n factors.append(prime)\n return factors", "def factors(n):\n f = 
list(reduce(list.__add__, ([i, n // i] for i in range(1, int(pow(n, 0.5) + 1)) if n % i == 0)))\n return sorted(f)", "def factors(n: int) -> List[int]:\n k = 1\n while k**2 < n:\n if n % k == 0:\n yield k\n k += 1\n\n k = int(n**(1/2))\n while k > 0:\n if n % k == 0:\n yield n // k\n k -= 1" ]
[ "0.7069163", "0.6946843", "0.6769513", "0.6616636", "0.6602128", "0.65511125", "0.65452355", "0.64717513", "0.63566077", "0.6323654", "0.6308325", "0.62574553", "0.62244284", "0.6213336", "0.6190737", "0.6173634", "0.6167063", "0.61606395", "0.6150739", "0.61217386", "0.61190355", "0.61189425", "0.6116596", "0.6115597", "0.6110868", "0.60769904", "0.6042041", "0.603954", "0.6025485", "0.6011323" ]
0.7182827
0
Modify the land cover to create rocks in the large gradient pixels (large steepness)
def set_rocks_in_grad(self, elevation, landcover): # Compute the steepness of each pixel grad = gaussian_gradient_magnitude(elevation, 1.0) grad /= self.mercator.Resolution(self.__zoom) # Get the mask of rock (with a smooth transition) mask = (grad >= ROCK_STEEPNESS).astype(np.float) mask = gaussian_filter(mask, 3.0) # Blend the images dtype = landcover.dtype rock_image = np.zeros(landcover.shape, dtype=dtype) rock_image[:,:] = ROCK_COLOR for i in range(3): rock_image[:,:,i] = (mask * rock_image[:,:,i]).astype(dtype) landcover[:,:,i] = ((1.0 - mask) * landcover[:,:,i]).astype(dtype) landcover += rock_image return landcover
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def preprocess_land_cover(\n src_files, dst_raster, dst_crs, dst_bounds, dst_res, geom=None, overwrite=False\n):\n if os.path.isfile(dst_raster) and not overwrite:\n log.info(\"Land cover data already preprocessed. Skipping.\")\n return\n log.info(\"Starting preprocessing of land cover data.\")\n LC_CLASSES = [\n \"bare\",\n \"crops\",\n \"grass\",\n \"moss\",\n \"shrub\",\n \"tree\",\n \"urban\",\n \"water-permanent\",\n \"water-seasonal\",\n ]\n with TemporaryDirectory(prefix=\"geohealthaccess_\") as tmpdir:\n\n tmpdir = Path(tmpdir)\n for tile in src_files:\n unzip(tile, tmpdir)\n\n reprojected_files = []\n tile_names = unique_tiles(tmpdir)\n\n if not tile_names:\n raise MissingDataError(\"Land cover data not found.\")\n\n for lc_class in LC_CLASSES:\n tiles = [\n p.as_posix()\n for p in tmpdir.glob(f\"*{lc_class}-coverfraction-layer*.tif\")\n ]\n if len(tiles) > 1:\n src_file = merge_tiles(\n tiles, os.path.join(tmpdir, f\"{lc_class}_mosaic.tif\"), nodata=255,\n )\n else:\n src_file = tiles[0]\n reprojected_files.append(\n reproject(\n src_raster=src_file,\n dst_raster=os.path.join(tmpdir, f\"{lc_class}.tif\"),\n dst_crs=dst_crs,\n dst_bounds=dst_bounds,\n dst_res=dst_res,\n src_nodata=255,\n dst_nodata=255,\n dst_dtype=\"Byte\",\n resampling_method=\"cubic\",\n overwrite=overwrite,\n )\n )\n\n if len(reprojected_files) > 1:\n raster = concatenate_bands(\n src_files=reprojected_files,\n dst_file=dst_raster,\n band_descriptions=LC_CLASSES,\n )\n else:\n raster = reprojected_files[0]\n\n if geom:\n mask_raster(raster, geom)", "def forehead_region_growing(img, seed,forehead_inf_limit, threshold=1):\n\n dims = img.shape\n reg = np.zeros(dims, np.uint8)\n\n #parameters\n mean_reg = float(img[seed[1], seed[0]])\n size = 1\n pix_area = dims[0]*dims[1]\n\n contour = [] # will be [ [[x1, y1], val1],..., [[xn, yn], valn] ]\n contour_val = []\n dist = 0\n # TODO: may be enhanced later with 8th connectivity\n orient = [(1, 0), (0, 1), (-1, 0), (0, -1)] # 4 connectivity\n cur_pix = [seed[0], seed[1]]\n\n #Spreading\n while(dist<threshold and size<pix_area):\n #adding pixels\n for j in range(4):\n #select new candidate\n temp_pix = [cur_pix[0] +orient[j][0], cur_pix[1] +orient[j][1]]\n\n #check if it belongs to the image\n is_in_img = dims[0]>temp_pix[0]>0 and dims[1]>temp_pix[1]>=forehead_inf_limit #returns boolean\n #candidate is taken if not already selected before\n if (is_in_img and (reg[temp_pix[1], temp_pix[0]]==0)):\n contour.append(temp_pix)\n contour_val.append(img[temp_pix[1], temp_pix[0]] )\n reg[temp_pix[1], temp_pix[0]] = 150\n #add the nearest pixel of the contour in it\n dist = abs(int(np.mean(contour_val)) - mean_reg)\n\n dist_list = [abs(i - mean_reg) for i in contour_val ]\n dist = min(dist_list) #get min distance\n index = dist_list.index(min(dist_list)) #mean distance index\n size += 1 # updating region size\n reg[cur_pix[1], cur_pix[0]] = 255\n\n #updating mean MUST BE FLOAT\n mean_reg = (mean_reg*size + float(contour_val[index]))/(size+1)\n #updating seed\n cur_pix = contour[index]\n\n #removing pixel from neigborhood\n del contour[index]\n del contour_val[index]\n\n return reg", "def make_glow_model(im_in, bin_x=1, bin_y=1):\n im=im_in.copy()\n im[0]=im[2]\n im[1]=im[2]\n im[-1]=im[-2]\n \n #glow image\n glow=np.zeros_like(im)\n \n #meshgrid\n x, y = np.meshgrid(np.arange(im.shape[1]), np.arange(im.shape[0]))\n \n \n def model_corner(im, x0, y0, xw, yw, iparams, std_clip=0):\n \"\"\" std_clip is the y height of the small corner to use to exclude\n spectra in the large 
corner,\n \n (iparams=(glow amp, x center, y center, xwid, ywid, xy amount)\n \n positions and initial params adjusted automatically for binning\n pass coordinates in 4k positions\n \"\"\"\n x0/=bin_x\n y0/=bin_y\n xw/=bin_x\n yw/=bin_y\n iparams=list(iparams)\n iparams[1]/=bin_x\n iparams[2]/=bin_y\n iparams[3]/=bin_x\n iparams[4]/=bin_y\n \n corner=im[y0:y0+yw,x0:x0+xw].copy()\n if std_clip:\n small_corner=im[y0:y0+std_clip,x0:x0+xw].copy()\n patch_locs=corner>2*small_corner.std()\n patch_locs[:y0+std_clip,:]=False\n corner[patch_locs]=np.median(small_corner)\n cim, param= gaussfit2D(corner, iparams)\n param=list(param)\n param[-1]=0\n param[1]+=x0\n param[2]+=y0\n return gauss2D(( x,y), *param)\n \n #Lower R\n try:\n tmp=model_corner(im, 3996, 2, 100, 100,\n (150, 58, -7, 30.0, 20.0, 0, 0))\n if tmp.min() < 0:\n raise RuntimeError('Glow model has negative values')\n else:\n glow+=tmp\n\n except RuntimeError, e:\n print 'Lower R glow model failed: {}'.format(str(e))\n\n #Lower L\n try:\n tmp=model_corner(im, 0, 2, 100, 100,\n (150, 40, 0, 30.0, 20.0, 0, 0),\n std_clip=50)\n if tmp.min() < 0:\n raise RuntimeError('Glow model has negative values')\n else:\n glow+=tmp\n\n except RuntimeError, e:\n print 'Lower L glow model failed: {}'.format(str(e))\n \n\n #Upper L\n try:\n tmp=model_corner(im, 0, 4012, 100, 100,\n (150, 40, 100, 30.0, 20.0, 0, 0))\n if tmp.min() < 0:\n raise RuntimeError('Glow model has negative values')\n else:\n glow+=tmp\n\n except RuntimeError, e:\n print 'Upper L glow model failed: {}'.format(str(e))\n\n #Upper R\n try:\n tmp=model_corner(im, 3996, 4000, 100, 100,\n (150, 58, 100, 30.0, 20.0, 0, 0))\n if tmp.min() < 0:\n raise RuntimeError('Glow model has negative values')\n else:\n glow+=tmp\n\n except RuntimeError, e:\n print 'Upper R glow model failed: {}'.format(str(e))\n \n return glow", "def colorfull_house(self):\n image = self.__image.copy()\n palette_path = self.get_image_path(\"color_palette.png\")\n self.color_palette = cv.resize(cv.imread(palette_path, 1),(598,245))\n self.pixel_picked = np.array([0,0,0])\n\n cv.namedWindow(\"Color Palette\")\n cv.setMouseCallback(\"Color Palette\", self.click_mouse_callback)\n cv.imshow(\"Color Palette\", self.color_palette)\n\n while (True):\n for i in iter(range(self.__rows)):\n for j in iter(range(self.__cols)):\n if self.segmentation_condition(i,j):\n image[i,j] = self.pixel_picked\n \n cv.imshow(\"Colorfull House\",image)\n if (cv.waitKey(1) & 0xFF == ord('q')):\n break", "def draw_cover():\n \n game_cover = games[selected_game].cover\n \n # Draw game cover art\n for i in range(len(game_cover)):\n for j in range(len(game_cover[0])):\n led_x = i + 1\n led_y = j + 2\n lp.led_ctrl_xy(led_x, led_y, *game_cover[i][j])", "def do_full(self, image,hsv,upper,lower,debug=False):\n single_color_img = self.extract_single_color_range(image,hsv,lower,upper)\n if debug:\n # cv2.imshow('single_color_img',single_color_img)\n cv2.imwrite('debug_pics/single_color_img.jpg',single_color_img)\n single_channel = self.threshold_image(single_color_img,debug)\n if debug:\n # cv2.imshow('single_channel',single_channel)\n cv2.imwrite('debug_pics/single_channel.jpg',single_channel)\n cont,hierarchy = self.contours(single_channel,debug)\n\n if debug:\n for i,cnt in enumerate(cont):\n cv2.drawContours(single_channel,cont,i,(0,0,255),2)\n if debug: cv2.imwrite('debug_pics/contours.jpg',single_channel) #cv2.imshow('contours',single_channel)\n\n return self.get_bricks(cont)", "def update_map(self, boundaries):\n image = Image.open(self.image_file)\n 
update_pixels = ImageDraw.Draw(image)\n for i in range(len(boundaries) - 1):\n update_pixels.point(boundaries[i], fill=self.path_color)\n if self.season ==\"winter\":\n image.save(\"temp_winter.png\")\n elif self.season ==\"spring\":\n image.save(\"temp_spring.png\")\n else:\n image.save(\"temp_fall.png\")", "def stain_image(image, num_stains, color):", "def burning_ship(pixels, width, height, max_iterations, re_start, re_end, im_start, im_end, color_hue,\n color_saturation, color_intensity):\n\n for x in prange(0, width):\n for y in prange(0, height):\n c = complex((re_start + (x / width) * (re_end - re_start)),\n (im_start + (y / height) * (im_end - im_start)))\n z = 0.0j\n\n iterations = 0\n while (abs(z) < 4.0) and iterations < max_iterations:\n abs_z = complex(abs(z.real), abs(z.imag))\n z = abs_z * abs_z + c\n iterations += 1\n\n # Color smoothing\n smooth_iterations = iterations - math.log(math.log(z.real * z.real + z.imag * z.imag)) + 4.0\n\n if iterations >= max_iterations:\n pixels[x, y, 0] = 0\n pixels[x, y, 1] = 0\n pixels[x, y, 2] = 0\n else:\n pixels[x, y, 0] = 255 * (color_hue / 360)\n pixels[x, y, 1] = 255 * color_saturation\n pixels[x, y, 2] = 255 * min(color_intensity * smooth_iterations / max_iterations, 1)", "def green_screen( orig_image, new_bg_image, corner=(0,0) ):\n\n #\n # First let's create a universal height and figure out what the length\n # should be for the smaller picture\n #\n bg_height = new_bg_image.shape[0]\n og_height = orig_image.shape[0]\n bg_length = new_bg_image.shape[1]\n og_length = orig_image.shape[1]\n if bg_height > og_height:\n new_height = bg_height\n new_length1 = (new_height / orig_image.shape[0]) * orig_image.shape[1]\n new_length1 = int(new_length1)\n else:\n new_height = og_height\n new_length1 = (new_height / new_bg_image.shape[0]) * new_bg_image.shape[1]\n new_length1 = int(new_length1)\n\n #\n # Then let's resize the images accordingly\n #\n if bg_height > og_height:\n orig_image = cv2.resize(orig_image, dsize=(new_length1, new_height), interpolation=cv2.INTER_LINEAR)\n else:\n new_bg_image = cv2.resize(new_bg_image, dsize=(new_length1, new_height), interpolation=cv2.INTER_LINEAR)\n\n #\n # Let's employ the helper function whatsgreen() to get a new one green image\n # Then with that image let's replace the green pixels with the corresponding pixel from\n # the background image\n #\n green = whatsgreen(orig_image)\n num_rows, num_cols, num_chans = green.shape\n final = green.copy()\n for row in range(num_rows):\n for col in range(num_cols):\n r, g, b = green[row, col]\n print(row, col)\n if r == 0 and g == 255 and b == 0:\n final[row, col] = new_bg_image[row, col]\n\n #\n #\n #\n if bg_length > og_length:\n final_final = new_bg_image.copy()\n else:\n final_final = orig_image.copy()\n\n num_rows, num_cols, num_chans = final.shape\n print(num_rows, num_cols)\n for row in range(num_rows):\n for col in range(num_cols):\n final_final[row, col] = final[row, col]\n return final_final", "def bndy_plasma(self):\n self.ne[0], self.ne[-1] = 1e11, 1e11\n self.ni[0], self.ni[-1] = 1e11, 1e11\n self.nn[0], self.nn[-1] = 1e11, 1e11\n self.Te[0], self.Te[-1] = 0.1, 0.1\n self.Ti[0], self.Ti[-1] = 0.01, 0.01\n # self.coll_em[0], self.coll_em[-1] = 1e5, 1e5\n # self.coll_im[0], self.coll_im[-1] = 1e5, 1e5", "def __init__(self):\n DetectLandmarks.__init__(self)\n self.red_l = 0\n self.green_l = 0\n self.blue_l = 0\n self.red_e = 0\n self.green_e = 0\n self.blue_e = 0\n self.debug = 0\n self.image = 0\n self.width = 0\n self.height = 0\n self.im_copy = 0\n 
self.lip_x = []\n self.lip_y = []", "def grass_drass():", "def smooth_image(img_file=\"cy_double.png\"):\n \n oldimg, newimg, width, height, win = setup_image(img_file)\n\n for col in range(newimg.getWidth()):\n for row in range(newimg.getHeight()):\n p = newimg.getPixel(col, row)\n neighbors = []\n # Put the 8 surrounding pixels into neighbors\n for i in range(col-1, col+2):\n for j in range(row-1, row+2):\n try:\n neighbor = newimg.getPixel(i, j)\n neighbors.append(neighbor)\n except:\n continue\n nlen = len(neighbors)\n # Average out the RBG values\n if nlen:\n # Uncommented, the following line would leave most of the white \n # untouched which works a little better for real photographs, imo.\n #~ if nlen and p[0]+p[1]+p[2] < 690:\n p.red = sum([neighbors[i][0] for i in range(nlen)])/nlen\n p.green = sum([neighbors[i][1] for i in range(nlen)])/nlen\n p.blue = sum([neighbors[i][2] for i in range(nlen)])/nlen\n newimg.setPixel(col,row,p)\n\n write_image(img_file, newimg, win, \"_smooth\")", "def cloud_cover_to_ghi_linear(cloud_cover, ghi_clear, offset=35):\n\n offset = offset / 100.\n cloud_cover = cloud_cover / 100.\n ghi = (offset + (1 - offset) * (1 - cloud_cover)) * ghi_clear\n return ghi", "def dem_jpeg(dem_file):\n out_file = dem_file+'.jpeg'\n rsc_file = out_file+'.rsc'\n shutil.copy2(dem_file+'.rsc', rsc_file)\n # read data\n dem = readfile.read(dem_file)[0]\n print('dem.shape:',dem.shape)\n # figure size\n ds_shape = tuple(reversed(dem.shape))\n fig_dpi = 300\n fig_size = [i / fig_dpi for i in ds_shape]\n print('fig_size:',fig_size)\n # color range\n disp_min = np.nanmin(dem) - 4000\n disp_max = np.nanmax(dem) + 2000\n # prepare shaded relief\n ls = LightSource(azdeg=315, altdeg=45)\n dem_shade = ls.shade(dem, vert_exag=0.3, cmap=plt.get_cmap('gray'), vmin=disp_min, vmax=disp_max)\n dem_shade[np.isnan(dem_shade[:, :, 0])] = np.nan\n print('dem_shade.shape:',dem_shade.shape)\n # plot\n fig, ax = plt.subplots(figsize=fig_size)\n ax.imshow(dem_shade, interpolation='spline16', origin='upper')\n # get rid of whitespace on the side\n ax.axis('off')\n ax.get_xaxis().set_ticks([])\n ax.get_yaxis().set_ticks([])\n fig.subplots_adjust(left=0,right=1,bottom=0,top=1)\n # output\n print('save figure to file {}'.format(out_file))\n plt.savefig(out_file, transparent=True, dpi=300, pad_inches=0.0)\n \n #resize to desired size(FA 8/19, unclear why size is wrong)\n im = Image.open(out_file)\n im_out = im.resize(dem.shape, Image.NEAREST)\n im_out.save(out_file)\n \n #plt.show()", "def inpaint(self, img_slice, mask_slice, min_x, max_x, min_y, max_y, views='lateral'):\n # create binary mask\n mask = np.zeros(img_slice.shape)\n mask[min_x:max_x, min_y:max_y] = 1\n # keep a copy of original to have background later \n img_orig = np.copy(img_slice)\n mask_binary = np.copy(mask)\n\n # rotate image if coronal\n if views=='coronal':\n img_slice = np.rot90(img_slice, axes=(1, 0)) # image is from lat,ax -> ax,lat\n mask_slice = np.rot90(mask_slice, axes=(1, 0))\n mask = np.rot90(mask, axes=(1, 0))\n \n # prepare binary mask for net\n mask = cv2.resize(mask, self.resize_size, interpolation=cv2.INTER_NEAREST)\n mask = torch.Tensor(mask) # gives dtype float32\n mask = mask.unsqueeze(0)\n mask = mask.unsqueeze(0)\n\n # prepare seg mask for net\n mask_slice[mask_slice==self.vertebra_id] = 0\n # resize to network size\n mask_seg = cv2.resize(mask_slice, self.resize_size, interpolation=cv2.INTER_NEAREST)\n mask_seg = np.uint8(np.round(mask_seg)) # just to be sure\n\n mask_seg = self.map_vert_to_class(mask_seg)\n 
mask_seg = torch.Tensor(mask_seg) # gives dtype float32\n mask_seg_one_hot = torch.nn.functional.one_hot(mask_seg.long(), num_classes=6)\n mask_seg_one_hot = mask_seg_one_hot.permute(2,0,1)\n mask_seg_one_hot = mask_seg_one_hot.unsqueeze(0)\n mask_seg = mask_seg.unsqueeze(0)\n mask_seg = mask_seg.unsqueeze(0)\n\n # prepare img for net \n img_slice = cv2.resize(img_slice, self.resize_size)\n img_slice = np.clip(img_slice, -1024, 3071) # clip to HU units\n img_slice = np.uint8(255*(img_slice+1024)/4095) # normalize to range 0-255 \n img_slice = img_slice[:,:, None]\n img_slice = self.toTensor(img_slice)\n img_slice = img_slice.unsqueeze(0)\n corrupt_img = (1-mask)*img_slice\n\n if self.use_cuda:\n mask = mask.cuda()\n mask_seg = mask_seg.cuda()\n corrupt_img = corrupt_img.cuda() \n\n # inpaint\n if views=='lateral':\n netG = self.netGlat\n elif views=='coronal':\n netG = self.netGcor\n\n # get prediction\n with torch.no_grad():\n _, inpainted_mask, inpainted_img = netG(corrupt_img, mask_seg, mask)\n inpainted_mask = self.softmax(inpainted_mask)\n\n #inpainted_mask = torch.argmax(inpainted_mask, dim=1)\n inpainted_img = inpainted_img * mask + corrupt_img * (1. - mask)\n inpainted_mask = inpainted_mask * mask + mask_seg_one_hot * (1. - mask)\n #inpainted_mask = self.map_class_to_vert(inpainted_mask)\n\n # set img back to how it was\n inpainted_img = inpainted_img.squeeze().detach().cpu().numpy()\n inpainted_img = (inpainted_img)*4095 - 1024 # normalize back to HU units \n inpainted_img = cv2.resize(inpainted_img, (self.orig_ax_length, self.orig_ax_length))\n # set mask back\n inpainted_mask = inpainted_mask.squeeze().detach().cpu().numpy()\n inpainted_mask_resized = np.zeros((6, self.orig_ax_length, self.orig_ax_length))\n for i in range(6):\n if views=='coronal':\n inpainted_mask_resized[i,:,:] = np.rot90(cv2.resize(inpainted_mask[i,:,:], (self.orig_ax_length, self.orig_ax_length))) #, interpolation=cv2.INTER_NEAREST)\n else:\n inpainted_mask_resized[i,:,:] = cv2.resize(inpainted_mask[i,:,:], (self.orig_ax_length, self.orig_ax_length)) #, interpolation=cv2.INTER_NEAREST)\n inpainted_mask = inpainted_mask_resized\n \n if views=='coronal':\n inpainted_img = np.rot90(inpainted_img) #, axes=(1, 0))\n\n return inpainted_img, inpainted_mask, mask_binary", "def ground(pos, mainColor= wPink, secondColor=wPurple):\n mc.setBlocks(pos.x, pos.y-1,pos.z,pos.x+3, pos.y-1,pos.z, wool, secondColor)\n \n i = 1\n #mc.setBlock(pos.x-1, pos.y-1,pos.z+1,wool, wPink)\n\n while i <= 8:\n mc.setBlock(pos.x-i, pos.y-1,pos.z+i,wool, secondColor)\n\n mc.setBlocks(pos.x-i+1, pos.y-1, pos.z+i,\n pos.x-i+1+2+(2*i), pos.y-1,pos.z+i,\n wool, mainColor)\n\n mc.setBlock(pos.x-i+3+(2*i), pos.y-1,pos.z+i,\n wool, secondColor)\n i += 1\n\n # build the larger area\n mc.setBlocks(pos.x-14, pos.y-1,pos.z+9,pos.x+17, pos.y-1,pos.z+37, wool, secondColor)\n mc.setBlocks(pos.x-13, pos.y-1,pos.z+10,pos.x+16, pos.y-1,pos.z+36, wool, mainColor)\n\n # remove purple line in front\n mc.setBlocks(pos.x-8, pos.y-1,pos.z+9,pos.x+11, pos.y-1,pos.z+9, wool, mainColor)\n\n ## End of ground fucntion ##", "def drawLatentClassPercent(regions, lclass, \\\n sensorLocations=\"../../data/locations/bb_floor2_locations_old.txt\", \\\n writeLocation = \"../../output/latent.png\", \\\n sensorDirections = \"../../data/locations/bb_floor2_draw_directions.txt\", \\\n bgImage = \"../../images/bb_floor2.png\", \\\n sensorSize = 12, \\\n baseSize = 10, \\\n length = 50, \\\n width = 20):\n\n im = Image.open(bgImage)\n d = ImageDraw.Draw(im)\n locations = []\n 
directions = []\n classSpot = 0\n\n #Open and parse the locations file\n f = open(sensorLocations, 'r')\n\n for line in f.readlines():\n split = line.split(' ')\n locations.append((split[1], split[2], split[0]))\n\n f = open(sensorDirections, 'r')\n\n for line in f.readlines():\n try:\n split = line.split(' ')\n directions.append(int(split[1]))\n except:\n pass\n\n _drawSensors(d, locations, sensorSize)\n\n for c in regions:\n\n c.matrixToModel(c.modelList)\n\n #Get first sensor\n sens = c.sensors[0]\n print sens\n\n sindex = bbdata.allSensors.index(sens)\n\n foo = locations[sindex]\n foodir = directions[sindex]\n\n x = int(foo[0])\n y = int(foo[1])\n\n if foodir == 0:\n y -= length + sensorSize + 5\n x -= sensorSize - 5\n if foodir == 1:\n x += sensorSize + 5\n y -= sensorSize - 5\n if foodir == 2:\n y += length + 10\n x -= sensorSize - 5\n if foodir == 3:\n x -= width + sensorSize + 5\n y -= sensorSize - 5 \n\n total = []\n localSum = 0\n for m in c.models:\n total.append(lclass[classSpot])\n classSpot += 1\n\n #Normalize\n total = [i / (sum(total) * 1.0) for i in total]\n \n print total\n _drawRatios(d, total, x, y, length = length, width = width)\n\n im.save(writeLocation, \"PNG\")", "def detect_edges_better(original_image: Image, threshold: int) -> Image :\r\n \r\n new_image = copy(original_image)\r\n final_image = copy(new_image)\r\n \r\n # Only two colours to be used\r\n black = create_color(0, 0, 0)\r\n white = create_color(255, 255, 255)\r\n \r\n # determine the width and height of the image\r\n pixel_width = get_width(original_image)\r\n pixel_height = get_height(original_image)\r\n \r\n # Pixel color change for all rows except the last row\r\n for x in range(pixel_width - 1) :\r\n for y in range(pixel_height - 1) : # Last row must be all white\r\n (r1,g1,b1) = get_color(new_image, x, y) # RGB components of the top pixel\r\n (r2,g2,b2) = get_color(new_image, x, y + 1) # RGB components of the bottom pixel\r\n (r3,g3,b3) = get_color(new_image, x + 1, y) # RGB components of the bottom pixel\r\n\r\n top_brightness = (r1 + g1 + b1)//3 \r\n bottom_brightness = (r2 + g2 + b2)//3\r\n side_brightness = (r3 + g3 + b3)//3\r\n \r\n if abs(top_brightness - bottom_brightness) > threshold or abs(top_brightness - side_brightness) > threshold :\r\n set_color(final_image,x,y,black)\r\n else :\r\n set_color(final_image,x,y,white) \r\n \r\n last_row = pixel_height - 1\r\n last_column = pixel_width - 1\r\n \r\n \r\n # Set the last row pixels to white\r\n for last_row in range(pixel_height) : \r\n for i in range(pixel_width) :\r\n set_color(final_image,x,y,white)\r\n \r\n # Set the last column pixels to white\r\n for last_column in range(pixel_width) : \r\n for j in range(pixel_height) :\r\n set_color(final_image,x,y,white)\r\n \r\n \r\n return final_image", "def neighboring_light(glow, block):\n\n return clamp(glow - blocks[block].dim, 0, 15)", "def overlay_edge_maps(lval=100):\n o_path = Path(f'../img/canny/overlay{lval}/')\n bb = [] # reuse bbox values by storing outside of the loop context\n fg = Image.open('../img/hand-edge-scaled-overlay.png')\n for n in np.arange(120,420,20):\n im = Image.open(f'../img/canny/hand-edge-{lval}-{n}.png')\n if bb == []:\n bb = bbox(np.invert(im))\n a, b, c, d = bb\n imcrop = im.crop((c+3, a+3, d-2, b-2))\n m = multi(fg, imcrop)\n m.save(o_path / f'hand-overlay-{lval}-{n}.png')\n call(['convert', '-delay', '20', o_path / f'hand-overlay-{lval}-*.png',\n o_path / f'hand-overlay-{lval}-anim.gif'])\n return", "def make_layers(self):\n w, h = self.image.get_size()\n 
shrink = pg.transform.smoothscale(self.image, (w//2, h//2))\n self.mid_image = tools.tile_surface((w,h), shrink, True)\n shrink = pg.transform.smoothscale(self.image, (w//4, h//4))\n self.base = tools.tile_surface(prepare.SCREEN_SIZE, shrink, True)", "def CreateLandmask(Fieldset, test = False):\n \n \n \"\"\"\n This first set of lines creates a numpy array with u velocities and a numpy\n array with v velocities. First we get the U and V fields from the dataset. Then\n we compute a time chunk, which is needed because of the dataset. Then we only\n take the first slice of the U and V field (we do not need more for finding the land\n and ocean grids). As last we make an empty array which will be filled with zeros and \n ones.\n \"\"\"\n fU = Fieldset.U\n fV = Fieldset.V\n Fieldset.computeTimeChunk(fU.grid.time[0], 1) \n uvel_mask_c = fU.data[0,:,:] \n vvel_mask_c = fV.data[0,:,:]\n# vvel_mask_c = np.roll(vvel_mask_c, 1, axis = 0)\n landmask = np.zeros((uvel_mask_c.shape[0], uvel_mask_c.shape[1]))\n \n \"\"\"\n The first loop checks the value of the u and v velocitites. Notice that we get the\n values of two adjacent grid, since we're working with a C-grid.\n Visualizations of velocities in the C-grids(see below). So for a grid to be flagged identified\n as a land grid two U velocities and 2 V velocities need to be zero. The first loop makes all\n ocean grids 1 and land grids 0. \n ____ ____ ____ ____\n | V | V | \n | | | \n U T U T U\n | | | \n |____V____|_____V_____| \n \"\"\"\n \n for i in range (len(landmask[:,0])-1):\n for j in range (len(landmask[0,:])-1):\n u1 = uvel_mask_c[i,j]\n\n u2 = uvel_mask_c[i,j+1]\n\n v1 = vvel_mask_c[i,j]\n\n v2 = vvel_mask_c[i+1,j]\n\n if u1 != 0 or u2 != 0 or v1 != 0 or v2 != 0:\n landmask[i,j] = 1\n \n \n \"\"\"\n Change all zero to 1 and rest 0. since we want the land grids to be 1 and ocean\n grids to be 0. \n \"\"\"\n \n landmask = ChangeValues(landmask,0,1) \n \n \"\"\"\n The created landmask needs to be shifted upwards one grid. 
We will\n use the numpy roll function to do this.\n \"\"\"\n \n if test == True:\n plt.figure()\n plt.imshow(landmask)\n plt.colorbar()\n \n return landmask", "def split_necessity(self):\n return max(self._color_var_rel) * self.n_pix\n # return reduce(int.__mul__, (l-u for u,l in self.bounds)) * self.n_pix", "def set_color_range(mic, N, indx, mat, quat, rod):\r\n first = True\r\n #print(indx)\r\n for i in range(N):\r\n if i in indx:\r\n mat[i,:,:] = RotRep.EulerZXZ2Mat(mic.snp[i,6:9]/180.0*np.pi)\r\n quat[i,:] = RotRep.quaternion_from_matrix(mat[i,:,:])\r\n rod[i,:] = RotRep.rod_from_quaternion(quat[i,:])\r\n if first:\r\n maxr = rod[i,0]\r\n minr = rod[i,0]\r\n maxg = rod[i,1]\r\n ming = rod[i,1]\r\n maxb = rod[i,2]\r\n minb = rod[i,2]\r\n maxri = i\r\n minri = i\r\n maxgi = i\r\n mingi = i\r\n maxbi = i\r\n minbi = i\r\n first = False\r\n else:\r\n if rod[i,0] > maxr:\r\n maxr = rod[i,0]\r\n maxri = i\r\n elif rod[i,0] < minr:\r\n minr = rod[i,0]\r\n minri = i\r\n if rod[i,1] > maxg:\r\n maxg = rod[i,1]\r\n maxgi = i\r\n elif rod[i,1] < ming:\r\n ming = rod[i,1]\r\n mingi = i\r\n if rod[i,2] > maxb:\r\n maxb = rod[i,2]\r\n maxbi = i\r\n elif rod[i,2] < minb:\r\n minb = rod[i,2]\r\n minbi = i\r\n else:\r\n rod[i,:]=[0.0,0.0,0.0]\r\n #print(\"Current rod values: \",rod)\r\n maxrgb = [maxr,maxg,maxb]\r\n minrgb = [minr,ming,minb]\r\n maxangs = [rod[maxri,0],rod[maxgi,1],rod[maxbi,2]]\r\n minangs = [rod[minri,0],rod[mingi,1],rod[minbi,2]]\r\n colors = rod\r\n for j in range(N):\r\n for k in range(0,3):\r\n colors[j,k] = (rod[j,k]-minrgb[k])/(maxrgb[k]-minrgb[k])\r\n return colors, maxangs, minangs", "def __set_mask_regions(self):\n self.bottom_clip = np.int32(np.int32([[[60,0], [1179,0], [1179,650], [60,650]]]))\n self.roi_clip = np.int32(np.int32([[[640, 425], [1179,550], [979,719],\n [299,719], [100, 550], [640, 425]]]))", "def straylight_background(det1im_file='None', sky2det_file='None',\n reg_file = 'None', det1_expo_file = 'None',\n diag=False, src_rad = None):\n \n from regions import Regions, PixCoord, CirclePixelRegion\n from astropy.io import fits\n from nustar_gen import info\n from astropy import units as u\n \n # Set up pixel coordinate grid:\n coords = np.array(np.meshgrid(range(360), range(360)))\n pix = PixCoord(coords[0, :], coords[1, :])\n \n\n\n assert os.path.isfile(sky2det_file) is True, \\\n f'\\n {sky2det_file} does not exist.'\n src = fits.getdata(sky2det_file)\n\n assert os.path.isfile(det1im_file) is True, \\\n f'\\n {det1im_file} does not exist.'\n im, im_hdr = fits.getdata(det1im_file, header=True)\n\n assert os.path.isfile(reg_file) is True, \\\n f'\\n {reg_file} does not exist.'\n reg = Regions.read(reg_file)\n\n\n assert os.path.isfile(det1_expo_file) is True, \\\n f'\\n {det1_expo_file} does not exist.'\n\n\n ns = info.NuSTAR()\n det1_pixarea = (ns.pixel_um.to(u.cm))**2\n exp_area = 0\n exp = 0\n if src_rad is None:\n radius = 2.0*u.arcmin\n else:\n radius = src_rad\n rad_pix = (radius / ns.pixel).cgs.value\n\n with fits.open(det1_expo_file) as hdu:\n\n # Hard work goes here\n start=True\n for ii, ihdu in enumerate(hdu):\n if ii == 0:\n continue\n exp += ihdu.header['DURATION']\n expim = ihdu.data\n \n if start:\n all_mask = np.array(expim).astype(bool)\n all_mask[:] = True\n start=False\n \n # Find the source DET1X/DET1Y at this time:\n sx = np.interp(ihdu.header['REF_TIME'], src['TIME'], src['DET1X'])\n sy = np.interp(ihdu.header['REF_TIME'], src['TIME'], src['DET1Y'])\n \n # Construct a pixel region\n src_reg = CirclePixelRegion(PixCoord(x=sx, y=sy), 
radius=rad_pix)\n \n # Loop over regions and find those that are include first:\n set=False\n\n for ri in reg:\n if (ri.meta['include']) is False:\n continue\n # Turn on any pixels in the mask for the source\n rm = ri.contains(pix)\n all_mask = all_mask & ~rm\n\n\n # Turn on any pixels in the mask for the source\n rm = src_reg.contains(pix)\n all_mask = all_mask & ~rm\n\n \n \n # # Mask out the exclusion regions:\n set = False\n for ri in reg:\n if (ri.meta['include']) is True:\n continue\n if set is False:\n # Defaults to False\n reg_mask = np.array(expim).astype(bool)\n reg_mask[:] = True\n set=True\n\n # # Will return \"True\" for regions that are *not* in the exclusion region.\n rm = ri.contains(pix)\n reg_mask = reg_mask & rm\n\n if set is True:\n # Invert reg_mask\n src_mask = all_mask & reg_mask\n else:\n src_mask = all_mask\n \n\n exp_area = (expim*src_mask).sum() / expim.max()\n area = (exp_area * det1_pixarea)\n\n counts = (im*src_mask).sum() * u.ct * 1.0\n rate= counts / (area *im_hdr['EXPOSURE']*u.s)\n\n\n\n if diag is True:\n import matplotlib.pyplot as plt\n from matplotlib.colors import LogNorm\n from scipy import ndimage\n axs = plt.figure(figsize=(10, 10)).subplots(nrows=2, ncols=2)\n axs[0,0].imshow(src_mask, origin = 'lower')\n axs[0, 0].set_title('Mask')\n axs[0,1].imshow(expim*src_mask, origin='lower') \n axs[0, 1].set_title('Exposure Map')\n axs[1, 0].imshow(im, origin = 'lower',\n norm = LogNorm(vmin=0.1, vmax=im.max()*2))\n axs[1, 0].set_title('Counts Image')\n axs[1, 1].imshow(im*src_mask, origin='lower',\n norm = LogNorm(vmin=0.1, vmax=(im).max()*2))\n axs[1, 1].set_title('Masked Counts Image')\n plt.show()\n print(f'straylight_background:')\n print(f'Background area: {area:8.2f}')\n print(f'Background rate: {rate:8.5f}')\n print('')\n return rate", "def theRoof(pos, blockTypeMain = wool , mainColor=wPurple, replaceGlass = wGlass):\n \n # try again the same trick to add the roof\n # Middle part\n for i in range(0,12,1):\n iy = i\n if i >= 6:\n iy=11-i\n #print i, iy\n mc.setBlocks(pos.x-4+i, pos.y+10+iy, pos.z+4,\n pos.x-4+i, pos.y+10+iy, pos.z+29, blockTypeMain, mainColor)\n\n # RIGHT SIDE of the house\n for ii in range(0,3,1):\n mc.setBlocks(pos.x-5+ii, pos.y+9+ii, pos.z+5+ii,\n pos.x-13+ii, pos.y+9+ii, pos.z+29-ii, blockTypeMain, mainColor)\n #Remove the blocks\n\n material = air\n if ii >=2 :\n material = replaceGlass\n mc.setBlocks(pos.x-5+ii, pos.y+9+ii, pos.z+8,\n pos.x-11+ii, pos.y+9+ii, pos.z+26-ii, material)\n \n # and LEFT side of the house\n xAdjust = 21\n for ii in range(0,3,1):\n mc.setBlocks(pos.x-5-ii+xAdjust, pos.y+9+ii, pos.z+5+ii,\n pos.x-13-ii+xAdjust, pos.y+9+ii, pos.z+29-ii, blockTypeMain, mainColor)\n #Remove the blocks\n\n material = air\n if ii >=2 :\n material = replaceGlass\n mc.setBlocks(pos.x-7-ii+xAdjust, pos.y+9+ii, pos.z+8,\n pos.x-13-ii+xAdjust, pos.y+9+ii, pos.z+26-ii, material)", "def reScaleLandsat(self,img):\n \n\t\tthermalBand = ee.List(['thermal'])\n\t\tthermal = ee.Image(img).select(thermalBand).multiply(10)\n \n\t\totherBands = ee.Image(img).bandNames().removeAll(thermalBand)\n\t\tscaled = ee.Image(img).select(otherBands).divide(0.0001)\n \n\t\timage = ee.Image(scaled.addBands(thermal)).int16()\n \n\t\treturn image.copyProperties(img)" ]
[ "0.5887975", "0.5787736", "0.57740206", "0.56503314", "0.5611882", "0.55984235", "0.55739576", "0.5566662", "0.5542999", "0.55224055", "0.551903", "0.5480114", "0.5477756", "0.5398472", "0.53926384", "0.53850454", "0.5373012", "0.53565186", "0.53404105", "0.53380716", "0.5336995", "0.53284574", "0.531579", "0.5293177", "0.52889925", "0.52812934", "0.5261209", "0.5253319", "0.52495193", "0.5246197" ]
0.71222955
0
Make the object directory and file for the adding file.
def make_directory_and_object_file(file_sha1_hash, file_content, parent_dir): try: # Create a path for the new directory, which is the first 2 characters # of the hash. new_dir_path = join(parent_dir, ".lgit/objects", file_sha1_hash[:2]) # Create the directory try: mkdir(new_dir_path) except FileExistsError: pass new_file_path = join(new_dir_path, file_sha1_hash[2:]) # Create the new file and write the content of the original file into # it try: new_file = open(new_file_path, "wb+") new_file.write(file_content) except PermissionError: print("Cannot add an object to a lgit repository") except TypeError: # Raise error if the sha1 hash is empty print("fatal: updating files failed")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def makeLibrary(self):\n #------------------------------------------ Instance for the output file\n outputFile = open(\"%s/%s\" % (self.sceneryPath,self.libTxtFileName),\"w\")\n #------------------------------------------------------ write the header\n for line in self.header:\n outputFile.write(\"%s\\n\" % (line))\n #------------------------------------------------- Loop over all folders\n packageContent = os.walk(self.sceneryPath)\n for folder in packageContent:\n for fileName in folder[2]:\n fileType = fileName.split(\".\")[-1]\n if fileType in self.objectTypes:\n realPath = folder[0][len(self.sceneryPath)+1:].replace(\"\\\\\",\"/\")\n filePath = \"%s/%s\" % (realPath,fileName)\n print filePath\n outputFile.write(\"EXPORT %s%s %s%s\\n\" % (self.libPrefix,filePath,self.realPathPrefix,filePath))\n outputFile.close()", "def create(self, basedir, outdir, name, prefix=None):", "def create(self):\n self.create_file()", "def create_obj(destination,mtl_name):\r\n\tshutil.copyfile(\"file_cube.obj\",destination)\r\n\tf=open(destination,\"r\")\r\n\tlines=f.readlines()\r\n\tlines[0]=\"mtllib \"+mtl_name+\"\\n\"\r\n\tf.close()\r\n\tf=open(destination,\"w\")\r\n\tf.writelines(lines)\r\n\tf.close()", "def _make_new_directory(self, *, file_path: Path, need_init: bool):\r\n\r\n if Path(file_path).exists():\r\n raise FileExistsError(f\"The directory at {file_path} already exists.\")\r\n\r\n Path(file_path).mkdir(parents=True)\r\n\r\n if need_init:\r\n\r\n Path(file_path / \"__init__.py\").touch()\r\n\r\n self._logger.info(\"The directory %s has been created.\", file_path)", "def make_path(self):\n folders = [\n f\"{self.save_path}{self.name}/json/\",\n f\"{self.save_path}{self.name}/images/\",\n ]\n if hasattr(self, \"masks\"):\n folders.append(f\"{self.save_path}{self.name}/masks/\")\n for folder in folders:\n if not os.path.exists(folder):\n os.makedirs(folder)", "def create_files(project_name, root_dir):\r\n root_dir = projectfolders.create_path(root_dir, project_name) #Modify the root\r\n \r\n write_setup(project_name, root_dir)\r\n write_inits(project_name, root_dir)\r\n write_tests(project_name, root_dir)", "def setup(self, newdir=None):\n if not os.path.exists(self.output_path):\n os.makedirs(self.output_path)\n if newdir:\n _new = os.path.join(self.output_path, newdir)\n if not os.path.exists(_new):\n os.makedirs(_new)", "def create_source(self, source):\n if not os.path.isdir(source):\n os.makedirs(source)\n # Create a text file in the source directory.\n text_file = os.path.join(source, 'notes.txt')\n with open(text_file, 'w') as handle:\n handle.write(\"This file should be included in the backup.\\n\")\n # Create a subdirectory in the source directory.\n subdirectory = os.path.join(source, 'subdirectory')\n os.mkdir(subdirectory)\n # Create a symbolic link in the subdirectory.\n symlink = os.path.join(subdirectory, 'symbolic-link')\n os.symlink('../include-me.txt', symlink)", "def createFolder(self):\n self.destination = self.getPath() #Find the destination to create the folder\n try:\n os.makedirs(self.destination) #Try and make a folder\n except FileExistsError:\n pass #Otherwise continue if an error is encountered because the file exists already", "def make_source_dir():\n\n os.makedirs(files['source_dir'].rel)", "def create_file_structure_for_packages(root_folder, file_to_copy, object_name):\n upload_package_folder = os.path.join(\n root_folder, object_name, 'coala' + object_name)\n os.makedirs(upload_package_folder, exist_ok=True)\n touch(os.path.join(upload_package_folder, 
'__init__.py'))\n shutil.copyfile(file_to_copy, os.path.join(upload_package_folder,\n object_name + '.py'))", "def NewObjfileHandler(event):\n\tCheckFileArch()", "def make(config):\n # Create child folders\n for func in (create_basic_structure,\n copy_resources,\n copy_databases,\n copy_libraries,\n copy_security,\n copy_app_actions,\n copy_pages,\n create_application_info_file,\n replace_all_guids):\n\n INFO(\"\")\n INFO(\"+\"*70)\n INFO(\"\")\n func(config)", "def MakeDir(self, path: str) -> None:\n ...", "def on_created(self, event):\n \n file_name = os.path.basename(event.src_path)\n parent = os.path.dirname(event.src_path)\n parents_id = self.filesystem[parent][\"id\"]\n\n if event.is_directory:\n if file_name not in self.ignore_dirs:\n file_id = self.gapy.create_file(file_name, path=parent, parents_id=[parents_id], isFolder=True)\n self.filesystem[file_name.rstrip(\"/\")] = file_id \n self.gapy.logger.info(\"The directory {} was created with id {}\".format(file_name, file_id))\n else:\n if file_name not in self.ignore_files:\n with open(event.src_path, \"w\") as empty_file:\n empty_file.write(\"\\t\")\n file_id = self.gapy.create_file(file_name, path=parent, parents_id=[parents_id])\n self.filesystem[parent.rstrip(\"/\")][\"files\"].append({\"name\": file_name, \"id\": file_id})\n self.gapy.logger.info(\"The file {} was created with id {}\".format(file_name, file_id))\n print(f\"\\nFile created: {file_name} at {datetime.now()}\")\n\n self.update_fs()", "def _create_object_directory(directory, metadata):\n if directory.exists():\n raise IOError(\"The directory '\" + str(directory) + \"' already exists\")\n valid_types = [DATASET_TYPENAME, FILE_TYPENAME, GROUP_TYPENAME]\n typename = metadata[EXDIR_METANAME][TYPE_METANAME]\n if typename not in valid_types:\n raise ValueError(\"{typename} is not a valid typename\".format(typename=typename))\n directory.mkdir()\n meta_filename = directory / META_FILENAME\n with meta_filename.open(\"w\", encoding=\"utf-8\") as meta_file:\n if metadata == _default_metadata(typename):\n # if it is the default, we know how to print it fast\n metadata_string = (''\n '{exdir_meta}:\\n'\n ' {type_meta}: \"{typename}\"\\n'\n ' {version_meta}: {version}\\n'\n '').format(\n exdir_meta=EXDIR_METANAME,\n type_meta=TYPE_METANAME,\n typename=typename,\n version_meta=VERSION_METANAME,\n version=1\n )\n else:\n from io import StringIO\n with StringIO() as buf:\n yaml.YAML(typ=\"safe\", pure=True).dump(metadata, buf)\n metadata_string = buf.getvalue()\n\n try:\n meta_file.write(metadata_string)\n except TypeError:\n # NOTE workaround for Python 2.7\n meta_file.write(metadata_string.decode('utf8'))", "def make_directory(self):\n if not os.path.isdir(self.directory):\n os.mkdir(self.directory)", "def create_folder(self):\n self.config.csv_path.mkdir(parents=True, exist_ok=True)\n self.config.images_path.mkdir(parents=True, exist_ok=True)", "def test_add3_dir(self):\n os.mkdir(tempdir + 'add3')\n TempfileManager.add_tempfile(tempdir + 'add3')", "def _makeDir(self):\n try:\n os.mkdir(self.dir)\n # log('created directory: %s\\n' % self.dir)\n except OSError, err:\n if err.errno != errno.EEXIST:\n raise", "def create_file(self, key=None):\n self.make_directory()\n open(self.file_path(key), 'w').close()", "def make_new_dir(self):\n try:\n shutil.rmtree(self.model_path)\n except:\n pass\n os.mkdir(self.model_path)", "def makefilename(self):\n fp= (pathlib.Path(self.vr_folder).expanduser()/(time.strftime(self.vr_filename))).with_suffix('')\n fp.parent.mkdir(parents=True, 
exist_ok=True)\n print('files setup', str(fp))\n return fp", "def add_file(self, path):\n pass", "def create_directory_structure():\n\n def ensure_directory(path):\n try:\n os.makedirs(path)\n\n except OSError as e:\n if e.errno != errno.EEXIST:\n raise\n\n ensure_directory('./out/textures')\n ensure_directory('./out/data')", "def createFolder(self):\n\n self.directory = \"D:\\\\CompositionHelper\"\n if not os.path.exists(self.directory):\n os.makedirs(self.directory)\n print ('Created new folder')", "def create_target(cls, relpath, target):\r\n cls.create_file(cls.build_path(relpath), target, mode='a')", "def test_create_files(self):\n\n testdir = \"test_output\"\n test_submission = Submission()\n self.addCleanup(os.remove, \"submission.tar.gz\")\n self.addCleanup(shutil.rmtree, testdir)\n\n test_submission.create_files(testdir)\n\n self.doCleanups()", "def create(self):\n if os.path.isdir(self.repodir):\n if os.listdir(self.repodir):\n raise EmtError('%s is not empty' % self.repodir)\n else:\n os.makedirs(self.repodir)\n self.git_cmd('init')\n self.initialized = True" ]
[ "0.67735976", "0.6593304", "0.6487546", "0.6399611", "0.62541074", "0.6146902", "0.6131344", "0.6102639", "0.6092799", "0.6058326", "0.60511124", "0.60165167", "0.60149306", "0.59659183", "0.59595376", "0.59442127", "0.59324336", "0.5891388", "0.58593774", "0.58547544", "0.5828899", "0.58118397", "0.5806531", "0.5804218", "0.577976", "0.57724726", "0.5768544", "0.57588845", "0.5756523", "0.5718071" ]
0.67971694
0
Sets the aux_input_type1 of this UpdateVehicleRequest.
def aux_input_type1(self, aux_input_type1): allowed_values = ["none", "emergencyLights", "emergencyAlarm", "stopPaddle", "powerTakeOff", "plow", "sweeper", "salter", "reefer", "door", "boom", "auxiliaryEngine", "generator", "eightWayLights"] # noqa: E501 if self.local_vars_configuration.client_side_validation and aux_input_type1 not in allowed_values: # noqa: E501 raise ValueError( "Invalid value for `aux_input_type1` ({0}), must be one of {1}" # noqa: E501 .format(aux_input_type1, allowed_values) ) self._aux_input_type1 = aux_input_type1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def aux_input_type2(self, aux_input_type2):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type2 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type2` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type2, allowed_values)\n )\n\n self._aux_input_type2 = aux_input_type2", "def aux_input_type6(self, aux_input_type6):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type6 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type6` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type6, allowed_values)\n )\n\n self._aux_input_type6 = aux_input_type6", "def aux_input_type10(self, aux_input_type10):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type10 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type10` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type10, allowed_values)\n )\n\n self._aux_input_type10 = aux_input_type10", "def aux_input_type3(self, aux_input_type3):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type3 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type3` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type3, allowed_values)\n )\n\n self._aux_input_type3 = aux_input_type3", "def aux_input_type8(self, aux_input_type8):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type8 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type8` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type8, allowed_values)\n )\n\n self._aux_input_type8 = aux_input_type8", "def aux_input_type9(self, aux_input_type9):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type9 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type9` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type9, allowed_values)\n )\n\n self._aux_input_type9 = 
aux_input_type9", "def aux_input_type5(self, aux_input_type5):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type5 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type5` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type5, allowed_values)\n )\n\n self._aux_input_type5 = aux_input_type5", "def aux_input_type4(self, aux_input_type4):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type4 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type4` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type4, allowed_values)\n )\n\n self._aux_input_type4 = aux_input_type4", "def aux_input_type7(self, aux_input_type7):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type7 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type7` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type7, allowed_values)\n )\n\n self._aux_input_type7 = aux_input_type7", "def SetFirstInput(self, input_signal1):\n self.__input_signal1 = input_signal1\n self._changelength()", "def _update_input_type(self):\n pass", "async def change_aux(self, aux, newstate):\n if not self.connected:\n return\n\n # we don't have 3 auxs!\n if aux > 1:\n return\n\n # we don't have THIS aux\n if not self.aux_array[aux]:\n return\n\n # this is a toggle switch, not on/off\n if self.aux_status[aux] == newstate:\n return\n\n # Setup the basic things we know\n data = bytearray(9)\n data[0] = M_START\n data[1] = 7\n data[2] = mtypes[BMTS_CONTROL_REQ][0]\n data[3] = mtypes[BMTS_CONTROL_REQ][1]\n data[4] = mtypes[BMTS_CONTROL_REQ][2]\n data[5] = C_AUX1 if aux == 0 else C_AUX2\n data[6] = 0x00 # who knows?\n data[7] = messages.Message.crc(data[1:7])\n data[8] = M_END\n\n self.writer.write(data)\n await self.writer.drain()", "def feature_vector1(self, feature_vector1):\n\n self._feature_vector1 = feature_vector1", "def SetSecondInput(self, input_signal2):\n self.__input_signal2 = input_signal2\n self._changelength()", "def update_interaction_model_slot_type_v1(self, slot_type_id, update_request, **kwargs):\n # type: (str, UpdateRequest_43de537, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"update_interaction_model_slot_type_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'slot_type_id' is set\n if ('slot_type_id' not in params) or (params['slot_type_id'] is None):\n raise ValueError(\n \"Missing the required parameter `slot_type_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'update_request' is set\n if ('update_request' 
not in params) or (params['update_request'] is None):\n raise ValueError(\n \"Missing the required parameter `update_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/api/custom/interactionModel/slotTypes/{slotTypeId}/update'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'slot_type_id' in params:\n path_params['slotTypeId'] = params['slot_type_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'update_request' in params:\n body_params = params['update_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No content, indicates the fields were successfully updated.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn&#39;t have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"There is no slot type defined for the slotTypeId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None", "def set_input_type(self, input_type):\n if input_type is not None: self._input_type.value = input_type\n return self", "def uni_type(self, uni_type):\n self._uni_type = uni_type", "def interaction_type(self, interaction_type):\n\n self._interaction_type = interaction_type", "def intensity_cycle1(self, intensity_cycle1):\n\n self._intensity_cycle1 = intensity_cycle1", "def __init__(__self__, *,\n type: Optional[pulumi.Input[Union[str, 'VNetSolutionType']]] = None):\n if type is not None:\n pulumi.set(__self__, \"type\", type)", "def reroute_input(ts0, ts1, op1):\n for i, t in enumerate(op1.inputs):\n if t is ts1:\n op1._update_input(i, ts0) # pylint: disable=protected-access", "def SetInput(self, , , p_float_6):\n ...", "def x1(self, x1=None):\n\n if x1 is None:\n return self._x1\n else:\n if not isinstance(x1, int) and not isinstance(x1, float):\n raise TypeError(\"x1 must be numeric, not '%s'\" % x1)\n self._x1 = x1", "def updateTransformationType(self):\n tt = self.vb.determineTransformationType()\n self.stackedWidget.setCurrentIndex(tt)\n self.checkRequirements()", "def SetLabelInput(self, _arg: 'itkImageUS2') -> \"void\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUS2_SetLabelInput(self, _arg)", "def __init__(self, attributes=None, aux_input_type1=None, aux_input_type10=None, aux_input_type2=None, aux_input_type3=None, aux_input_type4=None, aux_input_type5=None, aux_input_type6=None, aux_input_type7=None, aux_input_type8=None, aux_input_type9=None, engine_hours=None, external_ids=None, gateway_serial=None, harsh_acceleration_setting_type=None, license_plate=None, name=None, notes=None, odometer_meters=None, static_assigned_driver_id=None, tag_ids=None, vin=None, local_vars_configuration=None): # noqa: E501 # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration()\n self.local_vars_configuration = local_vars_configuration\n\n self._attributes = None\n self._aux_input_type1 = None\n self._aux_input_type10 = None\n self._aux_input_type2 = None\n self._aux_input_type3 = None\n self._aux_input_type4 = None\n self._aux_input_type5 = None\n self._aux_input_type6 = None\n self._aux_input_type7 = None\n self._aux_input_type8 = None\n self._aux_input_type9 = None\n self._engine_hours = None\n self._external_ids = None\n self._gateway_serial = None\n self._harsh_acceleration_setting_type = None\n self._license_plate = None\n self._name = None\n self._notes = None\n self._odometer_meters = None\n self._static_assigned_driver_id = None\n self._tag_ids = None\n self._vin = None\n self.discriminator = None\n\n if attributes is not None:\n self.attributes = attributes\n if aux_input_type1 is not None:\n self.aux_input_type1 = aux_input_type1\n if 
aux_input_type10 is not None:\n self.aux_input_type10 = aux_input_type10\n if aux_input_type2 is not None:\n self.aux_input_type2 = aux_input_type2\n if aux_input_type3 is not None:\n self.aux_input_type3 = aux_input_type3\n if aux_input_type4 is not None:\n self.aux_input_type4 = aux_input_type4\n if aux_input_type5 is not None:\n self.aux_input_type5 = aux_input_type5\n if aux_input_type6 is not None:\n self.aux_input_type6 = aux_input_type6\n if aux_input_type7 is not None:\n self.aux_input_type7 = aux_input_type7\n if aux_input_type8 is not None:\n self.aux_input_type8 = aux_input_type8\n if aux_input_type9 is not None:\n self.aux_input_type9 = aux_input_type9\n if engine_hours is not None:\n self.engine_hours = engine_hours\n if external_ids is not None:\n self.external_ids = external_ids\n if gateway_serial is not None:\n self.gateway_serial = gateway_serial\n if harsh_acceleration_setting_type is not None:\n self.harsh_acceleration_setting_type = harsh_acceleration_setting_type\n if license_plate is not None:\n self.license_plate = license_plate\n if name is not None:\n self.name = name\n if notes is not None:\n self.notes = notes\n if odometer_meters is not None:\n self.odometer_meters = odometer_meters\n if static_assigned_driver_id is not None:\n self.static_assigned_driver_id = static_assigned_driver_id\n if tag_ids is not None:\n self.tag_ids = tag_ids\n if vin is not None:\n self.vin = vin", "def upgradeToVersion1(self):\n log.debug(u\"Upgrading iDevice\")\n self.image.isFeedback = True", "def set_input(self, input):\n AtoB = self.opt.direction == 'AtoB'\n self.real_A = input['A' if AtoB else 'B'].to(self.device)\n self.inst = input['inst'].to(self.device)\n self.real_B = input['B' if AtoB else 'A'].to(self.device)\n self.image_paths = input['A_paths' if AtoB else 'B_paths']", "def __init__(self, rpc, auxiliary):\n super(AuxiliaryModule, self).__init__(rpc, 'auxiliary', auxiliary)\n self._action = self._info.get('default_action', \"\")", "def get_interaction_model_slot_type_version_v1(self, slot_type_id, version, **kwargs):\n # type: (str, str, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05, SlotTypeVersionData_1f3ee474]\n operation_name = \"get_interaction_model_slot_type_version_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'slot_type_id' is set\n if ('slot_type_id' not in params) or (params['slot_type_id'] is None):\n raise ValueError(\n \"Missing the required parameter `slot_type_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'version' is set\n if ('version' not in params) or (params['version'] is None):\n raise ValueError(\n \"Missing the required parameter `version` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/api/custom/interactionModel/slotTypes/{slotTypeId}/versions/{version}'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'slot_type_id' in params:\n path_params['slotTypeId'] = params['slot_type_id']\n if 'version' in params:\n path_params['version'] = params['version']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # 
Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.interaction_model.type_version.slot_type_version_data.SlotTypeVersionData\", status_code=200, message=\"Returns the slot type version metadata for the given slotTypeId and version.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn&#39;t have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"There is no slot type defined for the slotTypeId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceeds the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"GET\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v1.skill.interaction_model.type_version.slot_type_version_data.SlotTypeVersionData\")\n\n if full_response:\n return api_response\n return api_response.body" ]
[ "0.6881577", "0.6247039", "0.61682045", "0.6168174", "0.61245406", "0.6103021", "0.59515715", "0.57475", "0.55829304", "0.526451", "0.48469332", "0.47708565", "0.47298566", "0.4703361", "0.46092513", "0.45240486", "0.44671947", "0.44662634", "0.439882", "0.4355716", "0.43461382", "0.433834", "0.43012515", "0.42566326", "0.42060938", "0.42037424", "0.41788095", "0.41547686", "0.4150748", "0.41503718" ]
0.8198986
0
Sets the aux_input_type10 of this UpdateVehicleRequest.
def aux_input_type10(self, aux_input_type10): allowed_values = ["none", "emergencyLights", "emergencyAlarm", "stopPaddle", "powerTakeOff", "plow", "sweeper", "salter", "reefer", "door", "boom", "auxiliaryEngine", "generator", "eightWayLights"] # noqa: E501 if self.local_vars_configuration.client_side_validation and aux_input_type10 not in allowed_values: # noqa: E501 raise ValueError( "Invalid value for `aux_input_type10` ({0}), must be one of {1}" # noqa: E501 .format(aux_input_type10, allowed_values) ) self._aux_input_type10 = aux_input_type10
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def aux_input_type9(self, aux_input_type9):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type9 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type9` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type9, allowed_values)\n )\n\n self._aux_input_type9 = aux_input_type9", "def aux_input_type8(self, aux_input_type8):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type8 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type8` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type8, allowed_values)\n )\n\n self._aux_input_type8 = aux_input_type8", "def aux_input_type1(self, aux_input_type1):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type1 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type1` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type1, allowed_values)\n )\n\n self._aux_input_type1 = aux_input_type1", "def aux_input_type6(self, aux_input_type6):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type6 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type6` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type6, allowed_values)\n )\n\n self._aux_input_type6 = aux_input_type6", "def aux_input_type7(self, aux_input_type7):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type7 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type7` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type7, allowed_values)\n )\n\n self._aux_input_type7 = aux_input_type7", "def aux_input_type2(self, aux_input_type2):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type2 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type2` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type2, allowed_values)\n )\n\n self._aux_input_type2 = 
aux_input_type2", "def aux_input_type4(self, aux_input_type4):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type4 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type4` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type4, allowed_values)\n )\n\n self._aux_input_type4 = aux_input_type4", "def aux_input_type5(self, aux_input_type5):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type5 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type5` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type5, allowed_values)\n )\n\n self._aux_input_type5 = aux_input_type5", "def aux_input_type3(self, aux_input_type3):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type3 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type3` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type3, allowed_values)\n )\n\n self._aux_input_type3 = aux_input_type3", "async def change_aux(self, aux, newstate):\n if not self.connected:\n return\n\n # we don't have 3 auxs!\n if aux > 1:\n return\n\n # we don't have THIS aux\n if not self.aux_array[aux]:\n return\n\n # this is a toggle switch, not on/off\n if self.aux_status[aux] == newstate:\n return\n\n # Setup the basic things we know\n data = bytearray(9)\n data[0] = M_START\n data[1] = 7\n data[2] = mtypes[BMTS_CONTROL_REQ][0]\n data[3] = mtypes[BMTS_CONTROL_REQ][1]\n data[4] = mtypes[BMTS_CONTROL_REQ][2]\n data[5] = C_AUX1 if aux == 0 else C_AUX2\n data[6] = 0x00 # who knows?\n data[7] = messages.Message.crc(data[1:7])\n data[8] = M_END\n\n self.writer.write(data)\n await self.writer.drain()", "def setX10(self, ident, state):\r\n if self.visprotocol is not None:\r\n self.visprotocol.setX10(ident, state)", "def custom_data_10(self, custom_data_10):\n # type: (string_types) -> None\n\n if custom_data_10 is not None:\n if not isinstance(custom_data_10, string_types):\n raise TypeError(\"Invalid type for `custom_data_10`, type has to be `string_types`\")\n\n self._custom_data_10 = custom_data_10", "def _updateTruckRec(self, tNode, Uid):\n if type(Uid) == int:\n self._readTruckRec(tNode, Uid)\n print(f'Vehicle Id {Uid} record updated')\n print('------------------------------------')", "def set_temp_coeff(self, temp_coeff):\n assert 4 <= temp_coeff < 8, \"Temperature coefficient must be one of TEMP_COEFF_0..TEMP_COEFF_3.\"\n assert self.instr == self.INSTR_EXT, \"Please switch to extended instruction set first.\"\n self.temp_coeff = temp_coeff\n self.command([temp_coeff])", "def __init__(self, rpc, auxiliary):\n super(AuxiliaryModule, self).__init__(rpc, 'auxiliary', auxiliary)\n self._action = self._info.get('default_action', \"\")", "def 
set_input_status(self, input_status):\n \n self.__input_status = input_status", "def set_ivt_variable(self, var):\n self.set_input_variable(var)", "def set_tunable_parameters(self, input_type='default', reduced_size=64, **kwargs):\n self.logger.info(\"Set parameters: input_type=%s, reduced_size=%s\", input_type, reduced_size)\n self.input_type = input_type\n self.reduced_size = reduced_size", "def update_interaction_model_slot_type_v1(self, slot_type_id, update_request, **kwargs):\n # type: (str, UpdateRequest_43de537, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"update_interaction_model_slot_type_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'slot_type_id' is set\n if ('slot_type_id' not in params) or (params['slot_type_id'] is None):\n raise ValueError(\n \"Missing the required parameter `slot_type_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'update_request' is set\n if ('update_request' not in params) or (params['update_request'] is None):\n raise ValueError(\n \"Missing the required parameter `update_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/api/custom/interactionModel/slotTypes/{slotTypeId}/update'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'slot_type_id' in params:\n path_params['slotTypeId'] = params['slot_type_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'update_request' in params:\n body_params = params['update_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No content, indicates the fields were successfully updated.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn&#39;t have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"There is no slot type defined for the slotTypeId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None", "def update(self, v_input):\n\n self.v = v_input", "def SetInput(self, , , p_float_6):\n ...", "def _update_input_type(self):\n pass", "def modify_input(self, raw_input_par):\r\n raise NotImplementedError", "def modify_input(self, raw_input_par):\r\n raise NotImplementedError", "def enable_auxdata(sk):\n sk.setsockopt(SOL_PACKET, PACKET_AUXDATA, 1)", "def __init__(self, attributes=None, aux_input_type1=None, aux_input_type10=None, aux_input_type2=None, aux_input_type3=None, aux_input_type4=None, aux_input_type5=None, aux_input_type6=None, aux_input_type7=None, aux_input_type8=None, aux_input_type9=None, engine_hours=None, external_ids=None, gateway_serial=None, harsh_acceleration_setting_type=None, license_plate=None, name=None, notes=None, odometer_meters=None, static_assigned_driver_id=None, tag_ids=None, vin=None, local_vars_configuration=None): # noqa: E501 # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration()\n self.local_vars_configuration = local_vars_configuration\n\n self._attributes = None\n self._aux_input_type1 = None\n self._aux_input_type10 = None\n self._aux_input_type2 = None\n self._aux_input_type3 = None\n self._aux_input_type4 = None\n self._aux_input_type5 = None\n self._aux_input_type6 = None\n self._aux_input_type7 = None\n self._aux_input_type8 = None\n self._aux_input_type9 = None\n self._engine_hours = None\n self._external_ids = None\n self._gateway_serial = None\n self._harsh_acceleration_setting_type = None\n self._license_plate = None\n self._name = None\n self._notes = None\n self._odometer_meters = None\n self._static_assigned_driver_id = None\n self._tag_ids = None\n self._vin = None\n self.discriminator = None\n\n if attributes is not None:\n self.attributes = attributes\n if aux_input_type1 is not None:\n self.aux_input_type1 = aux_input_type1\n if aux_input_type10 is not None:\n self.aux_input_type10 = aux_input_type10\n if aux_input_type2 is not None:\n self.aux_input_type2 = aux_input_type2\n if aux_input_type3 is not None:\n self.aux_input_type3 = aux_input_type3\n if aux_input_type4 is not None:\n self.aux_input_type4 = aux_input_type4\n if aux_input_type5 is not None:\n self.aux_input_type5 = aux_input_type5\n if aux_input_type6 is not None:\n self.aux_input_type6 = aux_input_type6\n if aux_input_type7 is not None:\n self.aux_input_type7 = aux_input_type7\n if aux_input_type8 is not None:\n self.aux_input_type8 = aux_input_type8\n if aux_input_type9 is not None:\n self.aux_input_type9 = aux_input_type9\n if engine_hours is not None:\n self.engine_hours = engine_hours\n if external_ids is not None:\n self.external_ids = external_ids\n if gateway_serial is not None:\n self.gateway_serial = gateway_serial\n if 
harsh_acceleration_setting_type is not None:\n self.harsh_acceleration_setting_type = harsh_acceleration_setting_type\n if license_plate is not None:\n self.license_plate = license_plate\n if name is not None:\n self.name = name\n if notes is not None:\n self.notes = notes\n if odometer_meters is not None:\n self.odometer_meters = odometer_meters\n if static_assigned_driver_id is not None:\n self.static_assigned_driver_id = static_assigned_driver_id\n if tag_ids is not None:\n self.tag_ids = tag_ids\n if vin is not None:\n self.vin = vin", "def FillInputPortInformation(self, p_int, vtkInformation):\n ...", "def _update_VOX(self, event):\n text = self._VOX_textbox.toPlainText()\n ras = self._convert_text(text, 'vox')\n if ras is not None:\n self._set_ras(ras)", "def modify_input(self, raw_input_par):\r\n\r\n return self.meta_model.modify_input(raw_input_par)", "def set_temperature(self):\n self.temperature = self.gui.doubleSpinBox_temperature.value()\n self.logger.debug('Changing the temperature to {}K'.format(self.temperature))\n\n self.anc350_instrument.temperature = self.temperature\n self.anc350_instrument.set_temperature_limits()\n\n self.max_dclevel_V = self.anc350_instrument.max_dC_level\n\n self.logger.debug('Changed the scanner piezo limits to {}'.format(self.max_dclevel_V))" ]
[ "0.6725952", "0.610575", "0.5790601", "0.5682387", "0.56456137", "0.54830354", "0.5383829", "0.53816664", "0.53323066", "0.440578", "0.43797588", "0.4314514", "0.43111408", "0.42576176", "0.4198845", "0.4124491", "0.41100422", "0.4049688", "0.40081385", "0.40019417", "0.39651954", "0.39366338", "0.3921866", "0.3921866", "0.38931978", "0.38884625", "0.38859606", "0.38736144", "0.3836214", "0.38281173" ]
0.8267368
0
Sets the aux_input_type2 of this UpdateVehicleRequest.
def aux_input_type2(self, aux_input_type2): allowed_values = ["none", "emergencyLights", "emergencyAlarm", "stopPaddle", "powerTakeOff", "plow", "sweeper", "salter", "reefer", "door", "boom", "auxiliaryEngine", "generator", "eightWayLights"] # noqa: E501 if self.local_vars_configuration.client_side_validation and aux_input_type2 not in allowed_values: # noqa: E501 raise ValueError( "Invalid value for `aux_input_type2` ({0}), must be one of {1}" # noqa: E501 .format(aux_input_type2, allowed_values) ) self._aux_input_type2 = aux_input_type2
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def aux_input_type1(self, aux_input_type1):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type1 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type1` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type1, allowed_values)\n )\n\n self._aux_input_type1 = aux_input_type1", "def aux_input_type3(self, aux_input_type3):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type3 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type3` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type3, allowed_values)\n )\n\n self._aux_input_type3 = aux_input_type3", "def aux_input_type6(self, aux_input_type6):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type6 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type6` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type6, allowed_values)\n )\n\n self._aux_input_type6 = aux_input_type6", "def SetSecondInput(self, input_signal2):\n self.__input_signal2 = input_signal2\n self._changelength()", "def aux_input_type9(self, aux_input_type9):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type9 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type9` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type9, allowed_values)\n )\n\n self._aux_input_type9 = aux_input_type9", "def aux_input_type8(self, aux_input_type8):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type8 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type8` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type8, allowed_values)\n )\n\n self._aux_input_type8 = aux_input_type8", "def aux_input_type10(self, aux_input_type10):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type10 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type10` ({0}), must 
be one of {1}\" # noqa: E501\n .format(aux_input_type10, allowed_values)\n )\n\n self._aux_input_type10 = aux_input_type10", "def aux_input_type5(self, aux_input_type5):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type5 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type5` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type5, allowed_values)\n )\n\n self._aux_input_type5 = aux_input_type5", "def aux_input_type4(self, aux_input_type4):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type4 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type4` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type4, allowed_values)\n )\n\n self._aux_input_type4 = aux_input_type4", "def aux_input_type7(self, aux_input_type7):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type7 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type7` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type7, allowed_values)\n )\n\n self._aux_input_type7 = aux_input_type7", "def feature_vector2(self, feature_vector2):\n\n self._feature_vector2 = feature_vector2", "def update_scan2D_type(self, param):\n try:\n self.settings.child('scan2D_settings', 'step_2d_axis1').show()\n self.settings.child('scan2D_settings', 'step_2d_axis2').show()\n scan_subtype = self.settings.child('scan2D_settings', 'scan2D_type').value()\n self.settings.child('scan2D_settings', 'scan2D_loss').show(scan_subtype == 'Adaptive')\n if scan_subtype == 'Adaptive':\n if self.settings.child('scan2D_settings', 'scan2D_loss').value() == 'resolution':\n title = 'Minimal feature (%):'\n if self.settings.child('scan2D_settings', 'step_2d_axis1').opts['title'] != title:\n self.settings.child('scan2D_settings', 'step_2d_axis1').setValue(1)\n self.settings.child('scan2D_settings', 'step_2d_axis2').setValue(100)\n\n self.settings.child('scan2D_settings', 'step_2d_axis1').setOpts(\n limits=[0, 100], title=title, visible=True,\n tip='Features smaller than this will not be probed first. In percent of maximal scanned area'\n ' length',\n )\n self.settings.child('scan2D_settings', 'step_2d_axis2').setOpts(\n limits=[0, 100], title='Maximal feature (%):', visible=True,\n tip='Features bigger than this will be probed first. 
In percent of maximal scanned area length',\n )\n\n else:\n self.settings.child('scan2D_settings', 'step_2d_axis1').hide()\n self.settings.child('scan2D_settings', 'step_2d_axis2').hide()\n else:\n self.settings.child('scan2D_settings',\n 'step_2d_axis1').setOpts(title='Step Ax1:',\n tip='Step size for ax1 in actuator units')\n self.settings.child('scan2D_settings',\n 'step_2d_axis2').setOpts(title='Step Ax2:',\n tip='Step size for ax2 in actuator units')\n\n if scan_subtype == 'Spiral':\n self.settings.child('scan2D_settings',\n 'start_2d_axis1').setOpts(title='Center Ax1')\n self.settings.child('scan2D_settings',\n 'start_2d_axis2').setOpts(title='Center Ax2')\n\n self.settings.child('scan2D_settings',\n 'stop_2d_axis1').setOpts(title='Rmax Ax1', readonly=True,\n tip='Read only for Spiral scan type, set the step and Npts/axis')\n self.settings.child('scan2D_settings',\n 'stop_2d_axis2').setOpts(title='Rmax Ax2', readonly=True,\n tip='Read only for Spiral scan type, set the step and Npts/axis')\n self.settings.child('scan2D_settings',\n 'npts_by_axis').show()\n\n # do some checks and set stops values\n self.settings.sigTreeStateChanged.disconnect()\n if param.name() == 'step_2d_axis1':\n if param.value() < 0:\n param.setValue(-param.value())\n\n if param.name() == 'step_2d_axis2':\n if param.value() < 0:\n param.setValue(-param.value())\n\n self.settings.child('scan2D_settings', 'stop_2d_axis1').setValue(\n np.rint(self.settings.child(\n 'scan2D_settings', 'npts_by_axis').value() / 2) * np.abs(\n self.settings.child('scan2D_settings', 'step_2d_axis1').value()))\n\n self.settings.child('scan2D_settings', 'stop_2d_axis2').setValue(\n np.rint(self.settings.child(\n 'scan2D_settings', 'npts_by_axis').value() / 2) * np.abs(\n self.settings.child('scan2D_settings', 'step_2d_axis2').value()))\n QtWidgets.QApplication.processEvents()\n self.settings.sigTreeStateChanged.connect(self.parameter_tree_changed)\n else:\n self.settings.child('scan2D_settings',\n 'start_2d_axis1').setOpts(title='Start Ax1')\n self.settings.child('scan2D_settings',\n 'start_2d_axis2').setOpts(title='Start Ax2')\n\n self.settings.child('scan2D_settings',\n 'stop_2d_axis1').setOpts(title='Stop Ax1', readonly=False,\n tip='Set the stop positions')\n self.settings.child('scan2D_settings',\n 'stop_2d_axis2').setOpts(title='StopAx2', readonly=False,\n tip='Set the stop positions')\n self.settings.child('scan2D_settings', 'npts_by_axis').hide()\n except Exception as e:\n raise ScannerException(str(e))", "def address_2(self, address_2):\n\n self._address_2 = address_2", "def custom_info2(self, custom_info2):\n\n self._custom_info2 = custom_info2", "def custom_info2(self, custom_info2):\n\n self._custom_info2 = custom_info2", "async def change_aux(self, aux, newstate):\n if not self.connected:\n return\n\n # we don't have 3 auxs!\n if aux > 1:\n return\n\n # we don't have THIS aux\n if not self.aux_array[aux]:\n return\n\n # this is a toggle switch, not on/off\n if self.aux_status[aux] == newstate:\n return\n\n # Setup the basic things we know\n data = bytearray(9)\n data[0] = M_START\n data[1] = 7\n data[2] = mtypes[BMTS_CONTROL_REQ][0]\n data[3] = mtypes[BMTS_CONTROL_REQ][1]\n data[4] = mtypes[BMTS_CONTROL_REQ][2]\n data[5] = C_AUX1 if aux == 0 else C_AUX2\n data[6] = 0x00 # who knows?\n data[7] = messages.Message.crc(data[1:7])\n data[8] = M_END\n\n self.writer.write(data)\n await self.writer.drain()", "def address2(self, address2):\n\n self._address2 = address2", "def SetInputNarrowBand(self, ptr: 'itkVectorContainerUILSNF2') 
-> \"void\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetInputNarrowBand(self, ptr)", "def x2(self, x2=None):\n\n if x2 is None:\n return self._x2\n else:\n if not isinstance(x2, int) and not isinstance(x2, float):\n raise TypeError(\"x2 must be numeric, not '%s'\" % x2)\n self._x2 = x2", "def street_2(self, street_2):\n\n self._street_2 = street_2", "def set_H2(self):\n self.slot.H2 = self.lf_H2.value()\n self.w_out.comp_output()\n # Notify the machine GUI that the machine has changed\n self.saveNeeded.emit()", "def custom_data_2(self, custom_data_2):\n # type: (string_types) -> None\n\n if custom_data_2 is not None:\n if not isinstance(custom_data_2, string_types):\n raise TypeError(\"Invalid type for `custom_data_2`, type has to be `string_types`\")\n\n self._custom_data_2 = custom_data_2", "def _upgradeIdeviceToVersion2(self):\n log.debug(\"upgrading to version 2, for 0.12\")\n self.userResources = []\n if self.icon:\n self.systemResources = [\"icon_\"+self.icon+\".gif\"]\n else:\n self.systemResources = []", "def set_action2(self, action2, feature):\n assert feature not in self._action2\n self._action2[feature] = action2", "def port2(self, port2):\n\n self._port2 = port2", "def set_W2(self):\n if self.c_W2_unit.currentIndex() == 0: # Rad\n self.slot.W2 = self.lf_W2.value()\n else:\n self.slot.W2 = self.lf_W2.value() / 180 * pi\n self.w_out.comp_output()\n # Notify the machine GUI that the machine has changed\n self.saveNeeded.emit()", "def address_line2(self, address_line2):\n\n self._address_line2 = address_line2", "def address_line2(self, address_line2):\n\n self._address_line2 = address_line2", "def address_line2(self, address_line2):\n\n self._address_line2 = address_line2", "def set_svpn2(self, svpn2):\n self.svpn2 = svpn2" ]
[ "0.67737305", "0.60007954", "0.5945109", "0.5709236", "0.5614584", "0.55712336", "0.5553947", "0.54106337", "0.52731603", "0.5124387", "0.5114672", "0.46984407", "0.469242", "0.46270508", "0.46270508", "0.45326632", "0.4530033", "0.45260003", "0.45230535", "0.4521897", "0.45010307", "0.4493778", "0.44835132", "0.44816235", "0.4464145", "0.44629577", "0.442794", "0.442794", "0.442794", "0.44256538" ]
0.8351475
0
Sets the aux_input_type3 of this UpdateVehicleRequest.
def aux_input_type3(self, aux_input_type3): allowed_values = ["none", "emergencyLights", "emergencyAlarm", "stopPaddle", "powerTakeOff", "plow", "sweeper", "salter", "reefer", "door", "boom", "auxiliaryEngine", "generator", "eightWayLights"] # noqa: E501 if self.local_vars_configuration.client_side_validation and aux_input_type3 not in allowed_values: # noqa: E501 raise ValueError( "Invalid value for `aux_input_type3` ({0}), must be one of {1}" # noqa: E501 .format(aux_input_type3, allowed_values) ) self._aux_input_type3 = aux_input_type3
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def aux_input_type9(self, aux_input_type9):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type9 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type9` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type9, allowed_values)\n )\n\n self._aux_input_type9 = aux_input_type9", "def aux_input_type4(self, aux_input_type4):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type4 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type4` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type4, allowed_values)\n )\n\n self._aux_input_type4 = aux_input_type4", "def aux_input_type6(self, aux_input_type6):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type6 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type6` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type6, allowed_values)\n )\n\n self._aux_input_type6 = aux_input_type6", "def aux_input_type1(self, aux_input_type1):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type1 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type1` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type1, allowed_values)\n )\n\n self._aux_input_type1 = aux_input_type1", "def aux_input_type2(self, aux_input_type2):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type2 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type2` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type2, allowed_values)\n )\n\n self._aux_input_type2 = aux_input_type2", "def aux_input_type7(self, aux_input_type7):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type7 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type7` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type7, allowed_values)\n )\n\n self._aux_input_type7 = 
aux_input_type7", "def aux_input_type8(self, aux_input_type8):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type8 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type8` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type8, allowed_values)\n )\n\n self._aux_input_type8 = aux_input_type8", "def aux_input_type5(self, aux_input_type5):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type5 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type5` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type5, allowed_values)\n )\n\n self._aux_input_type5 = aux_input_type5", "def Set3StateValue(self, state):\r\n\r\n if not self._is3State and state == wx.CHK_UNDETERMINED:\r\n raise Exception(\"Set3StateValue can only be used with 3-state checkbox items.\")\r\n\r\n self._checked = state", "def aux_input_type10(self, aux_input_type10):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type10 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type10` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type10, allowed_values)\n )\n\n self._aux_input_type10 = aux_input_type10", "def custom_data_3(self, custom_data_3):\n # type: (string_types) -> None\n\n if custom_data_3 is not None:\n if not isinstance(custom_data_3, string_types):\n raise TypeError(\"Invalid type for `custom_data_3`, type has to be `string_types`\")\n\n self._custom_data_3 = custom_data_3", "def partition3(self, partition3):\n\n self._partition3 = partition3", "def Set3State(self, allow):\r\n\r\n if self._type != 1:\r\n return False\r\n\r\n self._is3State = allow\r\n return True", "def SetItem3State(self, item, allow):\r\n\r\n return item.Set3State(allow)", "def updateATOM3instance( self, atom3i ):\r\n self.cb = atom3i.cb \r\n self.atom3i = atom3i", "def SetItem3StateValue(self, item, state):\r\n\r\n item.Set3StateValue(state)", "def is3_d(self, is3_d):\n\n self.container['is3_d'] = is3_d", "def f3z1(self, f3z1):\n\n self._f3z1 = f3z1", "def upgradeToVersion3(self):\n self.lastIdevice = False", "def setFocus3D(x,y,z, focustype='absolute'):\n fdict = {'absolute':'ABS','user':'USER'}\n dislin.vfoc3d(x,y,z,fdict[focustype])", "def u3(self, q0, alpha=DEF_PHASE, beta=DEF_PHASE, gamma=DEF_PHASE, ctrl=None):\n self.__add_quantum_gate(kind=ROTATION_U3, qid=[q0], phase=alpha, phase1=beta,\n phase2=gamma, ctrl=ctrl)\n return self", "def S3_u32(self) -> complex:\n return complex(\n self._addr(self.defined_registers['Phase 3 power [W]']['addr']),\n self._addr(self.defined_registers['Phase 3 volt amps reactive [VAr]']['addr']),\n )", "def ipob3(self):\n if self._ipob3 is None:\n if self.boundary_file is not None:\n 
self._ipob3 = self.get_bnd_ipobo()\n else:\n self._ipob3 = np.zeros((self.npoin3))\n\n return self._ipob3", "def address_line3(self, address_line3):\n\n self._address_line3 = address_line3", "def __array_finalize__(self, obj):\n if obj is None or obj.__class__ is Vector3:\n return\n if self.shape != (3,):\n raise ValueError(\n 'Invalid array to view as Vector3 - must be length-3 array.'\n )", "def SetInputNarrowBand(self, ptr: 'itkVectorContainerUILSNF3') -> \"void\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetInputNarrowBand(self, ptr)", "def cu3(self, q0, q1, alpha=DEF_PHASE, beta=DEF_PHASE, gamma=DEF_PHASE, ctrl=None):\n self.__add_quantum_gate(kind=CONTROLLED_U3, qid=[q0,q1], phase=alpha, phase1=beta,\n phase2=gamma, ctrl=ctrl)\n return self", "def itkReinitializeLevelSetImageFilterIF3_cast(obj: 'itkLightObject') -> \"itkReinitializeLevelSetImageFilterIF3 *\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_cast(obj)", "def I3_u3(self) -> complex:\n return self.I3_u1() * cmath.rect(1, 120 / 180 * cmath.pi)", "def svn_client_commit_item3_t_state_flags_set(svn_client_commit_item3_t_self, apr_byte_t_state_flags): # real signature unknown; restored from __doc__\n pass" ]
[ "0.60051674", "0.59958714", "0.59745276", "0.5884165", "0.573058", "0.55547076", "0.5548062", "0.5489199", "0.54143596", "0.5395056", "0.5113174", "0.5069129", "0.49999794", "0.49934226", "0.49783775", "0.49597377", "0.49532253", "0.48770082", "0.48317355", "0.4767722", "0.47508335", "0.4742356", "0.47111183", "0.4697524", "0.46770427", "0.46738294", "0.4639091", "0.46024686", "0.4586635", "0.45649904" ]
0.833222
0
Sets the aux_input_type4 of this UpdateVehicleRequest.
def aux_input_type4(self, aux_input_type4): allowed_values = ["none", "emergencyLights", "emergencyAlarm", "stopPaddle", "powerTakeOff", "plow", "sweeper", "salter", "reefer", "door", "boom", "auxiliaryEngine", "generator", "eightWayLights"] # noqa: E501 if self.local_vars_configuration.client_side_validation and aux_input_type4 not in allowed_values: # noqa: E501 raise ValueError( "Invalid value for `aux_input_type4` ({0}), must be one of {1}" # noqa: E501 .format(aux_input_type4, allowed_values) ) self._aux_input_type4 = aux_input_type4
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def aux_input_type6(self, aux_input_type6):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type6 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type6` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type6, allowed_values)\n )\n\n self._aux_input_type6 = aux_input_type6", "def aux_input_type3(self, aux_input_type3):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type3 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type3` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type3, allowed_values)\n )\n\n self._aux_input_type3 = aux_input_type3", "def aux_input_type5(self, aux_input_type5):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type5 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type5` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type5, allowed_values)\n )\n\n self._aux_input_type5 = aux_input_type5", "def aux_input_type8(self, aux_input_type8):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type8 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type8` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type8, allowed_values)\n )\n\n self._aux_input_type8 = aux_input_type8", "def aux_input_type9(self, aux_input_type9):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type9 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type9` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type9, allowed_values)\n )\n\n self._aux_input_type9 = aux_input_type9", "def aux_input_type1(self, aux_input_type1):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type1 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type1` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type1, allowed_values)\n )\n\n self._aux_input_type1 = 
aux_input_type1", "def aux_input_type10(self, aux_input_type10):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type10 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type10` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type10, allowed_values)\n )\n\n self._aux_input_type10 = aux_input_type10", "def aux_input_type7(self, aux_input_type7):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type7 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type7` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type7, allowed_values)\n )\n\n self._aux_input_type7 = aux_input_type7", "def aux_input_type2(self, aux_input_type2):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type2 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type2` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type2, allowed_values)\n )\n\n self._aux_input_type2 = aux_input_type2", "def isic_v4(self, isic_v4: str):\n\n self._isic_v4 = isic_v4", "def _set_ipv4(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_ipv4_openconfig_qos_interfaces__qos_classifiers_classifier_terms_term_conditions_ipv4, is_container='container', yang_name=\"ipv4\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"ipv4 must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_ipv4_openconfig_qos_interfaces__qos_classifiers_classifier_terms_term_conditions_ipv4, is_container='container', yang_name=\"ipv4\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__ipv4 = t\n if hasattr(self, '_set'):\n self._set()", "def custom_data_4(self, custom_data_4):\n # type: (string_types) -> None\n\n if custom_data_4 is not None:\n if not isinstance(custom_data_4, string_types):\n raise TypeError(\"Invalid type for `custom_data_4`, type has to be `string_types`\")\n\n self._custom_data_4 = custom_data_4", "def upgradeToVersion4(self):\n self._upgradeIdeviceToVersion1()\n self._storyInstruc = self.__dict__['storyInstruc']\n self._questionInstruc = self.__dict__['questionInstruc']\n self._feedbackInstruc = self.__dict__['feedbackInstruc']", "def _set_ipv4(self, v, 
load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_ipv4_openconfig_qos_elements__qos_classifiers_classifier_terms_term_conditions_ipv4, is_container='container', yang_name=\"ipv4\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"ipv4 must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_ipv4_openconfig_qos_elements__qos_classifiers_classifier_terms_term_conditions_ipv4, is_container='container', yang_name=\"ipv4\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__ipv4 = t\n if hasattr(self, '_set'):\n self._set()", "def _set_ipv4(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_ipv4_openconfig_qos__qos_classifiers_classifier_terms_term_conditions_ipv4, is_container='container', yang_name=\"ipv4\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"ipv4 must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_ipv4_openconfig_qos__qos_classifiers_classifier_terms_term_conditions_ipv4, is_container='container', yang_name=\"ipv4\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__ipv4 = t\n if hasattr(self, '_set'):\n self._set()", "def set_isccp4(self, isccp4):\n # Possibly this should be internally accessible only?\n self.__isccp4 = isccp4", "def ipv4(self, ipv4):\n\n self._ipv4 = ipv4", "def setSafety(self, request, context):\n self.vehicle.safety = request.safety\n \n return droneconnect_pb2.Null()", "def normalize_ip4(self):\n\n ip = str(self.ip4)\n # Let's normalize the ip list first\n ip_list = list(\n map(\n lambda v: ipaddress.IPv4Network(v),\n filter(\n lambda v: self.try_convert(v, None, ipaddress.IPv4Network),\n map(\n lambda v: v.split('|')[1].split('/')[0].strip()\n if '|' in v else\n v.split('/')[0].strip(),\n ip.split(',')\n )\n )\n )\n )\n\n if ip_list:\n ip_list.sort()\n ip = tuple(\n int(c)\n for c in str(ip_list[0]).split('/')[0].split('.')\n )\n else:\n ip = (9999, ip)\n\n self.ip4 = ip", "def update(self, v_input):\n\n self.v = v_input", "async def change_aux(self, aux, newstate):\n if not self.connected:\n return\n\n # we don't have 3 auxs!\n if aux > 1:\n return\n\n # we don't have THIS aux\n if not self.aux_array[aux]:\n return\n\n # this is a toggle switch, not on/off\n if self.aux_status[aux] == newstate:\n return\n\n # Setup the basic things we know\n data = bytearray(9)\n data[0] = M_START\n data[1] = 7\n data[2] = mtypes[BMTS_CONTROL_REQ][0]\n data[3] = mtypes[BMTS_CONTROL_REQ][1]\n data[4] = 
mtypes[BMTS_CONTROL_REQ][2]\n data[5] = C_AUX1 if aux == 0 else C_AUX2\n data[6] = 0x00 # who knows?\n data[7] = messages.Message.crc(data[1:7])\n data[8] = M_END\n\n self.writer.write(data)\n await self.writer.drain()", "def __init__(self, attributes=None, aux_input_type1=None, aux_input_type10=None, aux_input_type2=None, aux_input_type3=None, aux_input_type4=None, aux_input_type5=None, aux_input_type6=None, aux_input_type7=None, aux_input_type8=None, aux_input_type9=None, engine_hours=None, external_ids=None, gateway_serial=None, harsh_acceleration_setting_type=None, license_plate=None, name=None, notes=None, odometer_meters=None, static_assigned_driver_id=None, tag_ids=None, vin=None, local_vars_configuration=None): # noqa: E501 # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration()\n self.local_vars_configuration = local_vars_configuration\n\n self._attributes = None\n self._aux_input_type1 = None\n self._aux_input_type10 = None\n self._aux_input_type2 = None\n self._aux_input_type3 = None\n self._aux_input_type4 = None\n self._aux_input_type5 = None\n self._aux_input_type6 = None\n self._aux_input_type7 = None\n self._aux_input_type8 = None\n self._aux_input_type9 = None\n self._engine_hours = None\n self._external_ids = None\n self._gateway_serial = None\n self._harsh_acceleration_setting_type = None\n self._license_plate = None\n self._name = None\n self._notes = None\n self._odometer_meters = None\n self._static_assigned_driver_id = None\n self._tag_ids = None\n self._vin = None\n self.discriminator = None\n\n if attributes is not None:\n self.attributes = attributes\n if aux_input_type1 is not None:\n self.aux_input_type1 = aux_input_type1\n if aux_input_type10 is not None:\n self.aux_input_type10 = aux_input_type10\n if aux_input_type2 is not None:\n self.aux_input_type2 = aux_input_type2\n if aux_input_type3 is not None:\n self.aux_input_type3 = aux_input_type3\n if aux_input_type4 is not None:\n self.aux_input_type4 = aux_input_type4\n if aux_input_type5 is not None:\n self.aux_input_type5 = aux_input_type5\n if aux_input_type6 is not None:\n self.aux_input_type6 = aux_input_type6\n if aux_input_type7 is not None:\n self.aux_input_type7 = aux_input_type7\n if aux_input_type8 is not None:\n self.aux_input_type8 = aux_input_type8\n if aux_input_type9 is not None:\n self.aux_input_type9 = aux_input_type9\n if engine_hours is not None:\n self.engine_hours = engine_hours\n if external_ids is not None:\n self.external_ids = external_ids\n if gateway_serial is not None:\n self.gateway_serial = gateway_serial\n if harsh_acceleration_setting_type is not None:\n self.harsh_acceleration_setting_type = harsh_acceleration_setting_type\n if license_plate is not None:\n self.license_plate = license_plate\n if name is not None:\n self.name = name\n if notes is not None:\n self.notes = notes\n if odometer_meters is not None:\n self.odometer_meters = odometer_meters\n if static_assigned_driver_id is not None:\n self.static_assigned_driver_id = static_assigned_driver_id\n if tag_ids is not None:\n self.tag_ids = tag_ids\n if vin is not None:\n self.vin = vin", "def _(event):\n _logger.debug('Detected F4 key.')\n key_binding_manager.enable_vi_mode = not key_binding_manager.enable_vi_mode", "def reset_gesture_engine_interrupt_settings(self):\n self.write_flag_data([True], APDS_9960.GESTURE_CONFIG_4_REG_ADDRESS, 2)", "def _update_input_type(self):\n pass", "def updateTransformationType(self):\n tt = self.vb.determineTransformationType()\n 
self.stackedWidget.setCurrentIndex(tt)\n self.checkRequirements()", "def set_tunable_parameters(self, input_type='default', reduced_size=64, **kwargs):\n self.logger.info(\"Set parameters: input_type=%s, reduced_size=%s\", input_type, reduced_size)\n self.input_type = input_type\n self.reduced_size = reduced_size", "def _is_ue4(self):\n\n return False", "def setS4(self, num):\n self.space4 = num", "def _updateTruckRec(self, tNode, Uid):\n if type(Uid) == int:\n self._readTruckRec(tNode, Uid)\n print(f'Vehicle Id {Uid} record updated')\n print('------------------------------------')" ]
[ "0.6177032", "0.6103347", "0.5953408", "0.5917011", "0.58011657", "0.5769559", "0.5681293", "0.56600946", "0.53610873", "0.4841803", "0.45973018", "0.4593053", "0.45379746", "0.45276865", "0.45267612", "0.44020706", "0.4333473", "0.4321829", "0.4218026", "0.42120838", "0.4156517", "0.41110682", "0.40365285", "0.39071", "0.3906939", "0.38517332", "0.38238898", "0.38226244", "0.37919793", "0.37818938" ]
0.82154655
0
Sets the aux_input_type5 of this UpdateVehicleRequest.
def aux_input_type5(self, aux_input_type5): allowed_values = ["none", "emergencyLights", "emergencyAlarm", "stopPaddle", "powerTakeOff", "plow", "sweeper", "salter", "reefer", "door", "boom", "auxiliaryEngine", "generator", "eightWayLights"] # noqa: E501 if self.local_vars_configuration.client_side_validation and aux_input_type5 not in allowed_values: # noqa: E501 raise ValueError( "Invalid value for `aux_input_type5` ({0}), must be one of {1}" # noqa: E501 .format(aux_input_type5, allowed_values) ) self._aux_input_type5 = aux_input_type5
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def aux_input_type6(self, aux_input_type6):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type6 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type6` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type6, allowed_values)\n )\n\n self._aux_input_type6 = aux_input_type6", "def aux_input_type8(self, aux_input_type8):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type8 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type8` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type8, allowed_values)\n )\n\n self._aux_input_type8 = aux_input_type8", "def aux_input_type4(self, aux_input_type4):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type4 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type4` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type4, allowed_values)\n )\n\n self._aux_input_type4 = aux_input_type4", "def aux_input_type10(self, aux_input_type10):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type10 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type10` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type10, allowed_values)\n )\n\n self._aux_input_type10 = aux_input_type10", "def aux_input_type1(self, aux_input_type1):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type1 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type1` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type1, allowed_values)\n )\n\n self._aux_input_type1 = aux_input_type1", "def aux_input_type7(self, aux_input_type7):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type7 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type7` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type7, allowed_values)\n )\n\n self._aux_input_type7 = 
aux_input_type7", "def aux_input_type3(self, aux_input_type3):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type3 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type3` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type3, allowed_values)\n )\n\n self._aux_input_type3 = aux_input_type3", "def aux_input_type9(self, aux_input_type9):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type9 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type9` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type9, allowed_values)\n )\n\n self._aux_input_type9 = aux_input_type9", "def aux_input_type2(self, aux_input_type2):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type2 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type2` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type2, allowed_values)\n )\n\n self._aux_input_type2 = aux_input_type2", "def custom_data_5(self, custom_data_5):\n # type: (string_types) -> None\n\n if custom_data_5 is not None:\n if not isinstance(custom_data_5, string_types):\n raise TypeError(\"Invalid type for `custom_data_5`, type has to be `string_types`\")\n\n self._custom_data_5 = custom_data_5", "def _5qi(self, _5qi):\n if _5qi is None:\n raise ValueError(\"Invalid value for `_5qi`, must not be `None`\") # noqa: E501\n\n self.__5qi = _5qi", "def set_parameter_values(self, c5=None, lm=1.0):\n\n self._c5 = c5\n self._lm = lm\n\n self._update()", "async def change_aux(self, aux, newstate):\n if not self.connected:\n return\n\n # we don't have 3 auxs!\n if aux > 1:\n return\n\n # we don't have THIS aux\n if not self.aux_array[aux]:\n return\n\n # this is a toggle switch, not on/off\n if self.aux_status[aux] == newstate:\n return\n\n # Setup the basic things we know\n data = bytearray(9)\n data[0] = M_START\n data[1] = 7\n data[2] = mtypes[BMTS_CONTROL_REQ][0]\n data[3] = mtypes[BMTS_CONTROL_REQ][1]\n data[4] = mtypes[BMTS_CONTROL_REQ][2]\n data[5] = C_AUX1 if aux == 0 else C_AUX2\n data[6] = 0x00 # who knows?\n data[7] = messages.Message.crc(data[1:7])\n data[8] = M_END\n\n self.writer.write(data)\n await self.writer.drain()", "def __init__(self, _5qi=None, gbr_ul=None, gbr_dl=None): # noqa: E501\n self.swagger_types = {\n '_5qi': Model5Qi,\n 'gbr_ul': BitRate,\n 'gbr_dl': BitRate\n }\n\n self.attribute_map = {\n '_5qi': '5qi',\n 'gbr_ul': 'gbrUl',\n 'gbr_dl': 'gbrDl'\n }\n self.__5qi = _5qi\n self._gbr_ul = gbr_ul\n self._gbr_dl = gbr_dl", "def set_type(self, typ):\n if typ in range(5):\n self._type = typ\n\n else:\n raise ValueError(\n \"ERROR: Invalid input. 
Please give a numerical value \"\n \"between 0 and 4 ( both inclusive ) \")", "def SetInput(self, , , p_float_6):\n ...", "def fixN5(self):\n smi = None\n _bom = self.bom.copy()\n _chgs = self.chgs.copy()\n for ia in self.iasN5:\n ai = self.atoms[ia]\n for bi in ai.GetBonds():\n ia1, ja1 = bi.GetBeginAtomIdx(), bi.GetEndAtomIdx()\n ja = ia1 if ja1 == ia else ja1\n aj = self.atoms[ja]\n assert ja != ia\n if _bom[ia,ja] == 2: # re-assign BO to 1 for the first double bond found\n _bom[ia,ja] = _bom[ja,ia] = 1\n _chgs[ia] = 1\n _chgs[ja] = -1\n bi.SetBondType( bo2bt['1.0'] )\n ai.SetFormalCharge(1)\n aj.SetFormalCharge(-1)\n break\n self._bom = _bom\n self._chgs = _chgs\n if self.i_remove_isotope:\n self.remove_isotope()\n try:\n Chem.SanitizeMol(self.m)\n smi = Chem.MolToSmiles(self.m, canonical=T)\n except:\n raise Exception(':: fixN5() failed??')\n self.smiles = smi", "def setS5(self, num):\n self.space5 = num", "def set_tunable_parameters(self, input_type='default', reduced_size=64, **kwargs):\n self.logger.info(\"Set parameters: input_type=%s, reduced_size=%s\", input_type, reduced_size)\n self.input_type = input_type\n self.reduced_size = reduced_size", "def host_alias5(self, host_alias5):\n\n self._host_alias5 = host_alias5", "def uni_type(self, uni_type):\n self._uni_type = uni_type", "def LeNet5_architecture(self, input_shape):\r\n\r\n # Convolution layer (C1) hyperparameters\r\n s1 = self.hparameters[\"s1\"]\r\n f1 = self.hparameters[\"f1\"]\r\n n1 = self.hparameters[\"n1\"]\r\n\r\n # Average pooling layer(S2) hyperparameters\r\n s2 = self.hparameters[\"s2\"]\r\n f2 = self.hparameters[\"f2\"]\r\n\r\n # Convolutional layer (C3) hyperparameters\r\n s3 = self.hparameters[\"s3\"]\r\n f3 = self.hparameters[\"f3\"]\r\n n3 = self.hparameters[\"n3\"]\r\n\r\n # Average pooling layers (S4) hyperparameters\r\n s4 = self.hparameters[\"s4\"]\r\n f4 = self.hparameters[\"f4\"]\r\n\r\n # Convolutional layer (C5) hyperparameters\r\n s5 = self.hparameters[\"s5\"]\r\n f5 = self.hparameters[\"f5\"]\r\n n5 = self.hparameters[\"n5\"]\r\n\r\n # Number of outputs\r\n num_classes = self.num_classes\r\n\r\n X_input = Input(input_shape)\r\n X = X_input\r\n\r\n # Convolution layer 1\r\n X = Conv2D(n1, (f1,f1), strides = (s1, s1), padding = 'valid', name = 'C1', kernel_initializer = glorot_uniform(seed = 0))(X)\r\n # Average pooling\r\n X = AveragePooling2D(pool_size= (f2,f2), strides = (s2,s2), padding = 'valid', name = 'S2')(X)\r\n # Activation\r\n X = Activation('tanh')(X)\r\n # Convolution layer 2\r\n X = Conv2D(n3, (f3,f3), strides = (s3, s3), padding = 'valid', name = 'C3', kernel_initializer = glorot_uniform(seed = 0))(X)\r\n #Average pooling\r\n X = AveragePooling2D(pool_size= (f4,f4), strides = (s4,s4), padding = 'valid', name = 'S4')(X)\r\n # Activation\r\n X = Activation('tanh')(X)\r\n # Convolutional layer 3\r\n X = Conv2D(n5, (f5,f5), strides = (s5, s5), padding = 'valid', name = 'C5', kernel_initializer = glorot_uniform(seed = 0))(X)\r\n # Activation\r\n X = Activation('tanh')(X)\r\n # Flatten\r\n X = Flatten()(X)\r\n # Fully Connected layer\r\n X = Dense(num_classes, activation = 'softmax', name = 'FC', kernel_initializer = glorot_uniform(seed = 0))(X)\r\n\r\n #create model\r\n model = Model(inputs = X_input, outputs = X, name = 'LeNet5')\r\n\r\n return model", "def set_type(self, new_value):\n\n self.vax_type = new_value\n self.save()", "def SetInputNarrowBand(self, ptr: 'itkVectorContainerUILSNF3') -> \"void\":\n return 
_itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetInputNarrowBand(self, ptr)", "def interaction_type(self, interaction_type):\n\n self._interaction_type = interaction_type", "def block5_threshold(self):\n return self._safe_value(VAR_BLOCK5THRESHOLD, float)", "def set_ivt_variable(self, var):\n self.set_input_variable(var)", "def _update_input_type(self):\n pass", "def __init__(self,v0,v1,v2,v3,v4,v5):\r\n self.v0,self.v1,self.v2,self.v3,self.v4,self.v5 = v0,v1,v2,v3,v4,v5", "def r5_on_off():\n \n r5_cmd_packet = b'\\x04\\x14\\x10\\x00\\x00\\xd8\\x0f'\n ser_relay.write(r4_cmd_packet)" ]
[ "0.6735667", "0.61671937", "0.6084544", "0.60733855", "0.6030134", "0.57733315", "0.57299143", "0.56766456", "0.5534028", "0.5277805", "0.50033385", "0.45613086", "0.45503688", "0.44486523", "0.43939793", "0.43528506", "0.4218385", "0.41903195", "0.41550255", "0.41463068", "0.4103813", "0.4019174", "0.39844674", "0.39584473", "0.39538127", "0.39448747", "0.39425403", "0.39402294", "0.3939398", "0.3934243" ]
0.84810084
0
Sets the aux_input_type6 of this UpdateVehicleRequest.
def aux_input_type6(self, aux_input_type6): allowed_values = ["none", "emergencyLights", "emergencyAlarm", "stopPaddle", "powerTakeOff", "plow", "sweeper", "salter", "reefer", "door", "boom", "auxiliaryEngine", "generator", "eightWayLights"] # noqa: E501 if self.local_vars_configuration.client_side_validation and aux_input_type6 not in allowed_values: # noqa: E501 raise ValueError( "Invalid value for `aux_input_type6` ({0}), must be one of {1}" # noqa: E501 .format(aux_input_type6, allowed_values) ) self._aux_input_type6 = aux_input_type6
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def aux_input_type7(self, aux_input_type7):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type7 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type7` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type7, allowed_values)\n )\n\n self._aux_input_type7 = aux_input_type7", "def aux_input_type5(self, aux_input_type5):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type5 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type5` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type5, allowed_values)\n )\n\n self._aux_input_type5 = aux_input_type5", "def aux_input_type9(self, aux_input_type9):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type9 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type9` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type9, allowed_values)\n )\n\n self._aux_input_type9 = aux_input_type9", "def aux_input_type8(self, aux_input_type8):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type8 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type8` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type8, allowed_values)\n )\n\n self._aux_input_type8 = aux_input_type8", "def aux_input_type4(self, aux_input_type4):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type4 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type4` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type4, allowed_values)\n )\n\n self._aux_input_type4 = aux_input_type4", "def aux_input_type1(self, aux_input_type1):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type1 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type1` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type1, allowed_values)\n )\n\n self._aux_input_type1 = 
aux_input_type1", "def aux_input_type3(self, aux_input_type3):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type3 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type3` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type3, allowed_values)\n )\n\n self._aux_input_type3 = aux_input_type3", "def aux_input_type10(self, aux_input_type10):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type10 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type10` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type10, allowed_values)\n )\n\n self._aux_input_type10 = aux_input_type10", "def aux_input_type2(self, aux_input_type2):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type2 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type2` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type2, allowed_values)\n )\n\n self._aux_input_type2 = aux_input_type2", "def custom_data_6(self, custom_data_6):\n # type: (string_types) -> None\n\n if custom_data_6 is not None:\n if not isinstance(custom_data_6, string_types):\n raise TypeError(\"Invalid type for `custom_data_6`, type has to be `string_types`\")\n\n self._custom_data_6 = custom_data_6", "def SetInput(self, , , p_float_6):\n ...", "def ipv6(self, ipv6):\n\n self._ipv6 = ipv6", "async def change_aux(self, aux, newstate):\n if not self.connected:\n return\n\n # we don't have 3 auxs!\n if aux > 1:\n return\n\n # we don't have THIS aux\n if not self.aux_array[aux]:\n return\n\n # this is a toggle switch, not on/off\n if self.aux_status[aux] == newstate:\n return\n\n # Setup the basic things we know\n data = bytearray(9)\n data[0] = M_START\n data[1] = 7\n data[2] = mtypes[BMTS_CONTROL_REQ][0]\n data[3] = mtypes[BMTS_CONTROL_REQ][1]\n data[4] = mtypes[BMTS_CONTROL_REQ][2]\n data[5] = C_AUX1 if aux == 0 else C_AUX2\n data[6] = 0x00 # who knows?\n data[7] = messages.Message.crc(data[1:7])\n data[8] = M_END\n\n self.writer.write(data)\n await self.writer.drain()", "def ipv6_access_type(self) -> Optional[pulumi.Input['IPAllocationPolicyIpv6AccessType']]:\n return pulumi.get(self, \"ipv6_access_type\")", "def ipv6_bandwidth(self, ipv6_bandwidth):\n self._ipv6_bandwidth = ipv6_bandwidth", "def _update_input_type(self):\n pass", "def relu6(input, inplace=False):\n return FunctionLib.apply(\n 'Relu', input.device, [input],\n outputs=[input if inplace else None], alpha=0., max_value=6.)", "def uni_type(self, uni_type):\n self._uni_type = uni_type", "def _set_virtual_oper_VipV6_address(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=TypedListType(allowed_type=unicode), 
is_leaf=False, yang_name=\"virtual-oper-VipV6-address\", rest_name=\"v6\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'alt-name': u'v6'}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"virtual_oper_VipV6_address must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=TypedListType(allowed_type=unicode), is_leaf=False, yang_name=\"virtual-oper-VipV6-address\", rest_name=\"v6\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'alt-name': u'v6'}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__virtual_oper_VipV6_address = t\n if hasattr(self, '_set'):\n self._set()", "def set_iface_type(self, nIfaceType):\n\t\tcall_sdk_function('PrlVmDev_SetIfaceType', self.handle, nIfaceType)", "def add_ipv6(self, id_network_ipv6, id_equip, description):\n\n ip_map = dict()\n ip_map['id_network_ipv6'] = id_network_ipv6\n ip_map['description'] = description\n ip_map['id_equip'] = id_equip\n\n code, xml = self.submit({'ip': ip_map}, 'POST', 'ipv6/')\n\n return self.response(code, xml)", "def __init__(self, attributes=None, aux_input_type1=None, aux_input_type10=None, aux_input_type2=None, aux_input_type3=None, aux_input_type4=None, aux_input_type5=None, aux_input_type6=None, aux_input_type7=None, aux_input_type8=None, aux_input_type9=None, engine_hours=None, external_ids=None, gateway_serial=None, harsh_acceleration_setting_type=None, license_plate=None, name=None, notes=None, odometer_meters=None, static_assigned_driver_id=None, tag_ids=None, vin=None, local_vars_configuration=None): # noqa: E501 # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration()\n self.local_vars_configuration = local_vars_configuration\n\n self._attributes = None\n self._aux_input_type1 = None\n self._aux_input_type10 = None\n self._aux_input_type2 = None\n self._aux_input_type3 = None\n self._aux_input_type4 = None\n self._aux_input_type5 = None\n self._aux_input_type6 = None\n self._aux_input_type7 = None\n self._aux_input_type8 = None\n self._aux_input_type9 = None\n self._engine_hours = None\n self._external_ids = None\n self._gateway_serial = None\n self._harsh_acceleration_setting_type = None\n self._license_plate = None\n self._name = None\n self._notes = None\n self._odometer_meters = None\n self._static_assigned_driver_id = None\n self._tag_ids = None\n self._vin = None\n self.discriminator = None\n\n if attributes is not None:\n self.attributes = attributes\n if aux_input_type1 is not None:\n self.aux_input_type1 = aux_input_type1\n if aux_input_type10 is not None:\n self.aux_input_type10 = aux_input_type10\n if aux_input_type2 is not None:\n self.aux_input_type2 = aux_input_type2\n if aux_input_type3 is not None:\n self.aux_input_type3 = aux_input_type3\n if aux_input_type4 is not None:\n self.aux_input_type4 = aux_input_type4\n if aux_input_type5 is not None:\n self.aux_input_type5 = aux_input_type5\n if aux_input_type6 is not None:\n self.aux_input_type6 = aux_input_type6\n if aux_input_type7 is not None:\n self.aux_input_type7 = aux_input_type7\n if 
aux_input_type8 is not None:\n self.aux_input_type8 = aux_input_type8\n if aux_input_type9 is not None:\n self.aux_input_type9 = aux_input_type9\n if engine_hours is not None:\n self.engine_hours = engine_hours\n if external_ids is not None:\n self.external_ids = external_ids\n if gateway_serial is not None:\n self.gateway_serial = gateway_serial\n if harsh_acceleration_setting_type is not None:\n self.harsh_acceleration_setting_type = harsh_acceleration_setting_type\n if license_plate is not None:\n self.license_plate = license_plate\n if name is not None:\n self.name = name\n if notes is not None:\n self.notes = notes\n if odometer_meters is not None:\n self.odometer_meters = odometer_meters\n if static_assigned_driver_id is not None:\n self.static_assigned_driver_id = static_assigned_driver_id\n if tag_ids is not None:\n self.tag_ids = tag_ids\n if vin is not None:\n self.vin = vin", "def associate_ipv6(self, id_equip, id_ipv6):\n\n if not is_valid_int_param(id_equip):\n raise InvalidParameterError(\n u'The identifier of equipment is invalid or was not informed.')\n\n if not is_valid_int_param(id_ipv6):\n raise InvalidParameterError(\n u'The identifier of ip is invalid or was not informed.')\n\n url = 'ipv6/' + str(id_ipv6) + '/equipment/' + str(id_equip) + '/'\n\n code, xml = self.submit(None, 'PUT', url)\n\n return self.response(code, xml)", "def r6_on_off():\n \n r6_cmd_packet = b'\\x04\\x14\\x20\\x00\\x00\\xc8\\x0f'\n ser_relay.write(r6_cmd_packet)", "def EnableMTIPv6(self):\r\n\t\treturn self._get_attribute('enableMTIPv6')", "def set_tunable_parameters(self, input_type='default', reduced_size=64, **kwargs):\n self.logger.info(\"Set parameters: input_type=%s, reduced_size=%s\", input_type, reduced_size)\n self.input_type = input_type\n self.reduced_size = reduced_size", "def convert_v6_to_tuple(self, content):\n # unpack the V6 content from raw byte array, arbitrarily chose 4 2-byte values\n # for the 8 \"reserved\" bytes\n (ver, input_format, probs, planes, us_ooo, us_oo, them_ooo, them_oo,\n stm, rule50_count, invariance_info, dep_result, root_q, best_q,\n root_d, best_d, root_m, best_m, plies_left, result_q, result_d,\n played_q, played_d, played_m, orig_q, orig_d, orig_m, visits,\n played_idx, best_idx, reserved1, reserved2, reserved3,\n reserved4) = self.v6_struct.unpack(content)\n \"\"\"\n v5 struct format was (8308 bytes total)\n int32 version (4 bytes)\n int32 input_format (4 bytes)\n 1858 float32 probabilities (7432 bytes)\n 104 (13*8) packed bit planes of 8 bytes each (832 bytes)\n uint8 castling us_ooo (1 byte)\n uint8 castling us_oo (1 byte)\n uint8 castling them_ooo (1 byte)\n uint8 castling them_oo (1 byte)\n uint8 side_to_move (1 byte)\n uint8 rule50_count (1 byte)\n uint8 dep_ply_count (1 byte) (unused)\n int8 result (1 byte)\n float32 root_q (4 bytes)\n float32 best_q (4 bytes)\n float32 root_d (4 bytes)\n float32 best_d (4 bytes)\n float32 root_m (4 bytes)\n float32 best_m (4 bytes)\n float32 plies_left (4 bytes)\n \"\"\"\n # v3/4 data sometimes has a useful value in dep_ply_count (now invariance_info),\n # so copy that over if the new ply_count is not populated.\n if plies_left == 0:\n plies_left = invariance_info\n plies_left = struct.pack('f', plies_left)\n\n assert input_format == self.expected_input_format\n\n # Unpack bit planes and cast to 32 bit float\n planes = np.unpackbits(np.frombuffer(planes, dtype=np.uint8)).astype(\n np.float32)\n rule50_divisor = 99.0\n if input_format > 3:\n rule50_divisor = 100.0\n rule50_plane = struct.pack('f', rule50_count / 
rule50_divisor) * 64\n\n if input_format == 1:\n middle_planes = self.flat_planes[us_ooo] + \\\n self.flat_planes[us_oo] + \\\n self.flat_planes[them_ooo] + \\\n self.flat_planes[them_oo] + \\\n self.flat_planes[stm]\n elif input_format == 2:\n # Each inner array has to be reversed as these fields are in opposite endian to the planes data.\n them_ooo_bytes = reverse_expand_bits(them_ooo)\n us_ooo_bytes = reverse_expand_bits(us_ooo)\n them_oo_bytes = reverse_expand_bits(them_oo)\n us_oo_bytes = reverse_expand_bits(us_oo)\n middle_planes = us_ooo_bytes + (6*8*4) * b'\\x00' + them_ooo_bytes + \\\n us_oo_bytes + (6*8*4) * b'\\x00' + them_oo_bytes + \\\n self.flat_planes[0] + \\\n self.flat_planes[0] + \\\n self.flat_planes[stm]\n elif input_format == 3 or input_format == 4 or input_format == 132 or input_format == 5 or input_format == 133:\n # Each inner array has to be reversed as these fields are in opposite endian to the planes data.\n them_ooo_bytes = reverse_expand_bits(them_ooo)\n us_ooo_bytes = reverse_expand_bits(us_ooo)\n them_oo_bytes = reverse_expand_bits(them_oo)\n us_oo_bytes = reverse_expand_bits(us_oo)\n enpassant_bytes = reverse_expand_bits(stm)\n middle_planes = us_ooo_bytes + (6*8*4) * b'\\x00' + them_ooo_bytes + \\\n us_oo_bytes + (6*8*4) * b'\\x00' + them_oo_bytes + \\\n self.flat_planes[0] + \\\n self.flat_planes[0] + \\\n (7*8*4) * b'\\x00' + enpassant_bytes\n\n # Concatenate all byteplanes. Make the last plane all 1's so the NN can\n # detect edges of the board more easily\n aux_plus_6_plane = self.flat_planes[0]\n if (input_format == 132\n or input_format == 133) and invariance_info >= 128:\n aux_plus_6_plane = self.flat_planes[1]\n planes = planes.tobytes() + \\\n middle_planes + \\\n rule50_plane + \\\n aux_plus_6_plane + \\\n self.flat_planes[1]\n\n assert len(planes) == ((8 * 13 * 1 + 8 * 1 * 1) * 8 * 8 * 4)\n\n if ver == V6_VERSION:\n winner = struct.pack('fff', 0.5 * (1.0 - result_d + result_q),\n result_d, 0.5 * (1.0 - result_d - result_q))\n else:\n dep_result = float(dep_result)\n assert dep_result == 1.0 or dep_result == -1.0 or dep_result == 0.0\n winner = struct.pack('fff', dep_result == 1.0, dep_result == 0.0,\n dep_result == -1.0)\n\n best_q_w = 0.5 * (1.0 - best_d + best_q)\n best_q_l = 0.5 * (1.0 - best_d - best_q)\n assert -1.0 <= best_q <= 1.0 and 0.0 <= best_d <= 1.0\n best_q = struct.pack('fff', best_q_w, best_d, best_q_l)\n\n return (planes, probs, winner, best_q, plies_left)", "def _read_opt_type(self, kind): # pylint: disable=no-self-use\n bin_ = bin(kind)[2:].zfill(8)\n\n type_ = dict(\n value=kind,\n action=_IPv6_Opts_ACT.get(bin_[:2]),\n change=bool(int(bin_[2], base=2)),\n )\n\n return type_", "def update(self, v_input):\n\n self.v = v_input", "def enable_auxdata(sk):\n sk.setsockopt(SOL_PACKET, PACKET_AUXDATA, 1)" ]
[ "0.6811139", "0.6711326", "0.6464062", "0.61897624", "0.60081035", "0.59919214", "0.5962499", "0.58940566", "0.5785702", "0.50470114", "0.49827394", "0.4801943", "0.4498699", "0.44764334", "0.43467817", "0.431762", "0.42938703", "0.42625344", "0.4177838", "0.4142461", "0.41138268", "0.41135055", "0.40673268", "0.40417963", "0.40270782", "0.40133157", "0.4008972", "0.40016696", "0.40011603", "0.3967951" ]
0.8384907
0
Sets the aux_input_type7 of this UpdateVehicleRequest.
def aux_input_type7(self, aux_input_type7): allowed_values = ["none", "emergencyLights", "emergencyAlarm", "stopPaddle", "powerTakeOff", "plow", "sweeper", "salter", "reefer", "door", "boom", "auxiliaryEngine", "generator", "eightWayLights"] # noqa: E501 if self.local_vars_configuration.client_side_validation and aux_input_type7 not in allowed_values: # noqa: E501 raise ValueError( "Invalid value for `aux_input_type7` ({0}), must be one of {1}" # noqa: E501 .format(aux_input_type7, allowed_values) ) self._aux_input_type7 = aux_input_type7
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def aux_input_type6(self, aux_input_type6):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type6 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type6` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type6, allowed_values)\n )\n\n self._aux_input_type6 = aux_input_type6", "def aux_input_type9(self, aux_input_type9):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type9 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type9` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type9, allowed_values)\n )\n\n self._aux_input_type9 = aux_input_type9", "def aux_input_type8(self, aux_input_type8):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type8 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type8` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type8, allowed_values)\n )\n\n self._aux_input_type8 = aux_input_type8", "def aux_input_type10(self, aux_input_type10):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type10 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type10` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type10, allowed_values)\n )\n\n self._aux_input_type10 = aux_input_type10", "def aux_input_type5(self, aux_input_type5):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type5 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type5` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type5, allowed_values)\n )\n\n self._aux_input_type5 = aux_input_type5", "def aux_input_type4(self, aux_input_type4):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type4 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type4` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type4, allowed_values)\n )\n\n self._aux_input_type4 = 
aux_input_type4", "def aux_input_type3(self, aux_input_type3):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type3 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type3` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type3, allowed_values)\n )\n\n self._aux_input_type3 = aux_input_type3", "def aux_input_type1(self, aux_input_type1):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type1 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type1` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type1, allowed_values)\n )\n\n self._aux_input_type1 = aux_input_type1", "def aux_input_type2(self, aux_input_type2):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type2 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type2` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type2, allowed_values)\n )\n\n self._aux_input_type2 = aux_input_type2", "def custom_data_7(self, custom_data_7):\n # type: (string_types) -> None\n\n if custom_data_7 is not None:\n if not isinstance(custom_data_7, string_types):\n raise TypeError(\"Invalid type for `custom_data_7`, type has to be `string_types`\")\n\n self._custom_data_7 = custom_data_7", "def SetInput(self, , , p_float_6):\n ...", "async def change_aux(self, aux, newstate):\n if not self.connected:\n return\n\n # we don't have 3 auxs!\n if aux > 1:\n return\n\n # we don't have THIS aux\n if not self.aux_array[aux]:\n return\n\n # this is a toggle switch, not on/off\n if self.aux_status[aux] == newstate:\n return\n\n # Setup the basic things we know\n data = bytearray(9)\n data[0] = M_START\n data[1] = 7\n data[2] = mtypes[BMTS_CONTROL_REQ][0]\n data[3] = mtypes[BMTS_CONTROL_REQ][1]\n data[4] = mtypes[BMTS_CONTROL_REQ][2]\n data[5] = C_AUX1 if aux == 0 else C_AUX2\n data[6] = 0x00 # who knows?\n data[7] = messages.Message.crc(data[1:7])\n data[8] = M_END\n\n self.writer.write(data)\n await self.writer.drain()", "def __init__(self, attributes=None, aux_input_type1=None, aux_input_type10=None, aux_input_type2=None, aux_input_type3=None, aux_input_type4=None, aux_input_type5=None, aux_input_type6=None, aux_input_type7=None, aux_input_type8=None, aux_input_type9=None, engine_hours=None, external_ids=None, gateway_serial=None, harsh_acceleration_setting_type=None, license_plate=None, name=None, notes=None, odometer_meters=None, static_assigned_driver_id=None, tag_ids=None, vin=None, local_vars_configuration=None): # noqa: E501 # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration()\n self.local_vars_configuration = local_vars_configuration\n\n self._attributes = None\n self._aux_input_type1 = 
None\n self._aux_input_type10 = None\n self._aux_input_type2 = None\n self._aux_input_type3 = None\n self._aux_input_type4 = None\n self._aux_input_type5 = None\n self._aux_input_type6 = None\n self._aux_input_type7 = None\n self._aux_input_type8 = None\n self._aux_input_type9 = None\n self._engine_hours = None\n self._external_ids = None\n self._gateway_serial = None\n self._harsh_acceleration_setting_type = None\n self._license_plate = None\n self._name = None\n self._notes = None\n self._odometer_meters = None\n self._static_assigned_driver_id = None\n self._tag_ids = None\n self._vin = None\n self.discriminator = None\n\n if attributes is not None:\n self.attributes = attributes\n if aux_input_type1 is not None:\n self.aux_input_type1 = aux_input_type1\n if aux_input_type10 is not None:\n self.aux_input_type10 = aux_input_type10\n if aux_input_type2 is not None:\n self.aux_input_type2 = aux_input_type2\n if aux_input_type3 is not None:\n self.aux_input_type3 = aux_input_type3\n if aux_input_type4 is not None:\n self.aux_input_type4 = aux_input_type4\n if aux_input_type5 is not None:\n self.aux_input_type5 = aux_input_type5\n if aux_input_type6 is not None:\n self.aux_input_type6 = aux_input_type6\n if aux_input_type7 is not None:\n self.aux_input_type7 = aux_input_type7\n if aux_input_type8 is not None:\n self.aux_input_type8 = aux_input_type8\n if aux_input_type9 is not None:\n self.aux_input_type9 = aux_input_type9\n if engine_hours is not None:\n self.engine_hours = engine_hours\n if external_ids is not None:\n self.external_ids = external_ids\n if gateway_serial is not None:\n self.gateway_serial = gateway_serial\n if harsh_acceleration_setting_type is not None:\n self.harsh_acceleration_setting_type = harsh_acceleration_setting_type\n if license_plate is not None:\n self.license_plate = license_plate\n if name is not None:\n self.name = name\n if notes is not None:\n self.notes = notes\n if odometer_meters is not None:\n self.odometer_meters = odometer_meters\n if static_assigned_driver_id is not None:\n self.static_assigned_driver_id = static_assigned_driver_id\n if tag_ids is not None:\n self.tag_ids = tag_ids\n if vin is not None:\n self.vin = vin", "def custom_data_6(self, custom_data_6):\n # type: (string_types) -> None\n\n if custom_data_6 is not None:\n if not isinstance(custom_data_6, string_types):\n raise TypeError(\"Invalid type for `custom_data_6`, type has to be `string_types`\")\n\n self._custom_data_6 = custom_data_6", "def SetInputNarrowBand(self, ptr: 'itkVectorContainerUILSNF3') -> \"void\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetInputNarrowBand(self, ptr)", "def update(self, v_input):\n\n self.v = v_input", "def enable_auxdata(sk):\n sk.setsockopt(SOL_PACKET, PACKET_AUXDATA, 1)", "def uni_type(self, uni_type):\n self._uni_type = uni_type", "def _update_input_type(self):\n pass", "def set_ivt_variable(self, var):\n self.set_input_variable(var)", "def _from_protobuf(cls, aux_data):\n data = AuxData.serializer.decode(\n BytesIO(aux_data.data), aux_data.type_name\n )\n return cls(data=data, type_name=aux_data.type_name)", "def update_l7_rule(request, **kwargs):\n data = request.DATA\n l7_rule_id = data['l7rule'].get('id')\n\n conn = get_sdk_connection(request)\n l7_rule = conn.load_balancer.update_l7_rule(\n admin_state_up=data['l7rule'].get('admin_state_up'),\n compare_type=data['l7rule']['compare_type'],\n invert=data['l7rule'].get('invert'),\n key=data['l7rule'].get('key'),\n l7_policy=kwargs['l7_policy_id'],\n 
l7rule=l7_rule_id,\n type=data['l7rule']['type'],\n rule_value=data['l7rule']['rule_value'],\n )\n\n return _get_sdk_object_dict(l7_rule)", "def set_input(self, input):\n self.input = transfer_to_device(input, self.device)", "def _set_input(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_input_openconfig_qos_elements__qos_interfaces_interface_input, is_container='container', yang_name=\"input\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"input must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_input_openconfig_qos_elements__qos_interfaces_interface_input, is_container='container', yang_name=\"input\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__input = t\n if hasattr(self, '_set'):\n self._set()", "def firmware_version(self, firmware_version: str):\n\n self._firmware_version = firmware_version", "def _var_update(self, **kwargs):\n for k, v in kwargs.items():\n if v is not None:\n v = np.asanyarray(v)\n\n if not hasattr(self, k):\n setattr(self, k, v)\n elif v is not None:\n setattr(self, k, v)\n \n self._var_check()", "def populateTechnicalMaintenanceEquipmentDefaults(self):\n params = {}\n for i, e in enumerate(g_currentVehicle.item.eqsLayout):\n params['eId%s' % (i + 1)] = e.intCD if e else None\n\n self.populateTechnicalMaintenanceEquipment(**params)\n return", "def set_field(self,Hext):\n self.raw_parameters[\"Hext\"] = Hext\n self.parameters = NormalizedParameters(self.raw_parameters)\n self._load()", "def override_input(cls):\n globals()[\"input\"] = cls.input", "def _from_base(cls, _input: Optional[Union[Input, Dict]]) -> Optional[\"InternalInput\"]:\n if _input is None:\n return None\n if isinstance(_input, InternalInput):\n return _input\n if isinstance(_input, Input):\n # do force cast directly as there is no new field added in InternalInput\n # need to change the logic if new field is added\n _input.__class__ = InternalInput\n return _input\n return InternalInput(**_input)" ]
[ "0.737313", "0.70987934", "0.68551147", "0.6368619", "0.6157716", "0.61507314", "0.61456496", "0.60045", "0.5644334", "0.46709234", "0.4624464", "0.45685434", "0.4289816", "0.420974", "0.41962048", "0.41751334", "0.41050494", "0.40700984", "0.39788634", "0.39388323", "0.3886136", "0.38856232", "0.3863939", "0.38574246", "0.38478544", "0.38417676", "0.38404554", "0.38390565", "0.3833875", "0.38306236" ]
0.818187
0
Sets the aux_input_type8 of this UpdateVehicleRequest.
def aux_input_type8(self, aux_input_type8): allowed_values = ["none", "emergencyLights", "emergencyAlarm", "stopPaddle", "powerTakeOff", "plow", "sweeper", "salter", "reefer", "door", "boom", "auxiliaryEngine", "generator", "eightWayLights"] # noqa: E501 if self.local_vars_configuration.client_side_validation and aux_input_type8 not in allowed_values: # noqa: E501 raise ValueError( "Invalid value for `aux_input_type8` ({0}), must be one of {1}" # noqa: E501 .format(aux_input_type8, allowed_values) ) self._aux_input_type8 = aux_input_type8
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def aux_input_type9(self, aux_input_type9):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type9 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type9` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type9, allowed_values)\n )\n\n self._aux_input_type9 = aux_input_type9", "def aux_input_type10(self, aux_input_type10):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type10 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type10` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type10, allowed_values)\n )\n\n self._aux_input_type10 = aux_input_type10", "def aux_input_type7(self, aux_input_type7):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type7 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type7` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type7, allowed_values)\n )\n\n self._aux_input_type7 = aux_input_type7", "def aux_input_type6(self, aux_input_type6):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type6 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type6` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type6, allowed_values)\n )\n\n self._aux_input_type6 = aux_input_type6", "def aux_input_type4(self, aux_input_type4):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type4 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type4` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type4, allowed_values)\n )\n\n self._aux_input_type4 = aux_input_type4", "def aux_input_type1(self, aux_input_type1):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type1 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type1` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type1, allowed_values)\n )\n\n self._aux_input_type1 = 
aux_input_type1", "def aux_input_type5(self, aux_input_type5):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type5 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type5` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type5, allowed_values)\n )\n\n self._aux_input_type5 = aux_input_type5", "def aux_input_type3(self, aux_input_type3):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type3 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type3` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type3, allowed_values)\n )\n\n self._aux_input_type3 = aux_input_type3", "def aux_input_type2(self, aux_input_type2):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type2 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type2` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type2, allowed_values)\n )\n\n self._aux_input_type2 = aux_input_type2", "def custom_data_8(self, custom_data_8):\n # type: (string_types) -> None\n\n if custom_data_8 is not None:\n if not isinstance(custom_data_8, string_types):\n raise TypeError(\"Invalid type for `custom_data_8`, type has to be `string_types`\")\n\n self._custom_data_8 = custom_data_8", "async def change_aux(self, aux, newstate):\n if not self.connected:\n return\n\n # we don't have 3 auxs!\n if aux > 1:\n return\n\n # we don't have THIS aux\n if not self.aux_array[aux]:\n return\n\n # this is a toggle switch, not on/off\n if self.aux_status[aux] == newstate:\n return\n\n # Setup the basic things we know\n data = bytearray(9)\n data[0] = M_START\n data[1] = 7\n data[2] = mtypes[BMTS_CONTROL_REQ][0]\n data[3] = mtypes[BMTS_CONTROL_REQ][1]\n data[4] = mtypes[BMTS_CONTROL_REQ][2]\n data[5] = C_AUX1 if aux == 0 else C_AUX2\n data[6] = 0x00 # who knows?\n data[7] = messages.Message.crc(data[1:7])\n data[8] = M_END\n\n self.writer.write(data)\n await self.writer.drain()", "def uni_type(self, uni_type):\n self._uni_type = uni_type", "def add_int8(self, value):\n self._check_int_type(value, _INT_1BYTE_UPPERLIMIT)\n self._data += value.to_bytes(1, byteorder=\"little\")", "def write8(self, register, value):\n raise NotImplementedError", "def write_raw8(self, value):\n raise NotImplementedError", "def LD_Vx_byte(self, x, byte):\n\t\tself.V[x] = byte", "def read_raw8(self):\n raise NotImplementedError", "def read_U8(self, register):\n raise NotImplementedError", "def SetInputNarrowBand(self, ptr: 'itkVectorContainerUILSNF3') -> \"void\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetInputNarrowBand(self, ptr)", "def SetInput(self, , , p_float_6):\n ...", "def 
Write_uInt8(self,Address,Register,uInt8):\n self.Transaction(chr(Address)+chr(Register)+struct.pack('B',uInt8))", "def i2c_write8(self, address, register, value):\n raise NotImplementedError", "def __init__(self, attributes=None, aux_input_type1=None, aux_input_type10=None, aux_input_type2=None, aux_input_type3=None, aux_input_type4=None, aux_input_type5=None, aux_input_type6=None, aux_input_type7=None, aux_input_type8=None, aux_input_type9=None, engine_hours=None, external_ids=None, gateway_serial=None, harsh_acceleration_setting_type=None, license_plate=None, name=None, notes=None, odometer_meters=None, static_assigned_driver_id=None, tag_ids=None, vin=None, local_vars_configuration=None): # noqa: E501 # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration()\n self.local_vars_configuration = local_vars_configuration\n\n self._attributes = None\n self._aux_input_type1 = None\n self._aux_input_type10 = None\n self._aux_input_type2 = None\n self._aux_input_type3 = None\n self._aux_input_type4 = None\n self._aux_input_type5 = None\n self._aux_input_type6 = None\n self._aux_input_type7 = None\n self._aux_input_type8 = None\n self._aux_input_type9 = None\n self._engine_hours = None\n self._external_ids = None\n self._gateway_serial = None\n self._harsh_acceleration_setting_type = None\n self._license_plate = None\n self._name = None\n self._notes = None\n self._odometer_meters = None\n self._static_assigned_driver_id = None\n self._tag_ids = None\n self._vin = None\n self.discriminator = None\n\n if attributes is not None:\n self.attributes = attributes\n if aux_input_type1 is not None:\n self.aux_input_type1 = aux_input_type1\n if aux_input_type10 is not None:\n self.aux_input_type10 = aux_input_type10\n if aux_input_type2 is not None:\n self.aux_input_type2 = aux_input_type2\n if aux_input_type3 is not None:\n self.aux_input_type3 = aux_input_type3\n if aux_input_type4 is not None:\n self.aux_input_type4 = aux_input_type4\n if aux_input_type5 is not None:\n self.aux_input_type5 = aux_input_type5\n if aux_input_type6 is not None:\n self.aux_input_type6 = aux_input_type6\n if aux_input_type7 is not None:\n self.aux_input_type7 = aux_input_type7\n if aux_input_type8 is not None:\n self.aux_input_type8 = aux_input_type8\n if aux_input_type9 is not None:\n self.aux_input_type9 = aux_input_type9\n if engine_hours is not None:\n self.engine_hours = engine_hours\n if external_ids is not None:\n self.external_ids = external_ids\n if gateway_serial is not None:\n self.gateway_serial = gateway_serial\n if harsh_acceleration_setting_type is not None:\n self.harsh_acceleration_setting_type = harsh_acceleration_setting_type\n if license_plate is not None:\n self.license_plate = license_plate\n if name is not None:\n self.name = name\n if notes is not None:\n self.notes = notes\n if odometer_meters is not None:\n self.odometer_meters = odometer_meters\n if static_assigned_driver_id is not None:\n self.static_assigned_driver_id = static_assigned_driver_id\n if tag_ids is not None:\n self.tag_ids = tag_ids\n if vin is not None:\n self.vin = vin", "def _update_input_type(self):\n pass", "def write_uint8(self,value):\n packed = struct.pack('!B',value)\n self.data.extend(packed)", "def update(self, v_input):\n\n self.v = v_input", "def SetInputNarrowBand(self, ptr: 'itkVectorContainerUILSNF2') -> \"void\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetInputNarrowBand(self, ptr)", "def enable_auxdata(sk):\n 
sk.setsockopt(SOL_PACKET, PACKET_AUXDATA, 1)", "def read_S8(self, register):\n raise NotImplementedError", "def Write_Int8(self,Address,Register,Int8):\n self.Transaction(chr(Address)+chr(Register)+struct.pack('b',Int8))" ]
[ "0.6755058", "0.633114", "0.629067", "0.62043154", "0.58486116", "0.5745061", "0.56476486", "0.55395204", "0.54535085", "0.5100648", "0.48467347", "0.4543333", "0.4463553", "0.44145846", "0.4389607", "0.43863967", "0.43651605", "0.42814347", "0.4237928", "0.42126638", "0.42052054", "0.41917986", "0.4181434", "0.4172628", "0.4168476", "0.4154245", "0.41194624", "0.40782917", "0.40249565", "0.40144384" ]
0.8293582
0
Sets the aux_input_type9 of this UpdateVehicleRequest.
def aux_input_type9(self, aux_input_type9): allowed_values = ["none", "emergencyLights", "emergencyAlarm", "stopPaddle", "powerTakeOff", "plow", "sweeper", "salter", "reefer", "door", "boom", "auxiliaryEngine", "generator", "eightWayLights"] # noqa: E501 if self.local_vars_configuration.client_side_validation and aux_input_type9 not in allowed_values: # noqa: E501 raise ValueError( "Invalid value for `aux_input_type9` ({0}), must be one of {1}" # noqa: E501 .format(aux_input_type9, allowed_values) ) self._aux_input_type9 = aux_input_type9
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def aux_input_type10(self, aux_input_type10):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type10 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type10` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type10, allowed_values)\n )\n\n self._aux_input_type10 = aux_input_type10", "def aux_input_type8(self, aux_input_type8):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type8 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type8` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type8, allowed_values)\n )\n\n self._aux_input_type8 = aux_input_type8", "def aux_input_type6(self, aux_input_type6):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type6 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type6` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type6, allowed_values)\n )\n\n self._aux_input_type6 = aux_input_type6", "def aux_input_type7(self, aux_input_type7):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type7 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type7` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type7, allowed_values)\n )\n\n self._aux_input_type7 = aux_input_type7", "def aux_input_type3(self, aux_input_type3):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type3 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type3` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type3, allowed_values)\n )\n\n self._aux_input_type3 = aux_input_type3", "def aux_input_type1(self, aux_input_type1):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type1 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type1` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type1, allowed_values)\n )\n\n self._aux_input_type1 = 
aux_input_type1", "def aux_input_type2(self, aux_input_type2):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type2 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type2` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type2, allowed_values)\n )\n\n self._aux_input_type2 = aux_input_type2", "def aux_input_type4(self, aux_input_type4):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type4 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type4` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type4, allowed_values)\n )\n\n self._aux_input_type4 = aux_input_type4", "def aux_input_type5(self, aux_input_type5):\n allowed_values = [\"none\", \"emergencyLights\", \"emergencyAlarm\", \"stopPaddle\", \"powerTakeOff\", \"plow\", \"sweeper\", \"salter\", \"reefer\", \"door\", \"boom\", \"auxiliaryEngine\", \"generator\", \"eightWayLights\"] # noqa: E501\n if self.local_vars_configuration.client_side_validation and aux_input_type5 not in allowed_values: # noqa: E501\n raise ValueError(\n \"Invalid value for `aux_input_type5` ({0}), must be one of {1}\" # noqa: E501\n .format(aux_input_type5, allowed_values)\n )\n\n self._aux_input_type5 = aux_input_type5", "async def change_aux(self, aux, newstate):\n if not self.connected:\n return\n\n # we don't have 3 auxs!\n if aux > 1:\n return\n\n # we don't have THIS aux\n if not self.aux_array[aux]:\n return\n\n # this is a toggle switch, not on/off\n if self.aux_status[aux] == newstate:\n return\n\n # Setup the basic things we know\n data = bytearray(9)\n data[0] = M_START\n data[1] = 7\n data[2] = mtypes[BMTS_CONTROL_REQ][0]\n data[3] = mtypes[BMTS_CONTROL_REQ][1]\n data[4] = mtypes[BMTS_CONTROL_REQ][2]\n data[5] = C_AUX1 if aux == 0 else C_AUX2\n data[6] = 0x00 # who knows?\n data[7] = messages.Message.crc(data[1:7])\n data[8] = M_END\n\n self.writer.write(data)\n await self.writer.drain()", "def uni_type(self, uni_type):\n self._uni_type = uni_type", "def __init__(self, attributes=None, aux_input_type1=None, aux_input_type10=None, aux_input_type2=None, aux_input_type3=None, aux_input_type4=None, aux_input_type5=None, aux_input_type6=None, aux_input_type7=None, aux_input_type8=None, aux_input_type9=None, engine_hours=None, external_ids=None, gateway_serial=None, harsh_acceleration_setting_type=None, license_plate=None, name=None, notes=None, odometer_meters=None, static_assigned_driver_id=None, tag_ids=None, vin=None, local_vars_configuration=None): # noqa: E501 # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration()\n self.local_vars_configuration = local_vars_configuration\n\n self._attributes = None\n self._aux_input_type1 = None\n self._aux_input_type10 = None\n self._aux_input_type2 = None\n self._aux_input_type3 = None\n self._aux_input_type4 = None\n self._aux_input_type5 = None\n self._aux_input_type6 = None\n self._aux_input_type7 = None\n self._aux_input_type8 = None\n 
self._aux_input_type9 = None\n self._engine_hours = None\n self._external_ids = None\n self._gateway_serial = None\n self._harsh_acceleration_setting_type = None\n self._license_plate = None\n self._name = None\n self._notes = None\n self._odometer_meters = None\n self._static_assigned_driver_id = None\n self._tag_ids = None\n self._vin = None\n self.discriminator = None\n\n if attributes is not None:\n self.attributes = attributes\n if aux_input_type1 is not None:\n self.aux_input_type1 = aux_input_type1\n if aux_input_type10 is not None:\n self.aux_input_type10 = aux_input_type10\n if aux_input_type2 is not None:\n self.aux_input_type2 = aux_input_type2\n if aux_input_type3 is not None:\n self.aux_input_type3 = aux_input_type3\n if aux_input_type4 is not None:\n self.aux_input_type4 = aux_input_type4\n if aux_input_type5 is not None:\n self.aux_input_type5 = aux_input_type5\n if aux_input_type6 is not None:\n self.aux_input_type6 = aux_input_type6\n if aux_input_type7 is not None:\n self.aux_input_type7 = aux_input_type7\n if aux_input_type8 is not None:\n self.aux_input_type8 = aux_input_type8\n if aux_input_type9 is not None:\n self.aux_input_type9 = aux_input_type9\n if engine_hours is not None:\n self.engine_hours = engine_hours\n if external_ids is not None:\n self.external_ids = external_ids\n if gateway_serial is not None:\n self.gateway_serial = gateway_serial\n if harsh_acceleration_setting_type is not None:\n self.harsh_acceleration_setting_type = harsh_acceleration_setting_type\n if license_plate is not None:\n self.license_plate = license_plate\n if name is not None:\n self.name = name\n if notes is not None:\n self.notes = notes\n if odometer_meters is not None:\n self.odometer_meters = odometer_meters\n if static_assigned_driver_id is not None:\n self.static_assigned_driver_id = static_assigned_driver_id\n if tag_ids is not None:\n self.tag_ids = tag_ids\n if vin is not None:\n self.vin = vin", "def SetInput(self, , , p_float_6):\n ...", "def _update_input_type(self):\n pass", "def custom_data_9(self, custom_data_9):\n # type: (string_types) -> None\n\n if custom_data_9 is not None:\n if not isinstance(custom_data_9, string_types):\n raise TypeError(\"Invalid type for `custom_data_9`, type has to be `string_types`\")\n\n self._custom_data_9 = custom_data_9", "def update(self, v_input):\n\n self.v = v_input", "def reptile9(self, reptile9: List[PredictionsDatapoints]):\n\n self._reptile9 = reptile9", "def set_input_type(self, input_type):\n if input_type is not None: self._input_type.value = input_type\n return self", "def modify_input(self, raw_input_par):\r\n raise NotImplementedError", "def modify_input(self, raw_input_par):\r\n raise NotImplementedError", "def set_tipo_tag(self, serial, tipo):\n return self.parent.controller.set_tipo(decode(serial, \"hex_codec\"),\n tipo)", "def set_type(self, new_value):\n\n self.vax_type = new_value\n self.save()", "def _updateTruckRec(self, tNode, Uid):\n if type(Uid) == int:\n self._readTruckRec(tNode, Uid)\n print(f'Vehicle Id {Uid} record updated')\n print('------------------------------------')", "def __init__(self, rpc, auxiliary):\n super(AuxiliaryModule, self).__init__(rpc, 'auxiliary', auxiliary)\n self._action = self._info.get('default_action', \"\")", "def SetInputNarrowBand(self, ptr: 'itkVectorContainerUILSNF3') -> \"void\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetInputNarrowBand(self, ptr)", "def updateTransformationType(self):\n tt = 
self.vb.determineTransformationType()\n self.stackedWidget.setCurrentIndex(tt)\n self.checkRequirements()", "def setTypeUpdate_0(self, position, data):\n self.setTypeAndData(position.getX(), position.getY(), position.getZ(), data.getBlock(), data.getBlock().toLegacyData(data), 0)", "def __init__(__self__, *,\n type: Optional[pulumi.Input[Union[str, 'VNetSolutionType']]] = None):\n if type is not None:\n pulumi.set(__self__, \"type\", type)", "def update_speed_input_step(self,curr_v):\n \n # update speed inputs \n self.speed_inputs_east*=0\n self.speed_inputs_west*=0\n self.speed_inputs_north*=0\n self.speed_inputs_south*=0\n\n if self.use_eight_directions is True: \n self.speed_inputs_north_east*=0\n self.speed_inputs_north_west*=0\n self.speed_inputs_south_east*=0\n self.speed_inputs_south_west*=0\n \n #speed_values=self.rr[:self.N_e,0] \n speed_values=np.ones((self.N_e,1))\n\n if curr_v[0]>0:\n \n # north-east\n if self.use_eight_directions is True and curr_v[1]>0:\n self.speed_inputs_north_east=speed_values \n \n # south-east \n elif self.use_eight_directions is True and curr_v[1]<0:\n self.speed_inputs_south_east=speed_values\n \n #east \n else:\n self.speed_inputs_east=speed_values\n\n\n elif curr_v[0]<0:\n\n # north-west \n if self.use_eight_directions is True and curr_v[1]>0:\n self.speed_inputs_north_west=speed_values\n\n # south-west \n elif self.use_eight_directions is True and curr_v[1]<0:\n self.speed_inputs_south_west=speed_values\n \n # west \n else:\n self.speed_inputs_west=speed_values\n\n else: \n # north\n if curr_v[1]>0:\n self.speed_inputs_north=speed_values\n\n # south\n elif curr_v[1]<0:\n self.speed_inputs_south=speed_values", "def edit_frame_type(self, indx, frame_type, append=False):\n if not append:\n self['framebit'][indx] = 0\n self['framebit'][indx] = self.type_bitmask.turn_on(self['framebit'][indx], flag=frame_type)\n self['frametype'][indx] = self.type_bitmask.type_names(self['framebit'][indx])" ]
[ "0.6592232", "0.6393137", "0.6386548", "0.6197008", "0.59048843", "0.57917714", "0.5618292", "0.5533515", "0.5376587", "0.46700698", "0.45658058", "0.4494469", "0.43490845", "0.4330773", "0.43188262", "0.4283189", "0.4261341", "0.41801286", "0.40957448", "0.40957448", "0.4087617", "0.40346655", "0.40269282", "0.40113053", "0.40080753", "0.3999094", "0.3983309", "0.39822063", "0.3964472", "0.39551264" ]
0.81162375
0
Sets the engine_hours of this UpdateVehicleRequest.
def engine_hours(self, engine_hours): self._engine_hours = engine_hours
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def work_hours_setting(self, work_hours_setting):\n\n self._work_hours_setting = work_hours_setting", "def active_hours(self, active_hours):\n\n self._active_hours = active_hours", "def obd_engine_seconds(self, obd_engine_seconds):\n\n self._obd_engine_seconds = obd_engine_seconds", "def work_hours(self, work_hours):\n if work_hours is not None and len(work_hours) > 1024:\n raise ValueError(\"Invalid value for `work_hours`, length must be less than or equal to `1024`\") # noqa: E501\n\n self._work_hours = work_hours", "def engine_states(self, engine_states):\n\n self._engine_states = engine_states", "def opening_hours(self, opening_hours):\n if self.local_vars_configuration.client_side_validation and opening_hours is None: # noqa: E501\n raise ValueError(\"Invalid value for `opening_hours`, must not be `None`\") # noqa: E501\n\n self._opening_hours = opening_hours", "def set_all(self):\n\n self.ecm = EnergyConsumptionModel(\n vehicle_type=\"car\",\n vehicle_size=list(self.array.coords[\"size\"].values),\n powertrains=list(self.array.coords[\"powertrain\"].values),\n cycle=self.cycle,\n gradient=self.gradient,\n country=self.country,\n )\n\n diff = 1.0\n\n while diff > 0.0001:\n old_driving_mass = self[\"driving mass\"].sum().values\n self.set_vehicle_mass()\n self.set_power_parameters()\n self.set_component_masses()\n self.set_auxiliaries()\n self.set_power_battery_properties()\n self.set_battery_properties()\n self.set_energy_stored_properties()\n self.set_recuperation()\n\n if \"FCEV\" in self.array.powertrain.values:\n self.set_fuel_cell_power()\n self.set_fuel_cell_mass()\n\n # if user-provided values are passed,\n # they override the default values\n if \"capacity\" in self.energy_storage:\n self.override_battery_capacity()\n\n diff = (self[\"driving mass\"].sum().values - old_driving_mass) / self[\n \"driving mass\"\n ].sum()\n\n self.set_ttw_efficiency()\n self.calculate_ttw_energy()\n self.set_ttw_efficiency()\n\n self.set_range()\n\n if self.target_range:\n self.override_range()\n\n self.set_share_recuperated_energy()\n self.set_battery_fuel_cell_replacements()\n self.adjust_cost()\n\n self.set_electric_utility_factor()\n self.set_electricity_consumption()\n self.set_costs()\n self.set_hot_emissions()\n self.set_particulates_emission()\n self.set_noise_emissions()\n self.create_PHEV()\n if self.drop_hybrids:\n self.drop_hybrid()\n\n self.remove_energy_consumption_from_unavailable_vehicles()", "def end_times(self, end_times):\n\n self._end_times = end_times", "def submit_hours(self, report):\n raise NotImplementedError", "def wall_energy_efficiency(self, wall_energy_efficiency):\n\n self._wall_energy_efficiency = wall_energy_efficiency", "def setHour(self, *args):\n return _libsbml.Date_setHour(self, *args)", "def update(self, engine):\n\n curtime = datetime.datetime.now()\n\n # remove strategies with < _ENDT mins to go until start time.\n strats_finished = []\n for i, (s, m) in enumerate(self._strategies):\n # time left to live in seconds\n ttl = (m.starttime - curtime - self._endtdelta).total_seconds()\n\n if (ttl < 0):\n # remove strategy from app/engine\n self.remove_strategy(engine, s)\n # remove from internal \n strats_finished.append(i)\n else:\n # send a pulse to the strategy so that it knows time\n # left\n s.update_ttl(ttl)\n\n strats_finished.reverse()\n for i in strats_finished:\n self._strategies.pop(i)\n\n # add strategies with < STARTT mins to go until start time.\n strats_seen = []\n for (i, hmatch) in enumerate(self._hmatches):\n if (curtime + self._starttdelta 
> hmatch[0].starttime):\n # add the strategies for this market pair\n print 'adding strategies for', hmatch\n self.add_strategy(engine, hmatch)\n # remove from remaining matches list\n strats_seen.append(i)\n strats_seen.reverse()\n for i in strats_seen:\n self._hmatches.pop(i)", "def __get_engine_hours_from_line(self, line):\n # TODO: catch exceptions\n hours_str = line.split('engine_hours')[1]\n if hours_str.startswith('='):\n hours_str = hours_str[1:]\n hours_str = hours_str.split(';')[0]\n return float(hours_str)", "def set_Hour(self, value):\n super(GetTimestampFromDateParametersInputSet, self)._set_input('Hour', value)", "def tradeHours(self, context):\n raise NotImplementedError", "def set_eht(self, target_eht):\n self.target_eht = round(target_eht, 2)\n # Setting SEM to target EHT must be implemented in child class!", "def CalculateTimeFrameElectricEneregyCost(self, kwh:float, dollarsPerKiloWattHour = 0.1149):\n\t\t\n\t\treturn kwh * dollarsPerKiloWattHour", "def handle_set_speed_kph(self, req):\n self.cruising_speed += req.speed\n msg = \"Speed of vehicle #%i successfully set.\" % self.vehicle_id\n return srvs.SetSpeedResponse(True, msg)", "def set_hour(self, hour):\n if hour not in range(24):\n raise ValueError(\"Hour value for 24h must be in range [1..23] but is {}\".format(hour))\n\n # In case there was an issue with enabling the 14hour mode, we still want\n # to be able to write the hour correctly\n if self.__get_bit_12_24() == 0:\n # First we separate the tens and the digit\n tens, digit = divmod(int(hour), 10)\n\n # In 24h mode, we add them in a single int\n reg_value = (tens << 4) | digit\n\n else: # 12h mode\n # We get the meridien\n if hour <= 12:\n meridien = 0\n else:\n meridien = 1\n\n # We treat the hour\n if hour == 12:\n tens, digit = divmod(int(12), 10)\n else:\n tens, digit = divmod(int(hour % 12), 10)\n\n # In 24h mode, we add them in a single int\n reg_value = (meridien << 5) | (tens << 4) | digit\n\n # Then we print the value to the register\n self.__write_register(_REGISTER_HOUR, reg_value)", "def setHRVUpdate(self, numRRI):\n self.HRV_UPDATE = numRRI", "def update(self):\n try:\n now = datetime.now()\n today = date.today()\n midnight = datetime.combine(today, datetime.min.time())\n data = self.api.get_energy_details(\n self.site_id,\n midnight,\n now.strftime(\"%Y-%m-%d %H:%M:%S\"),\n meters=None,\n time_unit=\"DAY\",\n )\n energy_details = data[\"energyDetails\"]\n except KeyError:\n _LOGGER.error(\"Missing power flow data, skipping update\")\n return\n except (ConnectTimeout, HTTPError):\n _LOGGER.error(\"Could not retrieve data, skipping update\")\n return\n\n if \"meters\" not in energy_details:\n _LOGGER.debug(\n \"Missing meters in energy details data. 
Assuming site does not have any\"\n )\n return\n\n self.data = {}\n self.attributes = {}\n self.unit = energy_details[\"unit\"]\n meters = energy_details[\"meters\"]\n\n for entity in meters:\n for key, data in entity.items():\n if key == \"type\" and data in [\n \"Production\",\n \"SelfConsumption\",\n \"FeedIn\",\n \"Purchased\",\n \"Consumption\",\n ]:\n energy_type = data\n if key == \"values\":\n for row in data:\n self.data[energy_type] = row[\"value\"]\n self.attributes[energy_type] = {\"date\": row[\"date\"]}\n\n _LOGGER.debug(\n \"Updated SolarEdge energy details: %s, %s\", self.data, self.attributes\n )", "def SetRelTol(self, rel_tol):\n return _hypre.HypreAME_SetRelTol(self, rel_tol)", "def setTime(self, timeObj, day=None):\n\n # override day if it's None\n if not day:\n day = getDayFromNum(timeObj.weekday())\n\n self._fileCache[day][\"time-hr\"] = timeObj.hour\n self._fileCache[day][\"time-min\"] = timeObj.minute\n self._updateConfig()", "def calculate_ttw_energy(self) -> None:\n\n self.energy = self.ecm.motive_energy_per_km(\n driving_mass=self[\"driving mass\"],\n rr_coef=self[\"rolling resistance coefficient\"],\n drag_coef=self[\"aerodynamic drag coefficient\"],\n frontal_area=self[\"frontal area\"],\n electric_motor_power=self[\"electric power\"],\n engine_power=self[\"power\"],\n recuperation_efficiency=self[\"recuperation efficiency\"],\n aux_power=self[\"auxiliary power demand\"],\n battery_charge_eff=self[\"battery charge efficiency\"],\n battery_discharge_eff=self[\"battery discharge efficiency\"],\n fuel_cell_system_efficiency=self[\"fuel cell system efficiency\"],\n )\n\n self.energy = self.energy.assign_coords(\n {\n \"powertrain\": self.array.powertrain,\n \"year\": self.array.year,\n \"size\": self.array.coords[\"size\"],\n \"value\": self.array.coords[\"value\"],\n }\n )\n\n if self.energy_consumption:\n self.override_ttw_energy()\n\n distance = self.energy.sel(parameter=\"velocity\").sum(dim=\"second\") / 1000\n\n self[\"engine efficiency\"] = (\n np.ma.array(\n self.energy.loc[dict(parameter=\"engine efficiency\")],\n mask=self.energy.loc[dict(parameter=\"power load\")] == 0,\n )\n .mean(axis=0)\n .T\n )\n\n _o = lambda x: np.where((x == 0) | (x == np.nan), 1, x)\n\n if self.engine_efficiency is not None:\n print(\"Engine efficiency is being overridden.\")\n for key, val in self.engine_efficiency.items():\n pwt, size, year = key\n if (\n (val is not None)\n & (pwt in self.array.powertrain.values)\n & (year in self.array.year.values)\n & (size in self.array[\"size\"].values)\n ):\n self.array.loc[\n dict(\n powertrain=pwt,\n size=size,\n year=year,\n parameter=\"engine efficiency\",\n )\n ] = float(val)\n\n self.energy.loc[\n dict(\n powertrain=pwt,\n size=size,\n year=year,\n parameter=\"engine efficiency\",\n )\n ] = float(val) * np.where(\n self.energy.loc[\n dict(\n parameter=\"power load\",\n powertrain=pwt,\n size=size,\n year=year,\n )\n ]\n == 0,\n 0,\n 1,\n )\n\n self.energy.loc[\n dict(\n powertrain=pwt,\n size=size,\n year=year,\n parameter=\"motive energy\",\n )\n ] = self.energy.loc[\n dict(\n powertrain=pwt,\n size=size,\n year=year,\n parameter=\"motive energy at wheels\",\n )\n ] / (\n _o(\n self.energy.loc[\n dict(\n powertrain=pwt,\n size=size,\n year=year,\n parameter=\"engine efficiency\",\n )\n ]\n )\n * _o(\n self.energy.loc[\n dict(\n powertrain=pwt,\n size=size,\n year=year,\n parameter=\"transmission efficiency\",\n )\n ]\n )\n )\n\n self[\"transmission efficiency\"] = (\n np.ma.array(\n self.energy.loc[dict(parameter=\"transmission 
efficiency\")],\n mask=self.energy.loc[dict(parameter=\"power load\")] == 0,\n )\n .mean(axis=0)\n .T\n )\n\n if self.transmission_efficiency is not None:\n print(\"Transmission efficiency is being overridden.\")\n for key, val in self.transmission_efficiency.items():\n pwt, size, year = key\n\n if (\n (val is not None)\n & (pwt in self.array.powertrain.values)\n & (year in self.array.year.values)\n & (size in self.array[\"size\"].values)\n ):\n self.array.loc[\n dict(\n powertrain=pwt,\n size=size,\n year=year,\n parameter=\"transmission efficiency\",\n )\n ] = float(val)\n\n self.energy.loc[\n dict(\n powertrain=pwt,\n size=size,\n year=year,\n parameter=\"transmission efficiency\",\n )\n ] = float(val) * np.where(\n self.energy.loc[\n dict(\n parameter=\"power load\",\n powertrain=pwt,\n size=size,\n year=year,\n )\n ]\n == 0,\n 0,\n 1,\n )\n\n self.energy.loc[\n dict(\n powertrain=pwt,\n size=size,\n year=year,\n parameter=\"motive energy\",\n )\n ] = self.energy.loc[\n dict(\n powertrain=pwt,\n size=size,\n year=year,\n parameter=\"motive energy at wheels\",\n )\n ] / (\n _o(\n self.energy.loc[\n dict(\n powertrain=pwt,\n size=size,\n year=year,\n parameter=\"engine efficiency\",\n )\n ]\n )\n * _o(\n self.energy.loc[\n dict(\n powertrain=pwt,\n size=size,\n year=year,\n parameter=\"transmission efficiency\",\n )\n ]\n )\n )\n\n self[\"TtW energy\"] = (\n self.energy.sel(\n parameter=[\"motive energy\", \"auxiliary energy\", \"recuperated energy\"]\n ).sum(dim=[\"second\", \"parameter\"])\n / distance\n ).T\n\n self[\"TtW energy, combustion mode\"] = self[\"TtW energy\"] * (\n self[\"combustion power share\"] > 0\n )\n self[\"TtW energy, electric mode\"] = self[\"TtW energy\"] * (\n self[\"combustion power share\"] == 0\n )\n\n self[\"auxiliary energy\"] = (\n self.energy.sel(parameter=\"auxiliary energy\").sum(dim=\"second\") / distance\n ).T", "def setEmployees(self, employees):\n self.employees = employees", "def set_params(self, solver):\n params = yicespy.yices_new_param_record()\n yicespy.yices_default_params_for_context(solver.yices, params)\n for k,v in self.solver_options.items():\n rv = yicespy.yices_set_param(params, k, v)\n if rv != 0:\n raise PysmtValueError(\"Error setting the option '%s=%s'\" % (k,v))\n solver.yices_params = params", "async def set_energy(\r\n self, energy: int | float = 0, **kwargs # pylint: disable=unused-argument\r\n ) -> None:\r\n if KebaService.SET_ENERGY not in self.device_info.services:\r\n raise NotImplementedError(\r\n \"set_energy is not available for the given charging station.\"\r\n )\r\n\r\n if (\r\n not isinstance(energy, (int, float))\r\n or (energy < 1 and energy != 0)\r\n or energy >= 10000\r\n ):\r\n raise ValueError(\r\n \"Energy must be int or float and value must be above 0.0001 kWh and below 10000 kWh.\"\r\n )\r\n\r\n await self._send(f\"setenergy {int(round(energy * 10000))}\", fast_polling=True)", "def set_dwell_time(self, dwell_time):\n raise NotImplementedError", "def give_raise(self):\r\n self.hourly_pay = 12.00", "def calculate_hr_ee(self):\n\n # HR - resting HR = net HR\n net_hr = np.array([i - self.rest_hr if i is not None else None for i in self.df_epoch[\"HR\"]])\n\n # Sets values below 0% HRR (below resting HR) to 0\n net_hr[net_hr <= 0] = 0\n\n # Equation from Brage et al., 2004. 
Active EE in kJ/kg/min\n kj_kg_min = [.011 * (hr ** 2) + 5.82 * hr if hr is not None else None for hr in net_hr]\n\n # Converts kJ to kcal: relative EE (kcal/kg/min)\n kcal_kg_min = [k / 4.184 if k is not None else None for k in kj_kg_min]\n\n # Converts relative EE to absolute EE (kcal/min)\n kcal_min = [k * self.weight / 1000 if k is not None else None for k in kcal_kg_min]\n\n # kcals per epoch instead of per minute\n kcal_epoch = [k * (15 / 60) for k in kcal_min]\n\n total_ee = sum([i for i in kcal_epoch if not np.isnan(i)])\n print(\"-Total energy expenditure estimated from HR is {} kcal.\".format(int(total_ee)))\n\n self.df_epoch[\"HR_EE\"] = kcal_min" ]
[ "0.55531627", "0.51219904", "0.5094751", "0.48254585", "0.4715887", "0.46753588", "0.46118993", "0.459567", "0.45515487", "0.4540938", "0.45212317", "0.45212197", "0.44924808", "0.44742528", "0.4456462", "0.4445044", "0.44345856", "0.44310075", "0.44294956", "0.44075277", "0.43571466", "0.43518728", "0.43450204", "0.4344367", "0.43373784", "0.43135723", "0.4291427", "0.4247542", "0.42390496", "0.42297363" ]
0.8141102
0
Sets the external_ids of this UpdateVehicleRequest.
def external_ids(self, external_ids): self._external_ids = external_ids
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def external_ids(self, **kwargs):\n path = self._get_movie_id_path('external_ids')\n resp = self._get_method(path, kwargs)\n return resp", "def external_id(self, external_id):\n\n self._external_id = external_id", "def external_id(self, external_id):\n\n self._external_id = external_id", "def external_id(self, external_id):\n\n self._external_id = external_id", "def part_ids(self, part_ids):\n\n self._part_ids = part_ids", "def external_controller_services(self, external_controller_services):\n\n self._external_controller_services = external_controller_services", "def external_id_source(self, external_id_source):\n\n self._external_id_source = external_id_source", "def external_transit_networks(self, external_transit_networks):\n\n self._external_transit_networks = external_transit_networks", "def set_ext_params(self, ext_params):\n num_param = core.xc_func_info_get_n_ext_params(self.xc_func_info)\n if num_param == 0:\n raise ValueError(\"LibXCFunctional '%s' has no external parameters to set.\" % self.get_name())\n\n if len(ext_params) != num_param:\n raise ValueError(\n \"The length of the input external parameters (%d) does not match the length of the functional's external parameters (%d).\"\n % (len(ext_params), num_param))\n\n core.xc_func_set_ext_params(self.xc_func, np.asarray(ext_params, dtype=np.double))", "def allowed_vehicles(self, allowed_vehicles):\n\n self._allowed_vehicles = allowed_vehicles", "def allowed_vehicles(self, allowed_vehicles):\n\n self._allowed_vehicles = allowed_vehicles", "def external_id(self, external_id):\n if external_id is not None and len(external_id) > 255:\n raise ValueError(\"Invalid value for `external_id`, length must be less than or equal to `255`\") # noqa: E501\n\n self._external_id = external_id", "def external_id(self, external_id):\n if external_id is None:\n raise ValueError(\"Invalid value for `external_id`, must not be `None`\") # noqa: E501\n\n self._external_id = external_id", "def set_external_variables(self, xs):\n try:\n self.Attributes.update(xs)\n except AttributeError as e:\n raise e", "def status_ids(self, status_ids):\n\n self._status_ids = status_ids", "def resource_ids(self, resource_ids):\n\n self._resource_ids = resource_ids", "def external_customer_id(self, external_customer_id):\n\n self._external_customer_id = external_customer_id", "def location_ids(self, location_ids):\n\n self._location_ids = location_ids", "def set_vendors(self, vendors_list):\n self.multiple_items_selection_from_kendo_dropdown(self.vendors_kendo_dropdown_locator, vendors_list)\n self.wait_for_ajax_spinner_load()", "def setExternal(self):\n self.__external = True", "def external_use(self, external_use):\n if self.local_vars_configuration.client_side_validation and external_use is None: # noqa: E501\n raise ValueError(\"Invalid value for `external_use`, must not be `None`\") # noqa: E501\n\n self._external_use = external_use", "def ids(self, ids):\n self._ids = ids", "def invoice_ids(self, invoice_ids):\n\n self._invoice_ids = invoice_ids", "def set_serial_numbers(self, serial_numbers):\n if not all(isinstance(serial_number, str) for serial_number in serial_numbers):\n raise ApiError(\"One or more invalid serial numbers\")\n self._update_criteria(\"serial_number\", serial_numbers)\n return self", "def boundary_edge_ids(self, boundary_ids):\n self.boundary_edge_ids_ = boundary_ids\n logging.debug(\"Segment - {bid_len} Boundary edge IDs set.\".format(\n bid_len=len(self.boundary_edge_ids_)))", "def set_vendor(self, vendor_list):\n 
self.multiple_items_selection_from_kendo_dropdown(self.vendor_dropdown_locator, vendor_list)\n self.wait_for_ajax_spinner_load()", "def set_param_values(self, flattened_params, **tags):\n self._regressor.set_param_values(flattened_params, **tags)", "def boundary_facet_ids(self, boundary_ids):\n self.boundary_facet_ids_ = boundary_ids\n logging.debug(\"Segment - {bid_len} Boundary facet IDs set.\".format(\n bid_len=len(self.boundary_facet_ids)))", "def externally_managed(self, externally_managed):\n\n self._externally_managed = externally_managed", "def encoding_ids(self, encoding_ids):\n # type: (list) -> None\n\n if encoding_ids is not None:\n if not isinstance(encoding_ids, list):\n raise TypeError(\"Invalid type for `encoding_ids`, type has to be `list[string_types]`\")\n\n self._encoding_ids = encoding_ids" ]
[ "0.5645812", "0.56326723", "0.56326723", "0.56326723", "0.55220467", "0.5430042", "0.5307687", "0.5243291", "0.5185779", "0.51405275", "0.51405275", "0.5116388", "0.508638", "0.5023836", "0.49995106", "0.49790815", "0.49639636", "0.4961317", "0.49294525", "0.49190345", "0.48962608", "0.48775703", "0.48133597", "0.48045427", "0.4795125", "0.4727292", "0.4689793", "0.46792606", "0.46621573", "0.46121466" ]
0.7786375
0
Sets the gateway_serial of this UpdateVehicleRequest.
def gateway_serial(self, gateway_serial): self._gateway_serial = gateway_serial
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setGateway(self, gateway):\n # type: (str)->None\n\n self._validator.validate_one(\n 'gateway', VALID_OPTS['gateway'], gateway)\n self._ifAttributes['gateway'] = gateway", "def gateway_id(self, gateway_id):\n\n self._gateway_id = gateway_id", "def set_serial(self, serial=0):\n self.serial = serial", "def _SetDeviceSerial(self, device_serial):\n self._device_address = (\"127.0.0.1:%s\" % self._adb_port if\n self._adb_port else \"\")\n self._device_serial = (device_serial if device_serial else\n self._device_address)", "def setGateway(self):\n\t\tself.gatewayip = self.settings.getKeyValue('gatewayip')\n\t\tself.socket.send('setenv gatewayip ' + self.gatewayip+'\\r', 1)\n\t\treturn None", "def serial(self, serial):\n\n self._serial = serial", "def set_serial_number(self, sSerialNumber):\n\t\tcall_sdk_function('PrlVmDevHd_SetSerialNumber', self.handle, sSerialNumber)", "def serial(self, serial):\n if self.local_vars_configuration.client_side_validation and serial is None: # noqa: E501\n raise ValueError(\"Invalid value for `serial`, must not be `None`\") # noqa: E501\n\n self._serial = serial", "def serial_number(self, serial_number: str):\n\n self._serial_number = serial_number", "def serialno(self, serialno):\n\n self._serialno = serialno", "def set_router_gateway(self, ext_net_name, router_id):\n _ext_net_id = self.get_net_id(ext_net_name)\n if not isinstance(_ext_net_id, unicode):\n return\n\n LOG_OBJ.debug(\"Setting external gateway of %s router.\" % router_id)\n\n _url = \"http://\" + self.host_ip + \":9696/v2.0/routers/\" + \\\n router_id + \".json\"\n\n _headers = {'x-auth-token': self.project_info[\"token_project\"],\n 'content-type': 'application/json'}\n _gwdata = {\"router\": {\"external_gateway_info\":\n {\"network_id\": _ext_net_id}}}\n _body = json.dumps(_gwdata)\n\n response = self.request(\"PUT\", _url, _headers, _body)\n if response is None:\n LOG_OBJ.error(\"No response from Server while setting router:\"\n \" %s to gateway: %s\" % (router_id, _ext_net_id))\n return response\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"Setting router gateway Failed with status %s \" %\n response.status)\n return response.status\n\n LOG_OBJ.info(\"Router Gateway set is done for %s router\" % router_id)\n return True", "def update_gateway_device(self, gateway_device_id, body=None):\r\n return self.put(self.gateway_device_path % gateway_device_id,\r\n body=body)", "def lot_serial_nbr(self, lot_serial_nbr):\n\n self._lot_serial_nbr = lot_serial_nbr", "def enable_gateway(self, enable_gateway):\n\n self._enable_gateway = enable_gateway", "def update_network_gateway(self, gateway_id, body=None):\r\n return self.put(self.network_gateway_path % gateway_id, body=body)", "def updateDevice(self, serial: str, **kwargs):\n\n kwargs.update(locals())\n\n metadata = {\n 'tags': ['devices', 'configure'],\n 'operation': 'updateDevice'\n }\n resource = f'/devices/{serial}'\n\n body_params = ['name', 'tags', 'lat', 'lng', 'address', 'notes', 'moveMapMarker', 'switchProfileId', 'floorPlanId', ]\n payload = {k.strip(): v for k, v in kwargs.items() if k.strip() in body_params}\n action = {\n \"resource\": resource,\n \"operation\": \"update\",\n \"body\": payload\n }\n return action", "def gateway_type(self, gateway_type):\n allowed_values = [\"CENTRAL\", \"INDIVIDUAL\"]\n if gateway_type not in allowed_values:\n raise ValueError(\n \"Invalid value for `gateway_type` ({0}), must be one of {1}\"\n .format(gateway_type, allowed_values)\n )\n\n self._gateway_type = gateway_type", 
"def router_gateway_set(mgr_or_client, router_id, external_network_id,\n **kwargs):\n net_client = _g_router_client(mgr_or_client)\n router = router_show(mgr_or_client, router_id)\n router_id = router['id']\n external_gateway_info = dict(network_id=external_network_id)\n en_snat = 'enable_snat'\n if en_snat in kwargs:\n external_gateway_info[en_snat] = kwargs.pop(en_snat)\n body = net_client.update_router(\n router_id,\n external_gateway_info=external_gateway_info)\n return body['router']", "def vehicle(self, vehicle):\n\n self._vehicle = vehicle", "def vehicle(self, vehicle):\n\n self._vehicle = vehicle", "def serial_dev(self, serial_dev):\n self._serial_dev = serial_dev\n return self", "def serial(self, serial):\n if serial is None:\n raise ValueError(\"Invalid value for `serial`, must not be `None`\") # noqa: E501\n\n self._serial = serial", "def set_speed(self, speed: str) -> None:\n if speed == SPEED_HIGH:\n self._bond.setSpeed(self._deviceId, self._speed_high)\n elif speed == SPEED_MEDIUM:\n self._bond.setSpeed(self._deviceId, self._speed_medium)\n elif speed == SPEED_LOW:\n self._bond.setSpeed(self._deviceId, self._speed_low)\n self._attributes['current_speed'] = speed", "def _set_default_gateway(self, gateway_ip, ifname):\n version = 4\n if gateway_ip.version == 6:\n version = 6\n current = self._get_default_gateway(version)\n desired = str(gateway_ip)\n ifname = self.generic_to_host(ifname)\n\n if current and current != desired:\n # Remove the current gateway and add the desired one\n self.sudo(\n '-%s' % version, 'route', 'del', 'default', 'via', current,\n 'dev', ifname\n )\n return self.sudo(\n '-%s' % version, 'route', 'add', 'default', 'via', desired,\n 'dev', ifname\n )\n if not current:\n # Add the desired gateway\n return self.sudo(\n '-%s' % version, 'route', 'add', 'default', 'via', desired,\n 'dev', ifname\n )", "def set_default_gateway(self, sNewDefaultGateway):\n\t\tcall_sdk_function('PrlVmDevNet_SetDefaultGateway', self.handle, sNewDefaultGateway)", "def bus_ob_rec_id(self, bus_ob_rec_id):\n\n self._bus_ob_rec_id = bus_ob_rec_id", "def set_pypi_serial(self, serial):\n with self._conn.begin():\n self._conn.execute(\"VALUES (set_pypi_serial(%s))\", (serial,))", "def baudrate(self, baudrate):\n self._baudrate = baudrate\n return self", "def carrier_voyage_number(self, carrier_voyage_number: Object):\n\n self._carrier_voyage_number = carrier_voyage_number", "def optimization_force_direction(self, optimization_force_direction):\n\n self._optimization_force_direction = optimization_force_direction" ]
[ "0.63021106", "0.5979875", "0.56400514", "0.5633011", "0.54569376", "0.526742", "0.52273846", "0.51936847", "0.5030974", "0.49815518", "0.49481562", "0.4906431", "0.4822673", "0.47393987", "0.47254622", "0.47191525", "0.4696486", "0.46910635", "0.46563208", "0.46563208", "0.4639108", "0.4615377", "0.46025482", "0.45698178", "0.45653644", "0.45245802", "0.44629657", "0.4403855", "0.43993637", "0.43912733" ]
0.8186771
0
Sets the harsh_acceleration_setting_type of this UpdateVehicleRequest.
def harsh_acceleration_setting_type(self, harsh_acceleration_setting_type): allowed_values = ["passengerCar", "lightTruck", "heavyDuty", "off", "automatic"] # noqa: E501 if self.local_vars_configuration.client_side_validation and harsh_acceleration_setting_type not in allowed_values: # noqa: E501 raise ValueError( "Invalid value for `harsh_acceleration_setting_type` ({0}), must be one of {1}" # noqa: E501 .format(harsh_acceleration_setting_type, allowed_values) ) self._harsh_acceleration_setting_type = harsh_acceleration_setting_type
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_acceleration(self, acceleration):\n self.device.set_acceleration(acceleration)\n return \"OK\"", "def set_adjustment_type(self, adjustment_type):\n self.single_selection_from_kendo_dropdown(self.adjustment_type_dropdown_locator, adjustment_type)\n self.wait_for_ajax_spinner_load()", "def gateway_type(self, gateway_type):\n allowed_values = [\"CENTRAL\", \"INDIVIDUAL\"]\n if gateway_type not in allowed_values:\n raise ValueError(\n \"Invalid value for `gateway_type` ({0}), must be one of {1}\"\n .format(gateway_type, allowed_values)\n )\n\n self._gateway_type = gateway_type", "def handle_set_speed_kph(self, req):\n self.cruising_speed = req.speed * (5. / self.traffic_level) / 3.6\n msg = \"Speed of vehicle #%i successfully set.\" % self.vehicle_id\n return srvs.SetSpeedResponse(True, msg)", "def handle_set_speed_kph(self, req):\n self.cruising_speed += req.speed\n msg = \"Speed of vehicle #%i successfully set.\" % self.vehicle_id\n return srvs.SetSpeedResponse(True, msg)", "def device_type(self, device_type):\n allowed_values = [\"active\", \"inactive\", \"all\"]\n if device_type not in allowed_values:\n raise ValueError(\n \"Invalid value for `device_type` ({0}), must be one of {1}\"\n .format(device_type, allowed_values)\n )\n\n self._device_type = device_type", "def set_device_type(device_type):\n device_type_data = {\n 'name': device_type,\n 'label': normalize_label(device_type),\n 'deviceColor': sigfox_main_color,\n 'deviceIcon': 'wifi',\n 'variableColor': sigfox_secondary_color,\n 'properties': [],\n 'variables': []\n }\n return device_type_data", "def setHgType(self, hgTypeToSet):\n self.hgType = hgTypeToSet", "def engine_type(self, engine_type):\n\n self._engine_type = engine_type", "def set_shared_type(self, shared_type):\n\n\t\tif shared_type is not None and not isinstance(shared_type, str):\n\t\t\traise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: shared_type EXPECTED TYPE: str', None, None)\n\t\t\n\t\tself.__shared_type = shared_type\n\t\tself.__key_modified['shared_type'] = 1", "def SetCoarsening(self, coarsen_type):\n return _hypre.HypreBoomerAMG_SetCoarsening(self, coarsen_type)", "def set_electrical_type(self, connection_etype):\n self.etype = connection_etype", "def hw_type(self, hw_type):\n if self.local_vars_configuration.client_side_validation and hw_type is None: # noqa: E501\n raise ValueError(\"Invalid value for `hw_type`, must not be `None`\") # noqa: E501\n\n self._hw_type = hw_type", "def set_model_type(hparams, FLAGS):\n if not FLAGS.hparams:\n return\n\n keyword = \"model=\"\n model_pos = find(FLAGS.hparams, keyword)\n if model_pos is None:\n return\n\n model_name_pos = model_pos + len(keyword)\n end_pos = find(FLAGS.hparams[model_name_pos:], \",\")\n if end_pos is None:\n end_pos = len(FLAGS.hparams)\n\n hparams.model = FLAGS.hparams[model_name_pos:end_pos]", "def _set_component_type_params(self, component_type):\n self.component_type = component_type\n\n if component_type == \"healpix\":\n self._name.required = False\n self._skycoord.required = False\n self._hpx_inds.required = True\n self._nside.required = True\n self._hpx_order.required = True\n self._hpx_frame.required = True\n else:\n self._name.required = True\n self._skycoord.required = True\n self._hpx_inds.required = False\n self._nside.required = False\n self._hpx_order.required = False\n self._hpx_frame.required = False", "def _set_spectral_type_params(self, spectral_type):\n self.spectral_type = spectral_type\n\n assert (\n spectral_type in self._spectral_type.acceptable_vals\n ), 
f\"spectral_type must be one of: {self._spectral_type.acceptable_vals}\"\n if spectral_type == \"spectral_index\":\n self._spectral_index.required = True\n self._reference_frequency.required = True\n self._Nfreqs.acceptable_vals = [1]\n self._freq_array.required = False\n elif spectral_type == \"subband\":\n self._freq_array.required = True\n # TODO: make _freq_edge_array required in v0.5\n # (and not required in other spectral types)\n self._spectral_index.required = False\n self._reference_frequency.required = False\n self._Nfreqs.acceptable_vals = None\n elif spectral_type == \"full\":\n self._freq_array.required = True\n self._spectral_index.required = False\n self._reference_frequency.required = False\n self._Nfreqs.acceptable_vals = None\n else:\n self._freq_array.required = False\n self._spectral_index.required = False\n self._reference_frequency.required = False\n self._Nfreqs.acceptable_vals = [1]", "def set_hvac_mode(self, hvac_mode):\n\n if hvac_mode == HVAC_MODE_OFF:\n self._on = False\n self._device.set_location_to_off()\n self._current_operation_mode = CONST_MODE_OFF\n\n elif hvac_mode == HVAC_MODE_AUTO:\n self._on = True\n self._device.set_temperature_to_auto()\n self._current_operation_mode = CONST_MODE_PROGRAM\n\n elif hvac_mode == HVAC_MODE_HEAT:\n self._on = True\n self._device.set_temperature_to_manual()\n self._current_operation_mode = CONST_MODE_FIXED\n\n else:\n raise InvalidStateError", "def type(self, type: str):\n allowed_values = [\"daylight_factor\", \"annual\", \"radiation\", \"direct_reflection\", \"five_phase\", \"point_in_time\", \"solar_access\", \"three_phase\"] # noqa: E501\n if type not in allowed_values:\n raise ValueError(\n \"Invalid value for `type` ({0}), must be one of {1}\"\n .format(type, allowed_values)\n )\n\n self._type = type", "def setHotspotStyle( self, hstyle ):\n self._hotspotStyle = hstyle", "def set_feedback_type(self, feedback_type):\r\n return self._arm.set_feedback_type(feedback_type)", "def encryption_type(self, encryption_type):\n allowed_values = [\"NO_ENCRYPTION\", \"ENCRYPTED_ONLY\", \"ENCRYPTED_OR_PSTN_ONLY\"] # noqa: E501\n if encryption_type not in allowed_values:\n raise ValueError(\n \"Invalid value for `encryption_type` ({0}), must be one of {1}\" # noqa: E501\n .format(encryption_type, allowed_values)\n )\n\n self._encryption_type = encryption_type", "def set_hvac_mode(self, hvac_mode: str) -> None:\n if hvac_mode == HVACMode.AUTO: # FollowSchedule\n self.svc_reset_zone_mode()\n elif hvac_mode == HVACMode.HEAT: # TemporaryOverride\n self.svc_set_zone_mode(mode=ZoneMode.PERMANENT, setpoint=25) # TODO:\n else: # HVACMode.OFF, PermentOverride, temp = min\n self.svc_set_zone_mode(self._device.set_frost_mode) # TODO:", "def interaction_type(self, interaction_type):\n\n self._interaction_type = interaction_type", "def _ship_acceleration(player, ship_name, way, game_data):\n\n # Get the current speed and position\n position = game_data['ships'][player][ship_name]\n speed = game_data['board'][position][player][ship_name]['speed']\n # get the max speed of the ship\n max_speed = game_data['ship_characteristics'][game_data['board'][position][player][ship_name]['type']]['max_speed']\n\n # faster\n if way == 'faster' and speed < max_speed:\n # update the speed of the ship\n game_data['board'][position][player][ship_name]['speed'] += 1\n # slower\n if way == 'slower' and speed > 0:\n # update the speed of the ship\n game_data['board'][position][player][ship_name]['speed'] -= 1", "def enable_acceleration(self) -> 
Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_acceleration\")", "def enable_acceleration(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_acceleration\")", "def mumps_acceleration(self, mumps_acceleration):\n\n self._mumps_acceleration = mumps_acceleration", "def optimization_force_direction(self, optimization_force_direction):\n\n self._optimization_force_direction = optimization_force_direction", "def set_type(self, type):\n self.type = type", "def set_type(self, type):\n self.type = type" ]
[ "0.5853659", "0.50912744", "0.49172306", "0.48173192", "0.47014672", "0.4634827", "0.46120846", "0.4590229", "0.4576279", "0.45042148", "0.4476712", "0.43912166", "0.43879282", "0.4383811", "0.4366613", "0.4360031", "0.4352631", "0.43397018", "0.43366387", "0.43263713", "0.43233618", "0.43206865", "0.43122348", "0.42800635", "0.42675996", "0.42675996", "0.42597732", "0.42503265", "0.42201412", "0.42201412" ]
0.8702775
0
Sets the license_plate of this UpdateVehicleRequest.
def license_plate(self, license_plate): if (self.local_vars_configuration.client_side_validation and license_plate is not None and len(license_plate) > 12): raise ValueError("Invalid value for `license_plate`, length must be less than or equal to `12`") # noqa: E501 self._license_plate = license_plate
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def license(self, license):\n\n self._license = license", "def save(self, *args, **kwargs):\n if self.license_plate:\n self.license_plate = self.license_plate.replace('-','').replace(' ','')\n super(VehicleRegistration,self).save(*args, **kwargs)", "def license_number(self, license_number):\n\n self._license_number = license_number", "def license_date(self, license_date):\n\n self._license_date = license_date", "def set_license(self, license_code: str) -> PrivXAPIResponse:\n response_status, data = self._http_post(\n UrlEnum.LICENSE.LICENSE,\n body=license_code,\n )\n return PrivXAPIResponse(response_status, HTTPStatus.OK, data)", "def license_model_description(self, license_model_description):\n self._license_model_description = license_model_description", "def put(self, license_handler):\n\n full_license = request.data\n return license_handler.upload_license(full_license)", "def setLicenseKey(self,content):\n self.PDFreactorConfiguration.in1[\"licenseKey\"] = content", "def putlicensepath(self,licensepath_): # 3\n res = self.__obj.putlicensepath(licensepath_)\n if res != 0:\n raise Error(rescode(res),\"\")", "def test_create_vehicle_with_too_long_license_plate(self):\n payload = {\n 'user': self.user,\n 'type': 'VSL',\n 'license_plate': 'AA-1234-BB'\n }\n\n res = self.client.post(VEHICLE_URL, payload)\n\n self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)", "def license_key(self, license_key):\n # type: (string_types) -> None\n\n if license_key is not None:\n if not isinstance(license_key, string_types):\n raise TypeError(\"Invalid type for `license_key`, type has to be `string_types`\")\n\n self._license_key = license_key", "def putlicensewait(self,licwait_): # 3\n res = self.__obj.putlicensewait(licwait_)\n if res != 0:\n raise Error(rescode(res),\"\")", "def upload_license(self):\n param = self.module.params[\"param\"]\n license_file_path = param['license_file_path']\n if license_file_path and os.access(license_file_path, os.F_OK) and os.access(license_file_path, os.R_OK):\n self.client.upload_license(license_file_path)\n self.module.exit_json(msg=\"Import license file Success.\", changed=True, status='success')\n else:\n self.module.fail_json(msg=\"Import license file Fail.Please add 'hw_license_file_path' \"\n \"and make sure it can be read.\",\n changed=True, status='fail')", "def set_file_license_comment(self, doc, text):\n if self.has_package(doc) and self.has_file(doc):\n if not self.file_license_comment_set:\n self.file_license_comment_set = True\n if validations.validate_file_lics_comment(text):\n self.file(doc).license_comment = str_from_text(text)\n else:\n raise SPDXValueError('File::LicenseComment')\n else:\n raise CardinalityError('File::LicenseComment')\n else:\n raise OrderError('File::LicenseComment')", "def vehicle(self, vehicle):\n\n self._vehicle = vehicle", "def vehicle(self, vehicle):\n\n self._vehicle = vehicle", "def putlicensepath(self,licensepath_):\n if isinstance(licensepath_,unicode):\n licensepath_ = licensepath_.encode(\"utf-8\",errors=\"replace\")\n res = __library__.MSK_XX_putlicensepath(self.__nativep,licensepath_)\n if res != 0:\n raise Error(rescode(res),Env.getcodedesc(rescode(res))[1])", "def putlicensewait(self,licwait_):\n res = __library__.MSK_XX_putlicensewait(self.__nativep,licwait_)\n if res != 0:\n raise Error(rescode(res),Env.getcodedesc(rescode(res))[1])", "def cargo_fuel(self, cargo_fuel):\n\n self._cargo_fuel = cargo_fuel", "def license_plate(self) -> str:\n return 
self.numerify(self.generator.parse(self.random_element(self.license_formats)))", "def license_plate(self) -> str:\n prefix: str = self.random_element(self.license_plate_prefix)\n suffix = self.bothify(\n self.random_element(self.license_plate_suffix),\n letters=string.ascii_uppercase,\n )\n return prefix + suffix", "def putlicensedebug(self,licdebug_): # 3\n res = self.__obj.putlicensedebug(licdebug_)\n if res != 0:\n raise Error(rescode(res),\"\")", "def reservation(self, reservation):\n\n self._reservation = reservation", "def put(self, registration):\n args = self.reqparse.parse_args()\n check_for_empty_fields(args)\n return Car.edit(registration, args['model'], args['capacity'])", "def set_license_analytics(self, license_params: dict) -> PrivXAPIResponse:\n response_status, data = self._http_post(\n UrlEnum.LICENSE.OPT_IN,\n body=license_params,\n )\n return PrivXAPIResponse(response_status, HTTPStatus.OK, data)", "def _set_nameplate_to_match_resource_potential(self, resource):\n\n if \"nameplate\" in self.sam_sys_inputs:\n msg = ('Found \"nameplate\" input in config! Resource potential '\n 'from input data will be ignored. Nameplate capacity is {}'\n .format(self.sam_sys_inputs[\"nameplate\"]))\n logger.info(msg)\n return\n\n val = set(resource[\"potential_MW\"].unique())\n if len(val) > 1:\n msg = ('Found multiple values for \"potential_MW\" for site {}: {}'\n .format(self.site, val))\n logger.error(msg)\n raise InputError(msg)\n\n val = val.pop() * 1000\n\n logger.debug(\"Setting the nameplate to {}\".format(val))\n self.sam_sys_inputs[\"nameplate\"] = val", "def putlicensedebug(self,licdebug_):\n res = __library__.MSK_XX_putlicensedebug(self.__nativep,licdebug_)\n if res != 0:\n raise Error(rescode(res),Env.getcodedesc(rescode(res))[1])", "def update_license(self, sKey, sUser, sCompany):\n\t\treturn Job(SDK.PrlSrv_UpdateLicense(self.handle, sKey, sUser, sCompany)[0])", "def setParcel(self, newParcel):\n self._parcel = newParcel", "def putlicensecode(self,code): # 3\n if code is None:\n code_ = None\n else:\n try:\n code_ = memoryview(code)\n except TypeError:\n try:\n _tmparr_code = array.array(\"i\",code)\n except TypeError:\n raise TypeError(\"Argument code has wrong type\")\n else:\n code_ = memoryview(_tmparr_code)\n \n else:\n if code_.format != \"i\":\n code_ = memoryview(array.array(\"i\",code))\n \n if code_ is not None and len(code_) != value.license_buffer_length:\n raise ValueError(\"Array argument code has wrong length\")\n res = self.__obj.putlicensecode(code_)\n if res != 0:\n raise Error(rescode(res),\"\")" ]
[ "0.6482215", "0.60094345", "0.5971417", "0.57393056", "0.5617041", "0.55561376", "0.53961915", "0.5359062", "0.5322539", "0.5291167", "0.52001405", "0.5070711", "0.5057579", "0.49738538", "0.49601606", "0.49601606", "0.49461326", "0.4945529", "0.49165577", "0.4910613", "0.48727325", "0.48629662", "0.4846261", "0.48016953", "0.47764683", "0.4749892", "0.4705383", "0.47037765", "0.47005877", "0.46831986" ]
0.7658375
0
Sets the odometer_meters of this UpdateVehicleRequest.
def odometer_meters(self, odometer_meters): self._odometer_meters = odometer_meters
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def obd_odometer_meters(self, obd_odometer_meters):\n\n self._obd_odometer_meters = obd_odometer_meters", "def gps_odometer_meters(self, gps_odometer_meters):\n\n self._gps_odometer_meters = gps_odometer_meters", "def drive_distance_meters(self, drive_distance_meters):\n\n self._drive_distance_meters = drive_distance_meters", "def gps_distance_meters(self, gps_distance_meters):\n\n self._gps_distance_meters = gps_distance_meters", "def setDistanceUnits(self, units: Unit) -> None:\n self.units = ...", "def solar_meter(self, solar_meter):\n\n self._solar_meter = solar_meter", "def update_meters(self):\n self.previous_meters = self.current_meters\n self.current_meters = {'X' : 0,\n 'Y' : 0,\n 'RSSI' : 0}", "def odometry_updated(self, odo):\n self._current_speed = math.sqrt(odo.twist.twist.linear.x ** 2 +\n odo.twist.twist.linear.y ** 2 +\n odo.twist.twist.linear.z ** 2) * 3.6\n\n self._current_pose = odo.pose.pose\n quaternion = (\n odo.pose.pose.orientation.x,\n odo.pose.pose.orientation.y,\n odo.pose.pose.orientation.z,\n odo.pose.pose.orientation.w\n )\n _, _, self._vehicle_yaw = euler_from_quaternion(quaternion)", "def setWheelsSpeed(self, dc_motor_speed):\n self.dcmotorSpeed = dc_motor_speed # changed rightSpeed to dcmotorSpeed and right to\n self.updatePWM()", "def set_param_motor():\n servo.setSpeed(0, 0) # max = 255\n servo.setAccel(0, 0)\n servo.setSpeed(1, 150) # max = 255\n servo.setAccel(1, 150)", "def set(self, mdot = 0.0):\n self._setMassFlowRate(mdot)", "def set_speed (self, dx = None, dy = None) :\n if dx != None :\n self.speed[0] = dx\n if dy != None :\n self.speed[1] = dy", "def set_wheel_speed(self, om_l, om_r):\n self.om_left = om_l\n self.om_right = om_r", "def set_MeterNumber(self, value):\n super(AddressValidationInputSet, self)._set_input('MeterNumber', value)", "def setMotors(self, left_dist: int, right_dist: int, speed: int) -> None:\n \"\"\"\n The following is a work-around for a bug in the Neato API. The bug is that the\n robot won't stop instantly if a 0-velocity command is sent - the robot\n could continue moving for up to a second. To work around this bug, the\n first time a 0-velocity is sent in, a velocity of 1,1,1 is sent. Then,\n the zero is sent. 
This effectively causes the robot to stop instantly.\n \"\"\"\n if (int(left_dist) == 0 and int(right_dist) == 0 and int(speed) == 0):\n if (not self._stopped):\n self._stopped = True\n left_dist = 1\n right_dist = 1\n speed = 1\n else:\n self._stopped = False\n\n self._protocol.write_line(\n 'setmotor lwheeldist {} rwheeldist {} speed {}'.format(\n left_dist, right_dist, speed))", "def drive_time_minutes(self, drive_time_minutes):\n\n self._drive_time_minutes = drive_time_minutes", "def update(self,d:dict):\n for name,(value,n) in d.items():\n if n==0:\n continue\n self.meters[name].update(value,n)", "def set_distance(self):\n distance = self.gui.doubleSpinBox_distance.value()\n unit = self.gui.comboBox_unit.currentText()\n\n local_distance = ur(str(distance)+unit)\n self.logger.debug('local distance value: ' + str(local_distance))\n\n if local_distance > self.max_distance:\n self.logger.debug('value too high')\n local_max = self.max_distance.to(unit)\n self.logger.debug(str(local_max))\n self.gui.doubleSpinBox_distance.setValue(local_max.m_as(unit))\n distance = self.gui.doubleSpinBox_distance.value()\n elif local_distance < 0:\n self.logger.debug('value too low')\n self.gui.doubleSpinBox_distance.setValue(0)\n distance = self.gui.doubleSpinBox_distance.value()\n\n local_distance = ur(str(distance) + unit) #in case something changed\n self.distance = local_distance\n self.logger.debug('dictionary distance changed to: ' + str(self.distance))", "def set_MJD_obs(self, mjd):\n self.mjd_obs = mjd\n\n pass", "def set_parameter_values(self, c5=None, lm=1.0):\n\n self._c5 = c5\n self._lm = lm\n\n self._update()", "def motd(self, motd):\n if motd is None:\n raise ValueError(\"Invalid value for `motd`, must not be `None`\") # noqa: E501\n\n self._motd = motd", "def drive_distance(degrees, motor, gear_ratio): #TODO Finish documentation", "def set_motor_gains(\n self,\n kp: Union[float, Tuple[float], np.ndarray],\n kd: Union[float, Tuple[float], np.ndarray],\n ):\n self._kp = _convert_to_np_array(kp, self._num_motors)\n self._kd = _convert_to_np_array(kd, self._num_motors)", "def distance_miles(self, distance_miles):\n\n self._distance_miles = distance_miles", "def init_meter(self, loss_meters, elbo_meters):\n if loss_meters is None:\n self.train_loss_meter = RunningAverageMeter()\n self.val_loss_meter = RunningAverageMeter(0.5)\n else:\n self.train_loss_meter = loss_meters[0]\n self.val_loss_meter = loss_meters[1]\n\n if elbo_meters is None:\n self.train_elbo_meter = RunningAverageMeter()\n self.val_elbo_meter = RunningAverageMeter(0.5)\n else:\n self.train_elbo_meter = elbo_meters[0]\n self.val_elbo_meter = elbo_meters[1]", "def set_motor(self, nummer, direction, speed):\n self.log('set {} {} {}'.format(nummer, direction, speed))\n num = int(float(nummer))\n tempo = int(float(speed)*self.maxSpeed)\n self.set_direction(num, direction)\n self.set_speed(num, tempo)", "def update_odometer(self, mileage):\n if mileage >= self.odometer:\n self.odometer = mileage\n else:\n print(\"You can't roll back an odometer!\")", "def set_throttle(self, limit=None, units=None):\n self.delay = 0\n self.max_requests = 1e16\n self.made_requests = 0", "def set_speed(self, om_left, om_right):\n analog_om_left = self.LEFT_CONST + om_left*4\n analog_om_right = self.RIGHT_CONST - om_right*4\n self.servoWriteMicroseconds(self.PIN_LEFT, analog_om_left)\n self.servoWriteMicroseconds(self.PIN_RIGHT, analog_om_right)", "def set_rotor_diameter(self, rotor_diameter):\n raise Exception(\n \"FlorinInterface.set_rotor_diameter has 
been removed in favor of \"\n \"FlorinInterface.change_turbine. See examples/change_turbine/.\"\n )" ]
[ "0.78520304", "0.6952166", "0.6625439", "0.5700278", "0.5330142", "0.51499796", "0.49836195", "0.49621317", "0.49096128", "0.4897436", "0.4849823", "0.4838554", "0.4810013", "0.47394198", "0.4714336", "0.4705788", "0.4705542", "0.46347386", "0.46201065", "0.45941296", "0.45151672", "0.4514173", "0.4511925", "0.44877288", "0.44787925", "0.4470669", "0.44623947", "0.4454194", "0.44445992", "0.44335777" ]
0.8017434
0
Sets the static_assigned_driver_id of this UpdateVehicleRequest.
def static_assigned_driver_id(self, static_assigned_driver_id): self._static_assigned_driver_id = static_assigned_driver_id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def driver_id(self, driver_id):\n\n self._driver_id = driver_id", "def driver_id(self, driver_id: int):\n if driver_id is None:\n raise ValueError(\"Invalid value for `driver_id`, must not be `None`\") # noqa: E501\n\n self._driver_id = driver_id", "def static_finding(self, static_finding):\n\n self._static_finding = static_finding", "def driver_id(self) -> int:\n return self._driver_id", "def VsiIdAssignedNumber(self):\n return self._get_attribute('vsiIdAssignedNumber')", "def vehicle(self, vehicle):\n\n self._vehicle = vehicle", "def vehicle(self, vehicle):\n\n self._vehicle = vehicle", "def driver_id(self):\n return self._driver_id", "def add_driver(self, driver):\n\n warnings.warn('add_driver is deprecated, use veh.control=driver instead')\n self._control = driver\n driver._veh = self", "def assigned_by_user(self, assigned_by_user):\n\n self._assigned_by_user = assigned_by_user", "def _set_static_ip(name, session, vm_):\n ipv4_cidr = \"\"\n ipv4_gw = \"\"\n if \"ipv4_gw\" in vm_.keys():\n log.debug(\"ipv4_gw is found in keys\")\n ipv4_gw = vm_[\"ipv4_gw\"]\n if \"ipv4_cidr\" in vm_.keys():\n log.debug(\"ipv4_cidr is found in keys\")\n ipv4_cidr = vm_[\"ipv4_cidr\"]\n log.debug(\"attempting to set IP in instance\")\n set_vm_ip(name, ipv4_cidr, ipv4_gw, session, None)", "def set_driver(self, driver):\n self.driver = driver", "def assigned_user(self, assigned_user):\n self._assigned_user = assigned_user", "def driver_name(self, driver_name):\n\n self._driver_name = driver_name", "def VplsIdAssignedNumber(self):\n return self._get_attribute('vplsIdAssignedNumber')", "def vehicle(self, vehicle: Vehicle) -> None:\n self.controlled_vehicles = [vehicle]", "def static_ips(self, static_ips):\n\n self._static_ips = static_ips", "def allowed_vehicles(self, allowed_vehicles):\n\n self._allowed_vehicles = allowed_vehicles", "def allowed_vehicles(self, allowed_vehicles):\n\n self._allowed_vehicles = allowed_vehicles", "def driver(self, driver):\n\n self._driver = driver", "def driver(self, driver):\n\n self._driver = driver", "def _setsenders_correspondent_53D(self, val):\n self.swift_obj.SendersCorrespondent_D = val\n self.swift_obj.SendersCorrespondent_D.swiftTag = \"53D\"", "def _setsenders_correspondent_53A(self, val):\n self.swift_obj.SendersCorrespondent_A = val\n self.swift_obj.SendersCorrespondent_A.swiftTag = \"53A\"", "def sport_radar_player_id(self, sport_radar_player_id):\n\n self._sport_radar_player_id = sport_radar_player_id", "def update_agent_id(self, agent_id):\n self.id = agent_id", "def sent_by_image_id(self, sent_by_image_id):\n\n self._sent_by_image_id = sent_by_image_id", "def assignDealer(self):\n\t\t_, index = self.findNthPlayerFromSeat(self.curDealerSeatNo, 1)\n\t\tself.curDealerSeatNo = index", "def _set_senders_reference_20(self, val):\n self.swift_obj.SendersReference = val\n self.swift_obj.SendersReference.swiftTag = \"20\"", "def get_driver_id(self, driver_name):\n cond = SQLBinaryExpr(SQLFuncExpr(self.db_func_map[DB_FUNC_NAME_LOWER],\n COL_NAME_DRIVERS_NAME),\n OP_EQ, SQLLiteral(driver_name.lower()))\n entries = self.select_generic_data(select_list=[COL_NAME_DRIVERS_DRIVERID],\n table_list=[TABLE_NAME_DRIVERS],\n where=cond)\n if len(entries) == 1:\n return entries[0][COL_NAME_DRIVERS_DRIVERID]\n elif len(entries) > 1:\n raise AdasDBError(\"Driver '%s' cannot be resolved because it is ambiguous. (%s)\" % (driver_name, entries))\n\n raise AdasDBError(\"No resolution of '%s'. 
(%s)\" % (driver_name, entries))", "def add_driver(self, driver):\n drv_cond = SQLBinaryExpr(SQLFuncExpr(self.db_func_map[DB_FUNC_NAME_LOWER],\n COL_NAME_DRIVERS_NAME),\n OP_EQ, SQLLiteral(driver[COL_NAME_DRIVERS_NAME].lower()))\n entries = self.select_generic_data(table_list=[TABLE_NAME_DRIVERS], where=drv_cond)\n if len(entries) <= 0:\n drvid = self._get_next_id(TABLE_NAME_DRIVERS, COL_NAME_DRIVERS_DRIVERID)\n driver[COL_NAME_DRIVERS_DRIVERID] = drvid\n self.add_generic_data(driver, TABLE_NAME_DRIVERS)\n return drvid\n else:\n if self.error_tolerance < ERROR_TOLERANCE_LOW:\n raise AdasDBError(\"Driver '%s' exists already in the catalog.\" % driver[COL_NAME_DRIVERS_NAME])\n else:\n warn(\"Driver '\" + entries[COL_NAME_DRIVERS_NAME] + \"' already exists in the catalog.\")\n if len(entries) == 1:\n return entries[0][COL_NAME_DRIVERS_DRIVERID]\n elif len(entries) > 1:\n tmp = \"Driver'%s' \" % (driver[COL_NAME_DRIVERS_NAME])\n tmp += \"cannot be resolved because it is ambiguous. (%s)\" % entries\n raise AdasDBError(tmp)" ]
[ "0.58979875", "0.52223", "0.50875014", "0.45650956", "0.45579106", "0.45233056", "0.45233056", "0.44840404", "0.44335097", "0.43853715", "0.4371814", "0.43593413", "0.43487522", "0.42879018", "0.42818657", "0.42814112", "0.42524332", "0.4240414", "0.4240414", "0.4225884", "0.4225884", "0.42194477", "0.41885814", "0.41829795", "0.4182528", "0.41719893", "0.4169465", "0.41625294", "0.41488862", "0.41381705" ]
0.8492683
0
Sets the tag_ids of this UpdateVehicleRequest.
def tag_ids(self, tag_ids): self._tag_ids = tag_ids
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_tags(self, tags):\r\n current_tags = set(self.tag_names())\r\n updated_tags = set(tags)\r\n removed_tags = current_tags.difference(updated_tags)\r\n new_tags = updated_tags.difference(current_tags)\r\n \r\n for tag in new_tags:\r\n self.add_tag(tag)\r\n \r\n for tag in removed_tags:\r\n self.remove_tag(tag)", "def tags(self, tags: List[Tag]):\n\n self._tags = tags", "def set_tags(self, tags):\n uniques = set()\n distinct = []\n for tag in tags:\n if tag not in uniques:\n distinct.append(tag)\n uniques.add(tag)\n self.__post_changes(distinct)", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tag_names(self, tag_names):\n\n self._tag_names = tag_names", "def tags(self, tags):\n self._tags = tags", "def tags(self, tags):\n self._tags = tags", "def tags(self, tags):\n self._tags = tags", "def set_tags_batch(self, tags, filenames):\n # Explicitly ruling out strings here because passing in a\n # string would lead to strange and hard-to-find errors\n if isinstance(tags, basestring):\n raise TypeError(\"The argument 'tags' must be dictionary \"\n \"of strings\")\n if isinstance(filenames, basestring):\n raise TypeError(\"The argument 'filenames' must be \"\n \"an iterable of strings\")\n \n params = []\n params_utf8 = []\n for tag, value in tags.items():\n params.append(u'-%s=%s' % (tag, value))\n \n params.extend(filenames)\n params_utf8 = [x.encode('utf-8') for x in params]\n return self.execute(*params_utf8)", "def set_tags(self, tags):\n self._tag.clear()\n\n for tag in tags:\n if tag not in self._tag:\n self._tag.append(tag)\n\n return self", "def update_tags(self, tags, **kwargs):\n request = RequestMiddleware.get_request()\n is_admin = request.user and request.user.is_admin\n # Keep all tags that start with pf: because they are reserved.\n preserved = [tag for tag in self.tags if tag.startswith('pf:')]\n if is_admin:\n remove = [tag[1:] for tag in tags if tag.startswith('-pf:')]\n preserved = [tag for tag in preserved if tag not in remove]\n\n # Filter out new tags that are invalid or reserved.\n accepted = [tag for tag in tags\n if TAG_REGEX_COMPILED.match(tag)\n and (is_admin or not tag.startswith('pf:'))]\n # Limit the number of tags per entity.\n if len(accepted + preserved) > settings.MAX_TAGS_PER_ENTITY:\n accepted = accepted[:settings.MAX_TAGS_PER_ENTITY - len(preserved)]\n self.tags = list(set(accepted + preserved))", "def set_tags(self, tags):\n for task in self._tasks:\n task.set_tags(tags)\n\n return self", "def setTags(self,newtags):\n\t\tself.tags = newtags;", "def set_tags(self, tags, filename):\n return self.set_tags_batch(tags, [filename])", "def set_tags(self, session, *tags):\n if not tags:\n return list()\n\n result = self._tag(session.put, tags=list(tags), session=session)\n return result['tags']", "def boundary_facet_ids(self, boundary_ids):\n self.boundary_facet_ids_ = boundary_ids\n logging.debug(\"Segment - {bid_len} Boundary facet IDs set.\".format(\n bid_len=len(self.boundary_facet_ids)))", "def set_asset_tags(user_id, tag_id_list, commit=True):\n if not TagRepository.are_from_user_organization(tag_id_list, user_id):\n raise 
Error.Unauthorized(\"Every asset_tag for a user's notification preferences must belong to his organization\")\n\n db.session.query(NotificationAssetTag).filter(\n NotificationAssetTag.user_id == user_id,\n not_(NotificationAssetTag.tag_id.in_(tag_id_list))\n ).delete(synchronize_session = False)\n\n upsert_asset_tags(user_id, tag_id_list, commit)", "def ids(self, ids):\n self._ids = ids", "def append_tags(self, tags):\n\n tags = H.to_list(tags)\n # self._tags.update(tags)\n self.tags.update(tags)", "def defined_tags(self, defined_tags):\n self._defined_tags = defined_tags", "def part_ids(self, part_ids):\n\n self._part_ids = part_ids" ]
[ "0.6124682", "0.61125207", "0.60852545", "0.6005471", "0.6005471", "0.6005471", "0.6005471", "0.6005471", "0.6005471", "0.6005471", "0.6005471", "0.6005471", "0.6005471", "0.5952815", "0.59259295", "0.59259295", "0.59259295", "0.5924576", "0.5871416", "0.5602764", "0.55914205", "0.5586196", "0.5562834", "0.5516507", "0.5135185", "0.5056768", "0.5041436", "0.5000453", "0.49993426", "0.49899372" ]
0.75711256
0
Sets the vin of this UpdateVehicleRequest.
def vin(self, vin): if (self.local_vars_configuration.client_side_validation and vin is not None and len(vin) > 17): raise ValueError("Invalid value for `vin`, length must be less than or equal to `17`") # noqa: E501 if (self.local_vars_configuration.client_side_validation and vin is not None and len(vin) < 11): raise ValueError("Invalid value for `vin`, length must be greater than or equal to `11`") # noqa: E501 self._vin = vin
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_vin(self, value):\n return self.sendCMD(\"ATSET VIN={}\".format(value))", "def vm_volume_num_in(self, vm_volume_num_in):\n\n self._vm_volume_num_in = vm_volume_num_in", "def vehicle(self, vehicle):\n\n self._vehicle = vehicle", "def vehicle(self, vehicle):\n\n self._vehicle = vehicle", "def setVBin(self, vbin):\n with self.lock:\n self.vbin = vbin", "def vm_volume_num(self, vm_volume_num):\n\n self._vm_volume_num = vm_volume_num", "def virtual_router_ip(self, virtual_router_ip):\n self._virtual_router_ip = virtual_router_ip", "def decode_vin(self, vin, model_year=None):\n if model_year is not None:\n p = {'modelyear': model_year}\n else:\n p = None\n return self.get('vehicles/DecodeVin/{}'.format(vin), params=p)", "async def _select_vehicle(self, vin):\n params = {\"vin\": vin, \"_\": int(time.time())}\n js_resp = await self.get(API_SELECT_VEHICLE, params=params)\n _LOGGER.debug(pprint.pformat(js_resp))\n if js_resp.get(\"success\"):\n self._current_vin = vin\n _LOGGER.debug(\"Current vehicle: vin=%s\", js_resp[\"data\"][\"vin\"])\n return js_resp[\"data\"]\n if not js_resp.get(\"success\") and js_resp.get(\"errorCode\") == API_ERROR_VEHICLE_SETUP:\n # Occasionally happens every few hours. Resetting the session seems to deal with it.\n _LOGGER.warning(\"VEHICLESETUPERROR received. Resetting session.\")\n self.reset_session()\n return False\n _LOGGER.debug(\"Failed to switch vehicle errorCode=%s\", js_resp.get(\"errorCode\"))\n # Something else is probably wrong with the backend server context - try resetting\n self.reset_session()\n raise SubaruException(\"Failed to switch vehicle %s - resetting session.\" % js_resp.get(\"errorCode\"))", "def vm_volume(self, vm_volume):\n\n self._vm_volume = vm_volume", "def vm_vlan_num_in(self, vm_vlan_num_in):\n\n self._vm_vlan_num_in = vm_vlan_num_in", "def vm_volume_num_lte(self, vm_volume_num_lte):\n\n self._vm_volume_num_lte = vm_volume_num_lte", "def vm_num_in(self, vm_num_in):\n\n self._vm_num_in = vm_num_in", "def update(self, v_input):\n\n self.v = v_input", "def set_voltage(self, v):\n self.environment.set_voltage(self.neuron_id, v)", "def virtual_volumes(self, virtual_volumes):\n\n self._virtual_volumes = virtual_volumes", "def vip(self, vip):\n\n self._vip = vip", "def refund_incl_vat(self, refund_incl_vat):\n if (self.local_vars_configuration.client_side_validation and\n refund_incl_vat is not None and refund_incl_vat < 0): # noqa: E501\n raise ValueError(\"Invalid value for `refund_incl_vat`, must be a value greater than or equal to `0`\") # noqa: E501\n\n self._refund_incl_vat = refund_incl_vat", "def version_in(self, version_in):\n\n self._version_in = version_in", "def decode_vin_extended(self, vin, model_year=None):\n if model_year is not None:\n p = {'modelyear': model_year}\n else:\n p = None\n return self.get('vehicles/DecodeVinExtended/{}'.format(vin), params=p)", "def vm_volume_num_lt(self, vm_volume_num_lt):\n\n self._vm_volume_num_lt = vm_volume_num_lt", "def vm_vlan_num(self, vm_vlan_num):\n\n self._vm_vlan_num = vm_vlan_num", "def vm_volume_num_gte(self, vm_volume_num_gte):\n\n self._vm_volume_num_gte = vm_volume_num_gte", "def set(self, incoming_vector):\n self.vector = incoming_vector", "def set_vlan_tag(self, nVlanTag):\n\t\tcall_sdk_function('PrlVirtNet_SetVlanTag', self.handle, nVlanTag)", "def iscsi_lun_num_in(self, iscsi_lun_num_in):\n\n self._iscsi_lun_num_in = iscsi_lun_num_in", "def vm_num(self, vm_num):\n\n self._vm_num = vm_num", "def vertices(self, v):\n self._vertices = v", "def vds_num_in(self, 
vds_num_in):\n\n self._vds_num_in = vds_num_in", "def vat_number(self, vat_number):\n\n self._vat_number = vat_number" ]
[ "0.6181508", "0.54808533", "0.5342333", "0.5342333", "0.5278952", "0.51032424", "0.50570464", "0.4902143", "0.48931664", "0.48747736", "0.48244667", "0.4768621", "0.47286236", "0.46440622", "0.4581164", "0.4552281", "0.44910336", "0.44724986", "0.44671312", "0.44606727", "0.44266123", "0.44131854", "0.43419695", "0.43373993", "0.4329367", "0.4325972", "0.42918438", "0.42809895", "0.42748538", "0.42643693" ]
0.7512381
0
Return True if command return type is string
def is_string(self): answer = self._call('is_string') return answer.yes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _is_string(arg):\n return isinstance(arg, types.StringTypes)", "def is_string(value):\n return isinstance(value, (str, bytes))", "def is_string_action(func: CLIActionType) -> bool:\n return check_function_type(func, [HammerDriver, Callable[[str], None]], Optional[str]) is None", "def _is_string(self, obj):\n return isinstance(obj, unicode) or isinstance(obj, str)", "def is_of_type(cmd):\r\n raise NotImplementedError()", "def typeIsString(obj):\n return type(obj) is str or _haveTypeUnicode and type(obj) is unicode", "def is_string(value):\n return isinstance(value, basestring)", "def is_string(obj):\n return isinstance(obj, str)", "def check_statement(self, statement):\n return isinstance(statement, str)", "def expects_result(self, command):\n return isinstance(command, (self.package(\"Syntax\").Operator,\n self.package(\"Syntax\").Formule))", "def DataIsString(self):\n return self.data_type in (definitions.REG_SZ, definitions.REG_EXPAND_SZ)", "def is_string(obj):\n return isinstance(obj, basestring)", "def is_valid_command(self, string):\n return string[:3] == \"--!\"", "def is_command(schema_obj):\n\n return isinstance(schema_obj, schema.Command)", "def is_string(atype):\n if atype == str:\n return True\n elif PY2:\n if atype == unicode:\n return True\n return False", "def _msg_is_command(self, msg):\n return isinstance(msg, dict)", "def is_string(value):\n return isinstance(value, string_types)", "def strtype(x):\n if type(x) == str:\n return True\n if type(x) == unicode:\n return True\n return False", "def do_type(self, str_arg):\n try:\n self.adbc.type(validateString(str_arg))\n except Exception, e:\n printLog(self.threadName + 'TYPE FAILED: %s' % e.message)\n self.resultFlag = False\n finally:\n return self.resultFlag", "def is_string(val):\n return (\n isinstance(val, unicode) or \\\n isinstance(val, str) \n )", "def is_command_response(schema_obj):\n\n return isinstance(schema_obj, schema.CommandResponse)", "def is_command(self, text):\n return text.split(' ', 1)[0].startswith(\"!\")", "def is_valid_command(args):\n if args.command is not None:\n return True\n return False", "def _is_str(item):\n return isinstance(item, str)", "def isString(x):\n if type(x) == str:\n return True\n else:\n return False", "def is_text(self):\n return self.value_type in (str, unicode)", "def is_str ( self, s ):\r\n\t\treturn isinstance ( s, type( str () ) )", "def cmd_type(args):", "def _is_command(self, ext):\n try:\n return issubclass(ext, CommandExtension)\n except TypeError:\n return False", "def is_string_type(self):\n raise exceptions.NotImplementedError()" ]
[ "0.72101504", "0.67675126", "0.67398417", "0.66946656", "0.6686076", "0.667609", "0.6644599", "0.66220903", "0.65879357", "0.65586376", "0.65518516", "0.6525141", "0.6505778", "0.6503069", "0.6468777", "0.64370143", "0.6400437", "0.6391969", "0.63687015", "0.6319832", "0.63106257", "0.6266616", "0.6259427", "0.62311757", "0.6186616", "0.6176651", "0.61693144", "0.6154602", "0.6133311", "0.6132417" ]
0.7352662
0
Return True if command return type is long
def is_long(self): answer = self._call('is_long') return answer.yes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def convertToLong(boolean: bool) -> int:\n ...", "def is_of_type(cmd):\r\n raise NotImplementedError()", "def hasNextLong(self) -> bool:\n raise NotImplementedError", "def command_type(self):\n if self.cmd_type is not None:\n return self.cmd_type # should be true only after first run ..\n\n if any([self.cmd == c for c in (\"add\", \"sub\", \"neg\", \"eq\", \"gt\", \"lt\", \"and\", \"or\", \"not\")]):\n self.cmd_type = CmdTypes.C_ARITHMETIC\n self.arg1() # sets cmd_arg1\n return self.cmd_type\n elif self.cmd[0:4] == \"push\":\n self.cmd_type = CmdTypes.C_PUSH\n self.arg2() # sets cmd_arg1, cmd_arg2\n return self.cmd_type\n elif self.cmd[0:3] == \"pop\":\n self.cmd_type = CmdTypes.C_POP\n self.arg2() # sets cmd_arg1, cmd_arg2\n return self.cmd_type\n elif self.cmd[0:5] == \"label\":\n self.cmd_type = CmdTypes.C_LABEL\n self.arg1()\n return self.cmd_type\n elif self.cmd[0:4] == \"goto\":\n self.cmd_type = CmdTypes.C_GOTO\n self.arg1()\n return self.cmd_type\n elif self.cmd[0:7] == \"if-goto\":\n self.cmd_type = CmdTypes.C_IF\n self.arg1()\n return self.cmd_type\n elif self.cmd[0:8] == \"function\":\n self.cmd_type = CmdTypes.C_FUNCTION\n self.arg2()\n return self.cmd_type\n elif self.cmd[0:6] == \"return\":\n self.cmd_type = CmdTypes.C_RETURN\n return self.cmd_type\n elif self.cmd[0:4] == \"call\":\n self.cmd_type = CmdTypes.C_CALL\n self.arg2()\n return self.cmd_type\n return None", "def expects_result(self, command):\n return isinstance(command, (self.package(\"Syntax\").Operator,\n self.package(\"Syntax\").Formule))", "def cmd_type(args):", "def has_more_commands(self):\n return not self.eof", "def cmd_type (self, line):\r\n # ascii, ebcdic, image, local <byte size>\r\n t = line[1].lower()\r\n # no support for EBCDIC\r\n # if t not in ['a','e','i','l']:\r\n if t not in ['a','i','l']:\r\n self.command_not_understood(line.join())\r\n elif t == 'l' and (len(line) > 2 and line[2] != '8'):\r\n self.respond ('504 Byte size must be 8')\r\n else:\r\n self.current_mode = t\r\n self.respond ('200 Type set to %s.' 
% self.type_map[t])", "def is_command(schema_obj):\n\n return isinstance(schema_obj, schema.Command)", "def getLong(self, name: unicode) -> long:\n ...", "def _is_return(self, words):\n if words[0] == 'return':\n if len(words) != 1:\n raise SyntaxError(\"File line {}: Invalid number of arguments for C_RETURN command.\".format(self._file_line))\n return True\n else:\n return False", "def get_type_check(self, arg, option):\n pass", "def get_flag(self):\n return self.long_flag", "def is_command_response(schema_obj):\n\n return isinstance(schema_obj, schema.CommandResponse)", "def _is_command(self, ext):\n try:\n return issubclass(ext, CommandExtension)\n except TypeError:\n return False", "def setIsLong(self, value):\n return self._set(isLong=value)", "def num_long_term_logical_bytes(self) -> str:\n return pulumi.get(self, \"num_long_term_logical_bytes\")", "def _check_reply(self):\n self._more_packets_available = False\n try:\n if self._reply is None:\n self._status = (3, '{} without reply'.format(\n REPLAY_INFO[unpack_dint(self._message[:2])]))\n return False\n # Get the type of command\n typ = unpack_uint(self._reply[:2])\n\n # Encapsulation status check\n if unpack_dint(self._reply[8:12]) != SUCCESS:\n self._status = (3, \"{0} reply status:{1}\".format(\n REPLAY_INFO[typ],\n SERVICE_STATUS[unpack_dint(self._reply[8:12])]))\n return False\n\n # Command Specific Status check\n if typ == unpack_uint(ENCAPSULATION_COMMAND[\"send_rr_data\"]):\n status = unpack_usint(self._reply[42:43])\n if status != SUCCESS:\n status_msg = \"send_rr_data reply:{0} - Extend status:{1}\"\n self._status = (3, status_msg.format(\n SERVICE_STATUS[status],\n get_extended_status(self._reply, 42)))\n return False\n else:\n return True\n return True\n except Exception as e:\n raise DataError(e)", "def read_long_long(data):\n s_type = \"=%s\" % get_type(\"long_long\")\n return struct.unpack(s_type, data.read(8))[0]", "def returnsErrorCode(self):\n return self.rtype == \"int\"", "def is_match(self, command_bytes):", "def is_byte(self):\n return ida_bytes.is_byte(self.flags)", "def is_valid_command(self, string):\n return string[:3] == \"--!\"", "def is_command(line: str) -> bool:\n if line[0] == \"$\":\n return True\n return False", "def runLong(self, command):\n self.longCommand = command\n self.spawnProc.sendline(command)", "def nextLong(self) -> \"long\":\n raise NotImplementedError", "def do_type(self, str_arg):\n try:\n self.adbc.type(validateString(str_arg))\n except Exception, e:\n printLog(self.threadName + 'TYPE FAILED: %s' % e.message)\n self.resultFlag = False\n finally:\n return self.resultFlag", "def _msg_is_command(self, msg):\n return isinstance(msg, dict)", "def test_ulong_long_int(self):\n self.failUnlessEqual(self.callFunc('encode_longlong', self.const_integer), self.const_integer_long_long_encoded, 'long long encoding FAILED...')", "def has_more_commands(self):\n return self.counter < len(self.lines)" ]
[ "0.5989811", "0.57864743", "0.5738203", "0.5530481", "0.55091196", "0.5501726", "0.5487508", "0.54735243", "0.5433546", "0.5368896", "0.534729", "0.5277097", "0.52574474", "0.5248837", "0.51971203", "0.5174398", "0.5158678", "0.515556", "0.51363266", "0.5130246", "0.5119427", "0.50814044", "0.507908", "0.50753796", "0.5072792", "0.5064317", "0.5064167", "0.5057711", "0.5051473", "0.50510246" ]
0.74627966
0
Return True if command return type is double
def is_double(self): answer = self._call('is_double') return answer.yes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_double(self, size=None):\n return False", "def _type_check_double(self, data):\n if type(data) not in self._VALID_TYPES:\n return False\n return True", "def double(self):\n if self.__valeur1 == self.__valeur2:\n return True\n else:\n return False", "def double(self):\n return self._double", "def convertToDouble(boolean: bool) -> float:\n ...", "def _is_double(arr):\n\n # Figure out which dtype for data\n if arr.dtype == np.float32:\n return False\n elif arr.dtype == np.float64:\n return True\n else:\n raise ValueError(\"Only float32 or float64 dtypes are supported\")", "def hasNextDouble(self) -> bool:\n raise NotImplementedError", "def double_value(self) -> Optional[pulumi.Input[float]]:\n return pulumi.get(self, \"double_value\")", "def do_math_double(cls, quad, type):\n\t\tdata1 = cls.get_address_value(quad.left_operand)\n\t\tdata2 = cls.get_address_value(quad.right_operand)\n\t\tval = 0.0\n\t\tif(type == \"pow\"):\n\t\t\tval = math.pow(data1, data2)\n\n\t\tcls.set_address_value(quad.result, val)", "def check_for_float(check):", "def evaluate(self, *args, **kwargs) -> Union[str, int, float, bool]:\n return True", "def _assert_double(self, subject):\n if not isinstance(subject, numbers.Real):\n raise TypeError('Expecting a float-point real number, not ' +\n repr(subject))\n if subject < 0.0 or subject > 1.0:\n raise ValueError('Expecting a real number between 0.0 & 1.0, not' +\n repr(subject))", "def expects_result(self, command):\n return isinstance(command, (self.package(\"Syntax\").Operator,\n self.package(\"Syntax\").Formule))", "def _check_validdtypeoutput(self, symbol):\n if symbol.type == self.scanner.KEYWORD and \\\n symbol.id in self.validdtypeoutputs:\n return True\n else:\n return False", "def _is_real(symbol):\n return isa(symbol, float) or is_int(symbol)", "def isdecimal(self):\n return isdecimal(self)", "def is_double_scalar_reg(register):\n if register in [ProcessorRegister.double_scalar_0,\n ProcessorRegister.double_scalar_1]:\n return True\n else:\n return False", "def validate_answer(answer):\r\n try:\r\n float(answer)\r\n return True\r\n except ValueError:\r\n return False", "def have_double_impl(math_name):\n return math_suffix(math_name, double) in libc_math_funcs", "def _has_numeric_or_bool(self) -> bool:\n dtypes: Set[str] = set(self._data.keys())\n return 'i' in dtypes or 'f' in dtypes or 'b' in dtypes", "def is_valid_output(output) -> bool:\n log.info(f\"Output validation: {output}\")\n\n try:\n float(output)\n except ValueError as value_error:\n log.error(value_error)\n return False\n\n log.info(\"Output successfully validated\")\n return True", "def is_numeric(self) -> bool:\n return False", "def isnumeric(self):\n return isnumeric(self)", "def nextDouble(self) -> \"double\":\n raise NotImplementedError", "def double(value):\n return value * 2", "def double(value):\n return value * 2", "def double(self) -> Optional[str]:\n\n error_message: Optional[str] = None\n if self.bet * 2 > self.actual_money:\n error_message = \"Cannot double because you have not enough money!\"\n\n elif len(self._hands[0].cards) != 2:\n error_message = \"Cannot double because you have already hit!\"\n\n elif len(self._hands) == 2:\n error_message = \"Cannot double because you have already splitted!\"\n\n else:\n self._bet *= 2\n\n return error_message", "def is_terminal(self):\n return self.type == 'number' or self.type == 'op'", "def double(value):\n return 2 * value", "def getDoubleValue(self):\n return _libsbml.ConversionOption_getDoubleValue(self)" ]
[ "0.68060154", "0.6694105", "0.6278531", "0.6156558", "0.6122468", "0.60317355", "0.59308475", "0.59101456", "0.58420163", "0.5835771", "0.56978625", "0.5695271", "0.56179863", "0.55873126", "0.5555344", "0.5545467", "0.54628015", "0.54531217", "0.5441905", "0.5435553", "0.54085326", "0.539245", "0.5369494", "0.5328698", "0.53245485", "0.53245485", "0.53169644", "0.52922744", "0.5285577", "0.5274976" ]
0.7801225
0
Return True if command return type is datetime
def is_datetime(self): answer = self._call('is_datetime') return answer.yes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_datetime(self) -> bool:\n return False", "def has_datetime_type(obj: _std_typing.Any) -> bool:\n return obj.dtype == sc.DType.datetime64", "def is_datetime(s: Union[str, int, float]):\n if is_number(s):\n return False\n\n try:\n parse_datetime(s)\n return True\n except Exception:\n return False", "def test_14_digit_datetime_detection(self):\n obj = awstats_reader.awstats_datetime('20091130165230')\n self.assertTrue(isinstance(obj, awstats_reader.AwstatsDateTime))", "def is_datetime_type(val):\n return (\n pd.api.types.is_datetime64_any_dtype(val)\n or isinstance(val, pd.Timestamp)\n or isinstance(val, datetime)\n )", "def _uses_datetimeblock(dtype: Union[np.dtype, ExtensionDtype]) -> bool:\n vtype = dtype.type\n return issubclass(vtype, np.datetime64)", "def has_time(self):\n return isinstance(self._start, datetime.datetime)", "def could_be_datetime(val, fmt):\n\n if val == None or fmt == None:\n return False\n\n if isinstance(val, datetime):\n return True\n\n if isinstance(val, (str, unicode)):\n if Record.is_empty_str(val) or Record.is_empty_str(fmt):\n return False\n\n try:\n d = datetime.strptime(val, fmt)\n if not isinstance(d, datetime):\n raise ValueError\n else:\n return True\n except Exception as e:\n logging.error(e)\n return False\n\n #otherwise\n return False", "def is_interpretable(self):\n return bool(self.as_date() or self.as_time())", "def test_validate_datetime(self):\n self.datatrue = pd.read_csv(pkg_resources.resource_filename(resource_package, 'tests/testing_data/report_2_counts.csv'))\n\n self.datafalse = pd.read_csv(pkg_resources.resource_filename(resource_package, 'tests/testing_data/random_date1.csv'))\n\n self.test1 = utils.validate_datetime(self.datatrue)\n\n self.test2 = utils.validate_datetime(self.datafalse)\n\n self.assertTrue(isinstance(self.test1, pd.DataFrame))\n\n self.assertTrue(np.dtype('datetime64[ns]') in self.test1.dtypes.tolist())\n\n self.assertFalse(np.dtype('datetime64[ns]') in self.test2.dtypes.tolist())", "def validDateTime( dateTime ):\n try:\n datetime.strptime( dateTime, \"%Y-%m-%dT%H:%M:%S.%fZ\" )\n return True\n except ValueError:\n return False", "def valid_format(self):\n\n # If candidate is None, return true\n if not self.dt:\n print \"dt empty\"\n return True\n\n # Verify if time format is ok and stores in into a time-tuple format\n try:\n stime = datetime.strptime(self.dt, \"%Y-%m-%d %H:%M:%S\")\n except ValueError:\n return False\n else:\n return True", "def test_datetime_field():", "def test_8_digit_date_detection(self):\n obj = awstats_reader.awstats_datetime('20091130')\n self.assertTrue(isinstance(obj, awstats_reader.AwstatsDate))", "def is_date(dt):\n return isinstance(dt, datetime.date) and not isinstance(dt, datetime.datetime)", "def isdt(self):\n return self.Units.isreftime and self._subarray.dtype == _dtype_object", "def test_14_digit_datetime(self):\n obj = awstats_reader.awstats_datetime('20091130165230')\n dt = datetime.datetime(2009, 11, 30, 16, 52, 30)\n self.assertEqual(obj, dt)", "def test_convert_datetime():", "def test_last_access(self):\n self.assertIsInstance(self.obj.last_access, datetime)", "def test_created_at(self):\n self.assertIsInstance(self.obj.created_at, datetime)", "def test_created_at(self):\n self.assertIsInstance(self.obj.created_at, datetime)", "def test_created_at(self):\n self.assertIsInstance(self.obj.created_at, datetime)", "def test_opt_datetimeMissingOpenQuote(self):\n line = b'not \"'\n dt, remainder = self.server.opt_datetime(line)\n self.assertIsNone(dt)\n 
self.assertEqual(remainder, line)", "def test_datetime(self):\n diff = self.machine_date - self.actual_date < datetime.timedelta(0, 20, 0)", "def test_datetime_creation(self):\n self.assertIsInstance(self.user_1.created_at, datetime)\n self.assertIsInstance(self.user_1.updated_at, datetime)", "def test_datetime(self):\n self.assertEqual(datetime.datetime.min, self.__jenkins.datetime(*('job',)))", "def is_of_type(cmd):\r\n raise NotImplementedError()", "def skip_or_run_datetime_test(func):\n\n return skip_or_run_test_pcall_require(func, 'datetime',\n 'does not support datetime type')", "def test_created_type(self):\n\n base_model = BaseModel()\n self.assertTrue(base_model.created_at, datetime.datetime)", "def valid_datetime(dt):\n if isinstance(dt.tzinfo, tzinfo) and not datetime_ambiguous(dt):\n return True\n return False" ]
[ "0.7951076", "0.6984303", "0.6577533", "0.6427492", "0.6338919", "0.62653446", "0.62532926", "0.6224089", "0.6207788", "0.6151861", "0.5996967", "0.59684855", "0.59437066", "0.58614635", "0.584531", "0.5838218", "0.5837798", "0.5829952", "0.5794449", "0.57770663", "0.57770663", "0.57770663", "0.5772579", "0.5765009", "0.5739093", "0.571607", "0.5694268", "0.568948", "0.5678797", "0.56355166" ]
0.7957955
0
Return True if command return type is bool
def is_bool(self): answer = self._call('is_bool') return answer.yes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_bool(self):\n return False", "def is_bool (self, phrase):\r\n \r\n return isinstance(phrase,bool)", "def __bool__(self) -> bool:\n return self.return_code == 0", "def __bool__(self) -> bool:\n return self.return_code == 0", "def __bool__(self) -> bool:\n return self.return_code == 0", "def getbool(self, strcommand):\n result = ct.c_bool()\n command = ct.c_wchar_p(strcommand)\n self.lib.AT_GetBool(self.AT_H, command, ct.addressof(result))\n return result.value", "def bool_option (arg: Any) -> bool:\n return True", "def Bool(arg):\n return arg.lower() in ('y', 'true', 't', '1')", "def __bool__(self):\n return bool(self.get_value())", "def __bool__(self):\n raise ValueError(\"bool() not permitted\")", "def expects_result(self, command):\n return isinstance(command, (self.package(\"Syntax\").Operator,\n self.package(\"Syntax\").Formule))", "def __bool__(self):\n return bool(self.obj)", "def __bool__(self):\n return self is TRUE", "def __bool__(self):\n return any(self.smask)", "def give_me_a_boolean():\n return True\n pass", "def bool(self):\n return bool(self.int(2))", "def bool(x) -> bool:\n pass", "def __bool__(self):\n return bool(self._value)", "def as_bool(self):\n return self.as_type(bool)", "def __bool__(self) -> bool:\n return self._rpc is not None", "def __bool__(self):\n return self.is_successful", "def _true(*args):\n # pylint:disable=unused-argument\n return True", "def isTrue(*args, **kwargs)->None:\n pass", "def read_bool(self):\n return self.read_uint32() == 1", "def get_bool2(self):\n pass", "def check_for_bool(check):", "def read_bool(self):\n return bool(self.read_and_unpack('l')[0])", "def __bool__(self):\n return not self.err", "def _iscommand(self, key):\r\n\t\tyes = False\r\n\t\tfor i in COMMAND_NAME.keys():\r\n\t\t\tif key == i: \r\n\t\t\t\tyes = True; break\r\n\t\treturn yes", "def readBoolean(self) -> bool:\n return self.readByte() == 1" ]
[ "0.7300168", "0.6982025", "0.69479877", "0.69479877", "0.69479877", "0.6931669", "0.6860637", "0.684384", "0.6820059", "0.6804663", "0.6776833", "0.6743398", "0.67413884", "0.67388", "0.6701831", "0.66715646", "0.66706675", "0.6627555", "0.6617246", "0.6615943", "0.65937847", "0.6563973", "0.6543734", "0.6517953", "0.6494402", "0.6490549", "0.64703864", "0.6454628", "0.64462304", "0.64312404" ]
0.71876997
1
Return True if command return type is map
def is_map(self): answer = self._call('is_map') return answer.yes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _msg_is_command(self, msg):\n return isinstance(msg, dict)", "def is_mapping(self) -> bool:\n return isinstance(self.yaml_node, yaml.MappingNode)", "def is_map(self, alias):\n maps = {\"Ensembl2Reactome_All_Levels\": False,\n \"ReactomePathways\": True,\n \"reactome.homo_sapiens.interactions.tab-delimited\": False,\n \"ReactomePathwaysRelation\": True}\n return maps[alias]", "def is_base_type(cls, data):\n return _mapping_resolver.get_type(data) == \"MAPPING\"", "def checkMap(self):\n return True", "def _iscommand(self, key):\r\n\t\tyes = False\r\n\t\tfor i in COMMAND_NAME.keys():\r\n\t\t\tif key == i: \r\n\t\t\t\tyes = True; break\r\n\t\treturn yes", "def do_known_command(self, cmd):\n if cmd in self.commands:\n return \"true\", True\n else:\n return \"false\", True", "def is_of_type(cmd):\r\n raise NotImplementedError()", "def expects_result(self, command):\n return isinstance(command, (self.package(\"Syntax\").Operator,\n self.package(\"Syntax\").Formule))", "def is_command_response(schema_obj):\n\n return isinstance(schema_obj, schema.CommandResponse)", "def isMapping(obj):\n # type: (Any) -> bool\n return isinstance(obj, Mapping)", "def MapType(self):\n\t\treturn self._get_attribute('mapType')", "def is_command(schema_obj):\n\n return isinstance(schema_obj, schema.Command)", "def __commandExists(self, command, cmdtype):\n try:\n # method exists\n if hasattr(self, self.__getFullCommandName(command, cmdtype)):\n # command handler type exists\n if self.__commandHandlerTypeExists(cmdtype):\n return True\n else:\n return False\n else:\n return False\n # any key does not exist\n except KeyError:\n return False", "def __commandHandlerTypeExists(self, type):\n return self.__commandHandlers.has_key(type)", "def requires_mapping(self):", "def has_remap(self):\n return self.mapping1 is not None or self.mapping2 is not None", "def _valid_output_type(self, output_type):\n # pylint: disable=W0613, R0201\n return True", "def _empty_mapping(self):\r\n return self.type2test()", "def _is_command(self, ext):\n try:\n return issubclass(ext, CommandExtension)\n except TypeError:\n return False", "def _IsMapField(field_descriptor: FieldDescriptor) -> bool:\n return _GetMapFieldKeyValueTypes(field_descriptor) is not None", "def _is_command(obj, cli):\n if not inspect.isfunction(obj) or obj.__name__.startswith(\"_\"):\n return False\n return hasattr(obj, \"__module__\") and obj.__module__ == cli.__name__", "def is_valid_command(args):\n if args.command is not None:\n return True\n return False", "def is_match(self, command_bytes):", "def _is_pop_command(self):\n return self._match_memory_pattern(\"pop\")", "def is_valid_command(command):\n return is_get(command) or is_insert(command) or is_update(command) or is_delete(command) or is_showall(command) or is_search(command)", "def is_map(field):\n\n if isinstance(field, schema.Field):\n return field.is_map\n else:\n raise Exception('Expecting a field')", "def _is_return(self, words):\n if words[0] == 'return':\n if len(words) != 1:\n raise SyntaxError(\"File line {}: Invalid number of arguments for C_RETURN command.\".format(self._file_line))\n return True\n else:\n return False", "def cmd_type(args):", "def test_nmap_get_kind(self):\n assert_equal(self.test_nmap.get_kind(), 'mpnmap')" ]
[ "0.6289078", "0.6281217", "0.6242795", "0.61640793", "0.6034122", "0.5948415", "0.58508986", "0.58115065", "0.5724521", "0.56458396", "0.5560745", "0.55398226", "0.55355984", "0.5446891", "0.54456025", "0.5439933", "0.5427514", "0.54028404", "0.53899914", "0.53831494", "0.53772795", "0.53668886", "0.5362806", "0.53528965", "0.5318222", "0.53092146", "0.53048176", "0.5295763", "0.5279475", "0.5253453" ]
0.69451547
0
Return True if command return type is list
def is_list(self): answer = self._call('is_list') return answer.yes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_list(self) -> bool:\n return False", "def is_multi_commands(args: list) -> bool:\n for arg in args:\n if not isinstance(arg, list):\n return False\n # all elements must be lists\n return True", "def _is_list(arg):\n return isinstance(arg, collections.Sequence) and not _is_string(arg)", "def isList(data):\n\ttry:\n\t\tfrom types import ListType\n\t\tif type(data) == ListType:\n\t\t\treturn True\n\texcept ImportError:\n\t\tif type(data) == type([]):\n\t\t\treturn True\n\treturn False", "def is_list ( self, s ):\r\n\t\treturn isinstance ( s, type( list () ) )", "def _is_list(item):\n return isinstance(item, list)", "def is_listing(op):\n return isinstance(op, (list, tuple))", "def _is_list(arg):\n if isinstance(arg, dict):\n return False\n if isinstance(arg, str): # Python 3-only, as str has __iter__\n return False\n return (\n not _has_method(arg, \"strip\")\n and _has_method(arg, \"__getitem__\")\n or _has_method(arg, \"__iter__\")\n )", "def _is_list(val):\n\n return isinstance(val, list)", "def _is_list(self):\n # TODO\n if self.is_int():\n return self.int() == 0\n else:\n return self.size_words() == 2 and self.tag() == 0 and self.field(1)._is_list()", "def is_list(obj):\n return type(obj) is list", "def isList(self, item):\n\t retval = False\n\t if type(item) in (ListType, TupleType) :\n\t retval = True", "def is_list(value):\n return isinstance(value, list)", "def is_list(self) -> bool:\n if self.is_list_of_list: # pylint: disable=R1705\n return False\n else:\n return bool(AnnotationWrapper.list_field_re.match(self.data))", "def isList(x):\n \n return ( type(x) == list ) # True if the type of x is a list", "def isValidTypeForList(self, *args):\n return _libsbml.MultiListOfReactionsPlugin_isValidTypeForList(self, *args)", "def isValidTypeForList(self, *args):\n return _libsbml.SBasePlugin_isValidTypeForList(self, *args)", "def isList(obj):\n return type(obj)==types.ListType", "def test_list(self):\n parser = parse_args(['-g', '10', '-s', 'bubble', '-l'])\n self.assertTrue(parser.list)\n self.assertEqual(True, parser.list)\n\n parser = parse_args(['-g', '10', '-s', 'bubble'])\n self.assertEqual(False, parser.list)", "def expects_result(self, command):\n return isinstance(command, (self.package(\"Syntax\").Operator,\n self.package(\"Syntax\").Formule))", "def is_list(value):\n return isinstance(value, list) or None", "def _validate_command(self):\n if not isinstance(self.command, list):\n raise securesystemslib.exceptions.FormatError(\n \"Invalid Link: field `command` must be of type list, got: {}\"\n .format(type(self.command)))", "def _list_like(self, value):\n return (not hasattr(value, \"strip\") and\n (hasattr(value, \"__getitem__\") or\n hasattr(value, \"__iter__\")))\n # return is_sequence(value) # use from pandas.core.common import is_sequence", "def is_list(s_list):\n return isa(s_list, List)", "def isList(l):\r\n return hasattr(l, '__iter__') \\\r\n or (type(l) in (types.ListType, types.TupleType))", "def is_list_of_list(self) -> bool:\n return bool(AnnotationWrapper.list_of_list_re.match(self.data))", "def test_arg_parser_list(self):\n args = self.parser.parse_args(['list'])\n self.assertEqual(args.command, 'list')", "def is_command_response(schema_obj):\n\n return isinstance(schema_obj, schema.CommandResponse)", "def check_for_list(check):", "def is_list(annotation):\n\n annotation_origin = getattr(annotation, \"__origin__\", None)\n\n return annotation_origin == list" ]
[ "0.74882597", "0.7068153", "0.69201654", "0.68849444", "0.68486714", "0.6833843", "0.6674785", "0.66685194", "0.6642766", "0.66335183", "0.66288596", "0.6529805", "0.65188724", "0.6512905", "0.6460788", "0.64399505", "0.6422735", "0.6361648", "0.6349589", "0.63341206", "0.63011885", "0.6295231", "0.6246825", "0.62045854", "0.6185196", "0.61631095", "0.614989", "0.6093777", "0.59999716", "0.59830296" ]
0.7490665
0
Set command return value
def set_result(self, value): value_rpc = utils.get_rpc_value(type(value), value) self._call('set_result', value=value_rpc)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def execute_success(self, *args, **kwargs):\n return 0, self.shell_output, None", "def execute_return(cmd):\n args = cmd.split()\n proc = Popen(args,stdout=PIPE,stderr=PIPE)\n out,err = proc.communicate()\n return out,err", "def execute(self, rc):\n pass", "def _set_returncode(self, code):\n if code >= self._return_code:\n self._return_code = code", "def get_exit_code(self):", "def setExecutionStatus(self, return_code):\n if return_code == 0:\n self.execution_status = 'executed'\n else:\n self.execution_status = 'failed'", "def cmd_result(is_success, cmd, output, error):\n\n\t\tself.__logs.append(output)", "def cli(self, cmd):\n p1 = Popen(cmd,stdout=PIPE, shell=True)\n output = p1.communicate()\n if p1.returncode != 0 :\n print('error returned from shell command: %s was %s'%(cmd,output[0]))\n return output[0],p1.returncode", "def _set_result(self, value):\n self._result = value\n self._state = FINISHED_STATE", "def SetResultValue(self, *args):\n return _gmat_py.Solver_SetResultValue(self, *args)", "def ret(self, r):\n if not self.event.is_set():\n self.result = r\n self.event.set()", "def set_result(self, result_obj, execution_type):\n response = result_obj.response\n if execution_type == \"sync\":\n # For sync call we make a blocking call to read all stdout and stderr\n # values and set all attributes in the result obj\n result_obj.status_code = response.status\n if result_obj.status_code != int(0):\n result_obj.reason = response.stderr.read()\n result_obj.response_data = response.read()\n pylogger.error('cli command failed: Error: %s', result_obj.error)\n pylogger.error('cli command failed: Status Code: %s', result_obj.status_code)\n pylogger.error('cli command failed: response data: %s', result_obj.response_data)\n pylogger.error('cli command failed: Reason: %s', result_obj.reason)\n result_obj.set_error(result_obj.response_data, result_obj.reason)", "def execute(self, cmd: Command) -> int:\n try:\n return self.cmds[cmd.id]\n except KeyError:\n if cmd.val:\n self.state[cmd.key] = cmd.val\n self.cmds[cmd.id] = self.state[cmd.key]\n return self.cmds[cmd.id]", "def set_exitcode(self, exitcode):\n self.exitcode = exitcode\n self.connected = False", "def getReturnCode(self):\n retcode = self.sendCmd(\"echo $?\")\n try:\n return int(retcode)\n except:\n return retcode", "def succeed(self,args):\n code, msg, val = args\n if code != 1:\n raise ROSParamException(msg)\n return val", "def returncode(self: \"ShellOutput\") -> Artefact[int]:\n self.__check_len()\n return self.returncodes[0]", "def process_command(self, cmd, config):\n return None, None", "def ReturnCode(rc):\r\n return _hiew.ReturnCode(rc)", "def execute_failure(self, *args, **kwargs):\n return 1, \"\", None", "def call(self, cmd):\n exitcode, _stdout, _stderr = self.run(cmd, nonzero_e = None)\n return exitcode", "def ConsoleExit(self, errorcode=200):\n pass", "def result(self, result):\n self._result = result", "def result(self, result):\n self._result = result", "def _check_return(self, name, ret_code):\n if ret_code == 0:\n pass\n else:\n raise RuntimeError('An error occured setting %s: %d' % (name, ret_code))", "def shell_success(self, cmd):\n self.shell_cmd = cmd\n return response", "def set_result(self, result):\n self._result = result\n self._set_done()", "def test_use_exit_status(self): # suppress(no-self-use)\n subprocess.call.return_value = 1\n GreenTestCommand(Distribution()).run()\n sys.exit.assert_called_with(1)", "def invoke(self):\n self.exitCode = self.script()", "def set_result(self, result):\n 
self.__test_result[Result.__RESULT] = result" ]
[ "0.64630413", "0.6332911", "0.62538105", "0.6227985", "0.61576617", "0.6150565", "0.61106575", "0.6073182", "0.59837717", "0.59783006", "0.5948261", "0.5886567", "0.58848333", "0.58677804", "0.58380526", "0.5837768", "0.5819132", "0.5764613", "0.5763976", "0.5747505", "0.574275", "0.57423735", "0.57366216", "0.57366216", "0.5721226", "0.57168454", "0.57049096", "0.5702767", "0.56816465", "0.567778" ]
0.63785946
1
Set exception in command. Information about exception will be called for adapter's side.
def set_exception(self, reason): self._call('set_exception', exception=reason)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_exception(self, exception):\n self._is_done = True\n if self._exception:\n self._log(logging.DEBUG, \"'{}.{}' has overwritten exception. From '{}.{}' to '{}.{}'.\".format(\n self.__class__.__module__,\n self.__class__.__name__,\n self._exception.__class__.__module__,\n self._exception.__class__.__name__,\n exception.__class__.__module__,\n exception.__class__.__name__\n ))\n ConnectionObserver._remove_from_not_raised_exceptions(self._exception)\n self._exception = exception\n ConnectionObserver._append_to_not_raised_exceptions(exception)\n self._log(logging.INFO, \"'{}.{}' has set exception '{}.{}'.\".format(self.__class__.__module__,\n self.__class__.__name__,\n exception.__class__.__module__,\n exception.__class__.__name__))", "def set_exception(self, exception):\n self.set_exc_info(\n (exception.__class__,\n exception,\n getattr(exception, '__traceback__', None)))", "def send_exception(self, *args, **kwargs):\n self._channel.send_exception(*args, **kwargs)", "def exception(self, *args, **kwargs):", "def set_exception(self, exception, noexceptions=False):\n if self._status != Future.STATUS_STARTED:\n if noexceptions:\n return\n raise InvalidStateError()\n self._exception = exception\n self._status = Future.STATUS_ERROR\n self._schedule_callbacks()", "def handle_exceptions(self, exception: Exception):\n try:\n raise exception\n\n except clisys.InvalidCommand as e:\n print(f'Invalid option: \"{str(e.command_name)}\" .')\n\n except clisys.InvalidArgument as e:\n print(f'Invalid argument: \"{str(e.argument_name)}\" .')\n\n except clisys.InvalidArgumentCount:\n print('Invalid argument count.')\n\n except ValueError as e:\n print(e)", "def set_fetch_values_exception(cls):\n cls._exception = True", "def handle_exception(self, channel: Channel, session: Session, msg: Message, # pylint: disable=W0613\n exc: Exception) -> None:\n self.outcome = Outcome.ERROR\n self.details = exc", "def exception(self, e):\n pass", "def set_error_from_exc(self, exc: Exception, code: Optional[int] = ERR_UNKNOWN) -> None:\n self.set_error(code=code, text=str(exc))", "def error(self, exception=None):\n self._error = exception", "def set_exception(self, exception):\n with self._done_condition:\n if self._state in [CANCELLED, FINISHED]:\n raise InvalidStateError()\n self._exception = exception\n self._state = FINISHED\n self._done_condition.notify_all()", "def whenException(self, channel, call):", "async def on_command_error(self, ctx, error):\n\n # This prevents any commands with local handlers being handled here in on_command_error.\n if hasattr(ctx.command, 'on_error'):\n return\n\n\n if hasattr(ctx.command, 'on_command_error'):\n return\n\n # This prevents any cogs with an overwritten cog_command_error being handled here.\n cog = ctx.cog\n if cog:\n if cog._get_overridden_method(cog.cog_command_error) is not None:\n return\n\n\n # Allows us to check for original exceptions raised and sent to CommandInvokeError.\n # If nothing is found. 
We keep the exception passed to on_command_error.\n error = getattr(error, 'original', error)\n\n # Anything in ignored will return and prevent anything happening.\n if isinstance(error, commands.CommandNotFound):\n await ctx.send(f'Command pas trouvé')\n return\n if isinstance(error, commands.DisabledCommand):\n await ctx.send(f'{ctx.command} has been disabled.')\n return\n\n if isinstance(error,commands.errors.PrivateMessageOnly):\n await ctx.message.delete()\n channel = await ctx.message.author.create_dm()\n await channel.send(f'{ctx.command} ne peut être exécuté que en message privé !!')\n return\n # For this error example we check to see where it came from...\n if isinstance(error, commands.BadArgument):\n await ctx.send('Mauvais arguments passés')\n return\n if isinstance(error, commands.MissingRequiredArgument):\n await ctx.send('Il manque des arguments à la commande')\n return\n # All other Errors not returned come here. And we can just print the default TraceBack.\n logger.error(f'Ignoring exception in command {ctx.command} : {type(error)} {error} {error.__traceback__}')", "def exception(self):\n raise Exception(\"Exception test\")", "def set_exception(self, exception):\n with self._condition:\n self._exception = exception\n self._state = FadeFuture.FINISHED\n self._condition.notify_all()", "def _on_exception(self, exception):\n pass", "def exc_handler(self, exc_type, exc, *args) -> None:\n self.exception = exc\n self.exit_code = 1", "def unexpected_error(self, exception):", "def exception_handler(self, exception):\n pass", "def exception_callback(self, exception):\n self.exception_callback_value = exception", "async def on_command_error(self, ctx, error):\n\n if hasattr(ctx.command, 'on_error'):\n return\n\n error = getattr(error, 'original', error)\n\n if isinstance(error, commands.MissingRequiredArgument):\n LOG.error(f\"Missing argument in command {ctx.command}\")\n message = \"An argument is missing\\n\\n\"\n message += f\"{self.command_prefix}{ctx.command.signature}\"\n await self.send(ctx.channel, message, code_block=True)\n elif type(error) not in self.handled_exceptions:\n LOG.error(f\"Exception '{type(error).__name__}' raised in command '{ctx.command}':\")\n traceback.print_exception(type(error), error, error.__traceback__, file=sys.stderr)", "def exception(self, msg, *args, **kwargs):\n ex = sys.exc_info()[1]\n\n if hasattr(ex, '_monocle'):\n args = args + (format_tb(ex),)\n self.logger.error('%s\\n%%s' % msg, *args, **kwargs)\n else:\n super(Adapter, self).exception(msg, *args, **kwargs)", "def exception(self, *args, **kwargs):\n return super(Blueprint, self).exception(*args, **kwargs)", "def handle_execution_exception(self, ex):\n raise(ex)", "async def on_command_error(self, ctx: Context, e: commands.CommandError) -> None:\n if hasattr(ctx.command, \"on_error\"):\n return\n\n e = getattr(e, \"original\", e)\n\n await ctx.message.add_reaction(\"\\U0000274c\")\n\n embed = DefaultEmbed(ctx, title=\"**An error has occurred:**\")\n\n if isinstance(e, commands.DisabledCommand):\n embed.description = \"Command not currently enabled.\"\n\n elif isinstance(e, commands.UserInputError):\n embed.description = f\"Command received bad argument: {e}.\"\n\n elif isinstance(e, commands.NotOwner):\n embed.description = \"You do not have enough permissions for this command.\"\n\n elif isinstance(e, commands.CommandOnCooldown):\n embed.description = f\"{e}.\"\n\n elif isinstance(e, commands.CheckFailure):\n embed.description = \"You do not have enough permissions to run this 
command.\"\n\n elif isinstance(e, commands.MissingPermissions):\n embed.description = \"Bot does not have enough permissions for this command.\"\n\n elif isinstance(e, commands.CommandNotFound):\n embed.description = \"Unknown command.\"\n\n else:\n embed.description = f\"{type(e).__name__}: {e}\"\n\n log.error(\"An error has occurred.\", exc_info=(type(e), e, e.__traceback__))\n\n embed.description = f\"`{embed.description}`\"\n\n await ctx.send(embed=embed)", "def except__else(self, exception: BaseException) -> typing.Any:\n raise exception", "async def on_command_error(ctx, error):\n await send_block(\n ctx,\n \"\".join(\n traceback.format_exception(\n etype=type(error), value=error, tb=error.__traceback__\n )\n ),\n )", "def set_error(self, exc_info):\n self.exc_info = exc_info\n if exc_info is None:\n self.meta_classes = {}\n self.meta_functions = {}", "def set_error(self, index: int) -> None:\n ..." ]
[ "0.65251386", "0.65130377", "0.640264", "0.6400604", "0.6305449", "0.6302403", "0.62972367", "0.6162168", "0.6148929", "0.6125187", "0.60864145", "0.60796374", "0.6046624", "0.6018471", "0.59831905", "0.5967945", "0.59093523", "0.5906773", "0.5851401", "0.58388805", "0.5811415", "0.5798013", "0.57866764", "0.5776028", "0.57103336", "0.5705723", "0.5663676", "0.56548566", "0.56344116", "0.56269336" ]
0.67082244
0
Reloads the datatype from Riak.
def reload(self, **params):
    if not self.bucket:
        raise ValueError('bucket property not assigned')
    if not self.key:
        raise ValueError('key property not assigned')
    dtype, value, context = self.bucket._client._fetch_datatype(
        self.bucket, self.key, **params)
    if not dtype == self.type_name:
        raise TypeError("Expected datatype {} but "
                        "got datatype {}".format(self.__class__, TYPES[dtype]))
    self.clear()
    self._context = context
    self._set_value(value)
    return self
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reload_data(self):\n self._avro_payload.reload_data()", "def reload(self):\n self.restore()", "def reloadData(self):\n self.dto.readFromData()\n print(\"Record reloaded.\")", "def reload(self):", "def reload(self):", "def reload(self):\n\n pass", "def reload_data(self):\n super(UpdateMessage, self).reload_data()\n self._previous_avro_payload.reload_data()", "def reload(self) -> None: # pragma: no cover\n raise NotImplementedError()", "def refresh_types(self, type_clss):\n pass", "def reload(self):\n data = self.api.api_request(\"GET\", self.url)\n for t in self.ace_types:\n self[t].actors = data[t][\"actors\"]\n self[t].groups = data[t][\"groups\"]", "def reload_cache(self):\n self.data = self.read_data_cache()", "def reset_type(self, name):\r\n del self._retype_dictionary[name]", "def reload( self ):\n\t\tCORE.info( 'Reloading resources: modules, categories' )\n\t\tmoduleManager.load()\n\t\tcategoryManager.load()\n\t\tRESOURCES.info( 'Reloading UCR variables' )\n\t\tucr.load()", "def reload(self):\n from models.base_model import BaseModel\n from models.user import User\n from models.amenity import Amenity\n from models.city import City\n from models.place import Place\n from models.review import Review\n from models.state import State\n dict_reload = {}\n try:\n with open(FileStorage.__file_path) as file:\n dict_reload = json.load(file)\n for key, value in dict_reload.items():\n obj = value[\"__class__\"]\n self.__objects[key] = locals()[obj](**value)\n except:\n pass", "def Reload(self, data):\n self.__dict__ = json.loads(data, encoding='utf-8-sig')", "def refresh(self):\n raise UncodedError", "def reload(self):\n try:\n # if os.path.isfile(FileStorage.__file_path):\n with open(FileStorage.__file_path, 'r', encoding='UTF-8') as f:\n context2 = json.load(f)\n\n for key in context2.keys():\n new_value = context2[key]\n clss = new_value['__class__']\n# self.new(eval(clss)(**value))\n\n except Exception as e:\n pass", "def _clear_type_cache(): # real signature unknown; restored from __doc__\n pass", "def reloadfile(self, ):\n self.loadfile()", "def restore_data(self):\n self.R = self._Ro\n del self._Ro", "def reload(self):\n cluster_kubeconfig = self.ocp.cluster_kubeconfig\n self.data = self.get()\n self.__init__(**self.data)\n self.ocp.cluster_kubeconfig = cluster_kubeconfig", "def reload(self):\n from ..base_model import BaseModel\n from ..user import User\n from ..place import Place\n from ..state import State\n from ..city import City\n from ..amenity import Amenity\n from ..review import Review\n\n if exists(self.__file_path):\n with open(self.__file_path) as jsonfile:\n deserialized = json.load(jsonfile)\n\n cls = {\"BaseModel\": BaseModel, \"User\": User, \"Place\": Place,\n \"State\": State, \"City\": City, \"Amenity\": Amenity,\n \"Review\": Review}\n\n for keys in deserialized.keys():\n for cls_key in cls.keys():\n if deserialized[keys]['__class__'] == cls_key:\n self.__objects[keys] = cls[cls_key\n ](**deserialized[keys])\n break", "def reload(self):\n\n dict_of_dicts = {}\n classes = {\n \"BaseModel\": BaseModel,\n \"User\": User,\n \"Amenity\": Amenity,\n \"City\": City,\n \"Place\": Place,\n \"Review\": Review,\n \"State\": State}\n\n try:\n temp_dict = {}\n with open(self.__file_path, \"r\") as r:\n dict_of_dicts = json.load(r)\n for k, v in dict_of_dicts.items():\n if v['__class__'] in classes:\n temp_dict[k] = classes[v['__class__']](**v)\n self.__objects = temp_dict\n except Exception:\n pass", "def reload(self):\n try:\n with open(self.__file_path, 'r') as f:\n 
dicts = json.load(f)\n for key, value in dicts.items():\n obj1 = eval(value['__class__'])(**value)\n self.__objects[key] = obj1\n except FileNotFoundError:\n pass", "def ReloadSettings(self, data):\n self.__dict__ = json.loads(data, encoding='utf-8-sig')\n return", "def reload(self):\n with open(self._config) as f:\n self.data = json.load(f)", "def on_click_reload(self):\n with suppress_errors():\n self.load_imdb()\n self.load_exp()", "def reload(self):\n try:\n request = VI.ReloadRequestMsg()\n _this = request.new__this(self._mor)\n _this.set_attribute_type(self._mor.get_attribute_type())\n request.set_element__this(_this)\n self._server._proxy.Reload(request)\n except (VI.ZSI.FaultException), e:\n raise VIApiException(e)", "def test_0030_reactivate_datatypes_repository(self):\n installed_repository = self.test_db_util.get_installed_repository_by_name_owner(\n column_maker_repository_name, common.test_user_1_name\n )\n self.reactivate_repository(installed_repository)\n # This used to reactive datatype repositories and verify counts...\n # test may be considerably less useful now.", "def load(self):\n pass" ]
[ "0.69468844", "0.6571157", "0.652524", "0.6443367", "0.6443367", "0.62622434", "0.6048451", "0.6042948", "0.6036892", "0.60049", "0.5873903", "0.5867182", "0.5855046", "0.58414084", "0.5838228", "0.58149874", "0.57650065", "0.5740696", "0.57311875", "0.5674174", "0.56657064", "0.5653389", "0.56489086", "0.5643395", "0.55779266", "0.5562217", "0.5497143", "0.5485366", "0.54802513", "0.5474988" ]
0.6577614
1
Raises an exception if the context is not present
def _require_context(self):
    if not self._context:
        raise ContextRequired()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_context_invalid():\n\n with pytest.raises(ContextAttributeError):\n application_services.get_context('not_present_context')", "def handle_context_missing(self):", "def test_stored_context_err(self):\n self.stack = stack.Stack(self.ctx, 'creds_stack', self.tmpl)\n ex = self.assertRaises(exception.Error, self.stack.stored_context)\n expected_err = 'Attempt to use stored_context with no user_creds'\n self.assertEqual(expected_err, six.text_type(ex))", "def test_get_context_invalid_with_default():\n\n value = application_services.get_context('not_present_context', default=1003)\n assert value == 1003", "def in_context(self):\n pass", "def __missing__(self, key):\n raise KeyNotInContextError(f\"{key} not found in the pypyr context.\")", "def context_errored(self, cls, example, exception):", "def context(self) -> CONTEXT:", "def auditcontextnotfound(self) :\n\t\ttry :\n\t\t\treturn self._auditcontextnotfound\n\t\texcept Exception as e:\n\t\t\traise e", "def _raise_error_with_context(context):\n context_str = \" \"\n for c in context:\n if isinstance(c, str):\n context_str = context_str + f\"in key {c} \"\n elif isinstance(c, int):\n context_str = context_str + f\"in index {c} \"\n raise ValueError(f\"Value{context_str}is required\")", "def provide_context(self) -> Optional[Dict[Text, Any]]:", "def require_context(f):\n @wraps(f)\n def wrapper(*args, **kwargs):\n if not is_admin_context(args[0]) and not is_user_context(args[0]):\n raise exception.NotAuthorized()\n return f(*args, **kwargs)\n return wrapper", "def test_get_context_data(self):\n\n view = ElasticsearchErrorView()\n\n context = view.get_context_data()\n\n self.assertEqual(context.get('title'), 'Elasticsearch Error', \"'Elasticsearch Error' should be in context\")\n self.assertIn('status_code', \"status_code should be in context\")\n self.assertIn('status_description', context, \"status_description should be in context\")\n self.assertIn('error', \"error should be in context\")", "def run(self, context):\n TestStepBase.run(self, context)\n\n # Looks for the key in the context, raise an error if it doesn't exist\n info = context.get_info(self._key)\n if info is None:\n return_message = \"%s is not found in the context\" % self._key\n self._logger.error(return_message)\n raise AcsConfigException(AcsConfigException.OPERATION_FAILED, return_message)", "def test_get_context():\n\n application_services.add_context('context4', 'value4')\n assert application_services.get_context('context4') == 'value4'", "def context(self) -> Any:\n ...", "def test_execute_with_context(self):\n pass", "def init_with_context(self, context):\n pass", "def __init__(self, context=\"Resource not found\"):\n status_code = 404\n AppExceptionCase.__init__(self, status_code, context)", "def ctx():\n return None", "def user_context(request): # pragma: no cover\n # Disabled; this is bad practice\n raise NotImplementedError", "def test_add_context_duplicate():\n\n with pytest.raises(DuplicateContextKeyError):\n application_services.add_context('context2', 'value2')\n application_services.add_context('context2', 'value4')", "def assertContextSetTo(self, context):\r\n self.tracker.get_tracker.return_value.enter_context.assert_called_with( # pylint: disable=maybe-no-member\r\n UserTagsEventContextMiddleware.CONTEXT_NAME,\r\n context\r\n )", "def prepare(self, context):\n raise NotImplementedError", "def test_context_id(self):\n assert str(self.system.course_id) == self.xmodule.context_id", "def test_context_manager_error() -> None:\n with 
pytest.raises(ValueError):\n with managed_resource() as p:\n raise ValueError(\"Oops\")", "def assert_key_exists(self, key, caller):\n assert key, (\"key parameter must be specified.\")\n if key not in self:\n raise KeyNotInContextError(\n f\"context['{key}'] doesn't exist. It must exist for {caller}.\")", "def require_context(f):\n\n def wrapper(*args, **kwargs):\n if not is_admin_context(args[0]) and not is_user_context(args[0]):\n raise exception.NotAuthorized()\n return f(*args, **kwargs)\n return wrapper", "def require_context(f):\n\n def wrapper(*args, **kwargs):\n if not is_admin_context(args[0]) and not is_user_context(args[0]):\n raise exception.NotAuthorized()\n return f(*args, **kwargs)\n return wrapper", "def test_init_not_es(self):\n SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 2)\n SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 0)\n SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, 0)\n win = SDL_CreateWindow('cgles2_tests', -16, -16, 16, 16,\n SDL_WINDOW_OPENGL)\n glctx = SDL_GL_CreateContext(win)\n\n def make_current():\n SDL_GL_MakeCurrent(win, glctx)\n\n def get_proc_address(proc):\n return SDL_GL_GetProcAddress(proc)\n\n self.assertRaises(ValueError, Context, make_current, get_proc_address)" ]
[ "0.76646805", "0.7559883", "0.6614295", "0.6527365", "0.64800733", "0.64293313", "0.6406461", "0.6245151", "0.6196592", "0.61848545", "0.6142678", "0.60623604", "0.60348666", "0.6032899", "0.60134035", "0.60021675", "0.5999621", "0.59995925", "0.5892164", "0.5860368", "0.5839865", "0.58283204", "0.57876897", "0.5784874", "0.5774949", "0.57500136", "0.5719268", "0.5716138", "0.5716138", "0.57126415" ]
0.800951
0
Scrubs sys.path and sys.modules to a raw state.
def _scrub_import_environment(sys_modules_whitelist: typing.List[str], logger: typing.Callable):
    pex_root = pathlib.Path(Variables().PEX_ROOT)

    # A generator that emits sys.path elements
    def scrubbed_sys_path():
        """Yields a scrubbed version of sys.path."""
        for p in sys.path[:]:
            if not isinstance(p, str):
                yield p

            # Scrub any/all pex locations from sys.path.
            pp = pathlib.Path(p)
            if pex_root not in pp.parents:
                yield p

    def scrub_from_sys_modules():
        """Yields keys of sys.modules as candidates for scrubbing/removal."""
        for k, m in sys.modules.items():
            if k in sys_modules_whitelist:
                continue

            if hasattr(m, '__file__') and m.__file__ is not None:
                mp = pathlib.Path(m.__file__)
                if pex_root in mp.parents:
                    yield k

    def scrub_env():
        # Replace sys.path with a scrubbed version.
        sys.path[:] = list(scrubbed_sys_path())

        # Drop module cache references from sys.modules.
        modules_to_scrub = list(scrub_from_sys_modules())
        for m in modules_to_scrub:
            del sys.modules[m]

    logger('Scrubbing sys.path and sys.modules in preparation for pex bootstrap\n')
    logger(
        f'sys.path contains {len(sys.path)} items, '
        f'sys.modules contains {len(sys.modules)} keys\n'
    )

    # Scrub environment.
    scrub_env()

    logger(
        f'sys.path now contains {len(sys.path)} items, '
        f'sys.modules now contains {len(sys.modules)} keys\n'
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def patch_sys(cls):\n def patch_dict(old_value, new_value):\n old_value.clear()\n old_value.update(new_value)\n\n def patch_all(path, path_importer_cache, modules):\n sys.path[:] = path\n patch_dict(sys.path_importer_cache, path_importer_cache)\n patch_dict(sys.modules, modules)\n\n old_sys_path, old_sys_path_importer_cache, old_sys_modules = (\n sys.path[:], sys.path_importer_cache.copy(), sys.modules.copy())\n new_sys_path, new_sys_path_importer_cache, new_sys_modules = cls.minimum_sys()\n\n patch_all(new_sys_path, new_sys_path_importer_cache, new_sys_modules)\n yield", "def patch_sys(cls):\r\n def patch_dict(old_value, new_value):\r\n old_value.clear()\r\n old_value.update(new_value)\r\n\r\n def patch_all(path, path_importer_cache, modules):\r\n sys.path[:] = path\r\n patch_dict(sys.path_importer_cache, path_importer_cache)\r\n patch_dict(sys.modules, modules)\r\n\r\n old_sys_path, old_sys_path_importer_cache, old_sys_modules = (\r\n sys.path[:], sys.path_importer_cache.copy(), sys.modules.copy())\r\n new_sys_path, new_sys_path_importer_cache, new_sys_modules = cls.minimum_sys()\r\n\r\n patch_all(new_sys_path, new_sys_path_importer_cache, new_sys_modules)\r\n\r\n try:\r\n yield\r\n finally:\r\n patch_all(old_sys_path, old_sys_path_importer_cache, old_sys_modules)", "def scrub_from_sys_modules():\n for k, m in sys.modules.items():\n if k in sys_modules_whitelist:\n continue\n\n if hasattr(m, '__file__') and m.__file__ is not None:\n mp = pathlib.Path(m.__file__)\n if pex_root in mp.parents:\n yield k", "def scrubbed_sys_path():\n for p in sys.path[:]:\n if not isinstance(p, str):\n yield p\n\n # Scrub any/all pex locations from sys.path.\n pp = pathlib.Path(p)\n if pex_root not in pp.parents:\n yield p", "def load_modules_manually():\n #cmd_folder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile( inspect.currentframe() ))[0]))\n cmd_folder = '../myutils/'\n if cmd_folder not in sys.path:\n sys.path.insert(0, cmd_folder)\n #print sys.path", "def _update_loaded_modules(self):\n system_modules = sys.modules.keys()\n for module in list(self.loaded_modules):\n if module not in system_modules:\n self.processed_filepaths.pop(module)\n self.loaded_modules.remove(module)", "def __preload_sys_module_cache():\n # Preload sys module\n sys.stypy_module_cache = {\n 'sys': __load_python_module_dynamically('sys', False)} # By default, add original sys module clone\n\n # Preload builtins module\n sys.stypy_module_cache['__builtin__'] = __load_python_module_dynamically('__builtin__', False)\n sys.stypy_module_cache['ctypes'] = __load_python_module_dynamically('ctypes', False)", "def activate(self):\n not_in_path = self._path_entry not in sys.path\n if not_in_path:\n sys.path.append(self._path_entry)\n\n try:\n with self.cache:\n yield self.inject(Pipeline)\n finally:\n if not_in_path:\n sys.path.remove(self._path_entry)\n\n imported_modules = [\n name\n for name, module in sys.modules.items()\n if (filename := getattr(module, \"__file__\", None))\n and filename.startswith(self._path_entry)\n ]\n\n for name in imported_modules:\n del sys.modules[name]", "def cleanup(modpath):\n yield\n\n # Remove dummy modules from sys.modules\n pathlist = [p for p in sys.modules\n if p != modpath and p.startswith(modpath)]\n\n for p in pathlist:\n del sys.modules[p]\n\n if modpath in sys.modules:\n del sys.modules[modpath]\n\n # Remove TaskImporter\n index = [i for i, obj in enumerate(sys.meta_path)\n if isinstance(obj, TaskImporter)]\n\n for i in reversed(index):\n assert isinstance(sys.meta_path[i], 
TaskImporter)\n sys.meta_path.pop(i)", "def __enter__(self):\n if self.plugin_name == 'sideboard':\n return\n \n self._original_path = sys.path\n self._original_modules = sys.modules.copy()\n self.original_keys = set(self._original_modules.keys())\n\n #TODO: determine if there a sufficiently negative performance implication\n # to rethink doing this in recursive imports\n sys.path = _path_cache[self.plugin_name] + sys.path\n\n #This really does need to be an update in place.\n #Setting sys.modules = SOME_NEW_DICTIONARY means that\n # imports still write to the original sys.modules\n sys.modules.update(_module_cache[self.plugin_name])", "def modules_in_current_dir(path, module_name):\n yield from modules_from_path(Path(path).parent, module_name)", "def localPython ( localPath ) :\r\n\r\n if not type( localPath ) == str : return\r\n\r\n if not localPath.endswith( os.sep ) : localPath = localPath + os.sep\r\n\r\n # reads the paths to add to sys.path\r\n \r\n try :\r\n\r\n handler = open( localPath + \"sysPath.txt\", \"r\" )\r\n\r\n text = handler.read()\r\n\r\n handler.close()\r\n\r\n items = text.splitlines()\r\n\r\n except Exception, exception :\r\n\r\n items = [ ]\r\n\r\n\r\n # places the local paths before the previous search paths. only those that exist\r\n\r\n sysPath = [ ]\r\n\r\n for item in items :\r\n\r\n item = item.strip().replace( \"\\\\\", os.sep ).replace( \"/\", os.sep )\r\n\r\n if len( item ) == 0 : continue\r\n\r\n item = item.strip( os.sep )\r\n\r\n item = localPath + item\r\n\r\n if item in sysPath : continue\r\n\r\n if not os.path.exists( item ) : continue\r\n\r\n sysPath.append( item )\r\n\r\n # places the previous paths. only those that exist\r\n\r\n\r\n for item in sys.path :\r\n\r\n if item in sysPath : continue\r\n\r\n if not os.path.exists( item ) : continue\r\n\r\n sysPath.append( item )\r\n\r\n sys.path = sysPath", "def getRootModules():\n modules = []\n if ip.db.has_key('rootmodules'):\n return ip.db['rootmodules']\n t = time()\n store = False\n for path in sys.path:\n modules += moduleList(path) \n if time() - t >= TIMEOUT_STORAGE and not store:\n store = True\n print \"\\nCaching the list of root modules, please wait!\" \n print \"(This will only be done once - type '%rehashx' to \" + \\\n \"reset cache!)\"\n print\n if time() - t > TIMEOUT_GIVEUP:\n print \"This is taking too long, we give up.\"\n print\n ip.db['rootmodules'] = []\n return []\n \n modules += sys.builtin_module_names\n \n modules = list(set(modules))\n if '__init__' in modules:\n modules.remove('__init__')\n modules = list(set(modules))\n if store:\n ip.db['rootmodules'] = modules\n return modules", "def refresh(self):\n self.modules.clear()\n module_files = []\n module_paths = os.environ['MAYA_MODULE_PATH'].split(os.pathsep)\n for p in module_paths:\n try:\n module_files += [os.path.join(p, x).replace(os.sep, os.altsep or os.sep) for x in os.listdir(p) if\n x.lower()[-3:] == \"mod\"]\n except OSError:\n pass # ignore bad paths\n for eachfile in module_files:\n for eachmod in self.parse_mod(eachfile):\n self.modules[\"{0.name} ({0.version})\".format(eachmod)] = eachmod", "def minimum_sys_modules(cls, site_libs, modules=None):\n\n modules = modules or sys.modules\n new_modules = {}\n\n for module_name, module in modules.items():\n # builtins can stay\n if not hasattr(module, '__path__'):\n new_modules[module_name] = module\n continue\n\n # Unexpected objects, e.g. 
namespace packages, should just be dropped:\n if not isinstance(module.__path__, list):\n TRACER.log('Dropping %s' % (module_name,), V=3)\n continue\n\n # Pop off site-impacting __path__ elements in-place.\n for k in reversed(range(len(module.__path__))):\n if cls._tainted_path(module.__path__[k], site_libs):\n TRACER.log('Scrubbing %s.__path__: %s' % (module_name, module.__path__[k]), V=3)\n module.__path__.pop(k)\n\n # It still contains path elements not in site packages, so it can stay in sys.modules\n if module.__path__:\n new_modules[module_name] = module\n\n return new_modules", "def modules():", "def minimum_sys(cls):\r\n site_libs = set(cls._site_libs())\r\n for site_lib in site_libs:\r\n TRACER.log('Found site-library: %s' % site_lib)\r\n for extras_path in cls._extras_paths():\r\n TRACER.log('Found site extra: %s' % extras_path)\r\n site_libs.add(extras_path)\r\n site_libs = set(os.path.normpath(path) for path in site_libs)\r\n\r\n sys_modules = cls.minimum_sys_modules(site_libs)\r\n sys_path, sys_path_importer_cache = cls.minimum_sys_path(site_libs)\r\n\r\n return sys_path, sys_path_importer_cache, sys_modules", "def minimum_sys(cls):\n site_libs = set(cls._site_libs())\n for site_lib in site_libs:\n TRACER.log('Found site-library: %s' % site_lib)\n for extras_path in cls._extras_paths():\n TRACER.log('Found site extra: %s' % extras_path)\n site_libs.add(extras_path)\n site_libs = set(os.path.normpath(path) for path in site_libs)\n\n sys_path, sys_path_importer_cache = cls.minimum_sys_path(site_libs)\n sys_modules = cls.minimum_sys_modules(site_libs)\n\n return sys_path, sys_path_importer_cache, sys_modules", "def getStandard(self):\n\n app = self.app\n loadData = app.loadData\n\n if not loadData or loadData == \"core\":\n return\n\n aContext = app.context\n moduleSpecs = aContext.moduleSpecs\n seen = self.seen\n checkout = self.checkout\n backend = self.backend\n\n for m in moduleSpecs or []:\n org = m[\"org\"]\n repo = m[\"repo\"]\n relative = m[\"relative\"]\n theCheckout = m.get(\"checkout\", checkout)\n theBackend = m.get(\"backend\", backend)\n bRep = backendRep(theBackend, \"spec\", default=backend)\n\n ref = f\"{bRep}{org}/{repo}{relative}\"\n if ref in seen:\n continue\n\n if not self.getModule(\n org,\n repo,\n relative,\n theCheckout,\n backend=theBackend,\n specs=m,\n ):\n self.good = False", "def fix_sys_path():\n sys.path = EXTRA_PATHS + sys.path", "def getModules(self):\n\n self.provenance = []\n provenance = self.provenance\n self.mLocations = []\n mLocations = self.mLocations\n\n self.locations = None\n self.modules = None\n\n self.good = True\n self.seen = set()\n\n self.getMain()\n self.getRefs()\n self.getStandard()\n\n version = self.version\n good = self.good\n app = self.app\n\n if good:\n app.mLocations = mLocations\n app.provenance = provenance\n else:\n return\n\n mModules = []\n if mLocations:\n mModules.append(version or \"\")\n\n locations = self.locationsArg\n modules = self.modulesArg\n\n givenLocations = (\n []\n if locations is None\n else [expandDir(app, x.strip()) for x in itemize(locations, \"\\n\")]\n if type(locations) is str\n else [str(x) for x in locations]\n )\n givenModules = (\n []\n if modules is None\n else [normpath(x.strip()) for x in itemize(modules, \"\\n\")]\n if type(modules) is str\n else [normpath(str(x)) for x in modules]\n )\n\n self.locations = mLocations + givenLocations\n self.modules = mModules + givenModules", "def _fix_sys_path():\n global _fix_sys_path_done\n\n if _fix_sys_path_done:\n return\n _fix_sys_path_done = 
True\n if not (sys.argv and sys.path):\n # Not enough information\n return\n d = os.path.dirname(os.path.realpath(sys.argv[0]))\n if sys.path[0] == d:\n sys.path.pop(0)", "def modules_load(machine_config):\n\t#---modules in LOCAL configuration must be loaded before checking version\n\timport importlib\n\tif 'module_path' in machine_config: module_path = machine_config['module_path']\n\telse:\n\t\tmodule_parent = os.environ.get('MODULESHOME','/usr/share/Modules/default')\n\t\tmodule_path = os.path.join(module_parent,'init','python.py')\n\tincoming = {}\n\tif sys.version_info<(3,0): execfile(module_path,incoming)\n\telse: exec(open(module_path).read(),incoming)\n\t#---note that modules that rely on dynamically-linked C-code must use EnvironmentModules\n\tmodlist = machine_config['modules']\n\tif type(modlist)==str: modlist = modlist.split(',')\n\tfor mod in modlist:\n\t\t#---always unload gromacs to ensure correct version\n\t\tincoming['module']('unload','gromacs')\n\t\tprint('[STATUS] module load %s'%mod)\n\t\tincoming['module']('load',mod)", "def _blink_base(self):\n module_path = self._filesystem.path_to_module(self.__module__)\n tools_index = module_path.rfind('tools')\n assert tools_index != -1, 'could not find location of this checkout from %s' % module_path\n return self._filesystem.normpath(module_path[0:tools_index - 1])", "def get_system_modules():\n # print(\"## \" + \"System modules \" + \"#\"*60)\n import sys\n\n system_modules = sorted(sys.modules.keys())\n # for m in system_modules:\n # print(m)\n\n # print(\"## \" + \"pkg_resources \" + \"#\"*60)\n pkg_resources_pkgs = []\n for dist in __import__(\"pkg_resources\").working_set:\n if dist.project_name not in system_modules:\n pkg_resources_pkgs.append(dist.project_name)\n\n pkg_resources_pkgs = sorted(pkg_resources_pkgs)\n\n # for p in pkg_resources_pkgs:\n # print(p)\n\n # print(\"## \" + \"pkgutil \" + \"#\"*60)\n import pkgutil\n\n pkg_utils = []\n for m in pkgutil.iter_modules():\n if m[1] not in (system_modules + pkg_resources_pkgs):\n pkg_utils.append(m[1])\n pkg_utils = sorted(pkg_utils)\n # for m in pkg_utils:\n # print(m)\n return sorted(system_modules + pkg_resources_pkgs + pkg_utils)", "def load():\n out = load_as_root_module()\n parser = create_parser(os.path.basename(sys.argv[0]))\n opts = parser.parse_args(sys.argv[1:])\n load_env(opts, out.opt)\n\n return out", "def reset_modules(self) -> None:\n self.modules = {}\n self.update_modules()\n self.parse_modules()", "def load_shutit_modules(self):\n\t\tshutit_global.shutit_global_object.yield_to_draw()\n\t\tif self.loglevel <= logging.DEBUG:\n\t\t\tself.log('ShutIt module paths now: ',level=logging.DEBUG)\n\t\t\tself.log(self.host['shutit_module_path'],level=logging.DEBUG)\n\t\tfor shutit_module_path in self.host['shutit_module_path']:\n\t\t\tself.load_all_from_path(shutit_module_path)", "def get_system_modules( vars = {}, log = sys.stderr):\n\n if not vars.has_key(\"SYSIMG_PATH\"):\n vars[\"SYSIMG_PATH\"]=\"/\"\n SYSIMG_PATH=vars[\"SYSIMG_PATH\"]\n\n if not vars.has_key(\"NODE_MODEL_OPTIONS\"):\n vars[\"NODE_MODEL_OPTIONS\"] = 0;\n\n initrd, kernel_version = getKernelVersion(vars, log)\n\n # get the kernel version we are assuming\n if kernel_version is None:\n try:\n kernel_version= os.listdir( \"%s/lib/modules/\" % SYSIMG_PATH )\n except OSError, e:\n return\n\n if len(kernel_version) == 0:\n return\n\n if len(kernel_version) > 1:\n print( \"WARNING: We may be returning modules for the wrong kernel.\" )\n\n kernel_version= kernel_version[0]\n\n print( \"Using 
kernel version %s\" % kernel_version )\n\n # test to make sure the file we need is present\n modules_pcimap_path = \"%s/lib/modules/%s/modules.pcimap\" %\\\n (SYSIMG_PATH,kernel_version)\n if not os.access(modules_pcimap_path,os.R_OK):\n print( \"WARNING: Unable to read %s\" % modules_pcimap_path )\n return\n\n pcimap = pypcimap.PCIMap(modules_pcimap_path)\n\n # this is the actual data structure we return\n system_mods= {}\n\n # these are the lists that will be in system_mods\n network_mods= []\n scsi_mods= []\n\n # XXX: this is really similar to what BootCD/conf_files/pl_hwinit does. merge?\n pcidevs = get_devices()\n\n devlist=pcidevs.keys()\n devlist.sort()\n for slot in devlist:\n dev = pcidevs[slot]\n base = (dev[4] & 0xff0000) >> 16\n modules = pcimap.get(dev)\n if base not in (PCI_BASE_CLASS_STORAGE,\n PCI_BASE_CLASS_NETWORK):\n # special exception for forcedeth NICs whose base id\n # claims to be a Bridge, even though it is clearly a\n # network device\n if \"forcedeth\" in modules:\n base=PCI_BASE_CLASS_NETWORK\n else:\n continue\n\n if len(modules) > 0:\n if base == PCI_BASE_CLASS_NETWORK:\n network_mods += modules\n elif base == PCI_BASE_CLASS_STORAGE:\n scsi_mods += modules\n\n system_mods[MODULE_CLASS_SCSI]= scsi_mods\n system_mods[MODULE_CLASS_NETWORK]= network_mods\n\n return system_mods", "def build_missing_imports(self) -> None:\n self.undefined -= set(dir(__import__(\"builtins\")))\n\n # Optimisation: we will almost always define sys and pypprint. However, in order for us to\n # get to `import sys`, we'll need to examine our wildcard imports, which in the presence\n # of config, could be slow.\n if \"pypprint\" in self.undefined:\n pypprint_def = (\n inspect.getsource(pypprint) if self.define_pypprint else \"from pyp import pypprint\"\n )\n self.before_tree.body = ast.parse(pypprint_def).body + self.before_tree.body\n self.undefined.remove(\"pypprint\")\n if \"sys\" in self.undefined:\n self.before_tree.body = ast.parse(\"import sys\").body + self.before_tree.body\n self.undefined.remove(\"sys\")\n # Now short circuit if we can\n if not self.undefined:\n return\n\n def get_names_in_module(module: str) -> Any:\n try:\n mod = importlib.import_module(module)\n except ImportError as e:\n raise PypError(\n f\"Config contains wildcard import from {module}, but {module} failed to import\"\n ) from e\n return getattr(mod, \"__all__\", (n for n in dir(mod) if not n.startswith(\"_\")))\n\n subimports = {\"Path\": \"pathlib\", \"pp\": \"pprint\"}\n wildcard_imports = (\n [\"itertools\", \"math\", \"collections\"]\n + self.config.wildcard_imports\n + self.wildcard_imports\n )\n subimports.update(\n {name: module for module in wildcard_imports for name in get_names_in_module(module)}\n )\n\n def get_import_for_name(name: str) -> str:\n if name in subimports:\n return f\"from {subimports[name]} import {name}\"\n return f\"import {name}\"\n\n self.before_tree.body = [\n ast.parse(stmt).body[0] for stmt in sorted(map(get_import_for_name, self.undefined))\n ] + self.before_tree.body" ]
[ "0.6124312", "0.6062598", "0.59543145", "0.5901678", "0.56766284", "0.56067514", "0.5542571", "0.54748863", "0.5422463", "0.5348224", "0.53475314", "0.53056484", "0.52948594", "0.51196456", "0.51149005", "0.51118445", "0.50935435", "0.5014209", "0.50100213", "0.5005435", "0.49972638", "0.49371007", "0.49274635", "0.48939973", "0.48525187", "0.48509544", "0.4846581", "0.4836907", "0.48360032", "0.48143265" ]
0.64339405
0
Yields a scrubbed version of sys.path.
def scrubbed_sys_path():
    for p in sys.path[:]:
        if not isinstance(p, str):
            yield p

        # Scrub any/all pex locations from sys.path.
        pp = pathlib.Path(p)
        if pex_root not in pp.parents:
            yield p
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def scrub_from_sys_modules():\n for k, m in sys.modules.items():\n if k in sys_modules_whitelist:\n continue\n\n if hasattr(m, '__file__') and m.__file__ is not None:\n mp = pathlib.Path(m.__file__)\n if pex_root in mp.parents:\n yield k", "def add_sys_paths(paths):\n original_syspath = sys.path[:]\n sys.path.extend(paths)\n\n try:\n yield\n finally:\n sys.path = original_syspath", "def removeduppaths():\r\n # This ensures that the initial path provided by the interpreter contains\r\n # only absolute pathnames, even if we're running from the build directory.\r\n L = []\r\n known_paths = set()\r\n for dir in sys.path:\r\n # Filter out duplicate paths (on case-insensitive file systems also\r\n # if they only differ in case); turn relative paths into absolute\r\n # paths.\r\n dir, dircase = makepath(dir)\r\n if not dircase in known_paths:\r\n L.append(dir)\r\n known_paths.add(dircase)\r\n sys.path[:] = L\r\n return known_paths", "def syspath():\n import sys\n pprint(sys.path)", "def modules_in_current_dir(path, module_name):\n yield from modules_from_path(Path(path).parent, module_name)", "def _scrub_import_environment(sys_modules_whitelist: typing.List[str], logger: typing.Callable):\n pex_root = pathlib.Path(Variables().PEX_ROOT)\n\n # A generator that emits sys.path elements\n def scrubbed_sys_path():\n \"\"\"Yields a scrubbed version of sys.path.\"\"\"\n for p in sys.path[:]:\n if not isinstance(p, str):\n yield p\n\n # Scrub any/all pex locations from sys.path.\n pp = pathlib.Path(p)\n if pex_root not in pp.parents:\n yield p\n\n def scrub_from_sys_modules():\n \"\"\"Yields keys of sys.modules as candidates for scrubbing/removal.\"\"\"\n for k, m in sys.modules.items():\n if k in sys_modules_whitelist:\n continue\n\n if hasattr(m, '__file__') and m.__file__ is not None:\n mp = pathlib.Path(m.__file__)\n if pex_root in mp.parents:\n yield k\n\n def scrub_env():\n # Replace sys.path with a scrubbed version.\n sys.path[:] = list(scrubbed_sys_path())\n\n # Drop module cache references from sys.modules.\n modules_to_scrub = list(scrub_from_sys_modules())\n for m in modules_to_scrub:\n del sys.modules[m]\n\n logger('Scrubbing sys.path and sys.modules in preparation for pex bootstrap\\n')\n logger(\n f'sys.path contains {len(sys.path)} items, '\n f'sys.modules contains {len(sys.modules)} keys\\n'\n )\n\n # Scrub environment.\n scrub_env()\n\n logger(\n f'sys.path now contains {len(sys.path)} items, '\n f'sys.modules now contains {len(sys.modules)} keys\\n'\n )", "def fix_sys_path():\n sys.path = EXTRA_PATHS + sys.path", "def cleanup(modpath):\n yield\n\n # Remove dummy modules from sys.modules\n pathlist = [p for p in sys.modules\n if p != modpath and p.startswith(modpath)]\n\n for p in pathlist:\n del sys.modules[p]\n\n if modpath in sys.modules:\n del sys.modules[modpath]\n\n # Remove TaskImporter\n index = [i for i, obj in enumerate(sys.meta_path)\n if isinstance(obj, TaskImporter)]\n\n for i in reversed(index):\n assert isinstance(sys.meta_path[i], TaskImporter)\n sys.meta_path.pop(i)", "def activate(self):\n not_in_path = self._path_entry not in sys.path\n if not_in_path:\n sys.path.append(self._path_entry)\n\n try:\n with self.cache:\n yield self.inject(Pipeline)\n finally:\n if not_in_path:\n sys.path.remove(self._path_entry)\n\n imported_modules = [\n name\n for name, module in sys.modules.items()\n if (filename := getattr(module, \"__file__\", None))\n and filename.startswith(self._path_entry)\n ]\n\n for name in imported_modules:\n del sys.modules[name]", "def _fix_sys_path():\n 
global _fix_sys_path_done\n\n if _fix_sys_path_done:\n return\n _fix_sys_path_done = True\n if not (sys.argv and sys.path):\n # Not enough information\n return\n d = os.path.dirname(os.path.realpath(sys.argv[0]))\n if sys.path[0] == d:\n sys.path.pop(0)", "def site_packages() -> Generator[Path, None, None]:\n site_packages = Path(thisdir, \"site-packages\")\n if site_packages.exists():\n rm_rf(str(site_packages))\n\n site_packages.mkdir(exist_ok=True)\n\n yield site_packages\n\n rm_rf(str(site_packages))", "def path():\n # Exclude path to this script from path.\n this_file = os.path.realpath(__file__)\n this_path = os.path.dirname(this_file)\n return os.pathsep.join(p for p in sys.path if p != this_path)", "def fix_sys_path(extra_extra_paths=()):\n sys.path[1:1] = EXTRA_PATHS\n fix_google_path()", "def setSysPath():\n c = os.path.abspath(os.path.dirname(__file__))\n\n add = [\n ['lib'],\n ]\n\n for item in add:\n p = os.path.join(c, *item)\n if not p in sys.path:\n sys.path[1:1] = [p]\n\n remove = ['django', 'simplejson']\n\n # Remove unwanted paths\n for item in sys.path:\n for r in remove:\n if item.find(r) > 0:\n sys.path.remove(item)", "def patch_sys(cls):\r\n def patch_dict(old_value, new_value):\r\n old_value.clear()\r\n old_value.update(new_value)\r\n\r\n def patch_all(path, path_importer_cache, modules):\r\n sys.path[:] = path\r\n patch_dict(sys.path_importer_cache, path_importer_cache)\r\n patch_dict(sys.modules, modules)\r\n\r\n old_sys_path, old_sys_path_importer_cache, old_sys_modules = (\r\n sys.path[:], sys.path_importer_cache.copy(), sys.modules.copy())\r\n new_sys_path, new_sys_path_importer_cache, new_sys_modules = cls.minimum_sys()\r\n\r\n patch_all(new_sys_path, new_sys_path_importer_cache, new_sys_modules)\r\n\r\n try:\r\n yield\r\n finally:\r\n patch_all(old_sys_path, old_sys_path_importer_cache, old_sys_modules)", "def patch_sys(cls):\n def patch_dict(old_value, new_value):\n old_value.clear()\n old_value.update(new_value)\n\n def patch_all(path, path_importer_cache, modules):\n sys.path[:] = path\n patch_dict(sys.path_importer_cache, path_importer_cache)\n patch_dict(sys.modules, modules)\n\n old_sys_path, old_sys_path_importer_cache, old_sys_modules = (\n sys.path[:], sys.path_importer_cache.copy(), sys.modules.copy())\n new_sys_path, new_sys_path_importer_cache, new_sys_modules = cls.minimum_sys()\n\n patch_all(new_sys_path, new_sys_path_importer_cache, new_sys_modules)\n yield", "def patch_sys_path():\n this_dir = os.path.dirname(__file__)\n to_add = os.path.join(this_dir, \"..\")\n to_add = os.path.abspath(to_add)\n sys.path.insert(0, to_add)", "def get_pythonpath(working_set, buildout, prefixes):\n\n # get all paths available in the current working set\n paths = list(working_set.entries)\n\n if hasattr(zc.buildout.easy_install, 'distribute_loc'):\n prepend_path(zc.buildout.easy_install.distribute_loc, paths)\n elif hasattr(zc.buildout.easy_install, 'setuptools_loc'):\n prepend_path(zc.buildout.easy_install.setuptools_loc, paths)\n else:\n prepend_path(zc.buildout.easy_install.setuptools_path, paths)\n\n return [k for k in working_set.entries \\\n if os.path.realpath(k) not in site_paths(buildout, prefixes)]", "def site_paths(buildout, prefixes):\n\n def is_buildout_dir(path):\n return path.startswith(buildout['eggs-directory']) or \\\n path.startswith(buildout['develop-eggs-directory'])\n\n def is_in_prefixes(path):\n return any([path.startswith(k) for k in prefixes])\n\n retval = [os.path.realpath(k) for k in site.sys.path]\n return [k for k in retval if not 
(is_buildout_dir(k) or is_in_prefixes(k))]", "def get_possible_paths():\n yield ('mtad', get_mtad_linter_path())\n yield ('bundled', get_bundled_linter_path())", "def freeze_includes() -> List[str]:\n import _pytest\n\n result = list(_iter_all_modules(_pytest))\n return result", "def storer_paths():\n return [dir_unchecked(), dir_checked(),\n dir_backup(), dir_tests()]", "def constrain_path_relative_to(path):\n environ_backup = os.environ\n environ = os.environ\n\n if path:\n environ = os.environ.copy()\n environ[\"PATH\"] = path\n\n os.environ = environ\n\n try:\n yield\n finally:\n os.environ = environ_backup", "def add_sys_paths(currpath, parentlevel: int = 2):\n if currpath is None:\n currpath = os.getcwd()\n curr_dir = os.path.realpath(os.path.abspath(currpath))\n if not curr_dir in sys.path:\n sys.path.insert(0, curr_dir)\n\n if parentlevel <= 0:\n return\n\n pdir = curr_dir\n i = range(parentlevel)\n while i > 0:\n i = i - 1\n pdir = os.path.dirname(pdir)\n if not pdir in sys.path:\n sys.path.insert(0, pdir)", "def localPython ( localPath ) :\r\n\r\n if not type( localPath ) == str : return\r\n\r\n if not localPath.endswith( os.sep ) : localPath = localPath + os.sep\r\n\r\n # reads the paths to add to sys.path\r\n \r\n try :\r\n\r\n handler = open( localPath + \"sysPath.txt\", \"r\" )\r\n\r\n text = handler.read()\r\n\r\n handler.close()\r\n\r\n items = text.splitlines()\r\n\r\n except Exception, exception :\r\n\r\n items = [ ]\r\n\r\n\r\n # places the local paths before the previous search paths. only those that exist\r\n\r\n sysPath = [ ]\r\n\r\n for item in items :\r\n\r\n item = item.strip().replace( \"\\\\\", os.sep ).replace( \"/\", os.sep )\r\n\r\n if len( item ) == 0 : continue\r\n\r\n item = item.strip( os.sep )\r\n\r\n item = localPath + item\r\n\r\n if item in sysPath : continue\r\n\r\n if not os.path.exists( item ) : continue\r\n\r\n sysPath.append( item )\r\n\r\n # places the previous paths. 
only those that exist\r\n\r\n\r\n for item in sys.path :\r\n\r\n if item in sysPath : continue\r\n\r\n if not os.path.exists( item ) : continue\r\n\r\n sysPath.append( item )\r\n\r\n sys.path = sysPath", "def _iter_variant_extracted_paths(root, path, variants):\n for variant in sorted(variants, key=len, reverse=True):\n inner_path = os.path.join(*[str(request) for request in variant])\n resolved_path = os.path.join(root, inner_path)\n\n if filer.in_directory(path, resolved_path, follow=False):\n yield path.replace(inner_path + os.sep, \"\")", "def all_possible_beards(paths):\n literal_paths = get_literal_beard_paths(paths)\n\n for path in literal_paths:\n for f in os.listdir(path):\n if is_module(os.path.join(path, f)):\n yield os.path.basename(f)", "def _discover_path_importables(\n pkg_pth: Path, pkg_name: str,\n) -> Generator[str, None, None]:\n for dir_path, _d, file_names in os.walk(pkg_pth):\n pkg_dir_path = Path(dir_path)\n\n if pkg_dir_path.parts[-1] == '__pycache__':\n continue\n\n if all(Path(_).suffix != '.py' for _ in file_names):\n continue\n\n rel_pt = pkg_dir_path.relative_to(pkg_pth)\n pkg_pref = '.'.join((pkg_name,) + rel_pt.parts)\n yield from (\n pkg_path\n for _, pkg_path, _ in pkgutil.walk_packages(\n (str(pkg_dir_path),), prefix=f'{pkg_pref}.',\n )\n )", "def _maybe_iterdir(path: epath.Path) -> Iterator[epath.Path]:\n # Use try/except rather than `.exists()` to avoid an extra RPC call\n # per namespace\n try:\n for f in path.iterdir():\n yield f\n except (\n OSError,\n FileNotFoundError,\n PermissionError,\n tf.errors.NotFoundError,\n tf.errors.PermissionDeniedError,\n ) as e:\n pass", "def morepath_modules(cls: type[morepath.App]) -> 'Iterator[str]':\n for base in cls.__mro__:\n if not issubclass(base, morepath.App):\n continue\n\n if base is morepath.App:\n continue\n\n module = '.'.join(base.__module__.split('.')[:2])\n\n if module.startswith('test'):\n continue\n\n yield module" ]
[ "0.6704874", "0.64456856", "0.63815147", "0.61018676", "0.6091217", "0.6007068", "0.596048", "0.5937054", "0.5828322", "0.5781934", "0.5675717", "0.567165", "0.56604296", "0.5641513", "0.56188345", "0.5617912", "0.55805993", "0.5579308", "0.5575357", "0.5502024", "0.54931635", "0.5471852", "0.5458711", "0.5446449", "0.5445708", "0.54051554", "0.5390755", "0.5373159", "0.5354777", "0.5350978" ]
0.80444944
0
Yields keys of sys.modules as candidates for scrubbing/removal.
def scrub_from_sys_modules():
    for k, m in sys.modules.items():
        if k in sys_modules_whitelist:
            continue

        if hasattr(m, '__file__') and m.__file__ is not None:
            mp = pathlib.Path(m.__file__)
            if pex_root in mp.parents:
                yield k
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def all_registered_modules():\n yield from iterchain(modules.values() for modules in Registry.monomers.values())", "def modules(self):\n for desc in self._mappings.values():\n if hasattr(desc, 'module'):\n yield desc.module\n else:\n continue", "def imports():\n for name, val in globals().items():\n if isinstance(val, getattr(types, \"ModuleType\")):\n yield val.__name__", "def modules(self):\n return self._modules.keys()", "def list_modules():\n for module_name in listdir(modules_directory):\n if isdir(join(modules_directory, module_name)):\n log.debug('Load module: {0}'.format(module_name))\n yield module_name", "def cleanup(modpath):\n yield\n\n # Remove dummy modules from sys.modules\n pathlist = [p for p in sys.modules\n if p != modpath and p.startswith(modpath)]\n\n for p in pathlist:\n del sys.modules[p]\n\n if modpath in sys.modules:\n del sys.modules[modpath]\n\n # Remove TaskImporter\n index = [i for i, obj in enumerate(sys.meta_path)\n if isinstance(obj, TaskImporter)]\n\n for i in reversed(index):\n assert isinstance(sys.meta_path[i], TaskImporter)\n sys.meta_path.pop(i)", "def minimum_sys_modules(cls, site_libs, modules=None):\n\n modules = modules or sys.modules\n new_modules = {}\n\n for module_name, module in modules.items():\n # builtins can stay\n if not hasattr(module, '__path__'):\n new_modules[module_name] = module\n continue\n\n # Unexpected objects, e.g. namespace packages, should just be dropped:\n if not isinstance(module.__path__, list):\n TRACER.log('Dropping %s' % (module_name,), V=3)\n continue\n\n # Pop off site-impacting __path__ elements in-place.\n for k in reversed(range(len(module.__path__))):\n if cls._tainted_path(module.__path__[k], site_libs):\n TRACER.log('Scrubbing %s.__path__: %s' % (module_name, module.__path__[k]), V=3)\n module.__path__.pop(k)\n\n # It still contains path elements not in site packages, so it can stay in sys.modules\n if module.__path__:\n new_modules[module_name] = module\n\n return new_modules", "def loaded_modules() -> List[str]:\n return PYSTAC_IO.keys()", "def discover_modules(package, module_match_func=trivial):\n for _, module_name, _ in pkgutil.walk_packages(\n package.__path__,\n prefix=package.__name__ + '.',\n ):\n module = __import__(module_name, fromlist=['__trash'], level=0)\n if module_match_func(module):\n yield module", "def _iter_module_files():\n # The list call is necessary on Python 3 in case the module\n # dictionary modifies during iteration.\n for module in list(sys.modules.values()):\n if module is None:\n continue\n filename = getattr(module, \"__file__\", None)\n if filename:\n old = None\n while not os.path.isfile(filename):\n old = filename\n filename = os.path.dirname(filename)\n if filename == old:\n break\n else:\n if filename[-4:] in (\".pyc\", \".pyo\"):\n filename = filename[:-1]\n yield filename", "def ImportsTest(recipe, allowed_modules):\n\n for _, val in sorted(recipe.global_symbols.iteritems()):\n if isinstance(val, types.ModuleType):\n module_name = val.__name__\n for pattern in allowed_modules:\n if pattern.match(val.__name__):\n break\n else:\n yield ('In %s:\\n'\n ' Non-whitelisted import of %s' % (recipe.path, module_name))", "def find_dependent_modules():\n tree = {}\n for module in sys.modules.values():\n if module is None:\n continue\n tree[module] = set()\n for attr_name in dir(module):\n attr = getattr(module, attr_name)\n if isinstance(attr, ModuleType):\n tree[module].add(attr)\n elif type(attr) in (FunctionType, type):\n tree[module].add(attr.__module__)\n return tree", "def 
__dir__():\n keys = (*globals().keys(), *_lazy_imports_obj.keys(), *_lazy_imports_mod.keys())\n return sorted(keys)", "def morepath_modules(cls: type[morepath.App]) -> 'Iterator[str]':\n for base in cls.__mro__:\n if not issubclass(base, morepath.App):\n continue\n\n if base is morepath.App:\n continue\n\n module = '.'.join(base.__module__.split('.')[:2])\n\n if module.startswith('test'):\n continue\n\n yield module", "def dcs_modules():\n\n dcs_dirname = os.path.dirname(__file__)\n module_prefix = __package__ + '.'\n\n if getattr(sys, 'frozen', False):\n importer = pkgutil.get_importer(dcs_dirname)\n return [module for module in list(importer.toc) if module.startswith(module_prefix) and module.count('.') == 2]\n else:\n return [module_prefix + name for _, name, is_pkg in pkgutil.iter_modules([dcs_dirname]) if not is_pkg]", "def getRootModules():\n modules = []\n if ip.db.has_key('rootmodules'):\n return ip.db['rootmodules']\n t = time()\n store = False\n for path in sys.path:\n modules += moduleList(path) \n if time() - t >= TIMEOUT_STORAGE and not store:\n store = True\n print \"\\nCaching the list of root modules, please wait!\" \n print \"(This will only be done once - type '%rehashx' to \" + \\\n \"reset cache!)\"\n print\n if time() - t > TIMEOUT_GIVEUP:\n print \"This is taking too long, we give up.\"\n print\n ip.db['rootmodules'] = []\n return []\n \n modules += sys.builtin_module_names\n \n modules = list(set(modules))\n if '__init__' in modules:\n modules.remove('__init__')\n modules = list(set(modules))\n if store:\n ip.db['rootmodules'] = modules\n return modules", "def deep_iter_modules(name):\r\n mod = import_dotted_name(name)\r\n yield name\r\n if not hasattr(mod, '__path__'):\r\n return\r\n for _, name, _ in iter_modules(mod.__path__, name + '.'):\r\n for name in deep_iter_modules(name):\r\n yield name", "def modules(self):\n return sorted([module for module in self._registry.values()],\n key=lambda scomp: (scomp.order, scomp.label))", "def load_conf_modules():\n for modname in _list_module_names():\n mod = importutils.import_module('monasca_api.conf.' 
+ modname)\n required_funcs = ['register_opts', 'list_opts']\n for func in required_funcs:\n if hasattr(mod, func):\n yield mod", "def make_modules_importable(modules: Iterable[Module]) -> Dict[str, Module]:\n sys.modules.update({ module.__name__: module for module in modules })\n return sys.modules", "def _update_loaded_modules(self):\n system_modules = sys.modules.keys()\n for module in list(self.loaded_modules):\n if module not in system_modules:\n self.processed_filepaths.pop(module)\n self.loaded_modules.remove(module)", "def prune_modules(self, module_paths):\n result = {}\n for module in self.config.modules:\n try:\n result[module] = module_paths[module]\n except KeyError as e:\n raise KeyError(\n \"The {!r} module is required by the {} project, but is not \"\n \"available.\".format(module, self.config.project_dir)\n ) from e\n return result", "def _list_modules():\r\n return [\r\n desc.module_class\r\n for desc\r\n in _list_descriptors()\r\n ]", "def freeze_includes() -> List[str]:\n import _pytest\n\n result = list(_iter_all_modules(_pytest))\n return result", "def _scrub_import_environment(sys_modules_whitelist: typing.List[str], logger: typing.Callable):\n pex_root = pathlib.Path(Variables().PEX_ROOT)\n\n # A generator that emits sys.path elements\n def scrubbed_sys_path():\n \"\"\"Yields a scrubbed version of sys.path.\"\"\"\n for p in sys.path[:]:\n if not isinstance(p, str):\n yield p\n\n # Scrub any/all pex locations from sys.path.\n pp = pathlib.Path(p)\n if pex_root not in pp.parents:\n yield p\n\n def scrub_from_sys_modules():\n \"\"\"Yields keys of sys.modules as candidates for scrubbing/removal.\"\"\"\n for k, m in sys.modules.items():\n if k in sys_modules_whitelist:\n continue\n\n if hasattr(m, '__file__') and m.__file__ is not None:\n mp = pathlib.Path(m.__file__)\n if pex_root in mp.parents:\n yield k\n\n def scrub_env():\n # Replace sys.path with a scrubbed version.\n sys.path[:] = list(scrubbed_sys_path())\n\n # Drop module cache references from sys.modules.\n modules_to_scrub = list(scrub_from_sys_modules())\n for m in modules_to_scrub:\n del sys.modules[m]\n\n logger('Scrubbing sys.path and sys.modules in preparation for pex bootstrap\\n')\n logger(\n f'sys.path contains {len(sys.path)} items, '\n f'sys.modules contains {len(sys.modules)} keys\\n'\n )\n\n # Scrub environment.\n scrub_env()\n\n logger(\n f'sys.path now contains {len(sys.path)} items, '\n f'sys.modules now contains {len(sys.modules)} keys\\n'\n )", "def modules():", "def get_all_modules(package):\n base = Path(inspect.getabsfile(package)).parent\n\n for fl in base.glob(\"*.py\"):\n print(f\"loading module {fl}\")\n yield load_module(fl)", "def get_dep_map(kerneldir):\n\n\tf = open(os.path.join(kerneldir, 'modules.dep'))\n\tdeps = {}\n\tfor l in f:\n\t\t#print repr(l)\n\t\tmod, dep_list_str = l.strip().split(':', 1)\n\t\tassert mod not in deps\n\n\t\tkmod = KModuleName(mod)\n\t\tdep_list = [KModuleName(x) for x in dep_list_str.strip().split()]\n\t\tdep_list.insert(0, kmod)\t# prepend ourself as a dependency\n\n\t\tdeps[kmod] = dep_list\n\n\tf.close()\n\treturn deps", "def listModules(package):\n logger.debug(\"Listing modules for {0}\".format(package.__name__))\n for importer, modname, ispkg in pkgutil.iter_modules(package.__path__):\n yield (modname, ispkg)", "def scrubbed_sys_path():\n for p in sys.path[:]:\n if not isinstance(p, str):\n yield p\n\n # Scrub any/all pex locations from sys.path.\n pp = pathlib.Path(p)\n if pex_root not in pp.parents:\n yield p" ]
[ "0.6842569", "0.6502843", "0.6499838", "0.647568", "0.6403681", "0.6330229", "0.62831795", "0.62433136", "0.61260855", "0.6123235", "0.60469973", "0.6040864", "0.603489", "0.59373957", "0.59103936", "0.58853215", "0.5881549", "0.5876056", "0.5848127", "0.5820939", "0.5803232", "0.580137", "0.5785091", "0.57719815", "0.57622397", "0.57577276", "0.57303774", "0.57293147", "0.56906545", "0.5689356" ]
0.80843186
0
Extracts exactly 1 binary from a dir and returns a Path.
def _extract_resulting_binary(self, build_dir: pathlib.PosixPath, extension: str) -> pathlib.PosixPath:
    assert build_dir.is_dir(), f'build_dir {build_dir} was not a dir!'
    # N.B. It's important we use pathlib.Path.rglob (recursive) here, since pants v2 prefixes dist dirs
    # with their address namespace.
    binaries = list(build_dir.rglob(f'*.{extension}'))
    if len(binaries) != 1:
        raise self.BuildFailure(
            'failed to select deterministic build artifact from workdir, needed 1 binary file with '
            f'extension {extension} but found {len(binaries)}. Is the BUILD target a binary (pex) output type?'
        )
    return binaries[0]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dir_bin():\n return abspath('bin')", "def _search_path_to_file(self, directory, binary_name):\n for root, dirs, files in os.walk(directory):\n if binary_name in files:\n return os.path.join(root, binary_name)\n raise micp_kernel.NoExecutableError", "def _extract_archive(path: str, extracted_dir_path: str) -> str:\n logging.info('extracting %s to %s', path, extracted_dir_path)\n with tarfile.open(path) as tar:\n tar.extractall(path=extracted_dir_path)\n extracted_items = os.listdir(extracted_dir_path)\n if len(extracted_items) != 1:\n raise ValueError(\n 'archive at {} did not contain a single directory'.format(path))\n return os.path.join(extracted_dir_path, extracted_items[0])", "def find_firmware_file(dir='.'):\n for file in os.listdir(dir):\n if file[-4:] in ('.hex', '.bin'):\n return file", "def get_directory(path):\n return mangle_path(path).rsplit('/',1)[0]", "def extract_file(path):", "def calculate_base(self, extract_dir):\n log = logging.getLogger(self.name)\n # Move the contents of the package in to the correct destination\n top_level_contents = os.listdir(extract_dir)\n if self.options['strip-top-level-dir'].strip().lower() in TRUE_VALUES:\n if len(top_level_contents) != 1:\n log.error('Unable to strip top level directory because there are more '\n 'than one element in the root of the package.')\n raise zc.buildout.UserError('Invalid package contents')\n base = os.path.join(extract_dir, top_level_contents[0])\n else:\n base = extract_dir\n return base", "def _unpacked_toplevel(self, dir):\n unpacked = glob.glob('%s/*' % dir)\n unpacked.extend(glob.glob(\"%s/.*\" % dir)) # include hidden files and folders\n # Check that dir contains nothing but a single folder:\n if len(unpacked) == 1 and os.path.isdir(unpacked[0]):\n return unpacked[0]\n else:\n return dir", "def dir_path(path):\n pattern='^(.*)[/]$'\n matchobj=re.match(pattern,path)\n if matchobj:\n return path\n else:\n return path+'/'", "def find_binary_in_path(filename: str) -> str:\n if \"PATH\" not in os.environ:\n raise PATHNotFoundError\n for directory in os.environ[\"PATH\"].split(os.pathsep):\n binary = os.path.abspath(os.path.join(directory, filename))\n if os.path.isfile(binary) and os.access(binary, os.X_OK):\n return binary\n raise BinaryNotFoundError", "def bin_search(binary):\n if sys.platform == 'win32':\n # Directory containing 'binary' should be in PATH\n return binary\n result = None\n mode = os.R_OK | os.X_OK\n for p in bin_search_path:\n path = join(p, binary)\n if os.access(path, mode) == 1:\n result = path\n break\n else:\n raise MissingBinary('Unable to find binary \"%s\"' % binary)\n return result", "def recursive_unpack(dir_path):\n exten = ['7z', 'zip', 'rar']\n one_more = False\n for r, d, files in os.walk(dir_path):\n packed = []\n for ext in exten:\n code_files = fnmatch.filter(files, '*.' 
+ ext)\n if len(code_files) > 0:\n tmp_paths = [os.path.join(os.path.abspath(r), f) for f in code_files]\n packed.extend(tmp_paths)\n if not one_more and len(packed) > 0:\n one_more = True\n if len(packed) > 0:\n print(\"unpack list:\", packed)\n for p in packed:\n extract(p, os.path.dirname(p))\n os.remove(p)\n if one_more:\n recursive_unpack(dir_path)", "def untar_first(input_filename: str, extract_dir: str) -> str:\n with tarfile.open(input_filename) as tar_data:\n file_to_extract = tar_data.next()\n while file_to_extract is not None and not file_to_extract.isfile():\n file_to_extract = tar_data.next()\n\n if file_to_extract is None:\n print(f'No file found in archive {input_filename}')\n res = ''\n else:\n tar_data.extract(file_to_extract, path=extract_dir)\n res = os.path.join(extract_dir, file_to_extract.name)\n return res", "def extract(cls, path, outdir):\r\n raise NotImplementedError()", "def GetFile(file):\n\n file = file.replace(\"/\", \"\\\\\").strip(\"\\\\\")\n new = list(file)\n new.reverse()\n if \"\\\\\" not in new:\n return None, file # Don't raise an error, but there isn't any folder\n indx = new.index(\"\\\\\")\n return file[:-indx], file[-indx:] # Full path and file name", "def convertPath(srcpath, dstdir):\n bits = srcpath.split(\"/\")\n bits.pop(0)\n # Strip out leading 'unsigned' from paths like unsigned/update/win32/...\n if bits[0] == 'unsigned':\n bits.pop(0)\n return os.path.join(dstdir, *bits)", "def GetBinDirectory(self, *args):\n return _gmat_py.FileManager_GetBinDirectory(self, *args)", "def get_first_file(cmds):\n for cmd in cmds:\n all_files = glob.glob(cmd)\n if all_files:\n for f in all_files:\n if not os.path.isdir(f):\n return f\n return ''", "def find_specific_file_extension_in_dir(dir_path, extension):\r\n return glob.glob(os.path.join(dir_path, r'*{}'.format(extension)))[0].replace('\\\\', '/').split('/')[-1]", "def get_file_path(bin_name, dir_path=\"\"):\n with open('../../../.build_vars.json') as json_file:\n build_paths = json.load(json_file)\n basepath = os.path.normpath(build_paths['PREFIX'] + \"/../{0}\"\n .format(dir_path))\n\n file_path = list(Path(basepath).glob('**/{0}'.format(bin_name)))\n if not file_path:\n raise OSError(ENOENT, \"File {0} not found inside {1} Directory\"\n .format(bin_name, basepath))\n else:\n return file_path", "def extract_via_patoolib(\r\n file_path: str, unpack_path: str = None, remove_if_exists: bool = False\r\n) -> Optional[str]:\r\n # TODO handle compression with -zvxf\r\n if not os.path.exists(file_path):\r\n log.warning(file_path + \" does not exist.\")\r\n return None\r\n\r\n try:\r\n import patoolib\r\n except ImportError:\r\n log.warning(\"patoolib is not installed: Run pip install patool\")\r\n return None\r\n\r\n if not unpack_path:\r\n unpack_path = os.path.join(\r\n os.path.dirname(file_path), os.path.splitext(os.path.basename(file_path))[0]\r\n )\r\n\r\n if os.path.isdir(unpack_path):\r\n log.info(\"Unpack directory already exists \" + unpack_path)\r\n if not os.listdir(unpack_path):\r\n log.info(\"Directory is empty. 
Unpacking...\")\r\n elif remove_if_exists:\r\n log.info(\"Removing existing unpacked dir: \" + unpack_path)\r\n shutil.rmtree(unpack_path)\r\n else:\r\n return unpack_path\r\n\r\n try:\r\n patoolib.extract_archive(file_path, outdir=unpack_path)\r\n except Exception as e:\r\n log.warning(\"Failed to unpack via patoolib: \", exc_info=e)\r\n return None\r\n\r\n return unpack_path", "def extract(cls, path, outdir):\r\n with open_zip(path) as zip:\r\n for path in zip.namelist():\r\n # While we're at it, we also perform this safety test.\r\n if path.startswith('/') or path.startswith('..'):\r\n raise ValueError('Zip file contains unsafe path: %s' % path)\r\n # Ignore directories. extract() will create parent dirs as needed.\r\n if not path.endswith('/'):\r\n zip.extract(path, outdir)", "def Dir(path=None):\n global _last_files\n if path:\n _last_files = glob.glob(path)\n if _last_files:\n return os.path.split(_last_files.pop(0))[1] # VB just returns the filename, not full path\n else:\n return \"\"", "def getDirectory(path):\n\tfrom os.path import split\n\tpath = normalizePath(path)\n\treturn split(path)[0]", "def _build_binary_file_path(program_path: pathlib.Path, build_dir: pathlib.Path, hex_file: bool) -> pathlib.Path:\n fw_fbase = build_dir / program_path.name\n fw_file = fw_fbase.with_suffix(\".hex\" if hex_file else \".bin\")\n if not fw_file.exists():\n raise BinaryFileNotFoundError(f\"Build program file (firmware) not found {fw_file}\")\n return fw_file", "def find_file_by_binary(**kwargs):\n return AppServer.service.find_file_by_binary(binary=kwargs['binary'])", "def _extract_tar_dir(tar, dirname, b_dest):\n member_names = [to_native(dirname, errors='surrogate_or_strict')]\n\n # Create list of members with and without trailing separator\n if not member_names[-1].endswith(os.path.sep):\n member_names.append(member_names[-1] + os.path.sep)\n\n # Try all of the member names and stop on the first one that are able to successfully get\n for member in member_names:\n try:\n tar_member = tar.getmember(member)\n except KeyError:\n continue\n break\n else:\n # If we still can't find the member, raise a nice error.\n raise AnsibleError(\"Unable to extract '%s' from collection\" % to_native(member, errors='surrogate_or_strict'))\n\n b_dir_path = os.path.join(b_dest, to_bytes(dirname, errors='surrogate_or_strict'))\n\n b_parent_path = os.path.dirname(b_dir_path)\n try:\n os.makedirs(b_parent_path, mode=0o0755)\n except OSError as e:\n if e.errno != errno.EEXIST:\n raise\n\n if tar_member.type == tarfile.SYMTYPE:\n b_link_path = to_bytes(tar_member.linkname, errors='surrogate_or_strict')\n if not _is_child_path(b_link_path, b_dest, link_name=b_dir_path):\n raise AnsibleError(\"Cannot extract symlink '%s' in collection: path points to location outside of \"\n \"collection '%s'\" % (to_native(dirname), b_link_path))\n\n os.symlink(b_link_path, b_dir_path)\n\n else:\n if not os.path.isdir(b_dir_path):\n os.mkdir(b_dir_path, 0o0755)", "def __extract_zip(self):\n archive_binaries_dir = None\n zip_file = zipfile.ZipFile(self.archive)\n try:\n extract_dir = tempfile.mkdtemp()\n archive_binaries_dir = self.__create_extraction_dir(\n zip_file.namelist(), extract_dir, zip_file.extract)\n finally:\n zip_file.close()\n return archive_binaries_dir, extract_dir", "def get_bam_file(dirpath, logger=default_logger):\n\n if not os.path.isdir(dirpath):\n msg = \"Directory for BAM file does not exist: %s\" % dirpath\n logger.error(msg)\n raise BAMException(msg)\n\n for filename in os.listdir(dirpath):\n if 
filename.endswith(\".bam\"):\n return os.path.join(dirpath, filename)", "def get_bam_file(dirpath, logger=default_logger):\n\n if not os.path.isdir(dirpath):\n msg = \"Directory for BAM file does not exist: %s\" % dirpath\n logger.error(msg)\n raise BAMException(msg)\n\n for filename in os.listdir(dirpath):\n if filename.endswith(\".bam\"):\n return os.path.join(dirpath, filename)" ]
[ "0.57858336", "0.5713639", "0.5676852", "0.5615714", "0.5572907", "0.541237", "0.53744745", "0.5344345", "0.5302235", "0.5284864", "0.52758974", "0.52424115", "0.51901644", "0.5142074", "0.51340926", "0.5104739", "0.50982463", "0.50942296", "0.50817", "0.50708187", "0.5032798", "0.50284404", "0.5020557", "0.50128627", "0.49849322", "0.49758193", "0.4941741", "0.49287906", "0.49152306", "0.49152306" ]
0.65847987
0
Creates an Accordion widget and yields under care of its output capturer.
def _accordion_widget(self, title, height='300px', collapsed=True): # Generate unique class for multiple invocations unique_class = self._append_random_id('nb-console-output') auto_scroll_script = ''' const config = { childList: true, subtree: true }; const callback = function(mutationsList, observer) { for(let mutation of mutationsList) { if (mutation.type === 'childList') { var scrollContainer = document.querySelector(".%s"); scrollContainer.scrollTop = scrollContainer.scrollHeight; } } }; const addObserver = function() { const accordion = document.querySelector(".%s"); accordion.parentElement.style.backgroundColor = "black"; observer.observe(accordion, config); } const observer = new MutationObserver(callback); if (document.querySelector(".%s")) { addObserver(); } else { // Add a small delay in case the element is not available on the DOM yet window.setTimeout(addObserver, 100); } ''' % (unique_class, unique_class, unique_class) terminalStyling = ( '<style>.%s { background-color: black;} .%s pre { color: white; }</style>' ) % (unique_class, unique_class) def set_output_glyph(glyph): folder.set_title(0, f'{glyph} {title}') def expand(): folder.selected_index = 0 def collapse(): folder.selected_index = 0 folder.selected_index = None layout = ipywidgets.Layout(height=height, overflow_y="scroll") outputter = ipywidgets.Output(layout=layout) outputter.add_class(unique_class) outputter.append_display_data(Javascript(auto_scroll_script)) outputter.append_display_data(ipywidgets.HTML(terminalStyling)) folder = ipywidgets.Accordion(children=[outputter]) folder.selected_index = None if collapsed is True else 0 set_output_glyph(' ') display(folder) # Capture the output context. with outputter: yield expand, collapse, set_output_glyph
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_accordion(summary, main):\n return \"\"\"\n <details>\n <summary>{}</summary>\n <main>{}</main>\n </details>\n \"\"\".format(summary, main)", "def render_accordion(request, course, chapter, section, field_data_cache):\r\n # grab the table of contents\r\n user = User.objects.prefetch_related(\"groups\").get(id=request.user.id)\r\n request.user = user\t # keep just one instance of User\r\n toc = toc_for_course(user, request, course, chapter, section, field_data_cache)\r\n\r\n context = dict([\r\n ('toc', toc),\r\n ('course_id', course.id.to_deprecated_string()),\r\n ('csrf', csrf(request)['csrf_token']),\r\n ('due_date_display_format', course.due_date_display_format)\r\n ] + template_imports.items())\r\n return render_to_string('courseware/accordion.html', context)", "def render(self):\n content = self._render_pre_content('div')\n for widget in self._child_widgets:\n content += \"\\n\" + widget.render()\n content += self._render_post_content('div')\n content += \"\"\"<script>\n $(function(){\n $(\"#%s\").accordion({\n collapsible: %s,\n icons: %s,\n heightStyle: \"%s\"\n });\n });\n </script>\n \"\"\" % (self._name, \"true\" if self._collapsible else \"false\",\n self._icons, \"fill\" if self._fill_space else \"\")\n self._widget_content = content\n return self._widget_content", "def compose(self, golden_layout_string, **kwargs):\n\n controllers = self.main_controller.control_panels.values()\n template_code = ReadString(\n self.jinja_base_string_template.substitute(main_body=golden_layout_string)\n )\n self.template_cls._template = template_code\n\n template = self.template_cls(title=self.title, theme=self.theme_cls, **kwargs)\n controls = pn.Accordion(\n *[controller.panel for controller in controllers],\n toggle=True,\n sizing_mode=\"fixed\",\n width=SIDEBAR_WIDTH,\n )\n\n template.sidebar.append(controls)\n\n for panel_ID, panel in self.panels.items():\n template._render_items[panel_ID] = (panel, [\"main\"])\n\n return template", "def create_page(self, parent):\n ui = self.edit_traits(parent=parent, kind=\"subpanel\")\n return ui.control", "def create(dlg):\n page = CondaPage()\n return page", "def _create_analyses_input_page(self, notebook): # pylint: disable=R0914\r\n\r\n # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #\r\n # Build-up the containers for the tab. #\r\n # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #\r\n _hbox = gtk.HPaned()\r\n\r\n _fixed = gtk.Fixed()\r\n\r\n _frame = Widgets.make_frame(label=_(u\"Analysis Inputs\"))\r\n _frame.set_shadow_type(gtk.SHADOW_ETCHED_IN)\r\n _frame.add(_fixed)\r\n\r\n _hbox.pack1(_frame, True, True)\r\n\r\n # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #\r\n # Place the widgets used to display analysis input information. 
#\r\n # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #\r\n # Load the gtk.ComboBox() widgets.\r\n _results = [[u\"MCF\"], [u\"Kaplan-Meier\"], [_(u\"NHPP - Power Law\")],\r\n [u\"NHPP - Loglinear\"], [_(u\"Exponential\")],\r\n [_(u\"Lognormal\")], [_(u\"Normal\")], [u\"Weibull\"],\r\n [\"WeiBayes\"]]\r\n Widgets.load_combo(self.cmbDistribution, _results)\r\n _results = [[_(u\"Lower One-Sided\")], [_(u\"Upper One-Sided\")],\r\n [_(u\"Two-Sided\")]]\r\n Widgets.load_combo(self.cmbConfType, _results)\r\n _results = [[_(u\"Crow (NHPP Only)\")], [_(u\"Duane (NHPP Only)\")],\r\n [_(u\"Fisher Matrix\")], [_(u\"Likelihood\")],\r\n [_(u\"Bootstrap\")]]\r\n Widgets.load_combo(self.cmbConfMethod, _results)\r\n _results = [[\"MLE\"], [_(u\"Regression\")]]\r\n Widgets.load_combo(self.cmbFitMethod, _results)\r\n\r\n # Create the labels for the left half of the right side.\r\n _labels = [_(u\"Assembly:\"), _(u\"Description:\"), _(u\"Distribution:\"),\r\n _(\"Fit Method:\"), _(u\"Confidence:\"), _(u\"Confidence Type:\"),\r\n _(\"Confidence Method:\")]\r\n (_x_pos1, _y_pos1) = Widgets.make_labels(_labels, _fixed, 5, 5)\r\n _x_pos1 += 55\r\n\r\n # Create the labels for the right half of the right side.\r\n _labels = [_(u\"Start Time:\"), _(u\"End Time:\"), _(u\"Step Interval:\"),\r\n _(u\"Start Date:\"), _(u\"End Date:\")]\r\n (_x_pos2,\r\n _y_pos2) = Widgets.make_labels(_labels, _fixed, _x_pos1 + 215, 5)\r\n _x_pos2 += _x_pos1\r\n _x_pos2 += 275\r\n\r\n # Place widgets on the left side.\r\n _fixed.put(self.cmbAssembly, _x_pos1, _y_pos1[0])\r\n _fixed.put(self.txtDescription, _x_pos1, _y_pos1[1])\r\n _fixed.put(self.cmbDistribution, _x_pos1, _y_pos1[2])\r\n _fixed.put(self.cmbFitMethod, _x_pos1, _y_pos1[3])\r\n _fixed.put(self.txtConfidence, _x_pos1, _y_pos1[4])\r\n _fixed.put(self.cmbConfType, _x_pos1, _y_pos1[5])\r\n _fixed.put(self.cmbConfMethod, _x_pos1, _y_pos1[6])\r\n\r\n # Place widgets on the right side.\r\n _fixed.put(self.txtStartTime, _x_pos2, _y_pos2[0])\r\n _fixed.put(self.txtEndTime, _x_pos2, _y_pos2[1])\r\n _fixed.put(self.txtRelPoints, _x_pos2, _y_pos2[2])\r\n _fixed.put(self.txtStartDate, _x_pos2, _y_pos2[3])\r\n _fixed.put(self.btnStartDate, _x_pos2 + 105, _y_pos2[3])\r\n _fixed.put(self.txtEndDate, _x_pos2, _y_pos2[4])\r\n _fixed.put(self.btnEndDate, _x_pos2 + 105, _y_pos2[4])\r\n _fixed.put(self.chkGroup, _x_pos2, _y_pos2[4] + 30)\r\n _fixed.put(self.chkParts, _x_pos2, _y_pos2[4] + 60)\r\n\r\n _fixed.show_all()\r\n\r\n # Insert the tab.\r\n _label = gtk.Label()\r\n _label.set_markup(\"<span weight='bold'>\" +\r\n _(u\"Analysis\\nInputs\") + \"</span>\")\r\n _label.set_alignment(xalign=0.5, yalign=0.5)\r\n _label.set_justify(gtk.JUSTIFY_CENTER)\r\n _label.show_all()\r\n _label.set_tooltip_text(_(u\"Displays analysis inputs for the selected \"\r\n u\"dataset.\"))\r\n notebook.insert_page(_hbox, tab_label=_label, position=-1)\r\n\r\n return False", "def create_widget(self):\n self.widget = QCustomActionGroup(self.parent_widget())", "def ca(self, width=100, height=100, **options):\n return self.lumpy.widget(DiagCanvas, width=width, height=height,\n **options)", "def write_chapter(self):\n\n decision_table_index = self.tables.index(self.DECISION_TABLE)\n decision = DropDownLists.get_from_table(self.text_input_soup, decision_table_index)\n\n if decision[0] == 'Yes':\n self.make_plots()\n\n time_on_tasks = ResultsChapter(self.report, self.text_input, self.text_input_soup, self.TITLE,\n self.tables, self.picture_paths, self.parameters)\n\n self.report.add_paragraph(self.TITLE, 
self.TITLE_STYLE)\n\n # add bar plot or box plot depending on the choice of plot type or do nothing if no cGOM data is provided\n try:\n if self.plot_type == 'Bar plot':\n Picture.add_picture_and_caption(self.report,\n [self.BAR_PLOT_FIGURE_PATH],\n self.BAR_PLOT_FIGURE_PATH,\n self.BAR_PLOT_CAPTION,\n width=Cm(12)\n )\n if self.plot_type == 'Box plot':\n Picture.add_picture_and_caption(self.report,\n [self.BOX_PLOT_FIGURE_PATH],\n self.BOX_PLOT_FIGURE_PATH,\n self.BOX_PLOT_CAPTION,\n width=Cm(12)\n )\n except FileNotFoundError:\n pass\n\n self.report.add_paragraph(self.DISCUSSION_TITLE, self.DISCUSSION_STYLE)\n time_on_tasks.write_chapter()", "def setUp(self):\r\n # Ensure that the superclass sets up\r\n super(ContainerBase, self).setUp()\r\n\r\n self.auth_page = AutoAuthPage(self.browser, staff=True)\r\n self.outline = CourseOutlinePage(\r\n self.browser,\r\n self.course_info['org'],\r\n self.course_info['number'],\r\n self.course_info['run']\r\n )\r\n\r\n self.container_title = \"\"\r\n self.group_a = \"Expand or Collapse\\nGroup A\"\r\n self.group_b = \"Expand or Collapse\\nGroup B\"\r\n self.group_empty = \"Expand or Collapse\\nGroup Empty\"\r\n self.group_a_item_1 = \"Group A Item 1\"\r\n self.group_a_item_2 = \"Group A Item 2\"\r\n self.group_b_item_1 = \"Group B Item 1\"\r\n self.group_b_item_2 = \"Group B Item 2\"\r\n\r\n self.group_a_handle = 0\r\n self.group_a_item_1_handle = 1\r\n self.group_a_item_2_handle = 2\r\n self.group_empty_handle = 3\r\n self.group_b_handle = 4\r\n self.group_b_item_1_handle = 5\r\n self.group_b_item_2_handle = 6\r\n\r\n self.group_a_item_1_action_index = 0\r\n self.group_a_item_2_action_index = 1\r\n\r\n self.duplicate_label = \"Duplicate of '{0}'\"\r\n self.discussion_label = \"Discussion\"\r\n\r\n self.setup_fixtures()\r\n\r\n self.auth_page.visit()", "def visit_exercise_node(self, node):\n self.body.append(self.starttag(\n node, 'div', CLASS=('admonition exercise')))", "def create_widgets(self):", "def create_panel(self):\n return\n # return Panel(self)", "def createWidget(self, parent):\n raise NotImplementedError()", "def create_widget(self):\n pass", "def create(self, parent):\n self.widget = QtGui.QScrollArea(parent)", "def create_widgets( self ):", "def createInitialChildren(dash_instance):\n\t\n\t# create the header for the page\n\tamerenImage = createLogo(dash_instance)\n\theader = html.Div([\n\t\t\tamerenImage,\n\t\t\thtml.Label(\n\t\t\t\t\"Solarwinds Dashboard\",\n\t\t\t\tstyle=dict(fontSize=60, fontWeight='bold', fontStyle='italic')\n\t\t\t)\n\t\t],\n\t\tid='header',\n\t\tstyle={\n\t\t\t'height': '250px',\n 'backgroundColor': 'green',\n\t\t\t'textAlign': 'center',\n\t\t\t'verticalAlign': 'middle',\n\t\t\t'align': 'center'\n\t\t}\n\t)\n\t\n\tdown_item_fieldset = defineDownItemFieldset(dash_instance)\n\tstatistics_fieldset = defineStatisticsFieldset(dash_instance)\n\tpage_break = createPageBreak(dash_instance)\n\t\n\t# add the children\n\tdash_instance.addChildren(\n\t\theader,\n\t\tdown_item_fieldset,\n\t\tpage_break,\n\t\tstatistics_fieldset,\n\t\tdcc.Location(id='startPage', refresh=False)\n\t)", "def create_widgets(self):\n # only ever shown card in player's hand, so create widgets when dealt\n self.name_label = tk.Label(self, text=self.name)\n self.ability_label = tk.Label(self, text=self.ability)\n self.food_label = tk.Label(self, text=\"Food: \" + str(self.food))\n self.use_button = TraitUseButton(self, text=\"USE\", command=self.use)", "def _build(self):\n tab = self._tab\n tab.setModel(self._default_model)\n tab.setEnabled(False)\n 
tab.horizontalHeader().setClickable(False)\n if not self._connect:\n sig = SIG(\"sectionClicked(int)\")\n self.connect(tab.verticalHeader(), sig, self.setdata)\n self._connect = True", "def test_acid_block_preview(self):\r\n\r\n self.outline.visit()\r\n subsection = self.outline.section('Test Section').subsection('Test Subsection')\r\n unit = subsection.toggle_expand().unit('Test Unit').go_to()\r\n container = unit.components[0].go_to_container()\r\n\r\n acid_block = AcidView(self.browser, container.xblocks[0].preview_selector)\r\n self.validate_acid_block_preview(acid_block)", "def test_accordion_state(self):\r\n email, password = self.STUDENT_INFO[0]\r\n self.login(email, password)\r\n self.enroll(self.course, True)\r\n self.enroll(self.test_course, True)\r\n\r\n # Now we directly navigate to a section in a chapter other than 'Overview'.\r\n check_for_get_code(self, 200, reverse(\r\n 'courseware_section',\r\n kwargs={\r\n 'course_id': self.course.id.to_deprecated_string(),\r\n 'chapter': 'factory_chapter',\r\n 'section': 'factory_section'\r\n }\r\n ))\r\n\r\n # And now hitting the courseware tab should redirect to 'factory_chapter'\r\n resp = self.client.get(reverse('courseware',\r\n kwargs={'course_id': self.course.id.to_deprecated_string()}))\r\n\r\n self.assertRedirects(resp, reverse('courseware_chapter',\r\n kwargs={'course_id': self.course.id.to_deprecated_string(),\r\n 'chapter': 'factory_chapter'}))", "def _create_canvas(self, parent):\n # The panel lets us add additional controls.\n panel = wx.Panel(parent, -1, style=wx.CLIP_CHILDREN)\n sizer = wx.BoxSizer(wx.VERTICAL)\n panel.SetSizer(sizer)\n # matplotlib commands to create a canvas\n mpl_control = FigureCanvas(panel, -1, self.value)\n sizer.Add(mpl_control, 1, wx.LEFT | wx.TOP | wx.GROW)\n toolbar = NToolbar(mpl_control)\n sizer.Add(toolbar, 0, wx.EXPAND)\n self.value.canvas.SetMinSize((10,10))\n return panel", "def _build(self):\n tab = self._tab\n tab.setModel(self._model)\n tab.horizontalHeader().setClickable(False)\n sig = SIG(\"sectionClicked(int)\")\n connect(tab.verticalHeader(), sig, self.remove_cond)\n if self._grp_colm==\"New_coln\":\n tab.setItemDelegate(TextDelegate(self))\n tab.setItemDelegateForColumn(1, AstGroupDelegate(self,self._grp_names1))\n\n else :\n tab.setItemDelegate(ValueDelegate(self))\n tab.setItemDelegateForColumn(0, AstGroupDelegate(self, self._grp_names))", "def create_widgets(self):\r\n self.create_containers()\r\n self.setup_containers()\r\n self.create_panel_widgets()\r\n self.setup_scrollbar()", "def get_text(self, course):\r\n return views.render_accordion(\r\n self.request, course, course.get_children()[0].scope_ids.usage_id.to_deprecated_string(), None, None\r\n )", "def create_widget(self):\n self.widget = wxDockPane(self.parent_widget())", "def adv_new_window(self):\n adv=workflow.advancedoptions_w.ADialog()\n adv.exec_()", "def __init__(self, handle):\r\n self.context = {}\r\n self.enyo = None\r\n self.showconsole = False\r\n activity.Activity.__init__(self, handle)\r\n\r\n self.max_participants = 1\r\n\r\n self.make_toolbar()\r\n self.make_mainview()" ]
[ "0.5763516", "0.5742236", "0.53615534", "0.50737333", "0.50410753", "0.5024527", "0.49162087", "0.4911946", "0.48652512", "0.47383875", "0.45927936", "0.44744775", "0.44410545", "0.44230998", "0.43919158", "0.4383531", "0.43767828", "0.43757036", "0.4363259", "0.4358932", "0.43554065", "0.43547162", "0.4344721", "0.43391863", "0.43360436", "0.43218157", "0.4319858", "0.43163812", "0.42997655", "0.4279858" ]
0.72215044
0
Bootstraps a pex with widget UI display.
def _bootstrap_pex(self, pex_path: pathlib.PosixPath): title = f'[Bootstrap] {pex_path.name}' with self._accordion_widget(title) as (expand, collapse, set_output_glyph): try: with environment_as(PEX_VERBOSE='2'): # Scrub the environment. _scrub_import_environment(self._ORIGINATING_SYS_MODULES_KEYS, self._display_line) # Bootstrap pex. bootstrap_pex_env(pex_path) except Exception: try: set_output_glyph(FAIL_GLYPH) expand() finally: raise else: self._display_line(f'Successfully bootstrapped pex environment from {pex_path}\n') set_output_glyph(SUCCESS_GLYPH) collapse()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bootstrap():\n Bootstrap()", "def bootstrap(self):\n None", "def create_widgets(self):", "def _do_bootstrap(self, configs=None):\n pass", "def create_widgets( self ):", "def widgets(self):\r\n self.setWindowTitle(\"PyCrypt\")\r\n self.setMinimumSize(QSize(500, 500))\r\n self.setMaximumSize(QSize(500, 500))\r\n# Adding the sub def for widgets etc\r\n self.add_menus_and_status()\r\n self.add_buttons()", "def create_widgets(self):\r\n self.create_containers()\r\n self.setup_containers()\r\n self.create_panel_widgets()\r\n self.setup_scrollbar()", "def buildmainframe(self):\n self.mainframewidgets=[]\n for x in range(3):\n thislabel = Label(self.mainframe, text=str(x))\n thislabel.grid()\n self.mainframewidgets.append(thislabel)", "def main():\n LayoutsWithPanels().mainloop()", "def widgets(overwrite=True):\n install_nbextension(os.path.join(PKGPATH, 'static'),\n destination='molviz',\n overwrite=overwrite)", "def create_widget(self):\n pass", "def init_widget(self):\n self._build_config()\n self._raw_toolbar.initToolbar(self.config)", "def bootstrap(): # pragma: no cover, exercised via test_bootstrap() functional test\n pspec = PackageSpec(CFG, \"%s==%s\" % (PICKLEY, __version__))\n grand_parent = runez.parent_folder(runez.parent_folder(__file__))\n if grand_parent and grand_parent.endswith(\".whl\"):\n # We are indeed running from pex\n setup_audit_log()\n python = CFG.find_python(\"/usr/bin/python3\") # Prefer system py3, for stability\n if not python or python.problem:\n python = pspec.python\n\n LOG.debug(\"Bootstrapping pickley %s with %s (re-installing as venv instead of pex package)\" % (pspec.version, python))\n target = pspec.install_path\n venv = PythonVenv(target, python, pspec.index)\n venv.pip_install(\"wheel\")\n with runez.TempFolder():\n venv.run_python(\"-mwheel\", \"pack\", grand_parent)\n names = os.listdir(\".\")\n assert len(names) == 1\n venv.pip_install(names[0])\n\n delivery = DeliveryMethod.delivery_method_by_name(pspec.settings.delivery)\n return delivery.install(pspec, venv, {PICKLEY: \"bootstrapped\"})\n\n else:\n manifest = pspec.get_manifest()\n if not manifest:\n # We're not running from pex, but we need to re-install pickley with latest version, so it gets a manifest etc\n return perform_install(pspec, is_upgrade=False, quiet=False)", "def setup(app):\n app.add_css_file('collapsible_container.css')\n app.connect('builder-inited', patch_automodapi)", "def main():\n PanelDemo().mainloop()", "def generate_widgets(self):\n\n self.plot_canvas()\n\n # Select which m component to plot ------------------------------------\n\n mcomp_dropdown = ipw.Dropdown(options=['x', 'y', 'z', 'hls'],\n # description=r'$\\text{Plot}\\,\\,m_{i}$',\n value='z'\n )\n mcomp_dropdown.observe(lambda b: self.static_plot_component_colorbar(\n m_component=mcomp_dropdown.value))\n mcomp_dropdown_text = ipw.Label(r'$\\text{Plot}\\,\\,m_{i}$')\n mcomp_dropdown_text.width = '100px'\n\n mcomp_box = ipw.HBox(children=[mcomp_dropdown_text, mcomp_dropdown])\n\n # Button to start relaxation ------------------------------------------\n\n # We use two boxes with the maximum time and the number of steps\n run_box = ipw.FloatText()\n run_box_2 = ipw.FloatText()\n # Default values for the boxes\n run_box.value = 10\n run_box_2.value = 100\n # Change left and right margins\n run_box.layout.margin = '0px 50px'\n run_box.layout.width = '100px'\n run_box_2.layout.margin = '0px 50px'\n run_box_2.layout.width = '100px'\n\n # The button to start the relaxation of the system, using the values in\n # the 
boxes\n run_button = ipw.Button(description='Run sim!')\n run_button.layout.margin = '0px 50px'\n # The action requires a variable for the button (?). We just\n # impose it in a lambda function\n run_button.on_click(lambda b: self.relax(b,\n max_time=run_box.value * 1e-9,\n time_steps=run_box_2.value,\n m_component=mcomp_dropdown.value\n )\n )\n # run_button.margin = '0px 20px'\n run_button.layout.background_color = '#9c0909'\n run_button.color = 'white'\n\n run_box_text = ipw.Label(r'$\\text{Time [ns]}$')\n run_box_text.width = '50px'\n run_box_2_text = ipw.Label(r'$\\text{Steps}$')\n run_box_2_text.width = '50px'\n\n # Align everything in a horizontal box\n run_container = ipw.HBox(children=[run_box_text, run_box,\n run_box_2_text, run_box_2,\n run_button]\n )\n\n # Boxes to update the Zeeman field ------------------------------------\n\n zeeman_box_texts = [ipw.Label(r'$B_x\\,\\,\\text{[T]}$'),\n ipw.Label(r'$B_y\\,\\,\\text{[T]}$'),\n ipw.Label(r'$B_z\\,\\,\\text{[T]}$')\n ]\n for text in zeeman_box_texts:\n text.width = '50px'\n\n zeeman_box_x = ipw.FloatText()\n zeeman_box_y = ipw.FloatText()\n zeeman_box_z = ipw.FloatText()\n\n # Show the default value in the box\n zeeman_box_x.value = self.B[0]\n zeeman_box_x.layout.width = '50px'\n zeeman_box_x.layout.margin = '0px 50px'\n zeeman_box_y.value = self.B[1]\n zeeman_box_y.layout.width = '50px'\n zeeman_box_y.layout.margin = '0px 50px'\n zeeman_box_z.value = self.B[2]\n zeeman_box_z.layout.width = '50px'\n zeeman_box_z.layout.margin = '0px 50px'\n\n # Update the simulation using a button\n zeeman_button = ipw.Button(description='Update field')\n zeeman_button.on_click(\n lambda button: self.update_Zeeman_field((zeeman_box_x.value,\n zeeman_box_y.value,\n zeeman_box_z.value,\n )\n )\n )\n\n # Draw the default Field in the plot title\n self.title_text.set_text('Field: {} T'.format(\n self.sim.get_interaction('Zeeman').B0))\n self.fig.canvas.draw()\n\n zeeman_container = ipw.HBox(children=[zeeman_box_texts[0],\n zeeman_box_x,\n zeeman_box_texts[1],\n zeeman_box_y,\n zeeman_box_texts[2],\n zeeman_box_z,\n zeeman_button\n ]\n )\n\n # DMI magnitude box ---------------------------------------------------\n\n DMI_box_text = ipw.Label(r'$\\text{DMI [meV]}$')\n DMI_box_text.width = '50px'\n\n DMI_box = ipw.FloatText()\n\n # Show the default value in the box\n DMI_box.value = round(self.D / const.meV, 2)\n DMI_box.layout.width = '60px'\n DMI_box.layout.margin = '0px 50px'\n\n # Update the simulation using a button\n DMI_button = ipw.Button(description='Update')\n DMI_button.on_click(\n lambda button: self.update_DMI(DMI_box.value)\n )\n\n DMI_container = ipw.HBox(children=[DMI_box_text, DMI_box,\n DMI_button],\n )\n\n # Anisotropy -----------------------------------------------------------\n\n # anisotropy_box_text = ipw.Label(r'$\\text{Anisotropy [meV]}$')\n # anisotropy_box_text.width = '100px'\n\n # anisotropy_box = ipw.FloatText()\n\n # # Show the default value in the box\n # anisotropy_box.value = round(self.ku / const.meV, 2)\n # anisotropy_box.layout.width = '60px'\n # anisotropy_box.layout.margin = '0px 50px'\n\n # # Update the simulation using a button\n # anisotropy_button = ipw.Button(description='Update')\n # anisotropy_button.on_click(\n # lambda button: self.update_anisotropy(anisotropy_box.value)\n # )\n\n # anisotropy_container = ipw.HBox(children=[anisotropy_box_text,\n # anisotropy_box,\n # anisotropy_button]\n # )\n\n # Initial state -------------------------------------------------------\n\n # Options for the initial states. 
The values of the keys are the\n # initial magnetisation functions for Fidimag\n init_state_select = ipw.RadioButtons(\n options={'Skyrmion':\n lambda pos: self.skyrmion_m_field(pos, sign=1),\n '2-PI-Vortex':\n lambda pos: self.skyrmion_m_field(pos, sign=1,\n core=-1,\n pi_factor=2,\n out_skyrmion_dir=(0, 0, -1)\n ),\n '3-PI-Vortex':\n lambda pos: self.skyrmion_m_field(pos, sign=1,\n pi_factor=3,\n ),\n 'Helicoid':\n lambda pos: self.helicoid_m_field(pos),\n 'Sk-Helicoid':\n lambda pos: self.sk_helicoid_m_field(pos),\n 'Random':\n lambda pos: np.random.uniform(-1, 1, 3),\n 'Uniform':\n (0, 0, -1)\n },\n # description=r'$\\text{Initial State}$'\n )\n init_state_select.selected_label = 'Skyrmion'\n\n # We need the extra variable for the buttons action\n def update_state(b):\n self.generate_m_field(m_function=init_state_select.value)\n self.update_plot_component(mcomp_dropdown.value)\n\n # The selection changes are taken with the observe method\n init_state_select.observe(update_state)\n\n init_state_select_text = ipw.Label(r'$\\text{Initial State}$')\n init_state_select_text.width = '100px'\n\n init_state_select_box = ipw.HBox(children=[init_state_select_text,\n init_state_select]\n )\n\n # Pin boundaries button -----------------------------------------------\n\n pin_button = ipw.Button(description='Pin boundaries')\n pin_button.on_click(\n lambda button: self.pin_boundaries(plot_component=mcomp_dropdown.value)\n )\n\n unpin_button = ipw.Button(description='Unpin boundaries')\n unpin_button.on_click(\n lambda button: self.release_boundaries()\n )\n\n pin_box = ipw.HBox(children=[pin_button, unpin_button])\n pin_box.layout.margin = '20px 0px'\n\n # Display -------------------------------------------------------------\n\n display(mcomp_box)\n display(zeeman_container)\n display(DMI_container)\n # display(anisotropy_container)\n display(init_state_select_box)\n display(pin_box)\n display(run_container)\n\n # return", "def get_init_ui(self, container):\n w = self.get_frame(container)\n self.cols_configure(w)\n w.grid(row=0, column=0, sticky=tk.N+tk.W+tk.S+tk.E)\n\n return w", "def create_widget(self):\n self.widget = wxDockPane(self.parent_widget())", "def XPHideWidget(inWidget):\n pass", "def createWidgets(self):\n raise NotImplementedError", "def initWidgets(self):\r\n if self.autoExampleWidgets:\r\n self.initExampleWidgets()", "def create(self, parent):\n self.widget = wxBitmapWidget(parent)", "def bootstrap_hello():\n poolsize = [1, 2, 3, 4, 5, 6, 7, 8]\n kafka_throughput = [ dbwrapper.query_latest_throughput( \"kafka_%dprod\" % i) for i in poolsize ]\n kinesis_throughput = [ dbwrapper.query_latest_throughput(\"kinesis_%dprod\" % i) for i in poolsize ]\n\n return render_template('Bootstrap_hello.html', \n kafka_throughput=kafka_throughput,\n kinesis_throughput=kinesis_throughput)", "def getWidget(self):", "def main():\n app = guisupport.get_app_qt4()\n\n if INPROCESS:\n from qtconsole.inprocess import QtInProcessKernelManager\n km = QtInProcessKernelManager()\n else:\n from qtconsole.manager import QtKernelManager\n km = QtKernelManager()\n km.start_kernel()\n km.kernel.gui = 'qt4'\n kc = km.client()\n kc.start_channels()\n\n widget = RichJupyterWidget()\n widget.kernel_manager = km\n widget.kernel_client = kc\n if CLEANSHUTDOWN: # slow exit on CTRL+D\n def stop():\n kc.stop_channels()\n km.shutdown_kernel()\n app.exit()\n widget.exit_requested.connect(stop)\n else: # fast exit on CTRL+D\n widget.exit_requested.connect(app.quit)\n widget.show()\n guisupport.start_event_loop_qt4(app)", "def 
widgets(self):\n raise NotImplementedError(\"This method is not ready to be used yet\")", "def init_ui(self):\n self.parent.title(\"Roku Player Controller\")\n self.style.theme_use(\"default\")", "def widgetSetup(self):\n self.master.resizable(0, 0)\n self.master.iconbitmap('logo.ico')\n self.master.title(\"Ejercicio POO\")\n\n self.master.bind(\"<Return>\", lambda e: self.create())\n self.master.bind(\"<Delete>\", lambda e: self.delete())", "def bootstrap(element):\n element_type = element.__class__.__name__.lower()\n if element_type == 'boundfield':\n template = get_template(\"bootstrapform/field.html\")\n context = Context({'field': element})\n else:\n template = get_template(\"bootstrapform/form.html\")\n context = Context({'form': element})\n\n return mark_safe(template.render(context))", "def init_widget(self):" ]
[ "0.63333535", "0.5799136", "0.5798117", "0.57466966", "0.57229584", "0.5599561", "0.5393727", "0.5355387", "0.5324116", "0.5302776", "0.52597296", "0.5256661", "0.5252348", "0.52225083", "0.522084", "0.521721", "0.51877075", "0.5168642", "0.5132846", "0.51302344", "0.51216316", "0.5088113", "0.5071934", "0.506868", "0.50508654", "0.50493896", "0.5045291", "0.5040793", "0.5034797", "0.5029995" ]
0.61450976
1
Validates a given or stored path is a valid pants repo.
def _validate_pants_repo(self, pants_repo: pathlib.PosixPath) -> bool: return ( pants_repo and pants_repo.is_dir() and pants_repo.joinpath('pants').is_file() )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ValidateRepoPath(context, parameter, value):\n if value.startswith('/TEST/'):\n # Hackish command to allow for unit testing\n return value\n\n for name in ['BUILD.gn', '.gn', os.path.join('scripts', 'bootstrap.sh')]:\n expected_file = os.path.join(value, name)\n if not os.path.exists(expected_file):\n raise click.BadParameter(\n (\"'%s' does not look like a valid repository path: \"\n \"%s not found.\") % (value, expected_file))\n return value", "def validate(cls):\n for repository in cls.config['repositories']:\n if not os.path.isdir(repository['path']):\n LOGGER.error('Git Repo at ' + repository['path'] + ' not found')\n return False\n # Check for a repository with a local or a remote GIT_WORK_DIR\n if not os.path.isdir(os.path.join(repository['path'], '.git')) \\\n and not os.path.isdir(os.path.join(repository['path'], 'objects')):\n LOGGER.error('Directory ' + repository['path'] + ' is not a Git repository')\n return False\n return True", "def check_repo(self):\n if not os.path.exists(self.path):\n log.error(\"no dots repository found at '{}'\".format(self.path))\n if not os.path.exists(self.files_path):\n log.error(\"corrupted repository, the 'files' subfolder is missing\")\n if not os.path.exists(self.enc_files_path):\n log.error(\"corrupted repository, the 'encrypted' subfolder is missing\")\n if not os.path.exists(os.path.join(self.path, '.git')):\n log.error(\"corrupted repository, folder exists but is not versioned\")\n self.git_repo = Repo(self.path)", "def is_repo_root(path: str) -> bool:\n return os.path.isdir(os.path.join(path, \".repo\"))", "def validate_repo(parser, options):\n if not options.repo:\n return\n\n template = \"When specifying --repo, {0} is also required\"\n\n if not options.repo_os:\n parser.error(template.format(\"--repo-os\"))\n\n if not options.repo_url:\n parser.error(template.format(\"--repo-url\"))\n\n if not options.repo_id:\n parser.error(template.format(\"--repo-id\"))\n\n if not options.repo_name:\n parser.error(template.format(\"--repo-name\"))", "def _validate(self, s: str):\n if not os.path.exists(s):\n raise ValueError(('Path [\"{}\"] does not exist.').format(s))\n\n if not utils.is_valid_django_project(s):\n raise ValueError(\n ('Path [\"{}\"] does not contain a valid Django project.'\n ).format(s))", "def _validate(self, s: str):\n if not os.path.exists(s):\n raise ValueError(('Path [\"{}\"] does not exist.').format(s))\n\n if not utils.is_valid_django_project(s):\n raise ValueError(\n ('Path [\"{}\"] does not contain a valid Django project.'\n ).format(s))", "def test_path_not_repo(folder):\n\n with pytest.raises(ValueError):\n gitb.pull(folder)", "def _validate_path(self, path: str, is_file: bool) -> bool:\n is_valid_path = True\n if is_file and not os.path.isfile(path):\n is_valid_path = False\n elif not is_file and not os.path.isdir(path):\n is_valid_path = False\n if is_valid_path:\n logging.info('github_source_interceptor: Located path: ' + path)\n else:\n logging.error('github_source_interceptor: Could not locate path: ' + path)\n\n return is_valid_path", "def _check_dir(self, req, dir):\n if not os.path.isabs(dir):\n add_warning(req, _('The repository directory must be an absolute '\n 'path.'))\n return False\n prefixes = [os.path.join(self.env.path, prefix)\n for prefix in self.allowed_repository_dir_prefixes]\n if prefixes and not any(is_path_below(dir, prefix)\n for prefix in prefixes):\n add_warning(req, _('The repository directory must be located '\n 'below one of the following directories: '\n '%(dirs)s', dirs=', 
'.join(prefixes)))\n return False\n return True", "def is_valid_production_root(path: pathlib.Path) -> bool:\n if not path.is_absolute():\n return False\n if not path.exists():\n return False\n if not path.is_dir():\n return False\n config_file_path = get_production_config_file_path(path)\n return config_file_path.exists()", "def verify_path(path):\n if path is None:\n sys.exit('Program terminated. You must specify a correct path.')\n path = Path(path)\n assert path.exists(), f'The specified path was not found: {path}.'\n return path", "def ValidatePath(self, root_path: str) -> bool:\n if 'gold' in root_path:\n return True\n\n return False", "def path_validate(path):\n # functionality to be added later\n return path", "def check_repository(self, repo_type_key, value):\n def remove_tail(v, tail):\n if v.endswith(tail):\n v = v[:-len(tail)]\n return v\n\n for v in self.c.repositories.get(repo_type_key, ()):\n if remove_tail(v, '.git') == remove_tail(value, '.git'):\n return True\n return False", "def is_git_repo(template_repo):\n return template_repo.startswith(\"git@\") or \\\n template_repo.startswith(\"https://\")", "def is_git_repo(template_repo):\n return template_repo.startswith(\"git@\") or \\\n template_repo.startswith(\"https://\")", "def test_repo_does_not_exist(tmp_path):\n ProjectMock(tmp_path).style(\n \"\"\"\n [[\".pre-commit-config.yaml\".repos]]\n repo = \"local\"\n \"\"\"\n ).pre_commit(\n \"\"\"\n repos:\n - hooks:\n - id: whatever\n \"\"\"\n ).api_check_then_fix(\n Fuss(False, PRE_COMMIT_CONFIG_YAML, 333, \": repo 'local' does not exist under 'repos'\")\n )", "def test_path(tmp_path: Path) -> None:\n path = tmp_path / \"repository\"\n repository = Repository.init(path)\n assert path == repository.path", "def _check_repository_directory(self):\n if not os.path.exists(self._repository_path):\n os.makedirs(self._repository_path)", "def ValidatePath(self, root_path: str) -> bool:\n if 'silver' in root_path:\n return True\n\n return False", "def test_add_invalid_svn_repo(self):\n pass", "def test_unknownRepository(self):\n self.assertRaises(NotWorkingDirectory, getRepositoryCommand, self.repos)", "def _check_repository(self):\n if not os.path.exists(\"%s/.git\" % self._repository_path):\n Repo.clone_from(self.REPOSITORY_ADDRESS, self._repository_path)\n\n self._repo = Repo(self._repository_path)\n self._pull()", "def test_github_path_purepath():\n p = github_api.GithubPath('/tensorflow/datasets/tree/master/')\n sub_p = p / 'some_folder'\n assert isinstance(sub_p, github_api.GithubPath)\n assert str(p) == '/tensorflow/datasets/tree/master'\n assert p == github_api.GithubPath.from_repo('tensorflow/datasets')", "def test_no_repo_url(self):\n data = self._get_form_data(repo_url='')\n form = self._get_form(data=data)\n self.assertTrue(self._validate_form(form), form.errors)", "def validpath(self, path):\n root = self.realpath(self.root)\n path = self.realpath(path)\n if not self.root.endswith(os.sep):\n root = self.root + os.sep\n if not path.endswith(os.sep):\n path = path + os.sep\n if path[0:len(root)] == root:\n return True\n return False", "def check_valid_path(path):\n\n path = os.path.normpath(path)\n if not os.path.exists(path):\n print(f\"{path} doesn't exist\")\n print('Code execution exit')\n sys.exit()", "def test_no_repo_url(self):\n data = self._get_form_data(repo_url='')\n form = self._get_form(data=data)\n self.assertTrue(form.is_valid(), form.errors)", "def test_repository(self):\n path = Template().get_repository()\n self.assertTrue(os.path.exists(path))" ]
[ "0.6693658", "0.64390516", "0.6414603", "0.6258947", "0.62280005", "0.6122691", "0.6122691", "0.61224514", "0.60444325", "0.5846379", "0.5826569", "0.58253604", "0.5780301", "0.57039803", "0.57003903", "0.5675232", "0.5675232", "0.5665029", "0.5649705", "0.56372535", "0.5635165", "0.5598097", "0.5576563", "0.555578", "0.5546127", "0.5496154", "0.5481367", "0.5471405", "0.5454932", "0.54524803" ]
0.77448916
0
Updates the important game information for each round of play. In this case, that means the puzzle is revealed and the jumper is cut if necessary.
def _do_updates(self): is_right = self._puzzle.is_guess_right() if is_right: self._puzzle.reveal_puzzle() else: self._jumper.cut_line()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def game_updated(self):\n # <<-- Creer-Merge: game-updated -->> - Code you add between this comment and the end comment will be preserved between Creer re-runs.\n game = self.game\n\n for x in range(game.board_width):\n for y in range(game.board_height):\n self.checkers_map[x][y] = None\n\n self.force_checker = None\n self.cant_move = False\n for checker in game.checkers:\n self.checkers_map[checker.x][checker.y] = checker\n if checker.owner is self.player and checker is game.checker_moved:\n if game.checker_moved_jumped:\n self.force_checker = checker\n else:\n self.cant_move = True\n # <<-- /Creer-Merge: game-updated -->>", "def game_updated(self):\n\n # replace with your game updated logic\n self.update_board()", "def update_game(game, episode, buttons,run_status):\n\n game_round = game.round\n if game_round == 'newround':\n newround(game)\n print(game_round)\n game.round = 'preflop'\n print(game.round)\n\n\n return 'go'\n elif game_round == 'preflop':\n check = preflop(game, episode, buttons)\n if check == True:\n game.round = 'flop'\n return 'go'\n elif check == False:\n game.round = 'showdown'\n print(game.round)\n return 'go'\n elif check == 'no input':\n game.round = 'preflop'\n return 'stop'\n return 'stop'\n elif game_round == 'flop':\n check = flop(game, episode, buttons, run_status)\n if check == True:\n game.round = 'turn'\n return 'go'\n elif check == False:\n game.round = 'showdown'\n return 'go'\n elif check == 'no input':\n game.round = 'flop'\n return 'stop'\n return 'stop'\n pass\n elif game_round == 'turn':\n check = turn(game, episode, buttons, run_status)\n if check == True:\n game.round = 'river'\n return 'go'\n elif check == False:\n game.round = 'showdown'\n return 'go'\n elif check == 'no input':\n game.round = 'turn'\n return 'stop'\n return 'stop'\n elif game_round == 'river':\n check = turn(game, episode, buttons, run_status)\n if check == True:\n game.round = 'showdown'\n return 'go'\n elif check == False:\n game.round = 'showdown'\n return 'go'\n elif check == 'no input':\n game.round = 'river'\n return 'stop'\n return 'stop'\n elif game_round == 'showdown':\n showdown(game, episode)\n #game.player1.wager = 100\n #game.player2.wager = 50\n # game.update_tablepot()\n game.round = 'newround'\n return 'go'\n pass", "def update(self, game: hlt.Game):\n self.game_map = game.game_map\n self.player = game.me\n self.ships = game.me.get_ships()\n self.calc_total_halite()\n self.command_queue = []\n self.commands = {}\n self.commands_queue = {}\n self._initialize_ship_states()\n self._reset_current_moves()\n self.richest_clusters()\n self.check_endgame()", "def update_state_game_variables(self):\n self.model.numbers = [0, 1, 2, 3, 4, 5, 6, 7, 8]\n self.model.player_mark = \"\"\n self.model.player_move = 0\n self.model.boards = [\"board\"] * 9\n self.update_score_board()", "def play_one_round(self):\r\n new_solutions = self.breeder.breed(self.solutions)\r\n self.solutions.clear()\r\n self.solutions.extend(new_solutions)\r\n self.mutation_maker.mutate(self.solutions)\r\n self.round += 1\r\n self.simulation_stats.add_stats(self.round, self.solutions)\r\n if self.simulation_viz is SimulationViz.FRONT:\r\n self.report_progress()", "def update(self):\n if(self.game_on is True and\n (self.player_turn is True or self.computer_turn is True)):\n self.board.display()\n if(self.board.tiles.count == self.SPOT * self.SPOT or\n self.no_more_move()):\n self.player_turn = False\n self.computer_turn = False\n if(self.no_more_move()):\n print(\"No legal move for both!\")\n 
print(\"Game Over!\")\n if(len(self.board.tiles.blacks) ==\n len(self.board.tiles.whites)):\n self.tie = True\n elif(len(self.board.tiles.blacks) >\n len(self.board.tiles.whites)):\n self.black_wins = True\n elif(len(self.board.tiles.whites) >\n len(self.board.tiles.blacks)):\n self.white_wins = True\n self.winner(len(self.board.tiles.blacks),\n len(self.board.tiles.whites))\n elif(self.game_on is False):\n return\n elif(self.player_turn is False and self.computer_turn is False):\n self.make_record()", "def update(self):\n # If the game is not over.\n if self.tictactoe:\n # If the player is human, the choice can only be detected by the game,\n # because the player has no responsibility over the window\n if self.player.chooser:\n choice = self.choice\n self.choice = None\n else:\n choice = self.player.play(self.tictactoe)\n self.tictactoe.choose(choice)\n else:\n if self.on:\n self.end_time = time.time()\n self.on = False\n if self.restart_at_end:\n if time.time() - self.end_time > self.end_duration:\n self.restart()", "def play(self):\r\n\r\n gh.report_legend()\r\n\r\n for ship in self.__ships:\r\n ship.set_orientation()\r\n\r\n ship_coords = [ship.coordinates() for ship in self.__ships]\r\n ship_coords = [i for lst in ship_coords for i in lst]\r\n\r\n print(gh.board_to_string(self.__board_size, [], {}, [], ship_coords))\r\n\r\n while self.__ships:\r\n self.__play_one_round()\r\n\r\n gh.report_gameover()", "def restart(self):\n self.main_grid_values = [\n [0] * self.TILES_PER_ROW for _ in range(self.TILES_PER_ROW)\n ]\n\n self.score_value.set('0')\n self.add_two()\n self.add_two()\n self.update_grid()\n\n self.bind('<{}>'.format(self.controller.slide_left_control), self.push_left)\n self.bind('<{}>'.format(self.controller.slide_right_control), self.push_right)\n self.bind('<{}>'.format(self.controller.slide_up_control), self.push_up)\n self.bind('<{}>'.format(self.controller.slide_down_control), self.push_down)\n\n self.game_over_button.destroy()", "def game_main():\n # global variables that will be used in other functions\n global GAME_CLOCK, RENDER_WINDOW, GAME_PUZZLE, BOARD, MOVE_COUNT_BOX, MOVE_COUNT, PUZZLE_COPY, RESET_BTN, CHECK_BTN, NEW_BTN, K_VAL, SOLVED, RESULT, RND_TOG,N_MODE, R_BTN, N_BTN\n \n #Quickly Solvable Games\n #These all are solvable in less than 15 moves\n #I used these to keep the processing time lower\n quick_games = [[[4,1,3],[None, 2, 5], [7, 8, 6]],\n [[4,1,3],[2, None, 5], [7, 8, 6]],\n [[4,1,3],[2, 8, 5], [7, None, 6]],\n [[4,1,None],[2, 8, 3], [7, 6, 5]]]\n\n random_mode = False # toggle random mode\n \n GAME_CLOCK = pygame.time.Clock() # clock will assist with screen updates\n\n RENDER_WINDOW = pygame.display.set_mode((WIN_WIDTH, WIN_HEIGHT)) # set render window function \n\n puzzle_select = random.randint(0, 3) # generate a random number between 0 and 3\n \n GAME_PUZZLE = generate_new_puzzle() # generate new puzzle for the game \n\n # set toggle mode\n if random_mode is True:\n RND_TOG = 'X'\n N_MODE = ''\n else:\n RND_TOG = ''\n N_MODE = 'X'\n GAME_PUZZLE.puzzle = quick_games[random.randint(0, 3)] # pick a quick solve puzzle \n\n PUZZLE_COPY = copy.deepcopy(GAME_PUZZLE) # make a copy of the puzzle for resetting\n\n K_VAL = '' # set k value text to nothing\n\n SOLVED = '' # set solved text to nothing\n\n MOVE_COUNT = '0' # initialize move count\n\n run_game = True # establish case for game loop\n\n # MAIN GAME LOOP\n while run_game: \n \n\n # Draw Game Screen and GUI\n # ============\n draw_game() \n\n # Main Event Handler Loop\n # =======================\n 
for event in pygame.event.get(): # check for user interaction\n\n # check if user is exiting game\n if event.type == pygame.QUIT:\n pygame.quit() # deactivate Pygame Libraries (undoes init())\n sys.exit() # terminate program\n\n # Mouse click even listener\n if event.type == MOUSEBUTTONDOWN:\n\n position = pygame.mouse.get_pos() # mouse position\n tile_index = tile_clicked(position) # gets tile index if clicked\n \n # NUMBER TILE CLICKED\n if tile_index:\n \n # get blank position\n blank_position = GAME_PUZZLE.get_blank_pos() \n\n # if the tile clicked was not the blank tile\n if tile_index != blank_position:\n move_direction = get_move_type(tile_index, blank_position) # determine move direction\n\n GAME_PUZZLE.make_move(move_direction) # make move\n MOVE_COUNT = str(int(MOVE_COUNT) + 1)\n draw_puzzle() # render update\n \n # RESET BUTTON CLICKED\n if RESET_BTN.collidepoint(position):\n\n # Reset Puzzle\n GAME_PUZZLE = copy.deepcopy(PUZZLE_COPY)\n\n # Reset Game Values\n MOVE_COUNT = '0'\n SOLVED = ''\n K_VAL = ''\n\n # Render Update \n draw_puzzle() \n\n # NEW GAME BUTTON CLICKED\n if NEW_BTN.collidepoint(position):\n\n if random_mode is True:\n # Generate NEW\n GAME_PUZZLE = generate_new_puzzle()\n else:\n # pick a quick solve puzzle\n GAME_PUZZLE.puzzle = quick_games[random.randint(0, 3)] \n\n # make a copy of the puzzle for resetting\n PUZZLE_COPY = copy.deepcopy(GAME_PUZZLE)\n\n # Reset Game Values\n MOVE_COUNT = '0'\n SOLVED = ''\n K_VAL = ''\n\n # Render Update \n draw_puzzle() \n \n # CHECK BUTTON WAS CLICKED\n if CHECK_BTN.collidepoint(position):\n \n result = None # holds the result of the outcome\n moves = 0\n\n # check for a k - value\n if K_VAL != '':\n k = int(K_VAL) # transform to integer\n\n outcome = vpuz.build_move_tree(GAME_PUZZLE, k) # determine if solvable in k moves\n \n if outcome[0] is True: # Game Was Solved \n MOVE_COUNT= str(outcome[3].generation) # set number of moves\n SOLVED = ','.join(vpuz.get_solving_moves(outcome[3])) # join returned list into comma separated string\n result = 'Solvable! Winning Moves: ' + SOLVED\n SOLVED = result\n elif outcome[1] is True:\n SOLVED = 'Unsolvable in ' + K_VAL + ' moves...' 
# not solvable in k moves\n \n # Random mode was enabled\n if R_BTN.collidepoint(position):\n if random_mode is True:\n RND_TOG = ''\n N_MODE = 'X'\n random_mode = False\n else:\n RND_TOG = 'X'\n N_MODE = ''\n random_mode = True\n\n # Normal mode was enabled\n if N_BTN.collidepoint(position):\n if random_mode is True:\n RND_TOG = ''\n N_MODE = 'X'\n random_mode = False\n else:\n RND_TOG = 'X'\n N_MODE = ''\n random_mode = True\n \n \n # Key Pressed Event Listener\n if event.type == pygame.KEYDOWN:\n\n #backspace\n if event.key == pygame.K_BACKSPACE:\n K_VAL = K_VAL[:-1] # subtract one character from end\n elif event.key == pygame.K_DELETE:\n K_VAL = '' # delete number \n else:\n K_VAL += event.unicode # otherwise enter number\n\n\n pygame.display.set_caption(\"Eight Puzzle: By Joseph Polaski\")\n pygame.display.flip()\n GAME_CLOCK.tick(30) # limit to 30 Frames per second", "def reset_game(self):\n self.ships_left = self.settings.ship_limit\n self.fleets_left = self.settings.fleet_waves\n self.target_miss = self.settings.target_max_miss\n self.reset_level()", "def naked_round(self):\n self.change = False\n for row in range(self.board_size):\n for col in range(self.board_size):\n if len(self.possibles[row][col]) == 1:\n num = self.possibles[row][col].pop()\n self.set_number(num, row, col, \"NS\")", "def update(self):\n\n # Update guess tracker\n for i in range(atoms):\n\n ident = 'tracker' + str(i + 1)\n\n if i < len(game.guesslist):\n color = scheme.red\n else:\n color = scheme.white\n\n self.ids[ident].color = color\n\n # Update score\n self.ids.score.text = str(game.score)\n\n # Check for end game conditions! Make button (in)visible.\n if len(game.guesslist) == atoms:\n self.ids.end_button.disabled = False\n self.ids.end_button.opacity = 1\n else:\n self.ids.end_button.disabled = True\n self.ids.end_button.opacity = 0", "def update_cells(self):\n mineboard = self.mineboard\n gameboard = mineboard.gameboard\n for change in mineboard.changes:\n i, j = change[0], change[1]\n text_val = gameboard[i][j]\n\n if text_val == 'M':\n self.canvas.delete(self.cells[i][j])\n self.cells[i][j] = self.canvas.create_image(\n 2+j*CELLWIDTH, 2+i*CELLWIDTH, image=EXPLODED, anchor='nw')\n self.reveal_mines(i, j)\n\n elif text_val == 'F':\n self.canvas.delete(self.cells[i][j])\n self.cells[i][j] = self.canvas.create_image(\n 2+j*CELLWIDTH, 2+i*CELLWIDTH, image=FLAG, anchor='nw')\n\n elif text_val == ' ':\n self.canvas.delete(self.cells[i][j])\n self.cells[i][j] = self.canvas.create_rectangle(\n 2+j*CELLWIDTH, 2+i*CELLWIDTH, (j+1)*CELLWIDTH, (i+1)*CELLWIDTH, fill=DEFAULT_COLOR, outline=\"\")\n\n elif text_val in ['0', '1', '2', '3', '4', '5', '6', '7', '8']:\n self.canvas.itemconfig(\n self.cells[i][j], fill=COLORS[int(text_val)])\n if text_val != '0':\n # offset here is by 12 pixels\n self.canvas.create_text(\n 2+j*CELLWIDTH+(CELLWIDTH-1)//2, 2+i*CELLWIDTH+(CELLWIDTH-1)//2, anchor='center', text=f\"{text_val}\")\n\n mineboard.changes = [] # removes previous changes\n if mineboard.gamestate is not None:\n # if the game has ended displays game end message and buttons\n self.win_lose_lbl.grid(row=3, column=0, columnspan=4)\n self.win_lose_msg.set(\n f\"You {self.mineboard.gamestate}! 
Play again?\")\n self.same_again_bttn.grid(row=4, column=0, columnspan=2)\n self.play_again_bttn.grid(row=4, column=2, columnspan=2)", "def play(self):\n print(\"Board size: {}x{} with {} games using pieces: {}\".format(self.size[0], self.size[1], self.num_games, self.pieces))\n print(\"Player 1 using layout '{}' and play strategy '{}'\".format(self.layouts[0], self.plays[0]))\n print(\"Player 2 using layout '{}' and play strategy '{}'\".format(self.layouts[1], self.plays[1]))\n print(\"Running...\")\n self.start_time = time.time()\n\n for game in range(self.num_games):\n if self.verbose: print(\"Playing game {}:\".format(game))\n players = (Player(\"Player 1\", self.size[0], self.size[1], self.pieces, self.layouts[0], self.plays[0], self.verbose),\n Player(\"Player 2\", self.size[0], self.size[1], self.pieces, self.layouts[1], self.plays[1], self.verbose))\n\n finished = False\n game_round = 0\n\n while not finished:\n game_round += 1\n for i in range(2):\n player = players[i]\n opponent = players[0] if i == 1 else players[1]\n\n attack_pos = player.get_next_attack()\n player.set_attack_result(attack_pos, *opponent.is_hit(attack_pos))\n\n if opponent.is_player_dead() is True:\n self.wins[i] += 1\n self.tries[i] += game_round\n finished = True\n if self.verbose: print(\"Player {} won the game on round {}\\n\".format(i+1, game_round))\n break", "def update(self):\n for pl, result in zip(self._players, self.golf_round.doc.results):\n for score in result.scores:\n n = score.num-1\n # update net \n pl.dct_net['holes'][n] = score.gross - pl._bumps[n]\n pl.update_totals(pl.dct_net)", "def update(self, is_my_turn, clue_word, clue_num_guesses, guesses):\r\n pass", "def set_pieces(self):\n\n for i in range(len(self._game_board)):\n\n # Row 1\n if i == 0:\n for ii in range(len(self._game_board[i])):\n if ii == 0 or ii == 8:\n self._game_board[i][ii] = Chariot(\"black\", \"BCHA\")\n self._game_board[i][ii].update_location([i, ii])\n if ii == 1 or ii == 7:\n self._game_board[i][ii] = Horse(\"black\", \" BH \")\n self._game_board[i][ii].update_location([i, ii])\n if ii == 2 or ii == 6:\n self._game_board[i][ii] = Elephant(\"black\", \" BE \")\n self._game_board[i][ii].update_location([i, ii])\n if ii == 3 or ii == 5:\n self._game_board[i][ii] = Advisor(\"black\", \" BA \")\n self._game_board[i][ii].update_location([i, ii])\n if ii == 4:\n self._game_board[i][ii] = General(\"black\", \" BG \")\n self._game_board[i][ii].update_location([i, ii])\n\n # Row 3\n if i == 2:\n for ii in range(len(self._game_board[i])):\n if ii == 1 or ii == 7:\n self._game_board[i][ii] = Cannon(\"black\", \"BCAN\")\n self._game_board[i][ii].update_location([i, ii])\n\n # Row 4\n if i == 3:\n for ii in range(len(self._game_board[i])):\n if ii % 2 == 0:\n self._game_board[i][ii] = Soldier(\"black\", \"BSOL\")\n self._game_board[i][ii].update_location([i, ii])\n\n # Row 7\n if i == 6:\n for ii in range(len(self._game_board[i])):\n if ii % 2 == 0:\n self._game_board[i][ii] = Soldier(\"red\", \"RSOL\")\n self._game_board[i][ii].update_location([i, ii])\n\n # Row 8\n if i == 7:\n for ii in range(len(self._game_board[i])):\n if ii == 1 or ii == 7:\n self._game_board[i][ii] = Cannon(\"red\", \"RCAN\")\n self._game_board[i][ii].update_location([i, ii])\n\n # Row 10\n if i == 9:\n for ii in range(len(self._game_board[i])):\n if ii == 0 or ii == 8:\n self._game_board[i][ii] = Chariot(\"red\", \"RCHA\")\n self._game_board[i][ii].update_location([i, ii])\n if ii == 1 or ii == 7:\n self._game_board[i][ii] = Horse(\"red\", \" RH \")\n 
self._game_board[i][ii].update_location([i, ii])\n if ii == 2 or ii == 6:\n self._game_board[i][ii] = Elephant(\"red\", \" RE \")\n self._game_board[i][ii].update_location([i, ii])\n if ii == 3 or ii == 5:\n self._game_board[i][ii] = Advisor(\"red\", \" RA \")\n self._game_board[i][ii].update_location([i, ii])\n if ii == 4:\n self._game_board[i][ii] = General(\"red\", \" RG \")\n self._game_board[i][ii].update_location([i, ii])", "def _update_score(self) -> None:\n\n # setting new score by iterating over players\n self.score_play[self.n_play_turns, ] = [\n self._score_table[(\n self.contract.level,\n self.contract.suit,\n self.tricks[i],\n self.contract.player_vulnerability[i],\n int(self.contract.double + self.contract.redouble)\n )]\n for i in range(NUM_PLAYERS)\n ]", "def new_game(self):\n self.strikes = 0\n self.change_image()\n self.word = choice(self.WORD_LIST).upper()\n self.word_blank.set(\" _ \" * len(self.word))\n self.word_underscored = [\"_\"] * len(self.word)\n self.guessed = \"GUESSES: \"\n self.guesses.set(self.guessed)", "def run_game(self):\n\t\twhile True:\n\t\t\tself._check_events()\n\t\t\t\n\t\t\tif self.stats.game_active:\n\t\t\t\tself.pigeon.update()\n\t\t\t\tself._update_droppings()\n\t\t\t\tself._update_autos()\n\n\t\t\tself._update_screen()", "def restart_game(self):\n self.board = Board(None)\n self.view.reiniciar_jogo(self.board)\n\n self.white_player = None\n self.black_player = None\n self.atual_player = None\n self.finish_game = 0", "def updateWindow(gameWindow, figurelist, rounds):\n for item in gameWindow.items[:]:\n item.undraw() #Remove al elements in game window\n\n gameWindow.update() #update to ensure al is gone\n\n for i in range(15): #For loops to draw new figures\n for k in range(25):\n todraw = figurelist[i][k]\n todraw.draw(gameWindow)\n \n scoreText = str(\"Pick number:\" + str(rounds)) #Show how many times a color has been picked(Shows 1 on first round)\n toPrint = g.Text(g.Point(100, 775), scoreText) #Create text to print\n\n toPrint.draw(gameWindow) #Draws text with rounds", "def phase_7(self):\n\n test_board_1 = board(5, 5, snake_init_coordinates = [4, 2], fruit_init_coordinates = [0, 2])\n render = Render_engine('terminal', test_board_1)\n print(\"Before grow\")\n print(\"*******************************\")\n render.render_terminal(test_board_1)\n print(\"Now, Snake's length is {}\".format(len(test_board_1.Snake)))\n\n print(\"\\n\\nafter grow once\")\n print(\"*******************************\")\n test_board_1.Snake_grow(\"right\")\n test_board_1.Update_board()\n render.render_terminal(test_board_1)\n print(\"Now, Snake's length is {}\".format(len(test_board_1.Snake)))\n\n print(\"\\n\\nafter grow twice\")\n print(\"*******************************\")\n test_board_1.Snake_grow(\"right\")\n test_board_1.Update_board()\n render.render_terminal(test_board_1)\n print(\"Now, Snake's length is {}\".format(len(test_board_1.Snake)))\n\n print(\"\\n\\nafter grow three times\")\n print(\"*******************************\")\n test_board_1.Snake_grow(\"right\")\n test_board_1.Update_board()\n render.render_terminal(test_board_1)\n print(\"Now, Snake's length is {}\".format(len(test_board_1.Snake)))", "def update(self):\n self.player.eaten_cheese = False\n # Checa se o jogador ou agente chegaram no objetivo\n if self.grid[self.player.x][self.player.y] == 2:\n self.player.score += self.player.reward_amount\n self.done = True\n\n # Checa se o jogador ou agente comeram o queijo\n elif self.grid[self.player.x][self.player.y] == 4:\n self.player.score += 0.2\n 
self.player.eaten_cheese = True\n self.clear_position(self.player.x, self.player.y)\n\n # Popule a atual posicao do jogador com 1 e a do agente com 10\n if self.player.name == \"Player\":\n self.grid[self.player.x][self.player.y] = 1\n elif self.player.name == \"Agent\":\n self.grid[self.player.x][self.player.y] = 10", "def update(self, player_index=0, num_players=1, visible_scards = []):\n\n self.visible_scards = visible_scards\n self.controller._state.player_index = player_index\n if self.num_players > num_players and self.controller._state.rules.Shared_Board \\\n and not self.need_updated_buttons:\n # A player has left the game after the round has begun -- make adjustments so game can continue.\n self.playerLeftGame(num_players)\n self.num_players = num_players\n if self.controller._state.round == -1:\n self.mesgBetweenRounds(self.help_text)\n if self.round_advance:\n self.round_index = self.round_index + 1\n if self.round_index < len(self.Meld_Threshold):\n self.help_text[0] = 'This is the round of ' + str(self.Meld_Threshold[self.round_index]) + ' ! '\n self.need_updated_buttons = True # used for Liverpool.\n else:\n self.help_text = ['Game has concluded. Scores for each round can be found in command window.']\n self.round_advance = False\n else:\n if not self.round_index == self.controller._state.round:\n # Need this to true up round_index if a player joins mid-game.\n skipped_rounds = self.controller._state.round - self.round_index\n for idx in range(skipped_rounds):\n #todo: How to score latecomers should be moved to ruleset.\n score = 0\n self.controller.lateJoinScores(score)\n self.round_index = self.controller._state.round\n self.round_advance = True\n # reset outline colors on ready buttons to what they need to be at the start of the \"between rounds\" state.\n self.ready_color_idx = 2\n self.not_ready_color_idx = 6\n self.last_hand = self.current_hand\n self.current_hand = self.controller.getHand()\n if len(self.current_hand) == 0:\n self.hand_info = []\n elif not self.last_hand == self.current_hand:\n self.hand_info = HandManagement.WrapHand(self, self.current_hand, self.hand_info)\n HandManagement.ShowHolding(self, self.hand_info) # displays hand\n self.RuleSetsButtons.ButtonDisplay(self)", "def restart_game(self):\n for i in range(1, 4, 1):\n self.ids['door' + str(i)].source = \\\n 'door_closed.jpg'\n self.ids['button' + str(i)].disabled = False\n setattr(self, 'door'+str(i)+'_counter', 0)\n self.ids['score'].text = 'SCORE: 0'\n self.score = 0\n MainMenu.door_assignment()", "def play_best_guess(self, game):\n\n\n # create a list of cells\n cells = [game.board[i][j]\n for i in xrange(game.rows)\n for j in xrange(game.cols)]\n\n first_cell = cells[0]\n game.reveal_cell(first_cell.row, first_cell.col)\n\n # draw updated board and pause for a second\n game.draw_board()\n if PAUSE == True:\n time.sleep(1)\n\n\n total_flagged = 0\n while not game.lost_game and not game.won_game:\n\n # remember if we've made a move in the while loop\n # so we know whether to make a random move later on\n made_move = False\n\n # look through all revealed cells for any with a number of neighboring mines.\n # if the cell has the same number of unrevealed neighbors as the cell's\n # number of neighboring mines, all the unrevealed neighbors must be mines.\n revealed_numbered_cells = [c for c in cells if c.revealed and (not c.flagged) and (c.neighbors > 0)]\n while revealed_numbered_cells:\n cell = revealed_numbered_cells.pop()\n # cell may have been marked flagged after revealed_numbered_cells was 
compiled\n if not cell.flagged:\n neighbor_cells = ms.Minesweeper.get_neighbors(cell.row, cell.col, game.board)\n flagged_neighbors = [n for n in neighbor_cells if n.flagged]\n number_remaining_mines = cell.neighbors - len(flagged_neighbors)\n unknown_neighbors = [n for n in neighbor_cells if not n.flagged and not n.revealed]\n if number_remaining_mines > 0 and len(unknown_neighbors) == number_remaining_mines:\n # flag every neighbor\n for c in unknown_neighbors:\n if total_flagged < game.mines:\n total_flagged += 1\n game.flag_cell(c.row, c.col)\n if (game.test_did_win()):\n game.game_over()\n game.draw_board()\n if PAUSE == True:\n time.sleep(1)\n made_move = True\n\n # we may have won with the flag above so test whether we're still playing\n # before further calculations\n if not game.lost_game and not game.won_game:\n # loop through all unrevealed, unflagged cells and see if we know it's safe to reveal\n for c in cells:\n if not c.revealed and not c.flagged and self.is_cell_safe(c, game.board):\n game.reveal_cell(c.row, c.col)\n if (game.test_did_win()):\n game.game_over()\n game.draw_board()\n if PAUSE == True:\n time.sleep(1)\n made_move = True\n\n # assume we've made our best guesses and now have to guess randomly\n # this will prevent us from looping forever if no obvious moves are available\n if not made_move:\n unrevealed = [c for c in cells if not c.revealed and not c.flagged]\n if len(unrevealed) > 0:\n cell = random.choice(unrevealed)\n game.reveal_cell(cell.row, cell.col)\n if (game.test_did_win()):\n game.game_over()\n game.draw_board()\n if PAUSE == True:\n time.sleep(3)", "def update_screen(self, ai_game):\r\n self.surface.fill(self.settings.bg_color)\r\n self.ship.blitme()\r\n for bullet in self.ship_bullets.sprites():\r\n bullet.draw_bullet()\r\n for bullet in self.alien_bullets.sprites():\r\n bullet.draw_bullet()\r\n self.aliens.draw(self.surface)\r\n self.explosions.draw(self.surface)\r\n\r\n # Draw the score information.\r\n self.sb.show_score()\r\n\r\n # Draw the difficulty buttons if the game is inactive.\r\n if not self.stats.game_active:\r\n for button in self.buttons:\r\n button.draw_button()\r\n\r\n # Draw the game over message if appropriate\r\n if self.stats.game_over:\r\n self.surface.blit(self.game_over_text, self.game_over_text_rect)\r\n\r\n # Make the most recently drawn screen visible.\r\n self.screen.blit(self.surface, (0, 0))\r\n pg.display.flip()" ]
[ "0.6821897", "0.65869373", "0.6301358", "0.6244406", "0.6205162", "0.61089224", "0.60870314", "0.6009552", "0.59923613", "0.59637344", "0.5959768", "0.59509295", "0.5935243", "0.59325224", "0.59208655", "0.5909505", "0.59034586", "0.5883476", "0.5867642", "0.58632934", "0.58602417", "0.58553267", "0.58518803", "0.58466476", "0.58315295", "0.58216596", "0.5807074", "0.5801961", "0.5789401", "0.5768723" ]
0.69500154
0
Outputs the important game information for each round of play. In this case, that means the hider provides a hint.
def _do_outputs(self): self._puzzle.display_revealed_puzzle() hint = self._puzzle.get_hint() self._console.write(hint) print("") self._jumper.draw_jumper() print("") # These ifs end the game if self._puzzle.is_solved(): self._keep_playing = False self._puzzle.display_win_screen() if self._puzzle.incorrect_guesses >= 4: self._keep_playing = False self._puzzle.display_loss_screen()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def info():\n print(\"Made using the OOP RPG game creator (c) Claire.\\n\")", "def player_tie(self):\r\n\r\n self.summary = (\" \"* 78) + \"TIE. TRY AGAIN\"\r\n print(\"Match ends in a draw.\\n\")", "def print_statistics(self):\n print 'Ran %s iterations in %0.3f seconds\\n' % (\n self.iterations, self.elapsed_time)\n\n print 'Overall Equity'\n for index in range(len(self.holdem_ranges)):\n range_short_form = '%r' % self.holdem_ranges[index]\n print 'P%s) %-15s %0.3f' % (\n index,\n range_short_form,\n float(self.win_stats.get(index, 0))/self.iterations)\n print '\\n'\n print 'Hand distribution for each player'\n for stats in self.player_stats:\n stats.print_report()", "def display_stats(self):\n print(\"Simulation took: {:.2f} seconds to execute\".format(time.time() - self.start_time))\n for i, win in enumerate(self.wins):\n average = 0\n if win:\n average = float(self.tries[i]) / win\n print(\"Player {} wins: {} with (average number of rounds: {:.2f})\".format(i+1, win, average))", "def display_hall_of_fame(self) -> None:\n print(\"Hall of fame\")\n for env, dico in self.score_dic.items():\n print(\"Environment :\", env)\n for team, score in sorted(dico.items()):\n print(\"team: \", team, \"mean: \", score[0], \"std: \", score[1])", "def print_game_over():\n print()\n print(\" _____ __ __ ______ ______ ________ _____ \")\n print(r\" / ____| /\\ | \\/ | ____| / __ \\ \\ / / ____| __ \\ \")\n print(r\" | | __ / \\ | \\ / | |__ | | | \\ \\ / /| |__ | |__) |\")\n print(r\" | | |_ | / /\\ \\ | |\\/| | __| | | | |\\ \\/ / | __| | _ / \")\n print(r\" | |__| |/ ____ \\| | | | |____ | |__| | \\ / | |____| | \\ \\ \")\n print(r\" \\_____/_/ \\_\\_| |_|______| \\____/ \\/ |______|_| \\_\\\\\")\n print()", "def print_start_game():\n print(HANGMAN_ASCII_ART)\n print(MAX_TRIES)", "def displayGame(self):\n # row1 & row2 longer, row3 & row4 shorter, proper indented below\n print 'current table:'\n for key in ['row1','row2']:\n rowLs = self.table[key]\n string = ''\n for ele in rowLs:\n tmpStr = str(ele) + '\\t'\n string += tmpStr\n print string\n for key in ['row3','row4']:\n string = '\\t'\n rowLs = self.table[key]\n for ele in rowLs:\n tmpStr = str(ele) + '\\t'\n string += tmpStr\n print string \n print 'discardList:'\n print self.discardLs[0],'\\t',self.discardLs[1],'\\n',self.discardLs[2],'\\t',self.discardLs[3]", "def display_get_game():\n title = input(\"Please give me a title searched game: \")\n info_about_game = reports.get_game(filename, title)\n print(\"Properties of the game: {}\\n\".format(info_about_game))", "def print_people_strategies():\n\t\tfor person in sorted(Simulation.community):\n\t\t\tSimulation.community[person].print_info()\n\t\tPerson.person_progression.write(\"--------------- END OF WEEK ---------------\" + \"\\n\")", "def show_results(self):\r\n\r\n if self.player_cards > self.computer_cards: # player wins\r\n print('\\nCongratulations!!')\r\n print('You WIN by {0} / {1}'.format(self.player_cards, self.computer_cards))\r\n elif self.player_cards < self.computer_cards: # computer wins\r\n print('\\nToo bad!!')\r\n print('You LOST by {0} / {1}'.format(self.player_cards, self.computer_cards))\r\n else: # tied\r\n print('You TIED by {0} / {1}'.format(self.player_cards, self.computer_cards))", "def print_status(self):\r\n\t\tif VERBOSE:\r\n\r\n\t\t\tprint( 'Player : ')\r\n\t\t\tfor h in self.hands:\r\n\t\t\t\tprint('\\t' + str(h))\r\n\t\t\tprint( 'Dealer:\\n\\t' + str(self.dealer))\r\n\t\t\tprint( '-----------------------')", "def pretty_print_game (self, game_list):\n 
hand_tally = 0\n for game in game_list:\n hand_tally += int (game[2])\n\n game_result = \"won\" if hand_tally > 0 else (\"lost\" if hand_tally < 0 else \"tied\")\n\n print (\"I \" + game_result + \"!\")\n for game in game_list:\n print (game)\n print (\"-----------------------------------------------------------------\")", "def print_scoreboard(self):\n output = ''\n # parallel dictionaries with innings and scores\n innings = []\n away = []\n home = []\n for x in self:\n innings.append(x['inning'])\n away.append(x['away'])\n home.append(x['home'])\n # go through all the information and make a nice output\n # that looks like a scoreboard\n output += 'Inning\\t'\n for x in innings:\n output += str(x) + ' '\n output += '\\n'\n for x in innings:\n output += '---'\n output += '\\nAway\\t' + self.__enumerate_scoreboard(away)\n output += '\\nHome\\t' + self.__enumerate_scoreboard(home)\n return output", "def play_round(self):\n print('='*10) # Round separation display\n print(f'Round {self.round_num}:')\n for player in self.players:\n\n # Player separation display:\n if player != self.players[0]:\n print('-' * 5)\n\n self.play_turn(player)\n \n # Return if exit conditions are met\n if (self.exit_flag) or (self.winner is not None) or (self.board.full()):\n return\n self.round_num += 1", "def print(self):\n\n def format_guessed_word(word):\n return ' '.join(list(word))\n\n def format_blank_word(word):\n return ' '.join(list('_' * len(word)))\n\n print('\\n' + \"Board\" + '=' * 75)\n for word in self._words:\n word_str = format_guessed_word(word) \\\n if word in self._words_guessed \\\n else format_blank_word(word)\n print(word_str)\n print(\"{}/{} words remaining\".format(self._num_words - len(self._words_guessed),self._num_words))\n print('=' * 80 + '\\n')", "def show_game_status(self, game, diff, step):\n if self.verbose:\n print('========== Step {} =========='.format(step))\n print('Time cost ===> {:.3f}s'.format(diff))\n game.print_game()", "def show_game_mission():\n print_bold(\"任务:\")\n print(\"\\t选择李维可以休息的小屋...\")\n print_bold(\"TIP:\")\n print(\"保持警惕,周围有敌人!\")\n print_dotted_line()", "def print_game_stats(games_won=games_won):\n for k,v in games_won.items():\n print(k)\n print(v)\n if v == 1:\n print(f'{k} has won {v} game')\n else:\n print(f'{k} has won {v} games')", "def print_winner(self):\n if self.winner is None:\n print('There was no winner')\n else:\n print('The winner was {}!'.format(\n self.__class__.COUNTER_REPRESENTATION[self.winner]))", "def fav_game(game):\n print(\"\\nyour fav game is: \" +game)", "def print_hand(self):\n if self.cheating:\n print(\"You're cheating!\")\n print(\"until you reroll it!\")\n print(\"\"\"\nYou rolled:\na = [ {} ]\nb = [ {} ]\n\nYou are in Stage {}\n \"\"\".format(self.die_a, self.die_b, self.stage))", "def poker(project):\r\n total_stories = len(project.unestimated_stories())\r\n for idx, story in enumerate(project.unestimated_stories()):\r\n clear()\r\n rows, cols = _get_column_dimensions()\r\n print \"{} PLANNING POKER SESSION [{}]\".format(project.name.upper(), bold(\"{}/{} Stories Estimated\".format(idx+1, total_stories)))\r\n print \"-\" * cols\r\n pretty_print_story(story)\r\n prompt_estimation(project, story)\r\n else:\r\n print \"KaBoom!!! 
Nice Work Team\"", "def show_game_mission():\n print_bold(\"Misija:\")\n print(\"\\tOdaberi kućicu u kojoj se Talion može odmoriti ...\")\n print_bold(\"SAVJET:\")\n print(\"PAZI kako biraš jer neprijatelji su blizu!\")\n print_dotted_line()", "def print_player_info(self):\n\t\tclear_screen()\n\n\t\tprint(\"# PLAYER INFO #\\n\")\n\t\tprint(\"Name{:.>17} \".format(self.info['Name']))\n\t\tprint(\"Race{:.>17} \".format(self.info['Race']))\n\t\tprint(\"Level{:.>16} \".format(self.stats['Level']))\n\t\tprint(\"Hit Points{:.>11} \".format(self.stats['HP']))\n\t\tprint(\"Gold Pieces{:.>10} \".format(self.stats['GOLD']))\n\t\n\t\tpress_enter()", "def __show_scoreboard(self):\n self.clear_screen()\n\n print('\\n' * 2, end=\"\")\n for line in self.__fame:\n print((\" \" * 5) + line, end=\"\")\n print('\\n' * 2, end=\"\")\n\n with open(\"mastermind/assets/scores.json\", \"r\") as data:\n board = list(load(data).items())\n\n space = \" \" * 11\n print(f\"{space}RANK {'PLAYER':<30}\" +\n f\"{'TIME':>7} (seconds){'POINTS':>29}\\n\")\n\n lines_printed = 0\n for idx, entry in enumerate(board[:10]):\n lines_printed += 1\n space = \" \" * 10\n n = idx + 1\n year, month, day, time = entry[0].split(\" \")\n points = entry[1][\"points\"]\n playtime = entry[1][\"playtime\"]\n player = entry[1][\"player\"]\n\n print(f\"{space}{n:>4}. {player:<30}\" +\n f\"{playtime:>7,.2f}{points:>36}/15\")\n\n lines = \"\\n\" * (12 - lines_printed)\n print(f\"{lines}{space}\", end=\"\")\n sleep(.25)\n self.cool_print(\"Press ENTER to return to player menu.\",\n newline=False, margin=0)\n input()", "def show_results(self, game_state, winner, loser):\n if game_state is GameState.WINNER:\n self.__turn_marker_label.configure(\n image=self.__marker_images_big_win[winner])\n\n self.__player_labels[winner].configure(\n text=f\"Player {winner.value+1} wins!\", fg=Color.WIN_COLOR)\n self.__player_labels[loser].configure(\n text=f\"Player {loser.value+1} loses.\", fg=Color.DARK_TONE)\n elif game_state is GameState.TIE:\n self.__player_labels[MarkerType.CROSS].configure(\n text=f\"It's a tie!\", fg=Color.BLACK)\n self.__player_labels[MarkerType.CIRCLE].configure(text=\"\")", "def print_game_stats(games_won=games_won):\r\n for i in games_won: # set loop condition\r\n if games_won[i] != 1: # argument for if games is pluralized\r\n print (i + ' has won ' + str(games_won[i])+ ' games')\r\n else:\r\n print (i + ' has won ' + str(games_won[i])+ ' game')", "def start_game(answer, session):\n\n print(\"start_game, answer: \", answer)\n\n attributes = reset_attributes()\n\n if answer == \"einem spieler\":\n answer = \"1\"\n if answer == \"vier spieler\":\n answer = \"4\"\n\n if answer in [str(x) for x in range(1, 5)]:\n curr_round = 1\n curr_player = 1\n state = \"Gameon\"\n scores = {x:0 for x in range(1, int(answer)+1)}\n sess_fragen = populate_questions(scores)\n \n attributes[\"question_index\"] = 0\n attributes[\"current_round\"] = curr_round\n attributes[\"current_player\"] = curr_player\n attributes[\"state\"] = state\n attributes[\"scores\"] = scores\n attributes[\"sess_questions\"] = sess_fragen\n\n if answer == \"1\":\n text = \"<s>Alles klar. 
\"+ TEXT_BREAK + \"Wir beginnen ein Spiel mit einem Spieler.\"+\\\n \"</s> <s>Das Quiz enthält {} Fragen.\\\n </s>\".format(TOTAL_ROUNDS)\n else:\n text = \"<s>Alles klar.\" + TEXT_BREAK + \"Wir beginnen ein Spiel mit {} Spielern\"\\\n .format(answer) +\\\n \"</s><s> Es werden jeweils {} Fragen an jeden Spieler gestellt.\\\n </s>\".format(TOTAL_ROUNDS)\n\n frage1 = ask_question(0, attributes)\n text += TICK_HELP_MESSAGE\n text += frage1\n card_text = \"Spiel mit {0} Spielern begonnen.\\n\".format(len(scores)) + clear_tags(frage1)\n\n else:\n richtige_zahl_prompt = \"Sag eine Nummer zwischen 1 und 4.\"\n text = \"Ungültige Spielerzahl. \" + richtige_zahl_prompt\n frage1 = SPIELER_PROMPT_TEXT\n card_text = text\n\n attributes[\"current_question\"] = frage1\n attributes[\"speech_output\"] = text\n attributes[\"reprompt_text\"] = frage1\n \n return response(text, should_end_session=False, reprompt_text=frage1, \\\n attributes=attributes, card_text=card_text)", "def displayTurn(self, guess, result):\n self._currentTurnNum += 1\n print 'On turn', self._currentTurnNum, 'of', self._maxNumberOfTurns,\n print 'guess', self._patternAsString(guess), 'scored',\n print result.getNumBlack(), 'black and', result.getNumWhite(), 'white.'" ]
[ "0.66959804", "0.662896", "0.661329", "0.65989345", "0.6507802", "0.64726734", "0.6438309", "0.6436374", "0.63636094", "0.6347345", "0.6319752", "0.6309312", "0.6309008", "0.62329", "0.6201972", "0.6197924", "0.618897", "0.6177045", "0.617191", "0.6145641", "0.61397576", "0.613286", "0.6128712", "0.61225533", "0.6102467", "0.60966045", "0.6092233", "0.6086259", "0.6046607", "0.6041844" ]
0.66408557
1
If trainable, returns variable, otherwise the original embedding
def embedding_setup(self, embedding, emb_trainable): if emb_trainable == True: emb_variable = tf.get_variable( name="embedding_matrix", shape=embedding.shape, initializer = tf.constant_initializer(embedding)) return emb_variable else: return embedding
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def embedding_trainable_variables(self) -> Sequence[tf.Variable]:\n return self._embedding_layer.trainable_variables", "def forward(self, input_variable):\r\n return self.embedding(input_variable)", "def embedding_layer(self):\n with tf.name_scope(\"Embedding_Layer\"):\n V_size = len(self.vocab)\n embed_dim = len(self.embed[0]) \n W_embed_ = tf.get_variable(\"W_embed\",shape=[V_size, embed_dim],trainable=False).assign(np.asarray(self.embed))\n W_analogy_embed_ = tf.get_variable(\"W_analogy_embed\",shape=[V_size, embed_dim],trainable=True,initializer=tf.random_uniform_initializer(minval=-1,maxval=1))\n return W_embed_, W_analogy_embed_", "def _get_embedding(self, data):\n # Tensor(n, c)\n cat = data['cat']\n return self.one_hot_embed(cat)", "def get_embed(input_data, vocab_size, embed_dim):\n embedding = tf.Variable(tf.random_uniform((vocab_size, embed_dim), -1, 1))\n embed = tf.nn.embedding_lookup(embedding, input_data)\n\n return embed", "def source_embedding_fairseq(self):\r\n return tf.get_variable(\r\n name=\"W\",\r\n shape=[self.params[\"feature.dim\"], self.params[\"embedding.dim\"]],\r\n initializer=tf.random_normal_initializer(\r\n mean=0.0,\r\n stddev=0.1))", "def target_embedding_fairseq(self):\r\n if self.params[\"embedding.share\"]:\r\n return self.source_embedding_fairseq()\r\n return tf.get_variable(\r\n name=\"W\",\r\n shape=[self.target_vocab_info.total_size, self.params[\"embedding.dim\"]],\r\n initializer=tf.random_normal_initializer(\r\n mean=0.0,\r\n stddev=0.1))", "def get_embed(input_data, vocab_size, embed_dim):\n embedding = tf.Variable(tf.random_uniform((vocab_size,embed_dim), -1, 1))\n embed = tf.nn.embedding_lookup(embedding, input_data)\n #print (\"embed_dim: \",embed_dim) # 向量表达维度为 256\n #print (\"input_data.shape: \",input_data.shape) # (50, 5)\n #print (\"embed.shap: \", embed.shape) # word 的向量表达 ==特征 (50, 5, 256) ==(batch_size, num_step, embed_dim)\n return embed # 返回input的向量表达", "def embed_word(self):\n return self.emb.get_keras_embedding(trainable = self.trainable_emb,\n input_length = self.sent_maxlen)", "def embed_word(self):\n return self.emb.get_keras_embedding(dropout = self.emb_dropout,\n trainable = self.trainable_emb,\n input_length = self.sent_maxlen)", "def add_embedding(self):\n ### YOUR CODE HERE (~4-6 lines)\n embeddingTensor = tf.Variable(self.pretrained_embeddings)\n embeddings = tf.nn.embedding_lookup(embeddingTensor, self.input_placeholder)\n embeddings = tf.reshape(embeddings, [-1, self.max_length, Config.n_features * Config.embed_size])\n ### END YOUR CODE\n return embeddings", "def get_embedding_output(self):\n return self.embedding_output", "def build_embedding_layer(inputs_, vocab_size, embed_size):\n embedding = tf.Variable(tf.random_uniform((vocab_size, embed_size), -1, 1))\n embed = tf.nn.embedding_lookup(embedding, inputs_)\n \n return embed", "def _get_embedding_layer(self, input_data, doc_input_data):\n opts = self._options\n word_embedding = tf.Variable(tf.random_uniform((self.vocab_size, opts.embed_dim), -1.0, 1.0))\n embed = []\n\n temp = tf.zeros([opts.batch_size, opts.embed_dim])\n embed_d = []\n for n in range(opts.sentence_sample):\n temp = tf.add(temp, tf.nn.embedding_lookup(word_embedding, doc_input_data[:, n]))\n embed_d.append(temp)\n\n if opts.concat == 'True':\n combined_embed_vector_length = opts.embed_dim * opts.window_size + opts.embed_dim\n for j in range(opts.window_size):\n embed_w = tf.nn.embedding_lookup(word_embedding, input_data[:, j])\n embed.append(embed_w)\n embed.append(embed_d)\n else:\n 
combined_embed_vector_length = opts.embed_dim\n embed_w = tf.zeros([opts.batch_size, opts.embed_dim])\n for j in range(opts.window_size):\n embed_w += tf.nn.embedding_lookup(word_embedding, input_data[:, j])\n embed_w += embed_d\n embed.append(embed_w)\n\n return tf.concat(embed, 1), word_embedding, combined_embed_vector_length", "def add_embedding(self, prefix=''):\n with tf.variable_scope(prefix + 'embed'):\n if self.cfg.fix_emb:\n assert (hasattr(self.cfg, 'W_emb'))\n W_emb = pkl.load(open(self.cfg.W_emb_path, 'rb'))\n W = tf.get_variable('W', initializer= W_emb, trainable=True)\n print(\"iniitalize word embedding finished\")\n else:\n weightInit = tf.random_uniform_initializer(-0.001, 0.001)\n vocab = pkl.load(open(self.cfg.vocab_path, 'rb'))\n W = tf.get_variable('W', [len(vocab), self.cfg.emb_size], initializer=weightInit)\n if hasattr(self.cfg, 'relu_w') and self.cfg.relu_w:\n W = tf.nn.relu(W)\n return W", "def get_movie_embedding(self):\n raise NotImplementedError(\"has to be overwritten\")", "def target_embedding_init_value(self):\n if self._tgt_embedding is None:\n return None\n return self._tgt_embedding.word_vecs", "def embedding(x, vocab_size, dense_size, name=None, reuse=None, multiplier=1.0):\n with tf.variable_scope(\n name, default_name=\"embedding\", values=[x], reuse=reuse):\n embedding_var = tf.get_variable(\"kernel\", [vocab_size, dense_size])\n emb_x = tf.gather(embedding_var, x)\n if multiplier != 1.0:\n emb_x *= multiplier\n return emb_x", "def _get_embedding_variable(self, layer_name):\n return self._tls._embed_variables.get(layer_name, None)", "def _create_embedding_variable(self, name, initial_value):\n if name not in self._tls._embed_variables:\n embed_var = tf.Variable(\n initial_value,\n name=name + str(threading.get_ident()),\n shape=(None, None),\n dtype=tf.float32,\n trainable=False,\n )\n self._tls._embed_variables[name] = embed_var\n else:\n embed_var = self._tls._embed_variables[name]\n embed_var.assign(initial_value)\n return embed_var", "def embed(self, x):\n if self.embedding is None:\n return x\n else:\n return self.embedding(x)", "def concept_embedding(concept_model: ConceptDetectionModel2D):\n return concept_model.to_embedding()", "def _use_embeddings(self, word):\n if word == \"@PAD@\":\n return torch.zeros(self.embeddings_dim)\n else:\n return self.embeddings[word]", "def _embed(self):\n with tf.variable_scope('word_embedding'):\n self.pretrained_word_mat = tf.get_variable(\"word_emb_mat\",\n [self.vocab.word_size() - 2, self.vocab.word_embed_dim],\n dtype=tf.float32,\n initializer=tf.constant_initializer(\n self.vocab.word_embeddings[2:],\n dtype=tf.float32),\n trainable=False)\n self.word_pad_unk_mat = tf.get_variable(\"word_unk_pad\",\n [2, self.pretrained_word_mat.get_shape()[1]],\n dtype=tf.float32,\n initializer=tf.constant_initializer(\n self.vocab.word_embeddings[:2],\n dtype=tf.float32),\n trainable=True)\n\n self.word_mat = tf.concat([self.word_pad_unk_mat, self.pretrained_word_mat], axis=0)\n self.p_emb = tf.nn.embedding_lookup(self.word_mat, self.p)\n self.q_emb = tf.nn.embedding_lookup(self.word_mat, self.q)", "def extract_embedding(self, from_model):\n return from_model", "def dense_trainable_variables(self) -> Sequence[tf.Variable]:\n dense_vars = []\n for layer in self.layers:\n if layer != self._embedding_layer:\n dense_vars.extend(layer.trainable_variables)\n return dense_vars", "def add_embedding(self):\n #with tf.variable_scope(\"RNN\", reuse = tf.AUTO_REUSE):\n embeddings = tf.get_variable(\"embeddings\", initializer = 
self.pretrained_embeddings,trainable=True)\n inputs = self.input_placeholder\n inputs = tf.reshape(inputs, [self.config.batch_size, -1 , self.config.n_features])\n embeddings = tf.nn.embedding_lookup(embeddings, self.input_placeholder)\n embeddings = tf.reshape(embeddings, [self.config.batch_size, -1, self.config.n_features* self.config.embed_size])\n embeddings = tf.cast(embeddings, tf.float32)\n return embeddings", "def forward(self, input_sentence):\n sentence = self.word_embedding(input_sentence)\n embedding = self.encoder(sentence)\n return embedding", "def project_embedding(self):\n w = self.feature_embedding.weight.data\n d = w.size(-1) - 1\n narrowed = w.narrow(-1, 1, d)\n tmp = 1 + torch.sum(torch.pow(narrowed, 2), dim=-1, keepdim=True)\n tmp.sqrt_()\n w.narrow(-1, 0, 1).copy_(tmp)\n return w # can be delete?", "def pretrained_embedding_layer(word_to_vec_map, word_to_index):\n \n vocab_size = len(word_to_index) + 1 # adding 1 to fit Keras embedding (requirement)\n any_word = list(word_to_vec_map.keys())[0]\n emb_dim = word_to_vec_map[any_word].shape[0] # define dimensionality of your GloVe word vectors (= 50)\n \n ### START CODE HERE ###\n # Step 1\n # Initialize the embedding matrix as a numpy array of zeros.\n # See instructions above to choose the correct shape.\n emb_matrix = np.zeros((vocab_size, emb_dim))\n \n # Step 2\n # Set each row \"idx\" of the embedding matrix to be \n # the word vector representation of the idx'th word of the vocabulary\n for word, idx in word_to_index.items():\n emb_matrix[idx, :] = word_to_vec_map[word]\n\n # Step 3\n # Define Keras embedding layer with the correct input and output sizes\n # Make it non-trainable.\n embedding_layer = tensorflow.keras.layers.Embedding(input_dim = vocab_size, output_dim = emb_dim, trainable = False)\n ### END CODE HERE ###\n\n # Step 4 (already done for you; please do not modify)\n # Build the embedding layer, it is required before setting the weights of the embedding layer. \n embedding_layer.build((None,)) # Do not modify the \"None\". This line of code is complete as-is.\n \n # Set the weights of the embedding layer to the embedding matrix. Your layer is now pretrained.\n embedding_layer.set_weights([emb_matrix])\n \n return embedding_layer" ]
[ "0.7058622", "0.702349", "0.6737134", "0.66756374", "0.665749", "0.6568265", "0.65501094", "0.6522559", "0.6482883", "0.64559466", "0.64411855", "0.64393085", "0.6317404", "0.630891", "0.62915754", "0.62392634", "0.62351906", "0.62319773", "0.6220624", "0.6140934", "0.6062287", "0.6012951", "0.6007319", "0.6003705", "0.60028535", "0.5993935", "0.59859633", "0.59830487", "0.59679484", "0.59593713" ]
0.7045865
1
Looks if the received block is in the waiting list. If yes we check if the address is already recorded. If no it is added to the waiting list and broadcasted.
def arrivingBlock(self,data, addr, receivedBlock): if self.blockchain.waiting_blocks == []: self.confirmed.clear() self.neighboursOk.clear() self.confirmed.append(addr) self.blockchain.putting_block(receivedBlock) self.message = self.setMessage((self.ip_address,data)) nodesMessage = Thread(target = self.runNodesMessage) nodesMessage.setDaemon(True) nodesMessage.start() nodesMessage.join() if self.verifyConfirmed(self.confirmed): self.message = self.setMessage((self.ip_address,{'Confirmation': 'All my neighbours ok'})) nodesMessage = Thread(target = self.runNodesMessage) nodesMessage.setDaemon(True) nodesMessage.start() nodesMessage.join() self.confirmed.clear() else: if receivedBlock in self.blockchain.waiting_blocks: if addr not in self.confirmed: self.confirmed.append(addr) if self.verifyConfirmed(self.confirmed): self.message = self.setMessage((self.ip_address,{'Confirmation': 'All my neighbours ok'})) nodesMessage = Thread(target = self.runNodesMessage) nodesMessage.setDaemon(True) nodesMessage.start() nodesMessage.join() self.confirmed.clear() else: self.blockchain.putting_block(receivedBlock) self.blockchain.waiting_blocks = [self.blockchain.compare_blocks()] if self.blockchain.waiting_blocks[0] == receivedBlock: self.confirmed.clear() self.confirmed.append(addr) self.message = self.setMessage((self.ip_address,{'Block': self.blockchain.waiting_blocks[0]})) nodesMessage = Thread(target = self.runNodesMessage) nodesMessage.setDaemon(True) nodesMessage.start() nodesMessage.join()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_call_waiting(self) -> bool:", "def BlockheightCheck(self):\n if self.CurrentBlockheight == BC.Default().Height:\n if len(self.Peers) > 0:\n logger.debug(\"Blockheight is not advancing ...\")\n next_hash = BC.Default().GetHeaderHash(self.CurrentBlockheight + 1)\n culprit_found = False\n for peer in self.Peers:\n if next_hash in peer.myblockrequests:\n culprit_found = True\n peer.Disconnect()\n break\n\n # this happens when we're connecting to other nodes that are stuck themselves\n if not culprit_found:\n for peer in self.Peers:\n peer.Disconnect()\n else:\n self.CurrentBlockheight = BC.Default().Height", "def is_on_waiting_list(self):\n if self.user is None:\n return False\n if unicode(self.user._id) in self.barcamp.event.waiting_list:\n return True\n return False", "def is_ready(self, addr: int, /) -> bool:", "def Add_to_waitlist(self, email):\n if email not in self.Waitlist:\n self.Waitlist.add(email)\n else:\n raise PreexistingAddressException(email)", "def check_if_alive():\n global SOCKET1\n global PRINT_LOCK\n global DATA\n start = current_time()\n msg = pickle.dumps(\"is_alive\")\n while True:\n\n if (current_time() - start) < 10:\n continue\n\n for every_one in DATA[\"neighbor\"]:\n remote = (\"127.0.0.1\", every_one[2])\n # out of the present list\n # decrease waiting time\n SOCKET1.settimeout(2)\n SOCKET1.sendto(msg, remote)\n try:\n recv_msg = pickle.loads(SOCKET1.recvfrom(512)[0])\n if recv_msg is \"yes\":\n with PRINT_LOCK:\n print(\"{} is alive\".format(every_one[0]))\n except (OSError, socket.timeout) as e_ra:\n # reset waiting time\n SOCKET1.settimeout(socket.getdefaulttimeout())\n with PRINT_LOCK:\n print(\"{} is dead : {}\".format(every_one[0], e_ra))\n index = DATA[\"neighbor\"].index(every_one)\n DATA[\"neighbor\"].pop(index)\n index = DATA[\"distance_vec\"].index(\n [every_one[0], every_one[1]])\n DATA[\"distance_vec\"].pop(index)\n bellman_ford(DATA[\"router_id\"], DATA[\"distance_vec\"])\n # ensuring the time diff is always round about 10\n start = current_time()", "def notify_waiting_planes(self, curr_time):\n if self.cnt_waiting_to_land > 0:\n assert self.cnt_waiting_to_land == len(self.q_waiting_to_land)\n self.cnt_runways_in_use += 1\n self.cnt_waiting_to_land -= 1\n pending_event = self.q_waiting_to_land.pop()\n assert pending_event.type == EventType.PLANE_ARRIVES\n assert curr_time >= pending_event.time\n self.total_waiting_time_for_landing = curr_time - pending_event.time\n nxt_event_tuple = (EventType.PLANE_LANDED, curr_time+conf.runway_time_to_land, self.id)\n self.sim.schedule(nxt_event_tuple)\n elif self.cnt_waiting_to_depart > 0:\n assert self.cnt_waiting_to_depart == len(self.q_waiting_to_depart)\n self.cnt_runways_in_use += 1\n self.cnt_waiting_to_depart -= 1\n pending_event = self.q_waiting_to_depart.pop()\n assert pending_event.type == EventType.READY_FOR_TAKEOFF\n assert curr_time >= pending_event.time\n self.total_waiting_time_for_departing = curr_time - pending_event.time\n nxt_event_tuple = (EventType.PLANE_DEPARTS, curr_time+conf.runway_time_to_takeoff, self.id)\n self.sim.schedule(nxt_event_tuple)", "def check_saved_acks():\n log('Looking through saved ACKS')\n if (BUFFER):\n for decoded in RECEIVED_ACKS:\n lowest_seq = min(BUFFER.keys())\n send_from_buffer(decoded['ack'], lowest_seq)\n # if we removed the last item from the buffer break out of loop\n if not BUFFER:\n break", "def isWaitingForUnblockVis(self):\n return self.__nextSetZoneDoneEvent is not None", "def broadcast_new_block(self):\n\t\tneighbors = 
self.blockchain.nodes\n\n\t\tfor node in neighbors:\n\t\t\tprint(f\"Requesting {node} to resolve\")\n\t\t\tresponse = requests.get(f'http://{node}/nodes/resolve')\n\t\t\t# if response.status_code != 200:\n\t\t\t# \traise ValueError(f'Node {node} responded bad status code')\n\t\t\t# print(f\"{node} resolve completed\")\n\n\t\tprint(\"Broadcast Complete\")", "def wait_for_data(receiver):\n\n while not receiver.available(pipes[1]):\n time.sleep(0.01)", "def Blocking(self) -> bool:", "def in_waiting(self) -> int:\n pass", "def check_bcr_catchup(self):\n logger.debug(f\"Checking if BlockRequests has caught up {len(BC.Default().BlockRequests)}\")\n\n # test, perhaps there's some race condition between slow startup and throttle sync, otherwise blocks will never go down\n for peer in self.Peers: # type: NeoNode\n peer.stop_block_loop(cancel=False)\n peer.stop_peerinfo_loop(cancel=False)\n peer.stop_header_loop(cancel=False)\n\n if len(BC.Default().BlockRequests) > 0:\n for peer in self.Peers:\n peer.keep_alive()\n peer.health_check(HEARTBEAT_BLOCKS)\n peer_bcr_len = len(peer.myblockrequests)\n # if a peer has cleared its queue then reset heartbeat status to avoid timing out when resuming from \"check_bcr\" if there's 1 or more really slow peer(s)\n if peer_bcr_len == 0:\n peer.start_outstanding_data_request[HEARTBEAT_BLOCKS] = 0\n\n print(f\"{peer.prefix} request count: {peer_bcr_len}\")\n if peer_bcr_len == 1:\n next_hash = BC.Default().GetHeaderHash(self.CurrentBlockheight + 1)\n print(f\"{peer.prefix} {peer.myblockrequests} {next_hash}\")\n else:\n # we're done catching up. Stop own loop and restart peers\n self.stop_check_bcr_loop()\n self.check_bcr_loop = None\n logger.debug(\"BlockRequests have caught up...resuming sync\")\n for peer in self.Peers:\n peer.ProtocolReady() # this starts all loops again\n # give a little bit of time between startup of peers\n time.sleep(2)", "def check_buffer(self, interest_name: str):\n if str(interest_name) in self.get_next_buffer:\n return self.get_next_buffer[str(interest_name)]\n else:\n return False", "def check_last_inv_announcement(self, inv):\n\n test_function = lambda: self.block_announced\n self.wait_until(test_function)\n\n with p2p_lock:\n compare_inv = []\n if \"inv\" in self.last_message:\n compare_inv = [x.hash for x in self.last_message[\"inv\"].inv]\n assert_equal(compare_inv, inv)\n self.block_announced = False\n self.last_message.pop(\"inv\", None)", "def has_event(self):\n return self.ser.inWaiting()", "def block_waiting( self ):\n while self.num_waiting > 0:\n time.sleep( 1 )", "def waiting_on(self, circuit):\n for (circid, d) in self.waiting_circuits:\n if circuit.id == circid:\n return True\n return False", "def is_waiting_to_be_assigned(self):\n if self.status == \"WAITING_TO_BE_ASSIGNED\":\n return True\n else:\n return False", "def _availability_message_received(self, msg: ReceiveMessage) -> None:\n self._available = msg.payload == \"online\"\n self.async_write_ha_state()", "def has_receiver(self):\n return self.balance < 0", "def is_waiting(self, is_waiting):\n \n self._is_waiting = is_waiting", "def Promote_from_waitlist(self, email):\n if email in self.Waitlist:\n self.Waitlist.remove(email)\n self.Add_attendee(email)\n else:\n raise MissingAddressException(email)", "def would_retransmit(self):\n return not self.my_pending_requests.is_empty()", "def isWaiting(self):\r\n return self.scheduler.isWaiting()", "def wait_for_presences(self, pres):\n self.received.add(pres['from'].bare)\n if len(self.received) >= 
len(self.client_roster.keys()):\n self.presences_received.set()\n else:\n self.presences_received.clear()", "def test_forwarder_solicitation_sent(self):\n waittime = 3.0\n self.autoconflayer.start_process()\n # Pass an interest to the autoconfig layer to trigger forwarder solicitation\n interest = Interest(Name('/foo/bar'))\n self.queue_from_higher.put([None, interest])\n\n # Catch all data the autoconfig layer sends downwards for 3 seconds\n deadline = datetime.utcnow() + timedelta(seconds=waittime)\n tolower = []\n while datetime.utcnow() < deadline:\n try:\n data = self.queue_to_lower.get(timeout=waittime/10)\n tolower.append(data)\n except queue.Empty:\n pass\n # Make sure the broadcast face was actually created and get its face id\n bcfid = self.faceidtable.get_or_create_faceid(AddressInfo(('127.255.255.255', 4242), 0))\n self.assertIsNotNone(bcfid)\n # Make sure a forwarder solicitation was sent downwards\n solictiation = Interest(Name('/autoconfig/forwarders'))\n self.assertIn([bcfid, solictiation], tolower)", "def race_condition():\n if len(allocated_pids) != len(set(allocated_pids)):\n return True\n else:\n return False", "def is_ready(self, want_send_index, latest_index):\n return latest_index - want_send_index >= self.p-1" ]
[ "0.5706097", "0.5541932", "0.5535595", "0.55307394", "0.5490348", "0.5457724", "0.5444099", "0.54275596", "0.542084", "0.54196537", "0.5404514", "0.53694296", "0.53550977", "0.5339095", "0.5308351", "0.5284784", "0.5277014", "0.52734613", "0.526288", "0.52274597", "0.5192736", "0.5163117", "0.5146191", "0.5135691", "0.5130787", "0.5127085", "0.5120771", "0.51136386", "0.51119846", "0.50938404" ]
0.614878
0
Test that a new DynamicCadence is created by form submission
def test_create_cadence_for_new_site(self): new_form_data = { 'site': 'cpt', 'cadence_frequency': 10, 'target_id': self.target.id } original_dc_count = DynamicCadence.objects.all().count() response = self.client.post(reverse('nres_calibrations:nres_submission'), data=new_form_data) new_dc_count = DynamicCadence.objects.all().count() # there should be one more DynamicCadence than before self.assertEqual(new_dc_count, original_dc_count+1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_submit_calibration_valid(self):\n new_form_data = {\n 'site': 'tlv',\n 'cadence_frequency': 2, # new cadence_frequency\n 'target_id': self.target.id\n }\n response = self.client.post(reverse('nres_calibrations:nres_submission'),\n data=new_form_data)\n # get updated cadence from db\n dc = DynamicCadence.objects.all().first()\n\n # the cadence_frequency should have changed from the original\n self.assertNotEqual(self.original_cadence_parameters['cadence_frequency'],\n dc.cadence_parameters['cadence_frequency'])\n # the cadence_frequency should be what we set in new_form_data\n self.assertEqual(new_form_data['cadence_frequency'],\n dc.cadence_parameters['cadence_frequency'])", "def test_make_form():", "def test_create_new_form(self):\n\n survey = self._create_test_survey()\n assert survey is not None\n\n new_survey = SurveyForm.get(self.test_survey_name)\n assert new_survey is not None\n assert new_survey.form == self.test_form", "def test_aid_creation_view(client, contributor, aid_form_data):\n\n form_url = reverse('aid_create_view')\n\n # Logged user, access granted\n client.force_login(contributor)\n res = client.get(form_url)\n assert res.status_code == 200\n\n aids = Aid.objects.filter(author=contributor)\n assert aids.count() == 0\n\n aid_form_data['name'] = 'Very unique title'\n res = client.post(form_url, data=aid_form_data)\n assert res.status_code == 302\n assert aids.count() == 1\n assert aids[0].name == 'Very unique title'\n assert aids[0].author == contributor", "def test_submit_form_using_valid_data():", "def test_individual_ACH(self):\n form_data = self.form_data()\n form_data['payment_type'] = 'DirectDebit'\n form = DonationPaymentForm(data=form_data)\n self.assertTrue(form.is_valid())", "def test_create_control_with_cads(self):\n factories.CustomAttributeDefinitionFactory(\n id=444,\n attribute_type=\"Text\",\n definition_type=\"control\"\n )\n control_body = self.prepare_control_request_body()\n cad_body = self.prepare_external_cad_body(\"Text\", \"Control\")\n cav_body = self.prepare_external_cav_body(123, \"Control\")\n control_body.update({\n \"custom_attribute_definitions\": [cad_body],\n \"custom_attribute_values\": [cav_body],\n })\n\n response = self.api.post(all_models.Control, data={\n \"control\": control_body,\n })\n\n self.assertEqual(response.status_code, 201)\n cav = all_models.CustomAttributeValue.query.one()\n self.assert_cav_fields(cav, cav_body)", "def test_make_form_hidden():", "def test_create_entry_route_has_form(testapp):\n response = testapp.get('/journal/new-entry', status=200)\n html = response.html\n assert len(html.find_all(\"form\")) == 1", "def test_client_risk_assessment_create(self):\n pass", "def test_form_submition_and_product_creation(user_company, client, authenticated_user):\n add_product_url = reverse('add-product')\n response = client.post(add_product_url, {\n 'name': 'Test_product_name',\n 'serial_number': 'XZ001', \n 'manufacturer': 'Test company',\n 'price_net': 415.26,\n 'description': fake.paragraph(),\n 'stock': 16\n })\n assert response.status_code == 302\n product = Product.objects.get(name='Test_product_name')\n assert response.url == reverse('product-detail',kwargs={'pk': product.pk}) \n assert product.user == authenticated_user\n assert product in Product.objects.all()", "def test_form_create(self):\n create = {\n 'title': 'Last Post (Final)',\n 'content': '### Goodbye!',\n 'is_published': False,\n }\n\n form = self.form_cls(create)\n print(form.errors)\n\n form.save()\n\n actual = 
models.Entry.objects.get(slug='last-post-final')\n self.assertEquals(actual.title, create['title'])\n self.assertEquals(actual.content.raw, create['content'])\n self.assertIsNone(actual.published_timestamp)", "def test_form(self):\n\t\tform = self.resp.context['form']\n\t\tself.assertIsInstance(form, PadawanForm)", "def test_form_entity(admin_client):\n entity_datas = factory.build(dict, FACTORY_CLASS=factories.EntityFormFactory)\n\n form = EntityForm(data=entity_datas)\n\n assert form.is_valid() == True\n\n form.save()\n\n assert Entity.objects.count() == 1", "def test_make_form_field():", "def test_perform_create(self):\n data = {\n 'name': 'Jane Joe',\n 'crm': 1234,\n 'email': '[email protected]',\n 'phone': '+55998754128'\n }\n response = self.unath_client.post(reverse('doctor-list'), data=data)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n response = self.client.post(reverse('doctor-list'), data=data)\n self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)", "def test_create_company_props_using_post(self):\n pass", "def test_qualifierForm(self):\n print 'Running %s ...' % getName()\n \n s1 = self.sequenceListingFixture.create_sequence_instance(self.sequenceListing)\n \n f1 = Feature.objects.create(sequence=s1, \n featureKey='modified_base', \n location='7')\n qf1 = QualifierForm(feature=f1, \n data={'qualifierName': 'note',\n 'qualifierValue':'test for value'})\n \n self.assertTrue(qf1.is_valid())\n self.assertEqual('note', qf1.cleaned_data['qualifierName']) \n \n qf2 = QualifierForm(feature=f1, \n data={'qualifierName': 'xxx',\n 'qualifierValue':'test for xxx value'})\n \n self.assertTrue(qf2.is_valid())", "def test_add_resource(self):\n self.login_editor() \n\n # invalid submission with missing required fields\n form_data = minimal_form_data()\n response = self.client.post('/resource/new', form_data )\n \n self.assertContains(response,'Por favor verifique os campos obrigatórios')\n self.assertContains(response,'Você precisa inserir pelo menos um descritor de assunto')\n self.assertContains(response,'Você precisa selecionar pelo menos uma área temática')\n\n # complete form_data with required fields and re-submit form\n form_data = complete_form_data()\n\n # test valid submission\n # after submit a valid content the view will redirect to /resources and list the objects\n # follow=True will allow check if the new data is on the list\n response = self.client.post('/resource/new', form_data, follow=True)\n\n self.assertRedirects(response, '/resources')\n self.assertContains(response, \"Recurso de teste\")\n\n # check if is set cooperative center code of user (editor = BR1.1)\n self.assertEquals(Resource.objects.all()[0].cooperative_center_code, \"BR1.1\")", "def test_creating_new_patient(self):\n\n form_data = {\"fname\": \"Jill\", \"lname\": \"Jones\", \n \"email\": \"[email protected]\", \"password\": \"password\", \n \"street-address\": \"33 Blue St\", \"city\": \"San Francisco\", \n \"state\": \"CA\", \"zipcode\": \"43223\", \"phone\": \"8884445555\",\n \"birthdate\":\"1984-05-05\"}\n\n patient_id = create_new_patient_account(form_data)\n\n self.assertEqual(3, patient_id)", "def test_creating_new_goal(self):\n\n form_data = {\"goal-body\": \"New goal body.\"}\n goal = create_new_goal(1, form_data)\n \n self.assertEqual(\"New goal body.\", goal.goal_body)", "def test_that_view_saves_data_if_form_valid(self):\n\n self.client.login(username='admin', password='admin')\n url = reverse(\"to_form\", args=str(self.my_instance.id))\n response 
= self.client.post(url, data={'name': 'Oleg', 'surname': 'Senyshyn', 'date': date(1995, 05, 03),\n 'email': '[email protected]', 'skype': 'sen9a1990'}, format='json')\n self.assertEqual('Data has been edit', json.loads(response.content)['ok'])\n my_instance = Contact.objects.first()\n self.assertEqual('Oleg', my_instance.name)\n self.assertEqual('Senyshyn', my_instance.surname)\n self.assertEqual(date(1995, 05, 03), my_instance.date)\n self.assertEqual('[email protected]', my_instance.email)\n self.assertEqual('sen9a1990', my_instance.skype)", "def test_request_form_successful(self):\n response = self.client.get(reverse(\n 'form', kwargs={'slug': self.agency.slug}))\n self.assertContains(response, self.agency.name)", "def test_create_patient(self):\n url = reverse('patient:patient-list')\n data = {\n \"birth_date\": \"1980-05-21\",\n \"patient_name\": \"testpatient2\",\n \"status\": \"A\",\n \"gender\": \"M\",\n \"patient_contact\" : \"+12342134523\"\n }\n response = self.client.post(url, data)\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)\n self.assertEqual(Patient.objects.count(), 2)", "def test_submit_for_endorsement(self):", "def create(self):\n\n if self.data.get('hydrogeology', None):\n self.form = self._make_form(\n self.well.hydrogeology_parameter if self.well.hydrogeology_parameter else HydrogeologyParameter()\n , HydrogeologyParameterForm, self.data['hydrogeology'])\n\n if self.data['hydrogeology'].get('pumping_test'):\n self.pumping_test_form = self._make_form(\n self.form.instance.pumping_test if self.form.instance.pumping_test else PumpingTest(),\n PumpingTestForm, self.data['hydrogeology']['pumping_test']\n )", "def acquisition_create(request, slug):\n #verifies if the company exists if not returns a 404 page\n company =get_object_or_404(Company,slug=slug)\n edit = validate_user_company_access_or_redirect(request,company)\n #if the request is GET presents empty form\n if request.method == 'GET':\n\n acquisition_form = AcquisitionForm()\n return render_to_response('acquisition_form.html', {'form': acquisition_form, 'company':company},\n context_instance=RequestContext(request))\n \n else:\n acquisition_form = AcquisitionForm(request.POST)\n #if is POST Validates the form is well filled and save it redirecting to the company page\n if acquisition_form.is_valid():\n aqf = acquisition_form.save(commit=False)\n aqf.company = company\n aqf.save()\n return HttpResponseRedirect('/company/'+str(slug))\n\n #if not well filled redirect to the original create and display error\n else:\n return render_to_response('acquisition_form.html', \n {'form': acquisition_form, 'form_errors': acquisition_form.errors, 'company':company},\n context_instance=RequestContext(request))", "def test_create_enrollment_term(self):\n # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.\n pass", "def test_post_entry_courses(self):\r\n # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.\r\n pass", "def test_saving(self):\n if self.form.is_valid():\n self.compound = self.form.save()\n self.assertIsNotNone(self.compound.id)" ]
[ "0.70193005", "0.6708431", "0.66458005", "0.6469265", "0.64464295", "0.6391832", "0.6252196", "0.62489915", "0.6191535", "0.6154543", "0.6122482", "0.6101032", "0.60874194", "0.6073931", "0.604443", "0.6040367", "0.6038633", "0.6028261", "0.6002224", "0.59992266", "0.5994009", "0.59853137", "0.59768105", "0.59365225", "0.591852", "0.5885697", "0.5870009", "0.5867127", "0.5864995", "0.58504283" ]
0.7397608
0
Test that the nres_home target list contains the NRES calibration targets
def test_nres_targets_list(self): response = self.client.get(reverse('nres_calibrations:nres_home')) self.assertContains(response, self.target.id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_which_targets():\n num_multi_targets = 0\n for which_targets_day in which_targets:\n # All inputs have a label\n assert np.all(which_targets_day.sum(axis=1) > 0)\n # No inputs have more than 3 targets\n assert np.all(which_targets_day.sum(axis=1) < 4)\n\n num_multi_targets += np.sum(which_targets_day.sum(axis=1) > 1)\n\n # Some days have multi-targets\n assert num_multi_targets > 0", "def test_verify_all_gates_have_valid_targets():\n nSpinOrbitals = input_json[\"constants\"][\"nSpinOrbitals\"]\n\n interaction_list = input_json[\"terms\"]\n\n for interaction in interaction_list:\n targets = interaction[\"targets\"]\n\n for orbital in targets:\n assert 0 <= orbital < nSpinOrbitals, \"Orbital target is out of range\"", "def test_evrpnl_instances(self) -> bool:\n\n for rte_info in self.results.values():\n frvcp_solver = solver.Solver(\n self.ref_instance, rte_info['route'], self.q_init)\n obj, _ = frvcp_solver.solve()\n self.assertAlmostEqual(obj, rte_info['obj'], 3)", "def test_where_targets():\n num_multi_targets = 0\n for where_targets_day in where_targets:\n # All inputs have a label\n assert np.all(where_targets_day.sum(axis=3).sum(axis=3).sum(axis=1).sum(axis=1) > 0)\n num_multi_targets += np.sum((where_targets_day.sum(axis=3).sum(axis=3).sum(axis=2) > 1).sum(axis=1) > 1)\n\n # Some days have multi-targets\n assert num_multi_targets > 0", "def test_targets(iris):\n assert iris.num_targets == 3\n np.testing.assert_array_equal(\n iris.target_names, [\"setosa\", \"versicolor\", \"virginica\"]\n )", "def test_when_targets():\n num_multi_targets = 0\n for when_targets_day in when_targets:\n # All inputs have a label\n assert np.all(when_targets_day.sum(axis=1).sum(axis=1) > 0)\n\n num_multi_targets += np.sum((when_targets_day.sum(axis=2) > 1).sum(axis=1) > 1)\n\n # Some days have multi-targets\n assert num_multi_targets > 0", "def targets(self) -> Optional[jnp.ndarray]:\n pass", "def matches(self, tgt_residence_dir: str) -> bool:", "def testgeotargets(self):\r\n dim_geotargets = self.data.geotargets.shape\r\n dim_geovalues = (len(self.data.geovalues[0]), )\r\n assert dim_geotargets == dim_geovalues", "def test_build_deeplab_with_resnet(self):\n cfg = get_cfg_defaults()\n cfg.SYSTEM.NUM_GPUS = self.num_gpu\n cfg.MODEL.BACKBONE = 'resnet101'\n cfg.MODEL.AUX_OUT = True\n\n c_i = cfg.MODEL.IN_PLANES\n c_o = cfg.MODEL.OUT_PLANES\n b, h, w = 2, 65, 65\n x = torch.rand(b, c_i, h, w).to(self.device)\n\n for arch in ['deeplabv3a', 'deeplabv3b', 'deeplabv3c']:\n cfg.MODEL.ARCHITECTURE = arch\n model = build_model(cfg, self.device).eval()\n y = model(x)\n self.assertTrue(isinstance(y, OrderedDict))\n self.assertTrue(\"aux\" in y.keys())\n for key in y.keys():\n self.assertTupleEqual(\n tuple(y[key].shape),\n (b, c_o, h, w))", "def run_automatic_tester():\n number_of_target_maps = len(os.listdir(TargetDetectionTesterSettings.TARGET_DETECTION_REPORT_JSON_FILE_SAVE_PATH))\n overall_true_positive_count = 0\n overall_false_positive_count = 0\n overall_target_count = 0\n\n for index_0 in range(number_of_target_maps):\n\n answer_sheet = json.load(open(os.path.join(TargetDetectionTesterSettings.TARGET_MAP_ANSWER_SHEET_PATH, str(index_0 + 1) + \".json\")))\n answer_list = []\n\n for index_1 in range(len(answer_sheet[\"targets\"])):\n answer_list.append((answer_sheet[\"targets\"][index_1][\"target_center_coordinates\"][0], answer_sheet[\"targets\"][index_1][\"target_center_coordinates\"][1]))\n overall_target_count += len(answer_list)\n\n target_detection_result = 
json.load(open(os.path.join(TargetDetectionTesterSettings.TARGET_DETECTION_REPORT_JSON_FILE_SAVE_PATH, str(index_0 + 1) + \".json\")))\n result_list = []\n\n for index_2 in range(len(target_detection_result[\"image_processing_results\"])):\n result_list.append((target_detection_result[\"image_processing_results\"][index_2][\"target_location\"][0] + (target_detection_result[\"image_processing_results\"][index_2][\"target_location\"][2] / 2), target_detection_result[\"image_processing_results\"][index_2][\"target_location\"][1] + (target_detection_result[\"image_processing_results\"][index_2][\"target_location\"][3] / 2)))\n\n current_true_positive_count = 0\n current_false_positive_count = 0\n banned_index_list = []\n\n for index_3 in range(len(answer_list)):\n true_positive_found = False\n\n for index_4 in range(len(result_list)):\n is_index_4_banned = False\n\n for index_5 in range(len(banned_index_list)):\n if (index_4 == banned_index_list[index_5]):\n is_index_4_banned = True\n\n if (is_index_4_banned == True):\n continue\n\n correct_target_center_x = answer_list[index_3][0]\n correct_target_center_y = answer_list[index_3][1]\n\n detected_target_center_x = result_list[index_4][0]\n detected_target_center_y = result_list[index_4][1]\n\n if ((abs(correct_target_center_x - detected_target_center_x) <= 20) and (abs(correct_target_center_y - detected_target_center_y) <= 20)):\n current_true_positive_count += 1\n banned_index_list.append(index_4)\n true_positive_found = True\n continue\n\n current_false_positive_count = len(result_list) - current_true_positive_count\n\n overall_true_positive_count += current_true_positive_count\n overall_false_positive_count += current_false_positive_count\n\n percentage = 100 * float(overall_true_positive_count) / (overall_target_count)\n\n TargetDetectionTesterLogger.log(\"--------------------------------------------------\")\n TargetDetectionTesterLogger.log(\"Total True Positive Count: \" + str(overall_true_positive_count))\n TargetDetectionTesterLogger.log(\"Total False Positive Count: \" + str(overall_false_positive_count))\n TargetDetectionTesterLogger.log(\"Percentage of Successfully Detected Targets: \" + str(percentage) + \"%\")\n TargetDetectionTesterLogger.log(\"--------------------------------------------------\")", "def _set_target_info(self, targets, host_grps, iqn):\n for host_grp in host_grps:\n port = host_grp['portId']\n gid = host_grp['hostGroupNumber']\n storage_iqn = host_grp['iscsiName']\n if self._is_host_iqn_registered_in_target(port, gid, iqn):\n targets['info'][port] = True\n targets['list'].append((port, gid))\n targets['iqns'][(port, gid)] = storage_iqn\n return True\n return False", "def test_verify_list_of_devices_in_my_network():", "def test_call_default_params(self):\r\n\r\n app = Usearch61ReferenceOtuPicker(\r\n params={'save_intermediate_files': False,\r\n 'output_dir':\r\n self.output_dir,\r\n 'remove_usearch_logs': True\r\n })\r\n\r\n obs_clusters, failures = app(self.tmp_seq_filepath_97perc_id,\r\n refseqs_fp=self.tmp_seq_filepath_97perc_id_rc)\r\n\r\n # Randomly selected match is used for equivalent matches, so need to\r\n # test for results without order affecting output\r\n expected_clusters =\\\r\n {'usearch_ecoli_seq': ['usearch_ecoli_seq'],\r\n 'usearch_ecoli_seq_1bp_change': ['usearch_ecoli_seq_1bp_change',\r\n 'usearch_ecoli_seq_2bp_change']}\r\n\r\n for result in obs_clusters:\r\n for cluster in obs_clusters[result]:\r\n self.assertTrue(cluster in expected_clusters[result])\r\n\r\n expected_failures = []\r\n 
self.assertEqual(failures, expected_failures)", "def test_input_target_different():\n for day in range(len(departure_cameras)):\n which_targets_day = which_targets[day]\n when_targets_day = when_targets[day]\n where_targets_day = where_targets[day]\n departure_cameras_day = departure_cameras[day]\n # Which\n for departure_camera, target in zip(departure_cameras_day, which_targets_day):\n entrance_cameras = np.argwhere(target == 1) + 1\n assert departure_camera not in entrance_cameras\n # When\n for departure_camera, when_target in zip(departure_cameras_day, when_targets_day):\n target = when_target.sum(axis=1) > 1\n entrance_cameras = np.argwhere(target == 1) + 1\n assert departure_camera not in entrance_cameras\n # Where\n for departure_camera, where_target in zip(departure_cameras_day, where_targets_day):\n target = where_target.sum(axis=3).sum(axis=2).sum(axis=1) > 1\n entrance_cameras = np.argwhere(target == 1) + 1\n assert departure_camera not in entrance_cameras", "def test_check_source_6(self):\n self.src1.lab_host = \"\"\n import_genome.check_source(self.src1, self.eval_flags,\n host_genus=\"Mycobacterium\")\n self.assertEqual(len(self.src1.evaluations), 3)", "def test_target_number_less_than_alp(self):\n alp = list(range(10))\n targets = generate_targets(alp, 5)\n self.assertEqual(len(targets), 5)\n self.assertEqual(len(targets), len(set(targets)))", "def test_all_same_target_cameras():\n for day_num in range(1, NUM_DAYS):\n which_targets_day = which_targets[day_num]\n when_targets_day = when_targets[day_num]\n where_targets_day = where_targets[day_num]\n\n assert len(which_targets_day) == len(when_targets_day) == len(where_targets_day)\n\n for idx in range(len(which_targets_day)):\n which_target_cameras = np.argwhere(which_targets_day[idx])\n when_target_cameras = np.argwhere(when_targets_day[idx].sum(axis=1))\n where_target_cameras = np.argwhere(where_targets_day[idx].sum(axis=3).sum(axis=2).sum(axis=1))\n print(which_target_cameras, when_target_cameras, where_target_cameras)\n assert np.all(which_target_cameras == when_target_cameras)\n assert np.all(which_target_cameras == where_target_cameras)\n assert np.all(when_target_cameras == where_target_cameras)", "def test_fixture_list_runs(tmp_sample_project):\n config_dir = tmp_sample_project\n output = subprocess.run([\"smif\", \"list\", \"-d\", config_dir], stdout=subprocess.PIPE)\n assert \"energy_water_cp_cr\" in str(output.stdout)\n assert \"energy_central\" in str(output.stdout)\n\n # Run energy_central and re-check output with optional flag for completed results\n subprocess.run([\"smif\", \"run\", \"energy_central\", \"-d\", config_dir], stdout=subprocess.PIPE)\n output = subprocess.run([\"smif\", \"list\", \"-c\", \"-d\", config_dir], stdout=subprocess.PIPE)\n assert \"energy_central *\" in str(output.stdout)", "def targets_placeholder(self):", "def build_rpn_targets(anchors, gt_class_ids, gt_boxes, config):\n # RPN Match: 1 = positive anchor, -1 = negative anchor, 0 = neutral\n rpn_match = np.zeros([anchors.shape[0]], dtype=np.int32)\n # RPN bounding boxes: [max anchors per image, (dy, dx, log(dh), log(dw))]\n rpn_bbox = np.zeros((config.RPN_TRAIN_ANCHORS_PER_IMAGE, 4), dtype=np.float32)\n\n # can happen if all items cropped out or image with no items\n if (gt_class_ids == 0).all():\n return rpn_match, rpn_bbox\n\n # Handle COCO crowds\n # A crowd box in COCO is a bounding box around several instances. Exclude\n # them from training. 
A crowd box is given a negative class ID.\n crowd_ix = np.where(gt_class_ids < 0)[0]\n if crowd_ix.shape[0] > 0:\n # Filter out crowds from ground truth class IDs and boxes\n non_crowd_ix = np.where(gt_class_ids > 0)[0]\n crowd_boxes = gt_boxes[crowd_ix]\n gt_class_ids = gt_class_ids[non_crowd_ix]\n gt_boxes = gt_boxes[non_crowd_ix]\n # Compute overlaps with crowd boxes [anchors, crowds]\n crowd_overlaps = box_utils.compute_overlaps(anchors, crowd_boxes)\n crowd_iou_max = np.amax(crowd_overlaps, axis=1)\n no_crowd_bool = (crowd_iou_max < 0.001)\n else:\n # All anchors don't intersect a crowd\n no_crowd_bool = np.ones([anchors.shape[0]], dtype=bool)\n\n # Compute overlaps [num_anchors, num_gt_boxes]\n overlaps = box_utils.compute_overlaps(from_numpy(anchors),\n from_numpy(gt_boxes))\n overlaps = overlaps.cpu().numpy()\n\n # Match anchors to GT Boxes\n # If an anchor overlaps a GT box with IoU >= 0.7 then it's positive.\n # If an anchor overlaps a GT box with IoU < 0.3 then it's negative.\n # Neutral anchors are those that don't match the conditions above,\n # and they don't influence the loss function.\n # However, don't keep any GT box unmatched (rare, but happens). Instead,\n # match it to the closest anchor (even if its max IoU is < 0.3).\n #\n\n # 1. Set negative anchors first. They get overwritten below if a GT box is\n # matched to them. Skip boxes in crowd areas.\n anchor_iou_argmax = np.argmax(overlaps, axis=1)\n anchor_iou_max = overlaps[np.arange(overlaps.shape[0]), anchor_iou_argmax]\n rpn_match[(anchor_iou_max < 0.3) & (no_crowd_bool)] = -1\n\n # 2. Set an anchor for each GT box (regardless of IoU value).\n gt_iou_argmax = np.argmax(overlaps, axis=0)\n rpn_match[gt_iou_argmax] = 1\n\n # 3. Set anchors with high overlap as positive.\n rpn_match[anchor_iou_max >= 0.7] = 1\n\n # Subsample to balance positive and negative anchors\n # Don't let positives be more than half the anchors\n ids = np.where(rpn_match == 1)[0]\n extra = len(ids) - (config.RPN_TRAIN_ANCHORS_PER_IMAGE // 2)\n if extra > 0:\n # Reset the extra ones to neutral\n ids = np.random.choice(ids, extra, replace=False)\n rpn_match[ids] = 0\n\n # Same for negative proposals\n ids = np.where(rpn_match == -1)[0]\n extra = len(ids) - (config.RPN_TRAIN_ANCHORS_PER_IMAGE -\n np.sum(rpn_match == 1))\n if extra > 0:\n # Rest the extra ones to neutral\n ids = np.random.choice(ids, extra, replace=False)\n rpn_match[ids] = 0\n\n # For positive anchors, compute shift and scale needed to transform them\n # to match the corresponding GT boxes.\n ids = np.where(rpn_match == 1)[0]\n boxes = box_utils.box_refinement(from_numpy(anchors[ids]),\n from_numpy(gt_boxes[anchor_iou_argmax[ids]]))\n boxes = boxes.cpu().numpy()\n rpn_bbox[:len(boxes)] = boxes\n\n # Normalize\n rpn_bbox /= np.array(config.RPN_BBOX_STD_DEV, dtype=np.float32)\n\n return rpn_match, rpn_bbox", "def test_target_greater_than_alp(self):\n alp = list(range(5))\n targets = generate_targets(alp, 10)\n self.assertEqual(len(targets), 10)\n\n counts = Counter(targets)\n\n for item in alp:\n self.assertEqual(counts[item], 2)", "def testoptdone(self):\r\n\r\n assert self.data.optdone\r\n\r\n targets = self.data.geotargets\r\n values = numpy.abs(self.data.geovalues[-1])\r\n\r\n target_e = targets[0]\r\n target_g = targets[1:3]\r\n target_x = targets[3:]\r\n value_e = values[0]\r\n value_g = values[1:3]\r\n value_x = values[3:]\r\n\r\n conv_all = all(values < targets)\r\n conv_e = value_e < 25*target_e and all(value_g < target_g) and all(value_x < target_x)\r\n conv_g = value_e < 
target_e and all(value_g < target_g/3.0) and all(value_x < target_x*3.0)\r\n conv_x = value_e < target_e and all(value_g < target_g*3.0) and all(value_x < target_x/3.0)\r\n converged = conv_all or conv_e or conv_g or conv_x\r\n assert converged", "def test_usearch61_params(self):\r\n\r\n app = Usearch61ReferenceOtuPicker(\r\n params={'save_intermediate_files': False,\r\n 'output_dir':\r\n self.output_dir,\r\n 'remove_usearch_logs': True,\r\n 'wordlength': 25,\r\n 'usearch61_maxrejects': 200,\r\n 'usearch61_maxaccepts': 5\r\n })\r\n\r\n obs_clusters, failures = app(self.tmp_seq_filepath_97perc_id,\r\n otu_prefix=\"test\", refseqs_fp=self.tmp_seq_filepath_97perc_id_rc)\r\n\r\n # won't get 2bp_change as reference, due to RC status\r\n expected_clusters = {'usearch_ecoli_seq': ['usearch_ecoli_seq'],\r\n 'usearch_ecoli_seq_1bp_change': ['usearch_ecoli_seq_1bp_change',\r\n 'usearch_ecoli_seq_2bp_change']}\r\n\r\n for result in obs_clusters:\r\n for cluster in obs_clusters[result]:\r\n self.assertTrue(cluster in expected_clusters[result])\r\n\r\n expected_failures = []\r\n self.assertEqual(failures, expected_failures)", "def GetTargets(self):\n return []", "def get_reg_targets(self,\n anchor_list,\n valid_flag_list,\n num_level_anchors_list,\n cls_score_list,\n bbox_pred_list,\n gt_bboxes_list,\n img_metas,\n gt_bboxes_ignore_list=None,\n gt_labels_list=None,\n label_channels=1,\n unmap_outputs=True):\n (all_anchors, all_labels, all_label_weights, all_bbox_targets,\n all_bbox_weights, pos_inds_list, neg_inds_list) = multi_apply(\n self._get_target_single,\n anchor_list,\n valid_flag_list,\n cls_score_list,\n bbox_pred_list,\n num_level_anchors_list,\n gt_bboxes_list,\n gt_bboxes_ignore_list,\n gt_labels_list,\n img_metas,\n label_channels=label_channels,\n unmap_outputs=unmap_outputs,\n is_cls_assigner=False)\n # no valid anchors\n if any([labels is None for labels in all_labels]):\n return None\n # sampled anchors of all images\n num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list])\n num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list])\n # split targets to a list w.r.t. 
multiple levels\n anchors_list = images_to_levels(all_anchors, num_level_anchors_list[0])\n labels_list = images_to_levels(all_labels, num_level_anchors_list[0])\n label_weights_list = images_to_levels(all_label_weights,\n num_level_anchors_list[0])\n bbox_targets_list = images_to_levels(all_bbox_targets,\n num_level_anchors_list[0])\n bbox_weights_list = images_to_levels(all_bbox_weights,\n num_level_anchors_list[0])\n return (anchors_list, labels_list, label_weights_list,\n bbox_targets_list, bbox_weights_list, num_total_pos,\n num_total_neg)", "def test_target_property():\n atom = ATOMClassifier(X_bin, y_bin, random_state=1)\n atom.run(\"LR\")\n assert atom.lr.target == atom.target", "def get_targets(self):\n\t\n\t\tself.target = []\n\t\ttarget_ins = self.settings['target']\n\t\tfor key in target_ins.keys():\n\t\t\tif key == 'raw':\n\t\t\t\tself.target.append(target_ins[key])\n\t\t\telif key == 'textfile':\n\t\t\t\twith open(target_ins[key],'r') as fp: targs = fp.readlines()\n\t\t\t\tfor t in targs:\n\t\t\t\t\tif re.match('^[a-z,A-Z,_].+\\s*:\\s*[A-Z].+$',t):\n\t\t\t\t\t\tself.target.append(tuple([i.strip() for i in t.split(':')]))\n\t\t\telif key == 'textfile_rna':\n\t\t\t\twith open(target_ins[key],'r') as fp: targs = fp.readlines()\n\t\t\t\tfor t in targs:\n\t\t\t\t\tif re.match('^[a-z,A-Z,0-9,_].+\\s*:\\s*[A-Z,a-z].+$',t):\n\t\t\t\t\t\tself.target.append(list([i.strip() for i in t.split(':')]))\n\t\t\t\t\t\trnaseq = self.target[-1][1]\n\t\t\t\t\t\t#---extra substitutions for later\n\t\t\t\t\t\tif 'regex_subs' in self.settings.keys():\n\t\t\t\t\t\t\tfor regex in self.settings['regex_subs']:\n\t\t\t\t\t\t\t\trnaseq = re.sub(regex[0],regex[1],rnaseq)\n\t\t\t\t\t\trnaseq = rnaseq.upper()\n\t\t\t\t\t\trnaseq = re.sub('T','U',rnaseq)\n\t\t\t\t\t\taminoseq = ''.join([dna_mapping[i] for i in [rnaseq[i:i+3] \n\t\t\t\t\t\t\tfor i in range(0,len(rnaseq),3)]])\n\t\t\t\t\t\tself.target[-1][1] = re.sub('T','U',aminoseq)\n\t\t\t\t\t\tself.target[-1] = tuple(self.target[-1])\n\t\t\telse: raise Exception('except: unclear target type')", "def testBuildDeviceListWithMaxTargets(self):\n\n self.inv._maxtargets = 2\n self.inv._devices = {\n 'first': self.Device(),\n 'second': self.Device(),\n 'third': self.Device()\n }\n self.inv._CmdFilter('targets', ['^f.*,second,^t.ird'])\n self.inv._CmdFilter('xtargets', [''])\n self.inv._device_list = None\n self.assertRaises(ValueError, self.inv._BuildDeviceList)", "def verify_dot_classification(\n target_regime: str,\n regimes: Sequence[int],\n) -> bool:\n\n good_found = False\n target = DOT_LABEL_MAPPING[target_regime][1]\n if target in regimes:\n good_found = True\n return good_found" ]
[ "0.58674073", "0.5667292", "0.5577143", "0.5518126", "0.54473", "0.54211724", "0.5411076", "0.5409779", "0.54086864", "0.53584373", "0.53370976", "0.531879", "0.52858996", "0.52696484", "0.52417976", "0.52324855", "0.5231467", "0.51975954", "0.51834714", "0.5156341", "0.5138093", "0.51037186", "0.5097687", "0.5084553", "0.5051509", "0.5046348", "0.5042755", "0.50406855", "0.5036669", "0.5024297" ]
0.7668798
0
Test that the NRES Cadence list contains the ObservationGroup name of the DynamicCadence
def test_nres_cadence_list(self): response = self.client.get(reverse('nres_calibrations:nres_home')) self.assertContains(response, self.observation_group_name) # should appear in History column
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_check_ca_groups(self, admin_dashboard):\n ca_tab = admin_dashboard.select_custom_attributes()\n expected_ca_groups_set = set(\n [objects.get_normal_form(item) for item in objects.ALL_CA_OBJS])\n actual_ca_groups_set = set(\n [item.text for item in ca_tab.get_items_list()])\n assert expected_ca_groups_set == actual_ca_groups_set", "def test_get_package_name_list():\n package_list = [warrior.Actions.CommonActions]\n result = kw_driver.get_package_name_list(package_list)\n check1 = 'warrior.Actions.CommonActions' in result\n assert check1 == True", "def test_get_ns_descriptors_nsdinfoid():\r\n sonata_nsd = SONATAClient.Nsd(HOST_URL)\r\n sonata_auth = SONATAClient.Auth(HOST_URL)\r\n _token = json.loads(sonata_auth.auth(username=USERNAME, password=PASSWORD))\r\n _token = json.loads(_token[\"data\"])\r\n _nsd_list = json.loads(sonata_nsd.get_ns_descriptors(\r\n token=_token[\"token\"][\"access_token\"]))\r\n _nsd_list = json.loads(_nsd_list[\"data\"])\r\n Helpers._upload_test_nsd(_token=_token[\"token\"][\"access_token\"])\r\n\r\n for _n in _nsd_list:\r\n if \"sonata-demo\" == _n['nsd']['name']:\r\n _nsd = _n['uuid']\r\n\r\n response = json.loads(sonata_nsd.get_ns_descriptors_nsdinfoid(\r\n token=_token[\"token\"][\"access_token\"], nsdinfoid=_nsd))\r\n\r\n Helpers._delete_test_nsd(_token=_token[\"token\"][\"access_token\"])\r\n if response[\"error\"]:\r\n return True\r\n else:\r\n return False", "def test_4():\n h = iotbx.pdb.input(source_info=None, lines=test_pdb_4).construct_hierarchy()\n asc = h.atom_selection_cache()\n ncs_inp = iotbx.ncs.input(\n hierarchy=h,\n params=ncs_pars.ncs_search)\n ncs_groups = ncs_inp.get_ncs_restraints_group_list()\n assert len(ncs_groups) == 1\n # group 1\n assert ncs_groups[0].master_iselection.all_eq(\n asc.selection(string = \"chain A\").iselection())\n g1_c = ncs_groups[0].copies\n assert len(g1_c)==1\n assert g1_c[0].iselection.all_eq(\n asc.selection(string = \"chain B\").iselection())", "def test_ncs_selection():\n pdb_inp = iotbx.pdb.input(source_info=None, lines=test_pdb_str_2)\n ncs_obj_phil = ncs.input(\n hierarchy=pdb_inp.construct_hierarchy())\n ncs_restraints_group_list = ncs_obj_phil.get_ncs_restraints_group_list()\n # ncs_restraints_group_list._show()\n refine_selection = flex.size_t(range(30))\n result = ncs_restraints_group_list.get_extended_ncs_selection(\n refine_selection=refine_selection)\n # print list(result)\n expected = [0, 1, 2, 3, 4, 5, 6, 7, 24, 25, 26, 27, 28, 29]\n assert list(result) == expected", "def test_custom_query_response_descriptor_octopus_server_web_api_actions_list_event_groups_responder_spaces(self):\n pass", "def test_2():\n h = iotbx.pdb.input(source_info=None, lines=test_pdb_2).construct_hierarchy()\n asc = h.atom_selection_cache()\n ncs_inp = iotbx.ncs.input(\n hierarchy=h,\n params=ncs_pars.ncs_search)\n ncs_groups = ncs_inp.get_ncs_restraints_group_list()\n assert len(ncs_groups) == 1\n # group 1\n assert ncs_groups[0].master_iselection.all_eq(\n asc.selection(string = \"chain A\").iselection())\n g1_c = ncs_groups[0].copies\n assert len(g1_c)==1\n assert g1_c[0].iselection.all_eq(\n asc.selection(string = \"chain B\").iselection())", "def test_directive(list_json_output):\n\n data = list_json_output('Adrian Test.nwod')\n assert data[0]['faketype'][0] == 'Vampire'", "def test_getCategoryValues(self):\n smpl_ids = ['PC.354', 'PC.355', 'PC.356', 'PC.481', 'PC.593', 'PC.607',\n 'PC.634', 'PC.635', 'PC.636']\n\n exp = ['Control','Control','Control','Control','Control','Fast'\n ,'Fast','Fast','Fast']\n obs 
= self.overview_map.getCategoryValues(smpl_ids, 'Treatment')\n self.assertEqual(obs, exp)", "def test_whole_group_iselection():\n phil_groups = ncs_group_master_phil.fetch(\n iotbx.phil.parse(phil_str)).extract()\n pdb_inp = iotbx.pdb.input(source_info=None, lines=test_pdb_str_2)\n ncs_obj = ncs.input(hierarchy=pdb_inp.construct_hierarchy(),\n ncs_phil_groups=phil_groups.ncs_group)\n nrgl = ncs_obj.get_ncs_restraints_group_list()\n assert len(nrgl) == nrgl.get_n_groups() == 2\n isel = nrgl[1].whole_group_iselection()\n expected = [4, 5, 6, 7, 12, 13, 14, 15, 20, 21, 22, 23]\n assert list(isel) == expected\n isel = nrgl[0].whole_group_iselection()\n expected = [0, 1, 2, 3, 8, 9, 10, 11, 16, 17, 18, 19]\n assert list(isel) == expected", "def test_client_risk_assessment_list(self):\n pass", "def _contains(test_name: str, container_list: Iterable[Container[str]]) -> bool:\n for container in container_list:\n if test_name in container:\n return True\n return False", "def test_CategoryNames(self):\r\n exp = [\"BarcodeSequence\", \"DOB\", \"Description\", \"Treatment\"]\r\n obs = self.overview_map.CategoryNames\r\n self.assertEqual(obs, exp)\r\n\r\n obs = self.no_metadata.CategoryNames\r\n self.assertEqual(obs, [])\r\n\r\n obs = self.empty_map.CategoryNames\r\n self.assertEqual(obs, [])", "def test_list_group(self):\n pass", "def test_3():\n h = iotbx.pdb.input(source_info=None, lines=test_pdb_3).construct_hierarchy()\n asc = h.atom_selection_cache()\n ncs_inp = iotbx.ncs.input(\n hierarchy=h,\n params=ncs_pars.ncs_search)\n ncs_groups = ncs_inp.get_ncs_restraints_group_list()\n assert len(ncs_groups) == 1\n # group 1\n assert ncs_groups[0].master_iselection.all_eq(\n asc.selection(string = \"chain A\").iselection())\n g1_c = ncs_groups[0].copies\n assert len(g1_c)==1\n assert g1_c[0].iselection.all_eq(\n asc.selection(string = \"chain B\").iselection())", "def test_getCategoryValues(self):\r\n smpl_ids = ['PC.354', 'PC.355', 'PC.356', 'PC.481', 'PC.593', 'PC.607',\r\n 'PC.634', 'PC.635', 'PC.636']\r\n\r\n exp = [\r\n 'Control',\r\n 'Control',\r\n 'Control',\r\n 'Control',\r\n 'Control',\r\n 'Fast',\r\n 'Fast',\r\n 'Fast',\r\n 'Fast']\r\n obs = self.overview_map.getCategoryValues(smpl_ids, 'Treatment')\r\n self.assertEqual(obs, exp)", "def test_CategoryNames(self):\n exp = [\"BarcodeSequence\", \"DOB\", \"Description\", \"Treatment\"]\n obs = self.overview_map.CategoryNames\n self.assertEqual(obs, exp)\n\n obs = self.no_metadata.CategoryNames\n self.assertEqual(obs, [])\n\n obs = self.empty_map.CategoryNames\n self.assertEqual(obs, [])", "def test_legal_names(self):\n adjectives = ['Awesome', 'Shiny', 'Impressive', 'Portable', 'Improved']\n nouns = ['Anvil', 'Catapult' 'Disguise' 'Mousetrap', '???']\n products = acme_report.generate_products()\n for prod in range(len(products)):\n prod_name = products[prod].name\n name_split = prod_name.split()\n self.assertIn(name_split[0], adjectives)\n self.assertIn(name_split[1], nouns)", "def check_specific_names(citelist: list, specific_names: list) -> None:\n unique_names = list()\n nameset = set()\n for c in citelist:\n if c.name != \".\":\n clean = clean_specific_name(c.name)\n if (not (clean in nameset)) and (clean != \"\"):\n nameset |= {clean}\n unique_names.append(clean)\n unique_names.sort()\n for n in unique_names:\n is_found = False\n for s in specific_names:\n if n in s.variations:\n is_found = True\n if not is_found:\n report_error(\"Missing specific name: \" + n)", "def test_get_mosaics_names(self):\n pass", "def 
test_Categories_getter(self):\r\n expected = ['Treatment', 'DOB']\r\n observed = self.cs_overview.Categories\r\n self.assertEqual(observed, expected)", "def test_check_existence(caplog: pytest.LogCaptureFixture) -> None:\n assert JournalAbbreviations.check_existence(\"Test Journal\") is False\n for scope, level, message in caplog.record_tuples:\n if (\n scope == \"cobib.utils.journal_abbreviations\"\n and level == 30\n and \"'Test Journal' was not found\" in message\n ):\n break\n else:\n assert False, \"Warning not raised upon missing journal!\"\n caplog.clear()\n config.utils.journal_abbreviations = [(\"Test Journal\", \"Test J.\")]\n assert JournalAbbreviations.check_existence(\"Test Journal\")\n assert JournalAbbreviations.check_existence(\"Test J.\")\n assert JournalAbbreviations.check_existence(\"Test J\")", "def test_client_nationlities_list(self):\n pass", "def test_ion_list(particle, min_charge, max_charge, expected_charge_numbers):\n particle = Particle(particle)\n ions = ionic_levels(particle, min_charge, max_charge)\n np.testing.assert_equal(ions.charge_number, expected_charge_numbers)\n assert ions[0].element == particle.element\n if particle.is_category(\"isotope\"):\n assert ions[0].isotope == particle.isotope", "def test_get_crime_description(self):\n\n # can use the sample crime reports data\n self.data = pd.read_csv(pkg_resources.resource_filename(resource_package, 'tests/testing_data/report_2_counts.csv'))\n\n self.descriptions = utils.populate_offence(self.data)\n\n self.assertTrue(isinstance(self.descriptions, pd.DataFrame))\n\n # check anti-social behaviour match is anti-social behaviour and is lowercase\n self.assertEqual(self.descriptions.Crime_description[0], 'anti-social behaviour')\n\n # check value is contained in list of crime descriptions for Violence and sexual offences\n self.assertTrue(self.descriptions.Crime_description[7] in ['abuse of children through sexual exploitation',\n 'abuse of position of trust of a sexual nature',\n 'assault with injury', 'assault with injury on a constable',\n 'assault with intent to cause serious harm',\n 'assault without injury', 'assault without injury on a constable',\n 'attempted murder', 'causing death by aggravated vehicle taking',\n 'causing death by careless driving under influence of drink or drugs',\n 'causing death by careless or inconsiderate driving',\n 'causing death by driving: unlicensed or disqualified or uninsured drivers',\n 'causing death or serious injury by dangerous driving',\n 'causing or allowing death of child or vulnerable person',\n 'causing sexual activity without consent', 'child abduction',\n 'conspiracy to murder', 'cruelty to children/young persons',\n 'endangering life', 'exposure and voyeurism', 'harassment',\n 'homicide', 'incest or familial sexual offences',\n 'intentional destruction of a viable unborn child', 'kidnapping',\n 'malicious communications', 'modern slavery',\n 'other miscellaneous sexual offences',\n 'procuring illegal abortion',\n 'racially or religiously aggravated assault with injury',\n 'racially or religiously aggravated assault without injury',\n 'racially or religiously aggravated harassment',\n 'rape of a female aged 16 and over',\n 'rape of a female child under 13',\n 'rape of a female child under 16',\n 'rape of a male aged 16 and over', 'rape of a male child under 13',\n 'rape of a male child under 16',\n 'sexual activity etc with a person with a mental disorder',\n 'sexual activity involving a child under 13',\n 'sexual activity involving child under 16',\n 'sexual 
assault on a female aged 13 and over',\n 'sexual assault on a female child under 13',\n 'sexual assault on a male aged 13 and over',\n 'sexual assault on a male child under 13', 'sexual grooming',\n 'stalking', 'threats to kill',\n 'trafficking for sexual exploitation', 'unnatural sexual offences']\n )\n\n #self.assertEqual(self.descriptions.columns.tolist(), ['UID','datetime','Crime_description','Crime_type','LSOA_code','Police_force'])", "def test_group(self):\n obs_group, obs_nogroup = group(self.seqstruct, 0.75)\n exp_group = {'cluster_337': ['cluster_343', 'cluster_345',\n 'cluster_339'],\n 'cluster_347': ['cluster_338'],\n 'cluster_344': ['cluster_340']}\n exp_nogroup = [self.seqstruct[6], self.seqstruct[8]]\n\n self.assertEqual(obs_group, exp_group)\n self.assertEqual(obs_nogroup, exp_nogroup)", "def inconsistent_entityName(self):\n a = [s for s in self.subjects if len([sa for sa in s.samples if sa.inconsistent_entityName]) > 0]\n if len(a) == 0:\n return None\n return a", "def test_list_namespaced_build(self):\n pass", "def test_legal_names(self):\n names = [prod.name for prod in generate_products()]\n sep = [(name.split()[0], name.split()[1]) for name in names]\n for name in sep:\n self.assertIn(name[0], ADJS)\n self.assertIn(name[1], NOUNS)", "def test_list_dependent_assets1(self):\n pass" ]
[ "0.51378435", "0.502584", "0.50022244", "0.49259534", "0.48699087", "0.48539323", "0.48403198", "0.4828131", "0.48271948", "0.48038292", "0.47950238", "0.47871473", "0.4785799", "0.4756255", "0.47553796", "0.47491336", "0.4746582", "0.4743038", "0.47408164", "0.47399324", "0.47332448", "0.47320622", "0.47199476", "0.4715288", "0.47109243", "0.46956924", "0.46950856", "0.46923882", "0.4679339", "0.46710026" ]
0.6860291
0
Gets user input such as the localhost and the similarity value for the comparison. Reads all the ring sugars in the given database and creates a data frame with aglycons, their coconut_id and taxonomy. The biological names are deleted and if there are two different taxonomies for an aglycon, the taxonomy is called 'double'. Passes the created data frame.
def complete_databank(port="localhost:27017",coconut_database="COCONUT2020-10",sweetcoconut_database="sweetcoconut"): client = MongoClient(port) db_complete = client[coconut_database] collection = db_complete.uniqueNaturalProduct db_complete_only_ring_sugars = pd.DataFrame(list(collection.find({"contains_ring_sugars": True}))) df_complete_tax = pd.DataFrame({"taxonomy": db_complete_only_ring_sugars["textTaxa"], "smiles": db_complete_only_ring_sugars["smiles"], "coconut_id": db_complete_only_ring_sugars["coconut_id"], "no_sugar_smiles": db_complete_only_ring_sugars["sugar_free_smiles"] }) complete_names = [] indexes = [] for i in range(len(df_complete_tax.taxonomy)): # some entries are empty lists # doubles if df_complete_tax.taxonomy[i] != [] and ("plants" in df_complete_tax.taxonomy[i] or "bacteria" in df_complete_tax.taxonomy[i] or "marine" in df_complete_tax.taxonomy[i] or "animals" in df_complete_tax.taxonomy[i] or "fungi" in df_complete_tax.taxonomy[i]): indexes.append(i) complete_names.append(df_complete_tax.taxonomy[i]) df_five_tax = df_complete_tax.loc[indexes[:]] df_tax_id = pd.DataFrame({"taxonomy": df_five_tax.taxonomy, "coconut_id": df_five_tax.coconut_id}) df_tax_id = df_tax_id.reset_index() taxonomies = ["plants","bacteria","fungi","marine","animals"] biology_names = [] for row in df_tax_id.taxonomy: for name in row: if name not in taxonomies: biology_names.append(name) for biology_name in biology_names: for row in df_tax_id.taxonomy: if biology_name in row: row.remove(biology_name) # **------------for tax prediction---------------** df_tax_id.to_pickle("output_data/for_predict_doubletriple.pkl") # **----------end tax prediction--------------** for ind, tax_list in enumerate(df_tax_id.taxonomy): if "marine" in tax_list: #print(ind, tax_list) if len(tax_list) > 1: df_tax_id.taxonomy[ind].remove("marine") else: df_tax_id.taxonomy[ind].append("no") df_tax_id.taxonomy[ind].remove("marine") #df_tax_id.taxonomy[ind] = ["no"] taxonomy_Double = [] taxonomy_Triple = [] taxonomy_single_entry = [] for ind, tax_list in enumerate(df_tax_id.taxonomy): #print(ind, tax_list) if len(tax_list) == 1: taxonomy_single_entry.append(tax_list[0]) elif len(tax_list) == 2: taxonomy_single_entry.append('double') # save original annotation taxonomyDouble1 = [] for tax in tax_list: taxonomyDouble1.append(tax) taxonomy_Double.append(taxonomyDouble1) elif len(tax_list) == 3: taxonomy_single_entry.append('triple') # save original annotation taxonomyTriple1 = [] for tax in tax_list: taxonomyTriple1.append(tax) taxonomy_Triple.append(taxonomyTriple1) else: print('Error: Too many taxonomies for one aglycon','\n','create a new elif statement in line 102 in tanimoto_index.py') df_tax_id_fromCompleteDatabank = pd.DataFrame({"taxonomy": taxonomy_single_entry, "coconut_id": df_five_tax.coconut_id}) sweetcoconut_databank(df_tax_id_fromCompleteDatabank,taxonomy_Double,sweetcoconut_database,port)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sweetcoconut_databank(df_tax_id_fromCompleteDatabank, taxonomy_Double,sweetcoconut_database,port):\n client2 = MongoClient(port)\n db_s = client2[sweetcoconut_database]\n collection2 = db_s.sweetNaturalProduct\n sweetnp = pd.DataFrame(list(collection2.find({\"contains_sugar\": True})))\n sweetnp_with_tax = pd.merge(sweetnp, df_tax_id_fromCompleteDatabank, how=\"left\", on=\"coconut_id\")\n df_cutout_sweetnp_with_tax = pd.DataFrame({\"coconut_id\": sweetnp_with_tax.coconut_id,\n \"taxonomy\": sweetnp_with_tax.taxonomy,\n \"all_deglycosilated_smiles\": sweetnp_with_tax.all_deglycosilated_smiles\n })\n df_cutout_no_nan = df_cutout_sweetnp_with_tax.fillna('no')\n df_cutout_explode = df_cutout_no_nan.explode(\"all_deglycosilated_smiles\",ignore_index=True)\n #display(df_cutout_explode)\n unique_deglycosilated_smiles = set(df_cutout_explode[\"all_deglycosilated_smiles\"])\n unique_deglycosilated_smiles.pop()\n df_NP = pd.DataFrame(unique_deglycosilated_smiles, columns=[\"deglycosilated_smiles\"])\n df_NP[\"coconut_id\"] = \"\"\n df_NP[\"taxonomy\"] = \"\"\n index = 0\n for mol in df_NP.deglycosilated_smiles:\n all_rows = df_cutout_explode[df_cutout_explode[\"all_deglycosilated_smiles\"]==mol]\n df_NP.coconut_id[index] = (all_rows.coconut_id.values)\n df_NP.taxonomy[index] = (all_rows.taxonomy.values)\n index += 1\n #display(df_NP)\n # **-----------------for tax prediction-------------------**\n df_NP.to_pickle(\"output_data/for_predict_multiple_tax.pkl\")\n # **----------------end tax prediction--------------------**\n index = 0\n for tax_list in df_NP.taxonomy:\n df_NP.taxonomy[index] = set(tax_list)\n if len(df_NP.taxonomy[index]) >= 2:\n if 'no' in df_NP.taxonomy[index]:\n df_NP.taxonomy[index].remove('no')\n index += 1\n #display(df_NP)\n bar_plot(df_NP)\n venn_diagram(df_NP,taxonomy_Double)\n aglycon_single_tax(df_NP)", "def gen_info(middle_db_input):\n\n query = \"\"\n query_add = \"\"\n # Queries will be build from several part: \"select\" statement, followed by\n # what should be selected and what tables to select from. 
The last part is\n # extended using the information from the webpage request.\n\n # Two types of query body:\n # first to be used for selection by any accession number;\n query_t0 = \"from locus l, cds c, accession a where a.locus_id=l.id and c.locus_id=l.id and \"\n # second to be used in all other queries, as only primary (latest) accession\n # number will be displayed on the webpage.\n query_t1 = query_t0 + \"a.latest_version='T' and \"\n\n # Columns to be selected from the respective tables.\n locus = \"l.whole_seq as locus_sequence, l.chr_location, l.locus_name, l.chr_name\"\n cds = \"c.gene_name, c.product_name, c.product_id, c.seq_location, c.whole_seq, c.translation, c.complement\"\n accession = \"a.accession_num\"\n # Columns to be selected, when the user selects a cytogenic location.\n cyt_loc_cds = \"c.gene_name, c.product_name, c.product_id\"\n cyt_loc = \"l.chr_location\"\n \n # Query construction \n search = middle_db_input[\"name\"]\n # Type 0 (gene identifier) and 1 (product name) contain information on a\n # single element, hence no information repeats would be present in the\n # output; therefore just one query is generated\n if middle_db_input[\"type\"]==0:\n query = \"select \" + accession + \", \" + locus + \", \" + cds + \" \" + query_t1 + \"c.gene_name\" + \"=\" + \"'\"+search+\"'\"\n elif middle_db_input[\"type\"]==1:\n query = \"select \" + accession + \", \" + locus + \", \" + cds + \" \" + query_t1 + \"c.product_name\" + \"=\" + \"'\"+search+\"'\"\n # Type 2 (locus accession number) and 3 (cytogenic location) could have\n # multiple elements - multiple CDS or multiple loci and CDS, respetively).\n # Using one query would lead to information repeats. Using two queries \n # avoids unnecesary repetitions.\n elif middle_db_input[\"type\"]==2:\n query = \"select \" + locus + \" \" + query_t0 + \"a.accession_num\" + \"=\" + \"'\"+search+\"'\"\n query_add = \"select \" + cds + \" \" + query_t0 + \"a.accession_num\" + \"=\" + \"'\"+search+\"'\"\n elif middle_db_input[\"type\"]==3:\n query = \"select \" + cyt_loc_cds + \" \" + query_t1+ \"l.chr_location\" + \" like \" + \"'\"+search+\"%\"+\"'\"\n query_add = \"select \" + accession + \", \" + cyt_loc + \" \" + query_t1+ \"l.chr_location\" + \" like \" + \"'\"+search+\"%\"+\"'\"\n elif middle_db_input[\"type\"]==4:\n search2 = middle_db_input[\"product_id\"]\n query = \"select \" + accession + \", \" + locus + \", \" + cds + \" \" + query_t1 + \"c.product_id\" + \"=\" + \"'\"+search2+\"'\"\n \n\n db = pymysql.connect(db='0a002', user='0a002', passwd='0a002', host='hope', port=3306, cursorclass = pymysql.cursors.DictCursor)\n \n # Creating output from cursors depending on the query type.\n db_middle_output = [middle_db_input]\n if middle_db_input[\"type\"]==0 or middle_db_input[\"type\"]==1:\n cursor = db.cursor()\n q = cursor.execute(query)\n data = cursor.fetchall()\n db_middle_output += data\n elif middle_db_input[\"type\"]==2 or middle_db_input[\"type\"]==3:\n cursor1 = db.cursor()\n cursor2 = db.cursor()\n q1 = cursor1.execute(query)\n q2 = cursor2.execute(query_add)\n unit1 = cursor1.fetchall()\n unit2 = cursor2.fetchall()\n db_middle_output =db_middle_output + list(unit1) + list(unit2)\n # output includes the input dictionary for convenience of the front end.\n elif middle_db_input[\"type\"]==4:\n cursor = db.cursor()\n q = cursor.execute(query)\n data = cursor.fetchall()\n db_middle_output += data\n \n\n return(db_middle_output)", "def calculate_fraction_rna(structure_1, structure_2, image_name_column, 
distance_threshold, granule_bool, granule_threshold, database_name):\n\n from psycopg2 import sql\n import psycopg2\n import pandas as pd\n import os\n\n conn = psycopg2.connect('postgresql://'+os.environ['POSTGRES_USER']+':'+os.environ['POSTGRES_PASSWORD']+'@'+\"db\"+':'+'5432'+'/'+database_name)\n cur = conn.cursor()\n\n distance_col = 'distance_to_' + structure_2\n\n # if user does not specify a distance threshold, use the largest distance from the db\n if distance_threshold == None:\n max_distance_query = sql.SQL(\"SELECT MAX({distance_col}) from {structure_1_table}\").format(structure_1_table=sql.Identifier(structure_1), distance_col = sql.Identifier(distance_col))\n cur.execute(max_distance_query)\n distance_threshold = cur.fetchall()[0][0]\n\n # calculate the total structure 1 fluoresence, grouped by image\n total_structure_1_sql = sql.SQL(\"\"\"SELECT {name},\n sum(total_intensity)\n FROM {structure_1_table}\n WHERE {distance_col} <= %(max_distance)s\n GROUP BY {name};\"\"\").format(\n structure_1_table = sql.Identifier(structure_1),\n distance_col =sql.Identifier(distance_col),\n name = sql.Identifier(image_name_column))\n\n conn = psycopg2.connect('postgresql://'+os.environ['POSTGRES_USER']+':'+os.environ['POSTGRES_PASSWORD']+'@'+\"db\"+':'+'5432'+'/'+database_name)\n cur = conn.cursor()\n\n cur.execute(total_structure_1_sql, {'max_distance':distance_threshold})\n total_structure_1_data = cur.fetchall()\n\n\n # get the sum of structure 1 fluoresence intensity at each distance from structure 2\n structure_1_distance_query = sql.SQL(\"\"\"SELECT {name},\n SUM(total_intensity),\n {distance_col}\n FROM {structure_1_table}\n WHERE {distance_col} <= %(max_distance)s\n GROUP BY {name}, {distance_col};\"\"\").format(\n structure_1_table = sql.Identifier(structure_1),\n distance_col =sql.Identifier(distance_col),\n name = sql.Identifier(image_name_column))\n\n cur.execute(structure_1_distance_query, {'max_distance':distance_threshold})\n structure_1_per_distance_data = cur.fetchall()\n\n\n # calculate the % in objects > the granule_threshold if user desires\n if granule_bool:\n structure_1_granule_query = sql.SQL(\"\"\"SELECT {name},\n SUM(total_intensity),\n {distance_col}\n FROM {structure_1_table}\n WHERE {distance_col} <= %(max_distance)s\n AND normalized_intensity >= %(granule_threshold)s\n GROUP BY {name}, {distance_col};\"\"\").format(\n structure_1_table = sql.Identifier(structure_1),\n distance_col =sql.Identifier(distance_col),\n name = sql.Identifier(image_name_column))\n\n cur.execute(structure_1_granule_query, {'max_distance':distance_threshold, 'granule_threshold': granule_threshold})\n structure_1_granule_data = cur.fetchall()\n\n # get the image data for the experiment\n image_data_query = \"\"\"SELECT * FROM images;\"\"\"\n cur.execute(image_data_query)\n image_data = cur.fetchall()\n\n # get the column names for the images table\n column_name_query = \"SELECT column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = 'images';\"\n cur.execute(column_name_query)\n column_names = cur.fetchall()\n\n\n cur.close()\n conn.close()\n\n column_names_ls = [name[0] for name in column_names]\n image_data_df = pd.DataFrame(image_data, columns = column_names_ls)\n\n\n total_structure_1_df = pd.DataFrame(total_structure_1_data, columns = ['name', 'structure_1_per_image'])\n\n structure_1_per_distance_df = pd.DataFrame(structure_1_per_distance_data, columns = ['name', 'structure_1_per_distance', 'distance'])\n\n structure_1_df = 
structure_1_per_distance_df.merge(total_structure_1_df, on = 'name', how='left')\n structure_1_df['percent_distance'] = structure_1_df['structure_1_per_distance'] / structure_1_df['structure_1_per_image'] * 100\n\n if granule_bool:\n structure_1_granule_df = pd.DataFrame(structure_1_granule_data, columns = ['name', 'granule_intensity', 'distance'])\n structure_1_granule_merge = structure_1_df.merge(structure_1_granule_df, on=['name', 'distance'], how='left')\n structure_1_granule_merge['percent_granule'] = structure_1_granule_merge['granule_intensity'] / structure_1_granule_merge['structure_1_per_image'] * 100\n structure_1_granule_merge.fillna(0, inplace=True)\n structure_1_distribution_df = structure_1_granule_merge.merge(image_data_df, on='name', how='left')\n\n return structure_1_distribution_df\n\n structure_1_distribution_df = structure_1_df.merge(image_data_df, on='name', how='left')\n\n return structure_1_distribution_df", "def taxon_query_no_auto():\n while True:\n query_type = input(\n '1.by id\\n'\n '2.by name\\n'\n )\n if query_type not in ['1', '2']:\n return\n con = sqlite3.connect('./data/DB')\n cur = con.cursor()\n if query_type == '1':\n taxon_id = input('taxon id:\\n')\n cur.execute('SELECT * FROM taxon WHERE Id = ?;', (taxon_id,))\n result = cur.fetchall()\n elif query_type == '2':\n name = input('scientific name:\\n')\n # cur.execute('select * from taxon where Name like ?;', ('%'+Name+'%', ))\n cur.execute('SELECT * FROM taxon WHERE Name = ?;', (name,))\n result = cur.fetchall()\n cur.execute('SELECT Id, Name FROM taxon;')\n result2 = cur.fetchall()\n cur.close()\n con.close()\n name_dict = {'': ''}\n for item in result2:\n name_dict[item[0]] = item[1]\n for i in result:\n taxon_id = i[0]\n name = i[1]\n rank = i[2]\n son = i[3].split(sep=' ')\n son_name = list()\n for item in son:\n son_name.append(name_dict[item])\n parent = i[4].split(sep=' ')\n parent_name = list()\n for item2 in parent:\n parent_name.append(name_dict[item2])\n greatson = i[5].split(sep=' ')\n greatson_name = list()\n for item3 in greatson:\n greatson_name.append(name_dict[item3])\n handle = open('out.txt', 'a', encoding='utf-8')\n handle.write('id : {0}\\n'.format(taxon_id))\n handle.write('name : {0}\\n'.format(name))\n handle.write('rank : {0}\\n'.format(rank))\n handle.write('parent : {0}\\n'.format('->'.join(parent_name)))\n handle.write('son : {0}\\n'.format(', '.join(son_name)))\n handle.write('greatson : {0}\\n\\n'.format(', '.join(greatson_name)))", "def cal_2rdgs(database_name, table_name, primary_key, group_name1, group_name2):\n\n ################################################################\n # conect to the database and return the query information\n ################################################################\n conn = connect_database(database_name)\n c = conn.cursor()\n\n sql1 = (\"select * from {0} where {1} = '{2}' \".format(table_name, primary_key, group_name1))\n sql2 = (\"select * from {0} where {1} = '{2}' \".format(table_name, primary_key, group_name2))\n\n c.execute(sql1)\n infolist1 = c.fetchall()\n\n c.execute(sql2)\n infolist2 = c.fetchall()\n\n # print(infolist1)\n # print(infolist2)\n\n #######################################################################\n # find the gene number of each disease group(group1_item_num,group2_item_num)\n ########################################################################\n group_1_item_num = get_icd_diseasegroup_geneinfo(database_name, table_name, primary_key, group_name1)[2]\n group_2_item_num = 
get_icd_diseasegroup_geneinfo(database_name, table_name, primary_key, group_name2)[2]\n # print(group_1_item_num)\n # print(group_2_item_num)\n # print(get_icd_diseasegroup_geneinfo(database_name, table_name, primary_key, group_name1)[1])\n # print(get_icd_diseasegroup_geneinfo(database_name, table_name, primary_key, group_name2)[1])\n ###############################################################\n # find the gene number of all the GDAs\n ###############################################################\n all_gene_num = get_all_gene_num(database_name, \"mesh_gene\")\n # print(all_gene_num)\n\n ###############################################################\n # bulid the random model of GROUP_NAME1, GROUP_NAME2, calculate C_random\n ###############################################################\n\n c_random = (group_1_item_num * group_2_item_num) / all_gene_num\n\n # print(c_random)\n\n ###############################################################\n # calculate the gene number of (GROUP_NAME1 intersection GROUP_NAME2), calculate C_real\n ###############################################################\n\n c_real = get_2diseasegroup_shared_gene(database_name, table_name, group_name1, group_name2, primary_key)[3]\n\n # print(c_real)\n\n ###############################################################\n # calculate sij = c_real/c_random\n ###############################################################\n\n s = float(c_real) / float(c_random)\n\n ###############################################################\n # normalization Si,j by min-max normalization method\n ###############################################################\n\n min_score = 0\n\n max_score = float(all_gene_num) / min(float(group_1_item_num), float(group_2_item_num))\n\n # print(max_score)\n\n sim = (s - min_score) / (max_score - min_score)\n\n sim = '%.5f' % sim\n\n conn.close()\n\n return sim", "def main():\n # Test code.\n #books = {}\n #books['hi'] = {'ffa': 1, 'gerjk': 40, 'eqrwf': 5}\n #books['bye'] = {'ffa': 30, 'grahooe': 4, 'ghuiewq': 2, 'fw': 10}\n #books['x'] = {'ffa': 40}\n #df = DataFrame(books).T.fillna(0)\n \n df = gen_user_artist_dataframe()\n ppmi_df = gen_ppmi_dataframe(df)\n ppmi_df = scale(ppmi_df)\n similarities = find_similarities(ppmi_df)\n record_similarities(similarities)", "def get_db_afvalcluster_info():\n db_df = get_dataframe(\"\"\"SELECT *\n FROM proj_afval_netwerk.afv_rel_nodes_poi\n \"\"\")\n db_df['woning'] = db_df['bk_afv_rel_nodes_poi'].str.split('~')\n db_df['cluster_x'] = db_df['woning'].apply(lambda x: x[0]).astype('float')\\\n .round(0).astype('int')\n db_df['cluster_y'] = db_df['woning'].apply(lambda x: x[1]).astype('float')\\\n .round(0).astype('int')\n db_df['type'] = db_df['woning'].apply(lambda x: x[2])\n db_df['bag'] = db_df['woning'].apply(lambda x: x[3])\n db_df = db_df.drop('woning', axis=1)\n return db_df", "def get_homology_lookup() -> pd.DataFrame:\n \n dataset = Dataset(name=CELEGANS_DATASET_NAME,\n host=HOST)\n \n attributes = [ENSEMBL_ID_ATTRIBUTE] + DROSO_HOMO_ATTRIBUTES\n df_lookup = dataset.query(attributes=attributes,\n filters=None)\n\n df_lookup.to_csv(LOOKUP_FILENAME, header=True, index=True)\n \n return df_lookup", "def restriction_sites(middle_db_input):\n\n # Two types of queries to retrieve the restriction site information and\n # locus sequence separately prevents information repeats.\n # Queries if the user is interested in the gene or protein.\n query1 = \"select s.start_position, s.end_position, e.recogn_seq from locus l, restriction_enzyme e, 
restriction_sites s, cds c where c.locus_id=l.id and s.locus_id=l.id and s.re_id=e.id and \"\n query2 = \"select c.gene_name, c.product_name, c.seq_location, l.whole_seq from locus l, cds c where c.locus_id=l.id and \"\n \n # Queries if the user is searching using the locus accession number.\n query3 = \"select s.start_position, s.end_position, e.recogn_seq from locus l, restriction_enzyme e, restriction_sites s, accession a where a.locus_id=l.id and s.locus_id=l.id and s.re_id=e.id and \"\n query4 = \"select l.whole_seq from locus l, accession a where a.locus_id=l.id and \"\n query5 = \"select c.gene_name, c.product_name, c.seq_location from locus l, cds c, accession a where a.locus_id=l.id and c.locus_id=l.id and \"\n\n\n # A list of enzymes present in the database.\n preset = [\"ecori\", \"bamhi\", \"bsumi\"]\n search = middle_db_input[\"name\"]\n enzyme = middle_db_input[\"enzyme\"]\n\n db = pymysql.connect(db='0a002', user='0a002', passwd='0a002', host='hope', port=3306, cursorclass = pymysql.cursors.DictCursor)\n \n # If the user searches for the restrictions sites produced by a preset enzyme:\n if middle_db_input[\"enzyme\"] in preset:\n # The function works, if the user searches with the gene identifier,\n # protein name or locus accession number. Cytogenetic location can be\n # shared by many loci, therefore it cannot be used to avoid confusion.\n if middle_db_input[\"type\"]==0:\n query_x = query1 + \"c.gene_name =\" + \"'\"+search+\"'\" + \"and e.name = \" + \"'\"+enzyme+\"'\"\n query_y = query2 + \"c.gene_name =\" + \"'\"+search+\"'\"\n elif middle_db_input[\"type\"]==1:\n query_x = query1 + \"c.product_name =\" + \"'\"+search+\"'\" + \"and e.name = \" + \"'\"+enzyme+\"'\"\n query_y = query2 + \"c.product_name =\" + \"'\"+search+\"'\"\n elif middle_db_input[\"type\"]==2:\n query_x = query3 + \"a.accession_num =\" + \"'\"+search+\"'\" + \"and e.name = \" + \"'\"+enzyme+\"'\"\n query_y = query4 + \"a.accession_num =\" + \"'\"+search+\"'\"\n query_z = query5 + \"a.accession_num =\" + \"'\"+search+\"'\"\n elif middle_db_input[\"type\"]==4:\n search2 = middle_db_input[\"product_id\"]\n query_x = query1 + \"c.product_id =\" + \"'\"+search2+\"'\" + \"and e.name = \" + \"'\"+enzyme+\"'\"\n query_y = query2 + \"c.product_id =\" + \"'\"+search2+\"'\"\n\n cursor1 = db.cursor()\n cursor2 = db.cursor()\n \n q_x = cursor1.execute(query_x)\n q_y = cursor2.execute(query_y)\n\n r_sites = cursor1.fetchall()\n sequence = cursor2.fetchall()\n\n if middle_db_input[\"type\"]==0 or middle_db_input[\"type\"]==1:\n db_middle_output = [middle_db_input] + list(r_sites) + list(sequence)\n if middle_db_input[\"type\"]==2:\n cursor3 = db.cursor()\n q_z = cursor3.execute(query_z)\n cds = cursor3.fetchall()\n db_middle_output = [middle_db_input] + list(r_sites) + list(sequence) + list(cds)\n if middle_db_input[\"type\"]==4:\n db_middle_output = [middle_db_input] + list(r_sites) + list(sequence)\n \n return(db_middle_output)\n \n # If the enzyme is not present in the database, the query will return only\n # the sequence for a corresponding locus for the middle layer to\n # calculate the restriction sites, given the recognition site.\n elif middle_db_input[\"enzyme\"] not in preset:\n if middle_db_input[\"enzyme\"]==\"\":\n return(middle_db_input)\n elif middle_db_input[\"enzyme\"]!=\"\":\n if middle_db_input[\"type\"]==0:\n query = query2 + \"c.gene_name =\" + \"'\"+search+\"'\"\n elif middle_db_input[\"type\"]==1:\n query = query2 + \"c.product_name =\" + \"'\"+search+\"'\"\n elif 
middle_db_input[\"type\"]==2:\n query = query4 + \"a.accession_num =\" + \"'\"+search+\"'\"\n query_extra = query5 + \"a.accession_num =\" + \"'\"+search+\"'\"\n elif middle_db_input[\"type\"]==4:\n search2 = middle_db_input[\"product_id\"]\n query = query2 + \"c.product_id =\" + \"'\"+search2+\"'\"\n\n cursor = db.cursor()\n \n q = cursor.execute(query)\n \n sequence = cursor.fetchall()\n\n if middle_db_input[\"type\"]==0 or middle_db_input[\"type\"]==1 or middle_db_input[\"type\"]==4:\n db_middle_output = [middle_db_input] + sequence\n if middle_db_input[\"type\"]==2:\n cursor_e = db.cursor()\n q_e = cursor_e.execute(query_extra)\n cds = cursor_e.fetchall()\n db_middle_output = [middle_db_input] + sequence + cds\n \n\n return(db_middle_output)", "def connect():\n conn = None\n try:\n # read connection parameters\n params = config()\n\n # connect to the PostgreSQL server\n print('Connecting to the PostgreSQL database...')\n conn = psycopg2.connect(**params)\n\n # create a cursor\n cur = conn.cursor()\n\n # Extract Database, this example is set for the Netherlands\n\n sql = \"COPY (with t as (select FIELD1, FIELD2, FIELD_N from _NAME_OF_DATABASE_ where _COLUMN_LONGITUDE_ between 3.29 and 7.31 and _COLUMN_LATITUDE_ between 50.61 AND 53.69) select json_agg(t) from t) TO STDOUT\"\n with open(\"/PATH/ONYOURLOCAL/MACHINE/completefile.txt\", \"w\") as file:\n cur.copy_expert(sql, file)\n\n print('Extracting information from the database...')\n\n # execute a statement\n print('PostgreSQL database version:')\n cur.execute('SELECT version()')\n\n # display the PostgreSQL database server version\n db_version = cur.fetchone()\n print(db_version)\n\n # close the communication with the PostgreSQL\n cur.close()\n except (Exception, psycopg2.DatabaseError) as error:\n print(error)\n finally:\n if conn is not None:\n conn.close()\n print('Database connection closed.')\n # adaptation of the postgrefile to a regular json format\n for line in fileinput.input(['completefile.txt'], inplace=True):\n print(line.replace('}, \\\\n {','}\\r{'), end='')\n for line in fileinput.input(['completefile.txt'], inplace=True):\n print(line.replace('\\\\\"',''), end='')\n for line in fileinput.input(['completefile.txt'], inplace=True):\n print(line.replace('\\\\',''), end='')\n for line in fileinput.input(['completefile.txt'], inplace=True):\n print(line.replace('[',''), end='')\n for line in fileinput.input(['completefile.txt'], inplace=True):\n print(line.replace(']',''), end='')", "def create_corpora_db(beagle=False, normalize=True):\n\n if beagle:\n name = 'beagle_mat'\n df = pd.read_csv(os.path.join(path, 'beagle.csv'), delimiter=',',\n skiprows=1, header=None)\n\n beagle_animals = df[0].apply(func=lambda x: x.upper()).values\n beagle_animals = beagle_animals.tolist()\n\n w2i = dict(zip(beagle_animals, np.arange(len(df.index))))\n i2w = list(beagle_animals)\n mat = df.drop(0, 1).as_matrix()\n np.fill_diagonal(mat, 0)\n else:\n name = 'ngram_mat'\n mat, i2w, w2i = fan.load_assoc_mat(path, 'google_normalized')\n\n animal_idx = []\n for animal in animal_words:\n if animal in i2w:\n animal_idx.append(i2w.index(animal))\n animal_idx = np.array(animal_idx)\n\n i2w_a = [i2w[a] for a in animal_idx]\n mat_a = mat[animal_idx][:, animal_idx]\n w2i_a = {w: i for i, w in enumerate(i2w_a)}\n\n if normalize:\n row_sums = mat_a.sum(axis=1, keepdims=True)\n mat_a = mat_a/row_sums\n mat_a = np.nan_to_num(mat_a)\n\n fan.save_assoc_mat(path, name, mat_a, i2w_a, w2i_a)\n print('Created', name, 'dataset in:', path)", "def 
query_household_consumption_index(db_path):\n\n try:\n db_connection = sqlite3.connect(db_path)\n except sqlite3.Error as e:\n print(e)\n return False\n\n table_name = \"Consumption_Index\"\n\n consumption_index = pd.read_sql_query('SELECT Trimestre, Valor FROM %s' % (table_name),db_connection)\n consumption_index.columns = ['Quarter', 'Consumption']\n consumption_index.set_index('Quarter', drop=True, inplace=True)\n\n\n db_connection.close()\n\n return consumption_index", "def analyse():\n health = db.execute(\"SELECT * FROM health WHERE user_ID = :id\", id=session[\"user_id\"])\n phone = db.execute(\"SELECT * FROM usage WHERE user_ID = :id\", id=session[\"user_id\"])\n hdata, pdata = []\n # get relevant values for health data then put values into two array\n for i in range(3):\n health1 = [health[i][h] for h in health[i]]\n hdata.append(health1[2:6])\n hdata = np.array(hdata)\n hdata[[0, 2]] = hdata[[2, 0]]\n print(np.mean(hdata, axis = 0))\n print(np.std(hdata, axis = 0))\n print(np.median(hdata, axis = 0))\n\n # get relevant values for phone data then put values into 2D array\n for i in range(3):\n phone1 = [phone[i][h] for h in phone[i]]\n pdata.append(phone1[2:7] + phone1[9:14])\n pdata = np.array(pdata)\n red = pdata[...,2]\n dep = []\n print(red)\n for i in hdata[...,0]:\n dep.append(int(i))\n dep = np.array(dep)\n print(dep)\n # pearson\n print (np.corrcoef(red, dep ))\n\n\n\n return render_template(\"stats.html\",)", "def main(example, df, possible_subjects):\n\n # Drop example tutor if in df\n try:\n df.drop(df[example['url_id']==df['url_id']].index.values, inplace=True)\n df.reset_index(drop=True, inplace=True)\n except:\n pass # Tutor is not in database\n\n # Check for graduate degree\n df = graduate_degrees(example, df)\n\n # Filter by Jaccard index and location.\n sim_tuts = subject_similarity(example, df, possible_subjects)\n sim_tuts = location_overlap(example, sim_tuts)\n\n # Relevant features for computing similarity\n rel_feats = ['avg_review_length',\\\n 'badge_hours',\\\n 'days_since_last_review',\\\n 'has_rating',\\\n 'number_of_ratings',\\\n 'number_of_reviews',\\\n 'profile_picture',\\\n 'rating',\\\n 'has_ivy_degree',\\\n 'has_background_check',\\\n 'response_time',\\\n 'avg_review_sentiment']\n\n # Convert similar tutors to matrix. 
Normalize features.\n # In parlance of machine learning, X are features, y is hourly rate.\n X = sim_tuts[rel_feats].as_matrix().astype(np.float)\n y = sim_tuts['hourly_rate'].as_matrix().astype(np.float)\n scaler = preprocessing.StandardScaler()\n X = scaler.fit_transform(X)\n\n X_example = example[rel_feats].as_matrix().astype(np.float)\n y_example = np.float(example['hourly_rate'])\n X_example = scaler.transform(X_example)\n\n # Get cosine similarity between example tutor and tutor db.\n cos_tuts = np.empty(X.shape[0])\n for i in xrange(X.shape[0]):\n cos_tuts[i] = cosine_similarity(X[i,:], X_example)\n\n # Sort by similarity\n sorted_idx = np.argsort(cos_tuts)[::-1]\n cos_tuts = cos_tuts[sorted_idx]\n y = y[sorted_idx]\n sim_tuts.reset_index(drop=True, inplace=True)\n\n # Only keep tutors with similarity > 0.5\n sim_tuts = sim_tuts.iloc[sorted_idx][cos_tuts>.5]\n\n # Calculate three outputted tutors.\n nearest_neighbor = sim_tuts.iloc[0] # Highest similarity\n max_tut = sim_tuts[sim_tuts['hourly_rate']==sim_tuts['hourly_rate'].max()].iloc[0]\n min_tut = sim_tuts[sim_tuts['hourly_rate']==sim_tuts['hourly_rate'].min()].iloc[0]\n\n scaling = scale_kde(y, cos_tuts)\n\n kde = gaussian_kde(y[cos_tuts>0], weights=cos_tuts[cos_tuts>0])\n x = np.linspace(0, y.max()+50, y.max()+50+1)\n\n pdf = kde(x)*scaling # Probability density function (estimated)\n\n img_io = make_kde_plot(x, pdf)\n\n return nearest_neighbor, max_tut, min_tut, img_io", "def query(request):\r\n\t# initialize form\r\n\tform = featureSelectionForm()\r\n\t# query output for displaying\r\n\toutput = []\r\n\t# preset variables\r\n\tstring_id_series = []\r\n\t# Mongo query to be defined\r\n\tquery = ''\r\n\tif request.method == 'POST' :\r\n\t\tform = featureSelectionForm(request.POST)\r\n\t\tif form.is_valid():\r\n\t\t\t# Presumed variables\r\n\t\t\t# collection_name = \"RNA_brain_gender-F_AD-vs-Control\"\r\n\t\t\t# feature_symbols_in_interest = ['APOE', 'BIN1', 'CLU']\r\n\t\t\t# way_to_choose_probe = \"fold change\"\r\n\t\t\t### Get variables from POST\r\n\t\t\tcollection_name = \"%s_%s_%s-%s_%s\" % (request.POST[\"dataType\"],\r\n\t\t\t\t\t\t\t\t\t\t\t\t\trequest.POST[\"tissue\"],\r\n\t\t\t\t\t\t\t\t\t\t\t\t\trequest.POST[\"category\"],\r\n\t\t\t\t\t\t\t\t\t\t\t\t\trequest.POST[\"group\"],\r\n\t\t\t\t\t\t\t\t\t\t\t\t\trequest.POST[\"comparison\"])\r\n\t\t\t\"\"\"\r\n\t\t\t\tWe should split POST[\"featureInput\"] here\r\n\t\t\t\"\"\"\r\n\t\t\t# import pdb; pdb.set_trace();\r\n\t\t\tfeature_symbols_in_interest = split_feature_input_to_list(request.POST[\"featureInput\"])\r\n\r\n\t\t\tway_to_choose_probe = request.POST[\"probeSelectionMethod\"]\r\n\r\n\t\t\ttest_stat_output_dict = {}\r\n\r\n\t\t\tall_datasets = test_stat_client.get_all_datasets(collection_name)\r\n\r\n\t\t\ttest_statistics = list(test_stat_client.get_all_for_this_category(collection_name))\r\n\r\n\t\t\ttest_statistics = pd.DataFrame(test_statistics)\r\n\r\n\t\t\tfor dataset in all_datasets:\r\n\t\t\t\t# Filter 1 - dataset accession & features in interest\r\n\t\t\t\tif request.POST[\"dataType\"] == \"RNA\":\r\n\t\t\t\t\tfilt_ind = (test_statistics['dataset_accession'] == dataset) & (test_statistics['symb'].isin(feature_symbols_in_interest))\r\n\t\t\t\telif request.POST[\"dataType\"] == \"protein\":\r\n\t\t\t\t\tsymbol_series = test_statistics['symb'].apply(extract_gene_symbol_from_protein_name)\r\n\t\t\t\t\tfilt_ind = (test_statistics['dataset_accession'] == dataset) & (symbol_series.isin(feature_symbols_in_interest))\r\n\r\n\t\t\t\ttest_stat_df = 
test_statistics[filt_ind]\r\n\r\n\t\t\t\t# Filter 2 - remove duplicates\r\n\t\t\t\t\"\"\"\r\n\t\t\t\t\tHere we provide options for user to choose how to select a probe when \r\n\t\t\t\t\tmultiple probes are corresponding to one feature\r\n\t\t\t\t\"\"\"\r\n\r\n\t\t\t\tif way_to_choose_probe == \"fold change\":\r\n\t\t\t\t\ttest_stat_df = filtered_duplicate_by(test_stat_df, by='fc', group_index=['symb'])\r\n\r\n\t\t\t\telif way_to_choose_probe == \"limma p value\" : \r\n\t\t\t\t\ttest_stat_df = filtered_duplicate_by(test_stat_df, by='lp', group_index=['symb'])\r\n\r\n\t\t\t\telif way_to_choose_probe == \"t test p value\" :\r\n\t\t\t\t\ttest_stat_df = filtered_duplicate_by(test_stat_df, by='tp', group_index=['symb'])\r\n\r\n\t\t\t\tif test_stat_df.empty:\r\n\t\t\t\t\tall_datasets.remove(dataset)\r\n\t\t\t\t\tcontinue\r\n\t\t\t\t# Split dataframe for stat table display and graph display\r\n\t\t\t\t# stat_table = test_stat_df.drop(['eval', 'dsl'], axis=1)\r\n\t\t\t\tstat_table = test_stat_df.drop(['eval'], axis=1)\r\n\t\t\t\t# import pdb; pdb.set_trace()\r\n\t\t\t\tstat_table['entrez_gene_id'] = stat_table.apply(from_symbol_to_entrez_gene_id, axis=1)\r\n\t\t\t\tstat_table['string_id'] = from_single_symbol_to_string_id(stat_table['symb'])\r\n\t\t\t\tstring_id_series = stat_table['string_id']\r\n\t\t\t\t\r\n\t\t\t\t# import pdb; pdb.set_trace()\r\n\r\n\t\t\t\tstat_table_output = stat_table.to_dict(outtype='records')\r\n\r\n\t\t\t\t# import pdb; pdb.set_trace()\r\n\t\t\t\ttest_stat_output_dict.update({dataset : stat_table_output})\r\n\r\n\r\n\r\n\t\t\treturn render(request, 'feature_stat.html',\r\n\t\t\t\t\t\t{\r\n\t\t\t\t\t\t\t'dataset_names' : all_datasets,\r\n\t\t\t\t\t\t\t'test_stat' : test_stat_output_dict,\r\n\t\t\t\t\t\t\t'way_to_choose_probe' : way_to_choose_probe,\r\n\t\t\t\t\t\t\t'datatype' : request.POST[\"dataType\"],\r\n\t\t\t\t\t\t\t'tissue' : request.POST[\"tissue\"],\r\n\t\t\t\t\t\t\t'category' : request.POST[\"category\"],\r\n\t\t\t\t\t\t\t'group' : request.POST[\"group\"],\r\n\t\t\t\t\t\t\t'comparison' : request.POST[\"comparison\"],\r\n\t\t\t\t\t\t\t'features' : generate_mulivariable_series_from_list(feature_symbols_in_interest),\r\n\t\t\t\t\t\t\t'string_url_id_component' : generate_string_id_get_query_from_list(string_id_series)\r\n\t\t\t\t\t\t})\r\n\telse:\t\t\r\n\t\treturn render(request, 'query.html', {\r\n\t\t\t'form' : form\r\n\t\t})", "def genome(args):\n set_quiet(args.quiet)\n\n # first, load taxonomic_assignments\n try:\n tax_assign = MultiLineageDB.load(args.taxonomy_csv,\n keep_full_identifiers=args.keep_full_identifiers,\n keep_identifier_versions=args.keep_identifier_versions,\n force=args.force, lins=args.lins)\n available_ranks = tax_assign.available_ranks\n\n lg_ranks=None\n all_lgs=None\n if args.lingroup:\n lingroups = tax_utils.read_lingroups(args.lingroup)\n lg_ranks, all_lgs = tax_utils.parse_lingroups(lingroups)\n\n except ValueError as exc:\n error(f\"ERROR: {str(exc)}\")\n sys.exit(-1)\n\n if not tax_assign:\n error(f'ERROR: No taxonomic assignments loaded from {\",\".join(args.taxonomy_csv)}. 
Exiting.')\n sys.exit(-1)\n\n if args.rank and args.rank not in available_ranks:\n error(f\"ERROR: No taxonomic information provided for rank {args.rank}: cannot classify at this rank\")\n sys.exit(-1)\n\n # get gather_csvs from args\n gather_csvs = tax_utils.collect_gather_csvs(args.gather_csv, from_file=args.from_file)\n\n try:\n query_gather_results = tax_utils.check_and_load_gather_csvs(gather_csvs, tax_assign, force=args.force,\n fail_on_missing_taxonomy=args.fail_on_missing_taxonomy,\n keep_full_identifiers=args.keep_full_identifiers,\n keep_identifier_versions = args.keep_identifier_versions,\n lins=args.lins)\n\n except ValueError as exc:\n error(f\"ERROR: {str(exc)}\")\n sys.exit(-1)\n\n if not query_gather_results:\n notify('No results for classification. Exiting.')\n sys.exit(-1)\n\n # for each queryResult, summarize at rank and classify according to thresholds, reporting any errors that occur.\n for queryResult in query_gather_results:\n try:\n queryResult.build_classification_result(rank=args.rank,\n ani_threshold=args.ani_threshold,\n containment_threshold=args.containment_threshold,\n lingroup_ranks=lg_ranks, lingroups=all_lgs)\n\n except ValueError as exc:\n error(f\"ERROR: {str(exc)}\")\n sys.exit(-1)\n\n # write outputs\n if \"csv_summary\" in args.output_format:\n summary_outfile, limit_float = make_outfile(args.output_base, \"classification\", output_dir=args.output_dir)\n with FileOutputCSV(summary_outfile) as out_fp:\n tax_utils.write_summary(query_gather_results, out_fp, limit_float_decimals=limit_float, classification=True)\n\n # write summarized output in human-readable format\n if \"human\" in args.output_format:\n summary_outfile, limit_float = make_outfile(args.output_base, \"human\", output_dir=args.output_dir)\n\n with FileOutput(summary_outfile) as out_fp:\n tax_utils.write_human_summary(query_gather_results, out_fp, args.rank or \"species\", classification=True)\n\n # The following require a single rank:\n # note: interactive krona can handle mult ranks, do we want to enable?\n if \"krona\" in args.output_format:\n krona_results, header = tax_utils.format_for_krona(query_gather_results=query_gather_results, rank=args.rank, classification=True)\n krona_outfile, limit_float = make_outfile(args.output_base, \"krona\", output_dir=args.output_dir)\n with FileOutputCSV(krona_outfile) as out_fp:\n tax_utils.write_krona(header, krona_results, out_fp)\n\n if \"lineage_csv\" in args.output_format:\n lineage_outfile, _ = make_outfile(args.output_base, \"lineage_csv\",\n output_dir=args.output_dir)\n lineage_results = []\n header = None\n for q_res in query_gather_results:\n if not header:\n ranks = list(q_res.ranks)\n if 'strain' in ranks: # maintains prior functionality.. 
but we could keep strain now, i think?\n ranks.remove('strain')\n header = [\"ident\", *ranks]\n lineageD = q_res.classification_result.as_lineage_dict(q_res.query_info, ranks)\n lineage_results.append(lineageD)\n with FileOutputCSV(lineage_outfile) as out_fp:\n tax_utils.write_output(header, lineage_results, out_fp)", "def query_db(self, samples: list):\n output_dict = {'chromosome': [],\n 'position': [],\n 'genotype': [],\n 'numerical_genotype': [],\n 'population_size': [],\n 'p_value': [],\n 'avg_allele_balance_difference': []}\n chromosomes = [str(x) for x in range(1, 24)]\n chromosomes.append('x')\n chromosomes.append('y')\n\n curs = self.connection.cursor()\n for c in chromosomes:\n # # remove this after development\n # if c != '1':\n # return\n get_template = \"\"\"SELECT position, genotype, allele_balances, numerical_genotype, samples FROM chromosome_{chr} WHERE samples LIKE '%{sample}%' \"\"\"\n other_sample_template = \"AND samples LIKE '%{sample}%'\"\n statement = get_template.format(sample=samples[0], chr=c)\n for samp in samples[1:]:\n statement += other_sample_template.format(sample=samp)\n curs.execute(statement)\n res = curs.fetchall()\n # for each row in the db with those samples listed\n for r in res:\n all_samp_names = r[4].split(';')\n all_samp_names = [x.replace('\\n','') for x in all_samp_names]\n all_abs = r[2].split(';')\n all_abs = [float(x) for x in all_abs]\n in_samples = [x for i, x in enumerate(all_abs) if all_samp_names[i] in samples]\n # get those in the population\n population = [x for i, x in enumerate(all_abs) if all_samp_names[i] not in samples]\n p_val = stats.ttest_ind(in_samples,population).pvalue\n diff = (sum(population)/len(population)) - (sum(in_samples)/len(in_samples))\n output_dict['chromosome'].append(c)\n output_dict['position'].append(r[0])\n output_dict['genotype'].append(r[1])\n output_dict['numerical_genotype'].append(r[2])\n output_dict['population_size'].append(len(population))\n output_dict['p_value'].append(p_val)\n output_dict['avg_allele_balance_difference'].append(diff)\n df = pd.DataFrame(output_dict)\n df.to_csv('query_results.tsv', sep='\\t')\n return df", "def get_data(kgs: int, credentials_db: dict) -> pd.DataFrame:\n main_query = f\"\"\"\n SELECT rectangle, radius\n FROM \n `kgs22_coordinates`\n WHERE \n `KGS22` = {kgs}\n \"\"\"\n\n conn = pymysql.connect(host=credentials_db['HOST'], user=credentials_db['USERNAME'],\n port=credentials_db['PORT'], passwd=credentials_db['PASSWORD'], db=credentials_db['DB'],\n connect_timeout=100000)\n df = pd.read_sql(main_query, conn)\n conn.close()\n return df", "def get_locus_info(database, query):\n # Connect to database.\n db_connexion = sqlite3.connect(database)\n cursor = db_connexion.cursor()\n\n # Query database.\n chrom_info = cursor.execute(query)\n\n # Convert to Pandas dataframe\n column_names = [column[0] for column in chrom_info.description]\n chrom_info_df = pd.DataFrame(chrom_info.fetchall(), columns=column_names)\n\n # Select only strands + and -\n chrom_info_df = chrom_info_df[ (chrom_info_df[\"Strand\"] == \"C\") | (chrom_info_df[\"Strand\"] == \"W\") ]\n # Remove \"2-micron\" plasmid\n chrom_info_df = chrom_info_df[ chrom_info_df[\"Chromosome\"] != \"2-micron\" ]\n # Convert chromosome id to int\n chrom_info_df[\"Chromosome\"] = chrom_info_df[\"Chromosome\"].astype(int)\n\n return chrom_info_df", "def main():\n\n # objects to access the CAGE DB. 
make them global because i'm lazy\n global db_conn, db_cursor\n\n with open(os.path.expandvars('$CAGE_SW/gui/config.json')) as f:\n config = json.load(f)\n\n db_conn = psycopg2.connect(\n host = config[\"cage_daq\"],\n dbname = config[\"db_name\"],\n user = config[\"db_user\"],\n password = config[\"password\"]\n )\n\n db_cursor = db_conn.cursor()\n\n # -- run analysis --\n # get_endpoints()\n # get_cooldown_data()\n # plot_cooldown_data()\n\n print('doing something')\n\n run = 66\n endpoints = ['cage_pressure', 'cage_coldPlate_temp', 'cage_topHat_temp']\n # t_earlier, t_later = cage_utils.getStartStop(run)\n t_earlier, t_later = '2020-10-08 20:35:50', '2020-10-09 01:35:41.367230415' #run 66\n df_file = f'./data/run{run}_SCDB.h5'\n\n # pandas_db_query(endpoints, t_earlier, t_later, df_file)\n\n\n\n plot_run_stability(run, df_file)\n\n\n # get_temp()", "def others():\n\n # Fuel cells ('FC') were not calculated and assigned heat rates\n # These sum up to 63 MW of capacity in WECC\n # Cleanest option is to remove them from the current runs:\n query = \"CREATE TABLE switch.fuel_cell_generation_plant_backup (like generation_plant);\\\n INSERT INTO fuel_cell_generation_plants\\\n (SELECT * FROM generation_plant WHERE gen_tech = 'FC');\\\n DELETE FROM generation_plant_scenario_member gpsm USING generation_plant gp\\\n WHERE gp.generation_plant_id = gpsm.generation_plant_id\\\n AND gen_tech = 'FC';\\\n DELETE FROM generation_plant_cost gpc USING generation_plant gp\\\n WHERE gp.generation_plant_id = gpc.generation_plant_id\\\n AND gen_tech = 'FC';\\\n DELETE FROM generation_plant_existing_and_planned gpep USING generation_plant gp\\\n WHERE gp.generation_plant_id = gpep.generation_plant_id\\\n AND gen_tech = 'FC';\\\n DELETE FROM generation_plant WHERE gen_tech = 'FC';\"\n connect_to_db_and_run_query(query,\n database='switch_wecc', user=user, password=password, quiet=True)\n\n # Others ('OT') also do not have an assigned heat rate. 
Assign an average.\n query = \"UPDATE generation_plant set full_load_heat_rate = \\\n (select avg(full_load_heat_rate)\\\n from generation_plant\\\n join generation_plant_scenario_member using (generation_plant_id)\\\n where energy_source = 'Gas'\\\n and generation_plant_scenario_id = 2)\\\n where gen_tech = 'OT' and energy_source = 'Gas'\"\n connect_to_db_and_run_query(query,\n database='switch_wecc', user=user, password=password, quiet=True)\n\n # Replace 'NaN's with 'Null's\n # (NaNs result from the aggregation process)\n cols_to_replace_nans = ['connect_cost_per_mw','hydro_efficiency','min_build_capacity',\n 'unit_size','storage_efficiency','store_to_release_ratio',\n 'min_load_fraction','startup_fuel','startup_om',\n 'ccs_capture_efficiency', 'ccs_energy_load']\n for col in cols_to_replace_nans:\n query = \"UPDATE generation_plant SET {c} = Null WHERE {c} = 'NaN'\".format(c=col)\n connect_to_db_and_run_query(query,\n database='switch_wecc', user=user, password=password, quiet=True)\n print \"Replaced NaNs in column '{}'\".format(col)\n\n # Replace Nulls with zeros where Switch expects a number\n query = \"UPDATE generation_plant\\\n SET connect_cost_per_mw = 0.0\\\n WHERE connect_cost_per_mw is Null\"\n connect_to_db_and_run_query(query,\n database='switch_wecc', user=user, password=password, quiet=True)", "def get_data_from_db(district):\n data_frames = get_athena_dataframes()\n df_result = copy.copy(data_frames['new_covid_case_summary'])\n df_result = df_result[df_result['district'] == district.lower()]\n df_result = df_result.loc[:, :'deceased']\n df_result.dropna(axis=0, how='any', inplace=True)\n df_result['date'] = pd.to_datetime(df_result['date'])\n df_result['date'] = df_result['date'].apply(lambda x: x.strftime(\"%-m/%-d/%y\"))\n df_result.rename({'total': 'confirmed', 'active': 'hospitalized'}, axis='columns', inplace=True)\n for col in df_result.columns:\n if col in ['hospitalized', 'confirmed', 'recovered', 'deceased']:\n df_result[col] = df_result[col].astype('int64')\n df_result = df_result.fillna(0)\n df_result = df_result.rename(columns={'date': 'index'})\n df_result.drop(['state', 'district'], axis=1, inplace=True)\n df_result = df_result.set_index('index').transpose().reset_index().rename(columns={'index': \"observation\"})\n df_result.insert(0, column=\"region_name\", value=district.lower().replace(',', ''))\n df_result.insert(1, column=\"region_type\", value=\"district\")\n\n return df_result", "def drugData(request):\n\tip = get_ip(request, right_most_proxy=True)\n\tIpAddressInformation.objects.create(ip_address=ip)\n\n\tif 'Uniprotkb' in request.GET and request.GET['Uniprotkb']:\n\t\tuniprotkb=request.GET['Uniprotkb']\n\t\tuniprotkb=uniprotkb.strip()\n\t\tdrugPresence=0\n\t\tdrugData=['NA']\n\t\tprotname=None\n\t\tgeneName=None\n\t\tes.indices.refresh(index=\"mrmassaydb-index\")\n\t\tquery={\"query\": {\n\t\t\t\"bool\": {\n\t\t\t\t\"must\": [\n\t\t\t\t\t{\"match\": {\"UniProtKB Accession\": uniprotkb}},\n\t\t\t\t\t{\"match\": {\"UniprotKb entry status\": \"Yes\"}}\n\t\t\t\t]\n\t\t\t}\n\t\t}\n\t\t}\n\n\t\tres=helpers.scan(client=es,scroll='2m',index=\"mrmassaydb-index\", doc_type=\"mrmassaydb-type\",query=query,request_timeout=30)\n\t\tfor hit in res:\n\t\t\tjdic=hit[\"_source\"]\n\t\t\tif jdic[\"UniProtKB Accession\"] ==uniprotkb.upper():\n\t\t\t\tjdic={str(tkey):force_text(tvalue) for tkey,tvalue in jdic.items()}\n\t\t\t\tprotname=str(jdic[\"Protein\"]).strip()\n\t\t\t\tgenename=str(jdic[\"Gene\"]).strip()\n\t\t\t\tjdic[\"sel\"] =\"\"\n\t\t\t\tjdic[\"Drug 
Bank\"]=jdic[\"Drug Bank\"].replace('<br>','|')\n\t\t\t\tjdic[\"Drug Bank\"]=jdic[\"Drug Bank\"].replace('</a>\"','</a>')\n\t\t\t\tjdic[\"Drug Bank\"]=jdic[\"Drug Bank\"].replace('href=\\\\','href=')\n\t\t\t\tjdic[\"Drug Bank\"]=jdic[\"Drug Bank\"].replace('\\\\\">','\">')\n\t\t\t\tif len(jdic[\"Drug Bank\"].strip()) > 0 and jdic[\"Drug Bank\"].strip() != \"NA\":\n\t\t\t\t\t#tempLst =resitem[\"Human Drug Bank\"].strip().split('|')\n\t\t\t\t\tdrugData=jdic[\"Drug Bank\"].strip().split('|')\n\t\t\t\t\tdrugPresence=len(drugData)\n\t\t\t\telse:\n\t\t\t\t\tdrugData=['NA']\n\t\t\t\tbreak\n\t\tes.indices.refresh(index=\"mrmassaydb-index\")\n\n\n\t\treturn render(request, 'drug.html',{'uniprotkb':uniprotkb,\\\n\t\t\t'drugPresence':drugPresence,'drugDataRaw':json.dumps(drugData),\\\n\t\t\t'protname':protname,'gene':genename})", "def load_data(data_links_list=(\n 'https://raw.githubusercontent.com/JanetMatsen/bacteriopop/master'\n '/raw_data/raw_data.csv',\n 'https://raw.githubusercontent.com/JanetMatsen/bacteriopop/master'\n '/raw_data/sample_meta_info.tsv')):\n\n # Reading data sets from the links provided.\n df1 = pd.read_csv(data_links_list[0],\n error_bad_lines=False)\n df2 = pd.read_csv(data_links_list[1],\n sep='\\t')\n df2 = df2.set_index(df2['project'])\n # fill the Nas id df1 as \". Makes the groupbys behave better.\n df1.fillna('', inplace=True)\n # repleace 'genus' = 'other' with an empty string to be consistent.\n df1.replace(to_replace='other', value='', inplace=True)\n # Removing duplicate columns.\n del df2['project']\n del df2['ID']\n df1 = df1.set_index(df1['project'])\n # Removing duplicate column.\n del df1['project']\n # Joining the two datasets.\n df = df1.join(df2)\n # Uniformity in non-capitalization of column names.\n df.rename(columns={'Kingdom': 'kingdom', 'Phylum': 'phylum',\n 'Class': 'class', 'Order': 'order',\n 'Family': 'family', 'Genus': 'genus',\n 'Length': 'length'}, inplace=True)\n df.index.names = ['sampleID']\n # Rearranging columns so that abundance is the last column.\n df = df[['kingdom',\t'phylum', 'class', 'order',\n 'family', 'genus', 'length', 'oxygen',\n 'replicate', 'week', 'abundance']]\n assert isinstance(df, pd.DataFrame)\n return df", "def main():\r\n # Read and clean Korean ECEC data\r\n data = read_csv('project_data.csv')\r\n \r\n # Make a dictionary of necessary variables for analysis\r\n new_dict = make_a_new_dict(data,['Family_Income','Gender', 'Interaction','Self_Regulation','School_Readiness'])\r\n \r\n # Change a dictionary into a dataframe\r\n new_dataframe = pd.DataFrame.from_dict(new_dict)\r\n \r\n # Count the number of boys and girls and get gender ratio\r\n number_of_boys_and_girls = count_gender(new_dict, 'Gender')\r\n number_of_boys = number_of_boys_and_girls[0]\r\n number_of_girls = number_of_boys_and_girls[1]\r\n ratio_of_girls = float(number_of_girls)/(number_of_boys + number_of_girls)\r\n print \"#######\"\r\n print \"Gender\"\r\n print \"#######\"\r\n print \"\"\r\n print \"The number of boys is \" + str(number_of_boys) + \" and the number of girls is \" + str(number_of_girls) + \\\r\n \". 
The ratio of girls is \" + str(ratio_of_girls) + \".\"\r\n print \"\"\r\n \r\n # Show frequencies, means, standard deviations, min/max numbers\r\n print \"#######################\"\r\n print \"Descriptive Statistics\"\r\n print \"#######################\"\r\n print \"\"\r\n print new_dataframe[['Family_Income','Interaction','Self_Regulation','School_Readiness']].describe()\r\n \r\n # Show a correlation matrix between variables\r\n print \"\"\r\n print \"#############\"\r\n print \"Correlations\"\r\n print \"#############\"\r\n print \"\"\r\n print new_dataframe[['Family_Income','Interaction','Self_Regulation','School_Readiness']].corr(method = 'pearson')\r\n \r\n # Check how significant correlations are \r\n print \"\"\r\n print \"#############################\"\r\n print \"Signficances of Correlations\"\r\n print \"#############################\"\r\n print \"\"\r\n get_pearsonr(new_dict, ['Family_Income','Interaction','Self_Regulation', 'School_Readiness'])\r\n\r\n # Show histgrams of variables\r\n bar_grapgh(new_dict, 'Family_Income')\r\n bar_grapgh(new_dict, 'Interaction')\r\n bar_grapgh(new_dict, 'Self_Regulation')\r\n bar_grapgh(new_dict, 'School_Readiness')\r\n\r\n # Show scatter_plots between variables \r\n scatter_plot(new_dict, 'Family_Income', 'Interaction')\r\n scatter_plot(new_dict, 'Family_Income', 'Self_Regulation')\r\n scatter_plot(new_dict, 'Family_Income', 'School_Readiness')\r\n scatter_plot(new_dict, 'Interaction','Self_Regulation')\r\n scatter_plot(new_dict, 'Interaction','School_Readiness')\r\n scatter_plot(new_dict, 'Self_Regulation', 'School_Readiness')\r\n \r\n # Regression and show a quantile-quantile plot\r\n reg = smf.ols('School_Readiness ~ Family_Income + Interaction + Self_Regulation', data = new_dataframe)\r\n res = reg.fit()\r\n plt.clf() # Deletes the previous plot \r\n sm.qqplot(res.resid).show()\r\n print \"\"\r\n print \"#############\"\r\n print \"Regression\"\r\n print \"#############\"\r\n print \"\"\r\n print(res.summary())", "def oreOutputQuery(inputs):\n\n # TODO: This is set up to work with only ONE crop/target category; it must be changed to allow for multiple...\n\n conn = sqlite3.connect(db)\n conn.row_factory = sqlite3.Row\n c = conn.cursor()\n\n category = inputs['exp_category']\n activities = inputs['exp_scenario']['Activity']\n app_eqips = inputs['exp_scenario']['AppEquip']\n app_types = inputs['exp_scenario']['AppType']\n formulations = inputs['exp_scenario']['Formulation']\n\n params = [category]\n\n def query_generator(exp_scenario, exp_scenario_list):\n\n query_string = exp_scenario + \" = ?\" # E.g. \"Activity = ?\"\n i = 0\n while i < len(exp_scenario_list):\n params.append(exp_scenario_list[i]) # append item to params[] to pass to SQL statement\n if i > 0: # skip 1st list item bc it is handle by default in the 'query_string' string definition\n query_string += \" OR \" + exp_scenario + \" = ?\" # E.g. \"Activity = ? OR Activity = ? OR Activity = ?\"\n i += 1\n return query_string\n\n sql_query = 'SELECT * FROM OccHandlerNC WHERE Category = ? 
' \\\n 'AND (' + query_generator('Activity', activities) + ') ' \\\n 'AND (' + query_generator('AppEquip', app_eqips) + ') ' \\\n 'AND (' + query_generator('AppType', app_types) + ') ' \\\n 'AND (' + query_generator('Formulation', formulations) +')'\n\n # TreatedVal, TreatedUnit, DUESLNoG, DUESLG, DUEDLG, DUESLGCRH, DUEDLGCRH, IUENoR, IUEPF5R, IUEPF10R, IUEEC\n\n\n\n c.execute(sql_query, tuple(params))\n\n query = c.fetchall()\n conn.close() # Close 'row_factory' connection\n\n return query", "def get_gastrointestinal_surgery_patients(con) -> pd.DataFrame:\n combined_diagnoses = get_reason_for_admission(con)\n gisurg = combined_diagnoses[\n (combined_diagnoses['surgical'] == 1)\n & (combined_diagnoses['diagnosis'].str.contains(re_gisurg, na=False, flags=re.IGNORECASE))\n ]\n\n return gisurg", "def metagenome(args):\n set_quiet(args.quiet)\n\n # first, load taxonomic_assignments\n try:\n tax_assign = MultiLineageDB.load(args.taxonomy_csv,\n keep_full_identifiers=args.keep_full_identifiers,\n keep_identifier_versions=args.keep_identifier_versions,\n force=args.force, lins=args.lins)\n available_ranks = tax_assign.available_ranks\n except ValueError as exc:\n error(f\"ERROR: {str(exc)}\")\n sys.exit(-1)\n\n if not tax_assign:\n error(f'ERROR: No taxonomic assignments loaded from {\",\".join(args.taxonomy_csv)}. Exiting.')\n sys.exit(-1)\n\n if args.rank and args.rank not in available_ranks:\n error(f\"ERROR: No taxonomic information provided for rank {args.rank}: cannot summarize at this rank\")\n sys.exit(-1)\n\n # next, collect and load gather results\n gather_csvs = tax_utils.collect_gather_csvs(args.gather_csv, from_file= args.from_file)\n try:\n query_gather_results = tax_utils.check_and_load_gather_csvs(gather_csvs, tax_assign, force=args.force,\n fail_on_missing_taxonomy=args.fail_on_missing_taxonomy,\n keep_full_identifiers=args.keep_full_identifiers,\n keep_identifier_versions = args.keep_identifier_versions,\n lins=args.lins,\n )\n except ValueError as exc:\n error(f\"ERROR: {str(exc)}\")\n sys.exit(-1)\n\n if not query_gather_results:\n notify('No gather results loaded. Exiting.')\n sys.exit(-1)\n\n single_query_output_formats = ['csv_summary', 'kreport']\n desired_single_outputs = []\n if len(query_gather_results) > 1: # working with multiple queries\n desired_single_outputs = [x for x in args.output_format if x in single_query_output_formats]\n if desired_single_outputs:\n notify(f\"WARNING: found results for multiple gather queries. 
Can only output multi-query result formats: skipping {', '.join(desired_single_outputs)}\")\n # remove single query outputs from output format\n args.output_format = [x for x in args.output_format if x not in single_query_output_formats]\n if not args.output_format: # or do we want to insert `human` here so we always report something?\n error(f\"ERROR: No output formats remaining.\")\n sys.exit(-1)\n\n # for each queryResult, actually summarize at rank, reporting any errors that occur.\n for queryResult in query_gather_results:\n try:\n queryResult.build_summarized_result()\n except ValueError as exc:\n error(f\"ERROR: {str(exc)}\")\n sys.exit(-1)\n\n # write summarized output in human-readable format\n if \"lineage_summary\" in args.output_format:\n lineage_outfile, limit_float = make_outfile(args.output_base, \"lineage_summary\", output_dir=args.output_dir)\n\n ## aggregate by lineage by query\n lineageD, query_names= tax_utils.aggregate_by_lineage_at_rank(query_gather_results=query_gather_results,\n rank=args.rank, by_query=True)\n\n with FileOutputCSV(lineage_outfile) as out_fp:\n tax_utils.write_lineage_sample_frac(query_names, lineageD, out_fp, sep='\\t')\n\n # write summarized --> krona output tsv\n if \"krona\" in args.output_format:\n krona_results, header = tax_utils.format_for_krona(query_gather_results, rank=args.rank)\n\n krona_outfile, limit_float = make_outfile(args.output_base, \"krona\", output_dir=args.output_dir)\n with FileOutputCSV(krona_outfile) as out_fp:\n tax_utils.write_krona(header, krona_results, out_fp)\n\n if \"human\" in args.output_format:\n summary_outfile, limit_float = make_outfile(args.output_base, \"human\", output_dir=args.output_dir)\n\n with FileOutput(summary_outfile) as out_fp:\n human_display_rank = args.rank or \"species\"\n if args.lins and not args.rank:\n human_display_rank = query_gather_results[0].ranks[-1] # lowest rank\n\n tax_utils.write_human_summary(query_gather_results, out_fp, human_display_rank)\n\n # write summarized output csv\n single_query_results = query_gather_results[0]\n if \"csv_summary\" in args.output_format:\n summary_outfile, limit_float = make_outfile(args.output_base, \"csv_summary\", output_dir=args.output_dir)\n with FileOutputCSV(summary_outfile) as out_fp:\n tax_utils.write_summary(query_gather_results, out_fp, limit_float_decimals=limit_float)\n\n # write summarized --> kreport output tsv\n if \"kreport\" in args.output_format:\n kreport_outfile, limit_float = make_outfile(args.output_base, \"kreport\", output_dir=args.output_dir)\n\n with FileOutputCSV(kreport_outfile) as out_fp:\n header, kreport_results = single_query_results.make_kreport_results()\n tax_utils.write_output(header, kreport_results, out_fp, sep=\"\\t\", write_header=False)\n\n # write summarized --> LINgroup output tsv\n if \"lingroup\" in args.output_format:\n try:\n lingroups = tax_utils.read_lingroups(args.lingroup)\n except ValueError as exc:\n error(f\"ERROR: {str(exc)}\")\n sys.exit(-1)\n\n lingroupfile, limit_float = make_outfile(args.output_base, \"lingroup\", output_dir=args.output_dir)\n\n with FileOutputCSV(lingroupfile) as out_fp:\n header, lgreport_results = single_query_results.make_lingroup_results(LINgroupsD = lingroups)\n tax_utils.write_output(header, lgreport_results, out_fp, sep=\"\\t\", write_header=True)\n\n # write cami bioboxes format\n if \"bioboxes\" in args.output_format:\n bbfile, limit_float = make_outfile(args.output_base, \"bioboxes\", output_dir=args.output_dir)\n\n with FileOutputCSV(bbfile) as out_fp:\n 
header_lines, bb_results = single_query_results.make_cami_bioboxes()\n tax_utils.write_bioboxes(header_lines, bb_results, out_fp, sep=\"\\t\")", "def meta(request):\r\n\r\n\tdatatype = request.GET.get('datatype', 'RNA')\r\n\ttissue = request.GET.get('tissue', 'brain')\r\n\tcategory = request.GET.get('category', 'region')\r\n\tgroup = request.GET.get('group', 'PFC')\r\n\tcomparison = request.GET.get('comparison', 'AD-vs-Control')\r\n\tfeature_symbols_in_interest = request.GET.get('features', '').split(' ')\r\n\tcollection_name = \"%s_%s_%s-%s_%s\" % (datatype,\r\n\t\t\t\t\t\t\t\t\t\t\ttissue,\r\n\t\t\t\t\t\t\t\t\t\t\tcategory,\r\n\t\t\t\t\t\t\t\t\t\t\tgroup,\r\n\t\t\t\t\t\t\t\t\t\t\tcomparison)\r\n\t\"\"\"\r\n\t\tWe should split POST[\"featureInput\"] here\r\n\t\"\"\"\r\n\t# import pdb; pdb.set_trace();\r\n\t# feature_symbols_in_interest = split_feature_input_to_list(request.POST[\"featureInput\"])\r\n\r\n\t# way_to_choose_probe = request.GET.get('way_to_choose_probe', 'fold change')\r\n\r\n\trecords = list(meta_stat_client.get_all_records(collection_name))\r\n\trecords_all_teststat = list(test_stat_client.get_all_records(collection_name))\r\n\trecord_sample_count = test_stat_client.get_all_sample_count(collection_name)\r\n\trecord_disease_state = test_stat_client.get_all_disease_state(collection_name)\r\n\trecord_all_datasets = test_stat_client.get_all_datasets(collection_name)\r\n\r\n\t# Turn into dataframe\r\n\trecords = pd.DataFrame(records)\r\n\trecords_all_teststat = pd.DataFrame(records_all_teststat)\r\n\r\n\t# Select features in interest\r\n\tfilt_ind = records['symb'].isin(feature_symbols_in_interest)\r\n\trecords_queried = records[filt_ind]\r\n\r\n\trecords_queried['entrez_gene_id'] = records_queried.apply(from_symbol_to_entrez_gene_id, axis=1)\r\n\t\t\t\t\r\n\r\n\t# Select top 10 by meta-p-value\r\n\trecords_top_10 = records.sort('pval', ascending=True).iloc[0:9, ]\r\n\t# records_top_10 = records.sort('pval', ascending=True)\r\n\r\n\trecords_top_10['entrez_gene_id'] = records_top_10.apply(from_symbol_to_entrez_gene_id, axis=1)\r\n\t\r\n\t# Get meta info for this collection\r\n\tmeta_df = pd.DataFrame(record_sample_count, index=['sample_count'], columns=record_all_datasets)\r\n\tmeta_df = pd.DataFrame.transpose(meta_df)\r\n\tmeta_df['state_1_count'] = pd.Series(record_disease_state).apply(sum)\r\n\tmeta_df['state_0_count'] = meta_df['sample_count'] - meta_df['state_1_count']\r\n\tsymbol_count_list = []\r\n\t\r\n\tfor dataset in record_all_datasets:\r\n\t\tsymb_count = records_all_teststat[records_all_teststat['dataset_accession'] == dataset].shape[0]\r\n\t\tsymbol_count_list.append(symb_count)\r\n\r\n\tmeta_df['feature_count'] = symbol_count_list\r\n\tmeta_df['dataset_accession'] = meta_df.index\r\n\t# import pdb;pdb.set_trace();\r\n\r\n\t# Add string ids\r\n\trecords_queried['string_id'] = from_single_symbol_to_string_id(records_queried['symb'])\r\n\t# import pdb;pdb.set_trace();\r\n\t# records_top_10['string_id'] = from_single_symbol_to_string_id(records_top_10['symb'])\r\n\t# import pdb;pdb.set_trace();\r\n\t\r\n\tunion_feature_count = records.shape[0]\r\n\tcheck_all_presence = lambda x : '?' 
not in x['eff']\r\n\t\r\n\tintersect_feature_count = sum(records.apply(check_all_presence, axis=1))\r\n\t\r\n\r\n\t# Output queried records to dictionary\r\n\tmeta_stat_queried = records_queried.to_dict(outtype='records')\r\n\tmeta_stat_top_10 = records_top_10.to_dict(outtype='records')\r\n\tmeta_info = meta_df.to_dict(outtype='records')\r\n\t# import pdb;pdb.set_trace();\r\n\r\n\treturn render(request, 'meta_stat.html',\r\n\t\t\t\t{\r\n\t\t\t\t\t'meta_stat_queried' : meta_stat_queried,\r\n\t\t\t\t\t'meta_stat_top_10' : meta_stat_top_10,\r\n\t\t\t\t\t'collection_name' : collection_name,\r\n\t\t\t\t\t'feature_string' : '+'.join(feature_symbols_in_interest),\r\n\t\t\t\t\t'meta_info' : meta_info,\r\n\t\t\t\t\t'union_feature_count' : union_feature_count,\r\n\t\t\t\t\t'intersect_feature_count' : intersect_feature_count\r\n\t\t\t\t})", "def druggable_interactors(self) -> pd.DataFrame:\n cols = ['drug', 'capsule_interactor_type', 'capsule_interactor_bel', 'interactor_bel', 'interactor_type',\n 'interactor_name', 'relation_type', 'target_bel', 'target_symbol', 'target_type',\n 'pmid', 'pmc', 'rel_pub_year', 'rel_rid', 'drug_rel_rid', 'drug_rel_actions',\n 'drugbank_id', 'chembl_id', 'pubchem_id', 'pmod_type']\n\n if self.node_type != 'protein' or not self.pmods:\n pure_query = PURE_DRUGGABLE_QUERY.replace('MATCH {{class:pmod, as:pmod{}}}<-has__pmod-', 'MATCH')\n capsule_query_1 = CAPSULE_DRUGGABLE_MODIFIED.replace(\n 'MATCH {{class:pmod, as:pmod{}}}<-has__pmod-', 'MATCH'\n )\n capsule_query_2 = CAPSULE_DRUGGABLE_COMPLEX.replace(\n 'MATCH {{class:pmod, as:pmod{}}}<-has__pmod-', 'MATCH'\n )\n formatted_pure_sql = pure_query.format(self.node_type, self.node_name, self.edge_filters)\n formatted_capsule_sql_1 = capsule_query_1.format(self.node_type, self.node_name, self.edge_filters)\n formatted_capsule_sql_2 = capsule_query_2.format(self.node_type, self.node_name, self.edge_filters)\n\n else:\n if 'all' in self.pmods:\n pmod_condition = \"type != '' or name != ''\"\n else:\n pmod_condition = f\"type in {self.pmods}\"\n\n pmod_string = f\", WHERE:({pmod_condition})\"\n\n if 'pho' in self.pmods or 'all' in self.pmods:\n pmod_string = pmod_string.replace(\")\", \" OR name like '%phosphorylat%')\")\n\n # Drugs only for humans so only check one\n formatted_pure_sql = PURE_DRUGGABLE_QUERY.format(\n pmod_string, self.node_type, self.node_name, self.edge_filters\n )\n formatted_capsule_sql_1 = CAPSULE_DRUGGABLE_MODIFIED.format(\n pmod_string, self.node_type, self.node_name, self.edge_filters\n )\n formatted_capsule_sql_2 = CAPSULE_DRUGGABLE_COMPLEX.format(\n pmod_string, self.node_type, self.node_name, self.edge_filters\n )\n\n logger.info(\"Querying database...\")\n\n pure_results = self.__query_graphstore(sql=formatted_pure_sql)\n capsule_results_1 = self.__query_graphstore(sql=formatted_capsule_sql_1)\n capsule_results_2 = self.__query_graphstore(sql=formatted_capsule_sql_2)\n\n results_check = [x is not None for x in (pure_results, capsule_results_1, capsule_results_2)]\n\n if any(results_check): # Need only 1 not to be None\n df_concat = pd.concat(\n [pure_results, capsule_results_1, capsule_results_2], axis=0\n ).reindex(columns=cols)\n self.results = df_concat[cols]\n self.results[\"drug_rel_actions\"] = self.results[\"drug_rel_actions\"].str.join(\"|\")\n self.results = self.results.drop_duplicates()\n\n return self.results" ]
[ "0.58430374", "0.5564186", "0.54623526", "0.5419678", "0.5353768", "0.5343155", "0.5265147", "0.5263287", "0.5258324", "0.52520347", "0.5207315", "0.51895225", "0.51715577", "0.51707095", "0.5153093", "0.51300794", "0.50969017", "0.5094018", "0.5078065", "0.50657225", "0.5031439", "0.5028692", "0.499789", "0.49805295", "0.49779415", "0.4959068", "0.4952335", "0.49505338", "0.4947818", "0.4925719" ]
0.652284
0
Gets the created data frame with the three columns aglycon, coconut id and taxonomy. Merges the sweetcoconut data frame with the incoming data frame via their coconut id. Replaces nan with "no" if there isn't a known taxonomy in the row for the aglycon. Summarizes all aglycons with the same structure into one row. Writes a .pkl file where all aglycons with the same smiles code are in the same row. Passes on a data frame with all the same aglycon structures in one row.
def sweetcoconut_databank(df_tax_id_fromCompleteDatabank, taxonomy_Double,sweetcoconut_database,port): client2 = MongoClient(port) db_s = client2[sweetcoconut_database] collection2 = db_s.sweetNaturalProduct sweetnp = pd.DataFrame(list(collection2.find({"contains_sugar": True}))) sweetnp_with_tax = pd.merge(sweetnp, df_tax_id_fromCompleteDatabank, how="left", on="coconut_id") df_cutout_sweetnp_with_tax = pd.DataFrame({"coconut_id": sweetnp_with_tax.coconut_id, "taxonomy": sweetnp_with_tax.taxonomy, "all_deglycosilated_smiles": sweetnp_with_tax.all_deglycosilated_smiles }) df_cutout_no_nan = df_cutout_sweetnp_with_tax.fillna('no') df_cutout_explode = df_cutout_no_nan.explode("all_deglycosilated_smiles",ignore_index=True) #display(df_cutout_explode) unique_deglycosilated_smiles = set(df_cutout_explode["all_deglycosilated_smiles"]) unique_deglycosilated_smiles.pop() df_NP = pd.DataFrame(unique_deglycosilated_smiles, columns=["deglycosilated_smiles"]) df_NP["coconut_id"] = "" df_NP["taxonomy"] = "" index = 0 for mol in df_NP.deglycosilated_smiles: all_rows = df_cutout_explode[df_cutout_explode["all_deglycosilated_smiles"]==mol] df_NP.coconut_id[index] = (all_rows.coconut_id.values) df_NP.taxonomy[index] = (all_rows.taxonomy.values) index += 1 #display(df_NP) # **-----------------for tax prediction-------------------** df_NP.to_pickle("output_data/for_predict_multiple_tax.pkl") # **----------------end tax prediction--------------------** index = 0 for tax_list in df_NP.taxonomy: df_NP.taxonomy[index] = set(tax_list) if len(df_NP.taxonomy[index]) >= 2: if 'no' in df_NP.taxonomy[index]: df_NP.taxonomy[index].remove('no') index += 1 #display(df_NP) bar_plot(df_NP) venn_diagram(df_NP,taxonomy_Double) aglycon_single_tax(df_NP)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reduce_and_save():\n ### Get the signature information\n sig_info = pd.read_csv(join(FILE_PATH, \"GSE92742_Broad_LINCS_sig_info.txt\"), sep=\"\\t\")\n ### Columns are:\n ### Index([u'sig_id', u'pert_id', u'pert_iname', u'pert_type', u'cell_id',\n ### u'pert_dose', u'pert_dose_unit', u'pert_idose', u'pert_time',\n ### u'pert_time_unit', u'pert_itime', u'distil_id'],\n ### dtype='object')\n\n ### Filter for signature ids for small molecule pertubagens\n small_mol_sigs = sig_info['sig_id'][sig_info['pert_type'] == \"trt_cp\"]\n ### Results in 205034 signatures\n\n ### Read in the gene info\n gene_info = pd.read_csv(join(FILE_PATH, \"GSE92742_Broad_LINCS_gene_info.txt\"), sep='\\t')\n ### Index([u'pr_gene_id', u'pr_gene_symbol', u'pr_gene_title', u'pr_is_lm',\n ### u'pr_is_bing'],\n ### dtype='object')\n\n landmark_gene_ids = gene_info['pr_gene_id'][gene_info['pr_is_lm'] == 1] #Filters for directly measured transcripts\n ### Results in the 978 landmark pr_gene_ids\n\n ### LOAD in the main file filtering the columns so that only the small molecules signatures are loaded and the\n ### rows such that only the landmark genes are loaded into their custom gctoo container type\n relevent_sigs_gctoo = parse(join(FILE_PATH, \"GSE92742_Broad_LINCS_Level5_COMPZ.MODZ_n473647x12328.gctx\"),\n cid=small_mol_sigs, rid=landmark_gene_ids)\n # print small_mol_sigs.data_df.shape\n ### Should write an intermediate file with dimensions (978, 205034)\n write_gctx.write(relevent_sigs_gctoo, join(FILE_PATH, \"lm_sm_aggz\"))", "def datamerge_run(filenames, outdir, roc_cols):\n \n tbldict = collect2dict(filenames, outdir)\n tbldict = cogtest_manipulation(tbldict, roc_cols)\n \n #count number of tps\n tbldict['cogtests'] = count_instances(tbldict['cogtests'], 'codeb', 'NP_NoTps')\n tbldict['aseg_change'] = count_instances(tbldict['aseg_change'], 'codea', 'MRI_NoTps')\n tbldict['pibparams'] = count_instances(tbldict['pibparams'], 'codea', 'PIB_NoTps')\n \n new_tbldict = {}\n for key, tbl in tbldict.iteritems():\n tpcol = [s for s in tbl.columns if ('_Tp' in s)]\n if tpcol:\n tpcol = tpcol[0]\n tblflat, tblflatnm = flatten(tbl, tpcol, key, [1, '1'])\n new_tbldict[tblflatnm] = tblflat\n tbldict.update(new_tbldict)\n \n #make sure each table contains SubjID and BAC# fields\n for key, tbl in tbldict.iteritems():\n tbl = addcodes(tbl, tbldict['codetranslator'])\n tbldict[key] = tbl\n \n #merge tables\n tblstojoin = ['cogtests_flat','pibparams_flat','aseg_change_flat','fdg_metaroi_flat','subjinfo']\n joincol = ['codea','codeb']\n subjtbl = mergelots(tbldict, tblstojoin, joincol)\n \n #merge tables\n tblstojoin = ['cogtests','subjinfo','pibparams_flat','aseg_change_flat','fdg_metaroi_flat']\n joincol = ['codea','codeb']\n NPtbl = mergelots(tbldict, tblstojoin, joincol)\n \n cf.save_xls_and_pkl(subjtbl, 'subjtbl', outdir)\n cf.save_xls_and_pkl(NPtbl, 'NPtbl', outdir)\n \n return tbldict, NPtbl, subjtbl", "def combine_results(voting = 'hard',clf_list = ['test_small','rt_small','test2_small']):\n \n start = time.clock()\n df = load_all_dfs(clf_list)\n\n print('combining the data and voting ', voting)\n\n if voting == 'hard':\n print('voting')\n\n label_tupel_list = list(df.groupby(level=['id'])['std'].idxmax())#idmax \n num_samples = len(label_tupel_list)\n index = [label_tupel_list[i][0] for i in range(num_samples)]\n df.index\n time_need = []\n t2 = 0\n\n print(\"doing god's work\")\n df_new = df.ix[index]\n df_new = df.ix[label_tupel_list]\n end = time.clock()\n print('done', end-start)\n #return df_new\n \n \n cols = 
['Class_1',\n 'Class_2',\n 'Class_3',\n 'Class_4',\n 'Class_5',\n 'Class_6',\n 'Class_7',\n 'Class_8',\n 'Class_9']\n df_new2 = df_new.reset_index()\n del df_new2['std']\n del df_new2['id']\n del df_new2['df']\n\n print('zero')\n try:\n print('first')\n clf_names = 'with_'\n print('second')\n for i in range(len(clf_list)):\n print(clf_list[i])\n clf_names = clf_names + '_' + clf_list[i]\n \n df_new2.to_csv('Pikki'+clf_names+ '.csv',header = cols,index_label = ['id'])\n \n df_new2.index +=1\n\n print('written to')\n print('Pikki'+clf_names+ '.csv')\n \n df_new2.to_csv('combined_Pikki'+clf_names+ '.csv',header = cols,index_label = ['id'])\n except:\n df_new2.to_csv('combined_Pikki.csv',header = cols,index_label = ['id'])\n return df_new", "def summarize(data, verbal=False, using_files=True):\n\n if using_files:\n for file_name in tqdm(data):\n fill_table(pd.read_csv(file_name))\n else:\n for table in tqdm(data):\n fill_table(table)\n\n for cluster in table_summary:\n #total_genes = sum(table_summary[cluster][\"phylum\"].values) # number of genes\n #total_genes = table_summary[cluster][\"N\"] # number of samples\n total_genes = table_summary[cluster][\"eggNOG\"].eggNOG.sum() # number of genes in COGs with duplicates\n \n phylum_percent = table_summary[cluster][\"phylum\"].apply(lambda x: x/total_genes * 100)\n phylum_percent.columns = [\"percent\"]\n table_summary[cluster][\"phylum\"] = pd.concat([table_summary[cluster][\"phylum\"],phylum_percent],axis=1)\n\n #Read above for fix\n genus_percent = table_summary[cluster][\"genus\"].apply(lambda x: x/total_genes * 100)\n genus_percent.columns = [\"percent\"]\n table_summary[cluster][\"genus\"] = pd.concat([table_summary[cluster][\"genus\"],genus_percent],axis=1)\n\n #read above for fix\n cog_percent = table_summary[cluster][\"eggNOG\"].apply(lambda x: x/table_summary[cluster][\"gene_cog\"] * 100)\n cog_percent.columns = [\"percent\"]\n table_summary[cluster][\"eggNOG\"] = pd.concat([table_summary[cluster][\"eggNOG\"],cog_percent],axis=1)\n\n #Print the data\n if verbal:\n print \"Cluster %s:\\n\" % cluster\n print \"Number of Samples: %d\\n\" % table_summary[cluster][\"N\"]\n print \"Taxonomy:\"\n print table_summary[cluster][\"phylum\"].sort(\"percent\", ascending=False)\n print \"----------------------------------\"\n print table_summary[cluster][\"genus\"].sort(\"percent\", ascending=False)\n print \"-----------------------------------\"\n print \"COGS:\"\n print table_summary[cluster][\"eggNOG\"].sort(\"percent\", ascending=False)\n print \"------------------------------------\"\n print \"End Summary\"", "def make_summary(df, save_path):\n # first survey blocks\n style_list = df.name.unique()\n\n # summary data\n num_styles = len(style_list)\n num_units = df.guid.count()\n num_floors = len(df.floor.unique())\n num_phases = len(df.phase.unique())\n num_priority = len(df.priority.unique())\n num_swings = len(df.swing_drop.unique())\n num_groups = len(df.db.unique())\n group_totals = df[[\"db\", \"guid\"]].groupby(\"db\").count()\n group_totals.reset_index(inplace=True)\n loners = len(group_totals.loc[group_totals.guid == 1, \"db\"].tolist())\n assemblies = len(group_totals.loc[group_totals.guid != 1, \"db\"].tolist())\n\n summary = {\"Total_Styles\": num_styles,\n \"Total_Units\": num_units,\n \"Total_Floors\": num_floors,\n \"Total_Phases\": num_phases,\n \"Total_Priorities\": num_priority,\n \"Total_Drops\": num_swings,\n \"Total_Groups\": num_groups,\n \"Total_Loners\": loners,\n \"Total_Assemblies\": assemblies}\n summary_df = 
pd.DataFrame(summary, index=[0])\n summary_df = summary_df.T\n\n # print(assemblies)\n # print(loners)\n # print(num_units)\n # print(list(df))\n\n # first groups\n # print(df[[\"group\",\"group_order\"]])\n # print(list(df))\n\n # reports\n floor_counts = df[[\"floor\", \"guid\"]].groupby(\"floor\").count()\n floor_counts.reset_index(inplace=True)\n # print(floor_counts)\n\n elevations = df[[\"elevation\", \"guid\"]].groupby(\"elevation\").count()\n elevations.reset_index(inplace=True)\n # (list(df))\n\n names = df[[\"name\", \"guid\"]].groupby(\"name\").count()\n names.reset_index(inplace=True)\n # print(names)\n\n priorities = df[[\"priority\", \"guid\"]].groupby(\"priority\").count()\n priorities.reset_index(inplace=True)\n # print(priorities)\n\n swing = df[[\"swing_drop\", \"guid\"]].groupby(\"swing_drop\").count()\n swing.reset_index(inplace=True)\n # print(swing)\n\n phase = df[[\"phase\", \"guid\"]].groupby(\"phase\").count()\n phase.reset_index(inplace=True)\n # print(phase)\n\n group_rpt = df[[\"db\", \"guid\"]].groupby(\"db\").count()\n group_rpt.reset_index(inplace=True)\n # print(group_rpt)\n\n group_odr = df[[\"name\", \"group_order\", \"guid\"]].groupby([\"group_order\", \"name\"]).count()\n group_odr.reset_index(inplace=True)\n # print(group_odr)\n\n samples = df.loc[df.instance == 1, [\"survey_name\", \"priority\", \"phase\"]]\n # print(firsts)\n\n\n\n\n df[\"qmarks\"] = df.name.apply(find_qmark)\n qmarks = df.loc[df.qmarks == 1, [\"name\", \"guid\"]]\n qmarks.reset_index(drop=True, inplace=True)\n qmarks = qmarks.groupby(\"name\").count()\n\n # Create a Pandas Excel writer using XlsxWriter as the engine.\n writer = pd.ExcelWriter(S.save_path, engine='xlsxwriter')\n\n # Write each dataframe to a different worksheet.\n\n summary_df.to_excel(writer, sheet_name='Summary')\n df.to_excel(writer, sheet_name='Main')\n floor_counts.to_excel(writer, sheet_name='Floors')\n names.to_excel(writer, sheet_name='Names')\n elevations.to_excel(writer, sheet_name='Elevations')\n priorities.to_excel(writer, sheet_name='Priorities')\n swing.to_excel(writer, sheet_name='Drops')\n phase.to_excel(writer, sheet_name='Phases')\n group_rpt.to_excel(writer, sheet_name='Groups')\n group_odr.to_excel(writer, sheet_name='Group Order')\n samples.to_excel(writer, sheet_name='Samples Trimmed')\n # first_styles_survey.to_excel(writer, sheet_name='Primary Styles Detail')\n qmarks.to_excel(writer, sheet_name=\"Question Marks\")\n\n # Close the Pandas Excel writer and output the Excel file.\n writer.save()\n print(\"Summaries done!\")\n # os.startfile(save_path)\n return df", "def stats_orgs(df, new_data=False):\n rows = []\n\n if new_data:\n df = df[df.index.isin(in_taxa_dict.keys())]\n else:\n df = df[df.index.isin(db_taxa_dict.keys())]\n\n df2 = df.copy()\n df2[df2 >= 1] = 1\n\n df = df.sum(axis=1).to_frame()\n\n if new_data:\n df[f\"Genes out of {len(matrix.columns)}\"] = df2.sum(axis=1).to_frame()\n df = df.rename(columns={0: f\"Sequences Collected\"})\n\n else:\n df = df.rename(columns={0: f\"Genes out of {len(matrix.columns)}\"})\n\n # Fill in taxonomic information\n if new_data:\n list_of_dicts = [{key: value[i] for key, value in in_taxa_dict.items()} for i in range(3)]\n else:\n list_of_dicts = [{key: value[i] for key, value in db_taxa_dict.items()} for i in range(3)]\n df['Long Name'] = df.index.map(list_of_dicts[2])\n df['Higher Taxonomy'] = df.index.map(list_of_dicts[0])\n df['Lower Taxonomy'] = df.index.map(list_of_dicts[1])\n\n # Rearrange Columns to Put Genes after taxa stats\n cols = 
df.columns.tolist()\n cols = cols[2:] + cols[:2]\n df = df[cols]\n\n if new_data:\n routes_dict = get_routes()\n list_of_routes_dicts = [{key: value[i] for key, value in routes_dict.items()} for i in range(3)]\n df[\"#SBH\"] = df.index.map(list_of_routes_dicts[0])\n df[\"#BBH\"] = df.index.map(list_of_routes_dicts[1])\n df[\"#HMM\"] = df.index.map(list_of_routes_dicts[2])\n out_filename = 'new_taxa_stats.tsv'\n else:\n out_filename = 'db_taxa_stats.tsv'\n\n # Fill in columns for including in SGT construction. By default all are yes\n has_paralogs = check_paralogs()\n if new_data:\n sgt_dict = {org: 'yes' for org in in_taxa_dict.keys()}\n else:\n sgt_dict = {org: 'yes' for org in db_taxa_dict.keys()}\n df['SGT'] = df.index.map(sgt_dict)\n\n # Fill in column for paralogs. If no paralogs entry is 'none'.\n # If there are paralogs entry is 'yes'. If there are paralogs, but --ortholog_only is given entry is 'no'.\n if new_data:\n pass\n else:\n paralogs_dict = {org: ('yes' if org in has_paralogs and not args.orthologs_only\n else 'no' if org in has_paralogs and args.orthologs_only else 'none')\n for org in db_taxa_dict}\n df['Paralogs'] = df.index.map(paralogs_dict)\n\n df = df.rename_axis('Unique ID')\n df.to_csv(f'{output_fold}/{out_filename}', sep='\\t')", "def merge_breached():\n # read breach and CMS data\n breachdf = pd.read_csv(updated_breach_file_name, encoding='latin1')\n breachdf.rename(columns=lambda x: x.strip(), inplace=True)\n print(breachdf.isnull().sum().sum(), \"null columns\")\n\n df = pd.read_csv(CMS_file_name, encoding='latin1')\n df.rename(columns=lambda x: x.strip(), inplace=True)\n\n print(\"dataframes read\")\n\n # merge data\n new_df = df.merge(breachdf, left_on='FAC_NAME', right_on='FAC_NAME', how='outer')\n print(\"merged\", new_df)\n\n new_df.to_csv(merged_file_name, index=False)\n print(\"Written to\", merged_file_name)", "def save_to_hdf5(fname, df, cosmo={}, tname=\"RockstarMergerTrees\", min_vmax=0):\n f = h5py.File(fname, 'a', libver='latest')\n colheads = df.columns.values\n treenums = df.loc[df.vmax >= min_vmax].tree.unique()\n if tname in f.keys():\n print(\"File already contains a group named {0}, so I can't save to it.\"\n \" Exiting.\".format(tname))\n sys.exit(1337)\n t = f.create_group(tname)\n if HAVE_PBAR:\n treenums = tqdm(treenums, desc='Saving')\n for i, tnum in enumerate(treenums):\n tg = t.create_group('Tree_' + str(tnum))\n for j, col in enumerate(colheads):\n col_data = df.loc[(df.tree == tnum), col].values\n tg.create_dataset(col, data=col_data)\n head = f.create_group('Header')\n for param in cosmo:\n head.create_dataset(param, data=cosmo[param])\n f.close()", "def merge_cooccur(args):\n if not args.quiet:\n logger.setLevel(logging.INFO)\n\n merged = dict.fromkeys(['mat', 'tokenizer', 'window_size', 'uniform_count'])\n with tqdm(total=len(args.cooccurfiles), ncols=80, disable=args.quiet) as prog:\n for file in args.cooccurfiles:\n # load the data\n corpus = load_corpus(file)\n\n if merged['tokenizer'] is None:\n merged['tokenizer'] = corpus._tokenizer.to_str()\n\n if merged['window_size'] is None:\n merged['window_size'] = corpus.window_size\n\n if merged['uniform_count'] is None:\n merged['uniform_count'] = corpus.uniform_count\n\n mat = corpus.mat.astype('float32')\n if args.symmetrization:\n mat = (mat + mat.T.tocoo()).tocoo()\n\n if merged['mat'] is None:\n merged['mat'] = mat\n else:\n merged['mat'] += mat\n\n prog.update()\n merged['mat'] = merged['mat'].tocoo()\n\n # save output\n logger.info('Saving to disk...')\n out_fn = 
join(args.path, args.out)\n with open(out_fn, 'wb') as fp:\n pkl.dump(\n {\n 'mat': {\n 'row': merged['mat'].row,\n 'col': merged['mat'].col,\n 'counts': merged['mat'].data\n },\n 'tokenizer': merged['tokenizer'],\n 'uniform_count': merged['uniform_count'],\n 'window_size': merged['window_size']\n },\n fp\n )", "def complete_databank(port=\"localhost:27017\",coconut_database=\"COCONUT2020-10\",sweetcoconut_database=\"sweetcoconut\"):\n client = MongoClient(port)\n db_complete = client[coconut_database]\n collection = db_complete.uniqueNaturalProduct\n db_complete_only_ring_sugars = pd.DataFrame(list(collection.find({\"contains_ring_sugars\": True})))\n df_complete_tax = pd.DataFrame({\"taxonomy\": db_complete_only_ring_sugars[\"textTaxa\"],\n \"smiles\": db_complete_only_ring_sugars[\"smiles\"],\n \"coconut_id\": db_complete_only_ring_sugars[\"coconut_id\"],\n \"no_sugar_smiles\": db_complete_only_ring_sugars[\"sugar_free_smiles\"]\n })\n complete_names = []\n indexes = []\n for i in range(len(df_complete_tax.taxonomy)):\n # some entries are empty lists\n # doubles\n if df_complete_tax.taxonomy[i] != [] and (\"plants\" in df_complete_tax.taxonomy[i] or \"bacteria\" in df_complete_tax.taxonomy[i] or \"marine\" in df_complete_tax.taxonomy[i] or \"animals\" in df_complete_tax.taxonomy[i] or \"fungi\" in df_complete_tax.taxonomy[i]):\n indexes.append(i)\n complete_names.append(df_complete_tax.taxonomy[i])\n df_five_tax = df_complete_tax.loc[indexes[:]]\n df_tax_id = pd.DataFrame({\"taxonomy\": df_five_tax.taxonomy,\n \"coconut_id\": df_five_tax.coconut_id})\n df_tax_id = df_tax_id.reset_index()\n taxonomies = [\"plants\",\"bacteria\",\"fungi\",\"marine\",\"animals\"]\n biology_names = []\n for row in df_tax_id.taxonomy:\n for name in row:\n if name not in taxonomies:\n biology_names.append(name)\n for biology_name in biology_names:\n for row in df_tax_id.taxonomy:\n if biology_name in row:\n row.remove(biology_name)\n # **------------for tax prediction---------------**\n df_tax_id.to_pickle(\"output_data/for_predict_doubletriple.pkl\")\n # **----------end tax prediction--------------**\n for ind, tax_list in enumerate(df_tax_id.taxonomy):\n if \"marine\" in tax_list:\n #print(ind, tax_list)\n if len(tax_list) > 1:\n df_tax_id.taxonomy[ind].remove(\"marine\")\n else:\n df_tax_id.taxonomy[ind].append(\"no\")\n df_tax_id.taxonomy[ind].remove(\"marine\")\n #df_tax_id.taxonomy[ind] = [\"no\"]\n taxonomy_Double = []\n taxonomy_Triple = []\n taxonomy_single_entry = []\n for ind, tax_list in enumerate(df_tax_id.taxonomy):\n #print(ind, tax_list)\n if len(tax_list) == 1:\n taxonomy_single_entry.append(tax_list[0])\n elif len(tax_list) == 2: \n taxonomy_single_entry.append('double')\n # save original annotation\n taxonomyDouble1 = []\n for tax in tax_list:\n taxonomyDouble1.append(tax)\n taxonomy_Double.append(taxonomyDouble1)\n elif len(tax_list) == 3:\n taxonomy_single_entry.append('triple')\n # save original annotation\n taxonomyTriple1 = []\n for tax in tax_list:\n taxonomyTriple1.append(tax)\n taxonomy_Triple.append(taxonomyTriple1)\n else:\n print('Error: Too many taxonomies for one aglycon','\\n','create a new elif statement in line 102 in tanimoto_index.py')\n df_tax_id_fromCompleteDatabank = pd.DataFrame({\"taxonomy\": taxonomy_single_entry,\n \"coconut_id\": df_five_tax.coconut_id})\n sweetcoconut_databank(df_tax_id_fromCompleteDatabank,taxonomy_Double,sweetcoconut_database,port)", "def aglycon_single_tax(df_NP):\n # **seperate aglycons with at least two different entries in taxonomy**\n 
index_Unique_Tax = [ind for ind, tax_list in enumerate(df_NP.taxonomy) if len(tax_list) == 1]\n df_Without_Double = df_NP.iloc[index_Unique_Tax[:]]\n #df_Without_Double\n # **check for 'double' or 'triple' entries in taxonomy**\n index_double_or_triple = [ind for ind, tax_list in enumerate(df_Without_Double.taxonomy) if 'double' not in tax_list and 'triple' not in tax_list]\n df_Without_Double_or_Triple = df_Without_Double.iloc[index_double_or_triple[:]]\n #df_Without_Double_or_Triple\n # **------for taxonomy prediction------**\n df_Without_Double_or_Triple.to_pickle(\"output_data/df_all_aglycons_with_single_taxonomy.pkl\")\n # **------end for taxonomy prediction------**", "def dump(\n cool_uri,\n table,\n columns,\n header,\n na_rep,\n float_format,\n range,\n range2,\n matrix,\n balanced,\n join,\n annotate,\n one_based_ids,\n one_based_starts,\n chunksize,\n out,\n):\n c = api.Cooler(cool_uri)\n\n # output stream\n if out is None or out == \"-\":\n f = sys.stdout\n elif out.endswith(\".gz\"):\n f = gzip.open(out, \"wt\")\n else:\n f = open(out, \"wt\")\n\n # choose the source\n if table == \"chroms\":\n selector = c.chroms()\n if columns is not None:\n selector = selector[list(columns)]\n chunks = (selector[:],)\n elif table == \"bins\":\n selector = c.bins()\n if columns is not None:\n selector = selector[list(columns)]\n chunks = (selector[:],)\n else:\n # load all the bins\n bins = c.bins()[:]\n if chunksize is None:\n chunksize = len(bins)\n\n if balanced and \"weight\" not in bins.columns:\n print(\"Balancing weights not found\", file=sys.stderr)\n sys.exit(1)\n\n h5 = c.open(\"r\")\n if range:\n i0, i1 = region_to_extent(\n h5, c._chromids, parse_region(range, c.chromsizes), binsize=c.binsize\n )\n if range2 is not None:\n j0, j1 = region_to_extent(\n h5,\n c._chromids,\n parse_region(range2, c.chromsizes),\n binsize=c.binsize,\n )\n else:\n j0, j1 = i0, i1\n\n triu_reader = CSRReader(h5, \"count\", chunksize)\n if matrix and c.storage_mode == \"symmetric-upper\":\n selector = query2d(triu_reader, i0, i1, j0, j1, duplex=True)\n else:\n selector = triu_reader(i0, i1, j0, j1, transpose=False)\n\n chunks = (\n pd.DataFrame(\n {\"bin1_id\": i, \"bin2_id\": j, \"count\": v},\n columns=[\"bin1_id\", \"bin2_id\", \"count\"],\n )\n for i, j, v in selector\n )\n else:\n selector = c.pixels()\n if columns is not None:\n selector = selector[list(columns)]\n n = len(selector)\n edges = np.arange(0, n + chunksize, chunksize)\n edges[-1] = n\n\n if matrix and c.storage_mode == \"symmetric-upper\":\n\n def _select(lo, hi):\n df = selector[lo:hi]\n dfT = df.copy()\n dfT[\"bin1_id\"], dfT[\"bin2_id\"] = df[\"bin2_id\"], df[\"bin1_id\"]\n return pd.concat([df, dfT])\n\n chunks = (_select(lo, hi) for lo, hi in zip(edges[:-1], edges[1:]))\n else:\n chunks = (selector[lo:hi] for lo, hi in zip(edges[:-1], edges[1:]))\n\n if balanced or join or annotate:\n annotator = make_annotator(\n bins, balanced, join, annotate, one_based_ids, one_based_starts\n )\n chunks = map(annotator, chunks)\n\n first = True\n if float_format is not None:\n float_format = \"%\" + float_format\n\n for chunk in chunks:\n if first:\n if header:\n chunk[0:0].to_csv(\n f, sep=\"\\t\", index=False, header=True, float_format=float_format\n )\n first = False\n\n chunk.to_csv(\n f,\n sep=\"\\t\",\n index=False,\n header=False,\n float_format=float_format,\n na_rep=na_rep,\n )\n\n else:\n f.flush()", "def merge_dataframe(df, column_name='population'):\n df['snp'] = df.chrom.astype(str) + '.' 
+ df.position.astype(str)\n print('Getting open and closed combined data, this might take a while.')\n # Takes about 20 minutes\n open_closed = get_open_closed_counts(df)\n keep_cols = [\n 'snp', 'chrom', 'position', 'rsid', 'pre_freq', 'post_freq',\n 'pre_variance', 'post_variance', 'beta', 'p_value', 'z_value', 'ref',\n 'alt', 'depth', 'ref_depth', 'alt_depth', 'snp_postfreq', 'snp_prefreq',\n 'population'\n ]\n comb_functions = {\n 'chrom': first, 'position': first, 'rsid': first,\n 'ref': first, 'alt': first,\n 'pre_freq': np.median, 'post_freq': np.median,\n 'pre_variance': np.median, 'post_variance': np.median,\n 'beta': np.median, 'z_value': np.median, 'depth': np.median,\n 'ref_depth': np.median, 'alt_depth': np.median,\n 'snp_postfreq': np.median, 'snp_prefreq': np.median,\n column_name: ','.join, 'p_value': fishers\n }\n print('Collapsing dataframe, this also takes a while.')\n # Takes about 10 minutes\n df = df[keep_cols].groupby('snp').agg(comb_functions)\n # Split the fishers statistics\n df['fishers_value'], df['fishers_p'] = df.p_value.str\n print('Merging in open/closed data')\n df = pd.merge(df, open_closed, left_index=True, right_index=True)\n print('Organizing')\n final_cols = [\n 'chrom', 'position', 'rsid', 'fishers_p', 'ref', 'alt', 'open_best',\n 'closed_best', 'pop_count', column_name, 'pops_agree',\n 'most_pops_agree', 'all_open_closed', 'open_closed_freqs',\n 'fishers_value', 'pre_freq', 'post_freq', 'pre_variance',\n 'post_variance', 'beta', 'z_value', 'depth', 'ref_depth', 'alt_depth',\n 'snp_postfreq', 'snp_prefreq'\n ]\n final_names = [\n 'chrom', 'position', 'rsid', 'fishers_p', 'ref', 'alt', 'open_best',\n 'closed_best', column_name + '_count', column_name + 's', column_name +\n 's_agree', 'most_{}s_agree'.format(column_name), 'all_open_closed',\n 'open_closed_freqs', 'fishers_value', 'pre_freq_median',\n 'post_freq_median', 'pre_variance_median', 'post_variance_median',\n 'beta_median', 'z_value_median', 'depth_median', 'ref_depth_median',\n 'alt_depth_median', 'snp_postfreq_median', 'snp_prefreq_median'\n ]\n df = df[final_cols]\n df.columns = final_names\n print('Done')\n return df", "def calculated_data_to_csv(transmissivity_calculated, conductivity_calculated,\n confirmed_wells, feature_class_name):\n utm_e = [i[0][0] for i in confirmed_wells]\n utm_n = [i[0][1] for i in confirmed_wells]\n np.set_printoptions(suppress=True) #removes scientific notation\n location = np.array([utm_e, utm_n])\n location = location.transpose()\n transmissivity_calculated = np.array(transmissivity_calculated)\n conductivity_calculated = np.array(conductivity_calculated)\n joined_data = np.concatenate((location, transmissivity_calculated, conductivity_calculated), axis = 1)\n my_df = pd.DataFrame(joined_data)\n header_list = ['UTME', 'UTMN', 'T_min', 'T_raw', 'T_max', 'K_min', 'K_raw', 'K_max', 'Well ID']\n raw_csv_name = f\"{feature_class_name}.csv\"\n my_df.to_csv(raw_csv_name, index = False, header = header_list)\n return my_df, raw_csv_name", "def inout_creator(df = pd.DataFrame(), features='datosrahm.csv'):\r\n df = df\r\n \r\n start=time.time()\r\n \r\n datos=pd.read_csv(features)\r\n datos=datos.fillna(-1)\r\n\r\n dicc=dict(datos[['Symbol','Z']].values)\r\n\r\n dicc['D']=1\r\n dicc['Bk']=97\r\n dicc['Cf']=98\r\n dicc['Es']=99\r\n dicc['Fm']=100\r\n dicc['Md']=101\r\n dicc['No']=102\r\n dicc['Lr']=103\r\n \r\n max_sitios = max(df['sitios'].values)\r\n \r\n X=np.zeros((len(df),max_sitios,104))\r\n\r\n mult=np.zeros((len(df),max_sitios))\r\n 
wyckmul=np.load('support/WyckoffSG_dict.npy').item()['wyckmul']\r\n \r\n todelete = list()\r\n \r\n for row in range(len(df)):\r\n item=df['WyckOcc'][row]\r\n sitios=list(item.values()) \r\n sitocc=np.zeros((len(sitios),104)) \r\n spacegroup = str(df['sgnum'][row]).zfill(3)\r\n \r\n try:\r\n \r\n s=[int(wyckmul[spacegroup][i]) for j in [list(item.keys()) for item in \\\r\n sitios] for i in j]\r\n \r\n except:\r\n print(row)\r\n print('There exists an error concerning with the space group of CIF ', df['cif'][row],'\\n')\r\n print('Please check in www.crystallography.net to provide the correct space group number of that CIF',\r\n '\\n','\\n')\r\n spacegroup=input('Give me the correct spacegroup:'+'\\n'+'\\n')\r\n s=[int(wyckmul[spacegroup][i]) for j in [list(item.keys()) for item in \\\r\n list(df['WyckOcc'][row].values())] for i in j]\r\n \r\n occs=[]\r\n for i in range(len(sitios)):\r\n\r\n for j in list(sitios[i].values()):\r\n \r\n ocupacion=np.array(list(j.values()))\r\n llaves=[llave.replace('+','').replace('-','').replace('1',\r\n '').replace('2','').replace('3','').replace('4',\r\n '') for llave in np.array(list(j.keys()))]\r\n llaves=[llave.replace('.','') for llave in llaves]\r\n llaves=[llave.replace('5','').replace('6','').replace('7',\r\n '').replace('8','').replace('9','').replace('0',\r\n '') for llave in llaves]\r\n vector=np.zeros((1,104))\r\n occs=[sum(ocupacion)]+occs\r\n \r\n try:\r\n \r\n idx=[dicc[k] for k in llaves]\r\n \r\n except:\r\n print('The compound with the cif ', df['cif'][row], ' will be deleted')\r\n print('The database will be updated')\r\n todelete += [row]\r\n \r\n for k in idx:\r\n vector[0][k-1] = ocupacion[idx.index(k)]\r\n \r\n sitocc[i]=vector\r\n \r\n while sitocc.shape[0] != max_sitios:\r\n sitocc=np.concatenate((np.zeros((1,104)),sitocc))\r\n s=[0]+s\r\n \r\n X[row,:,:]=sitocc\r\n mult[row]=s\r\n \r\n features=datos.iloc[:,2:].values\r\n x=X[:,:,:96]\r\n \r\n fracsum = np.expand_dims(np.sum(x,axis=2), axis=2)\r\n \r\n x=np.dot(x,features) \r\n \r\n x = np.delete(x, todelete,axis=0)\r\n df = df.drop(df.index[todelete]).reset_index(drop=True)\r\n \r\n print('inout_creator lasted ',round(time.time()-start,2),' s') \r\n return x, fracsum, df", "def get_glevel_ori_agency(county_cens_file, crime_df, filename, cens_year, city_cens_file=False):\n\n \"\"\"\n 1. Append cities census file to counties census file\n \"\"\"\n national_census_df = pd.read_csv(county_cens_file)\n\n \"\"\"\n Checking for city census file coz we need to first append city census file to the bottom of county census file for 2000 and 2010.\n And city census file is passed only for 2000 and 2010 since for 1990 city and county census data is already together.\n \"\"\"\n if city_cens_file:\n cities_df = pd.read_csv(city_cens_file)\n national_census_df = national_census_df.append([cities_df])\n\n # Drop duplicates\n national_census_df = national_census_df.drop_duplicates(['STATEFP', 'place_fips'])\n national_census_df.to_csv(f'/Users/salma/Studies/Research/Criminal_Justice/research_projects/US Crime Analytics/data/cen_00/Census_{cens_year}_Unique.csv', index=False)\n\n\n \"\"\"\n 2.\n Merge census unique files with Crime_Major_Gov_Fips to get the correct cgovtype, CNTY based on fips state, fips place. \n Also obtain ORI, Agency columns from crime file. \n \"\"\"\n national_census_df = national_census_df.merge(crime_df, on=['STATEFP', 'place_fips'], how='right')\n\n\n \"\"\"\n 3. 
Create final Govt_level = Govt_level_y column which has govt_level values from crime file and get rid of _x and _y columns \n \"\"\"\n national_census_df['Govt_level'] = national_census_df['Govt_level_y']\n national_census_df['CNTY'] = national_census_df['CNTY_y']\n national_census_df.drop(['Govt_level_x', 'Govt_level_y', 'CNTY_x', 'CNTY_y'], axis=1, inplace=True)\n\n \"\"\"\n Add the year column to have year for even the missing census rows for certain ORIs\n \"\"\"\n national_census_df['YEAR'] = cens_year\n\n \"\"\"\n 4. Rearrange columns so that ORI, AGENCY, Govt_level are at the beginning\n \"\"\"\n cols = list(national_census_df.columns.values)\n cols.pop(cols.index('ORI'))\n cols.pop(cols.index('AGENCY'))\n cols.pop(cols.index('Govt_level'))\n cols.pop(cols.index('CNTY'))\n cols.pop(cols.index('YEAR'))\n\n national_census_df = national_census_df[['ORI', 'AGENCY', 'Govt_level', 'CNTY', 'YEAR'] + cols]\n #national_census_df = national_census_df[['ORI', 'AGENCY', 'YEAR'] + cols]\n\n # write the final df with updated govt_level, ori, agency etc. to a csv\n national_census_df.to_csv(f'/Users/salma/Studies/Research/Criminal_Justice/research_projects/US Crime Analytics/data/cen_00/{filename}.csv', index=False)", "def composeWorkplaceOntology():\n\n import ossPyFuncs \n import pandas as pd\n \n #mysql query to extract full table from government organizations\n #certian table columns feature capital letters which cases uproblems\n postgreSql_selectQuery=\"SELECT * FROM us_gov_manual.us_govman_2019 ;\"\n #pass querry and obtain table\n govTable=ossPyFuncs.queryToPDTable(postgreSql_selectQuery)\n\n #mysql query to obtain academic instutions\n postgreSql_selectQuery=\"SELECT institution FROM hipolabs.universities ;\"\n #pass querry and obtain table\n univTable=ossPyFuncs.queryToPDTable(postgreSql_selectQuery)\n \n postgreSql_selectQuery=\"SELECT company FROM forbes.fortune2018_us1000;\"\n businesses1=ossPyFuncs.queryToPDTable(postgreSql_selectQuery)\n \n postgreSql_selectQuery=\"SELECT company FROM forbes.fortune2019_us1000;\"\n businesses2=ossPyFuncs.queryToPDTable(postgreSql_selectQuery)\n \n postgreSql_selectQuery=\"SELECT company FROM forbes.fortune2020_global2000;\"\n businesses3=ossPyFuncs.queryToPDTable(postgreSql_selectQuery)\n\n #combine theinsitutions into a vector\n combinedSeries=[govTable['AgencyName'],univTable['institution'],businesses1['company'],businesses2['company'],businesses3['company']]\n #turn the multi item vector into a single series\n fullWordbank=pd.concat(combinedSeries)\n #turn that series into a pd dataframe\n wordbankTable=pd.DataFrame(fullWordbank.unique())\n\n return wordbankTable", "def social_infrastructure_point(osm_path): \n df_all = retrieve(osm_path,'points',['other_tags']).rename(columns={'other_tags': 'asset'}) \n \n #get requested healthcare assets categorized under the key 'healthcare' with correct formatting \n df_h = healthcare_filter(df_all)\n \n #get requested healthcare assets categorized under the key 'amenity' \n df_a = pandas.DataFrame(columns=['osm_id','asset','geometry']) #create df for saving data\n for row in range(len(df_all.index)): \n if 'amenity' in df_all[\"asset\"][row]: \n if not 'healthcare' in df_all[\"asset\"][row]: #check if healthcare key is present\n df_a = df_a.append(df_all.loc[row]) #if so, save in df\n \n if '\"amenity\"=>\"doctors\"' in df_a[\"asset\"][row]:\n df_a[\"asset\"][row] = 'doctors' #to be consistent with asset list \n elif '\"amenity\"=>\"pharmacy\"' in df_a[\"asset\"][row]:\n df_a[\"asset\"][row] = 'pharmacy'\n 
elif '\"amenity\"=>\"hospital\"' in df_a[\"asset\"][row]:\n df_a[\"asset\"][row] = 'hospital'\n elif '\"amenity\"=>\"clinic\"' in df_a[\"asset\"][row]:\n df_a[\"asset\"][row] = 'clinic'\n elif '\"amenity\"=>\"dentist\"' in df_a[\"asset\"][row]:\n df_a[\"asset\"][row] = 'dentist'\n else:\n df_a = df_a.drop(index=row)\n \n df_social_points = df_a.append(df_h)\n \n return df_social_points.reset_index(drop=True)", "def merge_summaries(root_dir: str,output_file: str=None) -> pd.DataFrame:\n #\n print (f'Collecting the available summary files in {root_dir}, can take time... please wait.')\n sumfiles = glob.glob(f\"{root_dir}/**/*smry.txt\",recursive=True)\n nsums = len(sumfiles)\n print (f\"Found {nsums} summary files in {root_dir}\")\n #\n # will concatenate all smry.txt files into one temporary file and then will put it in pandas DataFrame and \n # save as CSV\n #\n with tempfile.NamedTemporaryFile(mode='w') as fp:\n for sumfile in tqdm(sumfiles,desc='Collecting the summaries'):\n with open(sumfile,'r') as sfile:\n fp.write(sfile.read())\n #\n # now read as pandas dataframe\n #\n colnames = [\"rev\",\"obsid\",\"expid\",\"mode\",\"filt\",\"tstart\",\"tend\",\"texpo\",\"mvcratio\", # (a rough measure of the ratio of counts in the MnKa versus continuum)\n \"qboxt0\",\"qboxt1\",\"qboxt2\",\"qboxt3\", # x 4 (electronics quadrant box temperatures)\n \"ndisclin_mean0\",\"ndisclin_mean1\",\"ndisclin_mean2\",\"ndisclin_mean3\", #x 4\n \"mipsel0\",\"mipsel1\",\"mipsel2\",\"mipsel3\", #x 4 (parameter for on-board MIP rejection algorithm)\n \"maxmip0\",\"maxmip1\",\"maxmip2\",\"maxmip3\", #x 4 (parameter for on-board MIP rejection algorithm)\n \"ndisclin_med0\",\"ndisclin_med1\",\"ndisclin_med2\",\"ndisclin_med3\", #median x 4\n \"ndisclin_std0\",\"ndisclin_std1\",\"ndisclin_std2\",\"ndisclin_std3\"] #, stddev x 4\n\n df = pd.read_csv(fp.name,delimiter='\\s+',header=None,skip_blank_lines=True,names=colnames)\n #\n # now calculate the time_delta, the difference in years from observation start and 2000-01-01\n #\n stime = [(datetime.strptime(x,\"%Y-%m-%dT%H:%M:%S\")-time0).total_seconds()/(365.0*24.0*3600.0) for x in df.tstart]\n df.insert(6,\"delta_time\",pd.Series(stime,index=df.index))\n #\n print (f'Last observation t={df.delta_time.max():.2f} years')\n if (output_file is not None):\n df.to_csv(output_file)\n fp.close()\n return df", "def write_to_csv(path,data_dict):\n\n\n schema = [\"file_name\",\"family\",\"genus\",\"genus_confidence\",\n \"species_1\",\"confidence_1\",\"hall_1\",\n \"species_2\",\"confidence_2\",\"hall_2\",\n \"species_3\",\"confidence_3\",\"hall_3\",\n \"species_4\",\"confidence_4\",\"hall_4\",\"peaks\"]\n\n # if no file exists create a one and inform the user\n if not os.path.exists(path):\n print(\"creating new output file {}\".format(path))\n with open(path, \"w\") as csv_file:\n filewriter = csv.writer(csv_file, delimiter=\",\")\n filewriter.writerow(schema)\n\n row = []\n\n row.append(data_dict[\"file_name\"])\n row.append(data_dict[\"family\"])\n \n row.append(data_dict[\"genus_1\"])\n row.append(data_dict[\"genus_confidence_1\"][:5])\n \n row.append(data_dict[\"species_1\"])\n row.append(data_dict[\"confidence_1\"][:5])\n row.append(data_dict[\"hall_1\"])\n \n row.append(data_dict[\"species_2\"])\n row.append(data_dict[\"confidence_2\"][:5])\n row.append(data_dict[\"hall_2\"])\n\n row.append(data_dict[\"species_3\"])\n row.append(data_dict[\"confidence_3\"][:5])\n row.append(data_dict[\"hall_3\"])\n\n row.append(data_dict[\"species_4\"])\n 
row.append(data_dict[\"confidence_4\"][:5])\n row.append(data_dict[\"hall_4\"])\n\n row.append(data_dict[\"peaks\"])\n \n with open(path, \"a\") as csv_file:\n filewriter = csv.writer(csv_file, delimiter=\",\")\n filewriter.writerow(row)", "def FeaturesGen(ChopChopresults, outputDir, sgRNA_type):\n \n #make output Directory if it does not already exist\n if not os.path.isdir(outputDir):\n os.makedirs(outputDir)\n \n #list the directory contents \n for i,j,k in os.walk(ChopChopresults): #use walk to go through and find all directories\n \n if j == []: #no subdirectories\n saveDF = pd.DataFrame() #initiate dataframe\n for target in k: #loop through to find the sgRNA sequences\n if target.endswith('.offtargets'):\n with open(os.path.join(i,target), 'r+') as f:\n guide = f.readlines()\n #add them to a dataframe\n temp = pd.Series()\n temp['guideNo'] = target.split('.')[0] + sgRNA_type\n temp['guideSeq'] = guide.pop(0).rstrip()\n \n saveDF = saveDF.append(temp.to_frame().transpose())\n saveDF['type'] = 'sgRNA'\n \n if sgRNA_type == 'General' or sgRNA_type == None:\n saveDF['fwd'] = 'pink'\n saveDF['rev'] = 'green'\n elif sgRNA_type == 'GG':\n saveDF['fwd'] = 'yellow'\n saveDF['rev'] = 'plum'\n elif sgRNA_type == 'GA':\n saveDF['fwd'] = 'cyan'\n saveDF['rev'] = 'cornflower blue'\n \n \n #save to txt file with tab delimiter\n saveDF.to_csv(os.path.join(outputDir, os.path.basename(i) + '_features.txt'),\\\n index = False, header = False, sep = '\\t')\n \n del saveDF", "def pretty_output(associations, population):\n\tcolumn_names = ['ld_snp_rsID', 'chrom', 'pos', 'GRCh38_chrom', 'GRCh38_pos', 'afr', 'amr', 'eas', 'eur', 'sas', 'gnomad', 'gnomad_sas', 'gnomad_oth', 'gnomad_asj', 'gnomad_nfe', 'gnomad_afr', 'gnomad_amr', 'gnomad_fin', 'gnomad_eas','gene_symbol', 'gene_id', 'gene_chrom', 'gene_tss', 'GRCh38_gene_chrom', 'GRCh38_gene_pos', 'disease_name', 'disease_efo_id', 'score', 'rank', 'r2', 'cluster_id', 'gwas_source', 'gwas_snp', 'gwas_pvalue', 'gwas_pvalue_description', 'gwas_odds_ratio', 'gwas_odds_ratio_ci_start', 'gwas_odds_ratio_ci_end', 'gwas_beta', 'gwas_size', 'gwas_pmid', 'gwas_study', 'gwas_reported_trait', 'ls_snp_is_gwas_snp', 'vep_terms', 'vep_sum', 'vep_mean'] + [\"GTEx_\" + tissue_name for tissue_name in postgap.Globals.ALL_TISSUES] + [source.display_name for source in postgap.Cisreg.sources + postgap.Reg.sources]\n\tif postgap.Globals.PERFORM_BAYESIAN: \n\t\tcolumn_names += [tissue_name + \"_CLPP\" for tissue_name in postgap.Globals.ALL_TISSUES]\n\theader = \"\\t\".join(column_names).encode('utf-8')\n\tcontent = filter(lambda X: len(X) > 0, [pretty_cluster_association(association, population) for association in associations])\n\treturn \"\\n\".join([header] + content)", "def output_df(outdict, out_file):\n\tcols = ['#chrom', 'source', 'feature', 'chromStart', 'chromEnd', 'score', 'strand', 'frame', 'transcript_id']\n\tcolOut = ['#chrom', 'source', 'feature', 'start', 'end', 'score', 'strand', 'frame', 'transcript_id']\n\tgtfDF = pd.DataFrame(columns=cols)\n\n\tfor trsp in outdict:\n\t\tgtfDF = gtfDF.append(outdict[trsp], ignore_index=True)\n\t\t\n\tgtfDF.columns = colOut\n\t# print gtfDF.head()\n\tgtfDF.to_csv(out_file, compression='gzip', sep='\\t', index=False)", "def summarize(crosswalk, incidence_table, control_spec):\n\n include_integer_colums = not setting('NO_INTEGERIZATION_EVER', False)\n\n crosswalk_df = crosswalk.to_frame()\n incidence_df = incidence_table.to_frame()\n\n geographies = setting('geographies')\n seed_geography = setting('seed_geography')\n meta_geography = 
geographies[0]\n sub_geographies = geographies[geographies.index(seed_geography) + 1:]\n hh_id_col = setting('household_id_col')\n\n meta_ids = crosswalk_df[meta_geography].unique()\n for meta_id in meta_ids:\n meta_summary_df = \\\n meta_summary(incidence_df, control_spec, meta_geography,\n meta_id, sub_geographies, hh_id_col)\n out_table('%s_%s' % (meta_geography, meta_id), meta_summary_df)\n\n hh_weights_summary = pd.DataFrame(index=incidence_df.index)\n\n # add seed level summaries\n seed_weights_df = get_weight_table(seed_geography)\n hh_weights_summary['%s_balanced_weight' % seed_geography] = seed_weights_df['balanced_weight']\n if include_integer_colums:\n hh_weights_summary['%s_integer_weight' % seed_geography] = seed_weights_df['integer_weight']\n\n for geography in sub_geographies:\n\n weights_df = get_weight_table(geography)\n\n if weights_df is None:\n continue\n\n if include_integer_colums:\n hh_weight_cols = [hh_id_col, 'balanced_weight', 'integer_weight']\n else:\n hh_weight_cols = [hh_id_col, 'balanced_weight']\n\n hh_weights = weights_df[hh_weight_cols].groupby([hh_id_col]).sum()\n hh_weights_summary['%s_balanced_weight' % geography] = hh_weights['balanced_weight']\n if include_integer_colums:\n hh_weights_summary['%s_integer_weight' % geography] = hh_weights['integer_weight']\n\n # aggregate to seed level\n aggegrate_weights = weights_df.groupby([seed_geography, hh_id_col], as_index=False).sum()\n aggegrate_weights.set_index(hh_id_col, inplace=True)\n\n if include_integer_colums:\n aggegrate_weight_cols = [seed_geography, 'balanced_weight', 'integer_weight']\n else:\n aggegrate_weight_cols = [seed_geography, 'balanced_weight']\n\n aggegrate_weights = aggegrate_weights[aggegrate_weight_cols]\n aggegrate_weights['sample_weight'] = incidence_df['sample_weight']\n aggegrate_weights['%s_preliminary_balanced_weight' % seed_geography] = \\\n seed_weights_df['preliminary_balanced_weight']\n aggegrate_weights['%s_balanced_weight' % seed_geography] = \\\n seed_weights_df['balanced_weight']\n if include_integer_colums:\n aggegrate_weights['%s_integer_weight' % seed_geography] = \\\n seed_weights_df['integer_weight']\n\n out_table('%s_aggregate' % (geography,), aggegrate_weights)\n\n summary_col = 'integer_weight' if include_integer_colums else 'balanced_weight'\n df = summarize_geography(seed_geography, summary_col, hh_id_col,\n crosswalk_df, weights_df, incidence_df)\n out_table('%s_%s' % (geography, seed_geography,), df)\n\n df = summarize_geography(geography, summary_col, hh_id_col,\n crosswalk_df, weights_df, incidence_df)\n out_table('%s' % (geography,), df)\n\n out_table('hh_weights', hh_weights_summary)", "def analysis():\r\n data_frame = load_from_mysql('core', 'BDFMHQAA_D')\r\n data_frame.registerTempTable('business')\r\n gd = data_frame.select('AA03CSNO', 'AA08PRON')\r\n\r\n def merge_count(a, b):\r\n r = {}\r\n for p, c in a.items():\r\n if p in r:\r\n r[p] += c\r\n else:\r\n r[p] = c\r\n for p, c in b.items():\r\n if p in r:\r\n r[p] += c\r\n else:\r\n r[p] = c\r\n return r\r\n result = gd.map(lambda row: (row.AA03CSNO, {row.AA08PRON: 1})).reduceByKey(merge_count)\r\n pron_count = gd.map(lambda row: (row.AA08PRON, 1)).reduceByKey(lambda a, b: a + b)\r\n\r\n # result = gd.map(lambda row: (row.AA03CSNO, row.AA08PRON))\r\n print(result.take(10))\r\n print('----------------pron count-----------------')\r\n print(pron_count.collect())\r\n\r\n print(gd)", "def make_df_from_json(json_files, out_file):\n table = [[\"name\", \n \"cik\", \n \"city\",\n \"state\",\n \"street1\",\n 
\"street2\",\n \"zip_code\",\n \"year_of_incorp\", \n \"min_inv\", \n \"tot_off\", \n \"tot_sold\", \n \"tot_rem\", \n \"ind_group_type\", \n \"has_non_accred\", \n \"num_non_accred\", \n \"tot_num_inv\"\n ]] \n\n for json_dict in json_files:\n\n with open(json_dict, \"rb\") as f:\n data = json.load(f)\n print(json_dict)\n\n for i, key in enumerate(data):\n # if i % 1000 == 0:\n # print(i)\n entry = data[key] \n if entry == {}:\n #print(\"missing entry {0}\".format(i))\n continue\n row = []\n\n primary_issuer = entry[\"Primary Issuer\"]\n cik = primary_issuer[\"cik\"]\n name = primary_issuer[\"entity_name\"]\n phone = primary_issuer[\"phone\"]\n year_of_incorp = primary_issuer[\"year_of_incorp\"]\n address = primary_issuer[\"address\"]\n city = address[\"city\"]\n state = address[\"state\"]\n street1 = address[\"street1\"]\n street2 = address[\"street2\"]\n zip_code = address[\"zip_code\"]\n\n secondary_issuers = entry[\"Secondary Issuers\"]\n related_people = entry[\"Related People\"]\n \n offering_data = entry[\"Offering Data\"]\n min_inv = offering_data[\"min_investment_accepted\"]\n tot_off = offering_data[\"total_offering_amount\"]\n tot_sold = offering_data[\"total_amount_sold\"]\n tot_rem = offering_data[\"total_remaining\"]\n ind_group_type = offering_data[\"ind_group_type\"]\n has_non_accred = offering_data[\"has_non_accred\"]\n num_non_accred = offering_data[\"num_non_accred\"]\n tot_num_inv = offering_data[\"tot_num_inv\"] \n\n row = [name, \n cik, \n city,\n state,\n street1,\n street2,\n zip_code,\n year_of_incorp,\n min_inv,\n tot_off,\n tot_sold,\n tot_rem,\n ind_group_type,\n has_non_accred,\n num_non_accred,\n tot_num_inv\n ]\n\n table.append(row)\n\n df = pd.DataFrame(table)\n df.to_csv(out_file)\n\n return 0", "def summarize_biomass(site_csv, save_as):\n\n site_list = pd.read_csv(site_csv).to_dict(orient='records')\n df_list = []\n outerdir = r\"C:\\Users\\ginge\\Dropbox\\NatCap_backup\\Mongolia\\model_results\\monitoring_sites\"\n columns = ['green_biomass_gm2', 'dead_biomass_gm2', 'total_biomass_gm2',\n 'year', 'month']\n for precip_source in ['chirps_prec']: # 'namem_clim', 'worldclim',\n for sd in ['zero_sd']: # , 'average_sd']\n results_dir = os.path.join(outerdir, precip_source, sd)\n for site in site_list:\n site_id = site['site_id']\n sum_csv = os.path.join(results_dir, '{}'.format(site_id),\n 'summary_results.csv')\n sum_df = pd.read_csv(sum_csv)\n sum_df = sum_df.set_index('step')\n subset = sum_df # sum_df.loc[sum_df['month'].isin([7, 8, 9])]\n subset['green_biomass_gm2'] = subset['{}_green_kgha'.format(site_id)] / 10.\n subset['dead_biomass_gm2'] = subset['{}_dead_kgha'.format(site_id)] / 10.\n subset['total_biomass_gm2'] = subset['green_biomass_gm2'] + subset['dead_biomass_gm2']\n subset = subset[columns]\n subset['climate_source'] = precip_source\n subset['stocking_density_option'] = sd\n subset['site_id'] = site_id\n df_list.append(subset)\n sum_df = pd.concat(df_list)\n sum_df.to_csv(save_as)", "def analysis_2_result(units_df,output_folder_path):\n two_wheeler_df = units_df\\\n .filter(col(\"VEH_BODY_STYL_ID\").isin([\"POLICE MOTORCYCLE\", \"MOTORCYCLE\"]))\\\n .distinct()\\\n .agg(count(\"VEH_BODY_STYL_ID\").alias(\"TWO_WHEELER_COUNT\"))\n # distinct is calculated as there are entries with duplicate details\n # : with_duplicate_count = 784 2 wheelers\n # : without_duplicates_count = 773 2 wheelers\n print(\"Analysis 2: \\nTotal number of two wheelers are booked for crashes is :\")\n two_wheeler_df.show() #Displaying result DF\n 
write_df_to_csv(two_wheeler_df,output_folder_path+\"analysis_2_result\") #Writing to csv file", "def gos_files_creation(annotation_file, go_namespace_studied):\n go_ontology = pronto.Ontology('http://purl.obolibrary.org/obo/go/go-basic.obo')\n\n # For each GO terms look to the namespaces associated with them.\n go_namespaces = {}\n for go_term in go_ontology:\n go_namespaces[go_term.id] = go_term.other['namespace'][0]\n\n # For each GO terms look if there is an alternative ID fo them.\n go_alt_ids = {}\n for go_term in go_ontology:\n if 'alt_id' in go_term.other:\n for go_alt in go_term.other['alt_id']:\n go_alt_ids[go_alt] = go_term.id\n\n # Genome file with genes associated with GO terms.\n df = pa.read_csv(annotation_file, sep='\\t', header=None)\n df.columns = ['Gene_Name', 'GOs']\n df.replace(np.nan, '', inplace=True)\n\n gos_in_df = []\n for gos in df['GOs']:\n for go in gos.split(','):\n if go not in gos_in_df:\n gos_in_df.append(go)\n\n df.set_index('Gene_Name', inplace=True)\n\n gene_gos = []\n for gene, row in df.iterrows():\n for go in row['GOs'].split(','):\n gene_gos.append((go, gene))\n\n dic_go_genes = {}\n for go in tqdm(gos_in_df):\n genes = []\n for gene_go in gene_gos:\n if go != '' and go not in go_namespaces:\n go = go_alt_ids[go]\n if gene_go[0] == go and go != '' and go_namespaces[go] == go_namespace_studied:\n genes.append(gene_go[1])\n if go != '':\n dic_go_genes[go] = genes\n\n print(len(dic_go_genes))\n\n delete_keys = []\n for go in dic_go_genes:\n if len(dic_go_genes[go]) < 4:\n delete_keys.append(go)\n\n for key in delete_keys:\n del dic_go_genes[key]\n print(len(dic_go_genes))\n\n df_go = pa.DataFrame.from_dict(dic_go_genes, orient='index')\n df_go.insert(0, 'Description', 'GO_terms')\n\n df_go.to_csv('go_gene.gmt', sep='\\t', header=False)\n\n df.reset_index(inplace=True)\n df_query_go = pa.concat([pa.Series(row['Gene_Name'], row['GOs'].split(','))\n for _, row in df.iterrows()]).reset_index()\n df_query_go.columns = ['GOs', 'Gene_Name']\n df_query_go = df_query_go[['Gene_Name', 'GOs']]\n df_query_go.to_csv('query_go.tsv', sep='\\t', index=False)", "def write_concat_GO_dicts(self, *GO_dict):\n\n for i, j in zip(self.IDs[0:3], GO_dict):\n with open(i, 'w') as f:\n f.write('transcript_id\\tgene_ontology\\n')\n for k, v in j.iteritems():\n f.write(k + '\\t' + '\\t'.join(v) + '\\n')" ]
[ "0.615126", "0.5918436", "0.5613667", "0.55950755", "0.5493555", "0.5440564", "0.53833544", "0.53545773", "0.53458494", "0.53149766", "0.5311221", "0.5273746", "0.526471", "0.5234631", "0.5224225", "0.52036613", "0.5177393", "0.5147963", "0.5131519", "0.5126816", "0.51184547", "0.5115466", "0.51079655", "0.5107759", "0.50972503", "0.50833946", "0.5076369", "0.5068838", "0.50617564", "0.50599873" ]
0.6074063
1
Gets a data frame with all the same aglycon structures in one row. Counts all taxonomies and creates a bar plot. 'Double' is also a taxonomy. Saves the bar plot with the numbers of different taxonomies as .png.
def bar_plot(df_NP): cnt = Counter() for tax_list in df_NP.taxonomy: for tax in list(tax_list): if tax != 'no': cnt[tax] += 1 plt.bar(cnt.keys(),cnt.values()) plt.xlabel('taxonomic provenance') plt.ylabel('number of molecules') plt.title('number of aglycons with taxonomies') plt.savefig("output_data/Barplot.png") print("BAR PLOT DONE")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def eda_plot():\n\n df1 = pd.read_csv('eda_malware.csv')\n df2 = pd.read_csv('eda_random.csv')\n df3 = pd.read_csv('eda_popular.csv')\n\n df = pd.concat([df1, df2, df3], ignore_index=True)\n df['label'].replace([0,1],['Benign','Malware'],inplace=True)\n\n colors = ['#EAB6AB','#D9E6F3','#CBAACB','#CCE2CB', '#FFAEA5', '#A2E1DB', '#97C1A9']\n # b vs. m: node types counts\n f1 = pd.crosstab(df['label'], df['node_types_counts'])\n\n f1 = pd.DataFrame({\"3 Types\": [1, 4], \"4 Types\": [1, 407], \"5 Types\": [245, 5768], \"6 Types\": [39, 1113], \"7 Types\": [83, 487], \"8 Types\": [154, 368], \"9 Types\": [103, 286]}).rename(index={0:'Benign', 1:'Malware'})\n f1.plot(kind='bar', color=colors)\n fig = plt.gcf()\n plt.legend(loc='upper left')\n plt.title('Benign vs. Malicious: Number of Node Types')\n fig.savefig('bv_node_types.png')\n\n # for a better look, limit type 5 malware to 2k counts only\n f1 = pd.DataFrame({\"3 Types\": [1, 4], \"4 Types\": [1, 407], \"5 Types\": [245, 2000], \"6 Types\": [39, 1113], \"7 Types\": [83, 487], \"8 Types\": [154, 368], \"9 Types\": [103, 286]}).rename(index={0:'Benign', 1:'Malware'})\n f1.plot(kind='bar', color=colors)\n fig = plt.gcf()\n plt.legend(loc='upper left')\n plt.title('Benign vs. Malicious: Number of Node Types')\n fig.savefig('bv_node_types1.png')\n\n # node types\n # for malware: extract node types info for node types counts > 5, and sum up each types counts\n node_types = df[(df['label'] == 'Malware') & (df['node_types_counts'] >= 5)]['node_types'] #series\n lst = [ast.literal_eval(s) for s in node_types]\n\n c = Counter()\n for d in lst:\n c.update(d)\n\n df_nt = pd.DataFrame(dict(c).items(), columns=['node_types', 'counts'])\n df_nt = df_nt.sort_values(by=['counts'])\n\n sizes = [215060, 2823059, 3135725, 5641356, 10679709, 16547701]\n labels = ['Others', 'static,Node', 'public,static,Node', 'Node', 'external,Node', 'public,Node']\n\n colors = ['#EAB6AB','#D9E6F3','#CBAACB','#CCE2CB', '#FFAEA5', '#A2E1DB']\n\n fig1, ax1 = plt.subplots(figsize=(7, 7))\n ax1.pie(sizes, labels=labels, autopct='%1.1f%%',\n shadow=False, startangle=90, colors=colors)\n ax1.axis('equal') # Equal aspect ratio ensures that pie is drawn as a circle.\n plt.title('Malware: Top Node Types and Its Counts', y=1.05)\n\n plt.show()\n fig1.savefig('counts_pie_m.png')\n\n # for benign: extract node types info for node types counts, and sum up each types counts\n node_types = df[(df['label'] == 'Benign')]['node_types'] #series\n lst = [ast.literal_eval(s) for s in node_types]\n\n c = Counter()\n for d in lst:\n c.update(d)\n\n df_nt = pd.DataFrame(dict(c).items(), columns=['node_types', 'counts'])\n df_nt = df_nt.sort_values(by=['counts'])\n\n sizes = [77967, 2892033, 2964924, 5287258, 6478196, 20364339]\n labels = ['Others', 'staticNode', 'public,staticNode', 'external,Node', 'Node', 'public,Node']\n\n colors = ['#EAB6AB','#D9E6F3','#CBAACB','#CCE2CB', '#FFAEA5', '#A2E1DB']\n\n fig1, ax1 = plt.subplots(figsize=(7, 7))\n ax1.pie(sizes, labels=labels, autopct='%1.1f%%',\n shadow=False, startangle=90, colors=colors)\n ax1.axis('equal') # Equal aspect ratio ensures that pie is drawn as a circle.\n plt.title('Benign: Top Node Types and Its Counts', y=1.05)\n\n plt.show()\n fig1.savefig('counts_pie_b.png')\n\n # benign vs malware: counts\n sizes = [8435, 802]\n labels = ['Benign', 'Malware']\n\n colors = ['#EAB6AB','#D9E6F3']\n\n fig1, ax1 = plt.subplots(figsize=(7, 7))\n ax1.pie(sizes, labels=labels, autopct='%1.1f%%',\n shadow=False, startangle=90, colors=colors)\n 
ax1.axis('equal') # Equal aspect ratio ensures that pie is drawn as a circle.\n plt.title('Number of Benign vs. Malware', y=1.05)\n\n plt.show()\n fig1.savefig('bm_counts.png')\n\n # number of edges vs number of nodes\n groups = df.groupby('label')\n colors = ['#FFAEA5', '#A2E1DB']\n\n # Plot\n fig, ax = plt.subplots()\n ax.margins(0.05) # Optional, just adds 5% padding to the autoscaling\n for name, group in groups:\n if name == 'Benign':\n c = colors[0]\n else:\n c = colors[1]\n ax.plot(group.number_edges, group.number_nodes, marker='o', linestyle='', ms=4, label=name, c=c)\n ax.legend()\n ax.set_xlabel('Number of Edges')\n ax.set_ylabel('Number of Nodes')\n ax.set_title('Benign & Malware: Number of Edges vs. Number of Nodes', y=1.05)\n\n plt.show()\n fig.savefig('bm_edges_nodes.png')", "def barGraph(listOfWord, listOfFrequency):\r\n\r\n\tindex = np.arange(len(listOfWord))\r\n\r\n\tplt.title(\"Frekuensi Kemunculan Kata\")\r\n\tplt.barh(index, listOfFrequency)\r\n\tplt.xlabel('Frekuensi')\r\n\tplt.yticks(index, listOfWord, fontsize=6)\r\n\r\n\tplt.show()", "def createChart(cladeGroup, data, taxonomyDict, outputFile):\n dfData = []\n for clade in cladeGroup: \n temp, other, totalTemp = valueCountsSpecies(data, cladeGroup[clade], taxonomyDict)\n relativeTemp = {}\n for val in temp:\n relativeTemp[val] = (temp[val] / sum(list(temp.values())))*100\n dfData.append(relativeTemp)\n\n tempDF = pd.DataFrame(dfData, index=list(cladeGroup.keys()))\n tempDF = tempDF.fillna(0)\n\n # Plotting\n sns.set(rc={'figure.figsize':(20,15)}, font_scale=2)\n ax = tempDF.plot(kind=\"bar\", stacked=True, colormap=ListedColormap(sns.color_palette(\"twilight\", 12)), rot=0)\n for rect in ax.patches:\n # Find where everything is located\n height = rect.get_height()\n width = rect.get_width()\n x = rect.get_x()\n y = rect.get_y()\n \n # The height of the bar is the data value and can be used as the label\n label_text = f'{height:.2f}%' # f'{width:.2f}' to format decimal values\n \n # ax.text(x, y, text)\n label_x = x + width / 2\n label_y = y + height / 2\n \n # only plot labels greater than given width\n if height > 0.00:\n ax.text(label_x, label_y, label_text, ha='center', va='center', fontsize=20, color=\"w\")\n\n plt.legend(loc=\"center right\", bbox_to_anchor=(1.25, 0.5), ncol=1)\n plt.savefig(outputFile, bbox_inches=\"tight\")\n plt.show()\n return", "def compte(df):\n\n df.value_counts()[:100].plot(kind='bar')\n plt.show()", "def visualizeData(df):\n for column in df:\n df[column].value_counts().plot(kind = 'bar', rot = 'vertical', use_index = False)", "def visualize_data(df):\n # Remove 'not available'\n genres = df.genre.unique().tolist()\n remove_index = genres.index('Not Available')\n genres.pop(remove_index)\n print('Genres: ', genres)\n\n # Extract number of songs in each genre\n genre_counts = df.genre.value_counts().tolist()\n genre_counts.pop(remove_index)\n print('Counts: ', genre_counts)\n\n # Plot bar graph\n plt.bar(genres, genre_counts)\n plt.xlabel('Genres')\n plt.ylabel('Count')\n plt.show()", "def bar_grapgh(dictionary, variable):\r\n plt.clf() # Deletes the previous plot \r\n plt.hist(dictionary[variable])\r\n plt.title('Histogram of ' + variable)\r\n plt.xlabel(variable)\r\n plt.ylabel('Frequency')\r\n plt.savefig(variable)", "def visualize_type():\n \n data_file = parse(MY_FILE, ',')\n\n # num of incidents per category\n counter = Counter(item['Category'] for item in data_file)\n\n # Set the labels\n labels = tuple(counter.keys())\n\n # Set exactly where the labels hit the x-axis\n 
xlocations = na.array(range(len(labels))) + 0.5\n\n # Width of each bar\n width = 0.5\n\n # Assign data to a bar plot\n plt.bar(xlocations, counter.values(), width=width)\n\n # Assign labels and tick location to x-axis\n plt.xticks(xlocations + width / 2, labels, rotation=90)\n \n # Give some more room so the x-axis labels aren't cut off\n plt.subplots_adjust(bottom=0.4)\n\n # Make the overall graph/figure larger\n plt.rcParams['figure.figsize'] = 12, 8\n\n # save\n plt.savefig('Type.png')\n\n # close\n plt.clf()", "def visualize_type():\n\t\n\t#grab our parsed data\n\tdata_file = parse(MY_FILE, \",\")\n\t\n\t#make a new variable, counter, from iterating through each line of\n\t#data in parsed data, and count how many incidents happen by category\n\tcounter = Counter(item[\"Category\"] for item in data_file)\n\t\n\t#set the labels which are based on the keys of our counter\n\t#since order doesn't matter, we can just use counter.keys()\n\tlabels = tuple(counter.keys())\n\t\n\t#set exactly where the labels should hit the x-axis\n\txlocations = np.arange(len(labels)) + 0.5\n\t\n\t#width of each bar that will be plotted\n\twidth = 0.5\n\t\n\t#assign data to a bar plot\n\tplt.bar(xlocations, counter.values(), width=width)\n\t\n\t#assign labels and tick location to x-axis\n\tplt.xticks(xlocations + width /2, labels, rotation=90)\n\t\n\t#give more room to the x-axis so the labels aren't cut off\n\tplt.subplots_adjust(bottom=0.4)\n\t\n\t#make the overall graph/figure larger\n\tplt.rcParams['figure.figsize'] = 12, 8\n\t\n\t#save the graph\n\tplt.savefig(\"type.png\")\n\t\n\t#close the plot figure\n\tplt.clf()", "def BarOverview(data):\n return dcc.Graph(id=\"BarOverview\", className=\"bar\", figure=dict(\n data=[go.Bar(\n x=data[\"frequencies\"],\n y=data[\"names\"],\n orientation='h',\n marker={\n 'color': '#ff4058'\n },\n )],\n layout=dict(\n title=\"<b>Most common Persons</b>\",\n font=dict(family='Soria, Times New Roman, Times, serif', color='#002C77', size=19),\n margin=dict(l=10, r=20, t=50, b=30),\n plot_bgcolor=\"rgba(0,0,0,0)\",\n paper_bgcolor=\"rgba(0,0,0,0)\",\n xaxis=dict(tick0=0, dtick=max(data[\"frequencies\"])),\n yaxis=dict(ticks='outside',\n showgrid=True,\n showline=False,\n showticklabels=False),\n annotations=[dict(xref='paper', yref='y',\n x=0, y=yd,\n font=dict(\n color=\"#000000\",\n size=19\n ),\n text=str(yd),\n showarrow=False) for xd, yd in zip(data[\"frequencies\"], data[\"names\"])]\n )\n ))", "def bar_plot(data, xtitle, title):\n label = list(set(data))\n height = count_elements(data)\n height = [height[i] for i in label]\n plt.bar(label, height=height, width=0.8)\n plt.ylabel('frequency')\n plt.xlabel(xtitle)\n plt.xticks(label)\n plt.savefig('./figures/{}.png'.format(title))\n plt.close()", "def make_area_bar_chart(\r\n sample_ids, taxa_percents, taxa, dir_path, level, prefs,\r\n pref_colors,\r\n background_color, label_color, chart_type,\r\n generate_image_type,\r\n plot_width, plot_height, bar_width, dpi, resize_nth_label,\r\n label_type, include_html_legend, include_html_counts,\r\n file_prefix=None, props={},\r\n others_key=\"All Other Categories\",\r\n others_color=\"#eeeeee\", should_capitalize=True):\r\n # verify there is data in the file\r\n if not taxa_percents:\r\n raise ValueError(\"No data available for area chart.\")\r\n\r\n all_fracs = []\r\n all_labels = []\r\n colors = []\r\n\r\n # set font-size based on the number of samples\r\n fsize = 0\r\n for i in range(7):\r\n fsize = 11 - i\r\n if len(sample_ids) <= (i * 10):\r\n break\r\n\r\n # define figure 
parameters\r\n rc('font', size=fsize)\r\n rc('text', color=label_color)\r\n rc('patch', linewidth=.1)\r\n rc('axes', linewidth=0, edgecolor=background_color)\r\n rc('text', usetex=False)\r\n rc('xtick', labelsize=fsize, color=label_color)\r\n\r\n # define figure\r\n fig = figure(figsize=(plot_width, plot_height))\r\n ax1 = fig.add_subplot(111, axisbg=background_color)\r\n\r\n # change the tick colors and width\r\n for tick in ax1.xaxis.get_ticklines():\r\n tick.set_color(label_color)\r\n tick.set_markersize(0)\r\n tick.set_markeredgewidth(.5)\r\n\r\n # create an iterative array for length of sample_ids\r\n if label_type == 'categorical':\r\n x = numpy.arange(0, len(sample_ids))\r\n elif label_type == 'numeric':\r\n x = sorted(map(lambda x: float(x), sample_ids))\r\n\r\n # numerical numbers must be sorted or else it gets screwed up in the\r\n # plot\r\n else:\r\n raise ValueError('Label type is not valid!')\r\n\r\n # get the raw data into a form, we can use for plotting areas and bars\r\n y_data = numpy.row_stack((zip(*taxa_percents)))\r\n bar_y_data = zip(*taxa_percents)\r\n y_data_stacked = numpy.cumsum(y_data, axis=0)\r\n\r\n # if area chart we use fill_between\r\n if chart_type == 'area':\r\n # bar_width is for mouseovers, and since area charts are more polygonal\r\n # we use a small width, so user can at least mouseover on the x-axis\r\n # positions\r\n bar_width = 0.005\r\n # fill the first taxa\r\n ax1.fill_between(x, 0, y_data_stacked[0, :], linewidth=0,\r\n facecolor=data_colors[pref_colors[taxa[0]]].toHex(),\r\n alpha=1)\r\n\r\n # fill all taxa up to the last one\r\n for i, j in enumerate(y_data_stacked):\r\n if i < len(y_data_stacked) - 1:\r\n next = i + 1\r\n ax1.fill_between(x, y_data_stacked[i, :],\r\n y_data_stacked[next, :], linewidth=0,\r\n facecolor=data_colors[\r\n pref_colors[taxa[i + 1]]].toHex(),\r\n alpha=1)\r\n # fill the last taxa to the total height of 1/\r\n else:\r\n ax1.fill_between(x, y_data_stacked[i, :], 1, linewidth=0,\r\n facecolor=data_colors[\r\n pref_colors[taxa[i]]].toHex(),\r\n alpha=1)\r\n\r\n # this cleans up the whitespace around the subplot\r\n # ax1.set_xlim((0,len(x)))\r\n ax1.set_ylim((0, 1))\r\n\r\n # if area chart we use bar\r\n elif chart_type == 'bar':\r\n\r\n # iterate over the data and make stacked bars\r\n for i, j in enumerate(bar_y_data):\r\n # if we are not in the first row of array, append more taxa\r\n if i > 0:\r\n ax1.bar(x, bar_y_data[i], width=bar_width, linewidth=0,\r\n color=data_colors[pref_colors[taxa[i]]].toHex(),\r\n bottom=numpy.sum(bar_y_data[:i], axis=0), align='center')\r\n # make the bars for the first row of array\r\n else:\r\n ax1.bar(x, bar_y_data[i], width=bar_width, linewidth=0,\r\n color=data_colors[pref_colors[taxa[i]]].toHex(),\r\n align='center')\r\n # this cleans up the whitespace around the subplot\r\n # ax1.set_xlim((-0.5,len(sample_ids)-0.5))\r\n ax1.set_ylim((0, 1))\r\n\r\n # transform bar_data into an area map for html mouseovers\r\n xmap = transform_and_generate_xmap(ax1, bar_y_data, bar_width, taxa, x,\r\n plot_height, dpi, taxa_percents, sample_ids,\r\n chart_type)\r\n\r\n # rename each area map based on the level passed in.\r\n points_id = 'rect%s' % (level)\r\n\r\n # append the area map html\r\n map_html = MAP_SRC % (points_id, ''.join(xmap))\r\n\r\n # set the values for the x-ticks\r\n # ax1.xaxis.set_ticks(x)\r\n # create an iterative array for length of sample_ids\r\n if label_type == 'numeric':\r\n x_axis_labels = numpy.arange(min(x), max(x))\r\n output_labels = []\r\n if resize_nth_label > 
0:\r\n # xlabels=ax1.get_xticklabels()\r\n start = x_axis_labels[0]\r\n iterator_size = 0\r\n for i, l in enumerate(x_axis_labels):\r\n if l == start:\r\n output_labels.append(l)\r\n else:\r\n if (iterator_size == (resize_nth_label - 1)):\r\n output_labels.append(l)\r\n iterator_size = 0\r\n else:\r\n iterator_size = iterator_size + 1\r\n\r\n ax1.xaxis.set_ticks(output_labels)\r\n ax1.set_xticklabels(output_labels, rotation='vertical')\r\n else:\r\n ax1.xaxis.set_ticks(x)\r\n ax1.set_xticklabels(x, rotation='vertical')\r\n\r\n else:\r\n x_axis_labels = numpy.arange(0, len(sample_ids))\r\n # if the user would like to create larger labels for every nth label\r\n # this iterates over the labels and adds the value 4 to the font-size,\r\n # thereby making the fontsize larger\r\n x_tick_locations = []\r\n output_labels = []\r\n if resize_nth_label > 0:\r\n xlabels = ax1.get_xticklabels()\r\n iterator_size = 0\r\n for i, l in enumerate(x_axis_labels):\r\n if i == 0:\r\n output_labels.append(sample_ids[i])\r\n x_tick_locations.append(i)\r\n else:\r\n if iterator_size == (resize_nth_label - 1):\r\n output_labels.append(sample_ids[i])\r\n x_tick_locations.append(i)\r\n iterator_size = 0\r\n else:\r\n iterator_size = iterator_size + 1\r\n ax1.xaxis.set_ticks(x_tick_locations)\r\n ax1.set_xticklabels(output_labels, rotation='vertical')\r\n else:\r\n ax1.xaxis.set_ticks(x)\r\n ax1.set_xticklabels(sample_ids, rotation='vertical')\r\n\r\n ax1.set_yticks([])\r\n\r\n # write out\r\n if file_prefix is None:\r\n img_name = make_img_name(file_ext='.png')\r\n else:\r\n img_name = file_prefix\r\n\r\n # define filepath\r\n img_abs = os.path.join(dir_path, 'charts', img_name)\r\n savefig(img_abs, dpi=80, facecolor=background_color)\r\n eps_link = \"\"\r\n eps_abs = \"\"\r\n\r\n # generate the image as a pdf\r\n if file_prefix is None:\r\n eps_img_name = make_img_name(file_ext=\".%s\" % generate_image_type)\r\n else:\r\n eps_img_name = file_prefix + \".%s\" % generate_image_type\r\n\r\n savefig(os.path.join(dir_path, 'charts', eps_img_name),\r\n facecolor=background_color)\r\n\r\n # generate the image as an eps\r\n if generate_image_type == 'eps':\r\n strip_eps_font(os.path.join(dir_path, 'charts', eps_img_name))\r\n\r\n eps_abs = os.path.join(dir_path, 'charts', eps_img_name)\r\n eps_link = PDF_LINK % (os.path.join('charts',\r\n eps_img_name), 'View Figure (.%s)' % generate_image_type)\r\n\r\n close(fig)\r\n clf()\r\n\r\n # this converts the sample_ids to a sample_id array and a corresponding\r\n # color array, so we can set the order based on array\r\n updated_taxa = []\r\n updated_colors = []\r\n for i in taxa:\r\n if i != others_key:\r\n updated_taxa.append(i.replace('\"', ''))\r\n updated_colors.append(data_colors[pref_colors[i]].toHex())\r\n else:\r\n updated_taxa.append(others_key)\r\n updated_colors.append(others_color)\r\n\r\n if include_html_legend:\r\n # first make a low-res png legend for display on webpage\r\n legend_fname_png = make_legend(updated_taxa, updated_colors,\r\n plot_width, plot_height, label_color,\r\n background_color, img_abs, 'png', 80)\r\n\r\n legend_fpath_png = (os.path.join('charts', legend_fname_png))\r\n\r\n # make high-res legend\r\n legend_fname = make_legend(updated_taxa, updated_colors,\r\n plot_width, plot_height, label_color,\r\n background_color, img_abs, generate_image_type, dpi)\r\n\r\n legend_fpath = (os.path.join('charts', legend_fname))\r\n legend_link = LEGEND_LINK % (legend_fpath, 'View Legend (.%s)' %\r\n (generate_image_type))\r\n\r\n if not 
include_html_legend:\r\n IMG_TEXT = IMG_SRC_minus_legend % (os.path.join('charts', img_name),\r\n points_id)\r\n else:\r\n IMG_TEXT = IMG_SRC_2 % (os.path.join('charts', img_name),\r\n points_id, legend_fpath_png)\r\n\r\n return eps_link, legend_link, IMG_TEXT, map_html", "def graph_cause_count(df):\r\n # set the visual features of the graph\r\n sns.set(font_scale=2)\r\n sns.set_style(\"darkgrid\")\r\n fig, ax = plt.subplots()\r\n fig.set_size_inches(20, 12)\r\n plt.xticks(rotation=45)\r\n ax.set_title(\"Yearly Vehicle Accident Police Deaths\")\r\n # create the graph of the data\r\n plot = sns.barplot(\"year\", \"count\", data=df, palette=\"winter_d\", ci=None)\r\n # plt.show()\r\n # save the graph as an image\r\n fig.savefig(\"2_graph_cause_count.png\")", "def value_counts_plot(df):\n \n plt.figure(figsize=(15,10))\n \n #get rid of sort_index() to change the graph\n return df.value_counts().sort_index().plot(kind='bar')", "def return_figures():\n graph_one = []\n df = cleandata()\n\n graph_one.append(\n go.Bar(name='Ones', x=['Related', 'Request', 'Offer',\n 'Aid related', 'Medical help', 'Medical products',\n 'Search and rescue', 'Security', 'Military', 'Child alone',\n 'Water', 'Food', 'Shelter', 'Clothing', 'Money', 'Missing people',\n 'Refugees', 'Death', 'Other aid', 'Infrastructure related',\n 'Transport', 'Buildings', 'Electricity', 'Tools', 'Hospitals',\n 'Shops', 'Aid centers', 'Other infrastructure', 'Weather related',\n 'Floods', 'Storm', 'Fire', 'Earthquake', 'Cold', 'Other weather',\n 'Direct report'], y=[df['related'].sum(),\n df['request'].sum(),\n df['offer'].sum(),\n df['aid_related'].sum(),\n df['medical_help'].sum(),\n df['medical_products'].sum(),\n df['search_and_rescue'].sum(),\n df['security'].sum(),\n df['military'].sum(),\n df['child_alone'].sum(),\n df['water'].sum(),\n df['food'].sum(),\n df['shelter'].sum(),\n df['clothing'].sum(),\n df['money'].sum(),\n df['missing_people'].sum(),\n df['refugees'].sum(),\n df['death'].sum(),\n df['other_aid'].sum(),\n df['infrastructure_related'].sum(),\n df['transport'].sum(),\n df['buildings'].sum(),\n df['electricity'].sum(),\n df['tools'].sum(),\n df['hospitals'].sum(),\n df['shops'].sum(),\n df['aid_centers'].sum(),\n df['other_infrastructure'].sum(),\n df['weather_related'].sum(),\n df['floods'].sum(),\n df['storm'].sum(),\n df['fire'].sum(),\n df['earthquake'].sum(),\n df['cold'].sum(),\n df['other_weather'].sum(),\n df['direct_report'].sum()]),\n )\n\n layout_one = dict(title='Distribution of message categories',\n xaxis=dict(tickangle=45),\n yaxis=dict(title='Count'),\n )\n\n graph_two = []\n graph_two.append(\n go.Bar(\n x=['Direct', 'News', 'Social'],\n y=df.groupby('genre').count()['message'],\n )\n )\n\n layout_two = dict(title='Distribution of message genres',\n xaxis=dict(title='Message Genres', ),\n yaxis=dict(title='Count'),\n )\n\n # append all charts to the figures list\n figures = []\n figures.append(dict(data=graph_one, layout=layout_one))\n figures.append(dict(data=graph_two, layout=layout_two))\n\n return figures", "def category_bar_chart(df):\n label_names = df.drop(['message', 'original', 'genre', 'id'], axis=1).columns\n label_counts = []\n for column in label_names:\n label_counts.append(df[column].sum())\n return {\n 'data': [\n Bar(\n x=label_names,\n y=label_counts\n )\n ],\n\n 'layout': {\n 'title': 'Distribution of Labelled Categories',\n 'yaxis': {\n 'title': \"Count\",\n 'type': 'log'\n },\n 'xaxis': {\n 'title': \"Category\"\n }\n }\n }", "def plot_featurewise_barplot(\n utr5_counts, 
cds_counts, utr3_counts, ax=None, saveto=None, **kwargs\n):\n fig = None\n if ax is None:\n fig, ax = plt.subplots()\n else:\n fig = ax.get_figure()\n barlist = ax.bar([0, 1, 2], [utr5_counts, cds_counts, utr3_counts])\n barlist[0].set_color(\"#1b9e77\")\n barlist[1].set_color(\"#d95f02\")\n barlist[2].set_color(\"#7570b3\")\n ax.set_xticks([0, 1, 2])\n ax.set_xticklabels([\"5'UTR\", \"CDS\", \"3'UTR\"])\n max_counts = np.max(np.hstack([utr5_counts, cds_counts, utr3_counts]))\n setup_axis(\n ax=ax, axis=\"y\", majorticks=max_counts // 10, minorticks=max_counts // 20\n )\n ax.set_ylabel(\"# RPFs\")\n # sns.despine(trim=True, offset=10)\n if saveto:\n fig.tight_layout()\n fig.savefig(saveto, dpi=DPI)\n return ax, fig", "def visualize_type(parsed_data, output_dir):\n\n # Fetching incident data by category\n counter = fetch_incident_by_category_and_resolution(parsed_data)\n\n # List of total incidents by Category\n # list of unsolved incidents by Category\n y1_values = [item[0] for item in counter.values()]\n y2_values = [item[1] for item in counter.values()]\n\n # Category labels\n x_labels = tuple(counter.keys())\n\n # Width of each bar\n bar_width = 0.4\n\n # bar locations on x-axis\n x1_locations = np.arange(len(x_labels))\n x2_locations = x1_locations + bar_width\n\n # assigning data to a bar plot\n plt.bar(x1_locations, y1_values, width=bar_width, label = \"Total\")\n plt.bar(x2_locations, y2_values, width=bar_width, label = \"Unresolved\")\n\n # Assigning labels and tick location to x-axis\n plt.xlabel('Incident Category', fontweight='bold')\n plt.ylabel('Incident Count', fontweight='bold')\n plt.xticks(x1_locations + bar_width/2, x_labels, rotation=90)\n\n # Giving some more room below x-axis\n plt.subplots_adjust(bottom=0.4)\n\n # Making the overall graph/figure larger\n plt.rcParams['figure.figsize'] = 12, 8\n\n plt.legend()\n file_name = os.path.join(output_dir, TYPE_PLOT_FILENAME)\n plt.savefig(file_name)\n plt.show()", "def multiple_bars(self, df, nrows, ncols, dict):\n fig, axs = plt.subplots(nrows=nrows, ncols=ncols, figsize=(6, 9.3))\n\n fig.subplots_adjust(left=0.03, right=0.97, hspace=0.50, wspace=0.05)\n\n bar_width = 0.35\n for ax, (key, dat) in zip(axs.flatten(), df):\n n_groups = len(dat.index)\n index = np.arange(n_groups)\n\n # make barchart for permutation test\n bar1 = ax.bar(index, dat[\"perm\"], bar_width, color='b',\n label='Permutation test')\n # make barchart for t-test\n bar2 = ax.bar(index + bar_width, dat[\"t_test\"], bar_width, color='r',\n label='t-test')\n\n ax.set_ylabel(\"Error\")\n ax.set_xticks(index + bar_width / 2)\n ax.set_xticklabels(dict[\"xtickslabels\"])\n ax.set_title(f\"Effect size = {key}\")\n ax.set_xlabel(f\"Group Size\")\n ax.legend()\n\n for rect, i in zip(bar1 + bar2, dat[\"sig\"]):\n height = rect.get_height()\n if i:\n ax.text(rect.get_x() + rect.get_width(), height, \"**\", ha='center', va='bottom')\n\n\n fig.suptitle(dict[\"title\"], y=1.0, fontsize = 15)\n fig.tight_layout()\n plt.show()", "def barplot(data, field_name, field_categories):\n\n\tcategories, counts = np.unique(data[field_name], return_counts=True)\n\n\tfig = plt.figure(figsize=(4, 3))\n\taxes = fig.add_axes([0, 0, 1, 1]) # left, bottom, width, height (range 0 to 1)\n\taxes.bar(range(len(categories)), counts, fc=\"gray\") # fc is the face color\n\n\taxes.set_xlabel(\"\")\n\taxes.set_ylabel('Count')\n\taxes.set_title(field_name)\n\tfig.autofmt_xdate(rotation=45)\n\n\taxes.set_xticks(range(len(categories)))\n\taxes.set_xticklabels([field_categories[c] for c in 
categories]);", "def bar_chart(self, df, n_groups, dict):\n fig, ax = plt.subplots()\n # choose bar width (standard 0.8 chosen)\n bar_width = 0.35\n # get an index to set the ticks for the x axis\n\n index = np.arange(n_groups)\n indexes = df.index.tolist()\n print(indexes)\n df[\"index\"] = indexes\n\n # make barchart for permutation test\n ax.bar(index, df[\"perm\"], bar_width, color='b', linewidth=4,\n label='Permutation test')\n # make barchart for t-test\n ax.bar(index + bar_width, df[\"t_test\"], bar_width, color='r',\n label='t-test')\n\n ax.set_xlabel(dict[\"xlabel\"])\n ax.set_ylabel(dict[\"ylabel\"])\n ax.set_title(dict[\"title\"])\n ax.set_xticks(index + bar_width / 2)\n ax.set_xticklabels(dict[\"xtickslabels\"])\n ax.legend()\n\n fig.tight_layout()\n plt.show()", "def graph_cause_count_each(df, label):\r\n # set the visual features of the graph\r\n sns.set(font_scale=1.5)\r\n sns.set_style(\"darkgrid\")\r\n fig, ax = plt.subplots()\r\n fig.set_size_inches(15, 8)\r\n plt.xticks(rotation=45)\r\n ax.set_title(label.capitalize() + \" Police Death Causes\")\r\n # create the graph of the data\r\n plot = sns.barplot(\"year\", \"count\", data=df, palette=\"winter_d\")\r\n # plt.show()\r\n # save the graph as an image with the correct cause naming\r\n name = \"2_graph_cause_count_\" + label + \".png\"\r\n fig.savefig(name)", "def simple_bar():\n\n # Make random discrete data\n discrete_a = np.zeros((8,2))\n discrete_b = np.zeros((8,2))\n discrete_c = np.zeros((8,2))\n discrete_a[:,0] = np.arange(8)\n discrete_b[:,0] = np.arange(8)\n discrete_c[:,0] = np.arange(8)\n discrete_a[:,1] = np.random.rand(8)*10\n discrete_b[:,1] = np.random.rand(8)*10\n discrete_c[:,1] = np.random.rand(8)*10\n\n # Make data sets, if using multiple bar_width must be the same\n dataset_a = DataSet(discrete_a,colour='pink',bar_width=0.8,plot='bar',label='A')\n dataset_b = DataSet(discrete_b,colour='violet',bar_width=0.8,plot='bar',label='B')\n dataset_c = DataSet(discrete_c,colour='darkviolet',bar_width=0.8,plot='bar',label='C')\n\n # Make plot object and add data sets\n plot = Plot()\n plot.add_dataset(dataset_a)\n plot.add_dataset(dataset_b)\n plot.add_dataset(dataset_c)\n plot.set_axes(xticks=(1,1),xlim=(-0.5,7.5),ylim=(0,12))\n plot.set_legend(legend=True,location='upper right')\n plot.set_text(legend=8)\n\n # Plot graph and display\n plot.plot()\n plot.save(name='./figures/2d_simple_bar',fmt='png')\n plot.display()", "def img_gen_bar():\n data = pd.DataFrame(data=np.random.rand(5,1), index=range(1,6), columns=['Fred'])\n #m,n = np.shape(data)\n\n plt.clf()\n plt.bar(x=data.index.values, height=data.values.ravel(), color='k') # figsize=(10, 6))\n # Options for later from https://matplotlib.org/api/_as_gen/matplotlib.pyplot.bar.html\n # bar_width = 0.35\n # alpha = .3\n fig=plt.gcf()\n fig.set_size_inches(2.24, 2.24)\n plt.axis('off')\n fig.tight_layout()\n fig.canvas.draw()\n # grab the pixel buffer and dump it into a numpy array\n pixels = np.array(fig.canvas.renderer._renderer)[:,:,:3]\n #print(pixels.shape)\n return pixels, data.index.values + data.values.ravel()", "def graph(link, outfile):\n soup = scraper.get_html_text(link)\n restaurants = scraper.get_restaurant_names(soup)\n costs = scraper.get_costs(soup)\n ratings = scraper.get_ratings(soup)\n \n l = len(restaurants)\n \n plt.bar(range(l),costs,color=\"#CC0033\")\n plt.ylabel(\"COSTS\",fontsize=15,color=\"#330033\")\n plt.xticks(arange(0.5,30.5,1.0), restaurants, ha=\"center\", rotation=90)\n 
plt.xlabel(\"RESTAURANTS\",fontsize=15,color=\"#330033\")\n plt.xlim([0,l])\n\n for X, Y, Z in zip(range(l), costs, ratings):\n plt.annotate('{}'.format(Z), xy=(X,Y), xytext=(0,2),textcoords='offset points',fontsize=8,color=\"#330033\")\n \n fig = plt.gcf()\n fig.subplots_adjust(bottom=0.4)\n fig.savefig(outfile)", "def plot_hist_drugs(x, name='drugs_hist', ordered=False, path='hist_drugs.png'):\n # x = data['DRUG']\n val_counts = x.value_counts().reset_index().rename(columns={'index': 'drug', 'DRUG': 'count'})\n if ordered:\n val_counts.sort_values(by='count', inplace=True)\n else:\n val_counts.sort_values(by='drug', inplace=True)\n fig, ax = plt.subplots()\n plt.barh(val_counts['drug'], val_counts['count'], color='b', align='center', alpha=0.7)\n # g = sns.barplot(x=val_counts['count'], y=val_counts['drug'], palette='viridis')\n # # g = sns.catplot(y='DRUG', data=data, kind='count', palette='viridis', alpha=0.9)\n # # g = sns.catplot(y='DRUG', data=data, kind='count', order=data['DRUG'].value_counts().index, palette='viridis', alpha=0.9)\n plt.xlabel('count')\n plt.ylabel('drug label')\n plt.grid(True)\n plt.tight_layout()\n plt.savefig(path, bbox_inches='tight')", "def Plot_disease_count(Y1,Y2,Y3,Y_names,comments):\n # Plot number of diseases per dataset\n plt.style.use('ggplot')\n # Create DataFrame for plot\n Y = np.array(np.concatenate((Count_label(Y_names,Y1), Count_label(Y_names,Y2),Count_label(Y_names,Y3)), axis=1))\n df = pd.DataFrame(Y, columns=['train','test','valid'], index=Y_names)\n # Show Dataframe\n # Plot bars\n ax=plt.figure(figsize=(13,7))\n for i,l in enumerate(df.columns):\n ax = plt.subplot(2,3,i+1)\n ax.set_title(comments[0] + l)\n bars = ax.bar(df.index,df[l],facecolor='cyan',edgecolor='black')\n plt.tight_layout()\n plt.show()", "def plot_individual_bar_chart_graph(data_values, title,\r\n number_of_keys,\r\n max_val,\r\n vals_for_bar_chart,\r\n file_in):\r\n\r\n n_groups = len(vals_for_bar_chart)\r\n fig, ax = plt.subplots()\r\n index = np.arange(n_groups)\r\n bar_width = 0.9\r\n opacity = 0.4\r\n # print vals_for_bar_chart\r\n rects1 = plt.bar(index,\r\n vals_for_bar_chart,\r\n bar_width,\r\n alpha=opacity,\r\n color='b') # label='whatever'\r\n plt.xlabel('number in cluster')\r\n plt.ylabel('Count')\r\n plt.title(title+\"_barchart\")\r\n plt.legend()\r\n pylab.grid(True)\r\n ax.set_yscale('symlog')\r\n ax.set_xscale('symlog')\r\n plt.tight_layout()\r\n plt.show()\r\n pylab.savefig(file_in + \"_\" + title + '_barchart.png')\r\n plt.close()\r\n pylab.close()", "def plot_meta_counts(self, meta_field, normalize=False, sort_values=True):\n counts = self.sample_meta[meta_field].value_counts(normalize=normalize)\n colname = \"counts\" if normalize is False else \"frequency\"\n df = pd.DataFrame({meta_field: counts.index.values, colname: counts.values})\n return barplot(df=df, x=meta_field, y=colname)", "def message_genre_bar_chart(df):\n genre_counts = df.groupby('genre').count()['message']\n genre_names = list(genre_counts.index)\n return {\n 'data': [\n Bar(\n x=genre_names,\n y=genre_counts\n )\n ],\n\n 'layout': {\n 'title': 'Distribution of Message Genres',\n 'yaxis': {\n 'title': \"Count\"\n },\n 'xaxis': {\n 'title': \"Genre\"\n }\n }\n }" ]
[ "0.670801", "0.6379678", "0.62803435", "0.62481606", "0.621397", "0.6171354", "0.614709", "0.612319", "0.6084341", "0.604334", "0.6034777", "0.5924511", "0.5897462", "0.5889659", "0.5855359", "0.58464646", "0.58460945", "0.57800704", "0.57795185", "0.57522184", "0.572955", "0.57097673", "0.5707373", "0.57052994", "0.56968343", "0.56886065", "0.568451", "0.56743795", "0.5671472", "0.5669348" ]
0.7676816
0
Gets a data frame with all the same aglycon structures in one row. Counts all taxonomies and creates a venn diagram with the four taxonomies plants, bacteria, animals, fungi. Reads the original taxonomies of the 'Double' entries. Saves a venn diagram of the different taxonomies as .png.
def venn_diagram(df_NP, taxonomy_Double):
    taxonomy_Single = [list(tax) for tax in df_NP.taxonomy if 'double' not in tax]
    taxonomy_All = taxonomy_Single + taxonomy_Double
    plants = set()
    bacteria = set()
    animals = set()
    fungi = set()
    for tax_list in taxonomy_All:
        if "plants" in tax_list:
            for tax in tax_list:
                plants.add(tax.index)
        if "bacteria" in tax_list:
            for tax in tax_list:
                bacteria.add(tax.index)
        if "animals" in tax_list:
            for tax in tax_list:
                animals.add(tax.index)
        if "fungi" in tax_list:
            for tax in tax_list:
                fungi.add(tax.index)
    dic_for_venn = {"plants": plants, "bacteria": bacteria, "animals": animals, "fungi": fungi}
    fig = venn.venn(dic_for_venn)
    plt.title("venn-diagram from the taxonomy of aglycons")
    plt.savefig("output_data/Venn-Diagram.png")
    print("VENN DIAGRAM DONE")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def eda_plot():\n\n df1 = pd.read_csv('eda_malware.csv')\n df2 = pd.read_csv('eda_random.csv')\n df3 = pd.read_csv('eda_popular.csv')\n\n df = pd.concat([df1, df2, df3], ignore_index=True)\n df['label'].replace([0,1],['Benign','Malware'],inplace=True)\n\n colors = ['#EAB6AB','#D9E6F3','#CBAACB','#CCE2CB', '#FFAEA5', '#A2E1DB', '#97C1A9']\n # b vs. m: node types counts\n f1 = pd.crosstab(df['label'], df['node_types_counts'])\n\n f1 = pd.DataFrame({\"3 Types\": [1, 4], \"4 Types\": [1, 407], \"5 Types\": [245, 5768], \"6 Types\": [39, 1113], \"7 Types\": [83, 487], \"8 Types\": [154, 368], \"9 Types\": [103, 286]}).rename(index={0:'Benign', 1:'Malware'})\n f1.plot(kind='bar', color=colors)\n fig = plt.gcf()\n plt.legend(loc='upper left')\n plt.title('Benign vs. Malicious: Number of Node Types')\n fig.savefig('bv_node_types.png')\n\n # for a better look, limit type 5 malware to 2k counts only\n f1 = pd.DataFrame({\"3 Types\": [1, 4], \"4 Types\": [1, 407], \"5 Types\": [245, 2000], \"6 Types\": [39, 1113], \"7 Types\": [83, 487], \"8 Types\": [154, 368], \"9 Types\": [103, 286]}).rename(index={0:'Benign', 1:'Malware'})\n f1.plot(kind='bar', color=colors)\n fig = plt.gcf()\n plt.legend(loc='upper left')\n plt.title('Benign vs. Malicious: Number of Node Types')\n fig.savefig('bv_node_types1.png')\n\n # node types\n # for malware: extract node types info for node types counts > 5, and sum up each types counts\n node_types = df[(df['label'] == 'Malware') & (df['node_types_counts'] >= 5)]['node_types'] #series\n lst = [ast.literal_eval(s) for s in node_types]\n\n c = Counter()\n for d in lst:\n c.update(d)\n\n df_nt = pd.DataFrame(dict(c).items(), columns=['node_types', 'counts'])\n df_nt = df_nt.sort_values(by=['counts'])\n\n sizes = [215060, 2823059, 3135725, 5641356, 10679709, 16547701]\n labels = ['Others', 'static,Node', 'public,static,Node', 'Node', 'external,Node', 'public,Node']\n\n colors = ['#EAB6AB','#D9E6F3','#CBAACB','#CCE2CB', '#FFAEA5', '#A2E1DB']\n\n fig1, ax1 = plt.subplots(figsize=(7, 7))\n ax1.pie(sizes, labels=labels, autopct='%1.1f%%',\n shadow=False, startangle=90, colors=colors)\n ax1.axis('equal') # Equal aspect ratio ensures that pie is drawn as a circle.\n plt.title('Malware: Top Node Types and Its Counts', y=1.05)\n\n plt.show()\n fig1.savefig('counts_pie_m.png')\n\n # for benign: extract node types info for node types counts, and sum up each types counts\n node_types = df[(df['label'] == 'Benign')]['node_types'] #series\n lst = [ast.literal_eval(s) for s in node_types]\n\n c = Counter()\n for d in lst:\n c.update(d)\n\n df_nt = pd.DataFrame(dict(c).items(), columns=['node_types', 'counts'])\n df_nt = df_nt.sort_values(by=['counts'])\n\n sizes = [77967, 2892033, 2964924, 5287258, 6478196, 20364339]\n labels = ['Others', 'staticNode', 'public,staticNode', 'external,Node', 'Node', 'public,Node']\n\n colors = ['#EAB6AB','#D9E6F3','#CBAACB','#CCE2CB', '#FFAEA5', '#A2E1DB']\n\n fig1, ax1 = plt.subplots(figsize=(7, 7))\n ax1.pie(sizes, labels=labels, autopct='%1.1f%%',\n shadow=False, startangle=90, colors=colors)\n ax1.axis('equal') # Equal aspect ratio ensures that pie is drawn as a circle.\n plt.title('Benign: Top Node Types and Its Counts', y=1.05)\n\n plt.show()\n fig1.savefig('counts_pie_b.png')\n\n # benign vs malware: counts\n sizes = [8435, 802]\n labels = ['Benign', 'Malware']\n\n colors = ['#EAB6AB','#D9E6F3']\n\n fig1, ax1 = plt.subplots(figsize=(7, 7))\n ax1.pie(sizes, labels=labels, autopct='%1.1f%%',\n shadow=False, startangle=90, colors=colors)\n 
ax1.axis('equal') # Equal aspect ratio ensures that pie is drawn as a circle.\n plt.title('Number of Benign vs. Malware', y=1.05)\n\n plt.show()\n fig1.savefig('bm_counts.png')\n\n # number of edges vs number of nodes\n groups = df.groupby('label')\n colors = ['#FFAEA5', '#A2E1DB']\n\n # Plot\n fig, ax = plt.subplots()\n ax.margins(0.05) # Optional, just adds 5% padding to the autoscaling\n for name, group in groups:\n if name == 'Benign':\n c = colors[0]\n else:\n c = colors[1]\n ax.plot(group.number_edges, group.number_nodes, marker='o', linestyle='', ms=4, label=name, c=c)\n ax.legend()\n ax.set_xlabel('Number of Edges')\n ax.set_ylabel('Number of Nodes')\n ax.set_title('Benign & Malware: Number of Edges vs. Number of Nodes', y=1.05)\n\n plt.show()\n fig.savefig('bm_edges_nodes.png')", "def bar_plot(df_NP):\n cnt = Counter()\n for tax_list in df_NP.taxonomy:\n for tax in list(tax_list):\n if tax != 'no':\n cnt[tax] += 1\n plt.bar(cnt.keys(),cnt.values())\n plt.xlabel('taxonomic provenance')\n plt.ylabel('number of molecules')\n plt.title('number of aglycons with taxonomies')\n plt.savefig(\"output_data/Barplot.png\")\n print(\"BAR PLOT DONE\")", "def main():\n rows = []\n for path in DATA.glob(\"*.tsv\"):\n with path.open() as file:\n _header = next(file)\n for line in file:\n dead_id, when, alt_id = line.strip(\"\\n\").split(\"\\t\")\n rows.append((path.stem, dead_id, when, alt_id))\n\n rows = sorted(rows)\n\n with OUTPUT_PATH.open(\"w\") as file:\n print(*HEADER, sep=\"\\t\", file=file)\n for row in rows:\n print(*row, sep=\"\\t\", file=file)\n\n df = pd.DataFrame(rows, columns=[\"prefix\", \"dead_id\", \"date\", \"alternative_id\"])\n fig, ax = plt.subplots(figsize=(6, 3))\n sns.histplot(data=df, y=\"prefix\", ax=ax)\n ax.set_ylabel(\"\")\n ax.set_xscale(\"log\")\n ax.set_xlabel(\"Dead Identifiers\")\n fig.tight_layout()\n fig.savefig(SUMMARY_SVG_PATH)", "def taxonomy_files(self):\n location=self.place.capitalize()+'-'+str(self.year)+'-'\n no_of_ideograms=self.OTU.make_tree(location,self.start_level,self.plot_level)\n return no_of_ideograms", "def main():\n\n #Getthefiles\n all_plasmid_path = []\n path_to_all_info = '/Users/gustavotamasco/mdrkrp/project_MDR_KRP/all_vir_files'\n dirpath=os.getcwd()\n os.chdir(path_to_all_info)\n files = list_files_simple(path_to_all_info)\n\n bad_files = [\"Hemo_536_vfdb_genome.tsv\", \"MI_119_vfdb.tsv\", \"Hemo_536_vfdb.tsv\",\n \"MI_119_vfdb_genome.tsv\",\n \"URO_775_vfdb_genome.tsv\", \"Hemo_825_vfdb.tsv\", \"URO_775_vfdb.tsv\",\n \"Hemo_825_vfdb_genome.tsv\",\n \"MI_329_vfdb.tsv\", \"MI_569_vfdb_genome.tsv\", \"MI_329_vfdb_genome.tsv\",\n \"MI_569_vfdb.tsv\",\n \"Hemo_989_vfdb_genome.tsv\", \"MI_78_vfdb.tsv\", \"Hemo_989_vfdb.tsv\",\n \"MI_78_vfdb_genome.tsv\"]\n\n final_files = list([x for x in files if x not in bad_files])\n print(len(final_files))\n\n\n '''Building metadata'''\n #All genes to each genome\n metadata = {}\n for file in final_files:\n with open(file) as vir_info:\n parse_genes_v2(file, vir_info, metadata)\n\n\n #All genes that occured\n all_genes = sorted(set(get_all_genes(metadata)))\n print(all_genes)\n\n #All vir classess\n\n\n '''Build dataframe for the classes plot'''\n df_info = {}\n df_major_classes = build_class_df(df_info, all_genes, metadata)\n df = pd.DataFrame.from_dict(df_major_classes, orient='index', columns=['entA', 'entB', 'entE', 'entS', 'fepA', 'fepB', 'fepC', 'fepD', 'fepG', 'fimA', 'fimE', 'fyuA', 'irp1', 'irp2', 'mgtB', 'mgtC', 'ompA', 'xcpA/pilD', 'xcpR', 'yagV/ecpE', 'yagW/ecpD', 'yagX/ecpC', 'yagY/ecpB', 
'yagZ/ecpA', 'ybtA', 'ybtE', 'ybtP', 'ybtQ', 'ybtS', 'ybtT', 'ybtU', 'ybtX', 'ykgK/ecpR'])\n #df = df.transpose()\n #df.to_csv('arg_genes.csv', sep='\\t', encoding='utf-8')\n #sns.set(font_scale=0.65)\n #Need both\n #not_full = sns.clustermap(df, label='small', cmap=\"vlag\", standard_scale=1, linewidths=0)\n full_plot = sns.clustermap(df, label='small', cmap=\"vlag\", linewidths=0)\n #plt.title('Antibiotic resistance genes across 34 organism', fontsize=15)\n #sns.set(font_scale=1)\n plt.show()\n full_plot.savefig(\"final_genome_plasmid_vir.pdf\", bbox_inches='tight')\n #not_full.savefig(\"final_genome_plasmid_vir_scalled.pdf\", bbox_inches='tight')", "def stats_orgs(df, new_data=False):\n rows = []\n\n if new_data:\n df = df[df.index.isin(in_taxa_dict.keys())]\n else:\n df = df[df.index.isin(db_taxa_dict.keys())]\n\n df2 = df.copy()\n df2[df2 >= 1] = 1\n\n df = df.sum(axis=1).to_frame()\n\n if new_data:\n df[f\"Genes out of {len(matrix.columns)}\"] = df2.sum(axis=1).to_frame()\n df = df.rename(columns={0: f\"Sequences Collected\"})\n\n else:\n df = df.rename(columns={0: f\"Genes out of {len(matrix.columns)}\"})\n\n # Fill in taxonomic information\n if new_data:\n list_of_dicts = [{key: value[i] for key, value in in_taxa_dict.items()} for i in range(3)]\n else:\n list_of_dicts = [{key: value[i] for key, value in db_taxa_dict.items()} for i in range(3)]\n df['Long Name'] = df.index.map(list_of_dicts[2])\n df['Higher Taxonomy'] = df.index.map(list_of_dicts[0])\n df['Lower Taxonomy'] = df.index.map(list_of_dicts[1])\n\n # Rearrange Columns to Put Genes after taxa stats\n cols = df.columns.tolist()\n cols = cols[2:] + cols[:2]\n df = df[cols]\n\n if new_data:\n routes_dict = get_routes()\n list_of_routes_dicts = [{key: value[i] for key, value in routes_dict.items()} for i in range(3)]\n df[\"#SBH\"] = df.index.map(list_of_routes_dicts[0])\n df[\"#BBH\"] = df.index.map(list_of_routes_dicts[1])\n df[\"#HMM\"] = df.index.map(list_of_routes_dicts[2])\n out_filename = 'new_taxa_stats.tsv'\n else:\n out_filename = 'db_taxa_stats.tsv'\n\n # Fill in columns for including in SGT construction. By default all are yes\n has_paralogs = check_paralogs()\n if new_data:\n sgt_dict = {org: 'yes' for org in in_taxa_dict.keys()}\n else:\n sgt_dict = {org: 'yes' for org in db_taxa_dict.keys()}\n df['SGT'] = df.index.map(sgt_dict)\n\n # Fill in column for paralogs. If no paralogs entry is 'none'.\n # If there are paralogs entry is 'yes'. 
If there are paralogs, but --ortholog_only is given entry is 'no'.\n if new_data:\n pass\n else:\n paralogs_dict = {org: ('yes' if org in has_paralogs and not args.orthologs_only\n else 'no' if org in has_paralogs and args.orthologs_only else 'none')\n for org in db_taxa_dict}\n df['Paralogs'] = df.index.map(paralogs_dict)\n\n df = df.rename_axis('Unique ID')\n df.to_csv(f'{output_fold}/{out_filename}', sep='\\t')", "def test_taxonomy(n=5):\n ecoli_file = join(this_dir, \"e_coli_core.xml.gz\")\n ids = [\"Escherichia_coli_{}\".format(i) for i in range(1, n + 1)]\n taxa = pd.DataFrame({\"id\": ids})\n taxa[\"genus\"] = \"Escherichia\"\n taxa[\"species\"] = \"Eschericia coli\"\n taxa[\"reactions\"] = 95\n taxa[\"metabolites\"] = 72\n taxa[\"file\"] = ecoli_file\n return taxa", "def plot_all(df, nbd_residues, datatypes, activators=None, replicates=None,\n file_basename=None, normalize_nbd=False):\n if datatypes[0] != 'Release':\n raise ValueError(\"Release should be first in the datatypes list.\")\n # Some definitions\n # Define some default values\n if activators is None:\n activators = ['Bid', 'Bim']\n if replicates is None:\n replicates = (1, 2, 3)\n num_subplots = len(datatypes)\n # Labels and titles for each datatype\n ylabels = {'Release': '% Dye Release',\n 'NBD': 'F/$F_0$',\n 'FRET': '% FRET'}\n # Every mutant gets its own plot\n for nbd_index, nbd_site in enumerate(nbd_residues):\n if len(datatypes) == 2:\n fig_width = 11\n elif len(datatypes) == 3:\n fig_width = 14\n fig = plt.figure(figsize=(fig_width, 5))\n fig.set_tight_layout(True)\n # Define the subplot titles\n titles = {'Release': r'Dye release for NBD-%s-Bax' % nbd_site,\n 'NBD': 'NBD F/$F_0$ for NBD-%s-Bax' % nbd_site,\n 'FRET': 'FRET, NBD-%s-Bax' % nbd_site}\n # Every datatype gets its own subplot\n for dtype_ix, dtype in enumerate(datatypes):\n # There is no NBD/FRET curve for WT Bax, so skip\n if (dtype == 'NBD' and nbd_site == 'WT') or \\\n (dtype == 'FRET' and nbd_site == 'WT'):\n continue\n plt.subplot(1, num_subplots, dtype_ix + 1)\n # Activators and replicates are separate lines on the same plot\n for activator in activators:\n # Iterate over replicates...\n for i in replicates:\n # Get the data\n t = df[(activator, dtype, nbd_site, i, 'TIME')]\n v = df[(activator, dtype, nbd_site, i, 'VALUE')]\n # TODO Do the normalization here if desired TODO\n if normalize_nbd:\n v = v / v[0]\n plt.plot(t, v, label='%s Rep %d' % (activator, i),\n color=line_colors[activator],\n linestyle=line_styles[i])\n plt.xlabel('Time (sec)')\n plt.ylabel(ylabels[dtype])\n plt.title(titles[dtype])\n plt.legend(loc='lower right')\n # Datatype-specific formatting\n if dtype == 'Release':\n plt.ylim([0, 100])\n if file_basename:\n plt.savefig('%s_%s.pdf' % (file_basename, nbd_site))\n plt.savefig('%s_%s.png' % (file_basename, nbd_site))", "def stats_gene(df):\n # res.write(f'Gene,Total,total[%],SGT\\n')\n taxa_count = len(df)\n df = df.sum().to_frame()\n df = df.rename(columns={0: 'Number of Taxa'})\n df[f'Percent of Total Taxa (out of {taxa_count})'] = round((df['Number of Taxa'] / taxa_count) * 100, 2)\n df = df.rename_axis('Gene Name')\n df = df.sort_values(by=['Number of Taxa'], ascending=False)\n df['SGT'] = ['yes'] * len(df)\n df.to_csv(f'{output_fold}/gene_stats.tsv', sep='\\t')", "def taxonomy_plot(self,seasons):\n print('Formatting data.')\n no_of_ideograms=self.taxonomy_files()\n location=self.place.capitalize()+'-'+str(self.year)\n if seasons==True:\n seasons=self.weather.seasons(self.place)\n print('Done')\n 
self.conf.taxo_conf(no_of_ideograms, location, self.start_level, self.plot_level, seasons)", "def create_taxonomy(dataset_name, attr, dataset=[]):\n #path = os.getcwd()\n\n path_in = os.getcwd()\n pattern = '^.*/thesis-data-anonymisation/'\n path_top = re.search(pattern, path_in).group(0)\n\n path = path_top +'data'\n\n if len(dataset_name) > 0:\n prefix = '../data/'+dataset_name+'/hierarchy_'\n else:\n prefix = '../data/hierarchy_'\n\n postfix = '.csv'\n\n try:\n file = open(path + '/' + prefix + attr + postfix, 'r')\n except FileNotFoundError:\n if len(dataset_name) > 0:\n prefix = '/data/'+dataset_name+'/hierarchy_'\n else:\n prefix = '/data/hierarchy_'\n file = open(path+prefix + attr + postfix, 'r')\n\n taxonomy = {}\n #dataset_group = dataset.groupby(attr).groups\n\n lines_in = file.readlines()\n file.close()\n lines = [line.strip().split(';') for line in lines_in]\n max_height = max([len(line) for line in lines])\n try:\n float(lines[0][0])\n is_numeric = True\n except ValueError:\n is_numeric = False\n for line in lines:\n #try:\n # if is_numeric:\n # dataset_group[int(line[0])]\n # else:\n # dataset_group[line[0]]\n #except KeyError:\n # continue\n line.reverse()\n for i, val in enumerate(line):\n is_leaf = False\n if val == '*':\n node = TaxNode(val, None, is_numeric, is_leaf)\n else:\n if i == len(line) - 1:\n is_leaf = True\n\n node = TaxNode(val, taxonomy[line[i - 1]][-1], is_numeric, is_leaf)\n try:\n current_nodes = taxonomy[val]\n already_added = False\n for current_node in current_nodes:\n if current_node.parent is None:\n already_added = True\n elif current_node.parent.value == node.parent.value:\n already_added = True\n if not already_added:\n taxonomy[val].append(node)\n except KeyError:\n taxonomy[val] = [node] # Saves the nodes in a list in case of several parents (only valid for nodes with several parents!!!)\n hierarchy = Taxonomy(taxonomy, max_height)\n\n return hierarchy", "def visualize(houses:pd.DataFrame) -> None:\n #price_distribution(houses)\n #prop_types(houses)\n #zip_code(houses)\n #year_built(houses)\n #bed_bath(houses)\n return", "def visualize_type():\n\t\n\t#grab our parsed data\n\tdata_file = parse(MY_FILE, \",\")\n\t\n\t#make a new variable, counter, from iterating through each line of\n\t#data in parsed data, and count how many incidents happen by category\n\tcounter = Counter(item[\"Category\"] for item in data_file)\n\t\n\t#set the labels which are based on the keys of our counter\n\t#since order doesn't matter, we can just use counter.keys()\n\tlabels = tuple(counter.keys())\n\t\n\t#set exactly where the labels should hit the x-axis\n\txlocations = np.arange(len(labels)) + 0.5\n\t\n\t#width of each bar that will be plotted\n\twidth = 0.5\n\t\n\t#assign data to a bar plot\n\tplt.bar(xlocations, counter.values(), width=width)\n\t\n\t#assign labels and tick location to x-axis\n\tplt.xticks(xlocations + width /2, labels, rotation=90)\n\t\n\t#give more room to the x-axis so the labels aren't cut off\n\tplt.subplots_adjust(bottom=0.4)\n\t\n\t#make the overall graph/figure larger\n\tplt.rcParams['figure.figsize'] = 12, 8\n\t\n\t#save the graph\n\tplt.savefig(\"type.png\")\n\t\n\t#close the plot figure\n\tplt.clf()", "def plot_all_by_replicate(df, nbd_residues, datatypes, activators=None,\n replicates=None, file_basename=None,\n normalize_nbd=False):\n if datatypes[0] != 'Release':\n raise ValueError(\"Release should be first in the datatypes list.\")\n # Define some default values\n if activators is None:\n activators = ['Bid', 'Bim']\n if replicates is 
None:\n replicates = (1, 2, 3)\n # Every mutant gets its own plot\n for nbd_index, nbd_site in enumerate(nbd_residues):\n for activator in activators:\n plt.figure(figsize=(14, 5))\n # Each replicate gets its own subplot\n ax2_list = []\n nbd_max = -np.inf\n nbd_min = np.inf\n for rep in replicates:\n # Make the release plot\n plt.subplot(1, 3, rep)\n ax1 = plt.gca()\n ax2 = ax1.twinx()\n ax2_list.append(ax2)\n for dtype in datatypes:\n # Skip WT for NBD and FRET\n if (dtype == 'NBD' and nbd_site == 'WT') or \\\n (dtype == 'FRET' and nbd_site == 'WT'):\n continue\n # Get the data\n t = df[(activator, dtype, nbd_site, rep, 'TIME')]\n v = df[(activator, dtype, nbd_site, rep, 'VALUE')]\n # Set the axis\n if dtype == 'NBD':\n if normalize_nbd:\n v = v / v[0]\n ax = ax2\n if np.max(v) > nbd_max:\n nbd_max = np.max(v)\n if np.min(v) < nbd_min:\n nbd_min = np.min(v)\n elif dtype == 'Release' or dtype == 'FRET':\n ax = ax1\n else:\n raise ValueError(\"Unknown datatype: %s\" % dtype)\n # Plot the data\n if dtype == 'FRET':\n ax.plot(t, v, label='%s, %s' % (activator, dtype),\n color=dtype_line_colors[dtype],\n linestyle='', marker='.')\n else:\n ax.plot(t, v, label='%s, %s' % (activator, dtype),\n color=dtype_line_colors[dtype])\n\n # Adjust and label the figure\n ax1.set_ylim([0, 100])\n if 'Release' in datatypes and 'FRET' in datatypes:\n ax1.set_ylabel('% FRET, % Release')\n else:\n ax1.set_ylabel('% Release')\n\n ax2.set_xlabel('Time (sec)')\n ax2.set_ylabel('NBD $F/F_0$')\n ax2.set_title('NBD-%s-Bax, %s, Rep %d' %\n (nbd_site, activator, rep))\n ax1_lines, ax1_labels = ax1.get_legend_handles_labels()\n ax2_lines, ax2_labels = ax2.get_legend_handles_labels()\n ax2.legend(ax1_lines + ax2_lines, ax1_labels + ax2_labels,\n loc='lower right', prop={'size':8})\n plt.xticks([0, 1000, 2000, 3000, 4000])\n # Give all plots same range for NBD\n for ax2 in ax2_list:\n if not ((dtype == 'NBD' and nbd_site == 'WT') or \\\n (dtype == 'FRET' and nbd_site == 'WT')):\n ax2.set_ylim([nbd_min * 0.95, nbd_max * 1.05])\n plt.subplots_adjust(wspace=0.4, left=0.06, right=0.95)\n # Output file, if desired\n if file_basename:\n plt.savefig('%s_%s_%s.pdf' %\n (file_basename, nbd_site, activator))\n plt.savefig('%s_%s_%s.png' %\n (file_basename, nbd_site, activator))", "def visualize_type():\n \n data_file = parse(MY_FILE, ',')\n\n # num of incidents per category\n counter = Counter(item['Category'] for item in data_file)\n\n # Set the labels\n labels = tuple(counter.keys())\n\n # Set exactly where the labels hit the x-axis\n xlocations = na.array(range(len(labels))) + 0.5\n\n # Width of each bar\n width = 0.5\n\n # Assign data to a bar plot\n plt.bar(xlocations, counter.values(), width=width)\n\n # Assign labels and tick location to x-axis\n plt.xticks(xlocations + width / 2, labels, rotation=90)\n \n # Give some more room so the x-axis labels aren't cut off\n plt.subplots_adjust(bottom=0.4)\n\n # Make the overall graph/figure larger\n plt.rcParams['figure.figsize'] = 12, 8\n\n # save\n plt.savefig('Type.png')\n\n # close\n plt.clf()", "def __init__(\n self,\n gene_lists,\n taxon,\n requests_per_sec=10,\n padj_threshold=0.05,\n log2_fc_threshold=0,\n fc_threshold=None,\n enrichment_fdr=0.05,\n annot_col=\"Name\",\n ):\n Ontology.__init__(self)\n PlotGOTerms.__init__(self)\n\n self.gene_lists = gene_lists\n self.enrichment_fdr = enrichment_fdr\n\n # users can set the fold change threshold in the log2 scale or normal\n # scale.\n assert log2_fc_threshold >= 0, \"log2 fc_threshold must be >=0\"\n if fc_threshold is not 
None:\n log2_fc_threshold = pylab.log2(fc_threshold)\n\n from bioservices import panther, quickgo\n\n self.quick_go_graph = QuickGOGraph()\n\n self.panther = panther.Panther(cache=True)\n self.valid_taxons = [x[\"taxon_id\"] for x in self.panther.get_supported_genomes()]\n self.summary = {}\n\n self._taxon = None\n self.taxon = taxon\n\n self.quickgo = quickgo.QuickGO(cache=True)\n self.quickgo.requests_per_sec = requests_per_sec\n self.quickgo.services.settings.TIMEOUT = 120\n\n self._ancestors = {\n \"MF\": \"GO:0003674\",\n \"CC\": \"GO:0005575\",\n \"BP\": \"GO:0008150\",\n \"SLIM_MF\": \"GO:0003674\",\n \"SLIM_CC\": \"GO:0005575\",\n \"SLIM_BP\": \"GO:0008150\",\n }\n self.ontologies.extend(\n [\n \"ANNOT_TYPE_ID_PANTHER_GO_SLIM_MF\",\n \"ANNOT_TYPE_ID_PANTHER_GO_SLIM_BP\",\n \"ANNOT_TYPE_ID_PANTHER_GO_SLIM_CC\",\n \"ANNOT_TYPE_ID_PANTHER_PC\",\n \"ANNOT_TYPE_ID_PANTHER_PATHWAY\",\n \"ANNOT_TYPE_ID_REACTOME_PATHWAY\",\n ]\n )\n\n self.ontology_aliases.extend(\n [\n \"SLIM_MF\",\n \"SLIM_BP\",\n \"SLIM_CC\",\n \"PROTEIN\",\n \"PANTHER_PATHWAY\",\n \"REACTOME_PATHWAY\",\n ]\n )\n\n # panther accepts onyl ~2-3000 genes at max. Let us restrict the analysis\n # to the first 2000 genes based on their log2 fold change 2000 + and\n # 2000 negatives\n\n msg = \"Ignoring DEGs with adjusted p-value > {} and fold change in [{}, {}]\".format(\n padj_threshold, 1 / (2**log2_fc_threshold), 2**log2_fc_threshold\n )\n logger.info(msg)\n\n # used in report module\n self.summary[\"fold_change_range\"] = [\n 1 / (2**log2_fc_threshold),\n 2**log2_fc_threshold,\n ]\n self.summary[\"padj_threshold\"] = padj_threshold\n\n fc_threshold = log2_fc_threshold\n\n for x in sorted(gene_lists.keys()):\n\n N = len(gene_lists[x])\n logger.info(f\"Starting with {N} genes from category '{x}'\")\n\n self.summary[\"DGE_after_filtering\"] = {k: len(v) for k, v in gene_lists.items()}\n\n self.enrichment = {}\n self.stats = {}\n self.obsolets = []", "def getFig3Data(df, path):\n\ttmp = pd.DataFrame()\n\t# tmp = tmp.append(df)\n\ttmp = tmp.append(df[df.Location == 'exon'])\n\ttmp = tmp.append(df[df.Location == 'intron'])\n\t# print(df[df.Location == 'exon'].NbpG4rWt)\n\t# print(df[df.Location == 'intron'].NbpG4rWt)\n\tdicoNbTr = countTranscript.getFig3Percent(path)\n\tGlobal = pd.DataFrame()\n\tgroups = tmp.groupby('Class')\n\tfor name, group in groups:\n\t\trow = sumSubTable(group, name)\n\t\trow['Class'] = name\n\t\trow = pd.DataFrame(row, index=[len(Global)+1])\n\t\tGlobal = Global.append(row)\n\t# print(sum(Global.NbpG4rWt))\n\trow = {'Class' : 'Global',\n\t\t\t'nuclG' : sum(Global.nuclG),\n\t\t\t'nuclC' : sum(Global.nuclC),\n\t\t\t'NbpG4rWt' : sum(Global.NbpG4rWt),\n\t\t\t'NbpG4rShuf' : sum(Global.NbpG4rShuf),\n\t\t\t'Tot' : sum(Global.Tot)}\n\trow = pd.DataFrame(row, index=[len(Global)+1])\n\tGlobal = Global.append(row)\n\tGlobal['nbTr'] = Global['Class'].map( dicoNbTr['Tot'] )\n\tGlobal['NbTrpG4Wt'] = Global['Class'].map( dicoNbTr['Wt'] )\n\tGlobal['NbTrpG4Shuf'] = Global['Class'].map( dicoNbTr['Shuf'] )\n\tGlobal['PercentWt'] = Global['NbTrpG4Wt'] / Global['nbTr'] * 100\n\tGlobal['PercentShuf'] = Global['NbTrpG4Shuf'] / Global['nbTr'] * 100\n\tGlobal = computeDensity(Global, 'Segment')\n\treturn Global", "def get_full_tax(idx):\n logging.info('Compiling the taxonomy for all genomes...')\n tax_idx = collections.defaultdict(dict)\n for cluster_id,v in idx.items():\n for tax,vv in v.items():\n for genome_id,x in vv.items():\n tax_idx[tax][genome_id] = x['genome_len']\n n_genomes = 0\n for tax,v in tax_idx.items():\n 
n_genomes += len(v.keys())\n logging.info(' Total number of genomes: {}'.format(n_genomes))\n # return\n return tax_idx", "def explore_data():\n labels = [\"vehicles\", \"non-vehicles\"]\n labelmap = {0: \"vehicles\", 1: \"non-vehicles\"}\n vehicles_glob = os.path.join(data_dir, \"vehicles\", \"**\", \"*.png\")\n nonvehicles_glob = os.path.join(data_dir, \"non-vehicles\", \"**\", \"*.png\")\n class_fnames = [\n glob.glob(vehicles_glob, recursive = True),\n glob.glob(nonvehicles_glob, recursive = True)]\n n_samples = [len(fnames) for fnames in class_fnames]\n shapes = []\n samples = []\n print(table_format([\"label\", \"size\", \"shape\"], header = True))\n for label, fnames in enumerate(class_fnames):\n indices = np.random.choice(len(fnames), 4*10, replace = False)\n for i in indices:\n fname = fnames[i]\n img = cv2.imread(fname)\n samples.append(img)\n shape = img.shape\n shapes.append(shape)\n print(table_format([labels[label], n_samples[label], shapes[label]]))\n\n samples = np.stack(samples)\n samples = tile(samples, 2*4, 10)\n cv2.imwrite(os.path.join(out_dir, \"datasamples.png\"), samples)\n\n return class_fnames, labelmap", "def allDirectionalityRatios(ratioFunction):\n if not os.path.exists(\"savedHeatmaps\"):\n os.mkdir(\"savedHeatmaps\")\n wildRatio = np.log(ratioFunction(\"Wildtype_0min_BglII_rep1\"))\n for j, dataset in enumerate(datasets):\n ax = plt.subplot(len(datasets), 1, j + 1)\n curRatio = (ratioFunction(dataset))\n plt.title(\"{1}, r = {0:.2f}, p={2:.2e}\".format(pearsonr(curRatio, wildRatio)[0], names[dataset],\n pearsonr(curRatio, wildRatio)[1]), fontsize=10)\n plt.tick_params(axis='both', which='major', labelsize=10)\n plt.tick_params(axis='both', which='minor', labelsize=8)\n plt.plot(curRatio)\n plt.ylim((0.25, 0.75))\n plt.xlim((0, len(curRatio)))\n #plt.ylim((0, 1))\n plt.yticks((0.25, 0.5, 0.75))\n geneCoor = [1162773, 3509071, 1180887, 543099, 1953250, 2522439, 3328524, 1503879, 900483, 242693, 3677144, 3931680, 3677704, 3762707, 3480870, 3829656, 1424678, 901855, 1439056, 3678537]\n genePos = [i / 10000. 
for i in geneCoor]\n #genePos = []\n for lpos in genePos:\n plt.vlines(lpos , -.8, .8, alpha=0.2, linewidth=1, color=\"black\")\n plt.xticks([0, 50, 100, 150, 200, 250, 300, 350, 400], [\"\" for i in xrange(9)], fontsize=98)\n removeAxes(ax=ax)\n plt.subplots_adjust(0.07, 0.05, 0.94, 0.95, 0.2, 0.5)\n\n\n\n plt.show()\n exit()", "def init_taxon():\n if not exists('./data/taxdmp.zip'):\n ftp = FTP('ftp.ncbi.nih.gov')\n ftp.login()\n ftp.cwd('pub/taxonomy')\n ftp.retrbinary('RETR taxdmp.zip', open('./data/taxdmp.zip', 'wb').write)\n ftp.quit\n with ZipFile('./data/taxdmp.zip', 'r') as dumpfile:\n dumpfile.extractall(path='./data/')\n taxon_id = dict()\n data = list()\n name = dict()\n specie = list()\n son = dict()\n greatson = dict()\n parent = dict()\n rank = dict()\n global taxon\n taxon = list()\n with open('./data/names.dmp', 'r') as dumpfile:\n raw = dumpfile.read().split(sep='\\n')\n raw.pop()\n for record in raw:\n add = record.replace('\\t', '').split(sep='|')\n if add[0] not in name or add[2] == 'scientific name':\n name[add[0]] = add[1]\n with open('./data/nodes.dmp', 'r') as dumpfile:\n raw = dumpfile.read().split(sep='\\n')\n raw.pop()\n for record in raw:\n add = record.replace('\\t', '').split(sep='|')\n # 1696063|Sarcocystis corvusi||scientific name|\n taxon_id[add[0]] = add[1]\n rank[add[0]] = add[3]\n if add[2] == 'species':\n specie.append(add[0])\n for specie in specie:\n record = [specie, ]\n while taxon_id[specie] != '1':\n record.append(taxon_id[specie])\n specie = taxon_id[specie]\n # if '33090' in record:\n # record.pop()\n # record.pop()\n data.append(record)\n for data in data:\n for n in range(len(data)):\n if data[n] not in parent:\n parent[data[n]] = data[(n + 1):]\n if n == 0:\n continue\n if data[n] not in son:\n son[data[n]] = {data[n - 1], }\n else:\n son[data[n]].add(data[n - 1])\n if data[n] not in greatson:\n greatson[data[n]] = {data[0], }\n else:\n greatson[data[n]].add(data[0])\n for specie in name.items():\n if specie[0] not in son:\n son[specie[0]] = set()\n if specie[0] not in parent:\n parent[specie[0]] = list()\n if specie[0] not in greatson:\n greatson[specie[0]] = set()\n record = [specie[0], name[specie[0]], rank[specie[0]], son[specie[0]], parent[specie[0]], greatson[specie[0]]]\n taxon.append(record)\n\n con = sqlite3.connect('./data/DB')\n cur = con.cursor()\n cur.execute(\n 'CREATE TABLE IF NOT EXISTS taxon (Id TEXT, Name TEXT, Rank TEXT, Son TEXT, Parent TEXT, GreatSon TEXT);')\n for line in taxon:\n son = ' '.join(line[3])\n parent = ' '.join(line[4])\n greatson = ' '.join(line[5])\n cur.execute('INSERT INTO taxon (Id, Name, Rank, Son, Parent, GreatSon) VALUES (?, ?, ?, ?, ?, ?);',\n (line[0], line[1], line[2], son, parent, greatson))\n con.commit()\n cur.close()\n con.close()\n print('Done.\\n')", "def fill_taxonomy_database(taxids, password):\r\n\r\n for taxid in taxids:\r\n lineage = ncbi.get_lineage(taxid)\r\n names = ncbi.get_taxid_translator(lineage)\r\n print(lineage)\r\n print([names[taxid] for taxid in lineage])\r\n\r\n previous = \"\"\r\n\r\n for lin in lineage:\r\n if int(lin) != 1: # skipping 'root'\r\n rank = ncbi.get_rank([lin])\r\n SQL_connection = set_connection(password)\r\n cursor = SQL_connection.cursor(buffered=True)\r\n cursor.execute(\r\n \"select * \"\r\n \"from Taxonomie \"\r\n \"where taxonomy_ID = {};\".format(\r\n lin))\r\n results = cursor.fetchone()\r\n if results is None:\r\n if previous == \"\":\r\n cursor.execute(\"insert into Taxonomie \"\r\n \"(rank_up, taxonomy_ID, naam, rang) \"\r\n \"values(NULL, {}, 
'{}', '{}');\".format(\r\n lin, names[lin], rank[lin]))\r\n SQL_connection.commit()\r\n else:\r\n cursor.execute(\"insert into Taxonomie \"\r\n \"(rank_up, taxonomy_ID, naam, rang) \"\r\n \"values({}, {}, '{}', '{}');\".format(\r\n previous, lin, names[lin], rank[lin]))\r\n SQL_connection.commit()\r\n cursor.close()\r\n SQL_connection.close()\r\n previous = lin", "def write_phylogeny_pages(outfile: TextIO, genera_tree: list, species_tree: list, do_print: bool,\n refdict: dict) -> None:\n\n def split_html_tree(intree: list) -> Tuple[list, list]:\n \"\"\"\n take a phy2html output file and split it into style and body sections\n \"\"\"\n instyle = False\n inbody = False\n style_list = []\n body_list = []\n for inline in intree:\n if \"<style>\" in inline:\n instyle = True\n elif \"</style>\" in inline:\n instyle = False\n elif \"<body>\" in inline:\n inbody = True\n elif \"</body>\" in inline:\n inbody = False\n elif instyle:\n style_list.append(inline)\n elif inbody:\n body_list.append(inline)\n return style_list, body_list\n\n def add_link_to_genus(inline: str) -> str:\n if \"id=\\\"genera_taxon\" in inline:\n i = inline.find(\">\")\n start = inline[:i+1]\n tname = inline[i+1:]\n j = tname.find(\"<\")\n end = tname[j:]\n tname = tname[:j]\n tname = create_taxon_link(\"genus\", tname, do_print=do_print, include_rank=False)\n return start + tname + end\n else:\n return inline\n\n # --- main function code ---\n gen_style, gen_body = split_html_tree(genera_tree)\n sp_style, sp_body = split_html_tree(species_tree)\n\n if do_print:\n start_page_division(outfile, \"base_page\")\n else:\n common_header_part1(outfile, \"Fiddler Crab Phylogeny\")\n outfile.write(\" <style>\\n\")\n for line in gen_style:\n outfile.write(line)\n outfile.write(\"\\n\")\n for line in sp_style:\n outfile.write(line)\n outfile.write(\"\\n\")\n outfile.write(\" .phylogeny_grid { padding: 30px; }\\n\") # add a little extra padding\n outfile.write(\" </style>\\n\")\n\n if not do_print:\n common_header_part2(outfile)\n\n outfile.write(\" <header id=\\\"\" + init_data().tree_url + \"\\\">\\n\")\n outfile.write(\" <h1 class=\\\"bookmark1\\\">Phylogeny</h1>\\n\")\n outfile.write(\" </header>\\n\")\n outfile.write(\"\\n\")\n outfile.write(\" <p>\\n\")\n outfile.write(\" The phylogeny of fiddler crabs is still largely unresolved. Two trees are shown below: one \"\n \"of just the genera and one including all species. The tree of genera is fairly solid, \"\n \"but the species tree is a rough estimate with many polytomies. 
Both are predominantly based on the \"\n \"work of \" + format_reference_cite(refdict[\"Shih2016.2\"], do_print, AUTHOR_PAREN) + \".\\n\")\n outfile.write(\" </p>\\n\")\n outfile.write(\"\\n\")\n outfile.write(\" <section class=\\\"spsection\\\">\\n\")\n outfile.write(\" <h2 class=\\\"bookmark2\\\">Genera Phylogeny</h2>\\n\")\n for line in gen_body:\n outfile.write(add_link_to_genus(line))\n outfile.write(\" </section>\\n\")\n outfile.write(\" <section class=\\\"spsection\\\">\\n\")\n outfile.write(\" <h2 class=\\\"bookmark2\\\">Species Phylogeny</h2>\\n\")\n for line in sp_body:\n outfile.write(replace_species_in_string(line, True, do_print))\n outfile.write(\" </section>\\n\")\n outfile.write(\"\\n\")\n if do_print:\n end_page_division(outfile)\n else:\n common_html_footer(outfile)", "def sixteen_graphs(the_dir):\n # TODO change to deprecation warning\n warnings.warn(\"Does not call sv_pipeline functoins correctly\", DeprecationWarning)\n\n plb.rcParams['figure.figsize'] = 30, 30\n plt.clf()\n plt.figure(1)\n\n # should look like: read_data/all_files/chr4_124,017,492_124,029,032_merged.txt\n merged_files = glob.glob(the_dir + '*merged.txt')\n print(\"Running for {} regions\".format(len(merged_files)))\n for merged_filename in merged_files:\n # get filenames\n prefix = merged_filename[len(the_dir):-11]\n fasta_filename = the_dir + prefix + \".fa\"\n bed_filename = the_dir + prefix + \"-refcoords.bed\"\n print('Using ' + prefix)\n\n for min_matching_length in range(100, 1700, 100):\n print(min_matching_length)\n # used for ground truth\n preset, postset, spanset, gapset = get_read_classifications(prefix,\\\n bed_filename, merged_filename=merged_filename)\n # Generate and prune graph\n graph = generate_graph(prefix, fasta_filename, min_matching_length)\n graph = nx_helpers.remove_nodes(graph, preset)\n graph = nx_helpers.remove_nodes(graph, postset)\n\n # Plot the graph\n plt.subplot(4, 4, min_matching_length/100)\n communities = nx_helpers.get_communities(graph)\n graph, communities = drop_small_communities(graph, communities)\n node_colors = node_community_colors(graph, communities)\n pos = nx.spring_layout(graph)\n title = \"Chr {0};\\n L={1}; NumCom={2}\\nComQual = {3}, MapQual={4}\"\\\n .format(prefix, min_matching_length, len(communities),\\\n community_quality(communities, spanset, gapset),\\\n mapping_quality(graph, spanset, gapset))\n nx.draw(graph, node_color=node_colors, node_size=100, pos=pos)\n plt.title(title)\n plt.savefig(\"figs/\" + prefix + '-16-communities.pdf')\n plt.clf()", "def tree(n, names):\n \n t = Tree()\n t.populate(n, names_library=names.keys(), random_branches=True, support_range=(0.7, 1))\n t.write(format=2, outfile='fake.tree.newick', dist_formatter='%0.4g', support_formatter='%0.4g')\n with open('fake.tree.notation.tsv', 'w') as o:\n o.write('#TaxaID\\tName\\n')\n o.writelines('{}\\t{}\\n'.format(k, v) for k, v in names.items())", "def aglycon_single_tax(df_NP):\n # **seperate aglycons with at least two different entries in taxonomy**\n index_Unique_Tax = [ind for ind, tax_list in enumerate(df_NP.taxonomy) if len(tax_list) == 1]\n df_Without_Double = df_NP.iloc[index_Unique_Tax[:]]\n #df_Without_Double\n # **check for 'double' or 'triple' entries in taxonomy**\n index_double_or_triple = [ind for ind, tax_list in enumerate(df_Without_Double.taxonomy) if 'double' not in tax_list and 'triple' not in tax_list]\n df_Without_Double_or_Triple = df_Without_Double.iloc[index_double_or_triple[:]]\n #df_Without_Double_or_Triple\n # **------for taxonomy 
prediction------**\n df_Without_Double_or_Triple.to_pickle(\"output_data/df_all_aglycons_with_single_taxonomy.pkl\")\n # **------end for taxonomy prediction------**", "def index_figures(): \n # extract data needed for visuals\n # TODO: Below is an example - modify to extract data for your own visuals\n genre_counts = df.groupby('genre').count()['message']\n genre_names = list(genre_counts.index)\n \n # create visuals\n # TODO: Below is an example - modify to create your own visuals\n graph_one = []\n graph_one.append(\n go.Bar(\n x = genre_names,\n y = genre_counts\n )\n ) \n layout_one = dict(title = 'Distribution of Message Genres',\n yaxis = dict(title = 'Count'),\n xaxis = dict(title = 'Genre')\n )\n \n category_values = df.iloc[:,4:].sum().sort_values(ascending=False).head()\n category_names = list(category_values.index)\n \n graph_two = []\n graph_two.append(\n go.Pie(\n values=category_values,\n labels=category_names\n )\n )\n layout_two = dict(title = 'Top Categories',\n yaxis = dict(title = 'Count'),\n xaxis = dict(title = 'Category')\n )\n \n graphs = []\n graphs.append(dict(data=graph_one, layout=layout_one))\n graphs.append(dict(data=graph_two, layout=layout_two))\n return graphs", "def generate_subgraph(format):\n\n # get business information\n directorypath = genpath+directory\n if os.path.isfile(directorypath):\n \n bizdata = pd.read_csv( directorypath, escapechar='\\\\')\n\n #create a directory of page-id and object-ids\n tempdf = bizdata.set_index('pageid')\n tempdf = tempdf['objectid']\n dictionary = tempdf.to_dict()\n\n uncgraph = pd.read_csv(inpath+graphfile, escapechar='\\\\')\n uncgraph = uncgraph.dropna()\n uncgraph['likee_object_id'] = uncgraph.apply(lambda x: dictionary.get(x['likee_page_id']), axis=1)\n cgraph = uncgraph.dropna()\n cgraph = cgraph[['liker_page_id', 'likee_page_id']]\n cgraph.columns = ['Source', 'Target']\n\n \n print_stats(cgraph)\n if format == 'networkx' :\n print \"[Generating a networkX graph...]\" \n cgraph.to_csv(genpath+subgraph+'.ntx', index=False, header=False, sep= ' ')\n else:\n print \"[Generating a csv graph...]\" \n cgraph.to_csv(genpath+subgraph+'.csv', index=False)\n\n\n else:\n print \"Either file is missing or is not readable\"", "def complete_databank(port=\"localhost:27017\",coconut_database=\"COCONUT2020-10\",sweetcoconut_database=\"sweetcoconut\"):\n client = MongoClient(port)\n db_complete = client[coconut_database]\n collection = db_complete.uniqueNaturalProduct\n db_complete_only_ring_sugars = pd.DataFrame(list(collection.find({\"contains_ring_sugars\": True})))\n df_complete_tax = pd.DataFrame({\"taxonomy\": db_complete_only_ring_sugars[\"textTaxa\"],\n \"smiles\": db_complete_only_ring_sugars[\"smiles\"],\n \"coconut_id\": db_complete_only_ring_sugars[\"coconut_id\"],\n \"no_sugar_smiles\": db_complete_only_ring_sugars[\"sugar_free_smiles\"]\n })\n complete_names = []\n indexes = []\n for i in range(len(df_complete_tax.taxonomy)):\n # some entries are empty lists\n # doubles\n if df_complete_tax.taxonomy[i] != [] and (\"plants\" in df_complete_tax.taxonomy[i] or \"bacteria\" in df_complete_tax.taxonomy[i] or \"marine\" in df_complete_tax.taxonomy[i] or \"animals\" in df_complete_tax.taxonomy[i] or \"fungi\" in df_complete_tax.taxonomy[i]):\n indexes.append(i)\n complete_names.append(df_complete_tax.taxonomy[i])\n df_five_tax = df_complete_tax.loc[indexes[:]]\n df_tax_id = pd.DataFrame({\"taxonomy\": df_five_tax.taxonomy,\n \"coconut_id\": df_five_tax.coconut_id})\n df_tax_id = df_tax_id.reset_index()\n taxonomies = 
[\"plants\",\"bacteria\",\"fungi\",\"marine\",\"animals\"]\n biology_names = []\n for row in df_tax_id.taxonomy:\n for name in row:\n if name not in taxonomies:\n biology_names.append(name)\n for biology_name in biology_names:\n for row in df_tax_id.taxonomy:\n if biology_name in row:\n row.remove(biology_name)\n # **------------for tax prediction---------------**\n df_tax_id.to_pickle(\"output_data/for_predict_doubletriple.pkl\")\n # **----------end tax prediction--------------**\n for ind, tax_list in enumerate(df_tax_id.taxonomy):\n if \"marine\" in tax_list:\n #print(ind, tax_list)\n if len(tax_list) > 1:\n df_tax_id.taxonomy[ind].remove(\"marine\")\n else:\n df_tax_id.taxonomy[ind].append(\"no\")\n df_tax_id.taxonomy[ind].remove(\"marine\")\n #df_tax_id.taxonomy[ind] = [\"no\"]\n taxonomy_Double = []\n taxonomy_Triple = []\n taxonomy_single_entry = []\n for ind, tax_list in enumerate(df_tax_id.taxonomy):\n #print(ind, tax_list)\n if len(tax_list) == 1:\n taxonomy_single_entry.append(tax_list[0])\n elif len(tax_list) == 2: \n taxonomy_single_entry.append('double')\n # save original annotation\n taxonomyDouble1 = []\n for tax in tax_list:\n taxonomyDouble1.append(tax)\n taxonomy_Double.append(taxonomyDouble1)\n elif len(tax_list) == 3:\n taxonomy_single_entry.append('triple')\n # save original annotation\n taxonomyTriple1 = []\n for tax in tax_list:\n taxonomyTriple1.append(tax)\n taxonomy_Triple.append(taxonomyTriple1)\n else:\n print('Error: Too many taxonomies for one aglycon','\\n','create a new elif statement in line 102 in tanimoto_index.py')\n df_tax_id_fromCompleteDatabank = pd.DataFrame({\"taxonomy\": taxonomy_single_entry,\n \"coconut_id\": df_five_tax.coconut_id})\n sweetcoconut_databank(df_tax_id_fromCompleteDatabank,taxonomy_Double,sweetcoconut_database,port)", "def read_turte():\n file = request.form['upload-file']\n print(file)\n # no_of_rows = int(request.form['no_of_rows'])\n # g = rdflib.Graph()\n # g = ConjunctiveGraph()\n owlClass = rdflib.namespace.OWL.Class\n rdfType = rdflib.namespace.RDF.type\n\n result = g.parse(file, format=\"turtle\")\n final_list = []\n # Iterate over triples in store and print them out\n # for s, p, o in result:\n # # if type(o) == rdflib.term.Literal:\n # # sub.append(s),prop.append(p),obj.append(o)\n # # final_list.append((s, o))\n # final_list.append((s, g.label(s)))\n for s in result.subjects(predicate=rdfType, object=owlClass):\n class_labels.append(result.label(s).title())\n autocmplete_label_dict[result.label(s).title()] = {}\n final_list.append((s.title(), result.label(s).title()))\n class_labels_dict[result.label(s).title()] = s.title()\n labels = list(set([i for i in final_list if len(i[1]) > 0]))\n print(len(labels))\n # print(class_labels_dict)\n print(class_labels_dict.get('Computertomograph'))\n # print(class_labels_dict.get('DepthOfCut'))\n rdf_df = pd.DataFrame(labels, columns=['class(subject)', 'label(literals)'])\n alert_value = 1 # for alert.\n return render_template('turtle_list.html', tables=[rdf_df.to_html(classes='data')], titles=rdf_df.columns.values)\n # render_template('index_old.html', alert_value=alert_value)" ]
[ "0.61547685", "0.5806181", "0.5550293", "0.5549266", "0.5513179", "0.54985404", "0.5479837", "0.54608345", "0.54200816", "0.54113746", "0.5385822", "0.5380591", "0.53727204", "0.53407085", "0.5332196", "0.5309217", "0.5293119", "0.5277534", "0.526739", "0.52268946", "0.5209379", "0.51807946", "0.51642454", "0.5158195", "0.5143495", "0.5139653", "0.51293766", "0.5129155", "0.51218814", "0.5121498" ]
0.7490416
0
Gets a data frame with all the same aglycon structures in one row. Deletes all rows with more than one entry in the taxonomy row. Passes a data frame with only one entry (superkingdom or 'no') in the taxonomy row.
def aglycon_single_tax(df_NP):
    # **seperate aglycons with at least two different entries in taxonomy**
    index_Unique_Tax = [ind for ind, tax_list in enumerate(df_NP.taxonomy) if len(tax_list) == 1]
    df_Without_Double = df_NP.iloc[index_Unique_Tax[:]]
    #df_Without_Double
    # **check for 'double' or 'triple' entries in taxonomy**
    index_double_or_triple = [ind for ind, tax_list in enumerate(df_Without_Double.taxonomy) if 'double' not in tax_list and 'triple' not in tax_list]
    df_Without_Double_or_Triple = df_Without_Double.iloc[index_double_or_triple[:]]
    #df_Without_Double_or_Triple
    # **------for taxonomy prediction------**
    df_Without_Double_or_Triple.to_pickle("output_data/df_all_aglycons_with_single_taxonomy.pkl")
    # **------end for taxonomy prediction------**
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clean(df):", "def gb_cleaner(df):\n df['tag'] = df.tags.apply(retagger)\n \n c_list = df.text.tolist()\n\n clean_corpus = []\n for docs in c_list:\n clean_corpus.append(data_cleaner(docs))\n \n df['clean'] = clean_corpus\n\n df = df.drop(['text', 'tags', 'stars'], axis= 1)\n \n return df", "def disagg(vec:gpd.GeoDataFrame):\n\t\t# Split GeometryCollections\n\t\tno_coll = []\n\t\tfor i, row in vec.iterrows():\n\t\t\tgeom = row.geometry\n\t\t\tif geom.type == 'GeometryCollection':\n\t\t\t\tfor part in geom:\n\t\t\t\t\trow2 = row.copy()\n\t\t\t\t\trow2.geometry = part\n\t\t\t\t\tno_coll.append(row2)\n\n\t\t\telse:\n\t\t\t\t\tno_coll.append(row) \n\n\t\t# Split Multi geomries\n\t\tres = []\n\t\tfor row in no_coll:\n\t\t\tgeom = row.geometry\n\t\t\tif geom.type.startswith('Multi'):\n\t\t\t\tfor part in geom:\n\t\t\t\t\trow2 = row.copy()\n\t\t\t\t\trow2.geometry = part\n\t\t\t\t\tres.append(row2)\n\t\t\telse:\n\t\t\t\t\tres.append(row)\n\n\t\treturn gpd.GeoDataFrame(res, crs=vec.crs).reset_index(drop=True)", "def drop_multindex(df):\n\n if isinstance(df.index, pd.MultiIndex):\n df_flat = df.reset_index()\n # keep index if False\n else:\n df_flat = df.copy()\n return df_flat", "def get_compound_df(df):\n columns = {'Name':'analyte', 'Amount': 'measurement'}\n df['Compound'].rename(columns=columns, inplace=True)\n df_compound = df['Compound'].copy()\n df_compound.loc[(df_compound.analyte.isnull()) & (df_compound.measurement > 0), 'analyte'] = 'wildcard'\n df_compound.dropna(subset = ['analyte'], inplace=True)\n return df_compound", "def telecom_towers_small1(osm_path): \n df = retrieve(osm_path,'points',['man_made','other_tags'],**{\"man_made\":[\"='tower'\"]}).rename(columns={'man_made': 'asset'}) \n\n for row in reversed(range(len(df.index))):\n if df[\"other_tags\"][row] != None:\n if not 'tower:type\"=>\"communication' in df[\"other_tags\"][row]:\n df = df.drop(df.index[row])\n \n return df.reset_index(drop=True)", "def drop_one_elem_columns(self, df):\n df_ = df.copy()\n\n # Incldue columns in dataframe\n include_idx = []\n for i in df_.columns:\n len_unique = df_[i].dropna().unique().size\n if len_unique > 1:\n include_idx.append(i)\n\n df_ = df_[include_idx]\n return df_", "def ex_pedigree_six_animals():\n\n data_dict = {'Calf': [3, 4, 5, 6],\n 'Sire': [1, 1, 4, 5],\n 'Dam': [2, 'Unknown', 3, 2]}\n\n df = pd.DataFrame(data_dict)\n\n return(df)", "def getquotedtweetdf(tweetdf):\r\n quoteddf = tweetdf[tweetdf['quoted'] == 1]\r\n quoteddf.reset_index(inplace=True)\r\n \r\n return quoteddf", "def collapse_taxa(df: pandas.DataFrame, genus: bool = False, suffix: str = None) -> pandas.DataFrame:\n\n if suffix is not None:\n group_names = []\n for name in df.index.tolist():\n if suffix in name:\n group_names.append(name.split(suffix)[0] + 'virus')\n elif genus:\n group_names = []\n for name in df.index.tolist():\n try:\n g = name.split(' ')[0]\n except IndexError:\n continue\n group_names.append(g)\n else:\n raise ValueError('Genus or suffix parameters must be set')\n\n grouped = []\n df.index = group_names\n for group, gdata in df.groupby(df.index):\n # print(f'Collapsing species in genus: {group}')\n d = gdata.apply(sum, axis=0)\n d.name = f\"{group} spp.\" if genus else f\"{group}\"\n grouped.append(d)\n\n grouped = pandas.DataFrame(grouped)\n\n return grouped", "def taxa_data_frame(self):\n cols = list(self._taxa.keys())\n cols.remove(\"uid\")\n cols.remove(\"object\")\n df = DataFrame(self._taxa, columns=cols, index=self._taxa[\"uid\"])\n df.index.name = \"uid\"\n\n return df", "def 
trimDf(df):\n cols = set(df.columns)\n\n cols.remove('exclamationCount') # bug in our feature extraction code\n cols.remove('price') # considered only free apps\n cols.remove('appName') # removing appNames\n\n # return df[list(cols)]\n\n\n\n return df[list(('revSent', 'appLabel'))]", "def clean_up(houses:pd.DataFrame) -> pd.DataFrame:\n houses= delete_columns(houses)\n houses= analyze_missing_values(houses)\n houses= add_seller_house(houses)\n houses= add_underscore(houses)\n houses= create_dummies(houses)\n houses= impute(houses)\n return houses", "def mark_garbage(row):\n\n def is_relation_deprecated():\n return row._relation.isdigit() or row._relation in DEPREC_RELS\n\n def is_postag_undefined():\n return np.all(row['subject']['postag'] == np.zeros((MAX_PHRASE_LEN, 18))) or np.all(\n row['object']['postag'] == np.zeros((MAX_PHRASE_LEN, 18))) or np.all(\n row['relation']['postag'] == np.zeros((MAX_PHRASE_LEN, 18)))\n\n return is_relation_deprecated() # or is_postag_undefined()", "def stats_orgs(df, new_data=False):\n rows = []\n\n if new_data:\n df = df[df.index.isin(in_taxa_dict.keys())]\n else:\n df = df[df.index.isin(db_taxa_dict.keys())]\n\n df2 = df.copy()\n df2[df2 >= 1] = 1\n\n df = df.sum(axis=1).to_frame()\n\n if new_data:\n df[f\"Genes out of {len(matrix.columns)}\"] = df2.sum(axis=1).to_frame()\n df = df.rename(columns={0: f\"Sequences Collected\"})\n\n else:\n df = df.rename(columns={0: f\"Genes out of {len(matrix.columns)}\"})\n\n # Fill in taxonomic information\n if new_data:\n list_of_dicts = [{key: value[i] for key, value in in_taxa_dict.items()} for i in range(3)]\n else:\n list_of_dicts = [{key: value[i] for key, value in db_taxa_dict.items()} for i in range(3)]\n df['Long Name'] = df.index.map(list_of_dicts[2])\n df['Higher Taxonomy'] = df.index.map(list_of_dicts[0])\n df['Lower Taxonomy'] = df.index.map(list_of_dicts[1])\n\n # Rearrange Columns to Put Genes after taxa stats\n cols = df.columns.tolist()\n cols = cols[2:] + cols[:2]\n df = df[cols]\n\n if new_data:\n routes_dict = get_routes()\n list_of_routes_dicts = [{key: value[i] for key, value in routes_dict.items()} for i in range(3)]\n df[\"#SBH\"] = df.index.map(list_of_routes_dicts[0])\n df[\"#BBH\"] = df.index.map(list_of_routes_dicts[1])\n df[\"#HMM\"] = df.index.map(list_of_routes_dicts[2])\n out_filename = 'new_taxa_stats.tsv'\n else:\n out_filename = 'db_taxa_stats.tsv'\n\n # Fill in columns for including in SGT construction. By default all are yes\n has_paralogs = check_paralogs()\n if new_data:\n sgt_dict = {org: 'yes' for org in in_taxa_dict.keys()}\n else:\n sgt_dict = {org: 'yes' for org in db_taxa_dict.keys()}\n df['SGT'] = df.index.map(sgt_dict)\n\n # Fill in column for paralogs. If no paralogs entry is 'none'.\n # If there are paralogs entry is 'yes'. 
If there are paralogs, but --ortholog_only is given entry is 'no'.\n if new_data:\n pass\n else:\n paralogs_dict = {org: ('yes' if org in has_paralogs and not args.orthologs_only\n else 'no' if org in has_paralogs and args.orthologs_only else 'none')\n for org in db_taxa_dict}\n df['Paralogs'] = df.index.map(paralogs_dict)\n\n df = df.rename_axis('Unique ID')\n df.to_csv(f'{output_fold}/{out_filename}', sep='\\t')", "def clean_up_raw(df_raw):\n # exclude_subset = ['well', 'tile', 'cell', 'intensity', 'blob'] # causes issues with later joins, maybe a pandas bug\n import lasagna.utils\n df_raw[CYCLE] = df_raw[CYCLE].astype(int)\n df_raw = df_raw.sort_values([WELL, TILE, CELL, BLOB, CYCLE, CHANNEL])\n return df_raw", "def get_subtable(df, col, val) -> pd.DataFrame:\r\n return df[df[col] == val].drop(columns=col)", "def related_df_shaper(df): \n id_related=list()\n id_primary=list()\n id_relation_type=list()\n for id_term in df.id_term:\n \n related_id_list=df.loc[df.id_term==id_term,'related_terms'].values[0]\n id_relation_type_list=df.loc[df.id_term==id_term,'id_relation_type'].values[0]\n for i in range(len(related_id_list)):\n id_related.append(related_id_list[i])\n id_relation_type.append(id_relation_type_list[i])\n id_primary.append(id_term)\n \n df_rs=pd.DataFrame({'id_term':id_primary,'id_term_related':id_related,'id_relation_type':id_relation_type})\n now=pd.to_datetime(datetime.datetime.now())\n df_rs=df_rs.assign(datetime_created=now)\n df_rs=df_rs.assign(datetime_updated=now)\n df_rs=df_rs.assign(id_user_created=7)\n df_rs=df_rs.assign(id_user_updated=7)\n \n return df_rs", "def prune_terms(docs, min_df=3):\n ###TODO\n final_list = []\n items_dict = defaultdict(lambda:0.0)\n for i in docs:\n for j in i:\n items_dict[j] = items_dict[j] + 1\n \n for i in docs:\n for j in list(i):\n if items_dict[j] < min_df:\n del i[j]\n if len(i) != 0:\n final_list.append(Counter(i))\n return final_list", "def removeBiotype(df):\n\tdf = df[ df.Biotype != 'IG_C_gene']\n\tdf = df[ df.Biotype != 'IG_D_gene']\n\tdf = df[ df.Biotype != 'IG_J_gene']\n\tdf = df[ df.Biotype != 'IG_V_gene']\n\tdf = df[ df.Biotype != 'pseudogene']\n\tdf = df[ df.Biotype != 'rRNA']\n\tdf = df[ df.Biotype != 'sRNA']\n\tdf = df[ df.Biotype != 'TR_C_gene']\n\tdf = df[ df.Biotype != 'TR_D_gene']\n\tdf = df[ df.Biotype != 'TR_J_gene']\n\tdf = df[ df.Biotype != 'TR_V_gene']\n\tdf = df[ df.Biotype != 'macro_lncRNA']\n\tdf = df[ df.Biotype != 'bidirectional_promoter_lncRNA']\n\tdf = df[ df.Biotype != '3prime_overlapping_ncRNA']\n\tdf = df[ df.Biotype != 'non_coding']\n\tdf = df[ df.Biotype != 'pseudogene']\n\tdf = df[ df.Biotype != 'TR_J_pseudogene']\n\tdf = df[ df.Biotype != 'IG_C_pseudogene']\n\tdf = df[ df.Biotype != 'IG_J_pseudogene']\n\tdf = df[ df.Biotype != 'IG_pseudogene']\n\tdf = df[ df.Biotype != 'TR_V_pseudogene']\n\tdf = df[ df.Biotype != 'polymorphic_pseudogene']\n\tdf = df[ df.Biotype != 'IG_V_pseudogene']\n\tdf = df[ df.Biotype != 'TEC']\n\tdf = df[ df.Biotype != 'Predictif']\n\tdf = df[ df.Biotype != 'ribozyme']\n\tdf = df[ df.Biotype != 'scRNA']\n\tdf = df[ df.Biotype != 'scaRNA']\n\tdf = df[ df.Biotype != 'snRNA']\n\tdf = df[ df.Biotype != 'snoRNA']\n\tdf = df[ df.Biotype != 'vaultRNA']\n\tdf = df[ df.Biotype != 'translated_processed_pseudogene']\n\treturn df", "def delete_child_rows(rows, doctype):\n\tfor p in list(set([r[1] for r in rows])):\n\t\twebnotes.conn.sql(\"\"\"delete from `tab%s` where parent=%s\"\"\" % (doctype, '%s'), p)", "def delete_child_rows(rows, doctype):\n\tfor p in list(set([r[1] for r in 
rows])):\n\t\twebnotes.conn.sql(\"\"\"delete from `tab%s` where parent=%s\"\"\" % (doctype, '%s'), p)", "def delete_child_rows(rows, doctype):\n\tfor p in list(set([r[1] for r in rows])):\n\t\twebnotes.conn.sql(\"\"\"delete from `tab%s` where parent=%s\"\"\" % (doctype, '%s'), p)", "def delete_cand():\n if row:\n return row[0] + [[key + [left_i]]]", "def get_gastrointestinal_surgery_patients(con) -> pd.DataFrame:\n combined_diagnoses = get_reason_for_admission(con)\n gisurg = combined_diagnoses[\n (combined_diagnoses['surgical'] == 1)\n & (combined_diagnoses['diagnosis'].str.contains(re_gisurg, na=False, flags=re.IGNORECASE))\n ]\n\n return gisurg", "def remove_recipes_with_allergies(df, user):\n\n if len(user.allergies) == 0:\n # User has no allergies - do not need to remove recipes\n return df\n\n allergies = [a + '_allergic' for a in user.allergies]\n\n return df.loc[df[allergies].any(1) == False]", "def ex_pedigree_eight_animals():\n data_dict = {'Calf': [4, 5, 6, 7, 8],\n 'Sire': [1, 3, 1, 4, 3],\n 'Dam': ['Unknown', 2, 2, 5, 6]}\n\n A = pd.DataFrame(data_dict)\n return(A)", "def cleaned(df):\n\n cdf = df.copy()\n lcdf = len(cdf)\n\n danglings = []\n ld = len(danglings)\n\n print('cleaning data frame')\n iteration_times = 1\n while True:\n for index, row in cdf.iterrows():\n if index in danglings:\n cdf = cdf.drop(index)\n elif not (cdf['from'] == row['to']).any():\n danglings.append(index)\n cdf = cdf.drop(index)\n\n if not index % 77:\n print(f'{del_line}{index / lcdf * 100:2.1f}% #{iteration_times}', end='')\n iteration_times += 1\n\n # iterate until `danglings` does not change\n if len(danglings) == ld:\n break\n else:\n ld = len(danglings)\n\n print(f'{del_line}data cleaned with {iteration_times} iterations')\n\n return cdf, np.array(danglings)", "def social_healthcare(osm_path): \n df_all = retrieve(osm_path,'multipolygons',['other_tags', 'amenity']).rename(columns={'other_tags': 'asset'}) \n \n #delete rows that are duplicates of social_amenity\n asset_list = ['hospital', 'doctors', 'clinic', 'dentist', 'pharmacy'] #note that this list of assets should be similar to assets extracted in def social_amenity\n for asset in asset_list:\n index_delete = df_all[(df_all['amenity'] == asset)].index\n df_all.drop(index_delete,inplace=True)\n df_all = df_all.drop(['amenity'], axis=1).reset_index(drop=True) #drop amenity column, reset index\n \n #get requested assets \n df = healthcare_filter(df_all)\n \n return df.reset_index(drop=True)", "def _prune_categories(\n df: pd.DataFrame,\n prune_dict: dict,\n fill: object = \"Other\",\n) -> pd.DataFrame:\n df = df.copy()\n for col in df.columns:\n if col in CATEGORICAL_TRANS:\n if col not in prune_dict.keys():\n continue\n n = prune_dict[col]\n if n == -1:\n continue\n # Get most frequent:\n major_categories = list(\n df[col].value_counts()[:n].keys()\n )\n mask = df[col].isin(major_categories)\n df[col][~ mask] = fill\n return df" ]
[ "0.5444545", "0.5277261", "0.52311677", "0.5200804", "0.51652235", "0.51347816", "0.51267016", "0.51248044", "0.50985575", "0.5084206", "0.50411373", "0.5032984", "0.5006409", "0.49748224", "0.49653932", "0.48951414", "0.48935908", "0.48864013", "0.4885388", "0.48809195", "0.48646176", "0.48646176", "0.48646176", "0.4864309", "0.48604748", "0.48503187", "0.4847633", "0.48439285", "0.48393938", "0.4780029" ]
0.5743044
0
Draw a histogram given the graph.
def draw_histogram(graph: Graph) -> Optional[Graph]:
  if not graph:
    return None
  try:
    # generate and open a new figure
    figure, ax = plt.subplots()
    # When graph.x or y is str, the histogram is ill-defined.
    ax.barh(graph.y, graph.x, color=graph.color)
    ax.set_title(graph.title)
    if graph.xlabel:
      ax.set_xlabel(graph.xlabel)
    if graph.ylabel:
      ax.set_ylabel(graph.ylabel)
    for index, value in enumerate(graph.x):
      show_value = f'{value:.2f}' if isinstance(value, float) else value
      # To avoid the number has overlap with the box of the graph.
      if value > 0.9 * max(graph.x):
        ax.text(
            value - (value / 10), index, show_value, va='center', color='w'
        )
      else:
        ax.text(value, index, show_value, va='center')
    graph.figure = figure
    graph.base64str = figure_to_base64str(figure)
  except TypeError as e:
    logging.info('skipping %s for histogram; plot error: %s:', graph.name, e)
    return None
  finally:
    # closes the figure (to limit memory consumption)
    plt.close()
  return graph
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_histogram(xx, hist_ax, alpha=1.0, colorV=None, facecolor='#80D080', edgecolor=None, nbins=75,\n fontsize=8, linewidth=1, xlabel=None, ylabel=None, label=None):\n plt.sca(hist_ax)\n if colorV is None:\n n, bins, patches = hist_ax.hist(xx, nbins, histtype='stepfilled', alpha=alpha, linewidth=linewidth, label=label)\n plt.setp(patches, 'facecolor', facecolor)\n if edgecolor is not None:\n plt.setp(patches, 'edgecolor', edgecolor)\n else:\n n, bins, patches = hist_ax.hist(xx, nbins, alpha=alpha, linewidth=linewidth, label=label)\n\n if xlabel is not None:\n hist_ax.set_xlabel(xlabel, fontsize=fontsize)\n if ylabel is not None:\n hist_ax.set_ylabel(ylabel, fontsize=fontsize)\n return hist_ax", "def plot_histogram(self) -> None:\n\n if self.data:\n plt.hist(self.data)\n plt.title(\"Histogram of data\")\n plt.xlabel(\"data\")\n plt.ylabel(\"count\")\n else:\n raise ValueError(\"Histogram cannot be generated as no\\\n data has been provided\")", "def draw_histogram(data, # type: thelper.typedefs.ArrayType\n bins=50, # type: Optional[int]\n xlabel=\"\", # type: Optional[thelper.typedefs.LabelType]\n ylabel=\"Proportion\", # type: Optional[thelper.typedefs.LabelType]\n show=False, # type: Optional[bool]\n block=False, # type: Optional[bool]\n ): # type: (...) -> thelper.typedefs.DrawingType\n fig, ax = plt.subplots()\n ax.hist(data, density=True, bins=bins)\n if len(ylabel) > 0:\n ax.set_ylabel(ylabel)\n if len(xlabel) > 0:\n ax.set_xlabel(xlabel)\n ax.set_xlim(xmin=0)\n if show:\n fig.show()\n if block:\n plt.show(block=block)\n return fig\n plt.pause(0.5)\n return fig, ax", "def plot_histograms(top, bot, edges, resolution, *, ax=None):\n if ax is None:\n ax = plt.gca()\n\n ax.hlines(y=0,\n xmin=0,\n xmax=1,\n linestyle='dashed',\n color='black',\n alpha=0.2)\n ax.bar(edges, top, width=resolution)\n ax.bar(edges, -bot, width=resolution)\n # Set some sensible defaults - these can be overridden after the fact,\n # since we return the axes object\n ax.set_xlim((-0.05, 1.05))\n ax.set_xlabel('Predicted Probability')\n height = max(abs(x) for x in ax.get_ylim())\n ax.set_ylim((-height, height))\n ax.set_ylabel('Count')\n return ax", "def makeHistogram(values, numBins, xLabel, yLabel, title=None):", "def plot_hist(axis, data, title=None):\n axis.hist(data.ravel(), bins=256)\n axis.ticklabel_format(axis='y', style='scientific', scilimits=(0, 0))\n\n if title:\n axis.set_title(title)\n\n return None", "def hist(self):\r\n plt.hist(self.data_array, bins='auto', density=False, facecolor='b')\r\n plt.title(self.column_name)\r\n plt.savefig(self.column_name + \".svg\")\r\n plt.close()", "def histogram(self):\r\n channel = self.ui.channel_selection.itemData(self.ui.channel_selection.currentIndex())\r\n\r\n #create a window, the reference must be stored, because the window\r\n #gets destroyed when its reference is garbage collected\r\n #make plotWindow a list and append to that if multiple windows should be possible\r\n title = \"histogram of {:s} channel\".format(self.ui.channel_selection.currentText())\r\n self.plotWindow = pyguitools.SimplePlotWindow(name = title)\r\n self.plotWindow.ax1.hist(self.npImg[self.ui.y0.value():self.ui.y1.value(),\r\n self.ui.x0.value():self.ui.x1.value(), \r\n channel].flatten(),\r\n bins=self.settings[\"histogramm bins\"],\r\n range=(self.settings[\"histogramm min\"],self.settings[\"histogramm max\"]))\r\n self.plotWindow.ax1.set_xlim(self.settings[\"histogramm min\"],self.settings[\"histogramm max\"]) \r\n self.plotWindow.show()", "def plot_hist(self):\n labels = 
[self.get_class_str(action, obj)\n for (action, obj, subj, rec, beg, end) in self.action_clips]\n visualize.plot_hist(labels, proportion=True)", "def showHistogram(x,y,txt,**options):\n plot2d_system = pyformex.cfg['gui/plot2d']\n\n if plot2d_system == 'gnuplot':\n if not utils.hasModule('gnuplot'):\n error(\"You do not have the Python Gnuplot module installed.\\nI can not draw the requested plot.\")\n return\n \n import Gnuplot\n maxlen = min(len(x),len(y))\n data = Gnuplot.Data(x[:maxlen],y[:maxlen],title=txt, with_='histeps') \n g = Gnuplot.Gnuplot(persist=1)\n g.title('pyFormex histogram: %s' % txt)\n g.plot(data)\n \n elif plot2d_system == 'qwt':\n pass\n #from PyQt4.Qwt5.qplt import *", "def plot_histogram(self, years_statistics):\n\n plt.hist(years_statistics, normed=True)\n plt.ylabel('Histogram');\n plt.hist(years_statistics)\n plt.title(\"Statistics for years\")\n plt.xlabel(\"Value\")\n plt.ylabel(\"Frequency\")\n plt.show()", "def draw_histogram(x: pd.Series, x_label: str):\n\n x.plot.hist(grid=True, bins=20, rwidth=0.9, color='#607c8e')\n plt.title('Histogram for %s' % x_label)\n plt.show()", "def draw_hist(self, canvas, data_type, x_label, y_label):\n\n canvas.axes.cla()\n data_in = self.import_postdata(data_type)\n data_temp = [np.nan if i == np.inf else i for i in data_in]\n data_temp = [i for i in data_temp if (math.isnan(i) == False)]\n if len(data_temp) > 1:\n bw = 2 * np.subtract.reduce(np.percentile(data_temp, [75, 25])) / len(data_temp) ** (1 / 3)\n if bw == 0:\n bw = 1\n canvas.axes.hist(data_temp, bins=np.arange(min(data_temp), max(data_temp) + bw, bw))\n else:\n canvas.axes.hist(data_temp, bins=1)\n canvas.axes.set_ylabel(y_label, fontsize='10')\n canvas.axes.set_xlabel(x_label, fontsize='10')\n canvas.draw()", "def histogram(values, title, fig_size=(4,3), path=None):\n plt.clf()\n f, ax = plt.subplots(1, figsize=fig_size)\n ax.hist(values, bins=60)\n ax.set_title(title)\n f.tight_layout()\n if(path != None):\n f.savefig(path+'/hist_'+title+'.png')", "def show_histogram(im):\n\n if im.ndim == 2:\n # Input image is single channel\n plt.hist(im.flatten(), 256, range=(0, 250), fc='k')\n plt.show()\n\n elif im.ndim == 3:\n # Input image is three channels\n fig = plt.figure()\n fig.add_subplot(311)\n plt.hist(im[..., 0].flatten(), 256, range=(0, 250), fc='b')\n fig.add_subplot(312)\n plt.hist(im[..., 1].flatten(), 256, range=(0, 250), fc='g')\n fig.add_subplot(313)\n plt.hist(im[..., 2].flatten(), 256, range=(0, 250), fc='r')\n plt.show()", "def makeHistogram(values, numBins, xLabel, yLabel, title=None):\n pylab.hist(values, bins = numBins)\n pylab.xlabel(xLabel)\n pylab.ylabel(yLabel)\n if not title == None:\n pylab.title(title)\n pylab.show()", "def makeHistogram(values, numBins, xLabel, yLabel, title=None):\r\n pylab.hist(values, bins = numBins)\r\n pylab.xlabel(xLabel)\r\n pylab.ylabel(yLabel)\r\n if title != None:\r\n pylab.title(title)\r\n pylab.show()", "def hist(data):\n\n fig = plt.figure()\n ax1 = fig.add_subplot(111)\n plt.hold(True)\n for x in xrange(len(data[:,0,0])):\n counts, edges = np.histogram(data[x,:,:],bins=100)\n centers = [(edges[i]+edges[i+1])/2.0 for i,v in enumerate(edges[:-1])]\n ax1.plot(centers,counts)\n plt.hold(False)\n\n plt.show(block=False)\n\n # return fig", "def hist(self, bins):\n x = self.x\n plt.hist(x, bins)\n plt.xlabel('Observed Data')\n plt.ylabel('Frequency')\n plt.show()", "def plot_histogram(hist, outname, xlabel=\"\", ylabel=\"frequency\"):\n plt.bar(hist[:,0], hist[:,1])\n plt.xlabel(xlabel)\n plt.ylabel(ylabel)\n 
plt.savefig(outname)\n plt.close()", "def plot_hitstogram_graph(data_values, title,\r\n number_of_keys,\r\n max_val,\r\n file_in):\r\n\r\n # bins = max(data_values)\r\n # pylab.hist(data_values, facecolor='blue')\r\n pylab.hist(data_values, facecolor='green', alpha=0.6)\r\n pylab.grid(True)\r\n pylab.title(title + \"_histogram\")\r\n pylab.xlabel('number in cluster')\r\n pylab.ylabel('Count')\r\n pylab.savefig(file_in + \"_\" + title + '_histogram.png')\r\n plt.close()\r\n pylab.close()\r\n os.chdir('..')", "def draw_histogram(self):\n\n hframe = self.dpar.latest_frame[::4,::4]\n\n gcy_shrink = 0.8\n gcy_offset = (1. - gcy_shrink)/2.\n \n gcurve_x = [0, self.dpar.iwindow[0][0]/100., self.dpar.iwindow[0][1]/100., 1.]\n gcurve_y = [gcy_offset, gcy_offset, 1.-gcy_offset, 1.-gcy_offset]\n\n hist, bin_edges = np.histogram(hframe.flatten(), bins=HIST_NBINS, range=(0., self.camera.pixel_maxval))\n hcurve_y = np.array(hist).astype(float) / float(max(hist))\n #hcurve_x = np.array(bin_edges).astype(float)[0:-1] / 256.\n hcurve_x = np.arange(HIST_NBINS) / HIST_NBINS\n\n\n self.hist_canvas.gain_trace.set_data(gcurve_x, gcurve_y)\n for hb,hy in zip(self.hist_canvas.hist_bars, hcurve_y):\n hb.set_height(hy)\n\n self.hist_canvas.draw()", "def makeHistogram(values, numBins, xLabel, yLabel, title=None):\r\n # TODO\r\n pylab.hist(values, bins = numBins)\r\n pylab.xlabel(xLabel)\r\n pylab.ylabel(yLabel)\r\n if title != None:\r\n pylab.title(title)\r\n pylab.show()", "def plot_histogram(hs, bins, ax=None, labels=None, title=None, **bar_params):\r\n # identify how many histogram series:\r\n if len(hs) == len(bins) - 1:\r\n nhs = 1\r\n hs = [hs]\r\n else:\r\n nhs = len(hs)\r\n if labels == None:\r\n labels = ['' for i in range(nhs)]\r\n width = (bins[1]-bins[0])/nhs\r\n x = np.array(bins[0:-1])\r\n if ax==None:\r\n f, ax = plt.subplots()\r\n for i in range(nhs):\r\n ax.bar(x + width * (i+0.5), hs[i], width=width, label=labels[i], **bar_params)\r\n if labels[0] != '':\r\n plt.legend()\r\n if title!=None:\r\n plt.title(title)\r\n return ax", "def plot_histogram(self,ax=None,**kwargs):\n if not ax:\n fig = plt.figure()\n ax = fig.add_subplot(111)\n probs,bins,patches = ax.hist(self.scores_list,normed=True,label=\"Sample\",**kwargs)\n ax.vlines(self.xhat,*ax.get_ylim(),label='Mean',color='r')\n ax.legend()\n return ax,probs,bins", "def add_histogram(self, tag, values, global_step=None, bins='tensorflow'):\n values = make_np(values)\n self.vis.histogram(make_np(values), opts={'title': tag})", "def plot_histogram(img):\n rgb_hist = rgb_histogram(img)\n plt.figure()\n for color, hist in rgb_hist.items():\n plt.plot(hist, color=color)\n plt.xlim([0, 256])", "def histogram(data, title, path):\n plt.hist(data,\n bins=60)\n plt.xticks(size=22)\n plt.yticks(size=22)\n plt.title(title,\n fontsize=30)\n plt.savefig(path)\n plt.clf()", "def plot_hist(xdata, ylabels):\n fig = plt.figure(figsize=(6,3))\n #plt.hist(xdata, 2, normed=True)\n #plt.show()\n\n ##weights = np.ones_like(xdata)/float(len(xdata))\n ##plt.hist(xdata, bins=100, weights=weights)\n ##plt.show()\n counter = 0\n for dataset in xdata:\n density, bins = np.histogram(dataset, bins=100, density=True)\n unity_density = density/density.sum()\n bincenters = 0.5*(bins[1:]+bins[:-1])\n plt.plot(bincenters, unity_density, label=ylabels[counter])\n counter += 1\n plt.legend(loc='lower right', frameon=False, numpoints=1)\n plt.show()", "def drawHist(data, xLabel, unit, binSize, title):\n mean = np.mean(data)\n median = np.median(data)\n mode = 
stats.mode(data)[0].astype(float)\n \n q1, q3 = np.percentile(data, [25, 75])\n iqr = q3 - q1\n sigma = np.std(data)\n \n \n bins = np.arange(min(data), max(data) + 1, binSize)\n plt.style.use('dark_background')\n fig, ax = plt.subplots(figsize=(12,7))\n plt.hist(data, bins=bins, histtype='bar') \n plt.title(title)\n plt.xlabel(xLabel + \" \" + unit)\n plt.ylabel('count')\n ymax = ax.get_ylim()[1]\n ax.vlines(mean, 0, ymax, color='red', label='mean')\n ax.vlines(mean-sigma, 0, ymax, color='red', linestyle='--', \n label='mean +/- std')\n ax.vlines(mean+sigma, 0, ymax, color='red', linestyle='--')\n plt.legend()\n plt.show()\n \n print(\"Einheit: \", unit)\n print(\"Minimum: \", round(data.min(),3))\n print(\"Maximum: \", round(data.max(),3))\n print(\"Mittelwert: \", round(mean,3))\n print(\"Median: \", round(median,3))\n print(\"Modus: \", round(mode[0],3))\n print(\"Standardabweichung: \", round(sigma, 3))\n print(\"1. Quartil: \", round(q1,3))\n print(\"3. Quartil: \", round(q3,3))\n print(\"Quartilsdifferenz: \", round(iqr,3))" ]
[ "0.6689416", "0.65681064", "0.6548414", "0.6526117", "0.64784855", "0.6347681", "0.62804496", "0.6251401", "0.62138206", "0.62123525", "0.6209556", "0.62087065", "0.6201147", "0.6197796", "0.61838293", "0.61743563", "0.6154292", "0.61535", "0.61413497", "0.61310756", "0.6111287", "0.6103744", "0.60668236", "0.60619634", "0.60485554", "0.6021529", "0.6013355", "0.6010173", "0.5996812", "0.5996296" ]
0.74443585
0
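For orientation only, a minimal self-contained sketch in the spirit of the draw_histogram document above. The project-specific Graph dataclass is not available here, so plain lists stand in for graph.y (labels) and graph.x (values), and the figure is saved to disk rather than attached to a Graph object; all names below are illustrative assumptions, not part of the dataset row.

import matplotlib
matplotlib.use('Agg')  # render off-screen, no display needed
import matplotlib.pyplot as plt

labels = ['a', 'b', 'c']   # stands in for graph.y
values = [3, 7, 5]         # stands in for graph.x

fig, ax = plt.subplots()
ax.barh(labels, values)
for index, value in enumerate(values):
    ax.text(value, index, str(value), va='center')  # annotate each bar with its value
fig.savefig('hist.png', bbox_inches='tight')
plt.close(fig)  # close the figure to limit memory consumption, as in the document above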
Converts a Matplotlib figure to a base64 string encoding.
def figure_to_base64str(fig: matplotlib.figure.Figure) -> str:
  buf = io.BytesIO()
  fig.savefig(buf, bbox_inches='tight', format='png')
  return base64.b64encode(buf.getbuffer().tobytes()).decode('ascii')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def plot2uri(figure):\n image = io.BytesIO()\n figure.savefig(image, format=\"png\")\n image.seek((0))\n string = base64.b64encode(image.read())\n uri = urllib.parse.quote(string)\n\n return uri", "def base64(self):\n image = self.png.getvalue()\n return base64.encodestring(image).decode('utf-8')", "def fig_to_uri(in_fig, close_all=True, **save_args):\n out_img = BytesIO()\n in_fig.savefig(out_img, format='png', **save_args)\n if close_all:\n in_fig.clf()\n plt.close('all')\n out_img.seek(0) # rewind file\n encoded = base64.b64encode(out_img.read()).decode(\"ascii\").replace(\"\\n\", \"\")\n return \"data:image/png;base64,{}\".format(encoded)", "def fig_to_uri(in_fig, close_all=True, **save_args):\n out_img = BytesIO()\n in_fig.savefig(out_img, format='png', **save_args)\n if close_all:\n in_fig.clf()\n plt.close('all')\n out_img.seek(0) # rewind file\n encoded = base64.b64encode(out_img.read()).decode(\"ascii\").replace(\"\\n\", \"\")\n return \"data:image/png;base64,{}\".format(encoded)", "def save_png_to_str(plt, plotwidth=400):\n from StringIO import StringIO\n figfile = StringIO()\n plt.savefig(figfile, format='png')\n figfile.seek(0) # rewind to beginning of file\n figdata_png = figfile.buf # extract string\n import base64\n figdata_png = base64.b64encode(figdata_png)\n html_text = '<img src=\"data:image/png;base64,%(figdata_png)s\" width=\"%(plotwidth)s\">' % vars()\n return html_text", "def data_2_base64(data: np.ndarray) -> str:\n bytes_io = io.BytesIO()\n np.save(bytes_io, data, allow_pickle=False)\n return base64.b64encode(zlib.compress(bytes_io.getvalue())).decode('utf-8')", "def _encode_base64(data: str) -> str:\n ebytes = base64.b64encode(data.encode(\"utf-8\"))\n estring = str(ebytes, \"utf-8\")\n return estring", "def data64(self) -> str:\n return Image.encode64(self.data)", "def get_body(self):\n from matplotlib.backends.backend_agg import \\\n FigureCanvasAgg as FigureCanvas\n\n canvas = FigureCanvas(self._body)\n png_output = BytesIO()\n canvas.print_png(png_output)\n data = png_output.getvalue()\n\n data_uri = base64.b64encode(data).decode('utf-8')\n return '<img title=\"{}\" src=\"data:image/png;base64,{}\">'.format(\n self.key, data_uri)", "def adobe_base64_encode(cls, to_encode):\n if isinstance(to_encode, unicode):\n to_encode = to_encode.encode(\"utf8\")\n encoded = base64.encodestring(to_encode)\n return encoded.replace(b\"+\", b\":\").replace(b\"/\", b\";\").replace(b\"=\", b\"@\").strip()", "def base64_string(self) -> global___Expression:", "def get_Base64(self):\n\n return base64_with_linebreaks(self.get_DER())", "def repr_figure(self):\n\n default_kwargs = {'placement': self.placement,\n 'caption': self.caption,\n 'label': self.label,\n 'figure_env_name': self.figure_env_name}\n\n myfig = self.extension_mapping[self.extension]()\n\n return self.fig_str.format(myfig=myfig, **default_kwargs)", "def dumps(self) -> str:\n bits = dill.dumps(self)\n return base64.b64encode(bits).decode(\"ascii\")", "def _plt_to_png(self):\n import matplotlib.pyplot as plt\n from io import BytesIO\n\n with BytesIO() as file_obj:\n plt.savefig(file_obj, format='png')\n plt.close() # supress plot output\n file_obj.seek(0)\n png = file_obj.read()\n return png", "def base64encode(self, value):\n\n return value.encode(\"base64\")[:-1].replace(\"\\n\", \"\")", "def get_image_base64_str(self, message: ImageMessage) -> str:\n return ImageContentProcessor.binary_img_to_base64_str(self._core.get_message_content(str(message.id)).content)", "def _get_image(x):\n return 
b64encode(x).decode('ascii')", "def base64_encode(data):\n return base64.encodestring(data);", "def image_to_base64str(image):\n file_bytes = image.file.read()\n base64_img_str = 'data:image;base64, '\n base64_img_str += str(base64.b64encode(file_bytes), 'utf-8')\n return base64_img_str", "def as_str(self) -> str:\n return dumps(self.as_dict(), cls=NumpyEncoder)", "def encodeFrame(frame):\n return base64.b64encode(frame)", "def image_to_base64(pixbuf, activity):\n _file_name = os.path.join(get_path(activity, 'instance'), 'imagetmp.png')\n if pixbuf != None:\n pixbuf.save(_file_name, \"png\")\n _base64 = os.path.join(get_path(activity, 'instance'), 'base64tmp')\n _cmd = \"base64 <\" + _file_name + \" >\" + _base64\n subprocess.check_call(_cmd, shell=True)\n _file_handle = open(_base64, 'r')\n _data = _file_handle.read()\n _file_handle.close()\n return _data", "def png2x(fig):\n import matplotlib\n if not fig.axes and not fig.lines:\n return\n # double DPI\n dpi = 2 * matplotlib.rcParams['savefig.dpi']\n pngbytes = print_figure(fig, fmt='png', dpi=dpi)\n x,y = pngxy(pngbytes)\n x2x = x // 2\n y2x = y // 2\n png64 = encodestring(pngbytes).decode('ascii')\n return u\"<img src='data:image/png;base64,%s' width=%i height=%i/>\" % (png64, x2x, y2x)", "def my_base64encode(s):\n return base64.b64encode(s).decode(\"utf-8\")", "def to_base64(a):\n\treturn str(\n\t\tbinascii.b2a_base64(\n\t\t\ta.tostring()\n\t\t).strip(),\n\t\t\"UTF-8\"\n\t)", "def fn_base64(self, value):\n if isinstance(value, str):\n value = value.encode()\n return base64.b64encode(value).decode()", "def _hash_encoder(data: bytes) -> str:\n return base64.urlsafe_b64encode(data).rstrip(b\"=\").decode('ascii')", "def _encodeArray(self, array):\n\n # Actually, we want dtype,naxis,axNlen,base64(array)\n return base64.b64encode(array.tostring())", "def serialize(obj):\n result = base64.urlsafe_b64encode(obj)\n # this workaround is needed because in case of python 3 the\n # urlsafe_b64encode method returns string of 'bytes' class.\n result = result.decode()\n return result" ]
[ "0.7147997", "0.68165886", "0.6809981", "0.6809981", "0.6721276", "0.6467986", "0.6416611", "0.64131576", "0.6268338", "0.62255985", "0.6189878", "0.6181082", "0.5922518", "0.5921818", "0.5912652", "0.5896791", "0.5858558", "0.5847519", "0.5846032", "0.58170253", "0.5803382", "0.5793282", "0.5779276", "0.57789713", "0.57644224", "0.5763317", "0.5760317", "0.57381743", "0.57276154", "0.57083625" ]
0.88419247
0
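A short usage sketch of the figure-to-base64 pattern from the document above, using only standard matplotlib, io, and base64 calls. It is illustrative and not part of the dataset row; buf.getvalue() is used as an equivalent of the snippet's buf.getbuffer().tobytes().

import base64
import io
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1, 2], [0, 1, 4])

buf = io.BytesIO()
fig.savefig(buf, bbox_inches='tight', format='png')        # serialize the figure as PNG bytes
encoded = base64.b64encode(buf.getvalue()).decode('ascii')  # PNG bytes -> base64 text
plt.close(fig)
print(encoded[:40], '...')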
Get person's ID from mbank.tcredits (turnes DB)
def get_person_id(contract_num, phone):
    exfin_connection = MySQLdb.connect(
        host="10.10.100.27",  # host of MySQL database
        user="root",  # user's username
        passwd="Orraveza(99)",  # your password
        db="mbank",  # name of the database
        charset="utf8"
    )

    # create CURSOR and set UTF8 params
    exfin_cursor = exfin_connection.cursor()
    exfin_cursor.execute('SET NAMES utf8;')
    exfin_cursor.execute('SET CHARACTER SET utf8;')
    exfin_cursor.execute('SET character_set_connection=utf8;')
    print("get_person_id", contract_num, phone)
    exfin_cursor.execute(
        """
        SELECT
            tc.id,
            tc.client_id,
            ts.status as last_status,
            ts.dt_created,
            CONCAT(td.name, tp.tel_mob_num)
        FROM
            mbank.tcredits tc
            join mbank.tstatuses ts on ts.credit_id = tc.id and ts.is_last = 1
            join mbank.tpersons tp on tp.id = tc.client_id
            join mbank.tdropdown_details td on td.id = tp.tel_mob_kod
        WHERE tc.contract_num = {0}
        ORDER BY ts.dt_created DESC
        LIMIT 1;
        """.format(contract_num)
    )
    person_id = exfin_cursor.fetchall()
    print(person_id)
    try:
        """
        if credit status == 5 return client's ID
        status 5 is 'active credit'
        and
        if phone contain tel_mob_num
        """
        if person_id[0][2] in [5, '5', 55, '55'] and person_id[0][4] in phone:
            return person_id[0][1]
        else:
            return None
    except IndexError:
        return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_person_id_and_tel(contract_num):\n exfin_connection = MySQLdb.connect(\n host=\"10.10.100.27\", # host of MySQL database\n user=\"root\", # user's username\n passwd=\"Orraveza(99)\", # your password\n db=\"mbank\", # name of the database\n charset=\"utf8\"\n )\n\n # create CURSOR and set UTF8 params\n exfin_cursor = exfin_connection.cursor()\n exfin_cursor.execute('SET NAMES utf8;')\n exfin_cursor.execute('SET CHARACTER SET utf8;')\n exfin_cursor.execute('SET character_set_connection=utf8;')\n if contract_num.isdigit():\n exfin_cursor.execute(\n \"\"\"\n SELECT\n tc.id,\n tc.client_id,\n ts.status as last_status,\n ts.dt_created,\n tp.tel_mob_num,\n tp.tel_mob_kod\n FROM\n mbank.tcredits tc\n join mbank.tstatuses ts on ts.credit_id = tc.id\n join mbank.tpersons tp on tp.id = tc.client_id\n WHERE tc.contract_num = {0}\n ORDER BY ts.dt_created DESC\n LIMIT 1;\n \"\"\".format(contract_num)\n )\n person_data = exfin_cursor.fetchall()\n else:\n return None\n\n if person_data:\n exfin_cursor.execute(\n \"\"\"\n SELECT\n name\n FROM\n mbank.tdropdown_details\n WHERE id = {0};\n \"\"\".format(person_data[0][5])\n )\n person_mobile_operator_code = exfin_cursor.fetchall()[0]\n\n try:\n \"\"\"\n if client_id and tel_mob_num exists\n \"\"\"\n if person_data[0][1] and person_data[0][4]:\n print(\n \"get_person_id_and_tel\",\n \"+38{0}{1}\".format(\n person_mobile_operator_code[0],\n person_data[0][4]\n )\n )\n return (\n person_data[0][1],\n \"+38{0}{1}\".format(\n person_mobile_operator_code[0],\n person_data[0][4]\n )\n )\n else:\n return \"\"\n except IndexError:\n return \"\"\n else:\n return \"\"", "def getID():", "def get_person_id(person_data):\n person_ref = person_data['Casualty_Reference']\n veh_ref = person_data['Vehicle_Reference']\n acc_id = get_acc_id_from_data(person_data)\n person_id = common.get_gb_person_id(acc_id, int(veh_ref), int(person_ref))\n return person_id", "def get_bank_id_by_name(bank_name: str) -> int:\n # Open a new connection\n db, cursor = db_connector.cursor()\n\n query = \"select id from bank where name = '{}';\".format(bank_name)\n cursor.execute(query)\n data = cursor.fetchall()\n db.disconnect()\n return data[0][0]", "def getID(self) -> int:\n ...", "def get_id(self) -> int:\n query = '''SELECT id \n FROM ESLReceipts \n WHERE Transaction_Number=? AND Date=? AND Description=? \n AND Memo=? AND Amount_Debit=? \n AND Amount_Credit=? AND Balance=? \n AND Check_Number=? AND Fees=? \n AND Card_Type=? AND Is_Payment=? \n AND Is_Transaction=? 
AND User_id=?;'''\n return int(self.db.fetchall(query, values=self.to_tuple())[0][0])", "def getTournamentID(t_name, create=True):\n conn, cur = connect()\n query = \"SELECT T_ID FROM TOURNAMENTS WHERE T_NAME = %s;\"\n param = (t_name, )\n cur.execute(query, param)\n t_id = cur.fetchone()\n if t_id is None and create:\n query = \"INSERT INTO TOURNAMENTS (T_NAME) VALUES (%s);\"\n cur.execute(query, param)\n conn.commit()\n cur.execute(\"SELECT MAX(T_ID) FROM TOURNAMENTS;\")\n t_id = cur.fetchone()\n conn.close()\n if t_id is None and create is False:\n return -1\n return t_id[0]", "def getCorpusID(corpus, conn):\n cur = conn.cursor()\n cur.execute(\"SELECT id_corpus FROM corpus WHERE name=?\", (corpus,))\n id_corpus = cur.fetchone()[0]\n return id_corpus", "async def _get_account_id(db, name):\n assert name, 'no account name specified'\n _id = await db.query_one(\"SELECT id FROM hive_accounts WHERE name = :n\", n=name)\n assert _id, \"account not found: `%s`\" % name\n return _id", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def getFutbinID(self, internalid):\n internalid = int(internalid)\n mydict = pd.read_csv('./data/fut_bin21_players.csv',\n header=None, index_col=0, squeeze=True).to_dict()\n futbinid = mydict[internalid]\n return futbinid", "def get_primary_id(self):", "def getID(self):\n return self.__clubDbID", "def select_person_by_id(conn, person_id):\n sql = \"\"\"SELECT * FROM person WHERE id=?\"\"\"\n cur = conn.cursor()\n try:\n cur.execute(sql, (person_id,))\n data = cur.fetchall()\n if data:\n userid = (data[0][0])\n print \"\\nQuerying for userID {}\\n\".format(userid)\n print sql_pp(cur, data)\n except OperationalError, msg:\n print \"SQL error {} while running our code\".format(msg)", "def getPersonId(tx, withApp):\n if not withApp:\n query = (\n \"MATCH (p:Person) \"\n \"RETURN ID(p);\"\n )\n else:\n query = (\n \"MATCH (p:Person) \"\n \"WHERE p.app = \\\"True\\\" \"\n \"RETURN ID(p);\"\n )\n\n idsList = tx.run(query).data()\n return idsList", "def getTrid(request):\n words = request.replace('\\'').strip().split(',')[2].split(':')\n return int(words[0])", "def get_tournament_id(self):\n User = Query()\n serialized_tournament = self.serialize_tournament()\n documents = table_tournaments.search(User.tournament_name == str(serialized_tournament['tournament_name'])\n and User.start_date == str(serialized_tournament['start_date']))\n id_tournament = None\n for document in documents:\n id_tournament = document.doc_id\n return id_tournament", "def find_ID(table):\n if field_exists(table, \"orig_ID\"):\n return \"orig_ID\"\n elif field_exists(table, \"ORIG_FID\"):\n return \"ORIG_FID\"\n else:\n return arcpy.Describe(table).OIDFieldName", "def get_id(self): \n\t\treturn (self.user_id)", "def get_sample_id():\n response = requests.get('http://localhost:5000/api/persons')\n data = response.json()\n sample_uuid = data[0]['id']\n\n return sample_uuid", "def get_turn_holder() -> EntityID:\n return store.turn_holder", "def myID() -> np.int:\r\n return 304976335", "def myID() -> np.int:\r\n return 304976335", "def get_actual_id(translated):", "def getIdent (self) :\n return self.id", "def get_round_id(turn_number):\n database = TinyDB('db.json')\n rounds_table = database.table('round')\n # recuperation du nombre de tour du tournoi\n id_round = []\n for i in range(1, turn_number + 1):\n # getting round\n data = rounds_table.all()[-i]\n # Obtaining a round ID\n id_round.append(data.doc_id)\n return id_round", "def 
_get_id(self):\n return self.id" ]
[ "0.6560628", "0.6496635", "0.6305405", "0.62735915", "0.6225594", "0.6144446", "0.60823274", "0.5909578", "0.5867553", "0.5829623", "0.5829623", "0.5829623", "0.5829623", "0.5757211", "0.57547575", "0.57257414", "0.5696543", "0.56823105", "0.56737465", "0.56495553", "0.5647907", "0.5639732", "0.5607205", "0.5602476", "0.55992556", "0.55992556", "0.558616", "0.55763507", "0.55714685", "0.5561238" ]
0.68747634
0
Get person's ID from mbank.tcredits (turnes DB)
def get_person_id_and_tel(contract_num):
    exfin_connection = MySQLdb.connect(
        host="10.10.100.27",  # host of MySQL database
        user="root",  # user's username
        passwd="Orraveza(99)",  # your password
        db="mbank",  # name of the database
        charset="utf8"
    )

    # create CURSOR and set UTF8 params
    exfin_cursor = exfin_connection.cursor()
    exfin_cursor.execute('SET NAMES utf8;')
    exfin_cursor.execute('SET CHARACTER SET utf8;')
    exfin_cursor.execute('SET character_set_connection=utf8;')
    if contract_num.isdigit():
        exfin_cursor.execute(
            """
            SELECT
                tc.id,
                tc.client_id,
                ts.status as last_status,
                ts.dt_created,
                tp.tel_mob_num,
                tp.tel_mob_kod
            FROM
                mbank.tcredits tc
                join mbank.tstatuses ts on ts.credit_id = tc.id
                join mbank.tpersons tp on tp.id = tc.client_id
            WHERE tc.contract_num = {0}
            ORDER BY ts.dt_created DESC
            LIMIT 1;
            """.format(contract_num)
        )
        person_data = exfin_cursor.fetchall()
    else:
        return None

    if person_data:
        exfin_cursor.execute(
            """
            SELECT
                name
            FROM
                mbank.tdropdown_details
            WHERE id = {0};
            """.format(person_data[0][5])
        )
        person_mobile_operator_code = exfin_cursor.fetchall()[0]

        try:
            """
            if client_id and tel_mob_num exists
            """
            if person_data[0][1] and person_data[0][4]:
                print(
                    "get_person_id_and_tel",
                    "+38{0}{1}".format(
                        person_mobile_operator_code[0],
                        person_data[0][4]
                    )
                )
                return (
                    person_data[0][1],
                    "+38{0}{1}".format(
                        person_mobile_operator_code[0],
                        person_data[0][4]
                    )
                )
            else:
                return ""
        except IndexError:
            return ""
    else:
        return ""
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_person_id(contract_num, phone):\n exfin_connection = MySQLdb.connect(\n host=\"10.10.100.27\", # host of MySQL database\n user=\"root\", # user's username\n passwd=\"Orraveza(99)\", # your password\n db=\"mbank\", # name of the database\n charset=\"utf8\"\n )\n\n # create CURSOR and set UTF8 params\n exfin_cursor = exfin_connection.cursor()\n exfin_cursor.execute('SET NAMES utf8;')\n exfin_cursor.execute('SET CHARACTER SET utf8;')\n exfin_cursor.execute('SET character_set_connection=utf8;')\n print(\"get_person_id\", contract_num, phone)\n exfin_cursor.execute(\n \"\"\"\n SELECT\n tc.id,\n tc.client_id,\n ts.status as last_status,\n ts.dt_created,\n CONCAT(td.name, tp.tel_mob_num)\n\n FROM\n mbank.tcredits tc\n join mbank.tstatuses ts on ts.credit_id = tc.id and ts.is_last = 1\n join mbank.tpersons tp on tp.id = tc.client_id\n join mbank.tdropdown_details td on td.id = tp.tel_mob_kod\n WHERE tc.contract_num = {0}\n ORDER BY ts.dt_created DESC\n LIMIT 1;\n \"\"\".format(contract_num)\n )\n person_id = exfin_cursor.fetchall()\n print(person_id)\n try:\n \"\"\"\n if credit status == 5 return client's ID\n status 5 is 'active credit'\n and\n if phone contain tel_mob_num\n \"\"\"\n if person_id[0][2] in [5, '5', 55, '55'] and person_id[0][4] in phone:\n return person_id[0][1]\n else:\n return None\n except IndexError:\n return None", "def getID():", "def get_person_id(person_data):\n person_ref = person_data['Casualty_Reference']\n veh_ref = person_data['Vehicle_Reference']\n acc_id = get_acc_id_from_data(person_data)\n person_id = common.get_gb_person_id(acc_id, int(veh_ref), int(person_ref))\n return person_id", "def get_bank_id_by_name(bank_name: str) -> int:\n # Open a new connection\n db, cursor = db_connector.cursor()\n\n query = \"select id from bank where name = '{}';\".format(bank_name)\n cursor.execute(query)\n data = cursor.fetchall()\n db.disconnect()\n return data[0][0]", "def getID(self) -> int:\n ...", "def get_id(self) -> int:\n query = '''SELECT id \n FROM ESLReceipts \n WHERE Transaction_Number=? AND Date=? AND Description=? \n AND Memo=? AND Amount_Debit=? \n AND Amount_Credit=? AND Balance=? \n AND Check_Number=? AND Fees=? \n AND Card_Type=? AND Is_Payment=? \n AND Is_Transaction=? 
AND User_id=?;'''\n return int(self.db.fetchall(query, values=self.to_tuple())[0][0])", "def getTournamentID(t_name, create=True):\n conn, cur = connect()\n query = \"SELECT T_ID FROM TOURNAMENTS WHERE T_NAME = %s;\"\n param = (t_name, )\n cur.execute(query, param)\n t_id = cur.fetchone()\n if t_id is None and create:\n query = \"INSERT INTO TOURNAMENTS (T_NAME) VALUES (%s);\"\n cur.execute(query, param)\n conn.commit()\n cur.execute(\"SELECT MAX(T_ID) FROM TOURNAMENTS;\")\n t_id = cur.fetchone()\n conn.close()\n if t_id is None and create is False:\n return -1\n return t_id[0]", "def getCorpusID(corpus, conn):\n cur = conn.cursor()\n cur.execute(\"SELECT id_corpus FROM corpus WHERE name=?\", (corpus,))\n id_corpus = cur.fetchone()[0]\n return id_corpus", "async def _get_account_id(db, name):\n assert name, 'no account name specified'\n _id = await db.query_one(\"SELECT id FROM hive_accounts WHERE name = :n\", n=name)\n assert _id, \"account not found: `%s`\" % name\n return _id", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def getFutbinID(self, internalid):\n internalid = int(internalid)\n mydict = pd.read_csv('./data/fut_bin21_players.csv',\n header=None, index_col=0, squeeze=True).to_dict()\n futbinid = mydict[internalid]\n return futbinid", "def get_primary_id(self):", "def getID(self):\n return self.__clubDbID", "def select_person_by_id(conn, person_id):\n sql = \"\"\"SELECT * FROM person WHERE id=?\"\"\"\n cur = conn.cursor()\n try:\n cur.execute(sql, (person_id,))\n data = cur.fetchall()\n if data:\n userid = (data[0][0])\n print \"\\nQuerying for userID {}\\n\".format(userid)\n print sql_pp(cur, data)\n except OperationalError, msg:\n print \"SQL error {} while running our code\".format(msg)", "def getPersonId(tx, withApp):\n if not withApp:\n query = (\n \"MATCH (p:Person) \"\n \"RETURN ID(p);\"\n )\n else:\n query = (\n \"MATCH (p:Person) \"\n \"WHERE p.app = \\\"True\\\" \"\n \"RETURN ID(p);\"\n )\n\n idsList = tx.run(query).data()\n return idsList", "def getTrid(request):\n words = request.replace('\\'').strip().split(',')[2].split(':')\n return int(words[0])", "def get_tournament_id(self):\n User = Query()\n serialized_tournament = self.serialize_tournament()\n documents = table_tournaments.search(User.tournament_name == str(serialized_tournament['tournament_name'])\n and User.start_date == str(serialized_tournament['start_date']))\n id_tournament = None\n for document in documents:\n id_tournament = document.doc_id\n return id_tournament", "def find_ID(table):\n if field_exists(table, \"orig_ID\"):\n return \"orig_ID\"\n elif field_exists(table, \"ORIG_FID\"):\n return \"ORIG_FID\"\n else:\n return arcpy.Describe(table).OIDFieldName", "def get_id(self): \n\t\treturn (self.user_id)", "def get_sample_id():\n response = requests.get('http://localhost:5000/api/persons')\n data = response.json()\n sample_uuid = data[0]['id']\n\n return sample_uuid", "def get_turn_holder() -> EntityID:\n return store.turn_holder", "def myID() -> np.int:\r\n return 304976335", "def myID() -> np.int:\r\n return 304976335", "def get_actual_id(translated):", "def getIdent (self) :\n return self.id", "def get_round_id(turn_number):\n database = TinyDB('db.json')\n rounds_table = database.table('round')\n # recuperation du nombre de tour du tournoi\n id_round = []\n for i in range(1, turn_number + 1):\n # getting round\n data = rounds_table.all()[-i]\n # Obtaining a round ID\n id_round.append(data.doc_id)\n return id_round", "def 
_get_id(self):\n return self.id" ]
[ "0.68761975", "0.6497191", "0.63046885", "0.6275422", "0.62251246", "0.61442626", "0.6080249", "0.59082514", "0.58671093", "0.58295715", "0.58295715", "0.58295715", "0.58295715", "0.5758327", "0.5754676", "0.5724665", "0.56968856", "0.56817", "0.5672638", "0.5647657", "0.5647413", "0.5639309", "0.5608062", "0.56010807", "0.55990994", "0.55990994", "0.5586433", "0.557614", "0.5570157", "0.5561008" ]
0.65625286
1
Adapting numpy.int64 type to SQLconform int type using psycopg extension, see [1]_ for more info.
def adapt_numpy_int64(numpy_int64):
    return AsIs(numpy_int64)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def castData(data, type='int64'):\n data = data.astype(type)\n return data", "def cast_to_integer(array, attributes):\n atts = array.att_names\n\n for nm, typ, null in array.sdbtype.full_rep:\n if nm not in attributes:\n continue\n if 'int' in typ:\n continue\n if typ == 'bool':\n x = _new_attribute_label('__cast', array)\n array = array.attribute_rename(nm, x).apply(nm, 'iif(%s, 1, 0)' % x)\n continue\n else:\n raise ValueError(\"Don't know how to turn %s to int64\" % typ)\n\n return array.project(*atts)", "def float_to_int_64(x):\n return np.float64(x).view(np.int64)", "def dtype_int(dtype: DType):\n # TODO: Is there a better way of doing this?\n name = list(convert(dtype, NPDType).__name__)\n while name and name[0] not in set([str(i) for i in range(10)]):\n name.pop(0)\n return _convert_back(_name_to_numpy_dtype(\"int\" + \"\".join(name)), dtype)", "def datetime_to_int64(df):\n if isinstance(df.index, pd.DatetimeIndex):\n df.index = df.index.astype(np.int64) / 1e9\n df.reset_index(inplace=True)\n return df", "def _decimal_to_int64(decimal: Decimal) -> int:\n return int(f\"{decimal:0f}\".replace(\".\", \"\"))", "def get_column_type(cls, **kwargs: Any) -> Any:\n return sqlalchemy.BigInteger()", "def get_int_cast_type(cls):\n\n if PyFunceble.CONFIGURATION.db_type == \"mariadb\":\n return \"INTEGER\"\n return \"SIGNED\"", "def _column_type(t):\n return 'bigint' if datastore_type[t].numeric else 'text'", "def _convert_int(self) -> pd.Series:\n\n if self.requires_nan:\n dtype = \"float\"\n else:\n dtype = \"int\"\n\n return self._convert(dtype=dtype)", "def _convert_int_to_i64(val):\n if val > 0x7FFFFFFFFFFFFFFF:\n val -= 0x10000000000000000\n return val", "def get_column_type(cls, **kwargs: Any) -> Any:\n return sqlalchemy.Integer()", "def execute_cast_timestamp_to_integer(op, data, type, **kwargs):\n return data.value", "def execute_cast_datetime_to_integer(op, data, type, **kwargs):\n return pd.Timestamp(data).value", "def _as_int(self, name):\n org_type = self._get_type(name)\n if org_type == 'int': return None\n valid = ['single']\n is_num_str = self.is_like_numeric(name) if org_type == 'string' else False\n is_all_ints = self._all_str_are_int(self._data[name])\n is_convertable = is_num_str and is_all_ints\n if not (org_type in valid or is_convertable):\n msg = 'Cannot convert variable {} of type {} to int!'\n raise TypeError(msg.format(name, org_type))\n if self._has_categorical_data(name):\n self._meta['columns'][name].pop('values')\n self._meta['columns'][name]['type'] = 'int'\n if org_type == 'string':\n if is_all_ints:\n self._data[name] = self._data[name].apply(lambda x: int(x))\n else:\n self._data[name] = self._data[name].apply(lambda x: float(x))\n return None", "def numpy_type(sqltype):\n m = re.match(\"char\\(([0-9]+)\\)\", sqltype.strip())\n if m is not None:\n # It's a string\n return np.dtype(\"|S\"+m.group(1))\n else:\n # It's a numeric type\n if sqltype == \"integer\" or sqltype == \"int\":\n return np.int32\n elif sqltype == \"bigint\":\n return np.int64\n elif sqltype == \"real\":\n return np.float32\n elif sqltype == \"float\":\n return np.float64\n else:\n raise ValueError(\"Unsupported data type \"+sqltype)", "def db_cast(self):\n if self.is_int:\n return 'BIGINT'\n return 'TEXT'", "def sql_to_python_type(sql_type):\n if sql_type.startswith(\"CHAR(\"):\n return str\n\n if sql_type.startswith(\"INTERVAL\"):\n # Calcite will always convert to milliseconds\n # no matter what the actual interval is\n # I am not sure if this breaks somewhere,\n # but so far it works\n 
return lambda x: timedelta(milliseconds=int(x))\n\n if sql_type.startswith(\"DECIMAL(\"):\n # We use np.float64 always\n return np.float64\n\n try:\n return _SQL_TO_PYTHON[sql_type]\n except KeyError: # pragma: no cover\n raise NotImplementedError(f\"The SQL type {sql_type} is not implemented (yet)\")", "def get_column_type(cls, **kwargs: Any) -> Any:\n return sqlalchemy.SmallInteger()", "def _convert_to_integer(srs, d):\n return srs.map(lambda x: d[x])", "def _safely_castable_to_int(dt):\n int_size = np.dtype(int).itemsize\n safe = (np.issubdtype(dt, np.signedinteger) and dt.itemsize <= int_size) or (\n np.issubdtype(dt, np.unsignedinteger) and dt.itemsize < int_size\n )\n return safe", "def _handle_integer(\n *, artifacts: types.ColumnArtifacts\n) -> typing.Union[Integer, BigInteger]:\n if artifacts.open_api.format is None or artifacts.open_api.format == \"int32\":\n return Integer\n if artifacts.open_api.format == \"int64\":\n return BigInteger\n raise exceptions.FeatureNotImplementedError(\n f\"{artifacts.open_api.format} format for integer is not supported.\"\n )", "def rint(a: Number) -> int:\n return np.round(a).astype(int)", "def dblint(xa, xb, ya, yb, tck):\n tx, ty, c, kx, ky = tck\n return dfitpack.dblint(tx, ty, c, kx, ky, xa, xb, ya, yb)", "def add_int64(self, value):\n self._check_int_type(value, _INT_8BYTE_UPPERLIMIT)\n self._data += value.to_bytes(8, byteorder=\"little\")", "def test_integer(self):\n conn = self.database.connection()\n cursor = conn.cursor()\n dialect = self.database.dialect()\n dbapi = self.database.dbapi()\n query = dialect.translate('DROP TABLE test_integer')\n try:\n cursor.execute(query)\n except dbapi.Error:\n conn.rollback()\n query = dialect.translate('CREATE TABLE test_integer ' \\\n '( value INTEGER NOT NULL )')\n cursor.execute(query)\n data = []\n query = 'INSERT INTO test_integer VALUES (%s)'\n for i in range(100):\n item = random.randrange(-sys.maxint, sys.maxint)\n data.append(item)\n cursor.execute(query, (item,))\n query = 'SELECT * FROM test_integer'\n cursor.execute(query)\n result = cursor.fetchall()\n for row in result:\n item = row[0]\n assert isinstance(item, int) or isinstance(item, long)\n assert item in data\n data.remove(item)\n query = dialect.translate('DELETE FROM test_integer')\n cursor.execute(query)\n query = dialect.translate('DROP TABLE test_integer')\n cursor.execute(query)\n conn.commit()", "def cast(elem, psql_type):\n if psql_type == 'real':\n return float(format(elem, '.6g'))\n elif psql_type == 'double precision':\n return float(format(elem, '.15g'))\n elif psql_type == 'timestamp':\n if isinstance(elem, pd.Timestamp):\n return elem.to_pydatetime()\n else:\n return elem\n elif psql_type == 'text':\n if type(elem) == float:\n return \"NaN\"\n return str(elem)\n else:\n return elem", "def decode_i64(as_bytes: typing.List[int]) -> int:\n return le_bytes_to_int(as_bytes, True)", "def test_numpy_datatype_binding(conn_cnx, db_parameters):\n epoch_time = int(time.time()) * 1000000000 + 123456789\n all_data = [{\n 'tz': 'America/Los_Angeles',\n 'float': '1.79769313486e+308',\n 'epoch_time': epoch_time,\n 'current_time': np.datetime64(epoch_time, 'ns'),\n 'specific_date': np.datetime64('2005-02-25T03:30Z')\n }, {\n 'tz': 'Asia/Tokyo',\n 'float': '-1.79769313486e+308',\n 'epoch_time': epoch_time,\n 'current_time': np.datetime64(epoch_time, 'ns'),\n 'specific_date': np.datetime64('1970-12-31T05:00:00Z')\n }, {\n 'tz': 'America/New_York',\n 'float': '-1.79769313486e+308',\n 'epoch_time': epoch_time,\n 'current_time': 
np.datetime64(epoch_time, 'ns'),\n 'specific_date': np.datetime64('1969-12-31T05:00:00Z')\n }, {\n 'tz': 'UTC',\n 'float': '-1.79769313486e+308',\n 'epoch_time': epoch_time,\n 'current_time': np.datetime64(epoch_time, 'ns'),\n 'specific_date': np.datetime64('1968-11-12T07:00:00.123Z')\n }]\n try:\n with conn_cnx(use_numpy=True) as cnx:\n cnx.cursor().execute(\"\"\"\nCREATE OR REPLACE TABLE {name} (\n c1 integer, -- int8\n c2 integer, -- int16\n c3 integer, -- int32\n c4 integer, -- int64\n c5 float, -- float16\n c6 float, -- float32\n c7 float, -- float64\n c8 timestamp_ntz, -- datetime64\n c9 date, -- datetime64\n c10 timestamp_ltz, -- datetime64,\n c11 timestamp_tz) -- datetime64\n \"\"\".format(name=db_parameters['name']))\n for data in all_data:\n cnx.cursor().execute(\"\"\"\nALTER SESSION SET timezone='{tz}'\"\"\".format(tz=data['tz']))\n cnx.cursor().execute(\"\"\"\nINSERT INTO {name}(\n c1,\n c2,\n c3,\n c4,\n c5,\n c6,\n c7,\n c8,\n c9,\n c10,\n c11\n)\nVALUES(\n %s,\n %s,\n %s,\n %s,\n %s,\n %s,\n %s,\n %s,\n %s,\n %s,\n %s)\"\"\".format(\n name=db_parameters['name']), (\n np.iinfo(np.int8).max,\n np.iinfo(np.int16).max,\n np.iinfo(np.int32).max,\n np.iinfo(np.int64).max,\n np.finfo(np.float16).max,\n np.finfo(np.float32).max,\n np.float64(data['float']),\n data['current_time'],\n data['current_time'],\n data['current_time'],\n data['specific_date'],\n ))\n rec = cnx.cursor().execute(\"\"\"\nSELECT\n c1,\n c2,\n c3,\n c4,\n c5,\n c6,\n c7,\n c8,\n c9,\n c10,\n c11\n FROM {name}\"\"\".format(\n name=db_parameters['name'])).fetchone()\n assert np.int8(rec[0]) == np.iinfo(np.int8).max\n assert np.int16(rec[1]) == np.iinfo(np.int16).max\n assert np.int32(rec[2]) == np.iinfo(np.int32).max\n assert np.int64(rec[3]) == np.iinfo(np.int64).max\n assert np.float16(rec[4]) == np.finfo(np.float16).max\n assert np.float32(rec[5]) == np.finfo(np.float32).max\n assert rec[6] == np.float64(data['float'])\n assert rec[7] == data['current_time']\n assert str(rec[8]) == str(data['current_time'])[0:10]\n assert rec[9] == data['current_time']\n assert rec[10] == data['specific_date']\n cnx.cursor().execute(\"\"\"\nDELETE FROM {name}\"\"\".format(name=db_parameters['name']))\n finally:\n with conn_cnx() as cnx:\n cnx.cursor().execute(\"\"\"\n DROP TABLE IF EXISTS {name}\n \"\"\".format(name=db_parameters['name']))", "def int64_t(n):\n return int(n).to_bytes(8, byteorder='little', signed=True)" ]
[ "0.6342146", "0.62293154", "0.6208453", "0.5914053", "0.5862272", "0.5841508", "0.5763204", "0.5722632", "0.57082486", "0.5705555", "0.5675615", "0.5662397", "0.565685", "0.5637229", "0.5554309", "0.54482377", "0.5430291", "0.53955346", "0.5389029", "0.53852504", "0.53627807", "0.5356268", "0.53502095", "0.53180575", "0.5311791", "0.5292874", "0.5290642", "0.52756166", "0.5274421", "0.5262338" ]
0.7526429
0
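For context, an adapter like the one in the document above is typically registered with psycopg2 so that numpy.int64 query parameters are rendered as plain integer literals. The registration step below is an assumption drawn from the psycopg2 extensions API the query string refers to, not part of the dataset row.

import numpy as np
from psycopg2.extensions import register_adapter, AsIs

def adapt_numpy_int64(numpy_int64):
    return AsIs(numpy_int64)

# Once registered, cursor.execute("... %s", (np.int64(5),)) emits the value as a
# plain integer instead of failing with "can't adapt type 'numpy.int64'".
register_adapter(np.int64, adapt_numpy_int64)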
Gets a dict of bestfit information from the database regarding the specified form factor. Also gets some "meta" information like the associated momentum, current, and lattice size.
def get_best_fit_information(engine, form_factor_id):
    Nstates = collections.namedtuple(
        'NStates', ['n', 'no', 'm', 'mo'], defaults=(1, 0, 0, 0)
    )

    def _float_or_none(astr):
        if astr is None:
            return None
        if (astr.lower() == 'nan') or (astr.lower() == 'none'):
            return None
        return float(astr)

    query = f"""
        select
            campaign.form_factor_id,
            ens.ens_id, ens.ns,
            form_factor.momentum,
            form_factor.spin_taste_current,
            result_id,
            n_decay_ll as n, n_oscillating_ll as "no",
            n_decay_hl as m, n_oscillating_hl as mo,
            tmin_ll as tmin_src, tmax_ll as tmax_src,
            tmin_hl as tmin_snk, tmax_hl as tmax_snk,
            binsize, shrinkage, fold as do_fold,
            sign, pedestal, params
        from campaign_form_factor as campaign
        join form_factor using(form_factor_id)
        join ensemble as ens using(ens_id)
        join sign_form_factor using(ens_id, spin_taste_current)
        join result_form_factor as result
            on (result.form_factor_id = campaign.form_factor_id)
            and (result.id = campaign.result_id)
        join analysis_form_factor as analysis
            on (analysis.analysis_id = result.analysis_id)
        join reduction_form_factor as reduction
            on (reduction.reduction_id = result.reduction_id)
        where campaign.form_factor_id in ({form_factor_id});"""
    best_fit = pd.read_sql_query(query, engine)
    best_fit['params'] = best_fit['params'].apply(parse_string_dict)
    best_fit['pedestal'] = best_fit['pedestal'].apply(_float_or_none)
    best_fit['nstates'] = best_fit[['n', 'no', 'm', 'mo']].apply(
        lambda args: Nstates(*args), axis=1)
    best_fit['params'] = best_fit[['params', 'nstates']].apply(
        lambda pair: reshape_params(*pair), axis=1)
    best_fit, = best_fit.to_dict('records')  # Unpack single entry
    return best_fit
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_stats(trj_datasets, ff_form):\n\n stats_data = {}\n\n params = ff_form['hyperparams']\n stats_func = ff_func[ff_form['potential']]\n\n for key, trj in trj_datasets.items():\n \n stats_dict = {'energy':[]}\n \n for ii, (xyz, box) in enumerate(zip(trj['xyz'], trj['box'])):\n \n #a1, ar, a2, f1, fr, f2 = \n u_stats, f_stats = stats_func(xyz, box, params)\n\n stats_dict['energy'].append(u_stats)\n\n stats_data[key] = stats_dict\n \n stats_data['ff_form'] = ff_form\n \n return stats_data", "def get_recommended_formations(opponent_formation, league, limit):\n\n cursor = mc.get_db_cursor(mc.DB_NAME)\n select_query = \"\"\"SELECT formation_1 FROM recom_formation\n WHERE formation_2 = %s\n AND league = %s\n ORDER BY formation_points DESC limit %s\"\"\"\n insert_value = (opponent_formation, league, limit)\n cursor.execute(select_query, insert_value)\n formations = cursor.fetchall()\n\n # initializes a dictionary for storing the recom formations\n result_dict = {\n \"recoms\": []\n }\n # save all the formations in the dictionary\n for formation in formations:\n result_dict[\"recoms\"].append(formation[0])\n return result_dict", "def get_form_factor_data(form_factor_id, engines, apply_alias=True, sanitize=True):\n query = (\n \"SELECT ens_id, RTRIM(name, '-_fine') as BASENAME, corr_type \"\n \"FROM junction_form_factor AS junction \"\n \"JOIN correlator_n_point AS corr ON (corr.corr_id = junction.corr_id) \"\n \"WHERE (form_factor_id = {form_factor_id}) AND (name LIKE '%%fine');\"\n )\n query = query.format(form_factor_id=form_factor_id)\n dataframe = pd.read_sql_query(query, engines['postgres'])\n ens_id = dataframe['ens_id'].unique().item()\n basenames = dataframe['basename'].values\n\n # Grab a list of necessary correlators, in particular identifying the\n # source and sink 2pt functions. 
This line gives a map from the full\n # basename to a name like 'source' or 'sink'.\n aliases = alias.get_aliases(basenames)\n # Apply any further renaming, e.g., 'sink' --> 'heavy-light'\n name_map = alias.apply_naming_convention(aliases)\n data = {}\n for basename in aliases:\n key = name_map[basename] if apply_alias else basename\n try:\n data[key] = hdf5_cache.get_correlator(engines[ens_id], basename)\n except ValueError as err:\n LOGGER.warning(\"WARNING: Unable to load %s\", key)\n if sanitize:\n data, nan_rows = sanitize_data(data)\n if nan_rows:\n LOGGER.warning(\"WARNING: NaNs found while sanitizing: %s\", nan_rows)\n return data", "def scrapeFormulations(conn):\n c = conn.cursor()\n # Return the results as discussed with Adi\n query = \"\"\"SELECT f10.description, f10.landed_cost_price, f10.fob_price,\n f10.period, f10.issue_unit, country.name, country.id,\n f10.fob_currency, f10.landed_cost_currency, f10.period\n FROM form10_row AS f10\n INNER JOIN country ON f10.country = country.id\n ORDER BY f10.description, country.name\"\"\"\n c.execute(query)\n results = []\n for row in c:\n result = {}\n result['formulation'] = row[0].replace('*', '')\n result['landed_cost_price'] = row[1] or None\n result['fob_price'] = row[2] or None\n result['period'] = row[3]\n result['unit'] = row[4]\n result['country'] = row[5]\n result['country_id'] = country_codes[row[6]]\n result['fob_currency'] = row[7]\n result['landed_currency'] = row[8]\n result['period'] = int(row[9])\n results.append(result)\n return results", "def compute_derived_parameters(cls, fdict):\n cgg = fdict['cgd'] + fdict['cgs']\n return dict(\n cgg=cgg,\n cdd=fdict['cgd'] + fdict['cds'],\n vstar=2.0 * (fdict['ids'] / fdict['gm']),\n gain=fdict['gm'] / fdict['gds'],\n ft=fdict['gm'] / (2.0 * np.pi * cgg),\n )", "def compute_derived_parameters(cls, fdict):\n cgg = fdict['cgd'] + fdict['cgs'] + fdict['cgb']\n return dict(\n cgg=cgg,\n cdd=fdict['cgd'] + fdict['cds'] + fdict['cdb'],\n css=fdict['cgs'] + fdict['cds'] + fdict['csb'],\n cbb=fdict['cgb'] + fdict['cdb'] + fdict['csb'],\n vstar=2.0 * (fdict['ids'] / fdict['gm']),\n gain=fdict['gm'] / fdict['gds'],\n ft=fdict['gm'] / (2.0 * np.pi * cgg),\n )", "def fit():\n form = MedForm(request.form)\n if request.method == 'POST' and form.validate():\n\n zipcode = form.zipcode.data\n # Check the zipcode\n\n plan = form.plan.data\n medication = form.medication.data\n\n ip = str(request.environ.get('HTTP_X_REAL_IP', request.remote_addr))\n rq = Requests(**dict(user=current_user.id, ip = ip, zipcode = zipcode, plan = plan, drug = medication))\n rq.save()\n\n # Process either medicare or medicaid\n plan_type = form.plan_type.data\n try:\n if plan_type == 'medicare':\n table = get_medicare_plan(medication, plan, zipcode)\n else:\n table = get_medicaid_plan(medication, plan, zipcode, plan_type)\n\n except tools.BadPlanName as e:\n form.errors['plan_name'] = str(e)\n context = {'form': form}\n html = 'fit.html'\n\n except tools.BadLocation as e:\n form.errors['zipcode'] = str(e)\n context = {'form': form}\n html = 'fit.html'\n else:\n # You have to order the data in a list or it won't show right\n data = []\n for item in table['data']:\n row = [item[h] for h in table['heading']]\n data.append(row)\n\n context = {'data':data,\n 'head':table['heading'],\n 'drug':medication,\n 'pa': table['pa'],\n 'zipcode':zipcode,\n 'plan':plan,\n 'plan_type':form.plan_type.data,\n }\n html = 'table.html'\n\n # If its a GET see if parameters were passed\n else:\n if request.method == 'GET':\n form.zipcode.data = 
request.args.get('zipcode', \"\")\n form.plan.data = request.args.get('plan', \"\")\n form.medication.data = request.args.get('drug', \"\")\n form.plan_type.data = request.args.get('plan_type', \"medicare\")\n\n # a POST with errors\n elif form.errors:\n if 'plan_type' in form.errors:\n form.errors['plan_type'] = \"Please pick a Medicare, Medicaid, or Private plan\"\n\n context = {'form': form}\n html = 'fit.html'\n\n content = render_template(html, **context)\n return content", "def read_formations():\n\n with resource_stream('component_contribution',\n FullTrainingData.FORMATION_ENERGY_FNAME) as fp:\n formation_df = pd.read_csv(gzip.GzipFile(fileobj=fp))\n\n cids_that_dont_decompose = set(\n formation_df.loc[formation_df['decompose'] == 0, 'cid'])\n\n for col in [\"dG'0\", \"T\", \"I\", \"pH\", \"pMg\"]:\n formation_df[col] = formation_df[col].apply(float)\n\n formation_df = formation_df[~pd.isnull(formation_df[\"dG'0\"])]\n formation_df['reaction'] = formation_df['cid'].apply(\n lambda c: Reaction({c: 1}))\n\n formation_df['balance'] = False\n formation_df['description'] = formation_df['name'] + ' formation'\n formation_df.rename(columns={'compound_ref': 'reference'},\n inplace=True)\n formation_df.drop(['name', 'cid', 'remark', 'decompose'],\n axis=1, inplace=True)\n\n logger.debug('Successfully added %d formation energies' %\n formation_df.shape[0])\n return formation_df, cids_that_dont_decompose", "def content_fitting():\n\n all_functions = {\n \"sigmoid_function\": \"return sigmoid function for fitting or plotting\",\n \"fit_curve\": \"returns values r2 score and fitting parameters for a single sample\",\n \"fitting_column\": \"returns r2_scores, fitting_parameters\",\n \"compute_r2_score\": \"returns array of r2_scores\",\n \"compute_fitting_function\": \"returns df with columns \\\n [fitting_function+'_r2'] and fitting_function\",\n \"compare_fitting_functions\": \"returns df with columns better_fitting, \\\n displays df_best.loc[fitting_function, min, max, \\\n r2>0, r2>0.8, r2>0.9, r2>0.99\",\n }\n return all_functions", "def _get_fitted_params(self):\n return {}", "def predict_form_factors(features, sys_cl):\n classifiers = get_classification_models()\n # evaluate population form factors\n cl_models_to_use = classifiers[sys_cl]\n form_factors = {}\n for ipop, struct in enumerate(sys_cl.split('__')):\n pop_id = 'pop{}'.format(ipop)\n form_result = cl_models_to_use[pop_id]['form'].predict( \n cl_models_to_use[pop_id]['form'].get_x_array(features))\n form_factors[pop_id+'_form'] = (form_result[0][0], form_result[1][0]) \n return form_factors", "def form_factor(self) -> SmartSsdFormFactor:\n return self._form_factor", "def get_formations_options(league):\n\n cursor = mc.get_db_cursor(mc.DB_NAME)\n select_query = \"\"\"SELECT DISTINCT(formation_2) FROM recom_formation\n WHERE league = %s;\"\"\"\n cursor.execute(select_query, (league, ))\n formations = cursor.fetchall()\n\n # initializes a dictionary for storing the different formations\n result_dict = {\n \"values\": []\n }\n # save all the different formations in the dictionary\n for formation in formations:\n result_dict[\"values\"].append(formation[0])\n return result_dict", "def get_mof_descriptors(self) -> dict:\n result_dict = OrderedDict(\n (\n (\"name\", self.name),\n (\"graph_hash\", self.graph_hash),\n (\"path\", self._filename),\n (\"density\", self.density),\n (\"has_oms\", self.has_oms),\n (\"has_carbon\", self.has_carbon),\n (\"has_hydrogen\", self.has_hydrogen),\n (\"has_atomic_overlaps\", self.has_atomic_overlaps),\n 
(\"has_overcoordinated_c\", self.has_overvalent_c),\n (\"has_overcoordinated_n\", self.has_overvalent_n),\n (\"has_overcoordinated_h\", self.has_overvalent_h),\n (\"has_undercoordinated_c\", self.has_undercoordinated_c),\n (\"has_undercoordinated_n\", self.has_undercoordinated_n),\n (\"has_metal\", self.has_metal),\n (\"has_lone_atom\", self.has_lone_atom),\n (\"has_lone_molecule\", self.has_lone_molecule),\n (\"has_high_charges\", self.has_high_charges),\n (\"has_undercoordinated_metal\", self.has_undercoordinated_metal),\n )\n )\n return result_dict", "def get_list_of_forms(Nmin, Nmax, compute=False):\n FL = dict()\n try:\n for N in range(Nmin, Nmax):\n print(\"N={0}\".format(N))\n M = VVHarmonicWeakMaassForms(int(N), -0.5, 75, dr=False, verbose=1)\n\n print(\"minPP={0}\".format(M.smallest_pp()))\n print(\"Compute form on {0}\".format(M))\n # s=\"FN\"+str(N)+\"-DR-D\"+str(F.get_principal_part(str=True))+\".sobj\"\n s = \"FN\" + str(N) + \"-DR-D\" + str(list(M.smallest_pp().keys())[0]) + \".sobj\"\n print(\"trying :{0}\".format(s))\n try:\n F = load(s)\n except IOError:\n # the file did not exist\n if(compute):\n F = M.get_element(maxD=500)\n save(F, s)\n else:\n continue\n FL[N] = F\n except KeyboardInterrupt:\n pass\n return FL", "def fit(self) -> None:\n\n levels = self.levels\n TSs = GetAggregateTS(self.data).aggregate(levels)\n models = {}\n residuals = {}\n fcsts = {}\n for bm in self.baseModels:\n model_name = bm.model_name\n if model_name is None: # only residuals and fcsts are provided\n models[bm.level] = None\n residuals[bm.level] = bm.residuals\n fcsts[bm.level] = bm.fcsts\n else:\n m = BASE_MODELS[model_name](\n data=TSs[bm.level],\n params=bm.model_params,\n )\n m.fit()\n models[bm.level] = m\n self.models = models\n self.info_fcsts = fcsts\n self.info_residuals = residuals", "def get_best_model_configs(self):\n self.best_models = {}\n with self.database:\n cur = self.database.cursor()\n for model in self.active_models:\n if self.tuning_depth == 'minimal':\n a = cur.execute(\"SELECT MAX(accuracy),unique_id from model_performance_results\")\n elif self.tuning_depth == 'normal':\n a = cur.execute(\"SELECT MAX(accuracy),unique_id from model_performance_results WHERE model = ?\",\n (model,))\n elif self.tuning_depth == 'maximal':\n a = cur.execute(\"SELECT MAX(accuracy),unique_id from model_performance_results WHERE model = ?\",\n (model,))\n # TODO not implimented, same as normal\n self.best_models[model] = list(a)[0][0]", "def getHFtableData(self, ep=None):\n HFdict = {}\n if self.hfMode == 'limiter':\n HFdict['Heat Flux Mode'] = 'Limiter'\n if self.lqCNmode == 'eich':\n HFdict[\"\\u03BB Near Mode\"] = 'Eich Regression #15'\n HFdict[\"Common Region Near Heat Flux Width (\\u03BBq CN) [mm]\"] = self.lqEich\n else:\n HFdict[\"\\u03BB Near Mode\"] = 'User Defined'\n HFdict[\"Common Region Near Heat Flux Width (\\u03BBq CN) [mm]\"] = self.lqCN\n if self.lqCFmode == 'horacek':\n HFdict[\"\\u03BB Far Mode\"] = 'Horacek Figure 6a'\n HFdict[\"Common Region Far Heat Flux Width (\\u03BBq CF) [mm]\"] = self.lqCF\n else:\n HFdict[\"\\u03BB Far Mode\"] = 'User Defined'\n HFdict[\"Common Region Far Heat Flux Width (\\u03BBq CF) [mm]\"] = self.lqCF\n\n HFdict[\"Common Region Near Power Fraction\"] = self.fracCN\n HFdict[\"Common Region Far Power Fraction\"] = self.fracCF\n\n elif self.hfMode == 'multiExp':\n HFdict['Heat Flux Mode'] = 'Multiple (4) Exponentials'\n if self.lqCNmode == 'eich':\n HFdict[\"\\u03BB Near Mode\"] = 'Eich Regression #15'\n HFdict[\"Common Region Near Heat Flux Width 
(\\u03BBq CN) [mm]\"] = self.lqEich\n else:\n HFdict[\"\\u03BB Near Mode\"] = 'User Defined'\n HFdict[\"Common Region Near Heat Flux Width (\\u03BBq CN) [mm]\"] = self.lqCN\n\n if self.lqCFmode == 'horacek':\n HFdict[\"\\u03BB Far Mode\"] = 'Horacek Figure 6a'\n else:\n HFdict[\"\\u03BB Far Mode\"] = 'User Defined'\n\n\n\n HFdict[\"Common Region Far Heat Flux Width (\\u03BBq CF) [mm]\"] = self.lqCF\n HFdict[\"Private Region Near Heat Flux Width (\\u03BBq PN) [mm]\"] = self.lqPN\n HFdict[\"Private Region Far Heat Flux Width (\\u03BBq PF) [mm]\"] = self.lqPF\n HFdict[\"Common Region Near Power Fraction\"] = self.fracCN\n HFdict[\"Common Region Far Power Fraction\"] = self.fracCF\n HFdict[\"Private Region Near Power Fraction\"] = self.fracPN\n HFdict[\"Private Region Far Power Fraction\"] = self.fracPF\n\n elif self.hfMode == 'qFile':\n HFdict[\"Heat Flux Mode\"] = 'Read HF from qFile'\n HFdict['qFilePath'] = self.qFilePath\n HFdict['qFileTag'] = self.qFileTag\n\n elif self.hfMode == 'eich':\n HFdict['Heat Flux Mode'] = 'Gaussian Spreading'\n if self.lqCNmode == 'eich':\n HFdict[\"\\u03BB Mode\"] = 'Eich Regression #15'\n HFdict[\"Heat Flux Width (\\u03BBq) [mm]\"] = self.lqEich\n else:\n HFdict[\"\\u03BB Mode\"] = 'User Defined'\n HFdict[\"Heat Flux Width (\\u03BBq) [mm]\"] = self.lqCN\n\n if self.SMode == 'makowski':\n HFdict['Greenwald Density Fraction'] = self.fG\n HFdict['Spreading (S) Mode'] = 'Makowski Figure 6'\n else:\n HFdict['Spreading (S) Mode'] = 'User Defined'\n HFdict['Greenwald Density Fraction'] = 'Only used for Makowski S Mode'\n HFdict['S [mm]'] = self.S\n HFdict['Background Heat Flux'] = self.qBG\n\n if self.hfMode != 'qFile':\n HFdict[\"Power Injected (Pinj) [MW]\"] = self.Pinj\n HFdict[\"Radiated Fraction of Injected Power\"] = self.coreRadFrac\n HFdict[\"Power Crossing Separatrix (Psol) [MW]\"] = self.Psol\n HFdict[\"Upper Inner Divertor Power Fraction\"] = self.fracUI\n HFdict[\"Upper Outer Divertor Power Fraction\"] = self.fracUO\n HFdict[\"Lower Inner Divertor Power Fraction\"] = self.fracLI\n HFdict[\"Lower Outer Divertor Power Fraction\"] = self.fracLO\n\n return HFdict", "def fm(self):\n return None if self.accumulation is None else self.accumulation[\"fm\"]", "def getOptimalParams(self):\n\t\t# Load calibration chain and find optimal for like1\n\t\tcal_data = pd.read_csv(self.database_path, sep=',')\n\t\tparams = cal_data.ix[cal_data['like1'].idxmax()].to_dict()\n\t\tcost = params['like1']\n\t\t# reformat parameters to match original naming\n\t\tparams_reformatted = {}\n\t\tfor k, p in self.cal_params.items():\n\t\t\tparams_reformatted[k] = params['par'+k]\n\n\t\treturn params_reformatted, cost", "def get_fitted_params(self):\n self.check_is_fitted()\n return {\n name: self._fitted_forecaster.params.get(name)\n for name in self._get_fitted_param_names()\n }", "def preview_formcalc(self, get):\r\n\r\n result = {'preview': '',\r\n 'error': ''}\r\n\r\n try:\r\n formula = get['formula']\r\n except KeyError:\r\n result['error'] = \"No formula specified.\"\r\n return result\r\n\r\n result['request_start'] = int(get.get('request_start', 0))\r\n\r\n try:\r\n # TODO add references to valid variables and functions\r\n # At some point, we might want to mark invalid variables as red\r\n # or something, and this is where we would need to pass those in.\r\n result['preview'] = latex_preview(formula)\r\n except pyparsing.ParseException as err:\r\n result['error'] = \"Sorry, couldn't parse formula\"\r\n result['formula'] = formula\r\n except Exception:\r\n # this is unexpected, 
so log\r\n log.warning(\r\n \"Error while previewing formula\", exc_info=True\r\n )\r\n result['error'] = \"Error while rendering preview\"\r\n\r\n return result", "def get_bests(self):\n set_names = [\"training\", \"hp_selection\", \"validation\"]\n run_tec_conf_set = recursivedict()\n validation = self._campaign_configuration['General']['validation']\n hp_selection = self._campaign_configuration['General']['hp_selection']\n if (validation, hp_selection) in {(\"All\", \"All\"), (\"Extrapolation\", \"All\"), (\"All\", \"HoldOut\"), (\"HoldOut\", \"All\"), (\"HoldOut\", \"HoldOut\"), (\"Extrapolation\", \"HoldOut\")}:\n # For each run, for each technique the best configuration\n run_tec_best_conf = recursivedict()\n\n # Hyperparameter search\n for conf in self._exp_confs:\n run = int(conf.get_signature()[0].replace(\"run_\", \"\"))\n technique = conf.technique\n run_tec_conf_set[run][technique][str(conf.get_signature()[4:])] = conf.mapes\n # First experiment for this technique or better than the current best\n if technique not in run_tec_best_conf[run] or conf.mapes[\"hp_selection\"] < run_tec_best_conf[run][technique].mapes[\"hp_selection\"]:\n run_tec_best_conf[run][technique] = conf\n\n # Print results for each run\n for run in range(0, self._campaign_configuration['General']['run_num']):\n self._logger.info(\"-->Printing results for run %s\", str(run))\n overall_run_best = None\n # Print data of single techniques\n for technique in run_tec_best_conf[run]:\n temp = run_tec_best_conf[run][technique]\n self._logger.info(\"---Best result for %s - Configuration is %s - (Training MAPE is %f - HP Selection MAPE is %f) - Validation MAPE is %f\", technique, temp.get_signature()[4:], temp.mapes[\"training\"], temp.mapes[\"hp_selection\"], temp.mapes[\"validation\"])\n\n # Compute which is the best technique\n if not overall_run_best or temp.mapes[\"hp_selection\"] < overall_run_best.mapes[\"hp_selection\"]:\n overall_run_best = temp\n best_model_description = overall_run_best.print_model()\n self._logger.info(\"<--Overall best result is %s %s - (Training MAPE is %f - HP Selection MAPE is %f) - Validation MAPE is %f\", overall_run_best.get_signature()[3:], \"(\" + best_model_description + \")\" if best_model_description else \"\", overall_run_best.mapes[\"training\"], overall_run_best.mapes[\"hp_selection\"], overall_run_best.mapes[\"validation\"])\n\n elif (validation, hp_selection) in {(\"KFold\", \"All\"), (\"KFold\", \"HoldOut\")}:\n folds = float(self._campaign_configuration['General']['folds'])\n # For each run, for each fold, for each technique, the best configuration\n run_fold_tec_best_conf = recursivedict()\n\n # Hyperparameter search inside each fold\n for conf in self._exp_confs:\n run = int(conf.get_signature()[0].replace(\"run_\", \"\"))\n fold = int(conf.get_signature()[1].replace(\"f\", \"\"))\n technique = conf.technique\n if \"hp_selection\" not in run_tec_conf_set[run][technique][str(conf.get_signature_string()[4:])]:\n for set_name in set_names:\n run_tec_conf_set[run][technique][str(conf.get_signature_string()[4:])][set_name] = 0\n for set_name in set_names:\n run_tec_conf_set[run][technique][str(conf.get_signature_string()[4:])][set_name] = run_tec_conf_set[run][technique][str(conf.get_signature_string()[4:])][set_name] + conf.mapes[set_name] / folds\n # First experiment for this fold+technique or better than the current best\n if technique not in run_fold_tec_best_conf[run][fold] or conf.mapes[\"hp_selection\"] < 
run_fold_tec_best_conf[run][fold][technique].mapes[\"hp_selection\"]:\n run_fold_tec_best_conf[run][fold][technique] = conf\n\n # Aggregate different folds (only the value of the mapes)\n run_tec_set = recursivedict()\n for run in run_fold_tec_best_conf:\n for fold in run_fold_tec_best_conf[run]:\n for tec in run_fold_tec_best_conf[run][fold]:\n if \"hp_selection\" not in run_tec_set[run][technique]:\n for set_name in set_names:\n run_tec_set[run][tec][set_name] = 0\n for set_name in set_names:\n run_tec_set[run][tec][set_name] = run_fold_tec_best_conf[run][fold][tec].mapes[set_name]\n # Print results for each run\n for run in range(0, self._campaign_configuration['General']['run_num']):\n self._logger.info(\"Printing results for run %s\", str(run))\n overall_run_best = ()\n # Print data of single techniques\n for technique in run_tec_set[run]:\n self._logger.info(\"---Best result for %s - (Training MAPE is %f - HP Selection MAPE is %f) - Validation MAPE is %f\", technique, run_tec_set[run][technique][\"training\"], run_tec_set[run][technique][\"hp_selection\"], run_tec_set[run][technique][\"validation\"])\n\n # Compute which is the best technique\n if not overall_run_best or run_tec_set[run][technique][\"hp_selection\"] < overall_run_best[1][\"hp_selection\"]:\n overall_run_best = (technique, run_tec_set[run][technique])\n\n self._logger.info(\"---Overall best result is %s - (Training MAPE is %f - HP Selection MAPE is %f) - Validation MAPE is %f\", overall_run_best[0], overall_run_best[1][\"training\"], overall_run_best[1][\"hp_selection\"], overall_run_best[1][\"validation\"])\n\n # Overall best will contain as first argument the technique with the best (across runs) average (across folds) mape on validation; now we consider on all the runs and on all the folds the configuraiton of this technique with best validation mape\n\n elif (validation, hp_selection) in {(\"All\", \"KFold\"), (\"HoldOut\", \"KFold\"), (\"Extrapolation\", \"KFold\")}:\n folds = float(self._campaign_configuration['General']['folds'])\n # For each run, for each technique, for each configuration, the aggregated mape\n run_tec_conf_set = recursivedict()\n\n # Hyperparameter search aggregating over folders\n for conf in self._exp_confs:\n run = int(conf.get_signature()[0].replace(\"run_\", \"\"))\n fold = int(conf.get_signature()[2].replace(\"f\", \"\"))\n technique = conf.technique\n configuration = str(conf.get_signature()[4:])\n if \"hp_selection\" not in run_tec_conf_set[run][technique][configuration]:\n for set_name in set_names:\n run_tec_conf_set[run][technique][configuration][set_name] = 0\n for set_name in set_names:\n run_tec_conf_set[run][technique][configuration][set_name] = run_tec_conf_set[run][technique][configuration][set_name] + conf.mapes[set_name] / folds\n\n # Select the best configuration for each technique across different folders\n run_tec_best_conf = recursivedict()\n for run in run_tec_conf_set:\n for tec in run_tec_conf_set[run]:\n for conf in run_tec_conf_set[run][tec]:\n if tec not in run_tec_best_conf[run] or run_tec_conf_set[run][tec][conf][\"hp_selection\"] < run_tec_best_conf[run][tec][1][\"hp_selection\"]:\n run_tec_best_conf[run][tec] = (conf, run_tec_conf_set[run][tec][conf])\n\n # Print results for each run\n for run in range(0, self._campaign_configuration['General']['run_num']):\n self._logger.info(\"Printing results for run %s\", run)\n overall_run_best = () # (technique, configuration, mapes)\n # Print data of single techniques\n for technique in run_tec_best_conf[run]:\n temp = 
run_tec_best_conf[run][technique]\n self._logger.info(\"---Best result for %s - Configuration is %s - (Training MAPE is %f - HP Selection MAPE is %f) - Validation MAPE is %f\", technique, temp[0], temp[1][\"training\"], temp[1][\"hp_selection\"], temp[1][\"validation\"])\n\n # Compute which is the best technique\n if not overall_run_best or temp[1][\"hp_selection\"] < overall_run_best[2][\"hp_selection\"]:\n overall_run_best = (technique, temp[0], temp[1])\n\n self._logger.info(\"---Overall best result is %s %s - (Training MAPE is %f - HP Selection MAPE is %f) - Validation MAPE is %f\", overall_run_best[0], overall_run_best[1], overall_run_best[2][\"training\"], overall_run_best[2][\"hp_selection\"], overall_run_best[2][\"validation\"])\n\n elif (validation, hp_selection) in {(\"KFold\", \"KFold\")}:\n folds = float(self._campaign_configuration['General']['folds'])\n # For each run, for each external fold, for each technique, the aggregated mape\n run_efold_tec_conf_set = recursivedict()\n\n # Hyperparameter search aggregating over internal folders\n for conf in self._exp_confs:\n run = int(conf.get_signature()[0].replace(\"run_\", \"\"))\n ext_fold = int(conf.get_signature()[2].replace(\"f\", \"\"))\n technique = conf.technique\n configuration = str(conf.get_signature()[4:])\n if \"hp_selection\" not in run_tec_conf_set[run][technique][configuration]:\n for set_name in set_names:\n run_tec_conf_set[run][technique][configuration][set_name] = 0\n for set_name in set_names:\n run_tec_conf_set[run][technique][configuration][set_name] = run_tec_conf_set[run][technique][configuration][set_name] + (conf.mapes[set_name] / (folds * folds))\n if configuration not in run_efold_tec_conf_set[run][ext_fold][technique]:\n for set_name in set_names:\n run_efold_tec_conf_set[run][ext_fold][technique][configuration][set_name] = 0\n for set_name in set_names:\n run_efold_tec_conf_set[run][ext_fold][technique][configuration][set_name] = run_efold_tec_conf_set[run][ext_fold][technique][configuration][set_name] + (conf.mapes[set_name] / (folds * folds))\n\n # Select the best configuration for each technique in each external fold across different internal folders\n run_efold_tec_best_conf = recursivedict()\n for run in run_efold_tec_conf_set:\n for efold in run_efold_tec_conf_set[run]:\n for tec in run_efold_tec_conf_set[run][efold]:\n for conf in run_efold_tec_conf_set[run][efold][tec]:\n if conf not in run_efold_tec_best_conf[run][efold][tec] or run_efold_tec_conf_set[run][efold][tec][conf][\"hp_selection\"] < run_efold_tec_best_conf[run][efold][tec][1][\"hp_selection\"]:\n run_efold_tec_best_conf[run][efold][tec] = (conf, run_efold_tec_conf_set[run][efold][tec][conf], run_efold_tec_conf_set[run][efold][tec][conf])\n\n # Aggregate on external folds\n run_tec_set = recursivedict()\n for run in run_efold_tec_best_conf:\n for efold in run_efold_tec_best_conf[run]:\n for tec in run_efold_tec_best_conf[run][efold]:\n if \"hp_selection\" not in run_tec_set[run][tec]:\n for set_name in set_names:\n run_tec_set[run][tec][set_name] = 0\n for set_name in set_names:\n run_tec_set[run][tec][set_name] = run_tec_set[run][tec][set_name] + run_efold_tec_best_conf[run][efold][tec][1][set_name]\n\n # Print results for each run\n for run in range(0, self._campaign_configuration['General']['run_num']):\n self._logger.info(\"Printing results for run %s\", run)\n overall_run_best = ()\n # Print data of single techniques\n for technique in run_tec_set[run]:\n self._logger.info(\"---Best result for %s - (Training MAPE is %f - HP 
Selection MAPE is %f) - Validation MAPE is %f\", technique, run_tec_set[run][technique][\"training\"], run_tec_set[run][technique][\"hp_selection\"], run_tec_set[run][technique][\"validation\"])\n\n # Compute which is the best technique\n if not overall_run_best or run_tec_set[run][technique][\"hp_selection\"] < overall_run_best[1][\"hp_selection\"]:\n overall_run_best = (technique, run_tec_set[run][technique])\n\n self._logger.info(\"---Overall best result is %s - (Training MAPE is %f - HP Selection MAPE is %f) - Validation MAPE is %f\", overall_run_best[0], overall_run_best[1][\"training\"], overall_run_best[1][\"hp_selection\"], overall_run_best[1][\"validation\"])\n\n else:\n self._logger.error(\"Unexpected combination: %s\", str((validation, hp_selection)))\n sys.exit(1)\n best_confs = {}\n best_technique = None\n for conf in self._exp_confs:\n technique = conf.technique\n if technique not in best_confs or conf.mapes[\"validation\"] < best_confs[technique].mapes[\"validation\"]:\n best_confs[technique] = conf\n for technique in best_confs:\n if not best_technique or best_confs[technique].mapes[\"validation\"] < best_confs[best_technique].mapes[\"validation\"]:\n best_technique = technique\n if bool(self._campaign_configuration['General']['details']):\n for run in run_tec_conf_set:\n for tec in run_tec_conf_set[run]:\n for conf in run_tec_conf_set[run][tec]:\n assert \"hp_selection\" in run_tec_conf_set[run][tec][conf]\n assert \"validation\" in run_tec_conf_set[run][tec][conf], \"training MAPE not found for \" + str(run) + str(tec) + str(conf)\n self._logger.info(\"Run %s - Technique %s - Conf %s - Training MAPE %f - Test MAPE %f\", str(run), ec.enum_to_configuration_label[tec], str(conf), run_tec_conf_set[run][tec][conf][\"hp_selection\"], run_tec_conf_set[run][tec][conf][\"validation\"])\n return best_confs, best_technique", "def predict_settings(features, sys_cl, form_factors):\n classifiers = get_classification_models()\n cl_models_to_use = classifiers[sys_cl]\n settings = {}\n for ipop, struct in enumerate(sys_cl.split('__')):\n pop_id = 'pop{}'.format(ipop)\n ff_nm = form_factors[pop_id+'_form'][0]\n\n # evaluate any modelable settings for this structure\n for stg_nm in xrsdefs.modelable_structure_settings[struct]:\n stg_result = cl_models_to_use[pop_id][stg_nm].predict(\n cl_models_to_use[pop_id][stg_nm].get_x_array(features))\n settings[pop_id+'_'+stg_nm] = (stg_result[0][0], stg_result[1][0]) \n\n # evaluate any modelable settings for this form factor\n for stg_nm in xrsdefs.modelable_form_factor_settings[ff_nm]:\n stg_result = cl_models_to_use[pop_id][ff_nm][stg_nm].predict(\n cl_models_to_use[pop_id][ff_nm][stg_nm].get_x_array(features))\n settings[pop_id+'_'+stg_nm] = (stg_result[0][0], stg_result[1][0]) \n return settings", "def compute_ideal_kmer(fqdict):\n res = {}\n for i in fqdict:\n species = fqdict[i][\"species\"]\n try: val = res[species]\n except KeyError: val = float(\"inf\") #init new species in result\n minlen = min(fqdict[i][\"avgLengths\"])\n if fqdict[i][\"is_paired_end\"]: minlen = minlen/2 #integer division\n res[species] = min(val,minlen)\n for i in res:\n res[i] = res[i]/2\n if res[i] % 2 == 0: res[i] -= 1 #force odd number\n if res[i]>=31: res[i] = None\n return res", "def info(self):\n meta = {\n \"name\": self.name,\n \"description\": self.description,\n \"version\": self.version,\n \"labels\": self.labels,\n \"models\": {k: v.info() for k, v in self._infers.items() if v.is_valid()},\n \"trainers\": {k: v.info() for k, v in self._trainers.items()},\n 
\"strategies\": {k: v.info() for k, v in self._strategies.items()},\n \"scoring\": {k: v.info() for k, v in self._scoring_methods.items()},\n \"train_stats\": {k: v.stats() for k, v in self._trainers.items()},\n \"datastore\": self._datastore.status(),\n }\n\n # If labels are not provided, aggregate from all individual infers\n if not self.labels:\n merged = []\n for labels in [v.get(\"labels\", []) for v in meta[\"models\"].values()]:\n if labels and isinstance(labels, dict):\n labels = [k for k, _ in sorted(labels.items(), key=lambda item: item[1])] # type: ignore\n for label in labels:\n if label not in merged:\n merged.append(label)\n meta[\"labels\"] = merged\n\n return meta", "def get_process_schema():\n dictio = {}\n # reads the session\n session = request.args.get('session', type=str)\n # reads the requested process name\n process = request.args.get('process', default='receipt', type=str)\n if check_session_validity(session):\n user = get_user_from_session(session)\n if lh.check_user_log_visibility(user, process):\n Commons.semaphore_matplot.acquire()\n try:\n # reads the decoration\n decoration = request.args.get('decoration', default='freq', type=str)\n # reads the typeOfModel\n type_of_model = request.args.get('typeOfModel', default='dfg', type=str)\n # reads the simplicity\n simplicity = request.args.get('simplicity', default=0.6, type=float)\n variant = type_of_model + \"_\" + decoration\n parameters = {\"decreasingFactor\": simplicity}\n handler = lh.get_handler_for_process_and_session(process, session)\n filters_chain = handler.get_filters_chain_repr()\n ps_repr = process+\"@@\"+variant+\"@@\"+str(simplicity)+\"@@\"+filters_chain\n saved_obj = lh.get_object_memory(ps_repr)\n if saved_obj is not None:\n base64 = saved_obj[0]\n model = saved_obj[1]\n format = saved_obj[2]\n this_handler = saved_obj[3]\n else:\n base64, model, format, this_handler = handler.get_schema(\n variant=variant,\n parameters=parameters)\n lh.save_object_memory(ps_repr, [base64, model, format, this_handler])\n if model is not None:\n model = model.decode('utf-8')\n dictio = {\"base64\": base64.decode('utf-8'), \"model\": model, \"format\": format, \"handler\": this_handler}\n except:\n pass\n Commons.semaphore_matplot.release()\n ret = jsonify(dictio)\n return ret", "def fittingOptionGetter(self) -> Optional[FittingOptions]:\n if DEBUG:\n print(\"GUI...: \", 'getter in gui called')\n # get the current model selected\n model = self.getCurrentModel()\n if model is None:\n return None\n # get the parameters for current model\n parameters = lmParameters()\n for i in range(self.param_table.rowCount()):\n table_item = self.param_table.verticalHeaderItem(i)\n assert isinstance(table_item, QtWidgets.QTableWidgetItem)\n param_name = table_item.text()\n param = lmParameter(param_name)\n item0 = self.param_table.cellWidget(i, 0)\n item1 = self.param_table.cellWidget(i, 1)\n item2 = self.param_table.cellWidget(i, 2)\n item3 = self.param_table.cellWidget(i, 3)\n assert isinstance(item0, QtWidgets.QCheckBox)\n assert isinstance(item1, OptionSpinbox)\n assert isinstance(item2, NumberInput)\n assert isinstance(item3, NumberInput)\n param.vary = not item0.isChecked()\n param.value = item1.value()\n param.min = item2.value()\n param.max = item3.value()\n parameters[param_name] = param\n\n fitting_options = FittingOptions(model, parameters, self.dry_run)\n if DEBUG:\n print(\"GUI...: \", 'getter in gui got', fitting_options)\n return fitting_options", "def get_form_distribution(articles, the_form):\n instances = 
get_inst_with_counts(articles, the_form)\n return sorted(instances.items(), key=lambda x: x[1], reverse=True)", "def calcFSIC(self, filter):\n # List of all superset filters\n superSetFilters = self.superSetFilters(filter)\n\n fsic = {}\n for i in superSetFilters:\n fsic = self.giveMaxDict([fsic, self.syncDataStructure[i]])\n return fsic" ]
[ "0.5544596", "0.54875684", "0.5375364", "0.5314797", "0.5159251", "0.51468754", "0.51262325", "0.5124719", "0.50992376", "0.5055376", "0.5029532", "0.5029249", "0.49983555", "0.4981368", "0.4980224", "0.49532855", "0.49397266", "0.4906085", "0.48953032", "0.48906365", "0.48836026", "0.4882673", "0.4843186", "0.48415104", "0.48413715", "0.4830175", "0.48076466", "0.48036355", "0.47884378", "0.47757092" ]
0.75372094
0
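The document in the record above ends by packing named boolean sanity checks (has_overcoordinated_c, has_lone_atom, and so on) into a result dictionary. A minimal sketch of consuming such a dictionary follows; the helper name and the literal check values are assumptions added for illustration, not part of the record.

def report_failed_checks(result_dict):
    # Collect the names of the checks whose value is truthy, i.e. a problem was flagged.
    failed = [name for name, flag in result_dict.items() if flag]
    return "all checks passed" if not failed else "failed: " + ", ".join(failed)

# Hypothetical dictionary shaped like the one the document above returns.
print(report_failed_checks({"has_overcoordinated_c": False, "has_lone_atom": True}))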
Locates rows in which NaNs appear.
def locate_nan_rows(arr): # Count the number of NaNs in each row nan_counts = np.sum(~np.isfinite(arr), axis=1) # Trigger on a NaN appearing anywhere in a line/row nans, = np.where(nan_counts > 1) return frozenset(nans)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _nan_cells(traces):\n # Find all cells with NaNs\n nancells = []\n ncells = -1\n for cs in traces:\n if len(traces[cs]) > 0:\n ncells = np.shape(traces[cs])[1]\n ns = np.sum(np.sum(np.invert(np.isfinite(\n traces[cs])), axis=2), axis=0)\n vals = np.arange(ncells)\n nancells.extend(vals[ns > 0])\n\n # Set _mask_cells if it hasn't been set\n out = np.zeros(ncells, dtype=bool)\n\n # Convert nancells to a list of good cells\n nancells = np.array(list(set(nancells)))\n if len(nancells) > 0:\n print('Warning: %i cells have NaNs'%len(nancells))\n out[nancells] = True\n\n return out", "def check_and_interpolate_nans(df):\n nan_count = df.isna().sum().sum()\n if nan_count > 0:\n df.interpolate(method='linear', inplace=True)\n return df", "def columns_with_na_values(data):\n aux = data.isna().sum() > 0\n return aux.index[aux.values].values", "def checkNaN(data):\n if data.isnull().values.any():\n N = data.isnull().sum().sum()\n print(\"There are {} missing values.\".format(N))", "def is_nan(self, row_data):\n return math.isnan(row_data)", "def show_nan(df):\n nan_df = df[(~df['tweet_user_location'].str.lower().isin(\n [x.lower() for x in LOCATION_DISCARD])) & df['geonameid'].isnull()]\n print(f'Number of NaNs: {len(nan_df.index)}')\n return nan_df", "def find_first_non_nan(array):\n for index, value in enumerate(array):\n if not np.isnan(value):\n return index", "def check_missing_values(col):\n return np.sum(np.isnan(col))", "def checkfornan(chosen_df):\n if not chosen_df.isnull().values.any():\n raise ValueError('NaN in DataFrame')", "def removeNans(data):\n for i in data[:]:\n ind = data.index(i)\n for j in i:\n if np.isnan(j):\n data.remove(i)\n break\n return data", "def get_nan_rows(data_dict):\n\n counts = dict.fromkeys(data_dict.itervalues().next().keys(), 0)\n for record in data_dict:\n person = data_dict[record]\n for field in person:\n if person[field] == 'NaN':\n counts[field] += 1\n\n print(\"Number of NaN rows in the data: \")\n pprint(counts)\n print_separator_line", "def nan_value(data):\n return data.isnull().any()", "def checkNaN(data_dict):\n for k, v in data_dict.iteritems():\n mark = True\n for feature, value in v.iteritems():\n if (value != 'NaN') and (feature != 'poi'):\n mark = False\n break\n if mark:\n print k\n print v['poi']", "def na_complain(X):\n na_values_present = np.isnan(X).sum()\n if na_values_present:\n raise ValueError(\"Na's found in data matrix.\")", "def cnan(x):\n if np.isnan(x).sum()>0:\n import pdb\n pdb.set_trace()", "def isnan(self):\n return self.isAny( (lambda x: np.isnan(x)) )", "def get_missing(self):\n missing_values = self.df[self.col_name].isnull().sum()\n return missing_values", "def remove_nans(arr):\n not_nan = [i for i in range(len(arr)) if not np.isnan(arr[i])]\n\n return not_nan, arr[not_nan]", "def get_nan_idx(column, df):\n return df[df[column].isna()].index.values", "def find_nan_in_fits():\n # get nan values\n mynans = []\n for i in range(1000):\n dat = getdata('stamp_0/stamp_%d.fits.gz' % i)\n mysum = np.sum(dat)\n #print(mysum)\n if np.isnan(mysum):\n mynans.append(i)\n print('stamp_%d/stamp_%d.fits.gz ' % (i,k) , 'has sum = ', mysum)\n \n return mynans", "def fill_nans(data):\n for col in data.columns:\n data[col].fillna(-999, inplace=True)", "def purgeNanEveryWhere(df):\n #Row-wise dropping\n toDrop = np.array([])\n for i in range(df.shape[0]):\n if( np.sum ( pd.isnull(df.iloc[i]) ) == df.shape[1]-1 ):\n toDrop= np.append(toDrop,i)\n df.drop(df.index[toDrop.astype(int)],inplace=True) \n #Column-wise dropping\n for col in 
df.columns:\n arr = pd.notnull(df[col])\n nnan = np.sum(arr) \n if (nnan == df.shape[1]):\n df.drop(col,inplace=True,axis=1)\n return df", "def where_na_like(l):\n bool_index = np.array(map(lambda x: np.isinf(x) or \\\n pandas.isnull(x), l))\n return np.where(bool_index)[0]", "def _nan_data(data, to_nan=0.2):\n # Number of values to be NaNed as int\n to_nan = int(len(data) * to_nan)\n # Existing NaN's as indicies\n existing_nans = data[data.isnull() == True].index\n return to_nan, existing_nans", "def has_nans(tensor, verbose=True):\n tensor_numpy = tensor.data.cpu().numpy().flatten()\n where_nan = np.argwhere(tensor_numpy != tensor_numpy)\n\n nan_count = len(where_nan)\n nan = nan_count != 0\n\n if verbose and nan:\n print(f\"Encountered {nan_count} NaNs\")\n\n return nan", "def pd_isnan(val):\n return val is None or val != val", "def missing_values():\n print('Missings in the train data:', train_data.isnull().sum())", "def fill_nan(A):\n\tinds = np.arange(A.shape[0])\n\tgood = np.where(np.isfinite(A))\n\tA[np.isnan(A)] = np.interp(inds[np.isnan(A)], inds[good], A[good])\n\treturn A", "def check_missing_data(df): \n df_lng = pd.melt(df) #Convert to long data\n null_variables = df_lng.value.isnull()\n \n return pd.crosstab(df_lng.variable, null_variables)", "def check_nan(self):\n # generate array for easier handling\n values = np.swapaxes(self.psf.psf_value, 0, 2)\n fail_count = 0\n\n # loop over energies\n for i, arr in enumerate(values):\n energy_hi = self.psf.energy_hi[i]\n energy_lo = self.psf.energy_lo[i]\n\n # check if bin is outside of safe energy threshold\n if self.psf.energy_thresh_lo > energy_hi:\n continue\n if self.psf.energy_thresh_hi < energy_lo:\n continue\n\n # loop over offsets\n for arr2 in arr:\n\n # loop over deltas\n for v in arr2:\n\n # check for nan\n if math.isnan(v.value):\n # add to fail counter\n fail_count += 1\n break\n\n results = {}\n if fail_count == 0:\n results[\"status\"] = \"ok\"\n else:\n results[\"status\"] = \"failed\"\n results[\"n_failed_bins\"] = fail_count\n\n self.results[\"nan\"] = results" ]
[ "0.692097", "0.6758517", "0.6730393", "0.66897637", "0.6636106", "0.65987986", "0.6463813", "0.6380045", "0.6338059", "0.6218934", "0.62157136", "0.6210175", "0.61391306", "0.61234987", "0.61117244", "0.6083047", "0.60659695", "0.60418135", "0.60125595", "0.60119325", "0.60112536", "0.6005158", "0.59992987", "0.59966856", "0.59852135", "0.5919009", "0.5889655", "0.5864719", "0.5858444", "0.5857676" ]
0.82622176
0
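A minimal usage sketch for the locate_nan_rows document above; the toy array is an assumption for illustration, and numpy is assumed to be importable as np.

import numpy as np

# Toy input (assumed): row 1 holds two NaNs, so it satisfies the nan_counts > 1
# condition that locate_nan_rows uses to flag a row.
arr = np.array([[1.0, 2.0, 3.0],
                [np.nan, np.nan, 6.0],
                [7.0, 8.0, 9.0]])

print(sorted(locate_nan_rows(arr)))  # [1]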
Sanitizes the dict 'record' for writing to 'table', i.e., restricts to keys which appear as columns of table.
def sanitize_record(record, table): try: columns = table.columns except AttributeError: columns = vars(table) return {key: value for key, value in record.items() if key in columns}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _sanitise_fields(self, record):\n sanitised = {}\n for k, v in record.items():\n new_key = k.replace('(', '_').replace(')', '_')\n sanitised[new_key] = v\n return sanitised", "def clean_record(self):\n _dict = {\n key: value for (key, value) in self.record.items() if not key in\n BAMBOO_RESERVED_KEYS\n }\n return remove_mongo_reserved_keys(_dict)", "def preprocess_record(record):\n automatic_fields = ['created_at', 'modified_at']\n record = serialize_fields(filter_out_dict_keys(record, automatic_fields))\n\n return record", "def sanitize_record(self, record):\n def remove_ids(d):\n if isinstance(d, dict):\n if 'id' in d:\n del d['id']\n for key, val in d.items():\n if isinstance(val, (dict, list)):\n remove_ids(val)\n if isinstance(d, list):\n for i in d:\n if isinstance(i, (dict, list)):\n remove_ids(i)\n return d\n\n record = remove_ids(record)\n for answer in record['answers']:\n answer.pop('feeds', None)\n return record", "def cleanup_record(schema, record):\n if not isinstance(record, dict) and not isinstance(record, list):\n return record\n\n elif isinstance(record, list):\n nr = []\n for item in record:\n nr.append(cleanup_record(schema, item))\n return nr\n\n elif isinstance(record, dict):\n nr = {}\n for key, value in record.items():\n nkey = bigquery_transformed_key(key)\n nr[nkey] = cleanup_record(schema, value)\n return nr\n\n else:\n raise Exception(f\"unhandled instance of record: {record}\")", "def insert_record(record, table):\n\n\ttups = [(key, val) for key, val in record.iteritems()]\n\tkeys = [key for key, val in tups]\n\tvals = [val for key, val in tups]\n\n\tconn = get_database_connection(port = 2001)\n\tcursor = conn.cursor()\n\n\tnum_cols = len(keys)\n\tkey_str = ','.join(keys)\n\tval_str = ','.join(['%s'] * num_cols)\n\n\tqry = \"REPLACE INTO %s (%s) VALUES (%s)\" % (table, key_str, val_str)\n\tcursor.execute(qry, vals)\n\n\tconn.commit()\n\tcursor.close()\n\tconn.close()", "def _sanitize(data_dict):\n return data_dict", "def clean_keys_of_slashes(record):\n for key in list(record):\n value = record[key]\n if \"/\" in key:\n # replace with _\n record[key.replace(\"/\", \"_\")] = record.pop(key)\n # Check if the value is a list containing nested dict and apply same\n if value:\n if isinstance(value, list) and isinstance(value[0], dict):\n for v in value:\n clean_keys_of_slashes(v)\n\n return record", "def conform_output_data(rowdict,fields_to_show=''):\n rowdict['TimeStamp'] = str(rowdict['TimeStamp'])\n if fields_to_show:\n rowdict= removed_fields(fields_to_show, rowdict)\n return rowdict", "def _postprocess_record(record, hide=_CONFIDENTIAL_FIELDS):\n record = hide_confidential_fields(record, hide)\n record = unserialize_fields(record, hide)\n\n convert_float_timestamp2str(record)\n\n return record", "def _format_sql(self, trade, table):\n\n trade = copy(trade)\n for key, value in trade.items():\n\n if value is None:\n trade[key] = 'NULL'\n elif key == 'date':\n value = tb.DateConvert(value).date\n\n if isinstance(value, str):\n trade[key] = f\"'{value}'\"\n\n return {k:v for k,v in trade.items() if k in self.fields[table]}", "def savedict(self, obj, table):\n if not isinstance(obj, dict): return False\n\n keys = ['`%s`' % key for key in obj.keys()]\n values = [None if value == '' else value for value in obj.values()]\n\n sql = 'REPLACE INTO %s (%s) VALUES (%s)' % (table, ','.join(keys), ','.join(['%s'] * len(values)))\n self.execute(sql, values)", "def hide_confidential_fields(record, fields=_CONFIDENTIAL_FIELDS):\n if not(isinstance(record, dict) and 
fields):\n return record\n\n keys = list(record.keys())\n keys = (k for k in keys for f in fields if k == f or k.endswith('.'+f))\n\n return merge_dicts(record, {k: '********' for k in keys if record[k]})", "def prepare_record_data_for_DB_insert(record_data: Dict) -> Dict:\n if record_data[\"artist\"] is None or record_data[\"title\"] is None:\n raise AssertionError(\"Artist and / or Title cannot be None.\")\n\n artist_list = [art.strip() for art in record_data[\"artist\"].split(\";\")]\n artist_country_list = [\n co.strip() for co in record_data[\"artist_country\"].split(\";\")\n ]\n label_list = [lab.strip() for lab in record_data[\"label\"].split(\";\")]\n\n if len(artist_list) != len(artist_country_list):\n raise AssertionError(\n \"Need the same number of artists and artist countries.\"\n )\n\n record_data[\"artist\"] = artist_list\n record_data[\"artist_country\"] = artist_country_list\n record_data[\"label\"] = label_list\n return record_data", "def savedict(self, obj, table):\n\t\tif not isinstance(obj, dict): return False\n\n\t\tkeys = ['`%s`' % key for key in obj.keys()]\n\t\tvalues = [None if value == '' else value for value in obj.values()]\n\n\t\tsql = 'REPLACE INTO %s (%s) VALUES (%s)' % (table, ','.join(keys), ','.join(['%s'] * len(values)))\n\t\tself.execute(sql, values)", "def _strip_all(dic):\n for k, v in dic.items():\n\n if len(v) == 0:\n dic[k] = 'NULL'\n if isinstance(v, str):\n v = v.strip().replace('\\t', '').replace('\\n', '').encode('utf-8', 'ignore')\n dic[k] = v\n\n return dic", "def __scrub_data(self, data):\n all_columns = list(self.columns.keys())\n \n result = {}\n try:\n for column, value in data.items():\n column = column.lower()\n column_definition = self.columns[column]\n #TODO: validate type/length/etc\n all_columns.remove(column)\n result[column] = value\n except KeyError:\n raise Exception('Sorry, the column, \\\"' + column + '\\\" doesn\\'t exist in the table, \\\"' + self.name + '\\\".')\n \n #now add default values\n for lefover_column in all_columns:\n result[lefover_column] = None\n\n return result", "def put_record(self, record):\r\n row = [record.get(field) for field in self.fields.names()]\r\n\r\n self.put(row)", "def clean_dict(self,dict_to_clean):\n for i in dict_to_clean: \n try:\n float( dict_to_clean[i] ) \n except:\n dict_to_clean[ i ] = \"'%s'\"%( dict_to_clean[i ].replace(\"'\",\"\").replace('\"',\"\") )", "def recordval(record, key):\n return re.sub(r'\\s+', ' ', raw_recordval(record, key))", "def raw_recordval(record, key):\n if key in record:\n return str(record[key]).strip()\n return \"\"", "def alter_details(self, parsed_details_df):\n\n parsed_details_df = parsed_details_df[~pandas.isnull(parsed_details_df.key)]\n parsed_details_df[\"key\"] = parsed_details_df[\"key\"].apply(lambda key: key.replace(\":\", \"\").strip().upper())\n parsed_details_df[\"key\"] = parsed_details_df[\"key\"].apply(\n lambda key: self.details_mapping[key] if key in self.details_mapping.keys() else key)\n parsed_details_df.drop_duplicates(subset =\"key\", inplace = True)\n return parsed_details_df", "def sanitise(data, keys=None):\n keys = keys or settings.SENSITIVE_DATA_KEYS\n\n def recurse(leaf):\n for k, v in leaf.iteritems():\n if isinstance(v, dict):\n recurse(v)\n if k in keys:\n leaf[k] = '*' * 8\n\n recurse(data)\n return data", "def _clean_flat_dict(header):\n return { key: val for (key,val) in header.items()\n if isinstance(val, (str,int,float,complex,bool)) }", "def sweep_record(r):\n return dict_sweep(r, vals=[None])", "def sanitize(key, 
value):\n \n # We do special handling for response keys\n # list (str was delimited by commas)\n if key == 'txn_refs':\n return value.split(',')\n \n # datetime\n elif key in ('date', 'expiry'):\n return str_to_date(value, key=key)\n\n # int\n elif key in ('split_count', 'duration', 'talktime', 'entries_count', 'total_entries_count'):\n return int(value)\n\n # float\n elif key in ('rate', 'total_cost', 'debit', 'balance', 'points', 'bonus'):\n return float(value)\n\n # bool\n elif key in ('auto_extend_status',):\n if value == 'disabled':\n return True\n else:\n return False\n\n # list of objects\n elif key == 'entries':\n # Turns each entry (a dict) into an object\n i = 0\n for entry in value:\n obj = obj_dic(entry)\n value[i] = obj\n i += 1\n return value\n\n else:\n return value", "def _validate_data_for_sql_table(data: dict, table: dict):\n for key, val in data.items():\n if val is None:\n if not table[key].is_nullable:\n raise DataTypeError(message=f\"Column '{key}' is not nullable, but \\\n value provided is None.\")\n elif not isinstance(val, table[key].data_type):\n raise DataTypeError(message=f\"Invalid data type for '{key}'.\")\n elif isinstance(val, str):\n if isinstance(table[key].precision, int):\n if len(val) > table[key].precision:\n raise DataTypeError(message=f\"String of chars too long for '{key}'. \\\n It must be {table[key][3]} chars maximum.\")\n return", "def constructSavedTable(table, records):\n columns = []\n colNames = []\n for column in table.tableColumns:\n col = {}\n for k,v in column.iteritems():\n if k == \"sizeCalculated\" or k == \"sizeCorrected\" or k == 'min':\n continue\n elif k == \"field\":\n colNames.append(v)\n col[str(k)] = str(v)\n columns.append(col)\n return constructTable(table, records, columns, colNames)", "def _fix_query_table(table):\n for i in table.columns:\n tdtype = table[i].dtype.char\n if tdtype in ('b', 'B', 'S', 'a', 'O'):\n row = process_list(string_fix, table[i])\n table[i] = np.array(row, dtype=str)\n return table", "def clean_table(self):\n return False" ]
[ "0.7768705", "0.65373653", "0.62795115", "0.60765547", "0.6009481", "0.59517825", "0.5927419", "0.5893116", "0.5849048", "0.584391", "0.5767355", "0.57646644", "0.57419175", "0.5702649", "0.5685503", "0.5644447", "0.55450684", "0.54949075", "0.54631734", "0.54530233", "0.54444677", "0.54331875", "0.5413107", "0.54119414", "0.5378883", "0.5356819", "0.5338702", "0.53207284", "0.53197145", "0.53086954" ]
0.8198284
0
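An illustrative call of the sanitize_record document above; the stand-in Table class, its column names, and the sample record are assumptions, not taken from the record.

class Table:
    # Stand-in object exposing the .columns attribute that sanitize_record looks for first.
    columns = ("name", "energy")

record = {"name": "run42", "energy": -1.25, "comment": "dropped, since it is not a column"}
print(sanitize_record(record, Table()))  # {'name': 'run42', 'energy': -1.25}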
Parse a string representation of a dictionary, e.g.,
def parse_string_dict(dict_as_string): new_dict = ast.literal_eval(dict_as_string[1:-1]) new_dict = {key: parse_string(val) for key, val in new_dict.items()} return new_dict
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_dict(txt):\n pairs = txt[txt.index('{')+1:txt.rindex('}')].split(',') # need to inplement a correct split by comma\n d = {}\n for p in pairs:\n if p:\n splt = p.split(':')\n key = splt[0].strip()\n value = splt[1].strip()\n if value[0] == '{':\n value = parse_dict(value)\n d[key] = value\n return d", "def str2dict(string):\n res_dict = {}\n for keyvalue in string.split(','):\n (key, value) = keyvalue.split('=', 1)\n res_dict[key] = value\n return res_dict", "def test_parse_str(parser):\n doc = parser.parse('{\"hello\": \"world\"}')\n assert doc.as_dict() == {'hello': 'world'}", "def strToDict(dictRawStr):\n dictStr = removeJsonComment(dictRawStr)\n\n # True/False -> true, false\n dictStr = re.sub(\"\\s*True\\s*(,?)\", \"true\\g<1>\", dictStr)\n dictStr = re.sub(\"\\s*False\\s*(,?)\", \"false\\g<1>\", dictStr)\n\n curDict = json.loads(dictStr)\n\n return curDict", "def dict_option(s):\n return _convert(s, (dict,))", "def _handle_dict(string):\n dict_lines = [line.split(Parser.FIELD_DELIM) for line in string.split(Parser.LINE_DELIM)\n if Parser.FIELD_DELIM in line]\n cur_dict = 0\n results = [{}]\n for line in dict_lines:\n if line[0] in results[cur_dict]:\n results.append({})\n cur_dict += 1\n results[cur_dict][line[0]] = line[1]\n return results", "def parse_from_str(self, config_str):\n if not config_str:\n return {}\n config_dict = {}\n try:\n for kv_pair in config_str.split(','):\n if not kv_pair: # skip empty string\n continue\n k, v = kv_pair.split('=')\n config_dict[k.strip()] = eval_str_fn(v.strip())\n return config_dict\n except ValueError:\n raise ValueError('Invalid config_str: {}'.format(config_str))", "def json_loads(s: Union[bytes, str]) -> Dict[str, Any]:\n return json.loads(ensure_text(s, \"utf-8\"))", "def parsekv(inputString):\n mDict = dict()\n parts = inputString.split('&')\n for item in parts:\n if (item.count('=') != 1):\n raise ValueError(\"Need a singular = sign in str. 
%s\" % (item, ))\n key, value = item.split('=')\n # If we can convert the string value to an int, great, otherwise\n # leave it as a string.\n try:\n mDict[key] = int(value)\n except ValueError:\n mDict[key] = value\n return mDict", "def parse_bytes_to_dict(bytes_to_parse):\n return ast.literal_eval(bytes_to_parse.decode(\"utf-8\"))", "def string_to_dict(value):\n if not value:\n return None\n values = json.loads(value)\n for key in values:\n if values[key] == \"\":\n values[key] = None\n\n return values", "def read_string(self, string, **kwds):\n self._dict.update(json.loads(string))", "def read_dict(txt_file_path):\n txt_file = open(txt_file_path,'r')\n txt_raw = txt_file.read()\n txt_as_dict = ast.literal_eval(txt_raw)\n txt_file.close()\n return txt_as_dict", "async def parse(self, raw: str) -> dict:", "def parse_from_string(self, file_content: str):\n self._split_to_tokens(file_content)\n if not self._convert_tokens_to_dict():\n log.error('Failed to generate dictionary representation of file.')\n return None\n return self._result", "def message_to_dict(message):\n message_dict = {}\n if isinstance(message, str):\n tmp = re.sub(\"[{}\\\"]\", '', message).split(',')\n for string in tmp:\n var = string.split(':')\n message_dict[var[0]] = var[1]\n return message_dict", "def rawtodictonary(rawstring):\n\n if is_python3():\n rawstring = bytes(rawstring).decode('utf-8') \n \n raw = rawstring.split(NULL)[:-2]\n\n data = {}\n for i in range(0,len(raw) - 1,2):\n\n key,val = raw[i], raw[i+1]\n keyel = key.split(\"\\x1c\")\n\n if len(keyel) == 1:\n if key in data:\n data[key][None] = val\n else:\n data[key]=val\n else:\n if keyel[0] in data and not isinstance(data[keyel[0]], dict):\n data[keyel[0]]={ None: data[keyel[0]] }\n\n try:\n data[keyel[0]][keyel[1]] = val\n except TypeError:\n data[keyel[0]] = {keyel[1] : val}\n except KeyError:\n data[keyel[0]] = {keyel[1] : val}\n\n return data", "def parse_key_value_pairs(arg_string):\n try:\n return {key: value for (key, value) in [tuple(str(arg).split('=', 1)) for arg in arg_string]}\n except ValueError:\n raise click.ClickException(\"argument string must be in the form x=y\")", "def parse_from_string(config_pair):\n key, value = config_pair.split(\"=\")\n value = literal_eval(value)\n current_config_keys = key.split('.')[::-1]\n last_config_value = {current_config_keys[0]: value}\n for current_config_subkey in current_config_keys[1:]:\n last_config_value = {current_config_subkey: last_config_value}\n return last_config_value", "def from_json_string(my_str):\n import json\n return json.loads(my_str)", "def makeDict(self, s):\n out = {}\n entries = s.split(self.dataDelimiterEntry)\n for e in entries:\n if e == \"\":\n continue\n c = e.split(self.dataDelimiterKey)\n out[c[0]] = c[1]\n return out", "def to_dict(json_str):\n # OK, really unnecessary func, but a concession to less experienced users\n # todo: use json module instead, json.dumps()\n query = ast.literal_eval(json_str)\n return query", "def from_string(string):\n # in order to complete this lab we are going to use the python lib json in which we have the function json.loads\n # which will automatically load a json from a string\n return json.loads(string)", "def _kv_to_dict(meta):\n try:\n return dict(m.split(\"=\", 1) for m in meta)\n except ValueError:\n raise _errors.MachineError(\"Invalid parameter (%s).\" % (meta, ))", "def __parse_options_dict(options_str):\n # type: (str) -> Dict[str, str]\n opts = options_str.split('&') # type: List[str]\n res = {} # Type: Dict\n\n for opt in opts:\n 
key, value = opt.split('=') # type: List[str, str]\n res[key] = value # type: str\n\n return res", "def parse_prefs_file(prefs_string):\r\n try:\r\n prefs = dict(eval(prefs_string))\r\n except TypeError:\r\n raise QiimeParseError(\r\n \"Invalid prefs file. Prefs file must contain a valid prefs dictionary.\")\r\n return prefs", "def parse_dict_items(data: Sequence[str]) -> dict:\n info = {}\n # if there is only one element, it may contains a fully \"jsonified\" string\n # or an item of a dict\n if len(data) == 1:\n delimiter = _get_delimiter(data[0])\n if delimiter is None:\n try:\n return json.loads(data[0])\n except json.JSONDecodeError:\n raise click.UsageError(JSON_ERROR_MESSAGE.format(data[0]))\n else:\n return _parse_item(data[0])\n for item in data:\n info.update(_parse_item(item))\n return info", "def _parse_item(item: str) -> dict:\n delimiter = _get_delimiter(item)\n key, value = item.split(delimiter)\n if delimiter == '=':\n return {key: value}\n else:\n try:\n return {key: json.loads(value)}\n except json.JSONDecodeError:\n raise click.UsageError(JSON_ERROR_MESSAGE.format(item))", "def ConvertJsonIntoDict(string):\n if len(string) == 0:\n print >> sys.stderr, ('Error could not parse empty string')\n raise Exception('JSON data missing')\n\n try:\n jsondata = json.loads(string)\n except ValueError, e:\n print >> sys.stderr, ('Error parsing string: \"%s\"' % string)\n raise e\n return jsondata", "def decodemeta(data):\n d = {}\n for l in data.split('\\0'):\n if l:\n key, value = l.split(':')\n d[key] = value\n return d" ]
[ "0.6813349", "0.67471504", "0.65554583", "0.6505521", "0.6483303", "0.6417136", "0.63636893", "0.629334", "0.6261636", "0.62039405", "0.6178307", "0.6155987", "0.6088658", "0.6058405", "0.60359", "0.60221696", "0.5994995", "0.5986573", "0.59692", "0.5951037", "0.59507596", "0.5942097", "0.5903739", "0.58911365", "0.58903307", "0.5860675", "0.58584714", "0.5846", "0.5841577", "0.5826603" ]
0.79582787
0
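A usage sketch for the parse_string_dict document above. The input string is an assumption about the expected format: one extra wrapping character on each side, which the [1:-1] slice strips before ast.literal_eval, with every value written as a gvar-array string handled by the companion parse_string function from the record that follows.

# Assumed raw form: doubled braces, values written as gvar arrays.
raw = "{{'m_pi': '[0.1399(14) 0.1401(15)]', 'm_K': '[0.4937(20) 0.4941(21)]'}}"
parsed = parse_string_dict(raw)  # outer braces stripped, each value parsed into an array of gvars
print(parsed["m_pi"])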
Parse a string representation of an array of gvars into an array of gvars. This operation arises frequently, for example, when reading from the various "glance" tables, which store preprocessed data.
def parse_string(str_arr): def to_arr(str_arr): """ Switch to list. """ row = str_arr.replace(']', '').\ replace('[', '').\ replace('{', '').\ replace('}', '').\ replace('\n', '').split() if '+-' in row: row = kludge_gvars(row) row = [gv.gvar(str(elt)) for elt in row] return np.array(row) def kludge_gvars(mangled): """ Occasionally, gvars get rendered to strings as, e.g., -4e-06 +- 1 instead of -0.000006(1.0). This makes a complete mess of trying to parse the a list of gvar which has been turned into a string, e.g., '[1(2) 1 +- 2 0.003(2)]', since the usual str.split() separates '1 +- 2' --> ['1','+-','2']. This function is a kludge which works around this difficulty. """ # Loop in reverse looking for '+-', but don't run off the end for idx in range(len(mangled) - 1)[::-1]: if mangled[idx + 1] == '+-': reunited = ' '.join(mangled[idx:idx + 3]) # Throw away the used elements... for _ in range(3): mangled.pop(idx) # Repair the list with reunited gvar string mangled.insert(idx, reunited) return mangled return to_arr(str_arr)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse(arr_str):\n return arr_str.rstrip().replace(' ', '').split(',')[:-1]", "def convert_strings_to_array(strings):\n row_strings = strings.split(\"\\n\")\n new_array = np.array([[float(i) for i in row_string.split(\",\")] for row_string in row_strings])\n shape = new_array.shape\n if shape[1]==2:\n return new_array\n elif shape[0]==2:\n return new_array.T\n else:\n print \"Currently only accepting arrays of shape (2,x) or (x,2)\"\n return None", "def parse_string(s):\n # type: (str) -> Union[str, np.ndarray, float]\n v = re.sub(r'[\\[\\]]', '', s)\n\n if ',' in v:\n v = v.split(',')\n elif ';' in v:\n v = v.split(';')\n\n try:\n v = np.atleast_1d(np.array(v, dtype=float))\n if v.size == 1:\n v = v[0]\n return v\n except ValueError:\n return s", "def __build_array(string):\n ar = []\n tmp = string.split('.')\n\n for item in tmp:\n ar.append( item.strip().strip('[').strip(']').strip() )\n\n return ar", "def vars(svars):\n return np.array([pm.var(var) for var in svars.split()])", "def str_popsizes_to_array(str_popsizes, delim=\",\"):\n popsizes = []\n for curr_sizes in str_popsizes:\n float_sizes = map(float, curr_sizes.split(delim))\n popsizes.append(float_sizes)\n return np.array(popsizes)", "def gx_coords1(s: str) -> list[float]:\n return numarray(s.split(\" \"))", "def str_to_numpy(string_array):\n if pd.isnull(string_array):\n return(np.NaN)\n else:\n return np.array(ast.literal_eval(string_array))", "def _parseVec(self, str):\r\n\t\tvec = []\r\n\t\tsplt = str.split()\r\n\t\tfor i in range(0,len(splt)):\r\n\t\t\tvec.append(self._parseNumber(splt[i]))\r\n\t\treturn vec", "def parseArr(self, s) :\n \n rc = []\n if s.startswith('[') and s.endswith(']') :\n s = s[1:-1]\n z = s.split(',')\n for p in z :\n if p.find('..') >= 0 :\n zz = p.split('..')\n if len(zz) == 2 :\n b = self.str2raw(zz[0])\n e = self.str2raw(zz[1])\n b = self.safe2Int(b)\n e = self.safe2Int(e)\n if not b == None and not e == None and (e >= e):\n for i in range(b, e + 1) :\n rc.append(str(i))\n \n else :\n p = self.str2raw(p)\n rc.append(str(p))\n pass\n return rc", "def parseArr(s) :\n\n rc = []\n if s.startswith('[') and s.endswith(']') :\n s = s[1:-1]\n z = s.split(',')\n for p in z :\n if p.find('..') >= 0 :\n zz = p.split('..')\n if len(zz)==2 :\n b = str2raw(zz[0])\n e = str2raw(zz[1])\n b = safe2Int(b)\n e = safe2Int(e)\n if not b==None and not e==None and (e >= e):\n for i in range(b,e+1) :\n rc.append(str(i))\n\n else :\n p = str2raw(p)\n rc.append(str(p))\n pass\n return rc", "def string_to_array(s):\n\n if isinstance(s, str):\n out = s.split(\"|\")\n elif math.isnan(s):\n out = []\n else:\n raise ValueError(\"Value must be either string of nan\")\n return out", "def concatenated_string_to_array(string):\r\n return np.array([int(x) for x in string.split(\",\")])", "def concatenated_string_to_array(string):\n return np.array([int(x) for x in string.split(\",\")])", "def catsStringToArray(catsString):\n return list(map(int, catsString.strip('[]').split(',')))", "def coords1(s: str) -> list[float]:\n return numarray(re.sub(SPACE, \"\", s).split(\",\"))", "def string_to_array(arg):\n\n res = arg.replace('[', '').replace(']', '').replace(',', '')\n return np.array(res.split(' '), dtype=np.int8)", "def parse_input(giant_string):\n X_train_part, Y_train_part, X_test_part = giant_string.split(\"XXX\")\n\n X_train_row_strings = X_train_part.split(\"S\")\n X_train_rows = [[float(x) for x in row.split(\",\")] for row in X_train_row_strings]\n X_train = np.array(X_train_rows)\n\n Y_train = 
concatenated_string_to_array(Y_train_part)\n\n X_test_row_strings = X_test_part.split(\"S\")\n X_test_rows = [[float(x) for x in row.split(\",\")] for row in X_test_row_strings]\n X_test = np.array(X_test_rows)\n\n return X_train, Y_train, X_test", "def parseVars(fp):\n\n try:\n ln = fp.readline()\n p = re.compile(r'^Variaveis\\s*(?:#.*)?$')\n m = p.match(ln)\n if m == None:\n raise ParseError(ParseError.rnamesMsg)\n\n ln = fp.readline()\n p = re.compile(r'^\\{\\s*(.*)\\s*\\}\\s*(?:#.*)?$')\n m = p.match(ln)\n if m == None:\n raise ParseError(ParseError.rnamesMsg)\n\n a = m.group(1).split(',')\n a[:] = map(str.strip, a)\n return set(a)\n\n except:\n raise", "def parse_input(giant_string):\r\n X_train_part, Y_train_part, X_test_part = giant_string.split(\"XXX\")\r\n\r\n X_train_row_strings = X_train_part.split(\"S\")\r\n X_train_rows = [[float(x) for x in row.split(\",\")] for row in X_train_row_strings]\r\n X_train = np.array(X_train_rows)\r\n\r\n Y_train = concatenated_string_to_array(Y_train_part)\r\n\r\n X_test_row_strings = X_test_part.split(\"S\")\r\n X_test_rows = [[float(x) for x in row.split(\",\")] for row in X_test_row_strings]\r\n X_test = np.array(X_test_rows)\r\n\r\n return X_train, Y_train, X_test", "def str2np(s: str) -> np.ndarray:\n return np.array(json.loads(s))", "def from_str(cls, string):\n # If quotes are found, parse it as a Python string literal after adding\n # brackets around\n if '\"' in string or \"'\" in string:\n string = '[' + string + ']'\n l = ast.literal_eval(string)\n return [str(x) for x in l]\n # Otherwise, just split on commas\n else:\n return string.split(',')", "def parseGGA(data):\n #print \"GGA received\"\n # print data\n return pack([\"utc\", \"lat\", \"ns\", \"lon\", \"ew\", \"quality\"\n \"numSats\", \"horizontalDilution\", \"altitude\",\n \"altitudeUnits\", \"separation\", \"separationUnits\",\n \"age\", \"reference\"],\n data)", "def get_str_arrays(self):\n return self._fin.readline().strip('\\n').strip(' ').split(' ')", "def parse_string_list(data):\n txt = data.decode()\n x = ast.literal_eval(txt)\n return x", "def format_string_to_list(self, avi_string):\n\n repls = ('[', ''), (']', ''), (\"'\", \"\")\n avi_string = reduce(lambda a, kv: a.replace(*kv), repls, avi_string)\n return avi_string.split(',')", "def parse_input(userstring):\n xsplit = userstring.split()\n stringtovalues = [float(x) for x in xsplit]\n\n return stringtovalues", "def load_variable_from_file(filepath, variable_names):\n if not isinstance(variable_names, (list,tuple)): # only one variable requested\n variable_names = [variable_names,]\n variables = [[],] * len(variable_names)\n previous_line = \"\"\n var_names = None\n with open(filepath, \"rb\") as f:\n for line in f.readlines():\n # Skip headers\n line = line.decode(\"utf-8\")\n if line[0]==\"#\" or line.strip()==\"end\":\n previous_line = line\n var_names = None\n continue\n if var_names is None:\n var_names = previous_line.split()[1:]\n # clean up name list\n while \"vs\" in var_names:\n var_names.remove(\"vs\")\n sp = line.split()\n for ivar, name in enumerate(variable_names):\n if name in var_names:\n index = var_names.index(name)\n variables[ivar].append(float(sp[index]))\n # we're done reading these variables, co\n for ivar in range(len(variables)):\n if len(variables[ivar]) > 0 and isinstance(variables[ivar], list):\n variables[ivar] = numpy.array(variables[ivar])\n if len(variable_names) > 1:\n return variable_names\n else: # only one variable read in\n return variables[0]", "def strToFloatArray(line, 
delim=\",\"):\n\tarr = line.split(delim)\n\treturn [float(a) for a in arr]", "def _parse_numbers(self, numberstr: str):\n numbers = []\n currentnumber = \"\"\n\n for c in numberstr:\n if c.isdigit() or c == '-' or c == '.':\n currentnumber += c\n elif len(currentnumber) > 0:\n numbers.append(float(currentnumber))\n currentnumber = \"\"\n if len(currentnumber) > 0:\n numbers.append(float(currentnumber))\n\n return np.array(numbers)" ]
[ "0.63184726", "0.5962173", "0.5946905", "0.593627", "0.59265995", "0.5860213", "0.579188", "0.56896275", "0.5681272", "0.5645227", "0.5643714", "0.56135464", "0.5585567", "0.5552104", "0.5551407", "0.5538226", "0.5523727", "0.5477755", "0.5459959", "0.5417494", "0.5411672", "0.54105717", "0.538897", "0.53138214", "0.53053683", "0.5226275", "0.51979506", "0.517632", "0.5155271", "0.5146558" ]
0.79506093
0
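A brief sketch of the parse_string document above in use; the input string is an assumption, and the gvar package (imported as gv in the document) plus numpy are assumed to be installed.

s = "[0.003(2) 1.0(5) -4e-06 +- 1]"
arr = parse_string(s)    # the '-4e-06 +- 1' fragment is re-joined by the kludge_gvars helper
print(len(arr), arr[0])  # 3 entries, each a gvar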
Occasionally, gvars get rendered to strings as, e.g., -4e-06 +- 1 instead of -0.000006(1.0). This makes a complete mess of trying to parse a list of gvars which has been turned into a string, e.g., '[1(2) 1 +- 2 0.003(2)]', since the usual str.split() separates '1 +- 2' --> ['1','+-','2']. This function is a kludge which works around this difficulty.
def kludge_gvars(mangled): # Loop in reverse looking for '+-', but don't run off the end for idx in range(len(mangled) - 1)[::-1]: if mangled[idx + 1] == '+-': reunited = ' '.join(mangled[idx:idx + 3]) # Throw away the used elements... for _ in range(3): mangled.pop(idx) # Repair the list with reunited gvar string mangled.insert(idx, reunited) return mangled
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_string(str_arr):\n def to_arr(str_arr):\n \"\"\" Switch to list. \"\"\"\n row = str_arr.replace(']', '').\\\n replace('[', '').\\\n replace('{', '').\\\n replace('}', '').\\\n replace('\\n', '').split()\n\n if '+-' in row:\n row = kludge_gvars(row)\n row = [gv.gvar(str(elt)) for elt in row]\n return np.array(row)\n\n def kludge_gvars(mangled):\n \"\"\"\n Occasionally, gvars get rendered to strings as, e.g.,\n -4e-06 +- 1 instead of -0.000006(1.0). This makes a\n complete mess of trying to parse the a list of gvar\n which has been turned into a string, e.g.,\n '[1(2) 1 +- 2 0.003(2)]', since the usual str.split()\n separates '1 +- 2' --> ['1','+-','2']. This function is\n a kludge which works around this difficulty.\n \"\"\"\n # Loop in reverse looking for '+-', but don't run off the end\n for idx in range(len(mangled) - 1)[::-1]:\n if mangled[idx + 1] == '+-':\n reunited = ' '.join(mangled[idx:idx + 3])\n # Throw away the used elements...\n for _ in range(3):\n mangled.pop(idx)\n # Repair the list with reunited gvar string\n mangled.insert(idx, reunited)\n return mangled\n\n return to_arr(str_arr)", "def get_numbers_operators(text: str, var_dict: dict, svar_dict:dict) -> (list, list, dict):\n\n\n # Define regex to extract all numbers in a string, as well as placeholders for intermediate results.\n # These placeholders start with a character, followed by a sequence of characters and numbers.\n # Use re.findall method to get a list of all numbers from the string.\n variables_regex = r\"((?<=[\\+\\-\\*\\/\\^\\,])|^)\\s*[\\+\\-]?\\s*(\\d+\\.?\\d*(e-?\\d+)?|[A-Za-z]+[A-Za-z0-9]*)\"\n var_list = re.findall(variables_regex, text)\n var_list = [i[1] for i in var_list]\n\n # Create dynamic view objects of the keys in var_dict and svar_dict.\n var_dict_keys = var_dict.keys() # returns DYNAMIC view object\n svar_dict_keys = svar_dict.keys()\n\n # Loop over var_list to assign variables to numbers and to copy saved variables from svar_dict to var_dict.\n for idx, entry in enumerate(var_list):\n # Do nothing if an entry is already stored in var_dict\n if not entry in var_dict_keys:\n # Check if entry is contained in svar_dict\n if not entry in svar_dict_keys:\n var_list[idx] = float(entry)\n else:\n var_list[idx] = svar_dict[entry]\n else:\n var_list[idx] = var_dict.pop(entry)\n\n \n operator_string = re.sub(variables_regex, '', text)\n operator_list = [i for i in operator_string if i !=' ']\n\n # Return both lists and the dictionairy.\n return var_list, operator_list, var_dict", "def testtofloatString ( self ):\r\n\t\tr = re.compile ( 'frac' )\r\n\t\tfor fracTup1, expRes in self.knownFloatStringValues:\r\n\t\t\tfrac1 = eval ( r.sub ( 'frac.frac', fracTup1 ) ) \r\n\t\t\tself.assertEqual ( frac1.tofloatString (), expRes )", "def _get_vars(symbol: Union[str, int]) -> str:\n if isinstance(symbol, str):\n return {\n 'circle': 'var b1=n.round(t,2);',\n 'square': 'var b1=n.round(t,2);',\n 'diamond': 'var b1=n.round(t*1.3,2);',\n 'hexagram': 'var b1=n.round(t,2);var b2=n.round(t/2,2);var b3=n.round(t*Math.sqrt(3)/2,2);'\n }[symbol]\n return {\n 37: 'var d1=n.round(t*1.2,2);var d2=n.round(t*1.6,2);var d3=n.round(t*0.8,2);',\n 38: 'var d1=n.round(t*1.2,2);var d2=n.round(t*1.6,2);var d3=n.round(t*0.8,2);',\n 39: 'var d1=n.round(t*1.2,2);var d2=n.round(t*1.6,2);var d3=n.round(t*0.8,2);',\n 40: 'var d1=n.round(t*1.2,2);var d2=n.round(t*1.6,2);var d3=n.round(t*0.8,2);',\n 34: 'var d1=n.round(t,2);',\n 33: 'var d1=n.round(t*1.4,2);',\n 35: 'var d1=n.round(t*1.2,2);var d2=n.round(t*0.85,2);',\n 36: 'var 
d1=n.round(t/2,2);var d2=n.round(t,2);'\n }[symbol]", "def reassemble_parens(cls, propstr, errstr, context, splitstr=','):\n vars = list()\n proplist = propstr.split(splitstr)\n while len(proplist) > 0:\n var = proplist.pop(0)\n while var.count('(') != var.count(')'):\n if len(proplist) == 0:\n raise ParseSyntaxError(errstr, token=propstr, context=context)\n # End if\n var = var + ',' + proplist.pop(0)\n # End while\n var = var.strip()\n if len(var) > 0:\n vars.append(var)\n # End if\n # End while\n return vars", "def repeated_decimals(tokens: List[TOKEN], local_dict: DICT, global_dict: DICT):\n result: List[TOKEN] = []\n\n def is_digit(s):\n return all(i in '0123456789_' for i in s)\n\n # num will running match any DECIMAL [ INTEGER ]\n num: List[TOKEN] = []\n for toknum, tokval in tokens:\n if toknum == NUMBER:\n if (not num and '.' in tokval and 'e' not in tokval.lower() and\n 'j' not in tokval.lower()):\n num.append((toknum, tokval))\n elif is_digit(tokval)and len(num) == 2:\n num.append((toknum, tokval))\n elif is_digit(tokval) and len(num) == 3 and is_digit(num[-1][1]):\n # Python 2 tokenizes 00123 as '00', '123'\n # Python 3 tokenizes 01289 as '012', '89'\n num.append((toknum, tokval))\n else:\n num = []\n elif toknum == OP:\n if tokval == '[' and len(num) == 1:\n num.append((OP, tokval))\n elif tokval == ']' and len(num) >= 3:\n num.append((OP, tokval))\n elif tokval == '.' and not num:\n # handle .[1]\n num.append((NUMBER, '0.'))\n else:\n num = []\n else:\n num = []\n\n result.append((toknum, tokval))\n\n if num and num[-1][1] == ']':\n # pre.post[repetend] = a + b/c + d/e where a = pre, b/c = post,\n # and d/e = repetend\n result = result[:-len(num)]\n pre, post = num[0][1].split('.')\n repetend = num[2][1]\n if len(num) == 5:\n repetend += num[3][1]\n\n pre = pre.replace('_', '')\n post = post.replace('_', '')\n repetend = repetend.replace('_', '')\n\n zeros = '0'*len(post)\n post, repetends = [w.lstrip('0') for w in [post, repetend]]\n # or else interpreted as octal\n\n a = pre or '0'\n b, c = post or '0', '1' + zeros\n d, e = repetends, ('9'*len(repetend)) + zeros\n\n seq = [\n (OP, '('),\n (NAME, 'Integer'),\n (OP, '('),\n (NUMBER, a),\n (OP, ')'),\n (OP, '+'),\n (NAME, 'Rational'),\n (OP, '('),\n (NUMBER, b),\n (OP, ','),\n (NUMBER, c),\n (OP, ')'),\n (OP, '+'),\n (NAME, 'Rational'),\n (OP, '('),\n (NUMBER, d),\n (OP, ','),\n (NUMBER, e),\n (OP, ')'),\n (OP, ')'),\n ]\n result.extend(seq)\n num = []\n\n return result", "def gx_coords1(s: str) -> list[float]:\n return numarray(s.split(\" \"))", "def float_vct_str ( vct , format = '%.5g' ) :\n try :\n return '[ ' + ', '.join ( [ format % v for v in vct ] ) + ' ]' \n except TypeError :\n pass\n return float_vct_str ( vct , format = '%.5g' )", "def get_grist (value):\n assert is_iterable_typed(value, basestring) or isinstance(value, basestring)\n def get_grist_one (name):\n split = __re_grist_and_value.match (name)\n if not split:\n return ''\n else:\n return split.group (1)\n\n if isinstance (value, str):\n return get_grist_one (value)\n else:\n return [ get_grist_one (v) for v in value ]", "def parse_input(userstring):\n xsplit = userstring.split()\n stringtovalues = [float(x) for x in xsplit]\n\n return stringtovalues", "def parse1DList(self,string):\r\n string = string.replace(\"[\",\"\")\r\n string = string.replace(\"]\",\"\")\r\n string = string.split(\",\")\r\n for i in xrange(len(string)):\r\n string[i] = float(string[i])\r\n string = list(string)\r\n return string", "def testrawString ( self ):\r\n\t\tr = re.compile ( 
'frac' )\r\n\t\tfor fracTup1, expRes in self.knownRawStringValues:\r\n\t\t\tfrac1 = eval ( r.sub ( 'frac.frac', fracTup1 ) )\r\n\t\t\tself.assertEqual ( frac1.rawString (), str ( expRes ))", "def get_list_vars(my_vars):\n lists = []\n for var in my_vars:\n try:\n temp = my_vars[var].getValue()\n #print var + '=' + str(temp)\n except ValueError:\n lists.append(var)\n return lists", "def parse_float_list(string):\n new_list = []\n convert_fun = int\n for num in string[1:-1].split(';'):\n if '/' in num:\n num = float(Fraction(num))\n convert_fun = float\n elif ',' in num or '.' in num:\n num = float(num.replace(',', '.'))\n convert_fun = float\n elif num == \"inf\":\n convert_fun = float\n new_list.append(num)\n return [convert_fun(x) for x in new_list]", "def eval_variable(data):\n if data.lower() == \"true\":\n return True\n if data.lower() == \"false\":\n return False\n if data[0] == \"{\":\n # preserve order (don't use sets)\n # replace('`', '') is a fix for problem #8394\n return list(eval(data.replace(\"{\", \"[\").replace(\"}\", \"]\").replace('`', '')))\n if data[0] in ['\"', \"'\"]:\n return str(eval(data)).strip(\"'\\\"\")\n if data[0] in string.ascii_letters:\n # unquoted string\n return str(data.strip())\n if data[0].isdigit():\n return int(eval(data))\n return eval(data)", "def _parse_flags(self, flags):\n s = ''\n for flag in flags:\n if len(s):\n s += ' | '\n s += 'gf.sim.VariableFlag.%s' % (flag)\n if len(s):\n return s\n else:\n return '0'", "def parseValue(expr):\n\n\ttry:\n\t\treturn eval(expr)\n\texcept:\n\t\treturn eval(re.sub(\"\\s+\", \",\", expr))\n\telse:\n\t\treturn expr", "def parseVars(fp):\n\n try:\n ln = fp.readline()\n p = re.compile(r'^Variaveis\\s*(?:#.*)?$')\n m = p.match(ln)\n if m == None:\n raise ParseError(ParseError.rnamesMsg)\n\n ln = fp.readline()\n p = re.compile(r'^\\{\\s*(.*)\\s*\\}\\s*(?:#.*)?$')\n m = p.match(ln)\n if m == None:\n raise ParseError(ParseError.rnamesMsg)\n\n a = m.group(1).split(',')\n a[:] = map(str.strip, a)\n return set(a)\n\n except:\n raise", "def parse_format(var_sample):\n # ugh\n ret = []\n # Parsing format information\n # Need to see what all these could be...\n if None in var_sample[\"GT\"]:\n ret.append(3)\n elif var_sample[\"GT\"] == (0, 0):\n ret.append(0)\n elif var_sample[\"GT\"] == (0, 1):\n ret.append(1)\n elif var_sample[\"GT\"] == (1, 1):\n ret.append(2)\n \n ret.extend([var_sample[\"GQ\"] if var_sample[\"GQ\"] is not None else 0,\n var_sample[\"OV\"],\n var_sample[\"DP\"], # be careful these aren't '.'\n #split where _r is ref-allele and _a is alt-allele\n var_sample[\"AD\"][0],\n var_sample[\"AD\"][1],\n var_sample[\"PDP\"],\n var_sample[\"PAD\"][0],\n var_sample[\"PAD\"][1],\n var_sample[\"US\"][0],\n var_sample[\"US\"][1],\n var_sample[\"DS\"][0],\n var_sample[\"DS\"][1],\n var_sample[\"UC\"][0],\n var_sample[\"UC\"][1],\n var_sample[\"DC\"][0],\n var_sample[\"DC\"][1],\n var_sample[\"UDC\"][0],\n var_sample[\"UDC\"][1],\n var_sample[\"UCC\"][0],\n var_sample[\"UCC\"][1],\n var_sample[\"DDC\"][0],\n var_sample[\"DDC\"][1],\n var_sample[\"DCC\"][0],\n var_sample[\"DCC\"][1],\n var_sample[\"UMO\"][0],\n var_sample[\"UMO\"][1],\n var_sample[\"DMO\"][0],\n var_sample[\"DMO\"][1],\n var_sample[\"UXO\"][0],\n var_sample[\"UXO\"][1],\n var_sample[\"DXO\"][0],\n var_sample[\"DXO\"][1],\n var_sample[\"NR\"][0],\n var_sample[\"NR\"][1],\n var_sample[\"MO\"][0],\n var_sample[\"MO\"][1],\n var_sample[\"XO\"][0],\n var_sample[\"XO\"][1],\n var_sample[\"XC\"][0],\n var_sample[\"XC\"][1],\n var_sample[\"AC\"][0],\n 
var_sample[\"AC\"][1],\n var_sample[\"MC\"][0],\n var_sample[\"MC\"][1],\n var_sample[\"EC\"][0],\n var_sample[\"EC\"][1],\n var_sample[\"PL\"][0] if var_sample[\"PL\"][0] is not None else 0,\n var_sample[\"PL\"][1] if var_sample[\"PL\"][0] is not None else 0,\n var_sample[\"PL\"][2] if var_sample[\"PL\"][0] is not None else 0])\n return ret\n #END", "def parseGGA(data):\n #print \"GGA received\"\n # print data\n return pack([\"utc\", \"lat\", \"ns\", \"lon\", \"ew\", \"quality\"\n \"numSats\", \"horizontalDilution\", \"altitude\",\n \"altitudeUnits\", \"separation\", \"separationUnits\",\n \"age\", \"reference\"],\n data)", "def gpvtg_convert(line):\r\n gps = line.strip().split(',')\r\n #check data\r\n if gps[1] == '0.00': \r\n return\r\n #jsondata = {'Horizontal speed': gps[7] + ' kmph or ' + gps[5] + 'knots'}\r\n return []", "def testStringify(self):\n v1 = Vector(1, 6, -8, 0)\n assert ('%s' % v1) == '[ 1.000000, 6.000000, -8.000000, 0.000000 ]'", "def _fs (v):\r\n try : \r\n v = float(v)\r\n except : \r\n v = tuple([float (ss) for ss in \r\n v.replace('(', '').replace(')', '').split(',')])\r\n return v", "def parse_var_val(vstr, unit):\n\t# First we assume it has no suffix, and hence already in the target units\n\ttry:\n\t\treturn float(vstr)\n\texcept ValueError:\n\t\tpass\n\n\t# Try to get the unit suffix and convert the first part to a number\n\tres = var_re.match(vstr)\n\tif res is None:\n\t\traise ValueError('Cannot parse variable value ' + vstr)\n\n\tunit_old = res.group(2) # original unit\n\tval_old = float(res.group(1)) # original value\n\n\tif unit == unit_old:\n\t\treturn val_old\n\n\ttry:\n\t\treturn convert_val(val_old, unit_old, unit)\n\texcept KeyError:\n\t\traise ValueError('Can\\'t convert from unit ' + unit_old + ' to ' + unit)", "def parse_float(expression, variables):\n log.debug(\"parse_float {}\".format(expression))\n try:\n return float(expression)\n except ValueError:\n raw_tokens = tokenise_expression(expression)\n tokens = []\n for token in raw_tokens:\n if token in variables:\n tokens.append(variables[token])\n else:\n # Handle unusual format 2.2d6 which means 2.2e+6.\n dformat_matches = re.match(\"(-?\\\\d+.?\\\\d*)d(-?\\\\d+)\", token)\n if dformat_matches:\n mantissa, exponent = dformat_matches.groups()\n tokens.append(\"{}e{}\".format(mantissa, exponent))\n else:\n tokens.append(token)\n\n log.debug(tokens)\n\n def evaluate(tokens):\n if len(tokens) == 1:\n return float(tokens[0])\n if len(tokens) == 2:\n if tokens[0] == \"+\":\n return float(tokens[1])\n elif tokens[0] == \"-\":\n return -float(tokens[1])\n\n # Remove superfluous outer parentheses.\n if tokens[0] == \"(\" and tokens[-1] == \")\":\n return evaluate(tokens[1:-1])\n # First evaluate contents of parentheses.\n try:\n b1 = tokens.index(\"(\")\n b2 = len(tokens) - 1 - tokens[::-1].index(\")\")\n return evaluate(tokens[:b1] + [evaluate(tokens[b1 + 1:b2])]\n + tokens[b2 + 1:])\n except ValueError:\n # No open parentheses found.\n pass\n # Evaluate / and * from left to right.\n for i, token in enumerate(tokens[:-1]):\n if token == \"/\":\n return evaluate(tokens[:i-1] + [float(tokens[i-1])\n / float(tokens[i+1])] + tokens[i+2:])\n if token == \"*\":\n return evaluate(tokens[:i-1] + [float(tokens[i-1])\n * float(tokens[i+1])] + tokens[i+2:])\n # Evaluate + and - from left to right.\n for i, token in enumerate(tokens[:-1]):\n if token == \"+\":\n return evaluate(tokens[:i-1] + [float(tokens[i-1])\n + float(tokens[i+1])] + tokens[i+2:])\n if token == \"-\":\n return evaluate(tokens[:i-1] + 
[float(tokens[i-1])\n - float(tokens[i+1])] + tokens[i+2:])\n\n return evaluate(tokens)", "def vars(svars):\n return np.array([pm.var(var) for var in svars.split()])", "def string_to_list(str):\n global legal_operands\n\n op_list = []\n string_iterator = 0\n while string_iterator < len(str): # Go over the string.\n\n # If found a number, find the end of it and add it to the list.\n if str[string_iterator] in legal_operands:\n start_index = string_iterator # Start index of the number.\n while string_iterator < len(str): # Find the end index of\n # the number.\n if str[string_iterator] in legal_operands:\n string_iterator = string_iterator + 1\n else:\n break\n\n # Append to the list the number that has been found.\n op_list.append(float(str[start_index:string_iterator]))\n\n # Insert the operators to the list.\n if string_iterator < len(str):\n while string_iterator < len(str):\n if str[string_iterator] in o.legal_operators: # If found\n # a legal operator, add it to the list.\n op_list.append(str[string_iterator])\n string_iterator = string_iterator + 1\n else:\n break\n\n return op_list", "def gpgsa_convert(line):\r\n gps = line.strip().split(',')\r\n #check data\r\n if gps[2] == '1':\r\n return\r\n if gps[2] == '2':\r\n fix = '2D fix'\r\n else:\r\n fix = '3D fix'\r\n return [fix]", "def parse3DList(self,string):\r\n string = string.replace(\"[\",\"\")\r\n string = string.replace(\"]],\", \"**\")\r\n string = string.replace(\"],\",\"*\")\r\n string = string.replace(\"]\", \"\")\r\n string = string.split(\"**\")\r\n temp = []\r\n for i in string:\r\n temp.append(i.split(\"*\"))\r\n string = copy.deepcopy(temp)\r\n for i in xrange(len(string)):\r\n for j in xrange(len(string[i])):\r\n string[i][j] = string[i][j].split(\",\")\r\n for i in xrange(len(string)):\r\n for j in xrange(len(string[i])):\r\n for k in xrange(len(string[i][j])):\r\n string[i][j][k] = float(string[i][j][k])\r\n string[i][j] = list(string[i][j])\r\n return string", "def _parse(val: str):\n\n if not isinstance(val, str):\n raise TypeError(\"Method requires string input\")\n\n value = re.findall(r'^([-+]?\\d*\\.\\d*(?=\\s)|\\d+(?=\\s))', val)\n if not (value and val[:len(value[0])] == value[0]):\n return val, None\n\n # string starts with value\n value = value[0]\n val = val[len(value):]\n\n val = val.strip()\n if val:\n unit = val\n else:\n unit = 'dimensionless'\n\n return value, unit" ]
[ "0.73184675", "0.5792925", "0.56992584", "0.56166935", "0.5463357", "0.53742534", "0.5349036", "0.53430367", "0.5340777", "0.5338637", "0.53287876", "0.5321281", "0.531979", "0.5301091", "0.5295797", "0.5219765", "0.5208033", "0.516347", "0.51579887", "0.5147878", "0.51474804", "0.5141542", "0.51373655", "0.5130437", "0.51035005", "0.50550216", "0.50467587", "0.5041345", "0.5029606", "0.501752" ]
0.6465308
1
Upsert the content of a DataFrame into a db table
def upsert(engine, table_name, dataframe): # Reflect table from db metadata = sqla.MetaData(bind=engine) table = sqla.Table( table_name, metadata, autoload=True, autoload_with=engine) # Unpackage DataFrame records = [] for _, row in dataframe.iterrows(): # Edge case: serial primary keys, e.g., may not be in the row yet records.append({col.name: row[col.name] for col in table.columns if col.name in row}) # get list of fields making up primary key primary_keys = [ key.name for key in sqla.inspection.inspect(table).primary_key] assert len(primary_keys) == 1 # assemble base statement stmt = sqla.dialects.postgresql.insert(table).values(records) # Isolate non-primary keys for updating update_dict = { col.name: col for col in stmt.excluded if not col.primary_key} # Edge case: all columns make up a primary key # Then upsert <--> 'on conflict do nothing' if update_dict == {}: LOGGER.warning('No updateable columns found for table %s. Skipping upsert.', table_name) # Still want to upsert without error. # TODO: implement insert_ignore() # insert_ignore(table_name, records) return None # Assemble statement with 'on conflict do update' clause update_stmt = stmt.on_conflict_do_update( index_elements=primary_keys, set_=update_dict, ) LOGGER.debug(update_stmt) # execute with engine.connect() as conn: result = conn.execute(update_stmt) return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def insert_data(df, database, table, db_uri):\n try:\n engine = sqlalchemy.create_engine(db_uri)\n df = create_hash_id(df)\n\n def create_insert_sql(x):\n cols = \"`\" + \"`,`\".join(list(df.columns)) + \"`\"\n values = \"\\'\" + \"\\',\\'\".join(list(x)) + \"\\'\"\n sql = f\"INSERT INTO `{database}`.`{table}` ({cols}) VALUES ({values});\"\n try:\n engine.execute(sql)\n except exc.IntegrityError:\n pass\n\n df.apply(lambda x: create_insert_sql(x), axis=1)\n # df.to_sql(name=table, con=engine, if_exists='append', index=False)\n except Exception as e:\n raise Exception(str(e))", "def df_to_db(dataframe, tablename, engine,\n index=False, index_label=None, if_exists='append',\n chunksize=100000):\n dataframe.to_sql(tablename,\n con=engine,\n index=index,\n index_label=index_label,\n if_exists=if_exists,\n chunksize=chunksize\n )", "def write_to_db(df, table_name):\n df = df.assign(_xerum_import_ts=pd.Timestamp.now())\n df.columns = map(str.lower, df.columns)\n df.to_sql(table_name, con=engine, if_exists='replace', index=False, method='multi')\n return queries.row_cnt()[\"row_cnt\"]", "def save_to_db(\n df: pd.DataFrame,\n collection: pymongo.collection.Collection,\n replace: bool\n ):\n records = df.to_dict(\"records\")\n if replace:\n collection.drop()\n collection.insert_many(records)", "def update_db(self, data_frame: pd.DataFrame):\n\n cols = \"`,`\".join([str(i) for i in data_frame.columns.tolist()])\n for _, row in data_frame.iterrows():\n sql = \"INSERT OR IGNORE INTO Weather (`\" + cols + \"`) VALUES (\" + \"?,\"*(len(row)-1) + \"?)\"\n self.connection.execute(sql, tuple(row))\n self.connection.commit()", "def upsertToDB(self,table,records):\r\n\t\ttry:\r\n\t\t\t#engine, meta = self.connectToDB(dbName)\r\n\t\t\tconn = engine.connect()\r\n\t\t\t#table = Table(tableName, meta)\r\n\r\n\t\t\t# Check data type of records\r\n\t\t\tif isinstance(records, pd.DataFrame):\r\n\t\t\t\trecords = records.to_dict('records')\r\n\t\t\telif isinstance(records, dict):\r\n\t\t\t\tpass\r\n\t\t\telse:\r\n\t\t\t\traise Exception(\"Record Type {} is wrong\".format(type(records)))\r\n\r\n\t\t\t# Check if there is any column not in table\r\n\t\t\t\"\"\"\r\n\t\t\tselect_stmt = select([table])\r\n\t\t\tresult = conn.execute(select_stmt)\r\n\t\t\t\r\n\t\t\tresult.close()\r\n\t\t\t\"\"\"\r\n\r\n\t\t\t# To do, check if batch upsert is possible\r\n\t\t\tfor record in records:\r\n\t\t\t\tinsert_stmt = insert(table).values(record)\r\n\t\t\t\t#record.pop(pk)\r\n\t\t\t\tupsert_stmt = insert_stmt.on_duplicate_key_update(**record)\r\n\t\t\t\tconn.execute(upsert_stmt)\r\n\r\n\t\t\t#res.close()\r\n\t\t\tconn.close()\r\n\t\t\tself.logger.info(\"{} reocrds have been upsert into table {}\".format(len(records),table.__table__))\r\n\t\texcept Exception as e:\r\n\r\n\t\t\tif re.search('Unconsumed column names',str(e)):\r\n\t\t\t\tself.logger.error('On line {} - {}'.format(sys.exc_info()[2].tb_lineno, e))\r\n\t\t\telse:\r\n\t\t\t\tself.logger.error('On line {} - {}'.format(sys.exc_info()[2].tb_lineno, e))\r\n\t\t\t\texit(1)", "def store_partial_df(df, table_name):\r\n cursor = hana.cursor()\r\n pbar = tqdm(total=len(df.index))\r\n\r\n for index, row in df.iterrows():\r\n pbar.update(1)\r\n statement = 'INSERT INTO \\\"NIKOLAI\\\".\\\"'+table_name+'\\\" ('\r\n for colname in map(str, row.index.tolist()):\r\n statement += '\\\"'+ colname + '\\\",'\r\n statement = statement[:-1] +') VALUES ('\r\n #for value in map(str, row.tolist()):\r\n for value in row.tolist():\r\n if value != value:\r\n statement += 'null,'\r\n elif 
isinstance(value, int) or isinstance(value, float):\r\n statement += str(value) + ','\r\n else:\r\n statement += '\\''+ str(value) + '\\','\r\n\r\n cursor.execute(statement[:-1] +');')\r\n\r\n pbar.close()\r\n hana.commit()", "def insert_into_mysql(df, tablename, cols_id=None, engine=None):\n\n if engine is None:\n engine = _get_mysql_engine()\n\n # If date is present, delete rows of same date\n if \"date\" in df.columns:\n # Transform date to string to avoid SQL problems\n df[\"date\"] = pd.to_datetime(df[\"date\"]).dt.strftime(\"%Y-%m-%d\")\n\n # Get all dates present in df to insert\n dates = df[\"date\"].unique()\n\n # Delete data of same dates as the data that will be inserted\n sentence = c.DELETE.format(table=tablename, dates=\"','\".join(dates))\n with engine.connect() as connection:\n connection.execute(sentence)\n\n # Truncate all data or keep non duplicated\n else:\n\n # Try to merge with existing data\n if cols_id is not None:\n # retrive existing data\n df_prev = get_df_mysql(tablename, engine=engine)\n\n # Drop duplicated data\n df = pd.concat([df, df_prev], sort=False)\n df.drop_duplicates(subset=cols_id, inplace=True)\n\n with engine.connect() as connection:\n connection.execute(c.TRUNCATE.format(tablename))\n\n # Insert into SQL\n df.to_sql(name=tablename, con=engine, if_exists=\"append\", index=False)\n\n log.info(\"Data inserted into %s table with shape %s\" % (tablename, df.shape))", "def syncToDBUsingPandas(self,df,db,table,syncType='append'):\r\n\t\tself.logger.info(\"Calling syncToDBUsingPandas function\")\r\n\t\ttry:\r\n\t\t\t#df['created_time'] = datetime.datetime.now()\r\n\t\t\t#df['updated_time'] = datetime.datetime.now()\r\n\t\t\tdblink = 'mysql+mysqldb://{}:{}@{}/{}?charset=utf8'.format(self.__username,self.__password,self.__host,db)\r\n\t\t\tengine = create_engine(dblink,encoding='utf-8')\r\n\t\t\t#df.to_sql(table,engine,chunksize=1000,dtype={\"Agency\": String(50),\"Platform\":String(50),\"Likes\":Integer},index=False,if_exists='append',encoding='utf-8')\r\n\t\t\tdf.to_sql(table,engine,chunksize=1000,index=False,if_exists=syncType)\r\n\r\n\t\texcept Exception as e:\r\n\t\t\tself.logger.error('On line {} - {}'.format(sys.exc_info()[2].tb_lineno,e))\r\n\t\t\texit(1)", "def insert_df(conn, table_name: str, df: pd.DataFrame):\n # To CSV\n output = StringIO()\n df.to_csv(output, sep='\\t', header=False)\n output.seek(0)\n\n # Insert data\n cursor = conn.cursor()\n\n if isinstance(df.index, pd.MultiIndex):\n columns = list(df.index.names) + list(df.columns)\n else:\n columns = [df.index.name] + list(df.columns)\n\n cursor.copy_from(output, table_name, sep='\\t', null='', columns=columns)\n conn.commit()\n cursor.close()", "def insert_df(df, cur, table):\n\n df_columns = list(df)\n\n string_buffer = io.StringIO()\n df.to_csv(string_buffer, index=False, header=False, sep='|')\n string_buffer.seek(0)\n\n tmp_table = \"tmp_table\"\n\n cur.execute(\n f\"\"\"\n CREATE TEMP TABLE {tmp_table}\n AS\n SELECT * \n FROM {table}\n WITH NO DATA\n \"\"\"\n )\n\n cur.copy_from(file=string_buffer, table=tmp_table, sep='|', null=\"\", columns=df_columns)\n\n cur.execute(\n f\"\"\"\n INSERT INTO {table}\n SELECT *\n FROM {tmp_table}\n ON CONFLICT DO NOTHING\n \"\"\"\n )\n\n cur.execute(\n f\"\"\"\n DROP TABLE {tmp_table}\n \"\"\"\n )", "def upsert(self, tablename, keyfld, df, robust=True):\n\n # Check that table exists and keyfld exists both in the Table and the\n # Dataframe\n if robust:\n if tablename in self.getTableNames():\n if not ((keyfld in df.columns) and\n (keyfld in 
self.getColumnNames(tablename))):\n print(\"Upsert function failed: Key field does not exist\",\n \"in the selected table and/or dataframe\")\n return\n else:\n print('Upsert function failed: Table does not exist')\n return\n\n # Reorder dataframe to make sure that the key field goes first\n flds = [keyfld] + [x for x in df.columns if x != keyfld]\n df = df[flds]\n\n if robust:\n # Create new columns if necessary\n for clname in df.columns:\n if clname not in self.getColumnNames(tablename):\n if df[clname].dtypes == np.float64:\n self.addTableColumn(tablename, clname, 'DOUBLE')\n else:\n if df[clname].dtypes == np.int64:\n self.addTableColumn(tablename, clname, 'INTEGER')\n else:\n self.addTableColumn(tablename, clname, 'TEXT')\n\n # Check which values are already in the table, and split\n # the dataframe into records that need to be updated, and\n # records that need to be inserted\n keyintable = self.readDBtable(tablename, limit=None,\n selectOptions=keyfld)\n keyintable = keyintable[keyfld].tolist()\n values = [tuple(x) for x in df.values]\n values_insert = list(filter(lambda x: x[0] not in keyintable, values))\n values_update = list(filter(lambda x: x[0] in keyintable, values))\n\n if len(values_update):\n self.setField(tablename, keyfld, df.columns[1:].tolist(),\n values_update)\n if len(values_insert):\n self.insertInTable(tablename, df.columns.tolist(), values_insert)\n\n return", "def df2db(self, df: pd.DataFrame, tab_name):\n\n self.execute(\"set hive.execution.engine = tez\")\n self.execute(\"set tez.queue.name = sephora_internal\")\n self.execute(\"drop table if exists {table_name}\".format(table_name=tab_name))\n df.to_sql(tab_name, self.engine, method='multi', index=False)", "def df2sql(df, table_name, database_url):\r\n conn = sqlite3.connect(database_url)\r\n df.to_sql(table_name, conn, if_exists='replace', index = False)\r\n conn.commit()", "def insert_into_db(dataframe, query):\n for col, row in dataframe.iterrows():\n cursor.execute(query.format(tuple(row)))\n print (tuple(row))\n connection.commit()", "def upsert(con: Connectable,\n df: pd.DataFrame,\n table_name: str,\n if_row_exists: str,\n schema: Union[str, None] = None,\n create_schema: bool = False,\n create_table: bool = True,\n add_new_columns: bool = False,\n adapt_dtype_of_empty_db_columns: bool = False,\n chunksize: Union[int, None] = None,\n dtype: Union[dict, None] = None,\n yield_chunks: bool = False) -> UpsertResult:\n # verify arguments\n if if_row_exists not in ('ignore', 'update'):\n raise ValueError('if_row_exists must be \"ignore\" or \"update\"')\n if chunksize is None:\n chunksize = len(df) # we'll attempt to insert the whole df at once\n else:\n validate_chunksize_param(chunksize=chunksize)\n\n # create object that will execute all SQL operations\n executor = Executor(df=df, table_name=table_name, schema=schema, create_schema=create_schema,\n create_table=create_table, dtype=dtype, add_new_columns=add_new_columns,\n adapt_dtype_of_empty_db_columns=adapt_dtype_of_empty_db_columns)\n\n # execute SQL operations\n if not yield_chunks:\n executor.execute(connectable=con, if_row_exists=if_row_exists, chunksize=chunksize)\n return None\n else:\n return executor.execute_yield(connectable=con, if_row_exists=if_row_exists, chunksize=chunksize)", "def df2db(self, df: pd.DataFrame, tab_name, append=False):\n if append:\n df.to_sql(name=tab_name, con=self.engine, if_exists='append', index=False)\n else:\n self.execute(\"drop table if exists {table_name}\".format(table_name=tab_name))\n df.to_sql(name=tab_name, 
con=self.engine, if_exists='fail', index=False)", "def create_db_dataframe(self, df, table_name):\n try:\n print(\"-I- Writing \" + table_name + \" with DataFrame\")\n df.to_sql(name=table_name, con=self.engine, if_exists='replace', index=True)\n print(\"-I- Write complete.\")\n except Exception as e:\n print(\"-W- \" + str(e))", "def append_db_dataframe(self, df, table_name):\n try:\n print(\"-I- Appending \" + table_name + \" with DataFrame\")\n df.to_sql(name=table_name, con=self.engine, if_exists='append', index=True)\n print(\"-I- Append complete.\")\n except Exception as e:\n print(\"-W- \" + str(e))", "def upsert_table(self, df_diff, n_batch=5000, debug=False):\n\n n_items = len(df_diff)\n queries = []\n upsert_query = ' '.join(\n ('INSERT INTO \"{tablename}\"(\"cartodb_id\", \"{colname}\")',\n 'VALUES ({cartodb_id}, {colval})',\n 'ON CONFLICT (\"cartodb_id\")',\n 'DO UPDATE SET \"{colname}\" = {colval}',\n 'WHERE EXCLUDED.\"cartodb_id\" = {cartodb_id};'))\n n_batches = n_items // n_batch\n batch_num = 1\n for row_num, row in enumerate(df_diff.iteritems()):\n # if debug: print(row)\n cartodb_id = row[0][0]\n colname = row[0][1]\n pgtype = dtype_to_pgtype(self[colname].dtype, colname)\n # fill query template\n temp_query = upsert_query.format(\n tablename=self.get_carto_tablename(),\n colname=colname,\n colval=numpy_val_to_pg_val(self.loc[cartodb_id][colname],\n pgtype),\n cartodb_id=cartodb_id)\n\n queries.append(temp_query)\n\n # run batch if at n_batch queries, or at last item\n if (len(queries) == n_batch) or (row_num == n_items - 1):\n batchquery = '\\n'.join(queries)\n print(\"{curr_batch} of {n_batches}\".format(\n curr_batch=batch_num,\n n_batches=n_batches))\n batch_num = batch_num + 1\n if debug: print(\"Num chars in batch: {}\".format(len(batchquery)))\n if debug: print(batchquery)\n\n # send batch query to carto\n resp = self.carto_sql_client.send(batchquery)\n if debug: print(resp)\n\n # clear for another batch\n queries = []\n\n return None", "def add_to_database(self, df):\n \n from sqlalchemy import create_engine\n \n engine = create_engine(\"mysql://dublinbus:somepaawsord/researchpracticum\")\n con = engine.connect()\n df.to_sql(con=con, name='TimeTables', if_exists='append')\n con.close()", "def save_to_db(df_incidences):\n\n engine = db.get_engine()\n\n df = df_incidences\n\n dict_db_cols = {'BFS_Nr': 'bfsNr', 'Datum': 'date', '14d_Incidence': 'incidence',\n 'Neue_Faelle_Gemeinde': 'cases', 'Rolling_Sum': 'cases_cumsum_14d'}\n\n df_db = df[dict_db_cols.keys()].copy()\n df_db.rename(columns=dict_db_cols, inplace=True)\n\n df_db.to_sql('incidence', engine, if_exists='append', index=False)", "def append_data(self, table_name, df):\n\t\tself.__check_colnames(table_name, df)\n\t\tif self.__dbfile is not None:\n\t\t\tdf.to_sql(table_name, self._conn, index=False, if_exists=\"append\")", "def save_data(df: pd.DataFrame, database_filename: str) -> None:\n engine = create_engine(f\"sqlite:///{database_filename}\")\n df.to_sql(Path(database_filename).stem, engine, index=False, if_exists=\"replace\")", "def save_data(df, database_filename):\n engine = create_engine(f\"sqlite:///{database_filename}\")\n df.to_sql(\"YourTableName\", engine, index=False, if_exists=\"replace\")", "def InsertDFtoDB(table: str, schema: str, dataframe: pd.DataFrame, dtype=None, con=None, engine: Engine=None):\r\n if engine is None:\r\n if con is None:\r\n raise TypeError('Either con or engine must be provided')\r\n else:\r\n con_url = 
\"mssql+pyodbc://{user}:{pw}@{server}/{db}?driver=SQL+Server+Native+Client+11.0\"\r\n con_url = con_url.format(user=con_dict_agentext_pro['user'], pw=con_dict_agentext_pro['pw'],\r\n server=con_dict_agentext_pro['server'], db=con_dict_agentext_pro['db'])\r\n engine = create_engine(con_url)\r\n \r\n print(\"Start Insertion\")\r\n dataframe.to_sql(name=table, con=engine, if_exists='append', index=False, schema=schema, dtype=dtype)\r\n print(\"Finish Insertion\")", "def Insert_To_Snowflake(self, df_to_insert, db_table, truncate=True, d_params=None, identifiers_flag=False, over_write_field=None):\n try:\n print('Refresh Started')\n ctx = self.__Connect_To_Snowflake(d_params)\n cs = ctx.cursor()\n result = self.__checkDBTables(cs, db_table)\n if result:\n i = 1 # no action required, the table exists in the database\n else:\n print(f\"Table doesn't exists, creating table {db_table}\")\n ctx.cursor().execute(f\"CREATE OR REPLACE TABLE {self.__db_table_dict[db_table]}\")\n if over_write_field is not None:\n condition = f\"\"\"{over_write_field} in ({', '.join([\"'\"+str(x)+\"'\" for x in df_to_insert[over_write_field].unique()])})\"\"\"\n allrows = cs.execute(f\"delete from {db_table} where {condition}\").fetchall()\n if truncate:\n allrows=cs.execute(f\"\"\" truncate table {db_table} \"\"\").fetchall()\n\n if identifiers_flag:\n success, nchunks, nrows, _ = write_pandas(ctx, df_to_insert, db_table, quote_identifiers=True)\n else:\n success, nchunks, nrows, _ = write_pandas(ctx, df_to_insert, db_table, quote_identifiers=False)\n ctx.close()\n print(f'Refresh Sucessfull :: Success:{str(success)}; Chunks: {str(nchunks)}; Rows: {str(nrows)}')\n\n except Exception as ex:\n # sys.stdout = open(\"log.txt\", \"w\")\n print(f\"Unable to the write {db_table} to snowflake - exiting run\")\n print(datetime.now().replace(microsecond=0), ex)\n sys.exit()\n return None", "def write_df_to_db(df, db_path):\n print \"Writing to 'results' table in db: \", db_path\n conn = sqlite3.connect(db_path)\n df.to_sql(\"results\", con=conn,if_exists='replace')", "def appendData(self, dataframe, tableName, truncate=False):\n if truncate:\n truncateSetting = 'replace'\n else:\n truncateSetting = 'append'\n dataframe.to_sql(name=tableName, con=self.writeConn, if_exists=truncateSetting, index=False)", "def to_database(self, dataframe, name, db, if_exists, chunksize=50000, dtypes=None, index=False, save_if_error=False):\n\n if dtypes is None:\n dtypes = {}\n conn = self.connect_to_database(db=db)\n if dataframe.shape[0] != 0:\n print(\n \"Writing to table : \" + name + \" and database : \" + db + \" if exists : \" + if_exists + \" shape : \" + str(\n dataframe.shape))\n try:\n dataframe.to_sql(name=name,\n con=conn,\n if_exists=if_exists,\n chunksize=chunksize,\n dtype=dtypes,\n index=index)\n conn.connect().connection.close()\n except Exception as e:\n print(e)\n print(\"Bug in uploading dataframe, it has been writen in error_uploading{}_{}.csv\".format(db, name))\n if save_if_error:\n self.to_csv(dataframe=dataframe, file_path=\"error_uploading{}_{}.csv\".format(db, name), index=index)\n\n else:\n print(\"Dataframe is empty\")" ]
[ "0.72344434", "0.71951675", "0.715494", "0.7140779", "0.70340526", "0.7006468", "0.6899513", "0.6865268", "0.6854902", "0.68426687", "0.67907673", "0.6712339", "0.6708589", "0.670759", "0.67042094", "0.66936225", "0.6693597", "0.66493917", "0.6645235", "0.659", "0.657526", "0.65700567", "0.65597373", "0.65420306", "0.6534332", "0.65339416", "0.64892375", "0.64611703", "0.64453024", "0.6433448" ]
0.7500038
0
Writes the dictionary 'src' to the table named 'table_name'.
def write(engine, table_name, src, return_id=True, do_update=False): query = build_upsert_query(engine, table_name, src, do_update=do_update) LOGGER.debug(query) engine.execute(query) if return_id: return fetch_id(engine, table_name, src)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def copy_table(source_table, destination_table, db='default'):\n try:\n with connections[db].cursor() as cursor:\n cursor.execute('CREATE TABLE IF NOT EXISTS %s LIKE %s;' % (destination_table, source_table))\n except:\n pass", "def _schema_write(self, table: TableSchema) -> None:\n with open(self.schemas / (table['name'] + '.json'), 'w') as f:\n json.dump(table, f, indent=True)", "def save(file, table):\n pq.write_table(pa.Table.from_pandas(table), file)", "def write_table(table, file_path):\n\n\twith open(file_path, 'w') as file:\n\t\tfile.write(table)", "def export_sql(meta, data, output):\n\n tables = [table for table in meta.sorted_tables if table.name in data]\n preparer = IdentifierPreparer(meta.bind.dialect)\n prepare_column = lambda column: preparer.format_column(column, name=column.name)\n output_file = open(output, 'w')\n\n for table in tables:\n columns = ', '.join([ prepare_column(column) for column in table.columns.values() ])\n for row in data[table.name].values():\n values = list(map(_transform, list(row.values())))\n insert = \"INSERT INTO %s (%s) VALUES (%s);\\n\" % (\n preparer.format_table(table, name=table.name),\n columns,\n ', '.join(values)\n )\n output_file.write(insert)\n\n output_file.close()", "def Write(self):\n table_data = self._TABLE.build(self._timestamps)\n self._zip_file.writestr(self._stream_name, table_data)", "def update_table_in_file(table, source_file):\n with open(source_file, 'r') as source, \\\n tempfile.NamedTemporaryFile('w', delete=False) as temp:\n source_lines = source.readlines()\n\n table_start = index_tag_in_lines(source_lines, tag='Table Start')\n table_end = index_tag_in_lines(source_lines, tag='Table End')\n print(f'Found table_start tag at line no: {table_start}')\n print(f'Found table_end tag at line no: {table_end}')\n assert table_end > table_start, 'Table End must be after Table Start'\n\n table_written = False\n for line_no, line in enumerate(source_lines):\n if line_no <= table_start or line_no >= table_end:\n temp.write(line)\n elif not table_written: # write table once\n temp.writelines(table)\n table_written = True\n\n backup_file = source_file.with_suffix('.md.bkp')\n os.rename(source_file, backup_file)\n print(f'Original file backed up at: {backup_file}')\n\n shutil.copy(temp.name, source_file)", "def copy_table(self, source_table_name, dest_table_name):\n logger.info(f\"Creating {dest_table_name} from {source_table_name}\")\n try:\n self._submit_single_q(COPY_TABLE_DDL.format(new_table_name=dest_table_name, old_table_name=source_table_name))\n except connector.errors.ProgrammingError as e:\n logger.error(f\"Table {dest_table_name} already exists. 
Exiting.\")\n return\n except Exception as e:\n logger.error(\"Failed creating table\")\n raise\n\n try:\n q = COPY_TABLE_DATA_DML.format(new_table_name=dest_table_name, old_table_name=source_table_name)\n self._submit_single_q(COPY_TABLE_DATA_DML.format(new_table_name=dest_table_name, old_table_name=source_table_name))\n except Exception as e:\n logger.error(\"Failed inserting data\")\n raise e\n\n # print(self._submit_single_q(\"checksum table xfrm_product, staging_product\")", "def save_table(date, table):\n if os.path.isfile(date+\".table\"):\n file_using = open(date+\".table\", \"w\")\n else:\n return False\n file_using.seek(0)\n file_using.truncate()\n for line in table:\n file_using.write(\"{},{},{},{},{}\\n\".format(line[0], line[1], line[2], line[3], line[4]))\n file_using.close()", "def copyData(self, src_schema, src_table, src_columns, dest_schema, dest_table, dest_columns):\r\n sql = 'INSERT INTO {} ( {} ) SELECT {} FROM {}'.format(self.encodeTableName(dest_schema, dest_table), ','.join(dest_columns),\r\n ','.join(src_columns), self.encodeTableName(src_schema, src_table))\r\n return self.runSql(sql)", "def write_data_to_result_table(self, id_column, path_src,\n path_result, size_of_result_file):\n self.cursor.execute(\"\"\"INSERT INTO result_files\n VALUES('1','2','3','4');\"\"\")", "def write_to(self, fname, **kwargs):\n data = self.to_Table()\n data.write(fname, **kwargs)", "def import_table(ctx: DataFunctionContext, table_name: str, copy: bool = True):\n target_storage = ctx.execution_config.get_target_storage()\n if ensure_bool(copy):\n as_identifier = target_storage.get_api().get_quoted_identifier\n sql = f\"select * from {as_identifier(table_name)}\"\n # TODO: DRY this pattern\n sdf = SqlDataFunctionWrapper(sql)\n\n def get_sql(*args, **kwargs):\n return sql\n\n sdf.get_compiled_sql = get_sql\n return sdf(ctx)\n else:\n ctx.emit(\n name=table_name,\n storage=target_storage,\n data_format=\"table\",\n create_alias_only=True,\n )", "def htable_put(table, key, value):", "def write_to_destination(DbClass, src_vals, dest_timestamps, set_cols):\n session = get_db_session(\"dest\")\n write_count = 0\n for sv in src_vals:\n if sv[\"timestamp\"] in dest_timestamps:\n print(f\"{sv['timestamp']} already in destination database\")\n continue\n new_row = DbClass()\n for k, v in sv.items():\n setattr(new_row, k, v)\n for k, v in set_cols.items():\n setattr(new_row, k, v)\n print(f\"adding data for {sv['timestamp']}\")\n session.add(new_row)\n write_count += 1\n session.commit()\n session_close(session)\n print(f\"Wrote {write_count} rows to destination database\")\n return True", "def Write(self):\n table_data = self._TABLE.build(self._offsets)\n self._zip_file.writestr(self._stream_name, table_data)", "def table_from_frame(self, frame, table_name, conn=None, if_exists='fail', index=False,\n index_label=None, schema=None, chunksize=None, copy=True):\n \n table = SQLTable(table_name, self, frame=frame, table_setup=True, index=index,\n if_exists=if_exists, index_label=index_label, schema=schema)\n \n table.create()\n \n # check for potentially case sensitivity issues (GH7815)\n if table_name not in self.engine.table_names(schema=schema or self.meta.schema):\n warnings.warn(\"The provided table name '{0}' is not found exactly \"\n \"as such in the database after writing the table, \"\n \"possibly due to case sensitivity issues. 
Consider \"\n \"using lower case table names.\".format(name), UserWarning)\n \n \n table.insert(conn=conn, bulk=True, chunksize=chunksize, copy=copy)", "def save_table(data, out_file):\n logging.info(\"Saving table\")\n #header, data = data\n #out = pd.DataFrame(data=data, columns = header.keys())\n joblib.dump(data, out_file)", "def add_table_to_hdf(self, run_group, type_dict, data, name = 'bla',filename = []):\n\t\tif filename == []:\n\t\t\tfilename = self.edf_operator.inputFileName\n\t\t\t\n\t\tthis_table = self.h5f.createTable(run_group, name, type_dict, '%s in file %s' % (name, self.edf_operator.inputFileName))\n\t\t\n\t\trow = this_table.row\n\t\tfor r in data:\n\t\t\tfor par in r.keys():\n\t\t\t\trow[par] = r[par]\n\t\t\trow.append()\n\t\tthis_table.flush()", "def table_dump_query(table_name, path, rows_per_dump):\n return\"\"\"\n DEFINE TEMP-TABLE tt NO-UNDO LIKE %(table_name)s\n FIELD rec_id AS RECID\n FIELD epoch_time AS INT64.\n\n DEFINE VARIABLE epoch AS DATETIME NO-UNDO.\n DEFINE VARIABLE unixTime AS INT64 NO-UNDO.\n DEFINE VARIABLE htt AS HANDLE NO-UNDO.\n DEFINE VARIABLE cFileName AS CHARACTER NO-UNDO FORMAT \"x(60)\".\n DEFINE VARIABLE rowCount as INT64 NO-UNDO.\n\n epoch = DATETIME(1,1,1970,0,0,0,0).\n rowCount = 0.\n\n htt = TEMP-TABLE tt:HANDLE.\n\n FOR EACH platte.%(table_name)s NO-LOCK:\n IF rowCount = %(rows_per_dump)s THEN DO: \n unixTime = interval(NOW, epoch, \"milliseconds\").\n cFileName = \"%(path)s/t__%(table_name)s__e__\" + STRING(unixTime) + \"__insert.json\".\n htt:WRITE-JSON(\"FILE\", cFileName + \"_partial\", TRUE).\n OS-RENAME VALUE(cFileName + \"_partial\") VALUE(cFileName).\n rowCount = 0.\n EMPTY TEMP-TABLE tt.\n END.\n rowCount = rowCount + 1.\n CREATE tt.\n BUFFER-COPY %(table_name)s TO tt.\n tt.rec_id = RECID(%(table_name)s).\n unixTime = interval(NOW, epoch, \"milliseconds\").\n tt.epoch_time = unixTime.\n END.\n unixTime = interval(NOW, epoch, \"milliseconds\").\n cFileName = \"%(path)s/t__%(table_name)s__e__\" + STRING(unixTime) + \"__insert.json\".\n htt:WRITE-JSON(\"FILE\", cFileName + \"_partial\", TRUE).\n OS-RENAME VALUE(cFileName + \"_partial\") VALUE(cFileName)\n \n\"\"\" % {'path': path, 'table_name': table_name, 'rows_per_dump': rows_per_dump}", "def __write_source(self, handle, nbr):\n try:\n source = self.database.get_source_from_handle(handle)\n self.__write_row(nbr, handle, source)\n except:\n source = \"NOT FOUND\"\n self.__write_row(nbr, handle, source)", "def export(self, out=sys.stdout):\n\n tablemodel = None\n for x in self.tables:\n if x.name == self.config.table:\n tablemodel = x\n \n if tablemodel is None:\n return\n \n # output the header\n tableinstance = tablemodel(self.dbpath)\n fieldnames = list(tableinstance.fieldnames()) \n out.write(\"\\t\".join(fieldnames) + \"\\n\") \n # output the table contents\n generator = DBGenerator(tablemodel(self.dbpath))\n for row in generator.next():\n temp = [str(row[_]) for _ in fieldnames]\n out.write(\"\\t\".join(temp) + \"\\n\")", "def store_in_sql(cache, db_handler, TABLE_NAME):\r\n if len(cache) == 0 and len(cache[0]) == 0:\r\n return\r\n\r\n # create new table for new streaming event\r\n if TABLE_NAME[0] == '':\r\n TABLE_NAME[0] = db_handler.create_table(cache[0])\r\n \r\n # write message to database\r\n for message in cache:\r\n db_handler.insert_entry(message)", "def save_dst_to_file(dst, dir_file):\n dst = dst.sort_values('event')\n store = pd.HDFStore(dir_file, \"w\", complib=str(\"zlib\"), complevel=4)\n store.put('dataframe', dst, format='table', data_columns=True)\n 
store.close()", "def store_hive_table(data, directory, file_name):\n table_name = directory + \".\" + file_name\n data.write.saveAsTable(table_name)", "def transfer(self, en_cls, table_name, db_name):\n self.into_en_map(en_cls, table_name, db_name, create=False)\n self._start()", "def generate_edge_tablename(src_label, label, dst_label):\n\n tablename = 'edge_{}{}{}'.format(\n src_label.replace('_', ''),\n label.replace('_', ''),\n dst_label.replace('_', ''),\n )\n\n # If the name is too long, prepend it with the first 8 hex of it's hash\n # truncate the each part of the name\n if len(tablename) > 40:\n oldname = tablename\n logger.debug('Edge tablename {} too long, shortening'.format(oldname))\n tablename = 'edge_{}_{}'.format(\n str(hashlib.md5(tablename.encode('utf-8')).hexdigest())[:8],\n \"{}{}{}\".format(\n ''.join([a[:2] for a in src_label.split('_')])[:10],\n ''.join([a[:2] for a in label.split('_')])[:7],\n ''.join([a[:2] for a in dst_label.split('_')])[:10],\n )\n )\n logger.debug('Shortening {} -> {}'.format(oldname, tablename))\n\n return tablename", "def save_table_scraperwiki(uniques,table,name):\n for row in table:\n scraperwiki.sqlite.save(\n unique_keys=uniques\n , data=row\n , table_name=name\n )", "def dump(self, packet):\n # TODO\n packet['type'] = \"table\"\n src = packet['src']\n packet['src'] = packet['dst']\n packet['dst'] = src\n\n table_list = []\n\n # TODO fill out table string with routing table\n table_string = \"\"\n # TODO asking for int indexes instead of string for route?\n for ip in self.updates.keys():\n # TODO have to fill ip address of peer\n\n entry = {'network' : self.updates[ip][MESG][NTWK], 'netmask' : self.updates[ip][MESG][NMSK], 'peer' : ip}\n table_list.append(entry)\n packet[MESG] = table_list\n msg = json.dumps(packet)\n #print(json.dumps(packet, sort_keys=True, indent=4))\n\n sock = self.sockets[src]\n sock.sendall(msg.encode())\n return True", "def copy_file_to_table(self, schema, table, filepath):\n fields = \", \".join(self.schemas[schema][table][0])\n sql = f'set role {self.write_role}; ' \\\n f'COPY {schema}.{table}( {fields} ) FROM stdin WITH DELIMITER \\',\\' CSV header;'\n return sql, open(filepath, 'r')" ]
[ "0.59626514", "0.5931484", "0.5827578", "0.5823976", "0.57344264", "0.57290435", "0.56961876", "0.5692005", "0.56391126", "0.56303656", "0.5589605", "0.5557386", "0.55331343", "0.5516523", "0.55136675", "0.5495206", "0.54833347", "0.5448401", "0.54384375", "0.5437824", "0.53929424", "0.53879285", "0.5386984", "0.53833133", "0.535674", "0.5339844", "0.533096", "0.53215206", "0.5315581", "0.53096384" ]
0.62851363
0
Gets the unique columns from the table's constraints.
def get_unique_columns(table): for constraint in table.constraints: if isinstance(constraint, sqla.UniqueConstraint): return constraint.columns # We should never get this far. # All tables in my db should have unique constraints assert False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unique_cols(self):\n return list(set([coord[1] for coord in self.landscape]))", "def get_attr_cols(self):\n all_cols = np.arange(self.col_count)\n attr_cols = np.setdiff1d(all_cols, self.time_cols)\n return attr_cols", "def constraints(self):\n ans = self.execute(self.commands.get_constraints(self.db.name, self.name))\n return [Constraint(*tup) for tup in ans]", "def columns(self):\n return set(self.native_schema)", "def get_all_columns(self):\n df = self.get_prep_data()\n col = [c for c in df.columns if c not in ['target', 'idd', 'ft_data_dt']]\n return col", "def _constraints_affecting_columns(self, table_name, columns, type='UNIQUE'):\r\n qn = self.quote_name\r\n\r\n if self.dry_run:\r\n raise ValueError(\"Cannot get constraints for columns during a dry run.\")\r\n columns = set(columns)\r\n rows = self.execute(\"\"\"\r\n SELECT user_cons_columns.constraint_name, user_cons_columns.column_name\r\n FROM user_constraints\r\n JOIN user_cons_columns ON\r\n user_constraints.table_name = user_cons_columns.table_name AND \r\n user_constraints.constraint_name = user_cons_columns.constraint_name\r\n WHERE user_constraints.table_name = '%s' AND\r\n user_constraints.constraint_type = '%s'\r\n \"\"\" % (qn(table_name), self.constraits_dict[type]))\r\n # Load into a dict\r\n mapping = {}\r\n for constraint, column in rows:\r\n mapping.setdefault(constraint, set())\r\n mapping[constraint].add(column)\r\n # Find ones affecting these columns\r\n for constraint, itscols in mapping.items():\r\n if itscols == columns:\r\n yield constraint", "def get_all_columns_name(input_glob):\n reader = tf.python_io.TableReader(input_glob,\n selected_cols=\"\",\n excluded_cols=\"\",\n slice_id=0,\n slice_count=1,\n num_threads=0,\n capacity=0)\n schemas = reader.get_schema()\n return set([col_name for col_name, _, _ in schemas])", "def missing_columns(self):\r\n _missing_columns = set(self.reqd_columns).difference(set(self.all_columns))\r\n return list(_missing_columns)", "def _primary_key_columns(cls):\n return [col for col in cls._columns() if getattr(cls, col).primary_key]", "def graphcols(self):\n columns = []\n table = self.__parent_table\n for col in self.__column_list:\n columns.append(table.table_column(col).title())\n return columns", "def _these_columns_cannot_annotate_exp_cons(self):\n _cols = set([]) #\n for param_name, req_cols in self.required_columns.items():\n _cols |= req_cols\n\n return _cols | self.other_useful_columns", "def get_columns(self):\n if self.dbtype == 'pg':\n q = \"select attname from pg_class, pg_attribute where relname = %s and attrelid = pg_class.oid and attnum > 0 and attisdropped = false;\"\n else:\n q = \"select columns.name from columns, tables where tables.name = %s and tables.id = columns.table_id;\"\n ret = []\n for (attr,) in self.query(q, self.tablename):\n ret.append(str(attr))\n return ret", "def get_constraints(model):\n with connection.cursor() as cursor:\n return connection.introspection.get_constraints(cursor, model._meta.db_table)", "def get_table_columns(self):\n raise NotImplementedError(\"Please implement this method\")", "def base_columns(self):\r\n _base_columns = set(self.all_columns).intersection(set(self.reqd_columns))\r\n return list(_base_columns)", "def table_constraints(self) -> 'outputs.TableConstraintsResponse':\n return pulumi.get(self, \"table_constraints\")", "def columns(self):\n cursor = self._connection.cursor()\n cursor.execute('PRAGMA table_info(' + self._table + ')')\n return [x[1] for x in cursor.fetchall()]", "def columns(self):\n cursor 
= self._connection.cursor()\n cursor.execute('PRAGMA table_info(' + self._table + ')')\n return [x[1] for x in cursor.fetchall()]", "def get_column_names(self):\n # here, creating combined column/volue column names for uniqueness\n colname_temp = list()\n for column in self.col_value:\n colname_temp.append(self.question_column + \"-\" + str(column))\n return colname_temp", "def _emphasized_columns(self) -> Iterable[int]:\n return range(self._PRIMARY)", "def columns(self, table_name):\n table = self._create_table(table_name)\n return [c.name for c in table.c]", "def columns(self):\n return self._columns.keys()", "def columns(self):\n result = self.execute(self.commands.table_columns(self.name))\n return [x[0] for x in result]", "def unique_constraint_reflection(self):\n return exclusions.closed()", "def getDataCols(data: List[Dict]) -> List[str]:\n notAudit = list(\n filter(\n partial(is_not, None),\n list(\n map(\n lambda x: x.get(\"col_name\")\n if x.get(\"is_audit_col\") == \"N\"\n else None,\n data,\n )\n ),\n )\n )\n primary = getPrimaryKeys(data)\n\n return [item for item in notAudit if item not in primary]", "def columns(self):\n return self._names_to_cols.values()", "def _columns(cursor, table):\n cursor.execute('''\n SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = %s\n ''', (table, ))\n return [column['column_name'] for column in cursor.fetchall()]", "def get_cols_drop():", "def get_sql_columns(self, request):\n cur = self.execute(request)\n col_name_list = [tuple[0] for tuple in cur.description]\n cur.close()\n return col_name_list", "def get_id_columns(df):\n id_cols = []\n cols= df.columns\n df_len = len(df)\n for column in cols:\n if df[column].nunique() == df_len:\n id_cols.append(column)\n\n return id_cols" ]
[ "0.65937907", "0.6296229", "0.6262106", "0.62196285", "0.61645097", "0.6027742", "0.6019557", "0.5972942", "0.59648323", "0.59523666", "0.5932074", "0.59089965", "0.5894359", "0.5856457", "0.58301574", "0.5824588", "0.5823334", "0.5823334", "0.5815539", "0.5815517", "0.58052593", "0.58051175", "0.5789492", "0.5783468", "0.57497424", "0.57493645", "0.57355297", "0.5722808", "0.5697853", "0.56709605" ]
0.85034263
0
Builds a raw SQL query for "upserting" data into the database.
def build_upsert_query(engine, table_name, src_dict, do_update=False): def _for_pgsql(value, dtype): """ Converts a python datatype to the appropriate string (including, e.g., \ the necessary single quotes and/or brackets ) for use in a raw \ postgresql query. Args: value: (various datatypes) the value in question dtype: str, the datatype Returns: str, with the necessary formatting """ if dtype.startswith(('int', 'float', 'double', 'numeric')): if value is None: return "Null" elif str(value).lower() == 'nan': return "'nan'" elif dtype.endswith('[]'): value = ', '.join([str(v) for v in value]) value = "'{" + value + "}'" return value else: return str(value) elif dtype.startswith('time'): if value is None: return "Null" else: return "'" + str(value) + "'" elif dtype.startswith('bool'): if value is None: raise ValueError("Error: bool should not be None.") else: if str(value).startswith(('t', 'T')): return str(True) else: return str(False) elif dtype.startswith('json'): # In this case, value itself should be a dict value = ','.join(['"{k}":"{v}"'.format(k=k, v=v) for k, v in value.items()]) value = "'{" + value + "}'" return value elif dtype == 'text[]': value = ', '.join(['"' + str(v) + '"' for v in value]) value = "'{" + str(value) + "}'" return value else: if str(value).startswith('$delim$') and\ str(value).endswith('$delim$'): return str(value) if '::' in str(value): value = str(value).split("::")[0].strip("'") return "'" + str(value) + "'" def _get_values(uprow, types): """ Gets a list of values for use in a raw SQL query, e.g., INSERT INTO table_name (column1, column2, ...) VALUES (value1, value2, ...); This function returns a string "value1, value2, ..." Args: uprow: dict, containing the values types: dict, containing the data types of the values Return: str, containing the values as described above. """ tmp_uprow = {k: _for_pgsql(v, types[k]) for k, v in uprow.items()} mappable = ",".join(["{" + str(k) + "}" for k in uprow.keys()]) values = mappable.format(**tmp_uprow) return values def _get_set_pairs(uprow, types): """ Gets a list of "set pairs" for use in a raw SQL query, e.g., INSERT INTO table_name (column1, column2, ...) VALUES (value1, value2, ...) ON CONFLOCT (column1) DO UPDATE SET column1=value1, column2=value2 This function returns a string "column1=value1, column=value2 Args: uprow: dict, containing the values types: dict, containing the data types of the values Return: str, containing the "set pairs" as described above. 
""" pairs = [] for key, val in uprow.items(): pairs.append("{0}={1}".format(key, _for_pgsql(val, types[key]))) return ", ".join(pairs) # Mirror table from DB meta = sqla.MetaData(bind=engine) insp = sqla.inspect(engine) table = sqla.Table(table_name, meta, autoload=True, autoload_with=engine) table_cols = [str(col).split('.')[1] for col in table.columns] # Collect dict entries that also appear in the table as a "row" uprow = {key: src_dict[key] for key in src_dict if key in table_cols} # Load defaults and collect types types = {} for column in insp.get_columns(table_name, default=True): name = column['name'] if (name not in uprow) and (name != 'id'): uprow[name] = column['default'] types[name] = str(column['type']).lower() # Build base query columns = "{keylist}".format(keylist=', '.join(uprow.keys())) values = _get_values(uprow, types) query = "INSERT INTO {table_name}\n".format(table_name=table_name) query += "({columns})\n".format(columns=columns) query += "VALUES\n" query += "({values})\n".format(values=values) # Fetch unique columns unique_constraints = insp.get_unique_constraints(table_name) # Handle potential conflicts if len(unique_constraints) > 0: unique_cols = insp.get_unique_constraints(table_name)[ 0]['column_names'] if len(unique_cols) > 1: unique_cols = ", ".join([str(col) for col in list(unique_cols)]) else: unique_cols = ', '.join(unique_cols) if do_update: set_clause = "ON CONFLICT ({unique_cols}) DO UPDATE SET\n".\ format(unique_cols=unique_cols) set_clause += _get_set_pairs(uprow, types) query += set_clause else: query += "ON CONFLICT ({unique_cols}) DO NOTHING\n".\ format(unique_cols=unique_cols) else: # No unique constraints, look for primary key instead primary_key = [c for c in table.columns if c.primary_key] if len(primary_key) == 1: primary_key, = primary_key # Ditch reference to foreign table if '.' in str(primary_key): primary_key = str(primary_key).split('.')[-1] else: tmp = [] for col in primary_key: # Ditch reference to foreign table if '.' in str(col): col = str(col).split('.')[-1] tmp.append(col) primary_key = ", ".join(tmp) if do_update: set_clause = "ON CONFLICT ({primary_key}) DO UPDATE SET\n".\ format(primary_key=primary_key) set_clause += _get_set_pairs(uprow, types) query += set_clause else: query += "ON CONFLICT ({primary_key}) DO NOTHING\n".\ format(primary_key=primary_key) query += ';' return query
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _generate_upsert_sql(mon_loc):\n mon_loc_db = [(k, _manipulate_values(v, k in TIME_COLUMNS)) for k, v in mon_loc.items()]\n all_columns = ','.join(col for (col, _) in mon_loc_db)\n all_values = ','.join(value for (_, value) in mon_loc_db)\n update_query = ','.join(f\"{k}={v}\" for (k, v) in mon_loc_db if k not in ['AGENCY_CD', 'SITE_NO'])\n\n statement = (\n f\"MERGE INTO GW_DATA_PORTAL.WELL_REGISTRY_STG a \"\n f\"USING (SELECT '{mon_loc['AGENCY_CD']}' AGENCY_CD, '{mon_loc['SITE_NO']}' \"\n f\"SITE_NO FROM DUAL) b ON (a.AGENCY_CD = b.AGENCY_CD AND a.SITE_NO = b.SITE_NO) \"\n f\"WHEN MATCHED THEN UPDATE SET {update_query} \"\n f\"WHEN NOT MATCHED THEN INSERT ({all_columns}) VALUES ({all_values})\"\n )\n return statement", "def _upsert(cursor, table, data, pk):\n stamped = table in ('game', 'drive', 'play')\n update_set = ['%s = %s' % (k, '%s') for k, _ in data]\n if stamped:\n update_set.append('time_updated = NOW()')\n update_set = ', '.join(update_set)\n\n insert_fields = [k for k, _ in data]\n insert_places = ['%s' for _ in data]\n if stamped:\n insert_fields.append('time_inserted')\n insert_fields.append('time_updated')\n insert_places.append('NOW()')\n insert_places.append('NOW()')\n insert_fields = ', '.join(insert_fields)\n insert_places = ', '.join(insert_places)\n\n pk_cond = ' AND '.join(['%s = %s' % (k, '%s') for k, _ in pk])\n q = '''\n UPDATE %s SET %s WHERE %s;\n ''' % (table, update_set, pk_cond)\n q += '''\n INSERT INTO %s (%s)\n SELECT %s WHERE NOT EXISTS (SELECT 1 FROM %s WHERE %s)\n ''' % (table, insert_fields, insert_places, table, pk_cond)\n\n values = [v for _, v in data]\n pk_values = [v for _, v in pk]\n try:\n cursor.execute(q, values + pk_values + values + pk_values)\n except psycopg2.ProgrammingError as e:\n raise e", "def get_scrub_sql():\r\n # it seems incredibly hard to get SQLAlchemy to emit a fully-compiled SQL\r\n # string that including data values. 
i gave up after trying this method with\r\n # the \"dialect\" sqlalchemy.dialects.mysql.mysqldb.MySQLDialect()\r\n # https://sqlalchemy.readthedocs.org/en/latest/faq/sqlexpressions.html\r\n # #how-do-i-render-sql-expressions-as-strings-possibly-with-bound\r\n # -parameters-inlined\r\n sql_format = (\"update %(table)s set %(col)s = %(sub_value)s \"\r\n \"where %(col)s is not null;\")\r\n return '\\n'.join(\r\n sql_format % dict(table=c.table.name, col=c.name, sub_value=v)\r\n for c, v in get_scrub_columns().iteritems())", "def _generate_upsert_pgsql(mon_loc):\n mon_loc_db = [(k, _manipulate_values(v, k in TIME_COLUMNS)) for k, v in mon_loc.items()]\n all_columns = '\"' + '\",\"'.join(col for (col, _) in mon_loc_db if col not in ['INSERT_USER_ID', 'UPDATE_USER_ID', 'REVIEW_FLAG'])\n all_columns += '\",\"GEOM\"'\n all_values = ','.join(value for (key, value) in mon_loc_db if key not in ['INSERT_USER_ID', 'UPDATE_USER_ID', 'REVIEW_FLAG'])\n geom_col = f\" ST_SetSRID(ST_MakePoint({mon_loc['DEC_LONG_VA']},{mon_loc['DEC_LAT_VA']}),4269) \"\n all_values += \",\" + geom_col\n update_query = ','.join(f'\"{k}\"={v}' for (k, v) in mon_loc_db if k not in ['AGENCY_CD', 'SITE_NO', 'INSERT_USER_ID', 'UPDATE_USER_ID', 'REVIEW_FLAG'])\n update_query += ', \"GEOM\"=' + geom_col\n\n statement = (\n f'INSERT INTO \"GW_DATA_PORTAL\".\"WELL_REGISTRY_MAIN\" ({all_columns}) VALUES ({all_values}) '\n f'ON CONFLICT(\"AGENCY_CD\", \"SITE_NO\") DO UPDATE SET {update_query}'\n )\n return statement", "def build_insert_query(self, query, columns, table_name):\n cols = \"\"\n values = \"\"\n on_dupe_values = \"\"\n\n for column in columns:\n cols += \"`{}`, \".format(column)\n values += \"%({})s, \".format(column)\n on_dupe_values += \"{} = VALUES({}), \".format(column, column)\n\n # Remove trailing whitespace and commas\n cols = cols.rstrip().rstrip(\",\")\n values = values.rstrip().rstrip(\",\")\n on_dupe_values = on_dupe_values.rstrip().rstrip(\",\")\n\n query = query.format(table_name=table_name, cols=cols, values=values, on_dupe_values=on_dupe_values)\n return query", "def getSQL_update(table, **kwargs):\n kvs = ''\n kvs_where = ''\n for k, v in kwargs.items():\n if k.startswith('where'):\n kvs_where += k[5:] + '='\n if isNumber(v) or v == 'null':\n kvs_where += str(v) + ' and '\n else:\n kvs_where += \"'\" + v + \"' and \"\n else:\n if not v:\n continue\n if isNumber(v) or v == 'null':\n kvs += k + '=' + str(v) + ','\n else:\n kvs += k + \"='\" + v + \"',\"\n\n if kvs_where == '':\n return 'UPDATE %s SET %s' % (table, kvs[:-1])\n return 'UPDATE %s SET %s WHERE %s' % (table, kvs[:-1], kvs_where[:-4])", "def getSQL_update_ex(table, dict):\n kvs = ''\n kvs_where = ''\n for k, v in dict.items():\n if k.startswith('where'):\n kvs_where += k[5:] + '='\n if isNumber(v) or v == 'null':\n kvs_where += str(v) + ' and '\n else:\n kvs_where += \"'\" + v + \"' and \"\n else:\n if not v:\n continue\n if isNumber(v) or v == 'null':\n kvs += k + '=' + str(v) + ','\n else:\n kvs += k + \"='\" + v + \"',\"\n\n if kvs_where == '':\n return 'UPDATE %s SET %s' % (table, kvs[:-1])\n return 'UPDATE %s SET %s WHERE %s' % (table, kvs[:-1], kvs_where[:-4])", "def upsert(saved_query):\n saved_query.save()\n return saved_query", "def _query_insert(self, sql, data=None):\n\n conn = psycopg2.connect(self.connect_args)\n cur = conn.cursor()\n cur.execute(sql, data)\n conn.commit()\n cur.close()\n conn.close()", "def construct_query(self):\n reader = QueryReader(filepath=self.filepath, filename=self.filename, raw_sql=self.raw_sql, 
params=self.params)\n return reader.sql", "def pack_for_insert(table_name: str,\n data: dict):\n\n # Decide which table apply insert query to\n columns_list = product_table_columns # \"product\" by default\n while Switch(table_name):\n if case('listing_info'):\n columns_list = listing_info_table_columns\n if case('selling_status'):\n columns_list = selling_status_table_columns\n if case('shipping_info'):\n columns_list = shipping_info_table_columns\n break\n\n # Compose condition string for Update statement from several set operations separated by commas\n columns_string = r\"{}\".format(tuple([i for i in columns_list])).replace(\"'\", \"\")\n\n # Init service variables\n values_string, count = r\"\", 0\n\n # Update Statement string with commas separating set values\n for column in columns_list:\n for key, value in data.items():\n if column == key:\n values_string = values_string + r'\"{}\"{}'.format(value, (\", \" if count<len(data)-1 else ''))\n count += 1\n\n return r\"REPLACE INTO {} {} VALUES ({});\".format(table_name, columns_string, values_string)", "def build_query(start, end):\n query = \"INSERT INTO {table} VALUES \\n{values}\"\n values = \"\"\n for _ in range(start, end + 1):\n row = mock_clicks(_)\n if _ == end:\n values += f\"\\t{row}\\n\"\n else:\n values += f\"\\t{row},\\n\"\n return query.format(table=TABLE_NAME, values=values).rstrip(\",\")", "def conditions_as_sql(self, prewhere=False):\n q_object = self._prewhere_q if prewhere else self._where_q\n return q_object.to_sql(self._model_cls)", "def upsert_db(data: List[Dict[str, Any]]):\n questions = data[\"items\"][:5]\n timestamp = f\"{DATE:%Y-%m-%d %H:%M}\"\n convert_epoch = datetime.datetime.utcfromtimestamp\n\n db = sqlite_utils.Database(ROOT / \"stackoverflow.db\")\n db[\"questions\"].upsert_all(\n (\n {\n \"question_id\": row[\"question_id\"],\n \"title\": row[\"title\"],\n \"tags\": \",\".join(row[\"tags\"]),\n \"owner_id\": row[\"owner\"][\"user_id\"],\n \"is_answered\": row[\"is_answered\"],\n \"view_count\": row[\"view_count\"],\n \"answer_count\": row[\"answer_count\"],\n \"score\": row[\"score\"],\n \"site\": row[\"link\"].split(\".\")[0].split(\"/\")[-1],\n \"link\": row[\"link\"],\n \"creation_date\": f'{convert_epoch(row[\"creation_date\"]):%Y-%m-%d %H:%M}',\n \"inserted_date\": timestamp\n }\n for row in questions\n ),\n pk=\"question_id\"\n )\n\n db[\"users\"].upsert_all(\n (\n {\n \"user_id\": row[\"owner\"][\"user_id\"],\n \"user_type\": row[\"owner\"][\"user_type\"],\n \"display_name\": row[\"owner\"][\"display_name\"],\n \"link\": row[\"owner\"][\"link\"],\n \"site\": row[\"link\"].split(\".\")[0].split(\"/\")[-1],\n \"inserted_date\": timestamp \n }\n for row in questions\n ),\n pk=\"user_id\"\n )", "def _build_statement(self, query, query_key, beets_key):\n statement = \"\"\n if query_key in query:\n for query_string in query[query_key]:\n if '\"' in query_string:\n statement += \" and %s = \\'%s\\' \" % (beets_key,\n query_string)\n else:\n statement += ' and %s = \\\"%s\\\" ' % (beets_key,\n query_string)\n return statement", "def upsert_table(self, df_diff, n_batch=5000, debug=False):\n\n n_items = len(df_diff)\n queries = []\n upsert_query = ' '.join(\n ('INSERT INTO \"{tablename}\"(\"cartodb_id\", \"{colname}\")',\n 'VALUES ({cartodb_id}, {colval})',\n 'ON CONFLICT (\"cartodb_id\")',\n 'DO UPDATE SET \"{colname}\" = {colval}',\n 'WHERE EXCLUDED.\"cartodb_id\" = {cartodb_id};'))\n n_batches = n_items // n_batch\n batch_num = 1\n for row_num, row in enumerate(df_diff.iteritems()):\n # if debug: 
print(row)\n cartodb_id = row[0][0]\n colname = row[0][1]\n pgtype = dtype_to_pgtype(self[colname].dtype, colname)\n # fill query template\n temp_query = upsert_query.format(\n tablename=self.get_carto_tablename(),\n colname=colname,\n colval=numpy_val_to_pg_val(self.loc[cartodb_id][colname],\n pgtype),\n cartodb_id=cartodb_id)\n\n queries.append(temp_query)\n\n # run batch if at n_batch queries, or at last item\n if (len(queries) == n_batch) or (row_num == n_items - 1):\n batchquery = '\\n'.join(queries)\n print(\"{curr_batch} of {n_batches}\".format(\n curr_batch=batch_num,\n n_batches=n_batches))\n batch_num = batch_num + 1\n if debug: print(\"Num chars in batch: {}\".format(len(batchquery)))\n if debug: print(batchquery)\n\n # send batch query to carto\n resp = self.carto_sql_client.send(batchquery)\n if debug: print(resp)\n\n # clear for another batch\n queries = []\n\n return None", "def generate_update_sql(self, fieldupdate, condition):\n return \"UPDATE %s SET %s WHERE %s\" % (self.tablename, fieldupdate, condition)", "def getSQL_insert_ex(table, dict):\n ks = ''\n vs = ''\n for k, v in dict.items():\n ks += k + ','\n if v == None:\n v = 'null'\n if isNumber(v) or v == 'null':\n vs += str(v) + ','\n elif str(type(v)) == \"<type 'datetime.datetime'>\":\n vs += \"'\" + v.strftime('%Y-%m-%d %H:%M:%S') + \"',\"\n elif str(type(v)) == \"<type 'datetime.time'>\":\n vs += \"'\" + v.strftime('%H:%M:%S') + \"',\"\n else:\n vs += \"'\" + v + \"',\"\n\n return 'INSERT INTO %s (%s) VALUES (%s)' % (table, ks[:-1], vs[:-1])", "def getSQL_insert(table, **kwargs):\n ks = ''\n vs = ''\n for k, v in kwargs.items():\n ks += k + ','\n if isNumber(v) or v == 'null':\n vs += str(v) + ','\n else:\n vs += \"'\" + v + \"',\"\n\n return 'INSERT INTO %s (%s) VALUES (%s)' % (table, ks[:-1], vs[:-1])", "def sql(self, q):\r\n params = base.get_params(None, locals())\r\n url = '{0}/{1}'.format(self.get_url(), 'sql')\r\n\r\n return http.Request('POST', url, params), parsers.parse_json", "def to_upsert():\n \n return (out['parameters']['dataset'], out['parameters']['timezone'], \n out['parameters']['rows'], out['parameters']['format'], \n out['parameters']['refine']['ano'], out['parameters']['refine']['mes'], \n out['parameters']['metadata']['fecha_ejecucion'], \n out['parameters']['metadata']['parametros_url'], \n out['parameters']['metadata']['ip_address'], \n out['parameters']['metadata']['usuario'], \n out['parameters']['metadata']['nombre_archivo'], \n out['parameters']['metadata']['ruta'])", "def execute_sql(self, return_id=False):\n data = {}\n for (field, value), column in zip(self.query.values, self.query.columns):\n data[column] = python2db(field.db_type(connection=self.connection), value)\n # every object should have a unique pk\n pk_field = self.query.model._meta.pk\n pk_name = pk_field.attname\n\n db_table = self.query.get_meta().db_table\n res = self.connection.db_connection.index(data, self.connection.db_name, db_table, pk)\n\n #TODO: remove or timeout the refresh\n self.connection.db_connection.refresh([self.connection.db_name])\n return res['_id']", "def form_insert_query(self, table_name, input_data, table_fields_names=None, table_fields_types=None):\n\n\t\t# creating first part of the query -> section with columns' names\n\t\tquery_table_structure = self.build_query_part(table_fields_names, table_fields_types, query_part=1)\n\n\t\t# creating second part of the query -> section with values\n\t\tquery_values = self.build_query_part(input_data, table_fields_types, query_part=2)\n\t\t\n\t\t# form 
query\n \t\tquery = 'INSERT INTO ' + table_name + ' ' + query_table_structure + ' VALUES ' + query_values\n\n\t\treturn query", "def _assemble(self):\n assert self._kw, \"Call values() first\"\n names = ','.join(list(self._kw))\n holders = ','.join(f'%({name})s' for name in self._kw)\n return (\n f'insert into {self._table} '\n f'({names}) values ({holders}) '\n f'returning {self._returning}'\n ), self._kw.copy()", "def sql(self):\n\n if not self._table_names:\n raise ValueError('UPDATE requires at least one table')\n if not self._values and not self._values_raw:\n raise ValueError('UPDATE requires at least one value')\n\n table_refs = [', '.join(self._table_names)]\n param_values = []\n col_names = []\n inline_values = []\n set_values = []\n\n self._append_join_table_refs(self._table_names[0], table_refs)\n\n if self._values:\n for col, val in self._values.iteritems():\n col_names.append(col)\n self._parameterize_values(val, inline_values, param_values)\n\n for col in self._values_raw:\n val, val_params = self._values_raw[col]\n col_names.append(col)\n inline_values.append(val)\n if val_params is not None and self.placeholder:\n param_values.extend(val_params)\n\n assert len(col_names) == len(inline_values)\n for col, val in zip(col_names, inline_values):\n set_values.append(u'{0}={1}'.format(self.quote_col_ref(col), val))\n\n # MySQL UPDATE syntax as of 5.7:\n #\n # Single-table syntax:\n #\n # UPDATE [LOW_PRIORITY] [IGNORE] table_reference\n # SET col_name1={expr1|DEFAULT} [, col_name2={expr2|DEFAULT}] ...\n # [WHERE where_condition]\n # [ORDER BY ...]\n # [LIMIT row_count]\n #\n # Multiple-table syntax:\n #\n # UPDATE [LOW_PRIORITY] [IGNORE] table_references\n # SET col_name1={expr1|DEFAULT} [, col_name2={expr2|DEFAULT}] ...\n # [WHERE where_condition]\n\n sql = ['UPDATE']\n\n if self.query_options:\n sql.extend(self.query_options)\n\n if self.ignore_error:\n sql.append('IGNORE')\n\n sql.append(' '.join(table_refs))\n\n sql.append('SET')\n sql.append(', '.join(set_values))\n\n if self._where_cond_root.has_conds:\n sql.append('WHERE')\n sql.append(self._where_cond_root.sql(param_values))\n\n if self._orderby_conds:\n if len(self._table_names) + len(self._join_refs) > 1:\n raise ValueError('Multiple-table UPDATE does not support ORDER BY')\n\n sql.append('ORDER BY')\n sql.append(', '.join(self._orderby_conds))\n\n if self._limit:\n if len(self._table_names) + len(self._join_refs) > 1:\n raise ValueError('Multiple-table UPDATE does not support LIMIT')\n\n sql.append('LIMIT {0}'.format(self._limit))\n\n if self.placeholder:\n return ' '.join(sql), param_values if param_values else None\n assert not param_values\n return ' '.join(sql)", "def Generate(self):\n clauses = [self.main_clause] + self.use_clauses + self.join_clauses\n if self.where_conds:\n if self.or_where_conds:\n clauses.append('WHERE ' + '\\n OR '.join(self.where_conds))\n else:\n clauses.append('WHERE ' + '\\n AND '.join(self.where_conds))\n if self.group_by_terms:\n clauses.append('GROUP BY ' + ', '.join(self.group_by_terms))\n if self.having_conds:\n assert self.group_by_terms\n clauses.append('HAVING %s' % ','.join(self.having_conds))\n if self.order_by_terms:\n clauses.append('ORDER BY ' + ', '.join(self.order_by_terms))\n\n if self.limit and self.offset:\n clauses.append('LIMIT %d OFFSET %d' % (self.limit, self.offset))\n elif self.limit:\n clauses.append('LIMIT %d' % self.limit)\n elif self.offset:\n clauses.append('LIMIT %d OFFSET %d' % (sys.maxint, self.offset))\n\n if self.insert_args:\n clauses.append('VALUES (' + 
PlaceHolders(self.insert_args[0]) + ')')\n args = self.insert_args\n if self.duplicate_update_cols:\n clauses.append('ON DUPLICATE KEY UPDATE %s' % (\n ', '.join(['%s=VALUES(%s)' % (col, col)\n for col in self.duplicate_update_cols])))\n assert not (self.join_args + self.update_args + self.where_args +\n self.group_by_args + self.order_by_args + self.having_args)\n else:\n args = (self.join_args + self.update_args + self.where_args +\n self.group_by_args + self.having_args + self.order_by_args)\n assert not (self.insert_args + self.duplicate_update_cols)\n\n args = _BoolsToInts(args)\n stmt_str = '\\n'.join(clause for clause in clauses if clause)\n\n assert _IsValidStatement(stmt_str), stmt_str\n return stmt_str, args", "def sql_merge(sqls=[],clauseTables=[]):\n\tsql = ''\n\tfor statement in sqls:\n\t\tsql += statement + \" AND \"\n\tsql = sql[0:-5]\n\ttabs = set(clauseTables)\n\treturn dict(sql=sql,clauseTables=tabs)", "def postgres_db_upsert(data, db_credentials):\n\n\tconn = None\n\tinsert_statement = \"\"\"\n\t\tINSERT INTO csgo_match_results (\n\t\t\thash_id, team_1, team_2, team_1_score, team_2_score, tournament, matchtype, match_time\n\t\t)\n\t\tVALUES (%s, %s, %s, %s, %s, %s, %s, %s)\n\t\tON CONFLICT (hash_id) DO UPDATE \n\t\tSET match_time = EXCLUDED.match_time;\n\t\"\"\"\n\n\ttry:\n\t\tconn = psycopg2.connect(**db_credentials)\n\t\tcursor = conn.cursor()\n\t\tcursor.executemany(insert_statement, data)\n\t\tconn.commit()\n\t\tcursor.close()\n\t\tlogger.info('Inserted %s rows.', len(data))\n\texcept psycopg2.DatabaseError as e:\n\t\tlogger.error('Failed to insert %s rows into database.', len(data))\n\t\tlogger.error('Error: %s', e)\n\tfinally:\n\t\tif conn:\n\t\t\tconn.close()", "def test_upsert_user(self):\n db = database.Database()\n db.upsert_user('nick', 100, 100)\n\n the_args, _ = db._cursor.execute.call_args\n sql = the_args[0]\n expected_sql = 'INSERT INTO quota_violations (username, triggered, last_notified)\\n VALUES (%s, %s, %s)\\n ON CONFLICT (username)\\n DO UPDATE SET\\n (triggered, last_notified)\\n = (EXCLUDED.triggered, EXCLUDED.last_notified);\\n '\n\n self.assertEqual(sql, expected_sql)", "def generate_insert_sql(self, fields, values):\n return \"INSERT INTO %s (%s) VALUES(%s)\" % (self.tablename, fields,\n ','.join([sql_quote(x) for x in values]))" ]
[ "0.6856745", "0.62934977", "0.6097199", "0.6094336", "0.6082441", "0.6049717", "0.59614617", "0.5931908", "0.5901273", "0.57127017", "0.56941617", "0.56601083", "0.56386846", "0.557427", "0.55658185", "0.55294424", "0.5524274", "0.5510159", "0.5497966", "0.54956716", "0.5486575", "0.54429364", "0.5441229", "0.54390645", "0.5436801", "0.5433535", "0.543115", "0.54294205", "0.5423502", "0.5387975" ]
0.72920215
0
Converts a Python datatype to the appropriate string (including, e.g., the necessary single quotes and/or brackets) for use in a raw PostgreSQL query.
def _for_pgsql(value, dtype):
    if dtype.startswith(('int', 'float', 'double', 'numeric')):
        if value is None:
            return "Null"
        elif str(value).lower() == 'nan':
            return "'nan'"
        elif dtype.endswith('[]'):
            value = ', '.join([str(v) for v in value])
            value = "'{" + value + "}'"
            return value
        else:
            return str(value)
    elif dtype.startswith('time'):
        if value is None:
            return "Null"
        else:
            return "'" + str(value) + "'"
    elif dtype.startswith('bool'):
        if value is None:
            raise ValueError("Error: bool should not be None.")
        else:
            if str(value).startswith(('t', 'T')):
                return str(True)
            else:
                return str(False)
    elif dtype.startswith('json'):
        # In this case, value itself should be a dict
        value = ','.join(['"{k}":"{v}"'.format(k=k, v=v) for k, v in value.items()])
        value = "'{" + value + "}'"
        return value
    elif dtype == 'text[]':
        value = ', '.join(['"' + str(v) + '"' for v in value])
        value = "'{" + str(value) + "}'"
        return value
    else:
        if str(value).startswith('$delim$') and\
                str(value).endswith('$delim$'):
            return str(value)
        if '::' in str(value):
            value = str(value).split("::")[0].strip("'")
        return "'" + str(value) + "'"
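A few illustrative calls, assuming typical PostgreSQL type-name strings (the dtype values below are examples, not an exhaustive list), showing the literal strings the helper above returns:

_for_pgsql(None, 'integer')            # -> "Null"
_for_pgsql(3, 'integer')               # -> "3"
_for_pgsql([1, 2], 'integer[]')        # -> "'{1, 2}'"
_for_pgsql('2020-01-01', 'timestamp')  # -> "'2020-01-01'"
_for_pgsql(True, 'boolean')            # -> "True"
_for_pgsql('abc', 'text')              # -> "'abc'"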
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def typecast(dtype: Any) -> str:\n if dtype is int:\n return \"Int64\"\n elif dtype is float:\n return \"Float64\"\n elif dtype is bool:\n return \"bool\"\n return \"string\"", "def escapeinput_data_for_sql(self, value, sql_type):\n\t\t# print value\n\t\tvalue = value.replace('\\'', '\"')\n\t\tvalue = value.replace(',', '_')\n\t\t\n\t\tif len(value) == 0:\n\t\t\tif sql_type in ('BIGINT', 'INTEGER', 'FLOAT', 'DOUBLE'):\n\t\t\t\treturn '0'\n\t\t\tif sql_type == 'NVARCHAR':\n\t\t\t\treturn '\\'\\''\n\t\telse:\n\t\t\tif sql_type in ('BIGINT', 'INTEGER', 'FLOAT', 'DOUBLE'):\n\t\t\t\t# return value\n\t\t\t\treturn '\\'' + value + '\\''\n\t\t\tif sql_type == 'NVARCHAR':\n\t\t\t\treturn '\\'' + value + '\\''\n\n\t\treturn '\\'' + value + '\\''", "def python_type_to_sql_type(_python_type):\n if _python_type == str:\n return 'string'\n elif _python_type == bytes:\n return \"blob\"\n elif _python_type == float:\n return \"float\"\n elif _python_type == int:\n return \"integer\"\n elif _python_type == datetime:\n return \"datetime\"\n elif _python_type == bool:\n return \"boolean\"\n else:\n raise Exception(\"python_type_to_sql_type: _type_code \\\"\" + str(_python_type) + \"\\\"not supported\")", "def _handle_sql_types(value):\n if type(value) is datetime:\n return value.isoformat()\n return str(value)", "def field_cast_sql(self, db_type, internal_type=None):\n if db_type and db_type.lower() == 'blob':\n return 'CAST(%s as nvarchar)'\n return '%s'", "def cast(elem, psql_type):\n if psql_type == 'real':\n return float(format(elem, '.6g'))\n elif psql_type == 'double precision':\n return float(format(elem, '.15g'))\n elif psql_type == 'timestamp':\n if isinstance(elem, pd.Timestamp):\n return elem.to_pydatetime()\n else:\n return elem\n elif psql_type == 'text':\n if type(elem) == float:\n return \"NaN\"\n return str(elem)\n else:\n return elem", "def sqlify(obj):\n # because `1 == True and hash(1) == hash(True)`\n # we have to do this the hard way...\n\n if obj is None:\n return 'NULL'\n elif obj is True:\n return \"'t'\"\n elif obj is False:\n return \"'f'\"\n elif isinstance(obj, long):\n return str(obj)\n elif datetime and isinstance(obj, datetime.datetime):\n return repr(obj.isoformat())\n else:\n if isinstance(obj, unicode): obj = obj.encode('utf8')\n return repr(obj)", "def sql_type(dtype):\n if dtype.kind in (\"i\",\"u\",\"f\"):\n # It's a numeric type\n if dtype == np.int32:\n return \"integer\"\n elif dtype == np.int64:\n return \"bigint\"\n elif dtype == np.float32:\n return \"real\"\n elif dtype == np.float64:\n return \"float\"\n else:\n raise ValueError(\"Unsupported data type \"+str(dtype))\n elif dtype.kind == \"S\":\n # It's a string\n # Note: this assumes 1 byte = 1 character!\n return (\"char(%d)\" % dtype.itemsize)\n else:\n # Not numeric or string, don't know what to do with this!\n raise ValueError(\"Unsupported data type \"+str(dtype))", "def quote(value, typeCode, string_escaper=_safe):\n q = dbTypeMap.get(typeCode, None)\n if q is None:\n raise DBError(\"Type %s not known\" % typeCode)\n if value is None:\n return 'null'\n if q == NOQUOTE:\n return str(value)\n elif q == USEQUOTE:\n if typeCode.startswith('bool'):\n if value:\n value = '1'\n else:\n value = '0'\n if typeCode == \"bytea\":\n l = [\"'\"]\n for c in value:\n i = ord(c)\n if i == 0:\n l.append(\"\\\\\\\\000\")\n elif i == 92:\n l.append(c * 4)\n elif 32 <= i <= 126:\n l.append(c)\n else:\n l.append(\"\\\\%03o\" % i)\n l.append(\"'\")\n return \"\".join(l)\n if not isinstance(value, types.StringType) and \\\n not 
isinstance(value, types.UnicodeType):\n value = str(value)\n return \"'%s'\" % string_escaper(value)", "def escape(self, value) -> str:\n def to_str(val):\n if isinstance(val, bytes):\n val = val.decode('utf-8')\n return QuotedString(val).getquoted().decode('utf-8')\n func = self.python_type\n if isinstance(value, (datetime.datetime, datetime.date)):\n value = str(value)\n func = to_str\n if issubclass(self.python_type, str):\n func = to_str\n return func(value)", "def dtype_to_pgtype(dtype, colname):\n if colname in ('the_geom', 'the_geom_webmercator'):\n return 'geometry'\n else:\n if dtype == 'float64':\n return 'numeric'\n elif dtype == 'int64':\n return 'int'\n elif dtype == 'datetime64[ns]':\n return 'date'\n elif dtype == 'bool':\n return 'boolean'\n else:\n return 'text'\n\n return None", "def db_cast(self):\n if self.is_int:\n return 'BIGINT'\n return 'TEXT'", "def get_type_string(data):\r\n data_type = type(data)\r\n\r\n if data_type in (int, long):\r\n return 'integer'\r\n elif data_type == float:\r\n return 'float'\r\n elif data_type == bool:\r\n return 'boolean'\r\n elif data_type in (list, tuple):\r\n return 'list'\r\n elif data_type == dict:\r\n return 'hash'\r\n elif data is None:\r\n return 'null'\r\n elif isinstance(data, basestring):\r\n return 'string'", "def _as_string(self, name):\n org_type = self._get_type(name)\n if org_type == 'string': return None\n valid = ['single', 'delimited set', 'int', 'float', 'date']\n if not org_type in valid:\n msg = 'Cannot convert variable {} of type {} to text!'\n raise TypeError(msg.format(name, org_type))\n self._meta['columns'][name]['type'] = 'string'\n if self._get_type in ['single', 'delimited set']:\n self._meta['columns'][name].pop('values')\n self._data[name] = self._data[name].astype(str)\n return None", "def mongo_to_python_type(field, data):\n if isinstance(field, ObjectIdField):\n return str(data)\n elif isinstance(field, DecimalField):\n return data\n elif isinstance(field, BooleanField):\n return data\n else:\n return str(data)", "def _patched_cast(self, db_type):\n # see https://code.djangoproject.com/ticket/11580\n if db_type and db_type.endswith('LOB'):\n return \"DBMS_LOB.SUBSTR(%s,2000,1)\"\n else:\n return \"%s\"", "def _escape_value(cls, value):\n numbers = (decimal.Decimal, int, float)\n if value is None:\n return 'NULL'\n elif isinstance(value, str):\n return \"'{}'\".format(value.replace(\"'\", \"''\"))\n elif isinstance(value, numbers):\n return value\n elif isinstance(value, datetime):\n time_formatted = value.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]\n return \"TIMESTAMP '{}'\".format(time_formatted)\n else:\n raise ValueError('Cannot escape {}'.format(type(value)))", "def execute_cast_datetime_or_timestamp_to_string(op, data, type, **kwargs):\n return str(data)", "def convert_data_type(self, datatype):\n converted = Engine.convert_data_type(self, datatype)\n if \"NUMERIC\" in converted:\n converted = \"NUMERIC\"\n elif \"VARCHAR\" in converted:\n try:\n length = int(converted.split('(')[1].split(')')[0].split(',')[0])\n if length > 255:\n converted = \"TEXT\"\n except BaseException:\n pass\n return converted", "def fmt(self, val):\n if type(val) in self.QUOTABLE_TYPES:\n s = decode_string(val)\n return u\"{0}{1}{2}\".format(self.quotechar, s, self.quotechar)\n else:\n return decode_string(str(val))", "def _unicode_output(cursor, name, default_type, size, precision, scale):\n if default_type in (\n cx_Oracle.STRING,\n cx_Oracle.LONG_STRING,\n cx_Oracle.FIXED_CHAR,\n cx_Oracle.CLOB,\n ):\n return 
cursor.var(str, size, cursor.arraysize)", "def data_type_str(self):\n return data_ref_type_str(self.data_type)", "def sql_to_python_type(sql_type):\n if sql_type.startswith(\"CHAR(\"):\n return str\n\n if sql_type.startswith(\"INTERVAL\"):\n # Calcite will always convert to milliseconds\n # no matter what the actual interval is\n # I am not sure if this breaks somewhere,\n # but so far it works\n return lambda x: timedelta(milliseconds=int(x))\n\n if sql_type.startswith(\"DECIMAL(\"):\n # We use np.float64 always\n return np.float64\n\n try:\n return _SQL_TO_PYTHON[sql_type]\n except KeyError: # pragma: no cover\n raise NotImplementedError(f\"The SQL type {sql_type} is not implemented (yet)\")", "def t_STRING(t):\n return t", "def getquoted(self):\n if self.is_geometry:\n # Psycopg will figure out whether to use E'\\\\000' or '\\000'.\n return b\"%s(%s)\" % (\n b\"ST_GeogFromWKB\" if self.geography else b\"ST_GeomFromEWKB\",\n sql.quote(self.ewkb).encode(),\n )\n else:\n # For rasters, add explicit type cast to WKB string.\n return b\"'%s'::raster\" % self.ewkb.hex().encode()", "def value_to_string(self, value, type_class, param_info=None):\n if isinstance(value, Entry):\n var = self.get_variable(value.code_entry)\n if isinstance(value.target, list):\n return \"tuple(%s)\" % var\n return var\n else:\n if type_class == TypeClass.STRING:\n return '\"%s\"' % value\n elif type_class == TypeClass.ENUM:\n name = value.typeName\n suffix = self.get_last_part(name)\n upper_chars = [c for c in suffix if c.isupper()]\n as_name = \"%s_%s\" % (\"\".join(upper_chars), value.value)\n self.add_import('%s.%s' % (value.typeName, value.value), as_name)\n #return value.value\n return as_name\n elif type_class == TypeClass.CHAR:\n return \"uno.Char(\\\"%s\\\")\" % value.value\n elif type_class == TypeClass.SEQUENCE:\n comp_type, n = self.parse_seq(param_info)\n _comp_type_class = comp_type.getTypeClass()\n str_val = [self.value_to_string(v, _comp_type_class) for v in value]\n return \"(%s)\" % \", \".join(str_val)\n else:\n return str(value)", "def basictypestring(\n dtype_char: ty.Text,\n length: int = -1,\n byteorder: ty.Text = \"=\"\n ) -> ty.Text:\n # only specify length if we have a variable width type\n lengthstr = str(length) if dtype_char in VARLENGTH_TYPE_CHARS else \"\"\n # only specify byte order if we need to\n byteorderchar = byteorder if byteorder not in (\"=\", \"|\") else \"\"\n return f\"{byteorderchar}{dtype_char}{lengthstr}\"", "def convert_raw_type_to_xdm_type(schema_type: str) -> str:\n converting_dict = {\n \"string\": SCHEMA_TYPE_STRING,\n \"int\": SCHEMA_TYPE_NUMBER,\n \"boolean\": SCHEMA_TYPE_BOOLEAN,\n }\n\n return converting_dict.get(schema_type, SCHEMA_TYPE_STRING)", "def format_literal(term: Literal) -> str:\n lang = term.language\n dtype = term.datatype\n lit = str(term)\n if lang is not None or dtype is not None:\n return term.n3()\n if re.fullmatch(r_integer, lit):\n dtype = XSD.integer\n elif re.fullmatch(r_decimal, lit):\n dtype = XSD.decimal\n elif re.fullmatch(r_double, lit):\n dtype = XSD.double\n elif re.fullmatch(r_boolean, lit):\n dtype = XSD.boolean\n return Literal(lit, lang, dtype).n3()", "def quote(value):\n return DoubleQuotedScalarString(value)" ]
[ "0.72594017", "0.7176732", "0.70983726", "0.701285", "0.7011267", "0.69050854", "0.6897729", "0.6891294", "0.66907203", "0.65747005", "0.65560377", "0.6526946", "0.6434319", "0.6389201", "0.6378415", "0.6377122", "0.6342456", "0.6311862", "0.62993383", "0.6254328", "0.62502825", "0.61804223", "0.6164433", "0.615651", "0.615621", "0.6148549", "0.6095789", "0.60755116", "0.6066884", "0.60565346" ]
0.7943686
0
Gets a list of "set pairs" for use in a raw SQL query, e.g.,

    INSERT INTO table_name (column1, column2, ...)
    VALUES (value1, value2, ...)
    ON CONFLICT (column1) DO UPDATE SET column1=value1, column2=value2

This function returns the string "column1=value1, column2=value2".
def _get_set_pairs(uprow, types):
    pairs = []
    for key, val in uprow.items():
        pairs.append("{0}={1}".format(key, _for_pgsql(val, types[key])))
    return ", ".join(pairs)
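A minimal usage sketch (the column names and types are made up for illustration):

_get_set_pairs({'name': 'abc', 'count': 3},
               {'name': 'text', 'count': 'integer'})
# -> "name='abc', count=3"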
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_sql_update_set_formatted_string(keys_list: List[str]):\n\n return \", \".join([f\"{key} = :{key}\" for key in keys_list])", "def sql_filtered_insert(table, set_columns, values):\n for index in range(len(set_columns) - 1, -1, -1):\n if values[index] is None:\n del set_columns[index]\n del values[index]\n values_fields = ['?'] * len(set_columns)\n query_columns = ', '.join(set_columns)\n values_fields = ', '.join(values_fields)\n query = f'INSERT INTO {table} ({query_columns}) VALUES ({values_fields})'\n return query, values", "def get_synset_pairs(synset: Synset) -> list:\n # Remove phrasal expressions from the literals\n literals = remove_phrases(synset.literals)\n\n # Generate a list of unique pairs representing the cartesian product of the list of literals of the single synset\n pairs = unique([tuple(sorted((w1, w2), key=itemgetter(0))) for w1 in literals for w2 in literals if not w1 == w2])\n return pairs", "def collect_set_string_fragments(fragment_list, the_set):\n fragment_list.append(\"{\")\n first = True\n for v in the_set:\n if not first:\n fragment_list.append(\",\")\n else:\n first = False\n fragment_list.append(repr(v))\n fragment_list.append(\"}\")", "def get_cursor_values(self, keys: Set[str]) -> Mapping[str, str]:", "def get_schema(self):\n return ', '.join('%s:%s' % (col, self.schema[col]) for col in self.schema)", "def get_schema(self):\n return ', '.join(\n '%s:%s' % (col, self.schema[col]) for col in self.schema)", "def PlaceHolders(sql_args):\n return ','.join('%s' for _ in sql_args)", "def _sqllist(values):\n items = []\n items.append('(')\n for i, v in enumerate(values):\n if i != 0:\n items.append(', ')\n items.append(sqlparam(v))\n items.append(')')\n return SQLQuery(items)", "def _getValues(self):\n res = {}\n for colname, column in self._iterNameColumn():\n res[colname] = column.toSql(self._values[colname])\n return res", "def prepare(self, conn): # real signature unknown; restored from __doc__\n return set(*(), **{})", "def sets(set_id, set_name, series):\n if (set_id):\n format_set_info(find_set(set_id))\n else:\n params = build_up_set_params(set_name, series)\n print(params)\n param_list=''\n for k, v in params.items():\n param_list += (f'{k}:\"{v}\" ')\n param_list = param_list.strip()\n click.echo(param_list) \n sets = Set.where(q=param_list)\n for pset in sets:\n format_set_info(pset)", "def _assemble(self):\n assert self._kw, \"Call values() first\"\n names = ','.join(list(self._kw))\n holders = ','.join(f'%({name})s' for name in self._kw)\n return (\n f'insert into {self._table} '\n f'({names}) values ({holders}) '\n f'returning {self._returning}'\n ), self._kw.copy()", "def str(self) -> List[Tuple[str, str]]:\n kl = self.keys()\n vl = self.values()\n return [str(kl[idx]) + \",\" + str(vl[idx]) for idx in range(len(kl))]", "def colNames_string(self):\n # SELECT column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = 'some_table';\n return \"SELECT column_name FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = \"", "def pretty_set(s):\n return 'set(%r)' % sorted(s, key=repr)", "def get_values(self):\n return set(self._table.keys())", "def get_value_list(self, insupdel=0):\n #NOTE: statement = [record, {...}]\n result = []\n if insupdel == StatementType.INSERT:\n statements = self.statements_insert\n elif insupdel == StatementType.UPDATE:\n statements = self.statements_update\n elif insupdel == StatementType.DELETE:\n statements = self.statements_delete\n for statement in statements:\n result.append([key for key, val in 
statement[1].items()])\n return result", "def get_set(lines):\n return [(right, wrong) for (right, wrongs) in (line.split(':') for line in lines) for wrong in wrongs.split()]", "def get_sql_columns(self, request):\n cur = self.execute(request)\n col_name_list = [tuple[0] for tuple in cur.description]\n cur.close()\n return col_name_list", "def comma_lists(self):\n\n comma_lists = []\n\n for i, arg_set in enumerate(self.arguments):\n comma_lists.append(', '.join([str(s) for s in arg_set]))\n\n return tuple(comma_lists)", "def sql(self):\n if not self._selects:\n raise ValueError('No SELECT statements are specified')\n\n sql = []\n param_values = []\n\n # MySQL SELECT syntax as of 5.7:\n #\n # SELECT ...\n # UNION [ALL | DISTINCT] SELECT ...\n # [UNION [ALL | DISTINCT] SELECT ...]\n\n if self.query_options:\n sql.extend(self.query_options)\n\n for stmt in self._selects:\n if isinstance(stmt, mysqlstmt.Select):\n select_sql, select_params = stmt.sql()\n stmtsql = select_sql\n if select_params is not None:\n param_values.extend(select_params)\n else:\n stmtsql = stmt\n\n if sql:\n if self._distinct is False:\n sql.append('UNION ALL')\n else:\n sql.append('UNION')\n\n sql.append(u'({0})'.format(stmtsql))\n\n if self._orderby_conds:\n sql.append('ORDER BY')\n sql.append(', '.join(self._orderby_conds))\n\n if self._limit is not None:\n row_count, offset = self._limit\n if offset > 0:\n sql.append('LIMIT {0},{1}'.format(offset, row_count))\n else:\n sql.append('LIMIT {0}'.format(row_count))\n\n if self.placeholder:\n return ' '.join(sql), param_values if param_values else None\n assert not param_values\n return ' '.join(sql)", "def tag_dict_values (self):\r\n\r\n if self.using_database:\r\n value_tuple = (notebookname,)\r\n db_cursor.execute(\"SELECT keyword \"\r\n +\"FROM keys_to_indexes\"\r\n +\" WHERE notebook=?;\",\r\n value_tuple)\r\n\r\n fetched = db_cursor.fetchall()\r\n if fetched:\r\n return {index[0].strip() for index in fetched}\r\n return set()\r\n\r\n return self.tag_dict.values()", "def mk_sql_list(ls):\n res = \"(\" + ' '.join([str(elem) for elem in intersperse(\",\", ls)]) + \")\"\n return res", "def sql_attribute_unpacker(self, where_string_list: List[str]) -> Tuple[List[str], List[str]]:\n\n if not where_string_list or len(where_string_list) == 0:\n raise ValueError(\"The list of strings containing the attributes is missing.\")\n\n join_attributes_set: set = set()\n selection_attributes_set: set = set()\n\n for where_string in where_string_list:\n attrs = re.split(\" AND \", where_string, flags=re.IGNORECASE)\n\n for index, attr in enumerate(attrs):\n if re.match(r'.+\\s*=\\s*[^\\d\"\\']*$', attr):\n join_attributes_set.add(attr.strip())\n else:\n for operator in self.operators:\n if operator in attr:\n attr = attr.split(operator)[0].strip()\n selection_attributes_set.add(attr)\n break\n\n return list(join_attributes_set), list(selection_attributes_set)", "def sql(self):\n return ';\\n'.join([x.sql() for x in self._statements]) + ';'", "def get_symmetrized_bond_set(bond_force):\n\n bond_set = set()\n n_bonds = bond_force.getNumBonds()\n\n for k in range(n_bonds):\n (i0, i1, r0, k0) = bond_force.getBondParameters(k)\n bond_set.add((i0, i1))\n bond_set.add((i1, i0))\n\n return bond_set", "def _aggregate_set_id_element_pairs(self, setpairs):\n set_ids = set([entry[0] for entry in setpairs])\n listlist = [[entry for entry in setpairs if entry[0] == set_id]\n for set_id in set_ids]\n result = [(pairlist[0][0], set([entry[1] for entry in pairlist]))\n for pairlist in listlist]\n return 
result", "def _create_sql_columns(self, a_list):\n result = \"\"\n \n cpt = 0\n for elem in a_list:\n if cpt == 0:\n result += \"%s\" % (elem)\n else:\n result += \", %s\" % (elem)\n cpt += 1\n\n return result", "def sql_filtered_update(table, set_columns, where_columns, values):\n for index in range(len(set_columns) - 1, -1, -1):\n if values[index] is None:\n del set_columns[index]\n del values[index]\n set_columns = [col + ' = ?' for col in set_columns]\n columns_to_set = ', '.join(set_columns)\n where_columns = [col + ' = ?' for col in where_columns]\n where_condition = ' AND '.join(where_columns)\n query = f'UPDATE {table} SET {columns_to_set} WHERE {where_condition}'\n return query, values" ]
[ "0.60592306", "0.5937135", "0.5752619", "0.55114925", "0.5503095", "0.5496429", "0.546202", "0.5449382", "0.5340713", "0.5311094", "0.5305011", "0.52147573", "0.51948994", "0.5189435", "0.51842463", "0.5181503", "0.51571256", "0.5146145", "0.51329595", "0.5124207", "0.5090333", "0.5077104", "0.5066822", "0.5064706", "0.5048319", "0.50250196", "0.5002706", "0.49969542", "0.49866468", "0.49865922" ]
0.7233016
0
Fetches a list of correlators matching the specified form factor.
def fetch_basenames(engine, form_factor):
    for key in ['current', 'm_mother', 'm_daughter', 'm_spectator', 'momentum']:
        if key not in form_factor:
            raise KeyError(f"Required key '{key}' is missing.")

    def abspath(dirname):
        return os.path.join(pathlib.Path(__file__).parent.absolute(), dirname)

    # 2pt correlators like 'P5-P5_RW_RW_d_d_m0.002426_m0.002426_p000'
    mother = "%_RW_RW_d_d_m{m_mother}_m{m_spectator}_p000%fine"
    daughter = "%_RW_RW_d_d_m{m_daughter}_m{m_spectator}_{momentum}%fine"
    if form_factor['m_daughter'] < form_factor['m_spectator']:
        daughter = "%_RW_RW_d_d_m{m_spectator}_m{m_daughter}_{momentum}%fine"

    # 3pt correlators like 'P5-P5_RW_RW_d_d_m0.002426_m0.002426_p000',
    corr3 = "%_{current}_T%_m{m_mother}_RW_RW_x_d_m{m_spectator}_m{m_daughter}_{momentum}%fine"

    params = {
        'mother': mother.format(**form_factor),
        'daughter': daughter.format(**form_factor),
        'corr3': corr3.format(**form_factor)}
    queries = aiosql.from_path(abspath("sql/"), "sqlite3")
    with db.connection_scope(engine) as conn:
        corrs = queries.postgres.get_correlator_names(conn, **params)
    return np.squeeze(np.array(corrs))
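For illustration, a hypothetical form_factor dict and the LIKE patterns it would produce (all values here are invented, not taken from a real ensemble):

form_factor = {'current': 'S-S', 'm_mother': 0.389, 'm_daughter': 0.0012,
               'm_spectator': 0.0012, 'momentum': 'p100'}
# mother   -> '%_RW_RW_d_d_m0.389_m0.0012_p000%fine'
# daughter -> '%_RW_RW_d_d_m0.0012_m0.0012_p100%fine'
# corr3    -> '%_S-S_T%_m0.389_RW_RW_x_d_m0.0012_m0.0012_p100%fine'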
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_corr_ids(engine, basename):\n query = (\n \"SELECT id AS correlator_id \"\n \"FROM correlators \"\n f\"WHERE name LIKE '{basename}%%';\")\n corr_ids = pd.read_sql_query(query, engine)['correlator_id']\n return corr_ids", "def _all_word_forms(self):\n all_word_forms = []\n for word in self.cursor.execute(f\"\"\"\n SELECT word_form\n FROM {self.table_name}\n WHERE lemma='{self.word}'\n \"\"\"):\n all_word_forms.append(word[0])\n if all_word_forms == []:\n return None\n return all_word_forms", "def get_corrmat(self, f):\n return self._get_corrmat(f)", "def get_forms(self):\n module = self.get_module()\n return (module.get_form_by_unique_id(form.form_id) for form in self._get_forms())", "def get_all_candidates(self) -> list:", "def dslq_completers(self, event):\n # print(dir(event), event)\n\n # FIXME only first line gets the autocomplete!\n if event.line.startswith(\"%%\"):\n event.line = event.line[1:] # reduce cell symbol (double %) to line symbol\n for command in [\"%dslloopdf\", \"%dsldf\", \"%dslloop\", \"%dsl\"]:\n if command in event.line: # first match will return results\n doc = Document(event.line.replace(command, \"\"))\n c = CleverCompleter()\n res = c.get_completions(doc, None)\n # print(res)\n return [x.text for x in res]", "def get_calibrators(science_spectrum_type, config):\n\n if science_spectrum_type in [\"CHIRON_CSV\", \"CHIRON_FITS\"]:\n spectrograph = \"CHIRON\"\n elif science_spectrum_type in [\"EXPRES_FITS\"]:\n spectrograph = \"EXPRES\"\n else:\n raise Exception(\"Did not recognize science spectrum type to get calibrator for.\")\n\n\n cal_pxs = []\n for calibrator in config[\"calibrators\"][spectrograph]:\n cal_pxs.append((calibrator[ADDR_IND][shi.ORD_IND],\n calibrator[ADDR_IND][shi.LOPX_IND] + calibrator[PX_IND]))\n return cal_pxs", "def compute_correlations(struc_df, option, gamma, alpha):\n n_states = len(np.unique(struc_df.objnum))\n nodes = network.temp_node_info()\n adjacency = network.adjacency_mat(nodes)\n L = compute_limit_matrix(0.5, adjacency, n_states)\n L_vector = L.flatten()\n M = learn_sr(struc_df, gamma, alpha)\n M = M[2, 6]\n M_vector = M.flatten()\n\n if option == \"norm\":\n print(\"Norm of L - M: \")\n print(la.norm(L_vector - M_vector, np.inf))\n\n if option == \"correlation\":\n print(\"Correlation of L, M: \")\n print(np.dot(L_vector, M_vector) /\n (la.norm(L_vector) * la.norm(M_vector)))", "def get_form_factor_data(form_factor_id, engines, apply_alias=True, sanitize=True):\n query = (\n \"SELECT ens_id, RTRIM(name, '-_fine') as BASENAME, corr_type \"\n \"FROM junction_form_factor AS junction \"\n \"JOIN correlator_n_point AS corr ON (corr.corr_id = junction.corr_id) \"\n \"WHERE (form_factor_id = {form_factor_id}) AND (name LIKE '%%fine');\"\n )\n query = query.format(form_factor_id=form_factor_id)\n dataframe = pd.read_sql_query(query, engines['postgres'])\n ens_id = dataframe['ens_id'].unique().item()\n basenames = dataframe['basename'].values\n\n # Grab a list of necessary correlators, in particular identifying the\n # source and sink 2pt functions. 
This line gives a map from the full\n # basename to a name like 'source' or 'sink'.\n aliases = alias.get_aliases(basenames)\n # Apply any further renaming, e.g., 'sink' --> 'heavy-light'\n name_map = alias.apply_naming_convention(aliases)\n data = {}\n for basename in aliases:\n key = name_map[basename] if apply_alias else basename\n try:\n data[key] = hdf5_cache.get_correlator(engines[ens_id], basename)\n except ValueError as err:\n LOGGER.warning(\"WARNING: Unable to load %s\", key)\n if sanitize:\n data, nan_rows = sanitize_data(data)\n if nan_rows:\n LOGGER.warning(\"WARNING: NaNs found while sanitizing: %s\", nan_rows)\n return data", "def searchFormuSet(index, concForm, formulSet):\r\n ans = []\r\n lis = []\r\n for subset in formulSet:\r\n if (concForm == subset[1][2]):\r\n lis.append(index)\r\n lis.append(subset[1])\r\n ans.append(lis)\r\n lis = []\r\n return ans", "def get_forms_by_field(formset, field_info):\n field_name, field_value = field_info.split('|')\n return formset.get_forms_by_field(field_name, field_value)", "def factor_list(f):\n coeff, factors = dmp_factor_list(f.rep, f.lev, f.dom)\n return coeff, [ (f.per(g), k) for g, k in factors ]", "def _getChoices(self, acronym):\n # get matches from acronymDB\n matches = []\n if(acronym in self.acronymDB):\n matches += self.acronymDB[acronym]\n if(acronym[-1] == \"s\" and acronym[:-1] in self.acronymDB):\n matches += self.acronymDB[acronym]\n\n # create training data\n X_train, y_train = [], []\n for definition, articleID, ignored_var in matches:\n text = self.articleDB[articleID]\n X_train.append(\n ExpansionChoice(article_id=articleID, article_text=text))\n y_train.append(definition)\n\n # create y labels to group similar acronyms\n y_labels, labelToExpansion = self._processChoices(y_train)\n\n return X_train, y_labels, labelToExpansion", "def get_corr_genes(ensemble, query):\n\tif \";\" in query:\n\t\treturn []\n\n\ttry:\n\t\tcorr_genes = db.get_engine(current_app, 'methylation_data').execute(\"SELECT * FROM {}_correlated_genes WHERE gene1 LIKE %s\".format(ensemble), (query+'%%',)).fetchall()\n\texcept exc.ProgrammingError as e:\n\t\tnow = datetime.datetime.now()\n\t\tprint(\"[{}] ERROR in app(get_corr_genes): {}\".format(str(now), e))\n\t\tsys.stdout.flush()\n\t\treturn []\n\n\tcorr_genes = [ {\"rank\": i+1, \"gene_name\": get_gene_by_id(row.gene2)[0]['gene_name'], \"correlation\": row.correlation, \"gene_id\": row.gene2} for i, row in enumerate(corr_genes)]\n\treturn corr_genes", "def find_corr (mdp,num):\n ctr=0 # line counter\n mdp.corr_file.seek(0)\n lfsp=mdp.corr_file.read().split(\"\\n\")\n for i in range(0,len(lfsp)-1):\n lin=lfsp[i].strip() # strip preceeding and trailing spaces?\n if lin.startswith(\"0\"): # check whether it is the right form\n if test_line_type(lin,0):\n ctr += 1\n if ctr == num: # only reaches this point if it's the needed correlator\n for j in range(i+1,i+mdp.corr_len): # test for the correct format, number of lines\n if not test_line_type(lfsp[j].strip(),j-i):\n print \"Invalid correlator\"\n return -1\n ##endif ! 
test_line_type\n ##endfor j in range\n #print \"valid correlator\"\n return i\n ##endif ctr==num\n ##endif test_line_type\n ##endif startswith(0)\n ##endfor i in range\n return -2", "def get_forms(console: Console, sess: requests.Session, form_id: str = \"General_Record_2020v2.0\"):\n raw_resp = get_url(url=f\"https://forms.agterra.com/api/{form_id}/GetAll/0\", sess=sess)\n\n if raw_resp.status_code != 200:\n console.log(f\"[red] Something went wrong, we got status [white]{raw_resp.status_code}\")\n json_data = raw_resp.json()\n console.log(f\"Message Data: {json_data}\")\n\n json_data = raw_resp.json()\n\n return json_data", "def full_colexification(forms):\n # We assume the forms to be sorted by clics_form already:\n expected, seen = len(forms), 0\n for _, _forms in itertools.groupby(forms, lambda f: f.clics_form):\n cids = set()\n fs = []\n for f in _forms:\n seen += 1\n if f.concepticon_id not in cids:\n fs.append(f)\n cids.add(f.concepticon_id)\n yield fs\n if expected != seen: # pragma: no cover\n raise ValueError('forms not properly ordered')", "def check_relevant_forms(L):\n L2 = list()\n for F in L.values():\n # needed_ev=(\n S = F.shim_corr\n # print \"S=\",S\n ok_ev = 0\n for g in S:\n if g.atkin_lehner_eigenvalue() == -1:\n ok_ev = ok_ev + 1\n if ok_ev > 0:\n print(\"Number of ok forms on \", F.space.WR.N, \" :\", ok_ev)\n F.list_coefficents('D', fd=True, neg=False, nmin=0, nmax=1000, latex=False, nd=50, prime=True)\n L2.append(F)\n return L2", "def get_fedcm_account_list(self):\n pass", "def get_all_relaxed_candidates_after_generation(self, gen):\n q = 'relaxed=1,extinct=0,generation<={0}'\n entries = self.c.select(q.format(gen))\n\n trajs = []\n for v in entries:\n t = self.get_atoms(id=v.id)\n t.info['confid'] = v.gaid\n t.info['relax_id'] = v.id\n trajs.append(t)\n trajs.sort(key=lambda x: get_raw_score(x),\n reverse=True)\n return trajs", "def get_literature_recids_for_orcid(orcid):\n orcid_object = f'[{{\"schema\": \"ORCID\", \"value\": \"{orcid}\"}}]'\n # this first query is written in a way that can use the index on (json -> ids)\n\n try:\n author_rec_uuid = (\n db.session.query(RecordMetadata.id)\n .filter(\n type_coerce(RecordMetadata.json, JSONB)[\"ids\"].contains(orcid_object)\n )\n .one()\n .id\n )\n except NoResultFound:\n LOGGER.warning(\"No profile is associated with this account!\", orcid=orcid)\n return []\n\n author_record = (\n db.session.query(PersistentIdentifier)\n .filter(\n PersistentIdentifier.object_type == \"rec\",\n PersistentIdentifier.object_uuid == author_rec_uuid,\n PersistentIdentifier.pid_type == \"aut\",\n )\n .one()\n )\n\n author_recid = (\n author_record.pid_value\n if not author_record.is_redirected()\n else InspireRedirect.get_redirect(author_record).pid_value\n )\n\n query = Q(\"match\", authors__curated_relation=True) & Q(\n \"match\", **{\"authors.record.$ref\": author_recid}\n )\n search_by_curated_author = (\n LiteratureSearch()\n .query(\"nested\", path=\"authors\", query=query)\n .params(_source=[\"control_number\"], size=9999)\n )\n\n return [el[\"control_number\"] for el in search_by_curated_author]", "def factor_list_include(f):\n factors = dmp_factor_list_include(f.rep, f.lev, f.dom)\n return [ (f.per(g), k) for g, k in factors ]", "def read_data(engine, correlator_ids):\n def _handle_ids(correlator_ids):\n list_of_ids = [str(elt) for elt in correlator_ids]\n return '({0})'.format(','.join(list_of_ids))\n query = (\n \"SELECT data.*, correlators.name FROM data \"\n \"JOIN correlators ON correlators.id = data.correlator_id \"\n 
f\"WHERE correlator_id IN {_handle_ids(correlator_ids)};\")\n return pd.read_sql_query(query, engine)", "def get_recommended_formations(opponent_formation, league, limit):\n\n cursor = mc.get_db_cursor(mc.DB_NAME)\n select_query = \"\"\"SELECT formation_1 FROM recom_formation\n WHERE formation_2 = %s\n AND league = %s\n ORDER BY formation_points DESC limit %s\"\"\"\n insert_value = (opponent_formation, league, limit)\n cursor.execute(select_query, insert_value)\n formations = cursor.fetchall()\n\n # initializes a dictionary for storing the recom formations\n result_dict = {\n \"recoms\": []\n }\n # save all the formations in the dictionary\n for formation in formations:\n result_dict[\"recoms\"].append(formation[0])\n return result_dict", "def forms_for_users(user_ids, domain):\n\n all_forms = []\n\n for user_id in user_ids:\n user = CommCareUser.get_by_user_id(user_id, domain=domain)\n good_forms = [f for f in user.get_forms() if check_form_domain(f, domain)]\n all_forms.extend(good_forms)\n\n all_forms.sort(key=lambda form: form.received_on)\n return all_forms", "def get_contacts(self):\n contacts = Membership.objects.filter(entity = self, key_contact = True).order_by('importance_to_entity')\n return contacts", "def get_patient_lookup_results(form):\n args = form.data\n data = {k: v for (k, v) in args.items() if not is_empty_form_value(v)}\n if (not form.validate()) or (len(data) == 0):\n return {\"has_form_data\": False, \"tb\": None}\n where = \"where \" + (\" and \".join(\"(X.[{}] = ?)\".format(k) for k, _ in data.items()))\n params = [v for _, v in data.items()]\n q = (\"select top 1000 X.* \" +\n \"from {schema}.PersonCombined X \".format(schema=app_schema) +\n where)\n tb = pd.read_sql(q, engine, params=params)\n return {\"has_form_data\": True, \"tb\": tb}", "def _factors_for_flowable(self, fb, qq, cx, **kwargs):\n self._check_factors(qq)\n try:\n cl = self._qlookup(qq, fb)\n except NoFQEntry:\n return\n if cx is None:\n for v in cl.cfs():\n yield v\n else:\n for v in cl.find(cx, **kwargs):\n yield v", "def getConferencesByTopicSearch(self, request):\n conferences = self._getConferencesByTopicSearch(request)\n # Need to fetch organiser displayName from profiles\n # Get all keys and use get_multi for speed\n organisers = [\n (ndb.Key(Profile, conf.organizerUserId)) for conf in conferences\n ]\n profiles = ndb.get_multi(organisers)\n # Put display names in a dict for easier fetching\n names = {}\n for profile in profiles:\n names[profile.key.id()] = profile.displayName\n # Return individual ConferenceForm object per Conference\n # Return individual ConferenceForm object per Conference\n return ConferenceForms(\n items=[\n self._copyConferenceToForm(conf, names[conf.organizerUserId])\n for conf in conferences\n ]\n )", "def get_conversions(self):\n query = prefixes + \"\"\"\n SELECT DISTINCT ?controller ?controllerName ?controllerActivity\n ?product ?productName ?reactant ?reactantName ?stmt\n WHERE {\n ?stmt a belvoc:Statement .\n ?stmt belvoc:hasRelationship ?rel .\n ?stmt belvoc:hasSubject ?subject .\n ?stmt belvoc:hasObject ?rxn .\n ?subject a belvoc:AbundanceActivity .\n ?subject belvoc:hasActivityType ?controllerActivity .\n ?subject belvoc:hasChild ?controller .\n ?controller belvoc:hasConcept ?controllerName .\n ?rxn a belvoc:Reaction .\n ?rxn belvoc:hasChild ?reactants .\n ?reactants rdfs:label ?reactLabel .\n FILTER (regex(?reactLabel, \"^reactants.*\"))\n ?rxn belvoc:hasChild ?products .\n ?products rdfs:label ?prodLabel .\n FILTER (regex(?prodLabel, \"^products.*\"))\n 
?reactants belvoc:hasChild ?reactant .\n ?products belvoc:hasChild ?product .\n ?reactant belvoc:hasConcept ?reactantName .\n ?product belvoc:hasConcept ?productName .\n }\n \"\"\"\n res = self.g.query(query)\n # We need to collect all pieces of the same statement so that we can\n # collect multiple reactants and products\n stmt_map = collections.defaultdict(list)\n for stmt in res:\n stmt_map[stmt[-1]].append(stmt)\n for stmts in stmt_map.values():\n # First we get the shared part of the Statement\n stmt = stmts[0]\n subj = self._get_agent(stmt[1], stmt[0])\n evidence = self._get_evidence(stmt[-1])\n stmt_str = strip_statement(stmt[-1])\n # Now we collect the participants\n obj_from_map = {}\n obj_to_map = {}\n for stmt in stmts:\n reactant_name = stmt[6]\n product_name = stmt[4]\n if reactant_name not in obj_from_map:\n obj_from_map[reactant_name] = \\\n self._get_agent(stmt[6], stmt[5])\n if product_name not in obj_to_map:\n obj_to_map[product_name] = \\\n self._get_agent(stmt[4], stmt[3])\n obj_from = list(obj_from_map.values())\n obj_to = list(obj_to_map.values())\n st = Conversion(subj, obj_from, obj_to, evidence=evidence)\n # If we've matched a pattern, mark this as a converted statement\n self.statements.append(st)\n self.converted_direct_stmts.append(stmt_str)" ]
[ "0.5065303", "0.49833453", "0.48512962", "0.48390126", "0.47684383", "0.47402024", "0.47051248", "0.46746743", "0.46732098", "0.46706173", "0.46581146", "0.4630977", "0.46242094", "0.461068", "0.46085858", "0.46028313", "0.46000963", "0.45869887", "0.4582591", "0.45775443", "0.45509544", "0.45499337", "0.4547764", "0.45213494", "0.4490507", "0.44664857", "0.44657117", "0.44537106", "0.44502857", "0.44467735" ]
0.5916252
0
Gets an alias for a quark mass (e.g., '1.0 m_light') from a table.
def get_alias(a_fm, description, quark_mass):
    quark = conventions.quark_masses
    mask = utils.bundle_mask(quark, a_fm=a_fm, description=description, mq=quark_mass)
    return utils.extract_unique(quark[mask], 'alias')
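An illustrative call shape (the argument values are placeholders; the alias that comes back depends entirely on the contents of the conventions.quark_masses table):

alias = get_alias(a_fm=0.15, description='1/5 m_strange', quark_mass=0.002426)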
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _masses_string(self):\n return_str = 'Masses\\n\\n'\n for at in self.atom_types:\n return_str += '{} {:9.5f} # {}\\n'.format( at.atom_type_index, float(at.mass), at.label)\n return_str += '\\n'\n return return_str", "def loc_massmet(mass):\n return np.interp(mass, massmet[:, 0], massmet[:, 1])", "def aliased_for_cypher(self):\n return '{} AS {}'.format(self.for_cypher(), self.alias_for_cypher)", "def get_quote_table_field(field, stock_ticker):\n quote_table = si.get_quote_table(stock_ticker)\n return quote_table[field]", "def get_med_by_id(self):\n return \"SELECT * FROM medic WHERE id = %s\"", "def magic(self, alias):\n if alias in self.aliases:\n return self.aliases[alias]\n else:\n return \"%%{}\\n\".format(alias)", "def AMED(e00200, MARS, AMED_thd, _sey, AMED_trt, FICA_mc_trt, FICA_ss_trt):\n\n # ratio of income subject to AMED tax = (1 - 0.5*(FICA_mc_trt+FICA_ss_trt)\n _amed = AMED_trt * max(0, e00200 +\n max(0, _sey) * (1 - 0.5 *\n (FICA_mc_trt + FICA_ss_trt)) -\n AMED_thd[MARS - 1])\n\n return _amed", "def alias(self) -> 'Literal[\"ARP\", \"InARP\", \"RARP\", \"DRARP\"]':\n return self._acnm", "def _sym_constant(self, table: Mapping[int, str]) -> str:\n try:\n return table[self.sym]\n except KeyError:\n return str(self.sym)", "def get_mq(a_fm, description, quark_alias):\n quark = conventions.quark_masses\n mask = utils.bundle_mask(quark, a_fm=a_fm, description=description, alias=quark_alias)\n return utils.extract_unique(quark[mask], 'mq')", "def test_get_alias():\n c = Curve(data=np.linspace(1, 20, 2), mnemonic='DT')\n alias = {'Sonic': ['DT', 'foo']}\n assert c.get_alias(alias) == ['Sonic']", "def find_alias(self, table: str) -> Optional[AliasInfo]:\n alias_info = [\n t\n for t in self.select_info.table_aliases\n if t.aliased and t.ref_str == table\n ]\n assert len(alias_info) <= 1\n return alias_info[0] if alias_info else None", "def get_mlt_phys(sed_name):\n\n new_name = sed_name.replace('+','-').replace('a','-').split('-')\n\n logg_sgn_dex = len(new_name[0])\n\n if sed_name[logg_sgn_dex] == '-':\n logg_sgn = 1.0\n elif sed_name[logg_sgn_dex] == '+':\n logg_sgn = -1.0\n else:\n raise RuntimeError('Cannot get logg_sgn for %s' % sed_name)\n\n metallicity_sgn_dex = len(new_name[0]) + len(new_name[1]) + 1\n\n if sed_name[metallicity_sgn_dex] == '-':\n metallicity_sgn = -1.0\n elif sed_name[metallicity_sgn_dex] == '+':\n metallicity_sgn = 1.0\n else:\n raise RuntimeError('Cannot get metallicity_sgn for %s' % sed_name)\n\n teff = 100.0*float(new_name[0][3:])\n metallicity = metallicity_sgn*float(new_name[2])\n logg = logg_sgn*float(new_name[1])\n\n return teff, metallicity, logg", "def get_phylogenetic_row_metric(name):\r\n # looks for name, inserting one_sample to find functions\r\n # in qiime.beta_metrics\r\n return getattr(qiime.beta_metrics, 'one_sample_' + name.lower())", "def molar_mass_amu():\n return Equivalency([(si.g / si.mol, misc.u)], \"molar_mass_amu\")", "def aliased_column(self, name: str) -> SelectType:\n\n # TODO: This method should use an aliased column from the SDK once\n # that is available to skip these hacks that we currently have to\n # do aliasing.\n resolved = self.resolve_column_name(name)\n column = Column(resolved)\n\n # If the expected alias is identical to the resolved snuba column,\n # no need to do this aliasing trick.\n #\n # Additionally, tags of the form `tags[...]` can't be aliased again\n # because it confuses the sdk.\n if name == resolved:\n return column\n\n # If the expected aliases differs from the resolved snuba column,\n # make sure 
to alias the expression appropriately so we get back\n # the column with the correct names.\n return AliasedExpression(column, self.tag_to_prefixed_map.get(name, name))", "def variation_name(self) -> str:\n return \"amplification\"", "def extract_placename(query_result):\n if 'name' in query_result.keys():\n return query_result['name']\n else:\n return np.float('nan')", "def __str__(self):\n return \"%s (%s) - atomic number: %d, atomic weight: %g amu\" \\\n % (self.name, self.symbol, self.atomicNumber, self.atomicWeight)", "def getMeasure(unique_name):", "def getMeasure(unique_name):", "def get_table_name(query: str) -> str:\n find_table_name_from_query = r'(FROM `)(\\w+.\\w+)(`)'\n search_result = re.search(find_table_name_from_query, query)\n if search_result:\n return search_result.group(2)\n return \"Unrecognized table name\"", "def _alias_if(self, column: str) -> str:\n table_def = self.table_def\n \n if table_def.type == GlueTableDef.TYPE_GLUE_CATALOG:\n alias_db_name, alias_table_name = self.resolver.to_alias(*table_def.table_spec)\n return GlueTable.translate_alias(alias_db_name, alias_table_name, column)\n\n elif table_def.type == GlueTableDef.TYPE_PARQUET:\n print(\"_alias_if parquet column: {column}\")\n return column\n\n else:\n raise ValueError(f\"_alias_if: Unrecognized GlueTableDef: {table_def.type}\")", "def aliased_for_output(self):\n return '{} AS {}'.format(self.for_return(), self.output_alias_for_cypher)", "def get_mass(elem):\n return mass[get_num(elem)]", "def get_alias(conn, alias):\n rows = retrieve_alias(conn, alias)\n [print(\"%-20s: %s\" % x) for x in rows]", "def name(self):\n return self._meural_device[\"alias\"]", "def m1(self):\n return self.mass[0]", "def avl_mass_string(self):\n x,y,z = self.center_of_gravity_global.as_tuple()\n ixx, iyy, izz = self.inertia_xx, self.inertia_yy, self.inertia_zz\n template = \"{0} {1} {2} {3} {4} {5} {6}\".format(self.mass,\n x,\n y,\n z,\n ixx,\n iyy,\n izz)\n return template", "def convert_tth_to_q(tth, lam=1.5406):\n\n q = (np.pi*4.0/lam)*np.sin(tth/360*np.pi)\n return q" ]
[ "0.52080846", "0.50464433", "0.49144888", "0.48627576", "0.48407552", "0.4818113", "0.48110572", "0.47752094", "0.47583362", "0.47391355", "0.47382006", "0.47375157", "0.47296125", "0.47048503", "0.46953273", "0.4686729", "0.4680874", "0.46636742", "0.4646696", "0.46301967", "0.46301967", "0.46117255", "0.4608901", "0.46027875", "0.4596186", "0.4569966", "0.45568734", "0.45385677", "0.45251265", "0.45153987" ]
0.65231496
0
Test that variant libraries load and initialize.
def test_load_variant(variant):
    try:
        f = fvs.FVS(variant)
    except ImportError:
        pytest.skip('No variant library: {}'.format(variant))
        return None
    except:
        raise
    assert f.variant == variant
    assert not f.fvslib is None
    f = None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_load_libs(self):\n script = 'var %s = {foo: \"foo\"};' % _global\n\n with js_file(script) as path:\n utils.load_libs([path])\n\n self.assertEqual('foo', utils.run_script('%s.foo' % _global))\n self.assertEqual('true', utils.run_script('delete %s.foo' % _global))", "def test_init():\n\n test_project = tempfile.mkdtemp(dir=self._tempdir)\n\n inventory_fname = os.path.join(test_project, \".inventory.toml\")\n config_fname = os.path.join(test_project, \".config.toml\")\n\n assert 0 == subprocess.call([\n sys.executable, \"-u\", \"-m\", \"avalon.inventory\", \"--init\"\n ], cwd=test_project)\n\n assert os.path.isfile(inventory_fname), \".inventory.toml not found\"\n assert os.path.isfile(config_fname), \".config.toml not found\"\n\n with open(inventory_fname) as f:\n inventory_dict = toml.load(f)\n assert_equals(inventory_dict, inventory.DEFAULTS[\"inventory\"])\n\n with open(config_fname) as f:\n config_dict = toml.load(f)\n assert_equals(config_dict, inventory.DEFAULTS[\"config\"])", "def test_library(self):\n self.assertEqual(LibraryConfig.name, \"library\")", "def test_list_available_libraries(self):\n _ = LibraryFactory.create(modulestore=self.store)\n all_libraries = self.tools.list_available_libraries()\n assert all_libraries\n assert len(all_libraries) == 1", "def __init__(self):\n\n if VorpatestLibrary._initialized:\n return\n\n sys.stderr.write(\"** Initializing VorpatestLibrary\\n\")\n\n VorpatestLibrary._initialized = True\n\n # Initialize the path to the vorpaline executables\n VorpatestLibrary._bin_dir = os.getenv('VORPALINE_BIN_DIR')\n if VorpatestLibrary._bin_dir is None:\n raise RuntimeError(\"Environment variable VORPALINE_BIN_DIR is not set\")\n\n # Check for execution with valgrind\n if os.getenv('VORPALINE_WITH_VALGRIND') != None:\n VorpatestLibrary._exec_wrapper = _Valgrind()\n \n # Check for execution with callgrind\n elif os.getenv('VORPALINE_WITH_CALLGRIND') != None:\n VorpatestLibrary._exec_wrapper = _Callgrind()\n\n # Initialize the test index\n VorpatestLibrary._test_index = 0", "def test_load_vspec():\n global _vspec\n _vspec = SourceSpectrum.from_vega()", "def test_01_Init(self):\n pass", "def initialize():\n _check_python_version()", "def test_libs_config(self):\n libs = [l for l in os.listdir(framework_libs_dir()) if l != 'libs.conf']\n self.assertTrue(sorted(libs), sorted(self.conf.options('libs')))", "def test_load(loqusdbapi, mocker, loqusdb_output):\n # GIVEN a loqusdb api and some info about a case\n family_id = 'test'\n ped_path = 'a ped path'\n vcf_path = 'a vcf path'\n\n # WHEN uploading a case with 15 variants to loqusdb\n mocker.patch.object(subprocess, 'check_output')\n subprocess.check_output.return_value = loqusdb_output\n\n data = loqusdbapi.load(family_id, ped_path, vcf_path)\n\n # THEN assert that the number of variants is 15\n\n assert data['variants'] == 15", "def testInitialize(self):\n dependency_definition = dependencies.DependencyDefinition('test')\n self.assertIsNotNone(dependency_definition)", "def test_version_initialize(self):\n instance = ClassVersionInitialize()\n self.assertEqual(instance.version_straight(), \"1.1.1\")\n self.assertEqual(instance.version_default(), \"1.1.2\")\n self.assertEqual(instance.version_both(), \"1.1.3\")", "def test_load(self):\n (spec, check) = bundylogging.load()\n # It returns the checking function\n self.assertEqual(check, bundylogging.check)\n # The plugin stores it's spec\n self.assertEqual(spec, bundylogging.spec)", "def init_lib():\n res = cl().random_init()\n if res < 0:\n raise 
ValueError('Library initialization error: %s' % res)\n return res", "def test_sphere_init():\n Sphere(5)", "def test_loqusdb_variant(mocker, loqus_extension):\n # GIVEN a return value from loqusdb\n return_value = (\n b'{\"homozygote\": 0, \"hemizygote\": 0, \"observations\": 1, \"chrom\": \"1\", \"start\": '\n b'235918688, \"end\": 235918693, \"ref\": \"CAAAAG\", \"alt\": \"C\", \"families\": [\"643594\"],'\n b' \"total\": 3}'\n )\n mocker.patch.object(subprocess, \"check_output\")\n subprocess.check_output.return_value = return_value\n # WHEN fetching the variant info\n var_info = loqus_extension.get_variant({\"_id\": \"a variant\"})\n\n # THEN assert the info was parsed correct\n assert var_info[\"total\"] == 3", "def setUp(self):\n self._plugin = spotlight_volume.SpotlightVolumePlugin()\n self._parser = plist.PlistParser()", "def loadLibMVA(config):\n mvalibpath = config.getTagStringDefault(\"libMVA\",\"libTMVA.so\")\n QFramework.START(\"l.\",\"attempting to load MVA library from '{:s}'\".format(mvalibpath.Data()))\n try:\n ROOT.gSystem.Load(mvalibpath.Data())\n QFramework.END(QFramework.TQMessageStream.OK)\n return True\n except:\n QFramework.END(QFramework.TQMessageStream.FAIL)\n QFramework.BREAK(\"unable to load MVA library - please specify the proper path via the 'libMVA' config option\")\n return False", "def test_variants(capsys):\n\n wd_path = os.path.join(packagedir, \"sample_data\", \"special_wd_variants\")\n init_wd(wd_path, os.path.join(packagedir, \"sample_data\", \"sample_reads_in\"), remove_analysis=False)\n\n # clean up possible old results\n for file in ['config_used.yaml', 'variants_raw/variants_merged.csv']:\n if os.path.exists(os.path.join(wd_path, 'analysis', file)):\n os.unlink(os.path.join(wd_path, 'analysis', file))\n\n # now update file modification time to pretend we called variants\n for file in ['targets.bed', 'targets_merged.bed', 'versions/gatk.txt', 'variants_raw/S1.vcf']:\n pathlib.Path(os.path.join(wd_path, 'analysis', file)).touch()\n\n rules_manual = [\n '--resume',\n os.path.join('analysis', 'variants_raw', 'variants_summary.csv'), \n ]\n\n # just run the variants rule, we can't run from scratch since we won't have a caller\n captured = check_run(capsys, wd_path, rules = rules_manual, run=False)\n # make sure we are not trying to rerun everything\n # NOTE: this will contain output from above, so we can't fail on align_pe\n assert not 'call_variants_raw' in captured.out.strip()\n # make sure we want to reannotate\n assert 'variants_merge_unannotated' in captured.out.strip()\n # now actually run\n captured = check_run(capsys, wd_path, rules = rules_manual)\n\n # check variant files\n variants_merged = pd.read_csv(os.path.join(wd_path, 'analysis', 'variants_raw', 'variants_merged.csv'), index_col=['Chr', 'Start'])\n assert len(variants_merged) == 5\n assert len(variants_merged.loc['U00096.3', 35]) == 1\n assert len(variants_merged.loc['U00096.3', 36]) == 1\n assert len(variants_merged.loc['U00096.3', 37]) == 1\n assert len(variants_merged.loc['U00096.3', 45]) == 2\n\n variants_summary = pd.read_csv(os.path.join(wd_path, 'analysis', 'variants_raw', 'variants_summary.csv'), index_col=['Chr', 'Start', 'Alt'])\n assert len(variants_summary) == 5\n\n assert variants_summary.loc['U00096.3', 35, 'C']['Ref'] == 'T'\n assert variants_summary.loc['U00096.3', 36, 'A']['Ref'] == 'C'\n assert variants_summary.loc['U00096.3', 37, 'T']['Ref'] == 'TGTG'\n assert variants_summary.loc['U00096.3', 45, 'G']['Ref'] == 'T'\n assert variants_summary.loc['U00096.3', 45, 
'C']['Ref'] == 'T'\n\n assert variants_summary.loc['U00096.3', 35, 'C']['Var_Zygosity'] == 'Het'\n assert variants_summary.loc['U00096.3', 36, 'A']['Var_Zygosity'] == 'HOM'\n assert variants_summary.loc['U00096.3', 37, 'T']['Var_Zygosity'] == 'Het'\n assert variants_summary.loc['U00096.3', 45, 'G']['Var_Zygosity'] == 'REF'\n assert variants_summary.loc['U00096.3', 45, 'C']['Var_Zygosity'] == 'Het'\n\n assert variants_summary.loc['U00096.3', 35, 'C']['Target'] == 'target_0'\n assert variants_summary.loc['U00096.3', 36, 'A']['Target'] == 'target_0'\n assert variants_summary.loc['U00096.3', 37, 'T']['Target'] == 'target_0'\n assert pd.isna(variants_summary.loc['U00096.3', 45, 'G']['Target'])\n assert pd.isna(variants_summary.loc['U00096.3', 45, 'C']['Target'])\n\n assert variants_summary.loc['U00096.3', 35, 'C']['Var_FailedFilters'] == 'badReads'\n assert variants_summary.loc['U00096.3', 45, 'G']['Var_FailedFilters'] == 'badReads'\n assert variants_summary.loc['U00096.3', 45, 'C']['Var_FailedFilters'] == 'badReads'\n assert variants_summary['Var_FailedFilters'].isnull().sum() == 2", "def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):\n install(\"[email protected]\")\n install(\"[email protected]\")\n\n # Now there are two versions of libelf, which should cause an error\n out = load(\"--sh\", \"libelf\", fail_on_error=False)\n assert \"matches multiple packages\" in out\n assert \"Use a more specific spec\" in out\n\n # Using --first should avoid the error condition\n load(\"--sh\", \"--first\", \"libelf\")", "def _load_libs():\n if sys.platform == \"darwin\":\n lib_name = \"libtiledbvcf.dylib\"\n elif sys.platform == \"win32\":\n lib_name = \"tiledbvcf.dll\"\n else:\n lib_name = \"libtiledbvcf.so\"\n\n try:\n # Try loading the bundled native library.\n lib_dir = os.path.dirname(os.path.abspath(__file__))\n ctypes.CDLL(os.path.join(lib_dir, lib_name))\n except OSError as e:\n # Otherwise try loading by name only.\n ctypes.CDLL(lib_name)", "def main():\n testlib = VorpatestLibrary()\n testlib.prepare_test()\n testlib.run_vorpaline(*sys.argv[1:])\n testlib.run_vorpastat()\n testlib.cleanup_test()", "def test_init_success(self):\n found = False\n try:\n pyint = Interpreter()\n except InitializationException: \n found = True\n self.assertFalse(found)", "def test_LC18_import(): \n\tdef test(): \n\t\ttry: \n\t\t\tfrom .. 
import LC18 \n\t\texcept: \n\t\t\treturn False \n\t\treturn True \n\treturn [\"vice.yields.ccsne.LC18\", test]", "def test_load_includes_run_env(install_mockery, mock_fetch, mock_archive, mock_packages):\n install(\"mpileaks\")\n\n sh_out = load(\"--sh\", \"mpileaks\")\n csh_out = load(\"--csh\", \"mpileaks\")\n\n assert \"export FOOBAR=mpileaks\" in sh_out\n assert \"setenv FOOBAR mpileaks\" in csh_out", "def test_003_library(settings, inspector):\n sources = [\n os.path.join(settings.sample_path, 'main_syntax.scss'),\n os.path.join(settings.sample_path, 'main_commented.scss'),\n os.path.join(settings.sample_path, 'main_basic.scss'),\n os.path.join(settings.sample_path, 'main_depth_import-1.scss'),\n os.path.join(settings.sample_path, 'main_depth_import-2.scss'),\n os.path.join(settings.sample_path, 'main_depth_import-3.scss'),\n os.path.join(settings.sample_path, 'main_with_subimports.scss'),\n os.path.join(settings.sample_path, 'main_using_libs.scss'),\n os.path.join(settings.sample_path, 'main_circular_0.scss'),\n os.path.join(settings.sample_path, 'main_circular_1.scss'),\n os.path.join(settings.sample_path, 'main_circular_2.scss'),\n os.path.join(settings.sample_path, 'main_circular_3.scss'),\n os.path.join(settings.sample_path, 'main_circular_4.scss'),\n os.path.join(settings.sample_path, 'main_circular_bridge.scss'),\n os.path.join(settings.sample_path, 'main_circular_5.scss'),\n ]\n sourcepath = os.path.join(settings.lib1_path, 'components/_panels.scss')\n\n inspector.inspect(*sources, library_paths=settings.libraries_fixture_paths)\n\n parents = inspector.parents(sourcepath)\n assert parents == set([\n os.path.join(settings.lib1_path, 'library_1_fullstack.scss'),\n os.path.join(settings.sample_path, 'main_using_libs.scss'),\n ])", "def _setup(self):", "def _setup(self):", "def test_init(self):\n self._api.Init(lang=\"eng+osd\")\n self.assertEqual(self._api.GetInitLanguagesAsString(), \"eng+osd\")\n self._api.Init(lang=\"eng\")\n self.assertEqual(self._api.GetInitLanguagesAsString(), \"eng\")\n self._api.Init(oem=tesserocr.OEM.TESSERACT_ONLY)\n self.assertEqual(self._api.oem(), tesserocr.OEM.TESSERACT_ONLY)", "def test_init_default(self):\n self._test_init_default()" ]
[ "0.63410246", "0.6289428", "0.60788304", "0.6077451", "0.60713", "0.6050606", "0.6022049", "0.5979783", "0.59417397", "0.59394777", "0.589408", "0.589276", "0.58614826", "0.583448", "0.58229786", "0.58219177", "0.5816688", "0.5816469", "0.57959235", "0.57952666", "0.57904017", "0.57882226", "0.57749516", "0.57722324", "0.5759142", "0.5736915", "0.5688444", "0.5688444", "0.56763554", "0.5664331" ]
0.7319842
0
Set a new sudoku matrix with new matrix
def set_sudoku_matrix(self, matrix):
    self.sudoku_matrix = matrix
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_sudoku(size):\r\n def mutate_list_1(lst, size):\r\n \"\"\"Helper function for removing part of a list from the beginning and add it to the end.\"\"\"\r\n count = 0\r\n while count < size:\r\n elem = lst[0]\r\n lst.remove(elem)\r\n lst.append(elem)\r\n count += 1\r\n return lst\r\n\r\n def mutate_list_2(lst):\r\n \"\"\"Helper function for removing element from the beginning of a list and add it to the end.\"\"\"\r\n elem = lst[0]\r\n lst.remove(elem)\r\n lst.append(elem)\r\n return lst\r\n\r\n count = 0\r\n matrix_length = size ** 2 # define a size of matrix\r\n matrix = [[] * matrix_length] # create an empty matrix\r\n matrix[0] = range(1, matrix_length + 1) # set a first row to a range from 1 to size ** 2\r\n while count < matrix_length - 1:\r\n l = matrix[count][:] # create a new list object that is a copy of previous row in a matrix\r\n if (count + 1) % size == 0: # check if a row in inner square of a matrix\r\n l = matrix[count - (size-1)][:] # if it is, l set to the first row of previous square\r\n matrix.append(mutate_list_2(l))\r\n else:\r\n matrix.append(mutate_list_1(l, size)) # mutate l and add it to the matrix\r\n count += 1\r\n\r\n\r\n return matrix", "def setUp(self):\r\n self.matrix = array(\r\n [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 16]])\r\n self.cells = [(0, 1), (1, 3)]\r\n self.cells2 = [(0, 2), (2, 3)]", "def newMatrix(self):\n self.matrix = makeMatrix()\n for row in range(self.matrix.getHeight()):\n for column in range(self.matrix.getWidth()):\n self.canvasGrid[row][column].draw(self.matrix[row][column])", "def reiniciarMatrix(self):\n self.matrixMAPA = []\n self.rellenarMatrix()", "def test_set_cell(self):\n self.sudoku.set_cell((2, 2), 0)\n self.assertEqual(0, self.sudoku.get_cell((2, 2)))", "def clone(self):\n new_board = utils.copy_matrix(self.board)\n return Sudoku(new_board, self.block_size)", "def update_sudoku(fit, m):\n row, col, n = fit\n try:\n if m[row][col] != 0:\n raise ValueError\n m[row][col] = n\n except ValueError:\n raise ValueError('This coordinate has already been updated.')\n return m", "def get_sudoku_matrix(self):\n return self.sudoku_matrix", "def setPuzzle():\n matrix = tuple() # This will be a tuple of tuples to hold the original puzzle set\n\n matrix += ((0, 25, 0, 21, 0, 4, 0, 8, 0, 17, 0),)\n matrix += ((12, 22, 13, 8, 18, 8, 0, 18, 2, 13, 8),)\n matrix += ((0, 14, 0, 24, 0, 21, 0, 22, 0, 22, 0),)\n matrix += ((5, 13, 26, 20, 0, 16, 20, 9, 13, 7, 13),)\n matrix += ((0, 7, 0, 5, 0, 20, 0, 3, 0, 0, 9),)\n matrix += ((20, 16, 22, 0, 0, 0, 0, 0, 21, 17, 3),)\n matrix += ((17, 0, 0, 8, 0, 23, 0, 1, 0, 21, 0),)\n matrix += ((9, 21, 10, 11, 4, 20, 0, 10, 21, 3, 18),)\n matrix += ((0, 18, 0, 4, 0, 8, 0, 13, 0, 3, 0),)\n matrix += ((7, 22, 6, 21, 0, 18, 21, 25, 17, 20, 18),)\n matrix += ((0, 9, 0, 18, 0, 19, 0, 8, 0, 15, 0),)\n\n return matrix", "def fill_board(self):\n slope = 0\n for i in range(0, len(self.row_map.keys())):\n for j in range(0, len(self.row_map.keys())):\n key = self.row_map[i + 1] + str(j + 1)\n value = int(self.raw_data[j + (8 * i + slope)])\n self.sudoku_board.update({key: value})\n slope += 1", "def solve_sudoku(sudoku):\n # Define the solution matrix that represents the sudoku puzzle\n solution = Matrix(9, 9, 1, 9)\n\n # Set up the model\n model = Model()\n\n # Set the constraints for the filled in cells\n for i in xrange(0, 9):\n for j in xrange(0, 9):\n if sudoku[i, j] > 0:\n model.add(solution[i, j] == int(sudoku[i, j]))\n\n # Add the constraint that all rows need to be different\n 
model.add([AllDiff(x) for x in solution.row])\n # Add the constraint that all columns need to be different\n model.add([AllDiff(y) for y in solution.col])\n \n # Add the constraint that all cells need to be different\n for i in xrange(0, 3):\n for j in xrange(0, 3):\n # Generate the constraint for each cell\n # x goes over the rows in each cell\n # y goes over the columns in each cell\n model.add(AllDiff(\n [solution[x, y] for x in xrange(i*3, (i+1)*3) for y in xrange(j*3, (j+1)*3)]))\n\n # Load a solver and solve the problem\n solver = model.load('MiniSat')\n solver.solve()\n return solution", "def sudoku_solver(m):\n square_sides = int(sqrt(len(m)))\n dicts = initialize_dicts(m, square_sides)\n dicts, square_coords = populate_dicts(m, square_sides, dicts)\n dicts = get_missing(dicts)\n candidates = get_candidates(m, dicts, square_coords)\n m, candidates = scan_sudoku(m, dicts, square_coords, candidates)\n single_candidates = single_candidate(candidates, square_coords, dicts)\n m, candidates = fill_fit(m, dicts, square_coords, single_candidates=single_candidates)\n candidates = get_candidates(m, dicts, square_coords)\n naked_sets_fields_row, naked_sets_fields_cols = find_naked_sets(candidates, dicts, setlength=2)\n candidates, naked_sets = remove_naked_sets_from_candidates(candidates, naked_sets_fields_row, naked_sets_fields_cols)\n candidates = get_candidates(m, dicts, square_coords, naked_sets)\n naked_sets_fields_row, naked_sets_fields_cols = find_naked_sets(candidates, dicts, setlength=3)\n return m", "def __update_matrix(self, old_matrix_view):\n # if we've cleaned dirt - we will see it on our next move, so we substitute only unseen cells\n # which are marked with \"o\"\n new_matrix_view = []\n for row in range(self.matrix_rows):\n new_matrix_view.append([char for char in input()])\n\n if old_matrix_view:\n for row in range(self.matrix_rows):\n for col in range(self.matrix_cols):\n if new_matrix_view[row][col] == \"o\":\n new_matrix_view[row][col] = old_matrix_view[row][col]\n\n return new_matrix_view", "def solveSudoku(self, board: List[List[str]]) -> None:\n # initialize the hashmaps\n for row in range(self.size):\n for col in range(self.size):\n value = board[row][col]\n if value != '.':\n self.rows[row].add(value)\n self.cols[col].add(value)\n self.cells[self.cell_idx(row, col)].add(value)\n \n # start backtracking at the first field\n self.backtrack(board, 0)\n return board", "def set_board(board):", "def rellenarMatrix(self):\n for i in range(0, 26):\n self.matrixMAPA.append([])\n for j in range(0, 26):\n self.matrixMAPA[i].append((0, str(i)+\"-\"+str(j)))", "def solveSudoku(self, board: List[List[str]]) -> None:\n def getLocs(board):#初始化,获取需要填充的位置,记录为一个栈\n locs = []\n for row in range(9):\n for col in range(9):\n if board[row][col] == '.':\n locs.append((row, col))\n return locs\n\n def getMaps(board):#定义三个字典,跟踪9行、9列和9块的已填充数字,采用数据结构为defaultdict\n from collections import defaultdict as dd\n rowMap = [dd(int) for _ in range(9)]\n colMap = [dd(int) for _ in range(9)]\n blockMap = [dd(int) for _ in range(9)]\n for row in range(9):\n for col in range(9):\n if board[row][col] != '.':\n num = int(board[row][col])\n rowMap[row][num] += 1\n colMap[col][num] += 1\n bolckIndex = int(row/3)*3+int(col/3)\n blockMap[bolckIndex][num] += 1\n return rowMap, colMap, blockMap\n\n def fillBoard(board, locs):#递归填充剩余的数独空位置\n if not locs:\n return True\n row, col = locs.pop()#弹出一个待填充位置\n bolckIndex = int(row/3)*3+int(col/3)\n found = False\n for num in range(1, 10):\n if found:\n break\n if not 
rowMap[row][num] and not colMap[col][num] and not blockMap[bolckIndex][num]:\n ##如果当前行、当前列和当前块均不存在该数字,则将数字更新到相应行、列、块,并尝试填充\n rowMap[row][num] = 1\n colMap[col][num] = 1\n blockMap[bolckIndex][num] = 1\n board[row][col] = str(num)\n found = fillBoard(board, locs)#递归到下一层填充\n rowMap[row][num] = 0##状态回溯,将填充的位置清空\n colMap[col][num] = 0\n blockMap[bolckIndex][num] = 0\n if not found:##如果本轮都无法求解,则回溯到初始状态,继续从前面再填充\n locs.append((row, col))\n board[row][col] = '.'\n return found\n\n rowMap, colMap, blockMap = getMaps(board)\n locs = getLocs(board)\n fillBoard(board, locs)", "def solveSudoku(self, board: 'List[List[str]]') -> 'None':\n\n select = '.'\n row_set = []\n col_set = []\n arr_set = []\n\n for row in range(9):\n for col in range(9):\n if col == 0:\n row_set.append(set('123456789'))\n if row == 0:\n col_set.append(set('123456789'))\n if row % 3 == 0 and col % 3 == 0:\n arr_set.append(set('123456789'))\n\n if board[row][col].isdigit():\n row_set[row].remove(board[row][col])\n col_set[col].remove(board[row][col])\n arr_index = (row - row % 3) + col // 3\n arr_set[arr_index].remove(board[row][col])", "def __init__(self, size, given_cells):\n self.ROWS = string.ascii_uppercase[:size ** 2]\n self.COLS = [str(i) for i in range(1, size ** 2)]\n self.size = size\n self.given_cells = given_cells\n self.board = self.create_board()\n self.squares = [utility.cross(i, j) for i in [self.ROWS[i:i + size] for i in range(0, len(self.ROWS), size)]\n for j in [self.COLS[i:i + size] for i in range(0, len(self.COLS), size)]]\n self.attach_neighbors()\n self.update_neighbor_values_by_given()\n print(\"Initial board:\")\n GUI.print_sudoku(self.board, self.size)", "def set_up_matrix():\n matrix= []\n row= \"1 9 3 4 5\"\n row= to_int(row)\n matrix.append(row)\n row= \"2 30 4 5 6\"\n row= to_int(row)\n matrix.append(row)\n row= \"3 8 5 6 7\"\n row= to_int(row)\n matrix.append(row)\n row= \"4 5 6 7 8\"\n row= to_int(row)\n matrix.append(row)\n row= \"5 6 7 8 9\"\n row= to_int(row)\n matrix.append(row)\n return matrix", "def solveSudoku(self, board: List[List[str]]) -> None:\n def dfs(idx):\n if idx == len(blankIdx):\n return True\n else:\n i, j = blankIdx[idx]\n for num in rg:\n num += 1\n if (num not in rows[i] and\n num not in cols[j] and\n num not in boxs[i//3][j//3]):\n board[i][j]=str(num)\n rows[i].add(num)\n cols[j].add(num)\n boxs[i//3][j//3].add(num)\n if dfs(idx+1):\n return True\n board[i][j] = blank\n rows[i].remove(num)\n cols[j].remove(num)\n boxs[i//3][j//3].remove(num)\n \n rg,blank = range(9), \".\"\n rows = [set() for _ in rg]\n cols = [set() for _ in rg]\n boxs = [[set() for _ in range(3)] for j in range(3)]\n blankIdx = list()\n for i in rg:\n for j in rg:\n if board[i][j]!=blank:\n ele = int(board[i][j])\n rows[i].add(ele)\n cols[j].add(ele)\n boxs[i//3][j//3].add(ele)\n else:\n blankIdx.append((i,j))\n dfs(0)", "def solveSudoku(board):\n # represents all numbers in a specific row, col, box\n # format: if (5,9) is in rows, that means row 5 contains digit 9\n\t\t# format: if (3, 2) is in cols, that means col 3 contains digit 2\n\t\t# format: if (0,2,8) is in boxes, that means box (0,2) contains 8\n\t\t# cellsToFill is a stack that holds all the (i,j) cells we need to fill\n rows, cols, boxes = set(), set(), set()\n cellsToFill = []\n m, n = len(board), len(board[0])\n \n def initDataSets():\n for i in range(m):\n for j in range(n):\n char = board[i][j]\n if char == '.':\n cellsToFill.append((i,j))\n else:\n addToDataSets((i, char), (j, char), (i//3, j//3, char))\n\n def addToDataSets(curRow, 
curCol, curBox):\n rows.add(curRow)\n cols.add(curCol)\n boxes.add(curBox)\n \n def removeFromDataSets(curRow, curCol, curBox):\n rows.remove(curRow)\n cols.remove(curCol)\n boxes.remove(curBox)\n \n def backtrack():\n if not cellsToFill:\n return True\n \n i, j = cellsToFill.pop()\n for char in '123456789':\n # check if the number is already in a row/col/box, if it is then skip to the next number\n curRow, curCol, curBox = (i, char), (j, char), (i//3, j//3, char)\n if curRow in rows or curCol in cols or curBox in boxes: continue\n \n # if not, add the number to the row/col/box\n addToDataSets(curRow, curCol, curBox)\n board[i][j] = char\n \n # start the recursive call for inserting the next number\n if (backtrack()):\n return True\n \n # backtrack wasn't successful, remove the number from the row/col/box\n removeFromDataSets(curRow, curCol, curBox)\n board[i][j] = '.'\n \n cellsToFill.append((i,j))\n return False\n \n initDataSets()\n print(board)\n backtrack()", "def matSet(mat, r, c, v):\n mat[r][c]=v", "def __init__(self, idx, sudoku, is_given=False, cands=None, notify=False):\n self.sudoku = sudoku\n self.idx = idx\n self.is_given = is_given\n\n self.row = idx // 9 + 1\n self.col = idx % 9 + 1\n self.box = ((idx % 9) // 3) + 3 * (idx // 9 // 3) + 1\n\n #used for debugging purposes\n self.notify = notify\n\n if cands is not None:\n self.cands = cands", "def solveSudoku(self, board) -> None:\n # Get size of board\n n = len(board)\n \n # Initialise Hashmaps\n rowMap, colMap, boxMap = {}, {}, {}\n \n # Create set for each index in row, col and box hashmaps\n for i in range(n):\n \n rowMap[i] = set()\n colMap[i] = set()\n boxMap[i] = set()\n\n # Add values to board\n for i in range(n):\n for j in range(n):\n \n # Get value on board\n val = board[i][j]\n valBoxId = self.getBoxId(i,j)\n \n # Insert to respective hashmaps\n if val != \".\":\n rowMap[i].add(val)\n colMap[j].add(val)\n boxMap[valBoxId].add(val)\n \n # Perform backtracking\n self.solveBacktrack(board, rowMap, colMap, boxMap, 0, 0)\n\n return board", "def sudoku_solver(board):\n row, col= find_empty(board)\n if row == -1 and col == -1:\n return True\n for i in range(1, 10):\n if valid(board, row, col, i):\n board[row][col] = i\n if sudoku_solver(board):\n return True\n board[row][col] = 0\n return False", "def create_sudoku(self)->list:\n grid = [[None for x in range(9)] for row in range(9)]\n for row in range(0,9):\n for column in range(0,9):\n if row <= 2 and column <=2:\n grid[row][column] = cell.Cell(0)\n elif row <= 2 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(1)\n elif row <= 2 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(2)\n elif 3 <= row <= 5 and column <= 2:\n grid[row][column] = cell.Cell(3)\n elif 3 <= row <= 5 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(4)\n elif 3 <= row <= 5 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(5)\n elif 6 <= row <= 8 and column <= 2:\n grid[row][column] = cell.Cell(6)\n elif 6 <= row <= 8 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(7)\n elif 6 <= row <= 8 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(8)\n return grid", "def reset(self):\n # replace with your code\n self.board = [[0 for dummy_index in range(self.grid_width)] for dummy_inner_index in range(self.grid_height)]", "def solveSudoku(self, board: List[List[str]]) -> None:\n\n avBoard = [[1 << 10 - 2] * 9 for _ in range(9)]\n\n self.initBoard(board, avBoard)\n while not self.isSolved(board):\n # print(avBoard)\n px, py, v = self.findUniqueOnBoard(board, avBoard)\n print(px, 
py, v)\n board[px][py] = v\n avBoard[px][py] = 0\n self.invalidate(px, py, v, board, avBoard)", "def resetBoard(self):\n\t\tself.board = np.zeros((self.boardSize,self.boardSize))" ]
[ "0.6697235", "0.6546988", "0.6452423", "0.6357415", "0.6334121", "0.6234853", "0.6204221", "0.61754245", "0.60981035", "0.6080238", "0.6060617", "0.5981177", "0.59771913", "0.59540534", "0.5935847", "0.59132874", "0.5873409", "0.5855094", "0.5833647", "0.5826307", "0.5797247", "0.5795562", "0.5754245", "0.5740916", "0.5727942", "0.5717914", "0.56998855", "0.56870615", "0.56835824", "0.56780416" ]
0.75624293
0
Print the sudoku matrix in the console
def print_sudoku_matrix(self):
    row_list = 'ABCDEFGHI'
    print " 1 2 3 4 5 6 7 8 9 "
    for i in range(9):
        if i % 3 == 0:
            print " +-------+-------+-------+"
        var = row_list[i] + " "
        for j in range(9):
            if j % 3 == 0:
                var += "| "
            if self.sudoku_matrix[i][j].get_cell_value() == 0:
                var += "."
            else:
                var += str(self.sudoku_matrix[i][j].get_cell_value())
            var += " "
        print var + "|"
    print " +-------+-------+-------+ \n"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def print_sudoku_solution(solution):\n for row in range(9):\n for col in range(9):\n print solution['%d-%d' % (row, col)][0],\n if col == 2 or col == 5:\n print '|',\n print\n if row == 2 or row == 5:\n print '------+-------+------'", "def print_sudoku(sudoku, name='SUDOKU'):\n\n print \"### {} ###\".format(name)\n for row in sudoku:\n print row", "def display(sudoku_map):\n width = 1+max(len(sudoku_map[s]) for s in squares)\n line = '+'.join(['-'*width*3]*3)\n for r in rows:\n print(''.join(sudoku_map[r+c].center(width) + ('|' if c in '36' else '') for c in cols))\n \n if r in 'CF':\n print(line)\n print()", "def print_board(self, board):\n print(\"Sudoku Board:\")\n count = 0\n for row in board:\n string = \"\"\n for num in range(len(row)):\n if row[num] != 0:\n string += str(row[num])\n else:\n string += \"_\"\n if num != len(row) - 1:\n string += \" \"\n if (num+1) % 3 == 0 and num != len(row) - 1:\n string += \"| \"\n print(string)\n count += 1\n if count % 3 == 0 and count < 9:\n print(\"_______________________________\")", "def print_grid(puzzle: str) -> None:\r\n grid = generate_grid(puzzle)\r\n print(grid)", "def printPuzzle(self):\n for i in range(9):\n print(self.puzzle[0][i], end=\" \")\n for n in range(1, 9):\n print()\n for m in range(9):\n print(self.puzzle[n][m], end=\" \")\n print(\"\\n\")", "def printGrid(grid):\n print(\"-\"*25)\n for i in range(9):\n print(\"|\", end=\" \")\n for j in range(9):\n print(grid[i][j], end=\" \")\n if (j % 3 == 2):\n print(\"|\", end=\" \")\n print()\n if (i % 3 == 2):\n print(\"-\"*25)\n \"\"\"\n Testing that solver works properly.\n \"\"\"", "def print_board(self):\n\n print\n\n for row in xrange(8):\n for column in xrange(8):\n if self.squares[row][column]:\n print self.squares[row][column],; sys.stdout.write(u'')\n else:\n if self.dark_square((row, column)):\n print u' __ ',; sys.stdout.write(u'')\n else:\n print u' . 
',; sys.stdout.write(u'')\n print\n print", "def print_board(self):\n num_rows = len(self.board)\n num_cols = len(self.board[0])\n \n for i in range(num_rows):\n if i % 3 == 0 and i != 0:\n print(\"- - - - - - - - - - - -\")\n \n for j in range(num_cols):\n if j % 3 == 0 and j != 0:\n print(\" | \", end=\"\")\n \n if j == 8:\n print(self.board[i][j])\n else:\n number = str(self.board[i][j])\n print(\"{} \".format(number), end='')", "def print_board(self):\n\n print(\"=\" * 10)\n for row in self._board_matrix:\n for entry in row:\n if entry is None:\n print(\"_\", end=\"\")\n else:\n print(entry.length, end=\"\")\n print(\"\")\n print(\"=\" * 10)", "def print_puzzle(board):\n\n row_size = get_row_size(board)\n output = '\\n'\n\n for idx, val in enumerate(board):\n output += \" {} \".format(val)\n if idx % row_size == row_size - 1:\n output += \"\\n\"\n\n return output", "def print_matrix(matrix):\n\n print(result_is)\n max_len = max((len(str(round(n))) for row in matrix for n in row))\n cell_pattern = \"{{:{pos}.{part}f}}\"\\\n .format(pos=max_len + max_decimals + 2, part=max_decimals)\n for row in matrix:\n row_gen = (cell_pattern.format(cell) for cell in row)\n print(*row_gen)", "def show(self):\n\t\tprint(\"Square Matrix:\")\n\t\tfor i in range(0, len(self.lables)):\n\t\t\tprint(self.matrix[i])", "def _print_matrix(self):\n print(self.matrix)", "def display_board(self):\n print('*' + '*'.join(['**']*len(self.board[0])) + '*')\n for row in self.board:\n print('|' + ' '.join([('%s' % square) for square in row]) + '|')\n print('*' + '*'.join(['**']*len(self.board[0])) + '*')", "def print_matrix(matrix):\n for i in range(len(matrix)):\n for j in range(len(matrix[0])):\n print(matrix[i][j], end='\\t')\n print('')", "def print_board(self):\n for row in self.board:\n for col in row:\n print(col, end=\"\")\n print()", "def print_board(self):\n\n board = self.get_board()\n row = 9\n while row > -1:\n print(row, board[row])\n row -= 1\n print(\" 0 1 2 3 4 5 6 7 8 9\")", "def solveSudoku(self, board):\n self.back_track(board)\n print(board)", "def print_board(self):\r\n for row in range(len(self.board)):\r\n line = str(row)+\": \"\r\n for cell in self.board[row]:\r\n line += cell + \" \"\r\n print(line)\r\n print(\" A B C D E\")", "def print_board(self):\n for row in range(len(self.board)):\n line = str(row)+\": \"\n for cell in self.board[row]:\n line += cell + \" \"\n print(line)\n print(\" A B C D E\")", "def print_board(self):\n for row in range(len(self.board)):\n line = str(row)+\": \"\n for cell in self.board[row]:\n line += cell + \" \"\n print(line)\n print(\" A B C D E\")", "def print_board(self):\n line = \"---------------------\"\n for i, row in enumerate(self.board):\n row_string = \"\"\n for j, col in enumerate(row):\n if j == 3 or j == 6:\n row_string += \"| \"\n row_string += str(row[j]) + \" \"\n print(row_string)\n if i == 2 or i == 5:\n print(line)", "def PrintMatrix(self):\n # loop through the rows\n for i in range(self.rows):\n # intialise the matrix\n mat = []\n # loop through the column\n for j in range(self.cols):\n # append matrix element\n mat.append(self.matrix[i][j])\n # print the matrix\n print(mat)", "def print(self):\n for row in self.board:\n print(row)", "def print_board(self):\n print(\" 1 2 3 4 5 6 7\")\n for row in range(self.playable_row_range[0], self.playable_row_range[1]):\n for col in range(self.playable_column_range[0], self.playable_column_range[1]):\n print(\"[{piece}]\".format(piece=self.board[row][col]), end=\" \")\n print('\\n', end=\"\")\n 
print(\"\\n\")", "def print_matrix(matrix):\n [print(*line) for line in matrix]", "def print_board(self):\n for i in range(self.size):\n print(\" \".join(self.board[i]))\n print(\"\\n\")", "def show_board(self): \n for row in range(self.n):\n for col in range(self.n):\n if [row, col] in self.queens:\n print (' Q ', end = '')\n else:\n print (' - ', end = '')\n print()\n print()", "def show_np(mat):\n for x in range(15):\n for y in range(15):\n if (x == 7) and (y == 7):\n print(\"\\033[%d;%d;%dm**\\033[0m\" % (0, 33, 41), end='')\n elif mat[x, y, 0] > 0:\n print(\"\\033[%d;%d;%dm \\033[0m\" % (0, 31, 41), end='')\n elif mat[x, y, 1] > 0:\n print(\"\\033[%d;%d;%dm \\033[0m\" % (0, 32, 42), end='')\n else:\n print(\" \", end='')\n print(\"\")" ]
[ "0.76974213", "0.7565851", "0.742648", "0.7263379", "0.70990515", "0.7066005", "0.70275044", "0.70215726", "0.69965345", "0.69322765", "0.6923831", "0.689935", "0.68883264", "0.6876396", "0.6810913", "0.678954", "0.6756092", "0.6743901", "0.6702986", "0.66843426", "0.6683793", "0.6683793", "0.66822356", "0.668047", "0.6679912", "0.66688955", "0.66625595", "0.66591036", "0.66538644", "0.6642834" ]
0.8277411
0
Hide cell in the Sudoku Matrix
def hide_values_in_matrix(self, difficult):
    row = random.randint(0, 8)
    column = random.randint(0, 8)
    if (difficult != 0):
        self.sudoku_matrix[row][column].set_cell_visibility(True)
        self.sudoku_matrix[row][column].set_cell_value(0)
        self.hide_values_in_matrix(difficult - 1)
    else:
        pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _hide_numbers(self):\n global counter\n\n # num of attempts allow for more blocks to be removed\n attempts = self._difficulty\n\n while attempts > 0:\n # selecting random cell and rotational counterpart\n row = randint(0, 8)\n col = randint(0, 8)\n while self._grid_init[row][col] == 0:\n row = randint(0, 8)\n col = randint(0, 8)\n\n # backing up in case removal is gives multiple solutions\n backupone = self._grid_init[row][col]\n backuptwo = self._grid_init[8 - row][8 - col]\n self._grid_init[row][col] = 0\n self._grid_init[8 - row][8 - col] = 0\n\n # cloning grid to test number of solutions\n test_puzzle = []\n for r in range(0, 9):\n test_puzzle.append(self._grid_init[r][:])\n\n # counter for num solutions is set to 0\n counter = 0\n\n # check num of solutions\n self._solve_puzzle(test_puzzle)\n\n # if num of solutions is not one, replace the two blocks\n if counter != 1:\n self._grid_init[row][col] = backupone\n self._grid_init[8 - row][8 - col] = backuptwo\n attempts -= 1", "def hidden_round(self):\n self.change = False\n for row in range(self.board_size):\n hidden = self.find_singles(self.possibles[row])\n hidden = [[num, (row, pos)] for num, pos in hidden]\n if hidden:\n self.set_hidden(hidden)\n for col in range(self.board_size):\n hidden = self.find_singles([self.possibles[row][col] for row in range(self.board_size)])\n hidden = [[num, (pos, col)] for num, pos in hidden]\n if hidden:\n self.set_hidden(hidden)\n for index in range(self.board_size):\n squ = self.squares[index]\n hidden = self.find_singles([self.possibles[cell[0]][cell[1]] for cell in squ])\n hidden = [[num, squ[pos]] for num, pos in hidden]\n if hidden:\n self.set_hidden(hidden)", "def disable_cells(self):\n for index in range(0, 9):\n cell = getattr(self, 'cell_' + str(index))\n cell.config(state=DISABLED)", "def hide(self):\n self.row_box.grid_remove()\n self.field_name_box.grid_remove()\n self.field_name_label.grid_remove()\n self.value_box.grid_remove()\n self.active_value_widget.grid_remove()", "def cells_off(self):\n self.plotter.cells_off(self.ax)\n self.fig.canvas.draw()", "def setNoHiddenLines():\n dislin.nohide()", "def clearCell(self, (xIndex, yIndex)):\n changed = self.grid[xIndex][yIndex] == True\n self.grid[xIndex][yIndex] = False\n if changed:\n self.drawSquare((xIndex, yIndex))", "def hidden():\n return False", "def unHide(self):\n self.visible = True", "def toggle_satni_grid(self, x):\r\n self.konfig.satni.set_grid(x)\r\n self.satniGraf.toggle_grid(x)", "def switch_rawhide(self, key, rows):\n self.controller.set_context('rawhide')", "def graph_exclude_bits(self, targ_row=None, targ_col=None):\n self.bitcell_array.graph_exclude_bits(targ_row, targ_col)", "def cells_off(self,ax):\n self.cells.off(ax)", "def get_cells_to_hide(self, level):\n level = LEVEL[level]\n bottom = level[BOTTOM]\n top = level[TOP]\n return random.randint(bottom, top)", "def hide_invisible_headers(self):\n # Hide all the non selected columns\n col_index = 0\n for header in self.column_headers_all:\n if header in self.column_headers:\n self.csv_data_table.setColumnHidden(col_index, False)\n self.file_changed = True\n self.set_save_enabled(True)\n else:\n self.csv_data_table.setColumnHidden(col_index, True)\n col_index = col_index + 1", "def hide(self):\n self.visible = False", "def toggle_zero_grid(self, x):\r\n self.konfig.zero.set_grid(x)\r\n self.zeroGraf.toggle_grid(x)", "def uncover_blanks(self, row, col):\n checked = {}\n to_be_checked = []\n to_be_checked.append((row, col))\n while len(to_be_checked) > 0:\n sq_row, 
sq_col = to_be_checked.pop()\n if checked.has_key((sq_row, sq_col)):\n continue\n checked[(sq_row, sq_col)] = True\n if not self.valid_square(sq_row, sq_col):\n continue\n if self.array[sq_row][sq_col].visible is True:\n continue\n square = self.array[sq_row][sq_col]\n square.visible = True\n self.squares_left -= 1\n if square.type == SquareType.BLANK:\n start_row = sq_row-1\n start_col = sq_col-1\n end_row = sq_row+1\n end_col = sq_col+1\n for i in range(start_row, end_row+1):\n for j in range(start_col, end_col+1):\n if not checked.has_key((i, j)):\n to_be_checked.append((i, j))", "def hide_figure_grid(fig: object, grid: object) -> None:\n grid.grid(False)", "def reveal_cells(self, grid_size, pokemon_locations, index):\n number = self.number_at_cell(pokemon_locations, grid_size, index)\n self.replace_character_at_index(index, str(number))\n clear = self.big_fun_search(grid_size, pokemon_locations, index)\n for i in clear:\n if self._game_board[i] != FLAG:\n number = self.number_at_cell(pokemon_locations, grid_size, i)\n self.replace_character_at_index(i, str(number))\n\n return self._game_board", "def hide(self):\n self.set_visible(False)", "def toggle(self, row: int, col: int):\n\t\t\n\t\tnbrs = self.neighbors(row, col)\n\t\tself.board[row - 1][col - 1] = not self.board[row - 1][col - 1]\n\t\tfor nbr in nbrs:\n\t\t\tself.board[nbr[0] - 1][nbr[1] - 1] = not self.board[nbr[0] - 1][nbr[1] - 1]", "def reset_hidden(hidden, mask):\n if len(mask) != 0:\n hidden[:, mask, :] = 0\n \n return hidden", "def toggle_minutni_grid(self, x):\r\n self.konfig.minutni.set_grid(x)\r\n self.minutniGraf.toggle_grid(x)", "def render_grid(grid):\n rows = grid.shape[0]\n cols = grid.shape[1]\n for row in range(rows):\n for col in range(cols):\n if grid[row, col] != 1 and grid[row, col] != 0:\n grid[row, col] = 1\n return grid", "def hiding(self, name, hide, axis='y', hide_values=True):\n for n in name:\n collection = 'columns' if not self.is_array(n) else 'masks'\n if 'rules' not in self._meta[collection][n]:\n self._meta[collection][n]['rules'] = {'x': {}, 'y': {}}\n if not isinstance(hide, list): hide = [hide]\n\n if collection == 'masks' and 'y' in axis and not hide_values:\n raise ValueError('Cannot hide mask items on y axis!')\n for ax in axis:\n if collection == 'masks' and ax == 'x' and not hide_values:\n sources = self.sources(n)\n h = [sources[idx-1]\n for idx, s in enumerate(sources, start=1) if idx in hide]\n else:\n h = self._clean_codes_against_meta(n, hide)\n if set(h) == set(self._get_valuemap(n, 'codes')):\n msg = \"Cannot hide all values of '{}'' on '{}'-axis\"\n raise ValueError(msg.format(n, ax))\n if collection == 'masks' and ax == 'x' and hide_values:\n for s in self.sources(n):\n self.hiding(s, h, 'x')\n else:\n rule_update = {'dropx': {'values': h}}\n self._meta[collection][n]['rules'][ax].update(rule_update)\n return None", "def _hide_labels(self):\n pass", "def ensure_hidden(self):\n self.set_visible(False)", "def reset_possible(self, num, row, col):\n self.possibles[row][col] = []\n for c in range(self.board_size):\n print(num, row, c)\n if num in self.possibles[row][c]:\n self.possibles[row][c].discard(num)\n for r in range(self.board_size):\n if num in self.possibles[r][col]:\n self.possibles[r][col].discard(num)\n index = self.get_square_index((row, col))\n squ = self.squares[index]\n for cell in squ:\n if num in self.possibles[cell[0]][cell[1]]:\n self.possibles[cell[0]][cell[1]].discard(num)", "def delete_value(loc):\r\n 
(application.ui.__getattribute__(f'cell{loc.column+1}{loc.row+1}')).setText(\"\")\r\n sudoku_grid[loc.row, loc.column] = 0\r\n global cnt_free_cells\r\n cnt_free_cells += 1" ]
[ "0.65699524", "0.64596504", "0.6226179", "0.6132422", "0.6060004", "0.5992399", "0.59386015", "0.58700484", "0.5865002", "0.5862993", "0.58458006", "0.5845631", "0.5832931", "0.5831914", "0.58127666", "0.57925963", "0.5776719", "0.57695794", "0.5753077", "0.5743643", "0.5735773", "0.5722374", "0.5714577", "0.5696888", "0.5686551", "0.5673691", "0.56635684", "0.56355226", "0.56192553", "0.5599822" ]
0.81077296
0