query            stringlengths  (9 to 9.05k)
document         stringlengths  (10 to 222k)
metadata         dict
negatives        listlengths    (30 to 30)
negative_scores  listlengths    (30 to 30)
document_score   stringlengths  (4 to 10)
document_rank    stringclasses  (2 values)
Return whether Index supports a specific attribute.
def supports_index_feature(attr_name):
    return supports_indexes and hasattr(_test_index, attr_name)
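A usage sketch follows; the supports_indexes flag and _test_index object normally come from the enclosing scope, so the stand-ins below are assumptions for illustration only:

# Hypothetical stand-ins for the names the helper closes over.
supports_indexes = True

class _TestIndex:
    include = ('name',)  # pretend this backend's Index exposes `include`

_test_index = _TestIndex()

def supports_index_feature(attr_name):
    return supports_indexes and hasattr(_test_index, attr_name)

print(supports_index_feature('include'))    # True
print(supports_index_feature('condition'))  # False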
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_attribute(self):\r\n return conf.lib.clang_isAttribute(self)", "def has_attribute(self, name):\n return name in self.schema", "def exists(self, index):\n return self._node.attributes.has_public_attribute(index)", "def has_attribute(self, attribute):\n return (attribute in self.attribute_list)", "def has_attribute(self, name):\n\n pass", "def _checkAttributeIndex(self, index):\n\n if index > len(self._attributes):\n raise IndexError(\"'\" + str(index) + \"' is out of the range of 'attributes' array.\")\n\n return True", "def isAttribute(self, p_int): # real signature unknown; restored from __doc__\n return False", "def UseAttribute(self) -> bool:", "def hasAttribute(self, *args):\n return _libsbml.XMLAttributes_hasAttribute(self, *args)", "def has_attribute(self, key):\n return key in self.__dict", "def hasAttribute(self, attrib):\n return self._dqa(attrib) in self.attributes", "def has_attr(self, key):\n return key in self.attrs", "def __contains__(self, attribute_name):\n return False # pragma: no cover", "def getAttributeByIndex(self, index):\n\n if self._checkAttributeIndex(index) is not True:\n return False\n\n return self._attributes[index]", "def hasRequiredAttributes(self):\n return _libsbml.SpeciesTypeComponentIndex_hasRequiredAttributes(self)", "def __contains__(self, attr):\n return attr in self._config", "def IDX_CHECK(attribute_name):\n if attribute_name == 'Alt':\n return 0\n if attribute_name == 'Bar':\n return 1\n if attribute_name == 'Fri':\n return 2\n if attribute_name == 'Hun':\n return 3\n if attribute_name == 'Pat':\n return 4\n if attribute_name == 'Price':\n return 5\n if attribute_name == 'Rain':\n return 6\n if attribute_name == 'Res':\n return 7\n if attribute_name == 'Type':\n return 8\n if attribute_name == 'Est':\n return 9", "def sk_attr(est, attr):\n from sklearn.utils.validation import check_is_fitted\n from sklearn.exceptions import NotFittedError\n try:\n check_is_fitted(est, attr)\n return True\n except NotFittedError:\n return False", "def _is_encodable_attribute(name):\n if name == '_meta':\n return True\n elif name.startswith(\"_\") or name.startswith(\"__\") or name == \"ext\":\n return False\n else:\n return True", "def is_attr_exist(self, section_name: str, attr_name: str) -> bool:\n pass", "def is_valid(self, attribute: Attribute) -> bool:\n return self.get_data_type() == attribute.type", "def has_attr_with_name(self, name):\n for attr in self:\n if attr.name == name:\n return True\n\n return False", "def hasAttr(self, *args):\n return _libsbml.XMLToken_hasAttr(self, *args)", "def hasAttribute(self, p_str, p_str_1=None): # real signature unknown; restored from __doc__ with multiple overloads\n return False", "def contains_attr(self, gi):\n if gi is None:\n return False\n for gi_obj in self.gradual_items:\n if gi.attribute_col == gi_obj.attribute_col:\n return True\n return False", "def isattribute(tokens, x):\n\n # True if token is a column and next token is not an operator\n return Token.iscolumn(tokens[x]) and not Token.isoperator(Token.get(tokens, x + 1))", "def is_valid_attribute(self, attr):\n return self.is_valid(attr)", "def is_valid_attribute(self, attr):\n try:\n self.validate_attribute(attr)\n return True\n except etal.LabelsSchemaError:\n return False", "def is_valid_attribute(self, attr):\n try:\n self.validate_attribute(attr)\n return True\n except etal.LabelsSchemaError:\n return False", "def has_attribute(self, attribute: str) -> bool:\n return any([\n key_node.value == attribute for key_node, _ in self.yaml_node.value\n 
])" ]
[ "0.7082171", "0.671865", "0.67019475", "0.6647597", "0.6562679", "0.65566695", "0.65162414", "0.6499327", "0.6332971", "0.6257881", "0.62174255", "0.6215965", "0.6213659", "0.62081057", "0.6135819", "0.6025148", "0.60145885", "0.5995824", "0.59782404", "0.5945429", "0.59370095", "0.5929017", "0.59234226", "0.59050846", "0.59031826", "0.58988655", "0.5890159", "0.58881056", "0.58881056", "0.58861744" ]
0.7971018
0
Takes a given filename containing a version, optionally updates the version, saves it, and returns the version.
def ReadAndUpdateVersion(version_filename, update_position=None):
    if os.path.exists(version_filename):
        current_version = open(version_filename).readlines()[0]
        numbers = current_version.split('.')
        if update_position:
            numbers[update_position] = '%02d' % (int(numbers[update_position]) + 1)
            if update_position < -1:
                numbers[update_position + 1:] = ['00'] * -(update_position + 1)
        version = '.'.join(numbers)
    else:
        version = FIRST_VERSION
    with open(version_filename, 'w') as fout:
        fout.write(version)
    print('\n'.join(['Version %s' % version]))
    return version
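For illustration, here is the negative-index bumping logic in isolation, a minimal sketch assuming a three-part version string:

# Bump the middle component of '1.04.07' (update_position=-2).
numbers = '1.04.07'.split('.')  # ['1', '04', '07']
update_position = -2
numbers[update_position] = '%02d' % (int(numbers[update_position]) + 1)
if update_position < -1:
    # Every component to the right of the bumped one resets to '00'.
    numbers[update_position + 1:] = ['00'] * -(update_position + 1)
print('.'.join(numbers))  # 1.05.00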
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def versioned(filename, version, force_version=False, full_path=True):\n if not '.' in filename:\n return None\n\n if USE_VERSIONING or force_version:\n dotindex = filename.rindex('.')\n filename = u'%s.%s%s' % (filename[:dotindex], version, filename[dotindex:])\n\n if full_path:\n return static(filename)\n\n return filename", "def bumpversion(path=\"setup.cfg\"):\n config = ConfigParser()\n config.read(path)\n cfg = open(path, 'w')\n new_version = \"0.0.0\"\n if config.has_option('metadata', 'version'):\n old_version = config.get('metadata', 'version')\n major, minor, patch = old_version.split(\".\")\n new_version = \"%s.%s.%s\" % (major, minor, int(patch) + 1)\n if not config.has_section('metadata'):\n config.add_section('metadata')\n config.set('metadata', 'version', new_version)\n config.write(cfg)\n cfg.close()\n return new_version", "def version_file_restore(func):\n\n @wraps(func)\n def wrapper(*args, **kwargs):\n version_file = cli.version_file()\n version_file_existed = os.path.isfile(version_file)\n orig_version = kolibri.__version__\n kwargs[\"orig_version\"] = orig_version\n\n if version_file_existed:\n kwargs[\"version_file\"] = version_file\n\n func(*args, **kwargs)\n\n if version_file_existed:\n open(version_file, \"w\").write(orig_version)\n\n return wrapper", "def _save_version_file(cls, hivemind_version, git_revision, git_date):\n with open(\"hive/version.py\", 'w') as version_file:\n version_file.write(\"# generated by setup.py\\n\")\n version_file.write(\"# contents will be overwritten\\n\")\n version_file.write(\"VERSION = '{}'\\n\".format(hivemind_version))\n version_file.write(\"GIT_REVISION = '{}'\\n\".format(git_revision))\n version_file.write(\"GIT_DATE = '{}'\\n\".format(git_date))", "def version(version_file=default_version_file, osp_package=default_osp_package):\n\n if os.path.exists(version_file):\n (version_string, version_name) = version_from_file(version_file)\n\n else:\n package_info = get_package_info(osp_package)\n repo_name = get_package_repo_name(package_info)\n version_string = get_version_from_repo_name(repo_name)\n\n if version_string == None:\n version_string = \"unknown\"\n \n return version_string", "def get_version(self, directory, version_file_name='.version'):\n if self.path_exists(directory) and (version_file_name in os.listdir(directory)):\n f = open(directory + '/' + version_file_name)\n version = f.read()\n f.close()\n return version\n return None", "def __write_build_version(file_path, identifier, version):\n\n with open(file_path) as fp:\n lines = fp.readlines()\n\n new_lines = []\n for line in lines:\n if line.find(identifier) > -1:\n parts = line.split(identifier)\n parts[-1] = version + '\\n'\n new_line = identifier.join(parts)\n new_lines.append(new_line)\n else:\n new_lines.append(line)\n\n fp2 = open(file_path, 'w')\n fp2.write(''.join(new_lines))\n fp2.close()", "def set_version(self, bundle, ctx, filename, version):", "def updateFile(filename, content):\n\tfilename = adaptPath(filename)\n\tif filename != None:\n\t\ttry:\n\t\t\toldContent = open(filename, \"r\").read()\n\t\texcept IOError:\n\t\t\toldContent = \"\"\n\t\tif oldContent != content:\n\t\t\tfile = open (filename, \"w\")\n\t\t\tfile.write(content)\n\t\t\tfile.close()\n\treturn content", "def update_setupcfg_version(filename, version):\n\n setup_cfg = open(filename).readlines()\n current_section = None\n updated = False\n\n for idx, line in enumerate(setup_cfg):\n m = ConfigParser.SECTCRE.match(line)\n if m:\n if current_section == 'metadata':\n # We already parsed the 
entire metadata section without finding\n # a version line, and are now moving into a new section\n break\n current_section = m.group('header')\n continue\n\n if '=' not in line:\n continue\n\n opt, val = line.split('=', 1)\n opt, val = opt.strip(), val.strip()\n if current_section == 'metadata' and opt == 'version':\n setup_cfg[idx] = 'version = %s\\n' % version\n updated = True\n break\n\n if updated:\n open(filename, 'w').writelines(setup_cfg)\n logger.info(\"Set %s's version to %r\" % (os.path.basename(filename),\n version))", "def build_version_file(provided_version):\n version_json = Path(VERSION_FILE_NAME)\n\n if provided_version == 'auto':\n # Read version.json\n with version_json.open('r') as version_file:\n version = json.load(version_file)\n current_version = version.get('version')\n\n version_parts = [int(part) for part in current_version.split('.')]\n version_parts[-1] += 1 # auto increment last version part. Major + Minor versions must be set manually\n provided_version = '.'.join(str(part) for part in version_parts)\n\n with version_json.open('w') as version_file:\n json.dump({'version': provided_version}, version_file)", "def write_version(settings, version, force=False):\n semver_path = settings['semver_path']\n filename = settings['semver_branch']\n path = os.path.join(semver_path, filename)\n logger.debug(f'write version:{version} to path:{path} with force:{force}')\n\n path_exists = os.path.exists(path)\n if path_exists:\n current_version = read_version(settings)\n if current_version == version:\n logger.debug(f'version is same as current version {current_version}')\n return\n\n if not path_exists or force:\n write_file(path, version)\n semver_repo = Repo(semver_path)\n index = semver_repo.index\n index.add([filename])\n semver_user_name = settings['semver_user_name']\n semver_user_email = settings['semver_user_email']\n author = Actor(semver_user_name, semver_user_email)\n index.commit(f'semver({filename}): {version}', author=author, committer=author, parent_commits=None)", "def update_version(filename, new_version, vers_attr='__version__'):\n lines = []\n with open(filename, 'r') as handle:\n lines = handle.readlines()\n\n updated = False\n for lineno, line in enumerate(lines):\n if line.startswith(vers_attr):\n lines[lineno] = \"{0} = \\\"{1}\\\"\\n\".format(vers_attr, new_version)\n updated = True\n break\n if not updated:\n lines.append(\"{0} = \\\"{1}\\\"\\n\".format(vers_attr, new_version))\n\n with open(filename, 'w') as handle:\n handle.writelines(lines)", "def set_version(self, version):\n\n def update_version(version, filepath):\n with open(filepath, \"r\") as stream:\n contents = stream.read()\n\n new_contents = _fix_contents_version(contents, version)\n assert contents != new_contents\n with open(filepath, \"w\") as stream:\n stream.write(new_contents)\n\n update_version(version, os.path.join(\".\", \"package.json\"))\n update_version(version, os.path.join(\".\", \"src\", \"setup.py\"))\n update_version(\n version, os.path.join(\".\", \"src\", \"robocorp_code\", \"__init__.py\")\n )", "def update_version(self, version):\n self._metadata['version'] = version\n\n if self._type == '.json':\n with open(self._filename, 'w') as f:\n f.write(json.dumps(self._metadata, indent=2))\n\n dof_filename = os.path.join(self.path, self.name + '.dof')\n if os.path.isfile(dof_filename):\n dof_file = DOFFile(dof_filename)\n dof_file.update_version(version)", "def save_version(version_name, yml):\n\n output_1 = version(version_name)\n output_2 = path(yml)\n return ' - Save 
version ' + output_1 + '\\n' + output_2", "def get_version_filename(filename):\n return re.search(r'\\d+', filename).group(0)", "async def update_version(self, version: int):\n async with open(self.__file_name, mode=\"r\") as auth_file:\n tag_data = json.loads(await auth_file.read())\n await auth_file.close()\n async with open(self.__file_name, mode=\"w\") as auth:\n tag_data[\"version\"] = version\n await auth.write(json.dumps(tag_data, indent=2, sort_keys=True))\n await auth.close()\n self.__version = version", "def _get_version(self, identifier: Identifier,\n version: Optional[int] = None) -> DocMetadata:\n parent_path = self._get_parent_path(identifier=identifier,\n version=version)\n path = os.path.join(parent_path,\n (f'{identifier.filename}.abs' if not version\n else f'{identifier.filename}v{version}.abs'))\n return self.parse_abs_file(filename=path)", "def save(self, new=None, timeout=2):\n if new: self.update(new) # allow two operations (update + save) with a single command\n if not self._updated: return # nothing to do\n thisPkg = os.path.dirname(__file__)\n filename = os.path.join(thisPkg, c.FOLDER_JSON, c.FILE_GAME_VERSIONS)\n fParts = c.FILE_GAME_VERSIONS.split('.')\n newFile = os.path.join(thisPkg, c.FOLDER_JSON, \"%s_%s.%s\"%(fParts[0], dateFormat.now(), fParts[1]))\n if not os.path.isfile(newFile):\n #fParts = c.FILE_GAME_VERSIONS.split('.')\n #newFile = \"%s%s%s_%s.%s\"%(c.FOLDER_JSON, os.sep, fParts[0], dateFormat.now(), fParts[1])\n #if not os.path.isfile(newFile):\n #print(filename)\n #print(newFile)\n os.rename(filename, newFile) # backup existing version file\n recordKeys = [(record[\"version\"], record) for record in Handler.ALL_VERS_DATA.values()]\n data = [r for k,r in sorted(recordKeys)] # i.e. get values sorted by version key\n start = time.time()\n while time.time()-start < timeout: # allow multiple retries if multiple processes fight over the version file\n try:\n with open(filename, \"wb\") as f:\n f.write(str.encode(json.dumps(data, indent=4, sort_keys=True))) # python3 requires encoding str => bytes to write to file\n self._updated = False\n return\n except IOError: pass # continue waiting for file to be available\n raise # after timeout, prior exception is what matters", "def removeVersionFromFilename(filename):\n return filename[:-3]", "def get_plugin_version(filename):\n m = PLUGIN_PACKAGE_RE.search(filename or '')\n if m:\n return m.group(3)\n else:\n return None", "def write_version_file(version):\n try:\n git_log = subprocess.check_output(\n ['git', 'log', '-1', '--pretty=%h %ai']).decode('utf-8')\n git_diff = (subprocess.check_output(['git', 'diff', '.']) +\n subprocess.check_output(\n ['git', 'diff', '--cached', '.'])).decode('utf-8')\n if git_diff == '':\n git_status = '(CLEAN) ' + git_log\n else:\n git_status = '(UNCLEAN) ' + git_log\n except Exception as e:\n print(\"Unable to obtain git version information, exception: {}\"\n .format(e))\n git_status = ''\n\n version_file = '.version'\n if os.path.isfile(version_file) is False:\n with open('bilby/' + version_file, 'w+') as f:\n f.write('{}: {}'.format(version, git_status))\n\n return version_file", "def update_version():\n version = os.environ.get('TRAVIS_COMMIT', None) or \\\n subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'])\n version_file = path.join('slingsby', 'VERSION')\n with open(version_file, 'w') as fh:\n fh.write(version)", "def get_version():\n\n current_dir = os.path.dirname(os.path.realpath(__file__))\n version_path = os.path.join(current_dir, VERSION_FILE)\n\n with 
open(version_path, 'r') as version_fd:\n return version_fd.read().strip()", "def _filepath(self, filename):\n return os.path.join(self.root, self.version, filename)", "def get_version(file, name=\"__version__\"):\n path = os.path.realpath(file)\n local_namespace = {}\n exec(open(path).read(), {}, local_namespace)\n return local_namespace[name]", "def version(self):\n a = re.search('(?<=_V)\\d{1,2}', self.fname)\n if a is None:\n return None\n else:\n return int(a.group())", "def commitVersion(self, tempFile, stamp):\n os.rename(tempFile, self.getFile(stamp))", "def get_versioned_file(package, component):\n\n candidate = None\n candidate_version = None\n\n for fn in resources.contents(package):\n if fn.startswith('__'):\n continue\n\n # There must be one '-' separator.\n name, version = fn.rsplit('-', maxsplit=1)\n\n for ext in ('.py', '.h', '.c', '.cpp'):\n if version.endswith(ext):\n version = version[:-len(ext)]\n break\n\n try:\n version = VersionNumber.parse_version_number(version)\n except UserException:\n continue\n\n if version > component.version:\n # This is for a later version so we can ignore it.\n continue\n\n if candidate is None or candidate_version < version:\n # This is a better candidate than we have so far.\n candidate = fn\n candidate_version = version\n\n return candidate" ]
[ "0.7239123", "0.6710091", "0.6640225", "0.6498644", "0.63722545", "0.634427", "0.6290618", "0.62780464", "0.6245203", "0.6243732", "0.6232061", "0.62206507", "0.6211574", "0.6179349", "0.6108471", "0.6086702", "0.60851413", "0.6081705", "0.5965097", "0.5928732", "0.5923892", "0.59027386", "0.5895889", "0.5865548", "0.5865258", "0.58481133", "0.58349055", "0.5832698", "0.57902074", "0.57596624" ]
0.72257257
1
This method updates the calculated rating of a review. It takes the currently assigned ratings for each property, adds them up, then divides by the total points to calculate the percentage of points given. Once that value is calculated, it is divided by 0.2 to convert the value into a "5 star" rating.
def calculate(self):
    rating = 0
    props = ['aroma', 'appearance', 'taste', 'palate', 'bottle_style']
    for item in props:
        rating += getattr(self, item, 0)
    self.overall = (rating / self.total) / .2
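A worked example of the scaling, with hypothetical numbers (assuming the five properties can earn 20 points in total):

# 16 of a possible 20 points were given.
rating, total = 16, 20
percentage = rating / total  # 0.8, i.e. 80% of available points
overall = percentage / .2    # 4.0 on the "5 star" scale
print(overall)               # 4.0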
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_rating_average(self, rating):\n self.num_ratings += 1\n self.rating_total += rating\n self.save(update_fields=[\"num_ratings\", \"rating_total\"])\n self.average_rating = int(round(self.rating_total/self.num_ratings))\n self.save(update_fields=[\"average_rating\"])\n return", "def get_rating(self):\n self.total = sum(int(review['stars']) for review in self.reviews.values())\n if self.total > 0:\n return round(self.total / self.reviews.count(), 1)\n else:\n return self.total", "def updateUserRating(definition, increase):\n user = mongo.db.users.find_one({\"_id\": definition[\"submitted_by\"]})\n mongo.db.users.update_one(\n {\"_id\": user[\"_id\"]},\n {\"$inc\": {\"total_rating\": increase}})", "def update_attendance_rate(self):\n\n total_attendees = self.attendee_set.all().count()\n attended = self.attendee_set\\\n .filter(presented=True)\\\n .count()\n self.attendance_rate = attended / total_attendees\n assert(self.attendance_rate != None)\n self.save()", "def get_rating(self):\n if not (self.votes and self.score):\n return 0\n return float(self.score)/(self.votes+self.field.weight)", "def average_rating(self):\n return ( self.rating_1 + self.rating_2 + self.rating_3) / 3", "def update_comment_score(self, loginID, commentID, attrib_name):\n self.cursor.execute(\"SELECT rating FROM rates WHERE loginID = %s AND commentID = %s\", (loginID, commentID))\n old_rating = self.cursor.fetchall()\n if old_rating:\n # This user already rated this comment. Change the rating.\n if old_rating[0][0] == attrib_name:\n # Remove the rating, because the user already voted for this.\n self.cursor.execute(\"UPDATE comment SET \" + attrib_name + \"=\" + attrib_name + \"-1 WHERE commentID=%s\",\n (commentID,))\n self.cursor.execute(\"\"\"DELETE FROM rates WHERE loginID=%s AND commentID=%s\"\"\",\n (loginID, commentID))\n else:\n self.cursor.execute(\n \"UPDATE comment SET \" + old_rating[0][0] + \"=\" + old_rating[0][0] + \"-1, \" + attrib_name\n + \"=\" + attrib_name + \"+1 WHERE commentID=%s\"\"\", (commentID,))\n self.cursor.execute(\"\"\"UPDATE rates SET rating=%s WHERE loginID=%s AND commentID=%s\"\"\",\n (attrib_name, loginID, commentID))\n else:\n # New rating, just need to update one value and add a new rating tuple to rates\n self.cursor.execute(\"UPDATE comment SET \" + attrib_name + \"=\" + attrib_name + \"+1 WHERE commentID=%s\",\n (commentID,))\n self.cursor.execute(\"\"\"INSERT INTO rates VALUES (%s,%s,%s)\"\"\", (loginID, commentID, attrib_name))\n self.db.commit()\n self.update_comment_avg_score(commentID)", "def review(self, performance_rating):\n self.correct = performance_rating >= 0.6\n now = datetime.datetime.now()\n if self.date_last_reviewed is None:\n self.date_last_reviewed = now\n percent_overdue = self.percent_overdue\n self.difficulty += percent_overdue / 17 * (8 - 9 * performance_rating)\n self.difficulty = max(0, min(self.difficulty, 1)) # clamp difficulty to [0, 1]\n difficulty_weight = 3 - 1.7 * self.difficulty\n if self.correct:\n self.days_between = 1 + (difficulty_weight - 1) * percent_overdue\n else:\n self.days_between = max(1, 1 / (difficulty_weight ** 2))\n self.date_last_reviewed = now", "def average_rating(self):\n return ( self.rating_1 + self.rating_2 + self.rating_3 + self.rating_4 + self.rating_5 + self.rating_6 + self.rating_7) / 7", "def __updateRatings(oldRatings, winner):\n r1, r2 = oldRatings\n R1 = 10 ** (float(r1) / 400)\n R2 = 10 ** (float(r2) / 400)\n E1 = R1 / (R1 + R2)\n E2 = R2 / (R1 + R2)\n\n S1 = 0\n S2 = 0\n if winner == 0:\n S1 = 1\n 
else:\n S2 = 1\n\n K = 32\n player1NewRating = int(round(r1 + K * (S1 - E1)))\n player2NewRating = int(round(r2 + K * (S2 - E2)))\n\n return (player1NewRating, player2NewRating)", "def rating(self):\n average = self.review.all().aggregate(Avg('rating'))['rating__avg']\n if not average:\n return 0\n return average", "def average_review_stars():\n # get all un-counted reviews\n reviews = Review.query.filter_by(marked=False).join(Restaurant)\\\n .with_entities(Review, Restaurant).all()\n logging.info(f\"Averaging review stars of {len(reviews)} retrieved reviews..\")\n for review, restaurant in reviews:\n # compute running mean of reviews\n restaurant.num_reviews += 1\n restaurant.avg_stars = 1/restaurant.num_reviews * \\\n (restaurant.avg_stars * (restaurant.num_reviews-1) + review.stars)\n review.marked = True\n # update rows \n db.session.commit()", "def totalRating(self):\r\n result = 0\r\n for v in self.votes:\r\n result += v.voteType.weight\r\n\r\n return result", "def update_girl(self, hash, new_rate):\n image = self._db.girls.find_one({'_id': hash})\n total_average = self.average(image['rating'], new_rate, image['count'])\n\n self._db.girls.find_one_and_update(\n {'_id': hash}, {'$inc': {'count': 1},\n '$set': {'rating': total_average}},\n return_document=pymongo.ReturnDocument.AFTER)", "def updatePropensity(self, k, rewardType=SUBMISSION, commentCount = 0):\r\n reward = self.rewardFunctions[self.R](rewardType, commentCount)\r\n #print \"Reward: %s\" %reward\r\n #print \"Propensity before updating: %s\" %(self.propensity.weights[k])\r\n self.propensity.update(k, reward)\r\n #print \"Propensity after updating: %s\" %(self.propensity.weights[k])\r", "def update_performance(self, reviewed_verbs: VerbReview) -> None:\n self._update_lambdas_of_reviewed_words(reviewed_verbs)\n self._update_last_time_reviewed()\n self._update_min_to_review(reviewed_verbs)", "def update_average_book_rating(self, isbn):\n self.cursor.execute(\"\"\"UPDATE book SET avg_rating = total_rating_score / num_ratings WHERE \n ISBN=%s\"\"\", (isbn,))\n self.db.commit()", "def set_rating(self, value):\n try:\n self._rating = float(value)\n except ValueError:\n pass", "def get_real_rating(self):\n if not (self.votes and self.score):\n return 0\n return float(self.score)/self.votes", "def _rate_exploration(self, exp_id, num_ratings, rating):\n # Each user id needs to be unique since each user can only give an\n # exploration one rating.\n user_ids = ['user%d' % i for i in range(num_ratings)]\n for user_id in user_ids:\n rating_services.assign_rating_to_exploration(\n user_id, exp_id, rating)", "def set_score(self, points):\n self.score += points", "def average_rating(self):\n ratings = AttractionRating.objects.filter(attraction=self)\n total_rating = 0\n for rating in ratings:\n total_rating += rating.rating\n\n # If there are no rating, then we set the average to 0\n # otherwise we calculate the average\n try:\n avg = total_rating / len(ratings)\n except ZeroDivisionError:\n avg = total_rating\n\n return avg", "def rating(self):\n try:\n return self._rating\n except AttributeError:\n if self.is_deprecated:\n rating = self.size\n else:\n rating = self.size * 2\n chron_supplement = max([0, 10 - self.chronorder])\n rating += (chron_supplement * 0.5)\n rating += self.archetype()\n self._rating = rating\n return self._rating", "def update_score():\n pass", "def update_book_scores(self):\n self.cursor.execute(\"\"\"UPDATE book SET avg_rating=NULL, total_rating_score=0, num_ratings=0\"\"\")\n self.db.commit()\n 
self.cursor.execute(\"\"\"SELECT * FROM comment\"\"\")\n for comment in self.cursor.fetchall():\n self.cursor.execute(\"\"\"UPDATE book SET total_rating_score=total_rating_score+%s,\n num_ratings=num_ratings+1 WHERE ISBN=%s\"\"\", (comment[3], comment[1]))\n self.db.commit()\n self.update_average_book_rating(comment[1])", "def save(self, *args, **kwargs):\n self.item.rates_total += 1\n self.item.average_rate += (self.item.average_rate + self.rate) / self.item.rates_total\n self.item.save()\n super(Rate, self).save(*args, **kwargs)", "def rating( self, restaurant_id, user_id, k, reg ):\n\n\t\t# extract the reviews of the user and recalculate the baseline rating \n\t\tuser_reviews = self.df[ self.df['user_id'] == user_id ]\n\t\tmean_all = self.df['stars'].mean()\n\t\tmean_user = user_reviews['user_avg'].values[0]\n\t\tmean_item = self.df['business_avg'][ self.df['business_id'] == restaurant_id ].values[0]\n\t\tbaseline = mean_user + mean_item - mean_all\n\n\t\tscores_numerator = []\n\t\tscores_denominator = []\n\t\tnearest = self.knearest_amongst_user_rated( restaurant_id, user_id, k = 7, reg = 3.0 )\n\n\t\tfor biz_id, sim, _ in nearest:\n\t\t\treviews = user_reviews[ user_reviews['business_id'] == biz_id ]\n\t\t\treviews_avg = reviews['business_avg'].values[0]\n\t\t\treviews_stars = reviews['stars'].values[0]\t\t\t\n\t\t\treviews_baseline = mean_user + reviews_avg - mean_all\n\t\t\tscores_numerator.append( sim * ( reviews_stars - reviews_baseline ) )\n\t\t\tscores_denominator.append(sim)\n\n\t\tscores = baseline + sum(scores_numerator) / sum(scores_denominator)\n\t\treturn scores", "def ratings(self, ratings):\n\n self._ratings = ratings", "def ratings(self, ratings):\n\n self._ratings = ratings", "def update_comment_avg_score(self, commentID):\n self.cursor.execute(\"\"\"UPDATE comment SET avg_usefulness=(2*veryUseful+useful)/(veryUseful+useful+useless)\n WHERE commentID=%s\"\"\", (commentID,))\n self.db.commit()" ]
[ "0.68534154", "0.66318846", "0.6197724", "0.61154777", "0.60824764", "0.6031516", "0.59556174", "0.59047467", "0.58960396", "0.588009", "0.5867817", "0.582206", "0.5821888", "0.58122706", "0.580732", "0.57773197", "0.5774083", "0.57408386", "0.5719941", "0.5719647", "0.5699746", "0.5699609", "0.56957275", "0.5684758", "0.56819326", "0.56811655", "0.5628061", "0.5626537", "0.5626537", "0.56260866" ]
0.69563735
0
Creates a new review for the given beer
def post(id):
    try:
        beer = Beer.objects.get(id=id)
    except mongoengine.DoesNotExist:
        return flask.Response('No beer with id {} found'.format(id), 404)
    except:
        return flask.Response('Invalid id {}'.format(id), 400)

    data = flask.request.get_json()

    # check to see if a review was already created for this beer from this
    # user
    try:
        Review.objects.get(beer=beer, user=flask.request.user)
    except mongoengine.DoesNotExist:
        pass
    else:
        return flask.Response('You\'ve already created a review for beer {}'.format(id), 400)

    review = Review(beer=beer, user=flask.request.user)
    props = ['aroma', 'appearance', 'taste', 'palate', 'bottle_style']
    for item in props:
        if item in data:
            setattr(review, item, data[item])
    review.calculate()

    try:
        review.save()
    except mongoengine.ValidationError as exp:
        return flask.Response('{}'.format(exp), 400)

    beer.rating = Review.objects.all().filter(beer=beer).average('overall')
    beer.save()
    return JSONResponse(review.to_json())
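A client-side sketch of exercising such a handler; the route, host, and port are assumptions, since only the handler body is shown above:

import requests

# Hypothetical URL; only the beer id parameter is visible in the handler.
resp = requests.post(
    'http://localhost:5000/beers/42/reviews',
    json={'aroma': 4, 'appearance': 5, 'taste': 4,
          'palate': 3, 'bottle_style': 4},
)
# Expect 404 for an unknown beer, 400 for a duplicate review or
# validation error, and the serialized review on success.
print(resp.status_code, resp.text)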
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def addreview(self, name, year, genre, rating, review, reviewer):\n pass", "def post_review(recipe_id=None):\n\n if not storage.get(Recipe, recipe_id):\n abort(404)\n data = request.get_json()\n if not data:\n abort(400, 'Not a JSON')\n if 'user_id' not in data.keys():\n abort(400, 'Missing user_id')\n if not storage.get(User, data['user_id']):\n abort(404)\n if 'text' not in data.keys():\n abort(400, 'Missing text')\n data['recipe_id'] = recipe_id\n new_review = Review(**data)\n storage.new(new_review)\n storage.save()\n return make_response(jsonify(new_review.to_dict()), 201)", "def add_new_review(restaurant_id):\n # If the user isn't logged in, send to the login page\n if helper.handle_login(login_session) is False:\n return redirect('/login')\n\n if request.method == 'POST':\n post = request.get_json()\n if 'username' not in login_session:\n new_review = Reviews(reviewer_name='anonymous',\n review=post.get('review'),\n stars=post.get('stars'),\n restaurant_id=restaurant_id,\n time=datetime.utcnow())\n else:\n new_review = Reviews(reviewer_name=login_session['username'],\n review=post.get('review'),\n stars=post.get('stars'),\n restaurant_id=restaurant_id,\n time=datetime.utcnow())\n session.add(new_review)\n session.commit()\n\n return redirect(url_for('restaurants_page'))", "def new():\n\n add_review = True\n\n form = CreateReview()\n if form.validate_on_submit():\n\n try:\n review = {\n \"score\": float(form.score.data),\n \"description\": form.description.data,\n \"games_id\": form.game_id.data,\n \"users_id\": form.users_id.data\n }\n\n print(review)\n new_review = Reviews()\n new_review.create(**review)\n \n # add employee to the database\n flash('You have successfully created a Review.')\n except:\n # in case department name already exists\n flash('Error: review already exists.')\n \n\n # redirect to the login page\n return redirect(url_for('review.index'))\n\n return render_template('review/new.html', action=\"Add\", add_review=add_review, form=form, title=\"Add Review\")", "def new_review(place_id):\n body_dic = request.get_json()\n place = storage.get(Place, place_id)\n if not place:\n abort(404)\n if not body_dic:\n return jsonify({'error': 'Not a JSON'}), 400\n if \"user_id\" not in body_dic:\n return jsonify({'error': 'Missing user_id'}), 400\n user = storage.get(User, body_dic.get(\"user_id\", None))\n if not user:\n abort(404)\n if \"text\" not in body_dic:\n return jsonify({'error': 'Missing text'}), 400\n\n new_review = Review(**body_dic)\n setattr(new_review, \"place_id\", place_id)\n storage.new(new_review)\n storage.save()\n return jsonify(new_review.to_dict()), 201", "def create_review(place_id=None):\n place = storage.get(Place, place_id)\n if place:\n review = request.get_json()\n if not review:\n abort(400, \"Not a JSON\")\n if \"user_id\" not in review:\n abort(400, \"Missing user_id\")\n if not storage.get(\"User\", review[\"user_id\"]):\n abort(404)\n if \"text\" not in review:\n abort(400, \"Missing text\")\n else:\n review['place_id'] = place.id\n new_review = Review(**review)\n storage.new(new_review)\n storage.save()\n return jsonify(new_review.to_dict()), 201\n abort(404)", "def create_review(place_id):\n place = storage.get(\"Place\", place_id)\n if place is None:\n abort(404)\n if not request.get_json():\n return jsonify({'error': 'Not a JSON'}), 400\n if 'user_id' not in request.get_json():\n return jsonify({'error': 'Missing user_id'}), 400\n user = storage.get(\"User\", request.get_json().get('user_id'))\n if user is None:\n abort(404)\n user_id = 
request.get_json().get('user_id')\n if 'text' not in request.get_json():\n return jsonify({'error': 'Missing text'}), 400\n text = request.get_json().get('text')\n obj = Review(text=text, place_id=place_id, user_id=user_id)\n obj.save()\n return jsonify(obj.to_dict()), 201", "def test_create_review(self):\n yield self.nodes[0].overlay.create_project(\"test\", \"specpointer\", \"01-02-03\", 300, \"EUR\", 5)\n yield self.deliver_messages()\n project = self.nodes[1].overlay.persistence.get_projects()[0]\n yield self.nodes[1].overlay.create_submission(project['public_key'].decode('hex'), project['id'], 'test')\n yield self.deliver_messages()\n\n # Do a review\n submission = self.nodes[0].overlay.persistence.get_submissions_for_project(project['public_key'].decode('hex'), project['id'])[0]\n yield self.nodes[0].overlay.create_review(submission['public_key'].decode('hex'), submission['id'], 'test')\n yield self.deliver_messages()\n\n self.assertTrue(self.nodes[1].overlay.persistence.get_reviews(submission['public_key'].decode('hex'), submission['id']))", "def add_review(self):\n url = \"/review/create/%s\" % self.picture.id\n self.browser.get(\"%s%s\" %\n (str(self.live_server_url), url))\n\n select = Select(self.browser.find_element_by_id(\n \"id_score_intention\"))\n select.select_by_index(4)\n select = Select(self.browser.find_element_by_id(\n \"id_score_technical\"))\n select.select_by_index(4)\n select = Select(self.browser.find_element_by_id(\n \"id_score_picture\"))\n select.select_by_index(4)\n select = Select(self.browser.find_element_by_id(\n \"id_score_global\"))\n select.select_by_index(4)\n\n self.browser.find_element_by_id(\n \"id_comment_intention\").send_keys(\"Commentaire intention\")\n\n submission_button = self.browser.find_element_by_class_name(\n 'btn-secondary')\n submission_button.click()\n time.sleep(2)\n html = self.browser.page_source\n self.assertInHTML(\"\"\"\n <h4 class=\"rouge-fonce\">Critique de test_login</h4>\n \"\"\",\n html)\n self.assertInHTML(\"\"\"\n <strong>Note moyenne de la revue : 4,0</strong>\n \"\"\",\n html)", "def review():\r\n\r\n # Ensure isbn_number is submitted\r\n if not request.form.get(\"isbn_number\"):\r\n return apology(\"Invalid book\", 403)\r\n\r\n # Ensure review is submitted\r\n if not request.form.get(\"review\"):\r\n return apology(\"Text is not submitted\", 403)\r\n\r\n # Check if book exist, if not error out\r\n\r\n # add review to db\r\n\r\n return redirect(url_for(details, isbn_number=request.form.get(\"isbn_number\")))", "def review_add(request):\n result = {}\n\n u = request.user\n\n p = Product.objects.get_by_sku(request.POST['sku'])\n\n if p is None:\n result[\"result\"] = '0'\n elif TransactionLineItem.objects.filter(transaction__party=u, product=p).count() > 0:\n # need to check if I bought this item\n\n r, created = Review.objects.get_or_create(reviewer=u, product=p)\n r.content =request.POST['content']\n r.rating=int(request.POST['rating'])\n\n # reply to review request\n rto = request.POST.get('reply_to', None)\n if rto:\n rev_request = ReviewRequest.objects.get(id=int(rto))\n r.reply_to.add(rev_request)\n # change wish item review status to review=2\n for w in Wishlist.objects.filter(product=p, party=rev_request.requester):\n w.review = Wishlist.REVIEW_RESPONDED\n w.save()\n \n r.public = bool(request.POST['public'])\n r.save() \n\n # add a feed\n f = Feed(actor=u, action=Feed.REVIEWED, product=p) \n f.save()\n \n result[\"result\"] = str(r.id)\n else:\n result['result'] = '-1'\n\n return JSONHttpResponse(result)", "def 
save_review():\n prod_id = int(request.vars.prod_id)\n logger.info(\"saving review on prod_id {%s}\" %prod_id)\n content = request.vars.content\n db.reviews.update_or_insert(\n (db.reviews.prod_id == prod_id) & (db.reviews.user_email == auth.user.email),\n prod_id = prod_id,\n user_email = auth.user.email,\n review_content = content\n )\n return \"ok\" # Might be useful in debugging.", "def create_review(place_id):\n place = storage.get(\"Place\", place_id)\n if place is None:\n abort(404)\n req_json = request.get_json()\n if req_json is None:\n return make_response(jsonify({'error': \"Not a JSON\"}), 400)\n if 'user_id' not in req_json.keys():\n return make_response(jsonify({'error': \"Missing user_id\"}), 400)\n uid = req_json.get(\"user_id\")\n user = storage.get(\"User\", uid)\n if user is None:\n abort(404)\n if 'text' not in req_json.keys():\n return make_response(jsonify({'error': \"Missing text\"}), 400)\n req_json[\"place_id\"] = place_id\n data = Review(**req_json)\n data.save()\n return jsonify(data.to_json()), 201", "def insert(self, movie_name, year_released, genre, rating, review, reviewer):\n params = {'movie_name': movie_name, 'year_released': year_released,'genre':genre, 'rating': rating, 'review': review, 'reviewer': reviewer}\n self.movie_reviews.append(params)\n return True", "def put_review(review_id):\n ignored_data = [\"id\", \"created_at\", \"updated_at\", \"user_id\", \"place_id\"]\n return put(cls, review_id, ignored_data)", "def newreview():\n objectid = request.values.get('objectid', 0, type=int)\n if not objectid:\n abort(400)\n workflow_object = workflow_object_class.get(objectid)\n\n form = AuthorUpdateForm(\n data=workflow_object.extra_data[\"formdata\"], is_review=True)\n ctx = {\n \"action\": url_for('.reviewhandler', objectid=objectid),\n \"name\": \"authorUpdateForm\",\n \"id\": \"authorUpdateForm\",\n \"objectid\": objectid\n }\n\n return render_template('authors/forms/review_form.html', form=form, **ctx)", "def add_review(self, review: Review):\n raise NotImplementedError", "def review(self, review):\n self._review = review", "def test_save_review(self):\n self.new_review.save_review()\n self.assertTrue(len(Review.query.all()) > 0)", "def add_restaurant_review():\n username = sign_up.get_username()\n if username:\n add_var = dict(user=username, restaurant_name=\"\", restaurant_address=\"\",\n restaurant_item=\"\", item_comments=\"\", item_price=\"\",\n restaurant_ranking=\"\", restaurant_rating=\"\",\n restaurant_rating_reason=\"\", address=\"\", restaurant_chosen=\"\",\n address_chosen=\"\")\n return bottle.template('add_review', add_var=add_var)\n else:\n return bottle.template('login',\n dict(user_error=\"Sorry, you need to be logged in to submit a review, please log below:\", pw_error=\"\"))", "def create_book():\n Book.objects.create(book_id=\"test_id\",\n title=\"test_title\",\n authors=\"test_author\",\n published_date=\"2021\",\n categories=[\"test_category\"],\n average_rating=5,\n ratings_count=5,\n thumbnail=\"http://books.google.com/books/test\"\n )", "def submit_obj_for_review(selenium, obj, reviewer):\n review_comment = string_utils.StringMethods.random_string()\n _get_ui_service(selenium, obj).submit_for_review(\n obj, reviewer.email, review_comment)\n obj.update_attrs(\n review=entities_factory.ReviewsFactory().create(reviewers=reviewer))\n exp_comment = entities_factory.CommentsFactory().create(\n description=element.Common.REVIEW_COMMENT_PATTERN.format(\n # reviewers emails in review comment message need to be sorted\n # as they are 
displayed in UI in random order\n emails=', '.join(sorted(obj.review[\"reviewers\"])),\n comment=review_comment))\n exp_comment.created_at = rest_service.ObjectsInfoService().get_comment_obj(\n paren_obj=obj, comment_description=review_comment).created_at\n obj.comments = [exp_comment.repr_ui()]\n return obj", "def review(book_id):\n\n # User id from current session\n user_id = session[\"user_id\"]\n # Form data\n try:\n rating = request.form.get('rating')\n text = request.form.get('review-text')\n except ValueError:\n return error('Something went wrong with submission.', 400)\n\n # Has user already submitted a review for this book\n book_id_duplicates = db.execute(\n \"SELECT user_id from reviews \"\n \"WHERE book_id = :book_id \"\n \"AND user_id = :user_id\",\n {'book_id': book_id, 'user_id': user_id}).fetchone()\n if book_id_duplicates is not None:\n return error('Only one submission per book allowed!', 403)\n\n _review = {\n \"user_id\": user_id,\n \"book_id\": int(book_id),\n \"rating\": int(rating),\n \"text\": text.rstrip() # Should user leave new line in textarea\n }\n\n # Save user review\n db.execute(\n \"INSERT INTO reviews (user_id, book_id, rating, text)\"\n \"VALUES (:user_id, :book_id, :rating, :text)\", _review)\n db.commit()\n\n # Reload the page, rendering their review\n return redirect(url_for(\"book\", book_id=book_id))", "def save_movie_and_review(name, fi_name, imdb_id, reviewer, review):\n db = __get_session()\n movie_rec = db.query(Movie).filter_by(imdb_id=imdb_id).first()\n\n if not movie_rec:\n\n movie_rec = Movie(name=name,\n imdb_id=imdb_id,\n fi_name=fi_name)\n\n db.add(movie_rec)\n db.commit()\n\n movie_id = movie_rec.id\n\n review_rec = Review(reviewer=reviewer,\n review_txt=review,\n timestamp=datetime.datetime.now(),\n movie_id=movie_id)\n\n db.add(review_rec)\n db.commit()\n review_id = review_rec.id\n db.close()\n\n return review_id", "def review(self, review: object):\n\n self._review = review", "def submit_review():\n \n reviewer = request.form.get('reviewer')\n review = request.form.get('review')\n name = request.form.get('name')\n fi_name = request.form.get('fi_name')\n imdb_id = request.form.get('imdb_id')\n year = request.form.get('year')\n timestamp = request.form.get('timestamp')\n\n # Save review and movie first, if no record yet\n review_id = save_movie_and_review(name, fi_name, imdb_id, reviewer, review)\n if review_id:\n return \"Thank you, \" + reviewer + \". 
Your review was saved!\"\n else:\n return \"Something went wrong!\"", "def reviews_collection(request):\n if request.method == \"POST\":\n data = json.loads(request.body)\n task = Task.objects.get(id=data.get(\"taskId\", \"\"))\n reviewer = User.objects.get(username=data.get(\"reviewer\", \"\"))\n reviewee = User.objects.get(username=data.get(\"reviewee\", \"\"))\n rating = data.get(\"rating\", \"\")\n content = data.get(\"content\", \"\")\n\n review = Review(\n task=task,\n reviewer=reviewer,\n reviewee=reviewee,\n rating=rating,\n content=content\n )\n review.save()\n\n serializer = ReviewSerializer(review)\n return Response(serializer.data)", "def review(user_id, item_id, text, rating):\n if Review.objects.filter(user=user_id, item=item_id):\n return \"You already wrote a review!\"\n\n form = ReviewForm({\n 'user': user_id,\n 'item': item_id,\n 'text': text,\n 'rating': rating,\n 'agrees': 0,\n 'thanks': 0\n })\n if form.is_valid():\n form.save()\n return False\n return \"Something was wrong with the review you submitted!\"", "def holdingpenreview():\n objectid = request.values.get('objectid', 0, type=int)\n approved = request.values.get('approved', False, type=bool)\n ticket = request.values.get('ticket', False, type=bool)\n if not objectid:\n abort(400)\n workflow_object = workflow_object_class.get(objectid)\n workflow_object.extra_data[\"approved\"] = approved\n workflow_object.extra_data[\"ticket\"] = ticket\n workflow_object.save()\n db.session.commit()\n\n resume.delay(workflow_object.id)\n\n return render_template('authors/forms/new_review_accepted.html',\n approved=approved)", "def write_review(request):\n form = ReviewForm\n\n if request.method == 'POST':\n form_data = {\n 'title': request.POST['title'],\n 'description': request.POST['description'],\n 'author': request.POST['author'],\n }\n\n form = ReviewForm(form_data)\n\n if form.is_valid:\n form.save()\n messages.success(\n request, f'Review added successfully! Thanks!')\n else:\n messages.error(\n request, f'Upps something went wrong, please try again')\n\n context = {\n 'form': form\n }\n return render(request, 'reviews/write_review.html', context)" ]
[ "0.6573553", "0.6502498", "0.6285934", "0.6278751", "0.62220836", "0.62138295", "0.6156375", "0.61320794", "0.6116113", "0.609118", "0.60615915", "0.6058494", "0.6015865", "0.6005575", "0.6003493", "0.59815794", "0.5904925", "0.5881915", "0.58576345", "0.58488315", "0.5848608", "0.5824803", "0.5821273", "0.57954633", "0.57862645", "0.5763701", "0.5739368", "0.57214504", "0.57213855", "0.57205844" ]
0.7248955
0
Convert a sequence of key accesses into a path string representing a path below the top-level key in Redis.
def key_sequence_to_path(sequence: List[str]):
    return Path.rootPath() + ".".join(sequence)
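A self-contained sketch, assuming Path is the rejson-style helper whose rootPath() returns '.':

from typing import List

class Path:  # minimal stand-in for rejson's Path helper
    @staticmethod
    def rootPath() -> str:
        return "."

def key_sequence_to_path(sequence: List[str]) -> str:
    return Path.rootPath() + ".".join(sequence)

print(key_sequence_to_path(["users", "alice", "email"]))  # .users.alice.email
print(key_sequence_to_path([]))                           # .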
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sub_key(dirname):\n return SUB_PREFIX + dirname", "def GetKeyByPath(self, key_path):", "def path_to_key_sequence(path: str):\n if path == Path.rootPath():\n return []\n return path.split(\".\")[1:]", "def GetSubkeyByPath(self, key_path):", "def as_key(key):\n return key.lstrip('/').rstrip('/')", "def path(self):\n p = self\n\n name = [p.name()]\n offsets = set([p._offset])\n while p.has_parent_key():\n p = p.parent_key()\n if p._offset in offsets:\n name.append(\"[path cycle]\")\n break\n name.append(p.name())\n offsets.add(p._offset)\n return '\\\\'.join(reversed(name))", "def build_flattened_key(prefix, key):\n return key if not prefix else prefix + \".\" + key", "def setKeyPath(*args, **kwargs)->List[AnyStr]:\n pass", "def get_path(self, key):\n return get_path(self, key)", "def actual_key(self, key):\n key_list = []\n if key.scope == Scope.children:\n key_list.append('children')\n elif key.scope == Scope.parent:\n key_list.append('parent')\n else:\n key_list.append([\"usage\", \"definition\", \"type\", \"all\"][key.scope.block])\n\n if key.block_scope_id is not None:\n key_list.append(key.block_scope_id)\n if key.student_id:\n key_list.append(key.student_id)\n return \".\".join(key_list)", "def r_key(self, *args):\n parts = [self.r_prefix]\n parts.extend(args)\n return \":\".join(parts)", "def root_given_key(prob_key):\n root = ''\n for i, info in enumerate(prob_key):\n if i != 0:\n root += '_'\n root += str(info)\n return root.replace('.', '_')", "def key_join(self, key, encode=True):\n if isinstance(key, str):\n parts = key.split('/')\n else:\n parts = key\n new_parts = []\n\n for part in parts:\n if isinstance(part, bytes):\n part = part.decode(\"utf-8\")\n if encode:\n part = quote(str(part))\n new_parts.append(part)\n\n return '/'.join(new_parts)", "def get_url_from_keys(keys, path_root):\n query_str = ''\n for key in keys:\n parts = key.split(HASH_KEY_DELIMETER)\n if parts[0] in REQUEST_META_BASE:\n path_root += parts[1] + '/'\n elif parts[0] in REQUEST_META_QUERY_STR:\n query_str += parts[0] + '=' + parts[1] + '&'\n\n if not path_root:\n raise MetricsAPIError()\n if query_str:\n url = path_root[:-1] + '?' 
+ query_str[:-1]\n else:\n url = path_root\n return url", "def ikeys(self, prefix=''):", "def json_full_path(base_path, key):\n if base_path is None or base_path == \"\":\n return key\n else:\n return f'{base_path}.{key}'", "def build_path(key_dict, path_string):\n for key, value in key_dict.items():\n path_string = re.sub('\\$\\{' + key + '\\}', value, path_string)\n\n return path_string", "def _extract_immediate_prefix(obj_key:str)->str:\n immed_prefix = \"\"\n if len(obj_key.split(\"/\")) > 1:\n immed_prefix = obj_key.split(\"/\")[-2]\n \n return immed_prefix", "def strkey(item):\n return '%s:%s:%s' % (item['group_id'], item['artifact_id'], item['version'])", "def GetSubkeys(self):", "def __getitem__(self, key):\n path = self.path\n if self.path_is_string:\n path = [path]\n return path[key]", "def reprkey(path):\n return ReprKey(\n path=storepath(path),\n hash=get_hash(path),\n tag=config_tag)", "def key_path(self):\n keypath = self._get_field('System', 'keypath')\n localpath = \"/\".join(__file__.split('/')[:-1])\n return join(localpath, keypath)", "def build_key(key):\n return os.path.join(PREFIX, key)", "def _make_path(keys, value, ext, version=None):\n if isinstance(keys, (list, tuple)):\n keys = '/'.join(keys)\n\n version_str = ''\n if version:\n version_str = '.{0}'.format(version)\n\n path = '{keys}/{value}{version}{ext}'.format(\n keys=keys,\n value=value,\n version=version_str,\n ext=ext\n )\n\n return path", "def _shorten_key(telstate, key):\n for prefix in telstate.prefixes:\n if key.startswith(prefix):\n return key[len(prefix):]\n return ''", "def get_routes_from_cache(key: str) -> str:\n\n val = client.get(key)\n return val", "def GetRootKey(self):", "def key_from_path(self, path=\"\", base_dir=\"\"):\n path = path.replace(base_dir, '')\n if path[0] == os.path.sep:\n path = path[1:]\n return os.path.join(self.rootdir, path)", "def key(self)->str:\n return \"{}:{}.{}.{}\".format(self.source, self.db, self.ed, self.rec)" ]
[ "0.6561175", "0.646753", "0.63690233", "0.6362882", "0.6326689", "0.6214568", "0.6203694", "0.61824983", "0.6133761", "0.596251", "0.5954664", "0.59117234", "0.58686566", "0.58153015", "0.5785836", "0.5785692", "0.57508826", "0.5740406", "0.57283944", "0.571764", "0.56980115", "0.5694851", "0.56708103", "0.56615025", "0.5626678", "0.5611213", "0.55953383", "0.5583283", "0.55704635", "0.5566848" ]
0.67574036
0
Return a copy of the dictionary with the paths formed by key_sequence removed
def copy_dictionary_without_paths(dictionary: Dict, key_sequence: List[List[str]]):
    ret = {}
    possibles = [ks for ks in key_sequence if len(ks) == 1]
    possibles = set(reduce(lambda x, y: x + y, possibles, []))
    for k, v in dictionary.items():
        if k in possibles:
            continue
        if type(v) == dict:
            ret[k] = copy_dictionary_without_paths(v, [ks[1:] for ks in key_sequence if len(ks) > 1])
        else:
            ret[k] = v
    return ret
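A usage sketch, assuming the function above is in scope along with its functools.reduce and typing imports:

# Drop the nested path a.x and the top-level key b.
d = {'a': {'x': 1, 'y': 2}, 'b': 3, 'c': 4}
print(copy_dictionary_without_paths(d, [['a', 'x'], ['b']]))
# {'a': {'y': 2}, 'c': 4}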
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_keys_from_dict(dictionary, keys):\n\n # Copy dictionary\n dictionary_updated = dictionary.copy()\n try:\n [dictionary_updated.pop(key) for key in keys]\n except:\n print(\"Error: No ratio and sampling strategy parameters\")\n return dictionary_updated", "def _clean_paths(paths):\n\n\tclean_paths = {key: np.concatenate([path[key] for path in paths]) for key in paths[0].keys()}\n\n\treturn clean_paths", "def removeAllKeys(self) -> None:\n ...", "def task_2_remove_dict_fields(data: DT, redundant_keys: List[str]) -> DT:\n dict2 = copy.deepcopy(data)\n for item in dict2:\n for key in redundant_keys:\n item.pop(key)\n return dict2", "def remove_keys(d, keys):\n pp = deepcopy(d)\n if isinstance(keys, (list, tuple)):\n for k in keys:\n pp.pop(k, None)\n else:\n pp.pop(keys, None)\n return pp", "def _filter_dict(src_dict, key_set):\n for k in set(src_dict.keys()) - key_set:\n src_dict.pop(k)", "def remove_keys(_dict, keys):\n if not _dict:\n return None\n new = dict(_dict)\n for key in keys:\n new.pop(key, None)\n return new", "def remove_prefix(self, state_dict, prefix):\n return {\n (lambda x: x.split(prefix, 1)[-1] if x.startswith(prefix) else x)(\n key\n ): value\n for key, value in state_dict.items()\n }", "def _cleanse_dict(original):\n return {k: v for k, v in original.items() if \"_pass\" not in k}", "def removeDic(dic, key):\n pass", "def discard(self, key):\r\n if key in self.map: \r\n key, prev, next = self.map.pop(key)\r\n prev[NEXT] = next\r\n next[PREV] = prev", "def remove_keys(_dict, _keys):\n if isinstance(_keys, str):\n if _keys in _dict:\n del _dict[_keys]\n else:\n for _key in _keys:\n _dict = remove_keys(_dict, _key)\n return _dict", "def eliminate_key (self,key):\r\n\r\n if self.using_shelf:\r\n\r\n del self.key_dict[str(key)]", "def _cleanse_dict(original):\n return dict((k, v) for k, v in original.items() if \"_pass\" not in k)", "def copy_backward_mapping(self) -> Dict[str, Set[str]]:\n return deepcopy(self._backward_mapping)", "def keep_in_dictionary(self,dictionary,*keys):\r\n remove_keys = [k for k in dictionary if k not in keys]\r\n self.remove_from_dictionary(dictionary,*remove_keys)", "def drop_keys(d):\n if isinstance(d, dict):\n return {\n k: drop_keys(v)\n for k, v in d.items()\n if k not in [\"propNames\", \"package\"]\n and v is not None\n and not (k == \"children\" and v == \"\")\n }\n elif isinstance(d, list):\n return [drop_keys(x) for x in d]\n return d", "def remove_keys(data: dict, keys: list[str]) -> None:\n for k in keys:\n _ = data.pop(k, None)", "def _remove_keys(results: dict, remove: list) -> dict:\n removed = {}\n for key, val in results.items():\n if key not in remove:\n removed[key] = val\n return removed", "def _delete_volatile_keys(self, solr_dict):\n\n def delete(del_solr_dict, path_list):\n k = path_list[0]\n if k in del_solr_dict:\n if len(path_list) > 1:\n delete(del_solr_dict[k], path_list[1:])\n else:\n del del_solr_dict[k]\n\n delete(solr_dict, ['response', 'maxScore'])\n delete(solr_dict, ['responseHeader', 'QTime'])", "def __cleanState__(self, stateDict):\n for k in list(stateDict.keys()):\n if k.startswith('_'):\n stateDict.pop(k)\n return stateDict", "def keep_entry(dict_input, parent_key, child_keys):\n\n dict_output = dict()\n\n child_keys = [''.join((parent_key, '_', child_key)) for child_key in child_keys]\n\n for key, value in dict_input.items():\n if key.startswith(parent_key) and key not in child_keys:\n pass\n else:\n dict_output.update({key: value})\n\n return dict_output", "def _remove_reverse_mapping(self, 
reverse_key, key):\n self._reverse_store[reverse_key].remove(key)\n if not self._reverse_store[reverse_key]:\n del self._reverse_store[reverse_key]", "def cleanStep(idict):\n for step in ['input', 'output']:\n data = idict.get(step, {})\n for key, values in data.items():\n for elem in values:\n for skip in ['pfn', 'InputPFN', 'OutputPFN', 'inputpfns']:\n if skip in elem:\n del elem[skip]\n data[key] = values\n return idict", "def del_dict_keys(dict_in, keys):\n for key in keys:\n if key in dict_in:\n del dict_in[key]\n return dict_in", "def delete_key_HELPER(data_dict, key_list, key_to_delete):\n data_dict = get_key_from_dict_HELPER(data_dict, key_list[:-1])\n data_dict.pop(key_to_delete)\n return data_dict", "def remove_states(self, keys: list):\n if self.spec.graph:\n self.spec.graph.clear_children(keys)", "def clean_dict(to_clean):\n for k in list(to_clean.keys()):\n if not to_clean.get(k):\n to_clean.pop(k)", "def without_keys(d, keys):\n return {x: d[x] for x in d if x not in keys}", "def prune(self): # HashMap.prune\n for hashval, list in self.contentHash.iteritems():\n newlist=[]\n for entry in list:\n if not entry.deleted:\n newlist.append(entry)\n self.contentHash[hashval]=newlist" ]
[ "0.6578352", "0.6472215", "0.616475", "0.6164459", "0.61450326", "0.6133661", "0.60783386", "0.60522205", "0.6036807", "0.6010666", "0.5991608", "0.59915197", "0.598256", "0.59222156", "0.59201723", "0.5909378", "0.5890413", "0.5851687", "0.5850681", "0.5796644", "0.5791199", "0.5771799", "0.57681125", "0.571113", "0.57074004", "0.5693822", "0.56807244", "0.56761724", "0.5638454", "0.56201065" ]
0.72551376
0
Given a list of paths and a prefix, return the paths that start with the prefix, along with their suffixes relative to it
def filter_paths_by_prefix(paths: Iterable[str], prefix: str):
    if prefix == Path.rootPath():
        return paths, paths
    full_paths, suffixes = [], []
    for p in paths:
        if p.startswith(prefix):
            suffixes.append(p[len(prefix):])
            full_paths.append(p)
    return full_paths, suffixes
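A usage sketch, assuming the function above is in scope and Path.rootPath() is '.':

paths = ['.users.alice', '.users.bob', '.config.ttl']
full, suffixes = filter_paths_by_prefix(paths, '.users.')
print(full)      # ['.users.alice', '.users.bob']
print(suffixes)  # ['alice', 'bob']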
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_prefixes(buildout):\n\n prefixes = parse_list(buildout.get('prefixes', ''))\n return [os.path.abspath(k) for k in prefixes if os.path.exists(k)]", "def get_files_prefix(prefixes, dirname, Lshow=None, Ldir=None):\n matched_files=[]\n for pref in prefixes:\n print(f\"prefix: {pref} in {whereami()} of module {__name__}\")\n for fname in os.listdir(dirname):\n # re.match finds only prefix\n if re.match(pref, fname):\n if not Ldir and os.path.isdir(fname):\n continue\n matched_files.append(fname)\n #print (pref, fname)\n return matched_files", "def _get_prefix_and_relative_path(self, path_list):\n # example of path: s3://custom-bucket/exp-1/exp-1-join-id-time-stamp/train\n # use s3 bucket as prefix\n # allow data from different experiments but in same account\n parts = path_list[0].split(\"/\")\n shared_prefix = \"/\".join(parts[0:3]) # s3://custom-bucket\n key_path_list = []\n\n for path in path_list:\n parts = path.split(\"/\")\n prefix = \"/\".join(parts[0:3])\n if prefix != shared_prefix:\n logger.error(\n f\" Prefix `{prefix}` is different from the shared prefix '{shared_prefix}'. \"\n \"Data in the list are not coming from same s3 bucket.\"\n )\n object_path = \"/\".join(parts[3:])\n key_path_list.append(object_path)\n\n return shared_prefix, key_path_list", "def startswith(list, prefix):\n\n return list[:len(prefix)] == prefix", "def common_path_prefix(paths, sep=os.path.sep):\n def allnamesequal(name):\n return all(n==name[0] for n in name[1:])\n bydirectorylevels = zip(*[p.split(sep) for p in paths])\n return sep.join(x[0] for x in takewhile(allnamesequal, bydirectorylevels))", "def items_with_prefix(self, prefix):\n node = self.get_node(self.root, 0, prefix)\n # look at the middle subtree only (since only it has exact matches)\n return self.collect(node.middle, prefix)", "def get_common_prefixes(bucket, prefix):\n if not prefix.endswith('/'):\n prefix += \"/\"\n client = boto3.client('s3')\n paginator = client.get_paginator('list_objects')\n result = paginator.paginate(Bucket=bucket, Delimiter='/', Prefix=prefix)\n return [common_prefix['Prefix'].split('/')[-2]\n for common_prefix in result.search(\"CommonPrefixes\")\n if common_prefix]", "def getpaths_fromfile(input_prefix_, file_handle_):\n\n input_paths = []\n\n for line in file_handle_:\n line = line.strip()\n if line != \"\":\n dirname = line\n path = os.path.join(input_prefix_, \"%s*\" % dirname)\n input_paths.append(tuple([dirname, path]))\n\n return input_paths", "def getpaths_fromdir(input_prefix_, directory_):\n path = os.path.join(input_prefix_, \"%s*\" % directory_, \"*\")\n return [tuple([directory_, path])]", "def list_keys(bucket, path, suffix=None):\n\t# Apparently there is no easy way of doing this except to loop over the result\n\t# chek the parameters delimiter='', marker=''\n\t# then the list returns boto.s3.prefix.Prefix objects on matches\n\tfiles = []\n\tpath = path.strip('/')\n\tfor key in bucket.list(path):\n\t\trelative_path = key.name.replace(path, '').lstrip('/')\n\t\tif not relative_path:\n\t\t\t# Empty\n\t\t\tcontinue\n\t\tif '/' in relative_path.strip('/'):\n\t\t\t# Skip sub-folders\n\t\t\tcontinue\n\n\t\tif not suffix or relative_path.endswith(suffix):\n\t\t\tfiles.append(relative_path)\n\treturn files", "def words_start_with_prefix(self, prefix: str) -> List[str]:\n if not self.starts_with(prefix):\n return []\n if self.search(prefix):\n return [prefix]\n\n curr = self.root\n for ch in prefix:\n curr = curr.children.get(ch)\n return self._get_word(prefix, curr)", "def 
try_prefix_paths(prefix, config):\n\n def _fn(path):\n if isinstance(path, str):\n new_path = os.path.join(prefix, path)\n if tf.io.gfile.exists(new_path):\n return new_path\n return path\n\n return _map_config_values(config, _fn)", "def files(path: str, prefix: str = None, suffix: str = None):\n for file in os.listdir(path):\n if os.path.isfile(os.path.join(path, file)):\n if (prefix is None or file.startswith(prefix)) and (suffix is None or file.endswith(suffix)):\n yield file", "def urlrepos(prefix, roothead, paths):\n for path in paths:\n path = os.path.normpath(path)\n yield (prefix + '/' +\n util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path", "def get_dirs_prefix(wdir, prefix, excludes=None, Lshow=True, Ldir=True):\n matched_dirs=[]\n for fname in os.listdir(wdir):\n # re.match finds only prefix\n if os.path.isdir(fname) and re.match(prefix, fname):\n if excludes:\n tag=False\n for ex in excludes:\n if re.search(ex, fname):\n tag=True\n break\n if not tag :\n matched_dirs.append(fname)\n print (fname)\n else:\n matched_dirs.append(fname)\n print (fname)\n return matched_dirs", "def find_peers(prefix, sorted_list):\n result = []\n for row in sorted_list:\n if row[0] == prefix: # row[0] should be the prefix\n result.append(row[1]) # row[1] should be the peer IP\n return result", "def find_tryouts(data_dir, prefix):\n filenames = os.listdir(data_dir)\n return sorted(list(map(lambda d: os.path.join(data_dir, d), filter(lambda s: s.startswith(prefix), filenames))))", "def list_local(paths, prefix=None):\n results = []\n for path in paths:\n if os.path.isdir(path):\n for filename in os.listdir(path):\n fullpath = os.path.join(path, filename)\n if os.path.isdir(fullpath):\n results += list_local([fullpath], prefix)\n else:\n fullname = fullpath\n if prefix and fullname.startswith(prefix):\n fullname = fullname[len(prefix):]\n mtime = datetime.datetime.fromtimestamp(\n os.path.getmtime(fullpath), tz=utc)\n results += [{\"Key\": fullname,\n \"LastModified\": mtime.strftime(\n '%a, %d %b %Y %H:%M:%S %Z')}]\n else:\n fullpath = path\n fullname = fullpath\n if prefix and fullname.startswith(prefix):\n fullname = fullname[len(prefix):]\n mtime = datetime.datetime.fromtimestamp(\n os.path.getmtime(fullpath), tz=utc)\n results += [{\"Key\": fullname,\n \"LastModified\": mtime.strftime(\n '%a, %d %b %Y %H:%M:%S %Z')}]\n return results", "def get_subfolders(self, prefix):\n result = self.client.list_objects(Bucket=self.bucket, Prefix=prefix, Delimiter='/')\n prefixes = []\n for o in result.get('CommonPrefixes', []):\n prefixes.append(o.get('Prefix'))\n return prefixes", "def list_s3_files(bucket, prefix):\n \n s3 = boto3.client('s3')\n\n if type(prefix) != list:\n prefix = [prefix]\n \n # Loop over prefixes:\n file_list = []\n for p in prefix:\n \n # Load one prefix:\n response = s3.list_objects_v2(Bucket=bucket, Prefix=p)\n if response['KeyCount'] > 0:\n file_list = file_list + [d['Key'] for d in response['Contents']]\n while response['IsTruncated']:\n response = s3.list_objects_v2(Bucket=bucket, Prefix=p, StartAfter=file_list[-1])\n file_list = file_list + [d['Key'] for d in response['Contents']] \n \n return file_list", "def transform_prefix(filenames, prefix_old, prefix_new):\n\n new_filenames = set([])\n len_prefix_old = len(prefix_old)\n # loop over the list of files and remove the prefix\n for name in filenames:\n name = name[len_prefix_old:]\n new_filenames.add(prefix_new + name)\n\n\n return new_filenames", "def prefixSearch(self, prefix: str, _prec=\"\"):\n if prefix == 
\"\":\n # prefix exhasuted, match all\n yield from self.keys(_prec)\n else:\n try:\n # prefix not exhausted, traverse further\n chld = self.children[prefix[0]]\n yield from chld.prefixSearch(prefix[1:], _prec + self.ch)\n except IndexError:\n yield None\n except KeyError:\n yield None", "def get_nodes_starting_with(self, prefixes):\n if self._remove_name_quotes:\n prefixes = [pref[1:-1] if pref[0] == pref[-1] == \"'\" or pref[0] == pref[-1] == '\"' else pref for pref in prefixes]\n return [node for name, node in self.node_names.items() if name.startswith(tuple(prefixes))]", "def get_dynamic_prefixes():\n retval = set()\n\n # Matches all literal chars (not regexp metachars), but we do\n # allow a leading ^.\n prefix_re = re.compile(r'^[^$.?*+()<>\\[\\]]+')\n\n all_routes = route_map.generate_route_map()\n for routes_for_one_handler in all_routes:\n if ('<file>' in routes_for_one_handler[1] or\n '<directory>' in routes_for_one_handler[1]):\n # This is a static handler, so ignore it.\n continue\n\n handler_regex = routes_for_one_handler[0].pattern\n if handler_regex in ('^.*$', '^/.*$'):\n # This is the catch-all handler, so we need to add in all\n # its routes. The route-info proper starts at list elt 2.\n for route_info in routes_for_one_handler[2:]:\n url = route_info[0].pattern\n m = prefix_re.match(url)\n if m:\n retval.add(url[:m.end()])\n else:\n # We can just use the url that matches this handler as\n # a whole.\n m = prefix_re.match(handler_regex)\n if m:\n retval.add(handler_regex[:m.end()])\n\n return retval", "def prefix_fun(prefix_str: str) -> list:\n\n\n\t\tf = []\n\n\t\tfor i in range(len(prefix_str)):\n\t\t\tj = i\n\t\t\toffset = 0\n\t\t\twhile j > 0:\n\t\t\t\tj -= 1\n\t\t\t\t# print(i, j, offset, prefix_str[j], prefix_str[i-offset])\n\t\t\t\tif prefix_str[j] == prefix_str[i - offset]:\n\t\t\t\t\toffset += 1\n\t\t\t\telse:\n\t\t\t\t\tj += offset\n\t\t\t\t\toffset = 0\n\t\t\tf.append(offset)\n\t\t\t# print('append', offset)\n\n\t\treturn f", "def getpaths_fromwindow(input_prefix_, window_, start_date_):\n input_paths = []\n d = start_date_\n for _ in range(window_):\n d -= datetime.timedelta(days=1)\n dirname = DATE_TEMPLATE % (d.year, d.month, d.day)\n path = os.path.join(input_prefix_, \"%s*\" % dirname)\n input_paths.append(tuple([\"%s-0000\" % dirname, path]))\n\n return input_paths", "def get_all_paths(coll, prefix_path=(), stop_at=None, stop_below=None):\n assert stop_at is None or stop_below is None, 'Only one of stop_at or stop_below can be used.'\n if stop_below is not None and stop_below in str(last(butlast(prefix_path))):\n return [[]]\n if stop_at is not None and stop_at in str(last(prefix_path)):\n return [[]]\n if isinstance(coll, dict) or isinstance(coll, Munch) or isinstance(coll, list):\n if isinstance(coll, dict) or isinstance(coll, Munch):\n items = iteritems(coll)\n else:\n items = enumerate(coll)\n\n return list(cat(map(lambda t: list(map(lambda p: [t[0]] + p,\n get_all_paths(t[1],\n prefix_path=list(prefix_path) + [t[0]],\n stop_at=stop_at,\n stop_below=stop_below)\n )),\n items))\n )\n else:\n return [[]]", "def startsWith(self, prefix):\n now = self.tree\n for i in prefix:\n if i in now:\n now = now[i]\n else:\n return False\n return True", "def get_realization_paths(rootdir, folder_prefix): #{{{\n fnames = []\n for root, dirs, files in os.walk(rootdir):\n if(root=='.'):\n dirs.sort()\n for adir in dirs:\n if(adir.startswith(folder_prefix)):\n fnames.append(rootdir + '/' + adir)\n\n return fnames #}}}", "def add_item_to_list(given_list, prefix):\n 
new_list = []\n if given_list:\n for item in given_list:\n item.lstrip()\n if item.startswith(\"http://\") or item.startswith(\"https://\") or item.startswith(\"//\"):\n if item.startswith(prefix):\n new_list.append(item)\n else:\n new_list.append(prefix + '/' + item)\n return new_list" ]
[ "0.70755565", "0.70603555", "0.7013892", "0.6863514", "0.6859876", "0.6569287", "0.65553904", "0.6512858", "0.650851", "0.6486868", "0.6460788", "0.6330737", "0.62926936", "0.6262605", "0.6248747", "0.6228626", "0.6223441", "0.61959684", "0.6190801", "0.6190228", "0.6180863", "0.61600935", "0.6111131", "0.611036", "0.6094501", "0.60724264", "0.60632956", "0.606054", "0.603285", "0.6002846" ]
0.8435369
0
Insert value into dictionary at the path specified by key_sequence. If the sequence of keys does not exist, it will be created
def insert_into_dictionary(dictionary: Dict, key_sequence: List[str], value):
    parent = dictionary
    for key in key_sequence[:-1]:
        if key not in parent.keys():
            parent[key] = {}
        parent = parent[key]
    parent[key_sequence[-1]] = value
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _set_by_path(dic, keys, value, create_missing=True):\n d = dic\n i = 0\n n_key = len(keys) - 1\n while i < n_key:\n k = keys[i]\n if isinstance(k, int):\n assert isinstance(d, list), \"Internal Error: %s is Expected as a list for %s.\" % (d, k)\n\n while len(d) <= k:\n d.insert(k, {})\n d = d[k]\n elif k in d:\n d = d[k]\n elif create_missing:\n next_key = keys[i + 1]\n if isinstance(next_key, int):\n if isinstance(d, list):\n d.insert(k, [])\n else:\n d[k] = []\n else:\n d[k] = {}\n d = d[k]\n else:\n return dic\n i += 1\n\n if isinstance(d, list) and keys[-1] >= len(d):\n d.insert(keys[-1], value)\n else:\n d[keys[-1]] = value\n return dic", "def insert(self, index, key, value):\n if key in self:\n # FIXME: efficiency?\n del self[key]\n self._sequence.insert(index, key)\n dict.__setitem__(self, key, value)", "def insert(self, key, value):\n\t\tself.__insert(key, value, key[1:])", "def try_insert(self, cache_key, paths):\r\n pass", "def insert(self, key):\r\n index = self.search(key)\r\n self.keys.insert(index, key)", "def insert_key(self, key : str, value : int) -> None:\n \n hash_key = self.hash_key(key)\n head = self.array[hash_key]\n \n while head.next:\n if head.next.key == key:\n head.next.value = value\n return\n head = head.next\n head.next = Node(key,value)\n self.keys.append(key)", "def _insert(self, key, value):\n entry = self._lookup(key)\n if entry.value is None:\n self.used += 1\n if entry.key is not dummy:\n self.filled += 1\n entry.key = key\n entry.hash = self.first_hash(key)\n entry.value = value", "def insert(self, index, key, value):\r\n if key in self.keyOrder:\r\n n = self.keyOrder.index(key)\r\n del self.keyOrder[n]\r\n if n < index:\r\n index -= 1\r\n self.keyOrder.insert(index, key)\r\n super(OrderedDict, self).__setitem__(key, value)", "def __setitem__(self, path, value):\n\n path = self.__check_path__(path)\n\n # d - dict, p - path (keys sequence)\n def set_key(d, p):\n k = p[0]\n\n if len(p) == 1:\n d[k] = value\n else:\n if not isinstance(d.setdefault(k, self._factory()), dict):\n d[k] = self._factory()\n set_key(d[k], p[1:])\n\n set_key(self.__dict__, path)", "def _insert(self, key):\n self.tree.insert(key)", "def add_by_list_of_keys(dictionary: Dict, key_path: List[Any], value: Any) -> Dict:\n key = key_path[0]\n dictionary[key] = (\n value\n if len(key_path) == 1\n else add_by_list_of_keys(\n dictionary[key] if key in dictionary else dict(),\n key_path[1:],\n value,\n )\n )\n return dictionary", "def _insert_item(self, key: _KT, value: _VT) -> None:\n dict.__setitem__(self, key, value)", "def add_by_dot_path(dictionary: Dict, key_path: str, value: Any) -> Dict:\n return add_by_list_of_keys(dictionary, key_path.split(\".\"), value)", "def insert(self, key, value=None):\n if isinstance(key, list):\n for k in key:\n self.insert(k)\n else:\n if key == self.key:\n # update key: value\n self.value = value\n elif key < self.key:\n if self.left == None:\n self.left = Tree(key, value)\n else:\n self.left.insert(key, value)\n else:\n if self.right == None:\n self.right = Tree(key, value)\n else:\n self.right.insert(key, value)", "def insert(self, pathlist):\n node = self.root\n for letter in pathlist:\n child = node.get(letter)\n if not child:\n node[letter] = {}\n node = node[letter]", "def test_utils_set_dict_value_from_path_creating_new_fields():\n dictionary = {}\n ralph_utils.set_dict_value_from_path(dictionary, [\"foo\", \"bar\"], \"baz\")\n assert dictionary == {\"foo\": {\"bar\": \"baz\"}}", "def copy_dictionary_without_paths(dictionary: Dict, 
key_sequence: List[List[str]]):\n ret = {}\n possibles = [ks for ks in key_sequence if len(ks) == 1]\n possibles = set(reduce(lambda x, y: x + y, possibles, []))\n for k, v in dictionary.items():\n if k in possibles:\n continue\n if type(v) == dict:\n ret[k] = copy_dictionary_without_paths(v, [ks[1:] for ks in key_sequence if len(ks) > 1])\n else:\n ret[k] = v\n return ret", "def insert(self, key, value):\n hash_key = hash(key) % self.length\n bucket = self.array[hash_key]\n for idx, key_val_pair in enumerate(bucket):\n k, v = key_val_pair\n if k == key:\n bucket[idx] = [key, value]\n return\n bucket.append([key, value])", "def _set_item(dic: dict, keys: list, value):\n\tdic = _get_item(dic, keys[:-1])\n\tdic[keys[-1]] = value", "def _put(self, key: str, value):\n current_storage_dict = self._storage\n sub_keys = key.split('.')\n i = 1\n length = len(sub_keys)\n for sub_key in sub_keys:\n if i < length:\n if sub_key not in current_storage_dict:\n current_storage_dict[sub_key] = dict()\n current_storage_dict = current_storage_dict[sub_key]\n elif sub_key in current_storage_dict and isinstance(current_storage_dict[sub_key], dict):\n current_storage_dict = current_storage_dict[sub_key]\n else:\n raise TypeError('Cannot overwrite key {}'.format(key))\n\n else:\n current_storage_dict[sub_key] = value\n\n i += 1", "def insert(self, key, value):\n self.root.insert(key, value)\n\n # Update the new root if need be.\n node = self.root\n while node.parent != None:\n node = node.parent\n self.root = node", "def append_by_dot_path(dictionary: Dict, key_path: str, value: Any) -> Dict:\n try:\n get_by_dot_path(dictionary, key_path).append(value)\n except KeyError:\n add_by_dot_path(dictionary, key_path, [value])\n return dictionary", "def _set_by_path(tree, keys, value):\n _get_by_path(tree, keys[:-1])[keys[-1]] = value", "def _set_by_path(tree, keys, value):\n _get_by_path(tree, keys[:-1])[keys[-1]] = value", "def _key_generated(self, key, index):\n self.keys[self.get_address(key)] = key\n self.last_generated_index = index", "def set(cls, hierarchical_dict: dict, key: str, value: Any) -> None:\n # split according to '.'\n hierarchical_key = key.split(\".\")\n\n # go over the the dictionary according to the path, create the nodes that does not exist\n element = hierarchical_dict\n for key in hierarchical_key[:-1]:\n if key not in element:\n element[key] = {}\n element = element[key]\n\n # set the value\n element[hierarchical_key[-1]] = value", "def addSequence(self, key):\r\n self.is_empty = False\r\n current = self.root\r\n i = 0\r\n highest_leaf = self.addSequence_aux(current, key, i)\r\n # Updating the root\r\n # If it is the first element that is added to the database\r\n if highest_leaf.highest_freq is None:\r\n current.highest_freq = highest_leaf\r\n current.frequency = highest_leaf.frequency\r\n current.index_next = highest_leaf.index\r\n else:\r\n # Compare the frequency if it is not the first element on the database\r\n if current.frequency < highest_leaf.frequency:\r\n current.frequency = highest_leaf.frequency\r\n current.highest_freq = highest_leaf.highest_freq\r\n current.index_next = highest_leaf.index\r\n # If the frequency is equal then compare the lexicographical order\r\n elif current.frequency == highest_leaf.frequency:\r\n if current.index_next >= highest_leaf.index:\r\n current.frequency = highest_leaf.frequency\r\n current.highest_freq = highest_leaf.highest_freq\r\n current.index_next = highest_leaf.index", "def set_deep(config, key_seq, new_val):\n if 1 == len(key_seq):\n 
config[key_seq[0]] = new_val\n else:\n set_deep(config[key_seq[0]], key_seq[1:], new_val)", "def _add_item(dic: dict, keys: list, value):\n\tfor key in keys[:-1]:\n\t\tdic = dic.setdefault(key, {})\n\n\tdic[keys[-1]] = value", "def insert(self, key, value):\n # Resize array here if necessary.\n if key < 0: key = 0\n elif key > len(self): key = len(self)\n if key < len(self):\n for j in range(len(self), key, -1):\n self._items[j] = self._items[j - 1]\n self._items[key] = value\n self._size += 1\n self.incModCount()" ]
[ "0.6372905", "0.61910766", "0.60806626", "0.6001647", "0.58221465", "0.58184624", "0.5739152", "0.5730706", "0.57285595", "0.5695364", "0.5618897", "0.56144315", "0.55202925", "0.5516547", "0.54848045", "0.5456877", "0.5423328", "0.5423068", "0.53977156", "0.538743", "0.5375429", "0.53703463", "0.5343763", "0.5343763", "0.53217554", "0.5311516", "0.53096455", "0.5308596", "0.529284", "0.5281357" ]
0.7882464
0
Ensure the key name provided is legal
def check_valid_key_name(name):
    if type(name) not in [str]:
        return False
    bad_chars = ["*", ".", "&&&&"]
    for k in bad_chars:
        if k in name:
            return False
    return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _check_key_name(cls, name):\n return (isinstance(name, basestring) and\n re.match('^[A-Za-z][A-Za-z0-9_]*$', name) and\n not hasattr(cls, name))", "def isValidKey(key):\n return True", "def _check_key(key): # type: (str) -> None\n if not key:\n raise ValueError('Key must not be empty.')\n if '.' in key:\n raise ValueError('Key must not contain dots.')", "def _check_key(self, key):\n raise NotImplementedError", "def clean_key_name(self):\n key = self.cleaned_data['key_name']\n # Ensure key starts with prefix\n if not key.startswith(self.get_key_prefix()):\n raise forms.ValidationError('Key does not have required prefix.')\n # Ensure key exists\n if not self.get_upload_key():\n raise forms.ValidationError('Key does not exist.')\n return key", "def need_name(dictionary, raise_error=True):\r\n return key_checker(['name'])(dictionary, raise_error)", "def _validate_name(self, key, name):\n \n name = Project._condition_name(name)\n \n return name", "def _validate_key(self, key):\n if isinstance(key, str):\n key = unicode(key, 'utf-8')\n elif not isinstance(key, unicode):\n raise TypeError(\n \"`key` must be `str` or `unicode`, not `{}`\".format(\n key.__class__.__name__)\n )\n return key", "def test_invalid_key(self):\n with pytest.raises(yaenv.EnvError) as err:\n _ = yaenv.core.EnvVar('221b=\"starts with number\"')\n assert 'Invalid key' in str(err.value)\n with pytest.raises(yaenv.EnvError) as err:\n _ = yaenv.core.EnvVar('_=\"not assignable\"')\n assert 'Invalid key' in str(err.value)\n with pytest.raises(yaenv.EnvError) as err:\n _ = yaenv.core.EnvVar('o-o=\"invalid character\"')\n assert 'Invalid key' in str(err.value)", "def _check_special_token_identifier(key):\n if not (key.endswith('_token') and key != '_token'):\n raise ValueError('Each key needs to have the form \"name_token\".'\n ' Received {}'.format(key))", "def _validKey(entry):\n # be forward compatible to zope3 contained objects\n raw_id = getattr(entry, '__name__', '')\n if not raw_id:\n raw_id = entry.getId()\n\n # This substitution is based on the description of cite key restrictions at\n # http://bibdesk.sourceforge.net/manual/BibDesk%20Help_2.html\n return VALIDIDPAT.sub('', raw_id)", "def _raiseIfWebsafeKeyNotValid(websafeKey, kind):\n # Check that websafeKey is not None\n if not websafeKey:\n raise endpoints.BadRequestException(\n \"Websafe key not provided for '%s'\" % kind)\n # Try to decode the websafe key into a real key\n try:\n key = ndb.Key(urlsafe=websafeKey)\n except:\n raise endpoints.BadRequestException(\n \"Websafe key provided for '%s' could not be decoded: %s\" %\n (kind, websafeKey))\n # Ensure that the key is of the desired kind\n if key.kind() != kind:\n raise endpoints.BadRequestException(\n \"Websafe key is not of the '%s' kind: %s\" % (kind, websafeKey))\n # If all is well, return the key\n return key", "def test_generate_key(self): \n k = Key().generate()\n self.assertRegex(k, \"[a-zA-Z0-9+\\/]+={0,2}\")", "def test_is_valid_label_key_invalid_input():\n # test length violations\n assert not is_valid_label_key(key=None) # Too short\n assert not is_valid_label_key(key=\"\") # Too short\n assert not is_valid_label_key(key=f\"{'p' * 254}/n\") # prefix too long\n assert not is_valid_label_key(key=\"/n\") # prefix too short\n assert not is_valid_label_key(key=\"p/\") # name too short\n assert not is_valid_label_key(key=\"a\" * 254) # name too long\n assert not is_valid_label_key(key=f\"d/{'b'*64}\") # name too long\n # test first character violations (not alphanum)\n assert not 
is_valid_label_key(key=\"-a\")\n assert not is_valid_label_key(key=\".b\")\n assert not is_valid_label_key(key=\" c\")\n # test last character violations (not alphanum)\n assert not is_valid_label_key(key=\"a-\")\n assert not is_valid_label_key(key=\"b.\")\n assert not is_valid_label_key(key=\"c \")\n assert not is_valid_label_key(key=\"sw33T#\")\n # test middle characters violations\n assert not is_valid_label_key(key=\"a$$a\")\n assert not is_valid_label_key(key=\"b b\")", "def test_is_valid_annotation_key_invalid_input():\n # test length violations\n assert not is_valid_annotation_key(key=None) # Too short\n assert not is_valid_annotation_key(key=\"\") # Too short\n assert not is_valid_annotation_key(key=f\"{'p' * 254}/n\") # prefix too long\n assert not is_valid_annotation_key(key=\"/n\") # prefix too short\n assert not is_valid_annotation_key(key=\"p/\") # name too short\n assert not is_valid_annotation_key(key=\"a\" * 254) # name too long\n assert not is_valid_annotation_key(key=f\"d/{'b'*64}\") # name too long\n # test first character violations (not alphanum)\n assert not is_valid_annotation_key(key=\"-a\")\n assert not is_valid_annotation_key(key=\".b\")\n assert not is_valid_annotation_key(key=\" c\")\n # test last character violations (not alphanum)\n assert not is_valid_annotation_key(key=\"a-\")\n assert not is_valid_annotation_key(key=\"b.\")\n assert not is_valid_annotation_key(key=\"c \")\n assert not is_valid_annotation_key(key=\"sw33T#\")\n # test middle characters violations\n assert not is_valid_annotation_key(key=\"a$$a\")\n assert not is_valid_annotation_key(key=\"b b\")", "def _check_name(self):\n\t\tpass", "def prepare_key(self, key):\n return smart_str(key)", "def _name_check(self, name, *args, chk_dict=None):\n if name is not None and len(name) > 0:\n lst = list(args)\n lst.append(name)\n if self._key_check(lst, chk_dict=chk_dict):\n result = EnvironmentDict._EXISTS\n else:\n result = EnvironmentDict._VALID\n else:\n result = EnvironmentDict._INVALID\n raise ValueError(f'Invalid name: {name}')\n return result", "def ensure_key_contents(bucket_name, key_name, contents):\n bucket = self.conn.get_bucket(bucket_name)\n key = bucket.get_key(key_name)\n self.assertEquals(key.get_contents_as_string(), contents,\n \"Bad contents\")", "def test_getKey_nokey(self):\n filename = os.path.join(os.getcwd(), 'sekrit')\n key = crypto.getKey(filename)\n self.failUnlessIsInstance(key, basestring,\n \"key isn't a string! 
type=%r\" % type(key))", "def test_valid_key(self):\n f = lws.valid_data_key\n assert f('string', int, r'string') is False\n assert f('string', str, r'test') is False\n assert f(123, int, '123') is False\n assert f(123.00, float, '123') is False\n assert f('123', str, r'[0-9]*') is True", "def test_invalid_chars_ssck(self):\r\n valid_base = SlashSeparatedCourseKey(u'org.dept-1%2', u'course.sub-2%3', u'run.faster-4%5')\r\n for key in SlashSeparatedCourseKey.KEY_FIELDS:\r\n with self.assertRaises(InvalidKeyError):\r\n # this ends up calling the constructor where the legality check should occur\r\n valid_base.replace(**{key: u'funny thing'})", "def test_is_valid_label_key_valid_input():\n # test valid label keys\n assert is_valid_label_key(key=\"l0l\")\n assert is_valid_label_key(key=\"l0L\")\n assert is_valid_label_key(key=\"L-l\")\n assert is_valid_label_key(key=\"L.L\")\n assert is_valid_label_key(key=\"4-you\")\n assert is_valid_label_key(key=\"you.2\")\n assert is_valid_label_key(key=\"p/n\")\n assert is_valid_label_key(key=\"prefix/you.2\")\n assert is_valid_label_key(key=\"how.sad/to-see\")\n assert is_valid_label_key(key=f\"{'d'*253}/{'n'*63}\")", "def _is_valid_keyspace_name(self, keyspace_name):\n if keyspace_name == None or not keyspace_name:\n return False\n return re.match(r\"^[a-z_]*[^-]$\", keyspace_name)", "def __missing__(self, key):\n global MISSING\n MISSING = key # For debugging - save name of missing key\n return INVALID", "def _key_name(self, key):\n if type(key) == type(\"\"):\n return str(curses.keyname(ord(key)).decode(\"utf-8\"))\n return False", "def check_type( string_key ) : \r\n\r\n if type( string_key ) != type( '' ) : \r\n\r\n # raise self.__class__( \"'%s': EGI wants the key to be four _characters_ (not %s) !\" % (type(string_key), ) ) \r\n raise Eggog( \"'%s': EGI wants the key to be four _characters_ (not %s) !\" % (type(string_key), ) ) \r\n \r\n else :\r\n \r\n return True", "def check_keys(self):", "def _get_key_name(self, name):\n base_path = force_text(self.location)\n final_path = urljoin(base_path + \"/\", name)\n name = os.path.normpath(final_path.lstrip('/'))\n\n if six.PY2:\n name = name.encode('utf-8')\n return name", "def test_validate_bookmark_key(self):\n valid_names = ['hodor', 'ostrich', 'potato123', 'dashy-key']\n invalid_names = ['thisnameisabittoolong', 'funny/characters', '-flag']\n\n for n in valid_names:\n self.assertTrue(bookmarks.BookmarkManager.validate_key(n))\n\n for n in invalid_names:\n self.assertFalse(bookmarks.BookmarkManager.validate_key(n))" ]
[ "0.78318816", "0.71168035", "0.70732856", "0.70552856", "0.7041611", "0.6899042", "0.67258614", "0.6718385", "0.64835036", "0.64728975", "0.64275926", "0.63844997", "0.633739", "0.6335194", "0.6309751", "0.63031626", "0.62937796", "0.6276736", "0.6256404", "0.62518495", "0.62502754", "0.6249968", "0.62392193", "0.62371546", "0.62359756", "0.6211511", "0.6177006", "0.6168342", "0.6137072", "0.61360246" ]
0.7321008
1
Averages all buffers across distributed copies of the network
def average_buffers(network):
    size = int(dist.get_world_size())
    with torch.no_grad():
        for buf in network.buffers():
            dist.all_reduce(buf, op=dist.ReduceOp.SUM)
            buf.div_(size)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _refresh_buffers(self) -> None:", "def Allreduce(\n self,\n sendbuf: Union[DNDarray, torch.Tensor, Any],\n recvbuf: Union[DNDarray, torch.Tensor, Any],\n op: MPI.Op = MPI.SUM,\n ):\n ret, sbuf, rbuf, buf = self.__reduce_like(self.handle.Allreduce, sendbuf, recvbuf, op)\n if buf is not None and isinstance(buf, torch.Tensor) and buf.is_cuda and not CUDA_AWARE_MPI:\n buf.copy_(rbuf)\n return ret", "def Reduce(\n self,\n sendbuf: Union[DNDarray, torch.Tensor, Any],\n recvbuf: Union[DNDarray, torch.Tensor, Any],\n op: MPI.Op = MPI.SUM,\n root: int = 0,\n ):\n ret, sbuf, rbuf, buf = self.__reduce_like(self.handle.Reduce, sendbuf, recvbuf, op, root)\n if buf is not None and isinstance(buf, torch.Tensor) and buf.is_cuda and not CUDA_AWARE_MPI:\n buf.copy_(rbuf)\n return ret", "def _buffer_all(self):\n self._buffer()", "def create_buffers(self):", "def pc_output_buffers_full_avg(self, *args):\n return _spacegrant_swig.ax25_udp_pdu_receiver_sptr_pc_output_buffers_full_avg(self, *args)", "def pc_output_buffers_full_avg(self, *args):\n return _spacegrant_swig.udp_debug_sptr_pc_output_buffers_full_avg(self, *args)", "def swap_buffers(self):\n raise NotImplementedError()", "def Allreduce8(net, blobs, reduced_affix, gpu_indices):\n reduced = [None] * 8\n # Reduction level 1\n for i in [0, 2, 4, 6]:\n reduced[i] = net.Add(\n [blobs[i], blobs[i + 1]],\n blobs[i] + reduced_affix,\n device_option=OnGPU(gpu_indices[i])\n )\n # Reduction level 2\n for i in [0, 4]:\n reduced[i] = net.Add(\n [reduced[i], reduced[i + 2]],\n str(blobs[i]) + reduced_affix,\n device_option=OnGPU(gpu_indices[i])\n )\n # Reduction level 3: this involves a copy.\n reduced_4_copy = reduced[4].Copy(\n [],\n str(reduced[4]) + '_copy',\n device_option=OnGPU(gpu_indices[0])\n )\n reduced[0] = reduced[0].Add(\n reduced_4_copy,\n reduced[0],\n device_option=OnGPU(gpu_indices[0])\n )\n # Broadcast level 1\n reduced[4] = reduced[0].Copy(\n [],\n reduced[4],\n device_option=OnGPU(gpu_indices[4])\n )\n # Broadcast level 2\n for i in [2, 6]:\n reduced[i] = reduced[i - 2].Copy(\n [],\n reduced[i],\n device_option=OnGPU(gpu_indices[i])\n )\n # Broadcast level 3\n for i in [1, 3, 5, 7]:\n reduced[i] = reduced[i - 1].Copy(\n [],\n blobs[i] + reduced_affix,\n device_option=OnGPU(gpu_indices[i])\n )\n return reduced", "def freeze_encoder(self):\n self.dfs_freeze(self.net.conv1)\n self.dfs_freeze(self.net.conv2)\n self.dfs_freeze(self.net.conv3)\n self.dfs_freeze(self.net.conv4)\n self.dfs_freeze(self.net.conv5)", "def sync_buffers(self, model: nn.Module) -> None:\n # if not update buffer, copy buffer from orig model\n if self.update_buffers:\n warnings.warn(\n '`update_buffers` is set to True in this ema model, and '\n 'buffers will be updated in `update_parameters`.')\n\n avg_buffer = itertools.chain(self.module.buffers())\n orig_buffer = itertools.chain(model.buffers())\n for b_avg, b_orig in zip(avg_buffer, orig_buffer):\n b_avg.data.copy_(b_orig.data)", "def sync_buffers(self, model: nn.Module) -> None:\n # if not update buffer, copy buffer from orig model\n if self.update_buffers:\n warnings.warn(\n '`update_buffers` is set to True in this ema model, and '\n 'buffers will be updated in `update_parameters`.')\n\n avg_buffer = itertools.chain(self.module.buffers())\n orig_buffer = itertools.chain(model.buffers())\n for b_avg, b_orig in zip(avg_buffer, orig_buffer):\n b_avg.data.copy_(b_orig.data)", "def port_buffer_drop():", "def pc_output_buffers_full(self, *args):\n return 
_spacegrant_swig.ax25_udp_pdu_receiver_sptr_pc_output_buffers_full(self, *args)", "def pc_output_buffers_full_avg(self, *args):\n return _spacegrant_swig.binary_sink_sptr_pc_output_buffers_full_avg(self, *args)", "def pc_output_buffers_full_avg(self, *args):\n return _TestA_swig.my_qpsk_demod_cb_sptr_pc_output_buffers_full_avg(self, *args)", "def pc_output_buffers_full(self, *args):\n return _TestA_swig.my_qpsk_demod_cb_sptr_pc_output_buffers_full(self, *args)", "def pc_output_buffers_full_avg(self, *args):\n return _spacegrant_swig.ax25_udp_pdu_gen_sptr_pc_output_buffers_full_avg(self, *args)", "def pc_output_buffers_full(self, *args) -> \"std::vector< float,std::allocator< float > >\":\n return _beamforming_swig.doaesprit_sptr_pc_output_buffers_full(self, *args)", "def SendBufferSize(self) -> int:", "def SendBufferSize(self) -> int:", "def sendBuffer():\n dislin.sendbf()", "def ReceiveBufferSize(self) -> int:", "def ReceiveBufferSize(self) -> int:", "def pc_output_buffers_full_avg(self, *args):\n return _spacegrant_swig.message_debug_sptr_pc_output_buffers_full_avg(self, *args)", "def pc_output_buffers_full(self, *args):\n return _spacegrant_swig.binary_sink_sptr_pc_output_buffers_full(self, *args)", "def pc_output_buffers_full(self, *args):\n return _spacegrant_swig.udp_debug_sptr_pc_output_buffers_full(self, *args)", "def pc_output_buffers_full(self, *args):\n return _uhd_swig.usrp_sink_sptr_pc_output_buffers_full(self, *args)", "def pc_output_buffers_full_avg(self, *args):\n return _spacegrant_swig.ax25_pdu_packer_sptr_pc_output_buffers_full_avg(self, *args)", "def __init__(self, buffer_size=3000):\n self.buffer = []\n self.buffer_size = buffer_size" ]
[ "0.6124415", "0.598823", "0.5967219", "0.5920144", "0.5800446", "0.5794461", "0.576535", "0.5741834", "0.57155806", "0.56905043", "0.5683074", "0.5683074", "0.56442", "0.5642326", "0.5638489", "0.56378144", "0.56323874", "0.5623122", "0.5610344", "0.56079584", "0.56079584", "0.56011665", "0.55893975", "0.55893975", "0.55881816", "0.5573869", "0.5573207", "0.55714864", "0.5565885", "0.5553097" ]
0.62437236
0
Asks the question corresponding to the missing information for the trip
def question_generator(self):
    self.status_conv = 'yes_no_question_asked'
    questions = config.questions
    if not self.voyage.get('voyageurs') and 'voyageur_add' not in self.infos_needed:
        self.infos_needed.append('voyageur_add')
    if self.infos_needed:
        if self.is_hotel_needed() and 'hotel' not in self.infos_needed and 'hotel' not in self.voyage:
            self.infos_needed.insert(1, 'hotel')
            self.hotel_asked = True
        key = self.infos_needed[0]
        self.info_asked = key
        return questions[key]
    else:
        self.status_conv = 'confirmation_asked'
        return self.conv_recap()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ask_question(index, attributes):\n \n print(\"ask_question, index: \", str(index))\n\n curr_question = quiz.list_fragen[attributes[\"sess_questions\"][index]].get_frage()\n print(\"@ask_question: \", curr_question)\n\n print(\"@ask_question before if \")\n if len(attributes[\"scores\"]) > 1:\n print(\"@ask_question if > 1\")\n text = \"<s>Frage {0} an Spieler {1}:</s> <s>{2}</s>\".format(int(attributes[\"current_round\"]),\\\n attributes[\"current_player\"], curr_question)\n else:\n print(\"@ask_question else\")\n text = \"<s>Frage {0}:</s> <s>{1}</s>\".format(int(attributes[\"current_round\"]),\\\n curr_question)\n \n text = slower_speech(text)\n text += TICK_BEEP_SOUND\n \n print(\"@ask_question before setatts\")\n attributes[\"current_question\"] = curr_question\n print(\"@ask_question before setatts\")\n\n #returns string here excepcionally because response is formed elsewhere\n return text", "def show_question(self):\n print(self.question)", "def show_question(self, text, option0, option1, option2=\"\"):\r\n\r\n raise NotImplementedError", "def show_question(title, message, cancel=True):\n\n pass", "def question(self):\n return self.details[KEY_QUESTION]", "def ask_dialog(self, title=\"\", vars=[], help=\"\"):\n\t\tpass", "def make_extra_questions_txt(self):\n raise NotImplementedError", "async def question(self, channel_id, user_infos, user_id, team_id):\n\n q = random.choice(self.questions) # Random question selection from list\n answers = q[\"badAnswers\"] + [q[\"goodAnswer\"]] # Save all possible answers\n goodAnswer = q[\"goodAnswer\"] # Save the good answer\n random.shuffle(answers) # Shuffle everything\n\n choices = {} # Dict of choices\n\n for i in range(len(answers)): # For every possible answer\n choices[str(i + 1)] = answers[i]; # Fill the choices dict with normal people understandable indexes\n\n message = \"{} \\n\".format(q[\"question\"]) # Start the string question message\n\n for key in sorted(choices):\n message += (\"Reponse {} : {} \\n\").format(key, choices[key]) # Add choices to question message\n\n id = 0\n for i in range(len(choices)):\n if choices[str(i + 1)] == goodAnswer: # Retrieve the good answer id (lol). Should probably do differently...\n id = i + 1\n\n self.currentAskedQuestions[user_id] = str(id) # Put the entry in the dict with good answer id\n return await self.sendText(message, channel_id,user_infos, team_id)", "def question_new_search():", "def question(update, context):\n bot = context.bot\n user = update.message.from_user\n inc_msg = str.lower(update.message.text)\n\n # answer why questions with a reasons from database\n if 'waarom' in inc_msg:\n\n # return a random reason from file\n with open(REASONS) as file:\n lines = file.readlines()\n msg = random.choice(lines)\n\n # answer other questions with\n else:\n # TODO: introduce random silence\n rng = random.random()\n\n if rng < 0.9 and not 'rob' not in inc_msg:\n return\n options = [\n f\"Vraag het maar niet aan mij, ik ben niet alwetend.\",\n (\"https://lmgtfy.com/?q=\" + inc_msg.replace(\" \", \"+\") + \"&pp=1&s=g&t=w\"),\n f\"Ja he dat weet ik toch ook niet, google dat maar ff {user.first_name}...\"\n ]\n\n msg = random.choice(options)\n time.sleep(HUMAN_DELAY * len(msg))\n\n bot.send_message(chat_id=update.message.chat_id, text=msg,\n reply_to_message_id=update.message.message_id,\n parse_mode=ParseMode.MARKDOWN)", "def ask(self, question):\n\n\t\t# If you're just trying to test voice detection, you can uncomment\n\t\t# the following 5 lines. 
Bobby will guess \"yellow flashlight\" and will prompt\n\t\t# you to correct him by saying \"blue flashlight\"\n\n\t\t# fake_answers = [\"no\", \"yes\", \"yes\", \"yes\", \"no\", \"yes\", \"yes\"]\n\t\t# global count\n\t\t# count += 1\n\t\t# print question\n\t\t# return fake_answers[count - 1]\n\n\t\t# self.say(question)\n\t\t# #starts listening for an answer\n\t\t# self.asr.subscribe(\"TEST_ASR\")\n\t\t# data = (None, 0)\n\t\t# while not data[0]:\n\t\t# \tdata = self.mem.getData(\"WordRecognized\")\n\t\t# #stops listening after he hears yes or no\n\t\t# self.asr.unsubscribe(\"TEST_ASR\")\n\t\t#\n\t\t# print data\n\t\t#\n\t\t# for word in self.yes_no_vocab:\n\t\t# \tfor syn in self.yes_no_vocab[word]:\n\t\t# \t\tif data[0] == syn:\n\t\t# \t\t\treturn word", "def question_new_validate():", "def description_ques(analysis):\n if analysis.sv[0].vrb_tense.startswith('present'):\n analysis.sv[0].vrb_tense = 'present progressive'\n if analysis.sv[0].vrb_tense.startswith('past'):\n analysis.sv[0].vrb_tense = 'present progressive'\n sentence = y_o_question(analysis)\n for i in sentence:\n if i == 'liking':\n sentence[sentence.index(i)] = 'like'\n return ['what'] + sentence", "def question(dico):\n l = []\n for i in range(len(dico)):\n l.append(dico[i][0])\n affichage_question(dico,l)", "def test_question_with_choices(self):\n question = create_question(question_text='Question with choices', days=0)\n response = self.client.get(reverse('polls:details', args=(question.id, )))\n self.assertContains(response, question.question_text)", "def __init__(self, question, answer):\n\n self.question = question\n self.answer = answer\n\n self.q_and_a = {\n 'Question:': self.question,\n 'Correct Answer:': self.answer,\n }", "def ask_msg(self, context):\n msg = self._get_base_message(self.ASK_QUESTION)\n self._add_thread(msg)\n self._add_relationship(msg, self.for_relationship)\n msg['text'] = self.question\n msg['detail'] = self.descr\n msg['valid_responses'] = self.valid_responses or []\n msg['signature_required'] = self.signature_required\n return msg", "async def faq(self, ctx):\n embed = discord.Embed(title='FAQ',\n color=self.bot.color)\n entries = {'How do I add this bot to my server?':\n 'Use `invite` or click the link in `help` (you must have Manage Server permissions).',\n 'Hey, can you add (some feature)?':\n 'Use `suggest`.',\n 'None of the commands are working!':\n 'The bot may be missing permissions or you may have been automatically blacklisted for spam. '\n 'If the problem persists, report it.',\n 'What character is that in the profile picture?':\n '[Shiro from Sewayaki Kitsune no Senko-san!](https://myanimelist.net/character/167062/Shiro)'}\n for name, value in entries.items():\n embed.add_field(name=name, value=value, inline=False)\n embed.set_footer(text='Have other questions? 
Join the support discord or PM me @Trackpad#1234.')\n\n await ctx.send(embed=embed)", "def get_answers(self):\r\n pass", "def ask_questions():\n print(story.prompts)\n prompts = story.prompts\n \n return render_template(\"questions.html\", prompts = prompts)", "def decision(question):\n return click.confirm(question, show_default=True)", "def w_question(analysis):\n if analysis.sv:\n #Opinion is a what question so we have to make some changes\n if analysis.sv[0].vrb_main[0].endswith('like'):\n verb = analysis.sv[0].vrb_main[0]\n analysis.sv[0].vrb_main[0] = verb[:len(verb) - 4] + 'think+of'\n\n #processing as yes or no question\n phrase = y_o_question(analysis)\n\n #Specific processing for invitation\n if analysis.aim == 'invitation':\n return ['how', 'about'] + phrase[1:]\n\n #Specific processing for classification\n if analysis.aim.startswith('classification'):\n aim_question = other_functions.list_rebuilding(analysis.aim)\n return ['what', 'kind', 'of'] + aim_question[1:] + phrase\n\n #It is an how question\n if other_functions.is_an_adj(analysis.aim) == 1:\n return ['how'] + [analysis.aim] + phrase\n elif analysis.aim == 'manner':\n return ['how'] + phrase\n\n if analysis.aim == 'thing' or analysis.aim == 'situation' or analysis.aim == 'explication' or analysis.aim == 'opinion':\n return ['what'] + phrase\n return ['what'] + [analysis.aim] + phrase", "def test_quick_answer(self):\n pass", "def acceptQuestion(self, i, name):\n\t\tprint('question is', i)\n\t\tself.server.accQuestion.emit(i, name)", "def win():\r\n if q1 == \"f\" and answer == 1955 and answer2 == 28:\r\n print(\"Congratulations! You answered all of the questions correctly!\")\r\n else:\r\n print(\"You did not get all the questions correct. :( \")\r\n print(\"Just keep swimming and you'll get it next time!\")", "def start_game(answer, session):\n\n print(\"start_game, answer: \", answer)\n\n attributes = reset_attributes()\n\n if answer == \"einem spieler\":\n answer = \"1\"\n if answer == \"vier spieler\":\n answer = \"4\"\n\n if answer in [str(x) for x in range(1, 5)]:\n curr_round = 1\n curr_player = 1\n state = \"Gameon\"\n scores = {x:0 for x in range(1, int(answer)+1)}\n sess_fragen = populate_questions(scores)\n \n attributes[\"question_index\"] = 0\n attributes[\"current_round\"] = curr_round\n attributes[\"current_player\"] = curr_player\n attributes[\"state\"] = state\n attributes[\"scores\"] = scores\n attributes[\"sess_questions\"] = sess_fragen\n\n if answer == \"1\":\n text = \"<s>Alles klar. \"+ TEXT_BREAK + \"Wir beginnen ein Spiel mit einem Spieler.\"+\\\n \"</s> <s>Das Quiz enthält {} Fragen.\\\n </s>\".format(TOTAL_ROUNDS)\n else:\n text = \"<s>Alles klar.\" + TEXT_BREAK + \"Wir beginnen ein Spiel mit {} Spielern\"\\\n .format(answer) +\\\n \"</s><s> Es werden jeweils {} Fragen an jeden Spieler gestellt.\\\n </s>\".format(TOTAL_ROUNDS)\n\n frage1 = ask_question(0, attributes)\n text += TICK_HELP_MESSAGE\n text += frage1\n card_text = \"Spiel mit {0} Spielern begonnen.\\n\".format(len(scores)) + clear_tags(frage1)\n\n else:\n richtige_zahl_prompt = \"Sag eine Nummer zwischen 1 und 4.\"\n text = \"Ungültige Spielerzahl. 
\" + richtige_zahl_prompt\n frage1 = SPIELER_PROMPT_TEXT\n card_text = text\n\n attributes[\"current_question\"] = frage1\n attributes[\"speech_output\"] = text\n attributes[\"reprompt_text\"] = frage1\n \n return response(text, should_end_session=False, reprompt_text=frage1, \\\n attributes=attributes, card_text=card_text)", "async def info(self, ctx):\n\n level = await self.get_player_level(ctx.author)\n embed = discord.Embed()\n embed.colour = discord.Colour.blurple()\n embed.set_author(name=str(ctx.author), icon_url=ctx.author.avatar_url)\n\n embed.title = f'Your current level : {level}'\n\n embed.add_field(name='Question', value=f'{self.enigmas[level][\"question\"]}')\n\n embed.set_footer(text='I love Ducks')\n\n await ctx.send(embed=embed)", "def show_info(self):\n print(\"Problem number: \" + str(self.number))\n print(\"Problem name: \" + str(self.name))\n print(\"Problem description: \" + str(self.desc))", "def user_question():\n return input('What would you like? (espresso/latte/cappuccino): ')", "def parse_question(save=False):\n\n text = read_screen(save)\n lines = text.splitlines()\n question = \"\"\n options = list()\n flag = False\n\n for line in lines:\n if not flag:\n question = question + \" \" + line\n if '?' in line:\n flag = True\n continue\n if flag:\n if line != '':\n options.append(line)\n\n return question, options" ]
[ "0.6656517", "0.64856565", "0.6297228", "0.61885995", "0.6117319", "0.6103682", "0.60901123", "0.607107", "0.60502553", "0.59895825", "0.5979764", "0.5957236", "0.5946883", "0.5912476", "0.5895169", "0.58443356", "0.5833472", "0.58222497", "0.58212095", "0.5807978", "0.58051157", "0.5803229", "0.5797498", "0.5782904", "0.5781899", "0.5779946", "0.5772695", "0.5737692", "0.57264304", "0.57161814" ]
0.64970094
1
Read the data file into a list of records.
def get_data(fname: str) -> list:
    with open(fname) as f:
        return [rec.split() for rec in f.read().split("\n\n")]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_data(self, file_name):\n test_data_dict = {}\n i = 0\n file_handler = open(file_name)\n for data in file_handler.readlines():\n if i == 0:\n pass\n else:\n if data[len(data) - 1] == '\\n':\n data = data[:len(data) - 1]\n test_data_dict[i] = data\n i += 1\n file_handler.close()\n return test_data_dict", "def read(self):\n dictionary = {}\n with open(self.path) as file:\n key_header = \"\"\n for line in file:\n entry = line.strip().split()\n if len(entry) == 0:\n continue\n if len(entry) == 1:\n key_header = entry[0]+\"_\"\n else:\n key = entry[0].strip()\n value = reduce(lambda x1, y1: x1+\" \" + y1, entry[1:])\n dictionary[key_header+key] = value\n return dictionary", "def read_datafile_to_dictionaries(fname, CONFIG):\n logger.info(\"Reading {} to dictionaries.\".format(fname))\n\n ftype = infer_ftype(fname.lower())\n\n if ftype == \"ACCESSIONS\":\n data_dicts = []\n elif ftype == \"FASTA\":\n data_dicts = read_fasta_to_dicts(fname, CONFIG)\n elif ftype == \"JSON\":\n data_dicts = []\n else:\n logger.error(\"Unknown input filetype for {}. Fatal.\".format(ftype)); sys.exit(2)\n\n return (ftype, data_dicts)", "def file_to_dictionary():\n\n return;", "def _load_dict(infile):\n\n # read the data into a list\n data = []\n\n # open the file\n f = open(infile)\n\n for line in f:\n # ignore hashed lines\n if not line.startswith('#') and not line.startswith('@'):\n\n # mind to strip newlines\n data.append(line.strip('\\n\\r').split('\\t'))\n \n # create the dictionary in which the data will be stored\n d = {}\n\n # check for first line, if a local ID is given in the header (or simply\n # \"ID\"), take this line as the ID, otherwise create it\n if data[0][0].lower() in ['local_id','localid']:\n local_id = True\n else:\n local_id = False\n\n # iterate over data and fill the dictionary (a bit inefficient, but enough\n # for the moment)\n i = 1\n for line in data[1:]:\n if local_id:\n d[int(line[0])] = line[1:]\n else:\n d[i] = line\n i += 1\n\n # assign the header to d[0]\n if local_id:\n d[0] = [x.lower() for x in data[0][1:]]\n else:\n d[0] = [x.lower() for x in data[0]]\n\n # return the stuff\n return d", "def read(file_path: str) -> dict:\n\n if not os.path.isfile(file_path):\n raise FileNotFoundError(\"The file `%s` must exist and be a BLM file\" % file_path)\n\n file_contents = open(file_path, 'r').read()\n headers = parse_headers(file_contents)\n definitions = parse_definitions(headers, file_contents)\n data = parse_data(headers, definitions, file_contents)\n\n return {'headers': headers, 'definitions': definitions, 'data': data}", "def read_data():\r\n\r\n if os.path.isfile(os.getcwd() + \"/www/access_list.txt\") and os.stat(os.getcwd() + \"/www/access_list.txt\").st_size != 0:\r\n data = json.load(open(os.getcwd() + \"/www/access_list.txt\"))\r\n return collections.defaultdict(dict, data)\r\n else:\r\n return collections.defaultdict(dict)", "def read(self, data_path: str = None, *args, **kwargs) -> Dict:\n\n with open(data_path) as f:\n content = f.readlines()\n\n dataset = dict()\n dataset[\"train\"] = [(line,) for line in content]\n dataset[\"valid\"] = []\n dataset[\"test\"] = []\n\n return dataset", "def ReadFromFile(self):\n\n data = \"\"\n try:\n with open(self.fileLoc, \"r\") as file:\n data += file.read()\n except IOError:\n with open(self.fileLoc, \"w\") as file:\n file.write(\" \")\n return {}\n \n if len(data) == 0:\n return {}\n\n data = self.Decrypt(data)\n\n data = \"\".join(data.split())\n kvstrings = data.split(\"%\")\n kvstrings = filter(None, kvstrings)\n\n pairs 
= {}\n for x in kvstrings:\n kv = x.split(\":\")\n pairs[kv[0]] = kv[1]\n\n return pairs", "def load_data(self):\n logging.debug('Loading data from file ({})...'.format(self.file_name))\n parsed_data = list()\n with open(self.file_name) as file_data:\n for line in file_data.readlines():\n temp = dict()\n if 'JD' in line:\n continue\n line = line.split()\n temp['ts'], temp['mag'], temp['dif'] = float(line[0][:14]), float(line[1]), float(line[2])\n temp['f_mag'] = self.kalman_filter(temp['mag'])\n temp['dt'] = self.jd_to_datetime(temp['ts'])\n temp['dt_cor'] = self.jd_to_datetime(temp['ts'] - TIME_CRT)\n parsed_data.append(temp)\n logging.debug(' {} records loaded.'.format(len(parsed_data)))\n logging.debug(parsed_data[0])\n self.data_stream = parsed_data", "def parse_data(infile):\n blocks = re.compile(' '.join(['=' * 9] * 8))\n dashes = re.compile('^-{79}$')\n title = re.compile('^Timings for (.*)$')\n row = re.compile(' '.join(['(.{9})'] * 7) + ' (.{8,9})')\n\n lines = infile.readlines()\n\n data = co.OrderedDict()\n index = 0\n\n while index < len(lines):\n line = lines[index]\n\n if blocks.match(line):\n try:\n name = title.match(lines[index + 1]).group(1)\n except Exception:\n index += 1\n continue\n\n data[name] = {}\n\n assert dashes.match(lines[index + 2])\n\n cols = parse_row(row, lines[index + 3])\n\n assert blocks.match(lines[index + 4])\n\n get_row = parse_row(row, lines[index + 5])\n assert get_row[0] == 'get'\n\n set_row = parse_row(row, lines[index + 6])\n assert set_row[0] == 'set'\n\n delete_row = parse_row(row, lines[index + 7])\n assert delete_row[0] == 'delete'\n\n assert blocks.match(lines[index + 9])\n\n data[name]['get'] = dict(zip(cols, get_row))\n data[name]['set'] = dict(zip(cols, set_row))\n data[name]['delete'] = dict(zip(cols, delete_row))\n\n index += 10\n else:\n index += 1\n\n return data", "def _load_data(name):\n filename = os.path.join(os.path.abspath(os.path.dirname(__file__)),\n 'data', name)\n with np.load(filename) as f:\n return dict(f.items())", "def readData(self,datafile = None):\n self.datafile = datafile or self.datafile\n self.data = []\n for line in open(self.datafile):\n userid,itemid,record,_ = line.split()\n self.data.append((userid,itemid,int(record)))", "def readData(infile):\n dd = {}\n for line in infile:\n if line[0] == \"#\":\n continue\n d = line[:-1].split(\"\\t\")\n contig, start, end, score = d[0], int(d[3]), int(d[4]), float(d[5])\n # if contig != \"I\": continue\n if contig not in dd:\n dd[contig] = []\n dd[contig].append((start, end, score))\n return dd", "def read_reference_data():\n return {f:read_local_file(f) for f in os.listdir(DATA_DIR)}", "def read_from_file():\n\n plik=open('data.txt')\n for line in plik:\n (key, temp, weat, pres)=line.split()\n dict_from_file[key]=[temp, weat.replace('_',' '), pres]", "def load_data_from_filename(self, filename):\n try:\n if self.verbose:\n print 'Getting data from ' + filename\n\n self.data_dict = {}\n\n with open(filename, 'rt') as f:\n for line in f:\n data_match = re.match(r'^(\\d+)[\\,|\\t|\\s|\\|](\\d+)$', line)\n if data_match:\n node = int(data_match.group(1))\n part = int(data_match.group(2))\n\n if part in self.data_dict:\n self.data_dict[part].append(node)\n else:\n self.data_dict[part] = [node]\n\n except Exception, e:\n print 'Unexpected error:', str(e)\n print 'Problems loading data from file.'\n exit()", "def get_data_from_file(file_name):\n stocks = []\n with open(file_name) as fh:\n keys = line2words(fh.readline()) # assigns the first line of the text document as the 
keys\n for line in fh: # reads the subsequent lines and assigns them as the as the values\n stocks.append(dict(zip(keys, line2words(line))))\n return stocks", "def file_read(filename):\n f = open(filename, 'r') \n d_str = f.read() \n f.close()\n\n d = dict(eval(d_str))\n return d", "async def load(self, file: IO) -> dict:", "def read_record(file_, num_evo_entries):\n dict_ = {}\n\n while True:\n next_line = file_.readline()\n case = switch(next_line)\n if case('[ID]' + '\\n'):\n id_ = file_.readline()[:-1]\n dict_.update({'id': id_})\n elif case('[PRIMARY]' + '\\n'):\n primary = letter_to_num(file_.readline()[:-1], _aa_dict)\n dict_.update({'primary': primary})\n elif case('[EVOLUTIONARY]' + '\\n'):\n evolutionary = []\n for residue in range(num_evo_entries):\n evolutionary.append([float(step) for step in file_.readline().split()])\n dict_.update({'evolutionary': evolutionary})\n elif case('[SECONDARY]' + '\\n'):\n secondary = letter_to_num(file_.readline()[:-1], _dssp_dict)\n dict_.update({'secondary': secondary})\n elif case('[TERTIARY]' + '\\n'):\n tertiary = []\n for axis in range(NUM_DIMENSIONS): \n tertiary.append([float(coord) for coord in file_.readline().split()])\n dict_.update({'tertiary': tertiary})\n elif case('[MASK]' + '\\n'):\n mask = letter_to_num(file_.readline()[:-1], _mask_dict)\n dict_.update({'mask': mask})\n elif case('\\n'):\n return dict_\n elif case(''):\n return None", "def getBMData(filename):\n\n data = {}\n f = open(filename)\n line = f.readline() \n data['name'], data['gender'], data['age'] = [], [], []\n data['division'], data['country'], data['time'] = [], [], []\n while line != '':\n split = line.split(',')\n data['name'].append(split[0])\n data['gender'].append(split[1])\n data['age'].append(int(split[2]))\n data['division'].append(int(split[3]))\n data['country'].append(split[4]) \n data['time'].append(float(split[5][:-1])) #remove \\n\n line = f.readline()\n f.close()\n return data", "def create_dicts():\n load_data_for_dict('data/atis/train/seq.in', 'data/atis/voc/vocabulary.json')\n load_data_for_dict('data/atis/valid/seq.in', 'data/atis/voc/vocabulary.json')\n load_data_for_dict('data/atis/test/seq.in', 'data/atis/voc/vocabulary.json') \n load_data_for_dict('data/atis/train/seq.out', 'data/atis/voc/slot_vocabulary.json')", "def retrieve_data(file_paths):\n data_dict = {}\n for file_path in file_paths:\n with open(file_path) as fh:\n try:\n content = yaml.load(fh.read())\n except yaml.YAMLError as e:\n raise MiuraException(\n \"Unable to parse yaml at {0}: \\n {1}\".format(\n file_path,\n str(e)\n ))\n if not isinstance(content, dict):\n raise MiuraException(\n \"{0} is does not translate to a dictionary!\".format(file_path)\n )\n data_dict.update(content)\n return data_dict", "def DictData(self):\n reader = csv.DictReader( open( self.file, \"rU\" ), dialect = \"excel\" )\n return reader", "def sdf_reader(cls, filename, dbIdentifier = \"LM_ID\"):\n res_dict = {}\n with open(filename) as fp:\n line = fp.readline()\n line_id = \"\"\n line_dict = {}\n while line:\n if line.startswith(\">\"):\n if dbIdentifier in line:\n if line_id:\n res_dict[line_id] = line_dict\n line_dict = {}\n line_id = \"\"\n line_id = fp.readline().rstrip()\n else:\n key = line.split(\"<\")[1].split(\">\")[0]\n line_dict[key] = fp.readline().rstrip()\n line = fp.readline()\n\n fp.close()\n return res_dict", "def load_data() -> Tuple[Dict, Dict]:\n\n female_dict, male_dict = dict(), dict()\n\n for dfile, g_dict in zip(\n ['data/female.txt', 'data/male.txt'],\n [female_dict, 
male_dict]):\n\n with open(dfile, 'r') as fh:\n\n for line in fh:\n splits = line.split()\n country = splits[0]\n g_dict[country] = set(splits[1:])\n\n return female_dict, male_dict", "def get_recipes_dict(filename, mode_type, encode):\n with open(filename, mode_type, encoding=encode) as file:\n recipe_dict = dict()\n for line in file:\n dish = line.strip()\n amount = int(file.readline())\n buffer_list = list()\n for item in range(amount):\n ingredient, quantity, measure = file.readline().split('|')\n buffer_list.append(\n {'ingredient_name': ingredient.strip(), 'quantity': int(quantity), 'measure': measure.strip()}\n )\n recipe_dict[dish] = buffer_list\n file.readline()\n return recipe_dict", "def load_data(self):\n df = pandas.read_csv(self.path)\n self.data_dict = df.to_dict(orient=\"list\")\n return self.data_dict", "def read_file(file_path):\n\n output_dict = dict()\n try:\n if os.path.exists(file_path):\n with open(file_path) as fd:\n output = fd.readlines()\n for idx in range(len(output)):\n key_info = output[idx].split('=')[0].strip()\n value_info = output[idx].split('=')[1].strip()\n output_dict[key_info] = value_info\n return output_dict\n except Exception as e:\n SysTools.logger.warning(\"Read file:%s failed, reason:%s\" % (file_path, str(e)))" ]
[ "0.7438967", "0.7316724", "0.69993526", "0.69232804", "0.6786655", "0.67822355", "0.67553395", "0.6743297", "0.66417485", "0.660935", "0.657544", "0.6519889", "0.646409", "0.64569414", "0.64356375", "0.6424636", "0.6400758", "0.6400161", "0.637106", "0.6336366", "0.63237756", "0.63045484", "0.6281403", "0.62768996", "0.62734044", "0.6255505", "0.62481356", "0.6246713", "0.62365586", "0.6229283" ]
0.734245
1
opens the info window
def open_info_dialog(self):
    info_dialog = InfoDialog()
    info_dialog.exec_()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def showInfoWindow():\n\treturn 0", "def on_info_click(self, event):\n def on_close(event, wind):\n wind.Close()\n wind.Destroy()\n event.Skip()\n wind = wx.PopupTransientWindow(self, wx.RAISED_BORDER)\n if self.auto_save.GetValue():\n info = \"'auto-save' is currently selected. Temperature bounds will be saved when you click 'next' or 'back'.\"\n else:\n info = \"'auto-save' is not selected. Temperature bounds will only be saved when you click 'save'.\"\n text = wx.StaticText(wind, -1, info)\n box = wx.StaticBox(wind, -1, 'Info:')\n boxSizer = wx.StaticBoxSizer(box, wx.VERTICAL)\n boxSizer.Add(text, 5, wx.ALL | wx.CENTER)\n exit_btn = wx.Button(wind, wx.ID_EXIT, 'Close')\n wind.Bind(wx.EVT_BUTTON, lambda evt: on_close(evt, wind), exit_btn)\n boxSizer.Add(exit_btn, 5, wx.ALL | wx.CENTER)\n wind.SetSizer(boxSizer)\n wind.Layout()\n wind.Popup()", "def window_info_toggle():\n window_info.hide() if window_info.showing else window_info.show()", "def action_world_info(self):\n dialog = WorldInfoDialog(self, self.world, self.config)\n dialog.exec()\n\n # Re-focus the main window\n self.activateWindow()", "def get_info():\r\n app = application.Application()\r\n\r\n app.start(r\"C:\\\\AL50022\\\\Circ\\\\bin\\\\Circ.exe\")\r\n\r\n app.Circ.menu_select(\"View\")", "def openAbout(self):\r\n dlg = AboutDialog()\r\n dlg.show()\r\n dlg.exec_()", "def show(self, window):\r\n\r\n return", "def Open(self):\n self._is_open = True\n def closure(pane):\n if not pane.IsShown():\n pane.Show(True)\n self._PaneInfoOperation(closure)", "def about_developer(self):\r\n self.pop_window(title=\"About\", \r\n msg=\"ChikonEye Version: 2.0.1 \\nDeveloper Info:\\nName : Ashraf Minhaj \\nEmail : [email protected] \\nsite : ashrafminhajfb.blogspot.com \\nyouTube : fusebatti\")", "def _open_window(self):\r\n\t\t# Creating the window\r\n\t\tself._window = Window(self, Locations.RESTAL)", "def showInfo(parent,message,title=_('Information')):\r\n return askStyled(parent,message,title,wx.OK|wx.ICON_INFORMATION)", "def show_about():\r\n\tmsg = messagebox\r\n\tmsg.showinfo(\"\", '''Creator: Ellis, Kevin\r\nOrganization: n/a\r\nDescription: Retrieve the network information from a database\r\nDate: 2020208\r\nVersion: 1.4''')", "def show_about():\n messagebox.showinfo(\n title='About', message=\"PyLNP - Lazy Newb Pack Python Edition\\n\\n\"\n \"Port by Pidgeot\\n\\nOriginal program: LucasUP, TolyK/aTolyK\")", "def showHelpPopupWin(self, button, info_text):\n if self._help_popup_win:\n self._help_popup_win.close()\n self._help_popup_win = None\n else:\n self._help_popup_win = info_window.showInfoWindow(parent=self,\n ctrl=button,\n info_text=info_text)", "def showinfo(self, msg):\n tkinter.messagebox.showinfo('Information', msg)", "def info(text, window=None):\n message(text, u'Informação', M_INFO, B_OK, window)", "def __editShowCodeInfo(self):\n self.showEditorInfo(self.activeWindow())", "def details_window(self, instance: Union[Nobleman, Location]):\n window = tk.Toplevel()\n window.title(instance.name)\n window.protocol(\"WM_DELETE_WINDOW\",\n partial(self.close_details_window, instance))\n self.register_extra_window(instance, window)\n self.generate_window_content(instance, window)", "def show_info(title, message):\n\n pass", "def info_dialog(self, title, message):\n return self._impl.info_dialog(title, message)", "def show_info_message(self, msg, msecs=3):\n\n message.PopupMessage.info(msg, parent=self, duration=msecs, closable=True)", "def show_window(self):\n self.show()", "def launchHelpWindow(self):\r\n 
self.popup(\"Help\",HELP,geom=\"350x200\")", "def about(self, widget):\n self.about_dialog.show()", "def about():\r\n url = 'https://engineering.tau.ac.il/tauengalumni'\r\n source_url = 'https://github.com/EranPer/tauengalumni'\r\n\r\n layout = [[sg.Text('TAU Engineering Alumni Registering and Sticker Printing System.')],\r\n [sg.Text('Made by Eran Perelman. 2021')],\r\n [sg.Text('TAU Engineering Alumni Website',\r\n enable_events=True, key='-LINK-', font=('Arial underline', 11))],\r\n [sg.Text('Source Code',\r\n enable_events=True, key='-SOURCE_CODE-', font=('Arial underline', 11))],\r\n [sg.B('Ok')]]\r\n\r\n window = sg.Window(\"TAU Engineering Alumni Registering and Sticker Printing System\", layout)\r\n\r\n while True:\r\n event, values = window.read()\r\n if event == '-LINK-':\r\n webbrowser.open(url)\r\n if event == '-SOURCE_CODE-':\r\n webbrowser.open(source_url)\r\n if event == 'Ok':\r\n break\r\n\r\n window.close()", "def show(self):\n self.Show()", "def _info(self, message):\r\n dlg = wx.MessageDialog(self, message,\r\n 'xmi2magik',\r\n wx.OK | wx.ICON_INFORMATION\r\n )\r\n dlg.ShowModal()\r\n dlg.Destroy()", "def _about_dialogue(self):\n webbrowser.open('https://github.com/ldrumm/yubikey-totp-gui')", "def aboutmenu(self):\n tkMessageBox.showinfo(\"About This Program\", \"The project of PSIT subject in 2014.\\nThis program is unit converter program.\")", "def openAboutApp(self):\n self.about_Window = QtWidgets.QDialog()\n self.about_ui = Ui_aboutDialog()\n self.about_ui.setupUi(self.about_Window)\n self.about_Window.show()" ]
[ "0.8189632", "0.71575576", "0.6955355", "0.6811716", "0.67940843", "0.67372173", "0.6709553", "0.6677812", "0.66064024", "0.6605875", "0.6594258", "0.65664667", "0.6501804", "0.6482363", "0.6466345", "0.64624065", "0.6458878", "0.6444091", "0.6436782", "0.6403368", "0.63622516", "0.63596106", "0.6350652", "0.635031", "0.634326", "0.63185996", "0.6308324", "0.62664765", "0.6251523", "0.62085575" ]
0.8544418
0
returns a list of unique datasets in a directory
def list_unique_datasets(root, type='roi'):
    import os
    lst_dir = os.listdir(root)
    lst_datasets = []
    for item in lst_dir:
        if f".{type}.hdf5" in item:
            rt, camera_name, dataset_name, chunk, extension = split_raw_filename(item)
            if dataset_name not in lst_datasets:
                lst_datasets.append(dataset_name)
    return lst_datasets
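A quick way to exercise a helper like this is against a throwaway directory. The sketch below is illustrative only: `split_raw_filename` is external to the snippet, so a stand-in that splits on dots is stubbed here, and the `rig.cam.dataset.chunk.roi.hdf5` naming is an assumed layout, not one taken from the original project.

```python
import os
import tempfile

def split_raw_filename(name):
    # Stand-in for the project's helper: assumes dot-separated
    # "<root>.<camera>.<dataset>.<chunk>.<type>.hdf5" names.
    root, camera, dataset, chunk, ext = name.split(".", 4)
    return root, camera, dataset, chunk, ext

def list_unique_datasets(root, type='roi'):
    lst_datasets = []
    for item in os.listdir(root):
        if f".{type}.hdf5" in item:
            _, _, dataset_name, _, _ = split_raw_filename(item)
            if dataset_name not in lst_datasets:
                lst_datasets.append(dataset_name)
    return lst_datasets

with tempfile.TemporaryDirectory() as d:
    for fname in ("rig.cam0.mouseA.000.roi.hdf5",
                  "rig.cam0.mouseA.001.roi.hdf5",
                  "rig.cam1.mouseB.000.roi.hdf5"):
        open(os.path.join(d, fname), "w").close()
    print(list_unique_datasets(d))  # e.g. ['mouseA', 'mouseB'] (listdir order)
```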
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_dataset_ids(clean_folder):\n files = os.listdir(clean_folder)\n\n datasets = list(set([i.split('.')[0] for i in files]))\n\n return [d for d in datasets if d + '.otu_table.clean.feather' in files and d + '.metadata.clean.feather' in files]", "def _list_datasets_from_dir(path: github_api.GithubPath) -> List[str]:\n if not path.exists():\n # Should be fault-tolerant in the future\n raise FileNotFoundError(f'Could not find datasets at {path}')\n return sorted([ds for ds in path.iterdir() if _is_dataset_path(ds)])", "def getDatasets(self, dirname, dataset_list):\r\n \r\n files = self.loadDirectory(dirname)\r\n \r\n result = []\r\n for dataset_name in dataset_list:\r\n arr = np.concatenate([f[dataset_name] for f in files])\r\n result.append(arr)\r\n \r\n return result", "def get_unique_hashes():\n return list( set( [ filename.split(\"_\")[0] for filename in os.listdir(CACHE_DIRECTORY) ] ) )", "def get_dataset_list(data_folder=None):\r\n\r\n known_dataset_list = [\"raw_clean_32\", # High Versus Low inhibitory Stimuli of Tinnitus and control patients\r\n \"Distress2010\", # Tinnitus Distress patients (1, 2, 3, 4 Distress) - resting state\r\n \"NormativeDB\", # Control Patient for Distress2010 - resting state (250+ files)\r\n \"Tinnitus_EEG\" # augmented dataset from Distress2010 (300+ subjects)\r\n ]\r\n if data_folder is None:\r\n dataset_list = known_dataset_list\r\n else:\r\n dataset_list = []\r\n with os.scandir(data_folder) as it:\r\n for entry in it:\r\n if (not entry.name.startswith('.')\r\n and not entry.is_file()\r\n and entry.name in known_dataset_list):\r\n dataset_list.append(entry.name)\r\n\r\n if len(dataset_list) == 0:\r\n print(\"get_dataset_list: didn't found any compatible dataset in folder \" + data_folder)\r\n return dataset_list", "def get_user_ids() -> List[str]:\n listOfFiles = os.listdir('public_dataset')\n listOfFiles.remove('data_description.pdf')\n try:\n listOfFiles.remove('.DS_Store')\n except:\n pass\n return listOfFiles", "def get_data_files():\n\n data_files = []\n for d, dirs, filenames in os.walk(share_jupyterhub):\n rel_d = os.path.relpath(d, here)\n data_files.append((rel_d, [os.path.join(rel_d, f) for f in filenames]))\n return data_files", "def datasets(dtype=\"*\", name=\"*\", rootdir=None, fullpath=False):\n if rootdir is None:\n rootdir = Path(cf.options.rootdir).expanduser() / \"datasets\" / API_VERSION\n else:\n rootdir = Path(rootdir).expanduser()\n if not rootdir.exists():\n raise ValueError(f\"rootdir does not exist: {rootdir}\")\n\n files = sorted(glob(str(rootdir/ FreezableAPI.to_slug(dtype,name))))\n if fullpath:\n files = files\n else:\n files = [os.path.basename(x) for x in files]\n return files", "def list_all():\n if os.path.exists(DATA_DIR):\n return os.listdir(DATA_DIR)\n return []", "def get_downloaded():\n\n result = set()\n\n for name in os.listdir(PATH):\n filename, ext = name.split('.')\n if ext not in EXTENSIONS:\n continue\n\n ts, username, id = filename.split('+')\n result.add(id)\n return result", "def get_all_available_dataset_names(dataset_folder=DATASET_FOLDER, limit_datasets=None):\n datasets = glob('{}/*/dataset.py'.format(dataset_folder))\n datasets += glob('{}/dataset_*.py'.format(dataset_folder))\n\n dataset_folders = [\n x.replace('/dataset.py', '').replace('dataset_', '').replace('.py', '').replace('/', '.').split('.')[-1]\n for x in datasets\n ]\n\n if limit_datasets is not None:\n dataset_folders = [x for x in dataset_folders if x in limit_datasets]\n\n return sorted(dataset_folders)", "def 
get_available_datasets():\n files = [file for file in glob.glob(os.path.join(MODULE_ROOT, \"datasets/*.json\"))]\n datasets = []\n for file in files:\n with open(file, \"r\") as f:\n dataset_info = json.load(f)\n datasets.append(dataset_info)\n return datasets", "def getDataFiles(directoryName):\r\n \r\n return listdir(directoryName)", "def list_datasets():\n return METADATA.keys()", "def loadDirectory(self, dirname):\r\n cachelist=os.listdir(dirname)\r\n testlist=fnmatch.filter(cachelist,'*.hdf5')\r\n \r\n for file_ in testlist:\r\n print(\"Using {0}\".format(file_))\r\n \r\n files = [h5py.File(os.path.join(dirname, fn),'r') for fn in testlist]\r\n return files", "def folder(fpath):\n file_paths = glob.glob(fpath + '/*.dat')\n return list(file_paths)", "def KittiTestDataset(test_root_path):\n \n names = os.listdir(test_root_path)\n dataset = [[os.path.join(test_root_path, name)] for name in names]\n \n return dataset", "def get_datasets(sim_args):\n if len(sim_args.data_folders) == 1 and sim_args.data_folders[0] == 'all':\n data_tags = [\n 'Webscope_C14_Set1',\n 'Webscope_C14_Set2',\n 'MSLR-WEB10k',\n 'NP2003',\n 'NP2004',\n 'HP2003',\n 'HP2004',\n 'TD2003',\n 'TD2004',\n 'MQ2007',\n 'MQ2008',\n 'OHSUMED',\n ]\n elif len(sim_args.data_folders) == 1 and sim_args.data_folders[0] == 'CIKM2017':\n data_tags = [\n 'MSLR-WEB10k',\n 'NP2003',\n 'NP2004',\n 'HP2003',\n 'HP2004',\n 'TD2003',\n 'TD2004',\n 'MQ2007',\n 'MQ2008',\n 'OHSUMED',\n ]\n elif len(sim_args.data_folders) == 1 and sim_args.data_folders[0] == 'letor64':\n data_tags = [\n 'NP2003',\n 'NP2004',\n 'HP2003',\n 'HP2004',\n 'TD2003',\n 'TD2004',\n ]\n # random.shuffle(data_tags)\n else:\n data_tags = sim_args.data_folders\n for data_tag in data_tags:\n assert data_tag in DATASET_COLLECTION, 'Command line input is currently not supported.'\n yield DATASET_COLLECTION[data_tag]", "def list_datasets():\n datasets = {}\n for datafile in HERE.glob(\"*.csv.gz\"):\n index = False\n name = datafile.name[:-7]\n if \".indexed\" in name:\n name = name.replace(\".indexed\", \"\")\n index = True\n datasets[name] = {\"index\": index, \"file\": datafile}\n return datasets", "def get_ids(directory):\n return (f[:-4] for f in os.listdir(directory))", "def scan(self):\n try:\n for dataset_folder in os.scandir(\n self.path_dict['DATASETS_FOLDER']): # phase one -> scan local datasets dir\n if not dataset_folder.name.startswith('.') and dataset_folder.is_dir():\n self.local_datasets.append(dataset_folder.name)\n print(\"Local dataset found : \", dataset_folder.name, 'Folder size',\n self.get_tree_size(\n os.path.join(self.path_dict['DATASETS_FOLDER'], dataset_folder.name)) / 10 ** 6,\n 'MB')\n for dataset in self.to_be_used_datasets:\n if dataset not in self.local_datasets:\n print(dataset, ' dataset does not appear to be installed on your computer. 
It will be downloaded.')\n self.download_queue.append(dataset)\n print(\"If you think a dataset was downloaded incorrectly, \"\n \"delete the dataset and run the program again.\")\n return self.local_datasets\n except:\n print(\"An error may have occurred while reading the dataset.\")", "def get_data_files(dirname):\r\n flist = []\r\n for dirpath, _dirnames, filenames in os.walk(dirname):\r\n for fname in filenames:\r\n flist.append(osp.join(dirpath, fname))\r\n return flist", "def fixture_sets(*args):\n return [os.path.join(*args, dir)\n for dir in os.listdir(os.path.join(FIXTURE_DATA, *args))\n if os.path.isdir(os.path.join(FIXTURE_DATA, *args, dir))\n ]", "def get_dataset_filelist(dataset):\n\n query = {\n \"_source\": {\n \"includes\": [\"info.directory\", \"info.name\"]\n },\n \"query\": {\n \"match_phrase_prefix\": {\n \"info.directory.analyzed\": dataset\n }\n }\n }\n\n es = CEDAElasticsearchClient()\n results = scan(es, query=query, index='opensearch-files')\n\n file_list = [\n os.path.join(\n item['_source']['info']['directory'],\n item['_source']['info']['name']\n ) for item in results\n ]\n\n return file_list", "def find_data(self):\n data_list = []\n for root, dirs, files in os.walk(pathfinder.data_path()):\n for name in files:\n data_list.append(os.path.join(root, name))\n return data_list", "def load(self):\n\t\t# Initialize empty list\n\t\tdata_files = []\n\n\t\t# Append the Drusen files to the list\n\t\tfor single_file in os.listdir(self.data_dir):\n\t\t\tdata_files.append(single_file)\n\t\treturn data_files", "def list_all(train_dir):\r\n path = train_dir\r\n result = []\r\n for fn in os.listdir(path): #fn is the file name\r\n result.append(fn)\r\n return result", "def _generate_datasets(self):\n datasets = list()\n for fname in sorted(os.listdir(self.base_dir)):\n if not self._filename_re.match(fname):\n continue\n\n file_path = os.path.join(self.base_dir, fname)\n try:\n fh = self._open_hdf5(file_path)\n\n except (IOError, OSError) as e:\n warnings.warn('Cannot access {}; skipped'.format(file_path))\n print(e)\n continue\n\n for key in fh:\n if self._groupname_re.match(key.lstrip('/')):\n datasets.append(ObjectTableWrapper(fh, key, self._schema))\n continue\n\n warn_msg = 'incorrect group name \"{}\" in {}; skipped this group'\n warnings.warn(warn_msg.format(os.path.basename(file_path), key))\n\n return datasets", "def files(self):\n all_files = set()\n for label in self.filesets:\n all_files.update(self.filesets[label])\n return all_files", "def importAllDatasets(directory):\n head_index = findIndex(temp_list, \"Gaze\")\n point_index = findIndex(temp_list, \"Point\")\n grab_index = findIndex(temp_list, \"Grab\")\n pos_index = findIndex(temp_list, \"Position\")\n\n head_data = pd.read_csv(temp_list[head_index]) if head_index != None else None\n point_data = pd.read_csv(temp_list[point_index]) if point_index != None else None\n grab_data = pd.read_csv(temp_list[grab_index]) if grab_index != None else None\n pos_data = pd.read_csv(temp_list[pos_index]) if pos_index != None else None\n\n\n return head_data, point_data, grab_data, pos_data" ]
[ "0.7031487", "0.6984933", "0.67306", "0.66171193", "0.6566382", "0.64591396", "0.64051497", "0.6381419", "0.63651735", "0.6338045", "0.63201356", "0.626137", "0.625739", "0.6252171", "0.6217978", "0.61235845", "0.6122705", "0.61049205", "0.60466945", "0.6039118", "0.603777", "0.6012733", "0.5987767", "0.5963795", "0.5960864", "0.5944245", "0.59146255", "0.5914456", "0.5914321", "0.59008235" ]
0.77526206
0
Test the primes under 10 and 100
def test_primes_under_10(self):
    self.assertEqual(sieve(10), [2, 3, 5, 7])
    self.assertEqual(sieve(100), [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31,
                                  37, 41, 43, 47, 53, 59, 61, 67, 71, 73,
                                  79, 83, 89, 97])
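Both assertions presuppose a `sieve(n)` that returns every prime up to `n` in ascending order. The tested project's implementation isn't shown in this record; a minimal Sieve of Eratosthenes that satisfies the assertions would be:

```python
def sieve(n):
    """Return all primes <= n using the Sieve of Eratosthenes."""
    if n < 2:
        return []
    is_prime = [True] * (n + 1)
    is_prime[0] = is_prime[1] = False
    for p in range(2, int(n ** 0.5) + 1):
        if is_prime[p]:
            # Strike out every multiple of p, starting at p*p.
            is_prime[p * p::p] = [False] * len(range(p * p, n + 1, p))
    return [i for i, prime in enumerate(is_prime) if prime]

assert sieve(10) == [2, 3, 5, 7]
assert len(sieve(100)) == 25 and sieve(100)[-1] == 97
```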
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_with_10_prime_numbers(self):\n numbers = [3,5,7,11,13,17,19,23,29,31]\n for number in numbers:\n self.assertFalse(has_divisors(number, int(math.sqrt(number) // 1) + 1), \"Number {} is a prime number.\".format(number))", "def test_prime_10(self):\n\t self.assertTrue(prime_generator(10), [2, 3, 5, 7])", "def test_12():\n assert primes(12) == [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61]", "def test_with_10_not_prime_numbers(self):\n numbers = [4,8,10,15,20,155,270,300,444,985]\n for number in numbers:\n self.assertTrue(has_divisors(number, int(math.sqrt(number) // 1) + 1), \"Number {} is not a prime number.\".format(number))", "def test_primes_under_1000000(self):\n self.assertEqual(len(sieve(100)), 25)\n self.assertEqual(len(sieve(1000)), 168)\n self.assertEqual(len(sieve(10000)), 1229)\n self.assertEqual(len(sieve(100000)), 9592)\n self.assertEqual(len(sieve(1000000)), 78498)", "def test_is_prime_valid(self):\n sol = solution.Solution();\n self.assertTrue(sol.isPrime(2))\n self.assertTrue(sol.isPrime(3))\n self.assertTrue(sol.isPrime(7))\n #self.assertTrue(sol.isPrime(863))", "def primenumber(x):\n if x >= 2:\n for y in range(2,x):\n if not (x % y):\n return False\n else:\n return False\n return True", "def test_prime(n):\n if SIEVE[n]:\n return True\n else:\n return False", "def comprobar_primo(num):\n primo = True\n for i in range(2, num):\n if num%i == 0:\n primo = False\n return primo", "def test_prime_12(self):\n\t self.assertTrue(prime_generator(12), [2, 3, 5, 7, 11])", "def basicIsPrime(n,K=100):\n if n % 2 == 0:\n return n == 2\n if n in primesList.lessThanHundredThousand:\n return True\n return None", "def test_25(self):\n self.assertFalse(is_prime(25))", "def isPrime(n, primes):\n\n k = math.log(n, 2) # number of bits in n\n r = getRounds(k)\n\n return checks(n, primes, r) # run checks", "def test_is_prime_invalid(self):\n sol = solution.Solution();\n self.assertFalse(sol.isPrime(1))\n self.assertFalse(sol.isPrime(4))\n self.assertFalse(sol.isPrime(6))\n #self.assertFalse(sol.isPrime(864))", "def prime_test(n,p):\n for i in range(2, p):\n thing = 1\n while thing == 1:\n if n % i == 0:\n n = n/i\n else:\n thing = 0\n if n == 1:\n return False\n return True", "def start_prime_test():", "def return_prime_numbers_less_tahn_100():\r\n primes = []\r\n for num in range(100):\r\n is_prime = True\r\n for i in range(2, num):\r\n if num % i == 0:\r\n is_prime = False \r\n if is_prime:\r\n primes.append(num)\r\n return primes", "def getPrime(bits):\n\twhile(True) :\n\t\t# on continue a tirer des nombres tant que l'on n'a pas trouve de nombre premier\n\t\tp = getrandbits(bits)\n\t\tif(miller_rabin(p,100)) :\n\t\t\treturn p", "def prime_numbers(max_number_eval=100):\n prime_numbers_list = list(next_prime(max_number_eval))\n print('The prime numbers from 2 to {} are:{}'.format(max_number_eval, prime_numbers_list))", "def isprime(x):\n if x <= 1: return False \n if x % 2 == 0: return x == 2\n for k in range(3, int(sqrt(x))+1, 2): \n if x % k == 0: return False\n return True", "def test_8(self):\n self.assertFalse(is_prime(8))", "def isprime(n=936):\n if n < 3: return False\n for i in range(2, n):\n if n % i == 0:\n return False\n return True", "def test_5():\n assert primes(5) == [2, 3, 5, 7, 11]", "def isprime(x):\n # 1 and 0 are not primes\n if( x < 2):\n return False\n if( x == 2):\n return True\n # All evens are not prime\n if (x % 2 == 0):\n return False\n\n # check others, up x / 2\n else:\n for y in range(3, int(x**(0.5)+1), 2):\n ##print(y)\n if( x % y 
== 0):\n return False\n return True", "def test_15(self):\n\t self.assertTrue(prime_generator(15), [2, 3, 5, 7, 11, 13])", "def isprime(n):\n if n % 2 == 0:return False\n return all(n % i for i in range(3, int(n**0.5) + 1, 2))", "def is_prime(n):\r\n if n in (2, 3, 5, 7, 11, 13, 17, 19): return(True)\r\n if (n<=1 or n%2==0 or n%3==0): return(False)\r\n # determine upper limit of test range =>\r\n ulimit = (int(math.ceil(math.sqrt(n)))+1)\r\n return(not any(n%k==0 for k in range(3, ulimit, 2)))", "def primes():\n yield 2\n candidate = 3\n while True:\n for i in range(3, int(sqrt(candidate)) + 1, 2):\n if (candidate % i) == 0:\n break\n else:\n yield candidate\n candidate += 2", "def primes():\n yield 2\n candidate = 3\n while True:\n for i in range(3, int(sqrt(candidate)) + 1, 2):\n if (candidate % i) == 0:\n break\n else:\n yield candidate\n candidate += 2", "def isPrime(x):\n for i in range(2,int(x**0.5)+1):\n if (x % i == 0):\n return False\n\n return True" ]
[ "0.80114126", "0.7838949", "0.7714869", "0.7712949", "0.7498481", "0.74840343", "0.74631464", "0.7443868", "0.74101394", "0.73933285", "0.7363734", "0.7359168", "0.7355548", "0.7354168", "0.7318888", "0.7302952", "0.72974956", "0.72494286", "0.7239387", "0.722427", "0.7209835", "0.7205473", "0.7191587", "0.71770805", "0.716228", "0.7148613", "0.7142538", "0.71337384", "0.71337384", "0.7119306" ]
0.79695326
1
Check the lengths of the list of primes up to 1000000
def test_primes_under_1000000(self):
    self.assertEqual(len(sieve(100)), 25)
    self.assertEqual(len(sieve(1000)), 168)
    self.assertEqual(len(sieve(10000)), 1229)
    self.assertEqual(len(sieve(100000)), 9592)
    self.assertEqual(len(sieve(1000000)), 78498)
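For reference, the expected counts are the prime-counting values π(10^2) through π(10^6): 25, 168, 1229, 9592, and 78498. Because no power of ten is itself prime, these counts cannot distinguish whether `sieve` treats its bound as inclusive or exclusive.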
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def count_prime():\n nums = []\n for i in range(2, 10000):\n if is_prime(i):\n nums.append(i)\n return nums", "def main() -> int:\n\n a = None\n for n, g in enumerate(gen_primes(100000, 1000000)):\n repeat, indices = check_if_has_3_repeated_digits(str(g))\n if repeat:\n a = check_for_family_of_primes(repeat, indices, list(str(g)))\n if len(a) > 7 and min(a) > 100000:\n EULER_LOGGER.debug(f\"{a}\")\n a = min([int(i) for i in a])\n break\n\n return a", "def check_almost_prime(num, primes_list):\n no_factors = 0\n max_prime = num - 1\n for prime in primes_list:\n if prime > max_prime:\n break\n while num % prime == 0:\n no_factors += 1\n num /= prime\n if no_factors > 2:\n return 0\n if no_factors == 2:\n return 1\n return 0", "def n_primes(n):\n primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59,\n 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127,\n 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193,\n 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269,\n 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, 347, 349,\n 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, 419, 421, 431,\n 433, 439, 443, 449, 457, 461, 463, 467, 479, 487, 491, 499, 503,\n 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, 593, 599,\n 601, 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673,\n 677, 683, 691, 701, 709, 719, 727, 733, 739, 743, 751, 757, 761,\n 769, 773, 787, 797, 809, 811, 821, 823, 827, 829, 839, 853, 857,\n 859, 863, 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, 947,\n 953, 967, 971, 977, 983, 991, 997][:n]\n\n if len(primes) < n:\n big_number = 2000\n while 'Not enough primes':\n primes = primes_from_2_to(big_number)[:n]\n if len(primes) == n:\n break\n big_number += 1000\n\n return primes", "def n_length_primes(n):\n assert n > 0, \"Cannot generate a list of %d length primes.\" % n\n a = []\n for i in range(10**(n-1), 10**n):\n if is_prime(i):\n a.append(str(i))\n return a", "def primes_list(n):\n count = 0\n if n <= 7:\n p_list = [2, 3, 5, 7, 11, 13, 17]\n return p_list[:n]\n else:\n upper_bound = int(n * log(n) + n * log(log(n)))\n return primes(upper_bound)[:n]", "def test_primes_under_10(self):\n self.assertEqual(sieve(10), [2, 3, 5, 7])\n self.assertEqual(sieve(100), [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31,\n 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97])", "def replace_count_primes(n):\n s = list(str(n))\n length = len(s)\n print(n)\n for c in combinations(digits[:length], mu):\n test_list = []\n for i in range(10):\n new_s = deepcopy(s)\n for place in c:\n new_s[int(place)] = str(i)\n new_n = int(''.join(new_s))\n if no_leading_zeroes and len(str(new_n)) < length:\n continue\n if is_prime(new_n):\n test_list.append(new_n)\n if(len(test_list) >= 8):\n print(\"FOUND!!!\")\n print(test_list)\n return True\n return False", "def primes(lim):\n limsqrt = ceil(sqrt(lim))\n s = [ True ] * (lim + 1)\n for i in range(2, ceil(sqrt(lim))):\n if s[i]:\n k = 0\n while True:\n l = i * i + k * i\n if l > lim: break\n k += 1\n s[l] = False\n return [i for i in range(2, lim + 1) if s[i]]", "def __len__(self):\n return len(self.primes)", "def test_if_it_outputs_correct_output_for_numbers_greater_than_50(self):\n self.assertEquals(len(prime_numbers(55)), 16)", "def count_almost_prime(a, b, primes_list):\n count = 0\n for num in range(a, b+1): \n count += check_almost_prime(num, primes_list)\n return count", "def fast():\n #get the first 4-6 digit primes\n for num in xrange(1000,1000000+1):\n if not is_prime[num]:\n 
continue\n num = str(num)\n do_check = False\n for r in '012':\n if num.count(r):\n do_check = True\n break\n if not do_check:\n continue\n prime_candidates = []\n fail_count = 0\n for m in '0123456789':\n check_n = num.replace(r, m)\n if check_n[0] != '0' and is_prime[int(check_n)]:\n prime_candidates.append(check_n)\n else:\n fail_count += 1\n if fail_count > 2:\n break\n if fail_count <= 2:\n print \"Answer:\", prime_candidates[0]\n break", "def getPrimes(limit): \n a = range(2,int(sqrt(limit)+1))\n isPrime = [True]*limit\n for n in a:\n if isPrime[n]:\n # for all primes, each multiple of prime from prime*prime to the end must not be prime\n for i in xrange(n*n, limit, n): \n isPrime[i] = False\n primes = [i for i in xrange(2,len(isPrime)) if isPrime[i]]\n return primes", "def sieve(limit):\n primes = []\n\n s = xrange(2, limit + 1)\n while len(s) != 0:\n primes.append(s[0])\n s = [n for n in s if (n % s[0]) != 0]\n\n return primes", "def eratosthenes_mem(limit):\n if isinstance(limit, (int, float)) and limit == int(limit):\n limit = int(limit)\n else:\n raise ValueError\n primes = [2]\n multiples = [2]\n limit += 1\n for candidate in range(3, limit):\n if candidate not in multiples:\n primes.append(candidate)\n multiples.append(2*candidate)\n for i, m in enumerate(multiples):\n if m <= candidate:\n multiples[i] += primes[i]\n return np.asarray(primes)", "def eratosthenes(limit):\n if isinstance(limit, (int, float)) and limit == int(limit):\n limit = int(limit)\n else:\n raise ValueError\n primes = []\n mask = [1]*(limit+1)\n for i in range(2, limit+1):\n if mask[i]:\n primes.append(i)\n for j in range(i*i, limit+1, i):\n mask[j] = 0\n return np.asarray(primes)", "def truncatable_primes():\n list_tp = []\n i = 8\n while len(list_tp) < 11:\n if is_truncatable(i):\n list_tp.append(i)\n i += 1\n if i % 100 == 0:\n print(\"i : \", i)\n return list_tp, sum(list_tp)", "def list_primes(limit):\n sieve = [False]*2 + [True] * (limit-2)\n n = 2\n while n <= sqrt(limit):\n if sieve[n]:\n yield n\n for m in xrange(n**2, limit, n): # multiples\n sieve[m] = False # mark multiples as non prime\n n += 1\n while n < limit:\n if sieve[n]:\n yield n\n n += 1", "def test_12():\n assert primes(12) == [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61]", "def primes(count):\n\n prime_numbers = [2]\n next_num = 3 \n\n def is_prime(next_num):\n if next_num % 2 == 0:\n return False \n \n for i in range(3, next_num, 2):\n if next_num % i == 0:\n return False \n return True \n\n while count > len(prime_numbers): \n if is_prime(next_num): \n prime_numbers.append(next_num)\n next_num += 1\n\n return prime_numbers", "def primes(numOfPrimes):\n\n primes = []\n # we want to start at 2003, which is the first prime after 2000, seeing as\n # we absolutely need to fit all 2000 keys on the hash table,\n i = 2003\n\n while len(primes) < numOfPrimes:\n isPrime = True\n\n for k in range(2, i):\n if i % k == 0:\n isPrime = False\n break\n\n if isPrime:\n primes.append(i)\n i += 1\n\n return primes", "def count_circular_primes(upper_bound=999999):\n\n is_prime = eulerlib.list_primality(upper_bound)\n\n def is_circular_prime(input_n):\n n_string = str(input_n)\n return all(\n is_prime[int(n_string[i:] + n_string[:i])] for i in range(len(n_string))\n )\n\n ans = sum(1 for i in range(len(is_prime)) if is_circular_prime(i))\n return ans", "def test_with_10_prime_numbers(self):\n numbers = [3,5,7,11,13,17,19,23,29,31]\n for number in numbers:\n self.assertFalse(has_divisors(number, int(math.sqrt(number) // 1) + 1), 
\"Number {} is a prime number.\".format(number))", "def primesList(n):\n sieve = [True]*n\n for i in range(3,int(n**0.5)+1,2):\n if sieve[i]:\n sieve[2*i::i] = [False]*(len(sieve[2*i::i]))\n return [2]+[i for i in range(3,n,2) if sieve[i]]", "def isPrime(n, primes):\n\n k = math.log(n, 2) # number of bits in n\n r = getRounds(k)\n\n return checks(n, primes, r) # run checks", "def getListOfPrimes(k = 40, n = 1000000):\n\n low = 2 ** (k - 1) # smallest number k bits could be\n lim = min(int(math.sqrt(low)), n + 1) # we don't want to generate any primes larger than n\n\n numList = [True] * lim # initialise boolean list\n primes = [] # initialise list of primes\n\n for i in range(2, lim): # loop through list from index 2\n if numList[i]: # if it is True\n primes.append(i) # must be prime\n\n for j in range(i*i, lim, i): # loop through multiples\n numList[j] = False # setting them to false\n\n return primes # return ptimes", "def prime_pi(n):\n if n < 2:\n return 0\n\n primes = sieve(n)\n return len(primes)", "def primes(n):\n return [i for i in xrange(1, n + 1) if mr_prime(i)]", "def primes(count):\n\n prime_nums = [2]\n prime = 3\n\n for i in range(1, count):\n\n while prime not in [3, 5, 7] and (\n prime % 3 == 0 or prime % 5 == 0 or prime % 7 == 0\n ):\n prime += 2\n\n prime_nums.append(prime)\n prime += 2\n\n return prime_nums" ]
[ "0.72410476", "0.70842636", "0.70530206", "0.7051215", "0.70129216", "0.698902", "0.6978667", "0.6943401", "0.6928235", "0.6921163", "0.689601", "0.68873644", "0.68835855", "0.6882051", "0.6874404", "0.6851525", "0.6791846", "0.678916", "0.67570394", "0.67552835", "0.6745485", "0.6727215", "0.67193735", "0.6713911", "0.6706925", "0.66987294", "0.6688098", "0.6677964", "0.66509426", "0.6624041" ]
0.84035337
0
Kill any active children, returning any that were not terminated within timeout.
def kill_children(timeout=1) -> List[psutil.Process]:
    procs = child_manager.children_pop_all()
    for p in procs:
        try:
            p.terminate()
        except psutil.NoSuchProcess:
            pass
    gone, alive = psutil.wait_procs(procs, timeout=timeout)
    for p in alive:
        logger.warning("Cleaning up child: %d", p.pid)
        p.kill()
    return alive
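The terminate → `wait_procs` → kill escalation above can be tried in isolation. This sketch spawns a disposable child with `multiprocessing` and cleans it up the same way; the snippet's `child_manager` and `logger` are project-specific, so they are replaced here with a local process list and plain prints.

```python
import time
import multiprocessing
import psutil

def _sleep_forever():
    time.sleep(60)

if __name__ == "__main__":
    child = multiprocessing.Process(target=_sleep_forever)
    child.start()

    procs = [psutil.Process(child.pid)]
    for p in procs:
        try:
            p.terminate()          # polite SIGTERM first
        except psutil.NoSuchProcess:
            pass
    gone, alive = psutil.wait_procs(procs, timeout=1)
    for p in alive:                # escalate only if SIGTERM was ignored
        print("Cleaning up child:", p.pid)
        p.kill()
    print("not terminated in time:", alive)  # normally []
    child.join()
```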
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wait_children(self, timeout=None):\n self.join_children(timeout=timeout)\n return [x.get() for x in self.children]", "def join_children(self, timeout=None):\n\n time_start = time.time()\n child_count = len(self.children)\n\n for proc in self.children:\n\n # if a child thread has already exited, we don't need to wait on anything -\n # it's already good to go and can be considered joined. Otherwise we will likely\n # double call notify_stop which is a bad thing.\n if proc.proc.dead:\n continue\n\n time_elapsed = time.time() - time_start\n if timeout is not None:\n time_remaining = timeout - time_elapsed\n\n if time_remaining > 0:\n # The nice way; let it do cleanup\n try:\n proc.notify_stop()\n proc.join(time_remaining)\n except Exception:\n # not playing nice? just kill it.\n proc.stop()\n\n else:\n # Out of time. Cya, sucker\n proc.stop()\n else:\n proc.join()\n\n time_elapsed = time.time() - time_start\n #log.debug('Took %.2fs to shutdown %d child threads', time_elapsed, child_count)\n\n return time_elapsed", "def _KillChildren(cls, bg_tasks, log_level=logging.WARNING):\n logging.log(log_level, 'Killing tasks: %r', bg_tasks)\n siglist = (\n (signal.SIGXCPU, cls.SIGTERM_TIMEOUT),\n (signal.SIGTERM, cls.SIGKILL_TIMEOUT),\n (signal.SIGKILL, None),\n )\n first = True\n for sig, timeout in siglist:\n # Send signal to all tasks.\n for task in bg_tasks:\n task.Kill(sig, log_level, first)\n first = False\n\n # Wait for all tasks to exit, if requested.\n if timeout is None:\n for task in bg_tasks:\n task.join()\n task.Cleanup()\n break\n\n # Wait until timeout expires.\n end_time = time.time() + timeout\n while bg_tasks:\n time_left = end_time - time.time()\n if time_left <= 0:\n break\n task = bg_tasks[-1]\n task.join(time_left)\n if task.exitcode is not None:\n task.Cleanup()\n bg_tasks.pop()", "def collect_children(self):\n\t\twhile self.active_children:\n\t\t\tif len(self.active_children) < self.max_children:\n\t\t\t\toptions = os.WNOHANG\n\t\t\telse:\n\t\t\t\t# If the maximum number of children are already\n\t\t\t\t# running, block while waiting for a child to exit\n\t\t\t\toptions = 0\n\t\t\ttry:\n\t\t\t\tpid, status = os.waitpid(0, options)\n\t\t\texcept os.error:\n\t\t\t\tpid = None\n\t\t\tif not pid: break\n\t\t\tself.active_children.remove(pid)", "def killall(cleanup=lambda:None, wait_s=16):\n # TODO(infinity0): log this somewhere, maybe\n global _isTerminating, _CHILD_PROCS\n if _isTerminating: return\n _isTerminating = True\n # terminate all\n for proc in _CHILD_PROCS:\n if proc.poll() is None:\n proc.terminate()\n # wait and make sure they're dead\n for i in range(wait_s):\n _CHILD_PROCS = [proc for proc in _CHILD_PROCS\n if proc.poll() is None]\n if not _CHILD_PROCS: break\n time.sleep(1)\n # if still existing, kill them\n for proc in _CHILD_PROCS:\n if proc.poll() is None:\n proc.kill()\n time.sleep(0.5)\n # reap any zombies\n for proc in _CHILD_PROCS:\n proc.poll()\n cleanup()", "def reap_children(children, config, logger):\n to_delete = []\n current_time = time.time()\n for eventid, info in children.items():\n returncode = info['popen'].poll()\n if returncode is not None:\n logger.info('Reaped child for event %s (return code %d)' %\n (eventid, returncode))\n to_delete.append(eventid)\n continue\n #\n # Kill children who take too long\n #\n if info['start_time'] + config['max_process_time'] < current_time:\n logger.warning('Event %s taking too long, killing' % eventid)\n info['popen'].kill()\n info['popen'].wait()\n logger.warning('Reaped child for killed event %s' % 
eventid)\n to_delete.append(eventid)\n\n for eventid in to_delete:\n del children[eventid]\n\n return", "def destroy_children(self, parent_id: str) -> list:\n try:\n children = self.find_child_containers(parent_id)\n\n tasks = []\n for child in children:\n tasks += self.destroy(child.labels[LABEL_TASK_ID])\n\n return tasks\n\n except requests.exceptions.ConnectionError:\n raise ProviderError('Docker engine unavailable')", "def contained_children(timeout=1, assert_graceful=True) -> ContextManager[ChildManager]:\n try:\n yield child_manager\n finally:\n alive = kill_children(timeout)\n num_alive = len(alive)\n # Get current exception - if something was raised we should be raising\n # that.\n # XXX: Need to check use cases to see if there are any cases where\n # we are expecting an exception outside of the 'contained_children'\n # block.\n _, exc, _ = sys.exc_info()\n if assert_graceful and exc is None:\n assert not num_alive, f\"Unexpected children still alive: {alive}\"", "def shutdown_system():\n yield None\n active = active_children()\n for child in active:\n child.kill()", "def cleanup(self):\n log = logging.getLogger('mailman.runner')\n # Send SIGTERMs to all the child processes and wait for them all to\n # exit.\n for pid in self._kids:\n try:\n os.kill(pid, signal.SIGTERM)\n except OSError as error:\n if error.errno == errno.ESRCH:\n # The child has already exited.\n log.info('ESRCH on pid: %d', pid)\n # Wait for all the children to go away.\n while self._kids:\n try:\n pid, status = os.wait()\n self._kids.drop(pid)\n except OSError as error:\n if error.errno == errno.ECHILD:\n break\n elif error.errno == errno.EINTR:\n continue\n raise", "def kill_processes(self) -> None:\n for process in [p for p in self.processes if p.is_running()]:\n for child in process.children(recursive=True):\n if child.is_running():\n child.kill()\n\n process.kill()", "def _AbortJoin(self, timeout=None):\n for pid, process in self._processes_per_pid.items():\n logger.debug('Waiting for process: {0:s} (PID: {1:d}).'.format(\n process.name, pid))\n process.join(timeout=timeout)\n if not process.is_alive():\n logger.debug('Process {0:s} (PID: {1:d}) stopped.'.format(\n process.name, pid))", "def terminate_process_and_children(self, name):\n if name not in self.jobs:\n print(\"[%s] does not exist as a process!\", name)\n ppid = self.jobs[name]['process'].pid\n try:\n parent_proc = psutil.Process(ppid)\n except psutil.NoSuchProcess:\n return\n children = parent_proc.children(recursive=True)\n for proc in children:\n l.debug(proc)\n try:\n proc.send_signal(signal.SIGKILL)\n except:\n pass", "def kill_process_children(pid):\n root_process_path = \"/proc/{pid}/task/{pid}/children\".format(pid=pid)\n if not os.path.isfile(root_process_path):\n return\n with open(root_process_path) as children_list_file:\n children_list_pid = children_list_file.read().split()\n\n for child_pid in children_list_pid:\n children_proc_path = \"/proc/%s/task/%s/children\" % (\n child_pid,\n child_pid,\n )\n if not os.path.isfile(children_proc_path):\n continue\n with open(children_proc_path) as children_list_file_2:\n children_list_pid_2 = children_list_file_2.read().split()\n for _pid in children_list_pid_2:\n try:\n os.kill(int(_pid), signal.SIGTERM)\n except ProcessLookupError:\n continue\n try:\n os.kill(int(child_pid), signal.SIGTERM)\n except ProcessLookupError:\n continue", "def kill_processes(self):\n for proc in self.processes:\n if proc['proc'].poll() is not None:\n proc['proc'].terminate()", "def kill_all():\n 
compose_kill_all()", "def shutdown(self, timeout=30.0):\n self._shutting_down = True\n self._shutdown_event.set(True)\n unset = shutdown_or_die(timeout) # Failsafe in case the following doesn't work\n elapsed = self.join_children(timeout)\n #self.stop()\n\n unset()\n return elapsed", "def terminateAll(self):\n with self.__queueLock:\n for queue in [self.__queue, self.__clientQueue]:\n queue.clear()\n\n for runList in [self.__running, self.__clientRunning]:\n unfinishedRuns = [run for run in runList if run is not None]\n for run in unfinishedRuns:\n run.kill()", "def kill(pids):\n for pid in pids:\n process = psutil.Process(pid)\n for proc in process.children(recursive=True):\n proc.kill()\n process.kill()\n return", "def quit_driver_and_reap_children(driver):\n driver.quit()\n try:\n pid = True\n while pid:\n pid = os.waitpid(-1, os.WNOHANG)\n except ChildProcessError:\n pass", "def kill_process_family(pid, exit_code=None, timeout=None):\n\n if timeout is not None:\n end_time = time.time() + timeout\n else:\n end_time = None\n while True:\n children = get_child_pids(pid)\n if not children:\n break\n if end_time is not None and time.time() >= end_time:\n raise TimeoutError(\"Unable to kill child processes.\")\n for child in children:\n kill_process_family(child, exit_code)\n kill_process(pid, exit_code)", "def cleanup_loop(self, timeout=None):\n\n if timeout is None:\n timeout_string = \"no\"\n timeout = -1\n else:\n timeout_string = \"{0} s\".format(timeout)\n\n logger.info(\"Cleaning up all futures with {0} timeout\"\n .format(timeout_string))\n\n t0 = time.Time.now().unix\n badstatuslist = ['cancelled', 'error', 'lost']\n while len(self.futures):\n elapsedtime = time.Time.now().unix - t0\n if (elapsedtime > timeout) and (timeout >= 0):\n badstatuslist += ['pending']\n self.cleanup(badstatuslist=badstatuslist)\n sleep(10)", "def get_children(ppid):\n\n pid_dct = {}\n for proc in build_process_list():\n proc[\"_children\"] = []\n pid_dct[proc[\"pid\"]] = proc\n\n # fill in children array\n for pid in list(pid_dct.keys()):\n parent_pid = pid_dct[pid][\"parent_pid\"]\n\n if parent_pid in pid_dct:\n pid_dct[parent_pid][\"_children\"].append(pid)\n\n # now just walk down the tree\n if ppid is None or ppid not in pid_dct:\n # process has quit, we exit\n return []\n\n accepted = []\n to_accept = collections.deque([ppid, ])\n \n while to_accept:\n head = pid_dct[to_accept.popleft()]\n\n # do not include the monitoring pid\n if head[\"pid\"] != ppid:\n accepted.append(head)\n\n to_accept.extend(head.get(\"_children\", []))\n head[\"children\"] = head[\"_children\"]\n del head[\"_children\"]\n\n # deleting children breaks infinite loops\n # but Dima, can a process tree contain a loop? 
yes - via race-condition in reading procfs\n\n return accepted", "def _terminateAll(self):\n\n # Termination of all processes\n try :\n for process in self.processes:\n process.terminate()\n except AttributeError:\n pass\n\n return", "def kill_manager(self) -> None:\n\n for p in self.process_list:\n p.terminate()\n # NOTE: Seems Python does not appreciate if close is called too quickly.\n sleep(0.5)\n # Release the resources held by the Proess (Python 3.7 and up)\n p.close()", "def force_kill_all(self):\n assert self._running\n # Make sure _active_runs is fully populated by start thread.\n self._pipe_thread.join()\n with self._state_lock:\n if not self._active_runs:\n # already complete, don't kill\n return\n self._force_killed = True\n\n for run in self._active_runs:\n run.kill()", "def _kill_running_processes(self):\n # Kill any rouge processes that are still running.\n with _thread_lock:\n killed = []\n for pid in self._pids:\n try:\n os.kill(pid, _KILLED_BY_ANYPYTOOLS)\n killed.append(str(pid))\n except:\n pass\n self._pids.clear()", "def kill_child_processes(parent_pid, sig=signal.SIGTERM):\n try:\n parent = psutil.Process(parent_pid)\n except psutil.NoSuchProcess:\n return\n children = parent.children(recursive=True)\n for process in children:\n try:\n process.send_signal(sig)\n except psutil.NoSuchProcess:\n return", "def abort(self):\n if self.processes is None:\n return\n\n for p in self.processes:\n if p.poll() is None:\n p.terminate()\n try:\n p.wait(timeout=2)\n except subprocess.TimeoutExpired:\n p.kill()\n # Don't catch the TimeoutExpired exception as\n # wait should return immediately after the process\n # was killed. If this wait times out just let\n # the exception terminate the execution as\n # something has serriously gone wrong if the\\\n # process is still running.\n p.wait(timeout=5)", "def kill_all(self):\n #cancel all events in queue and raise RuntimeError if unsuccessful\n for event in self.sch.queue:\n try:\n self.sch.cancel(event[0])\n except RuntimeError:\n self.log.exception(\"Error killing top level event: %s\", self.sch.queue[0])\n #Print success if no RuntimeError\n if self.sch.empty():\n self.log.info(\"Successfully killed all events\")" ]
[ "0.69803846", "0.69150037", "0.681604", "0.6730367", "0.66708744", "0.65329", "0.6406285", "0.64052707", "0.62206906", "0.61242473", "0.610742", "0.60288936", "0.59727114", "0.58997834", "0.58517337", "0.58473897", "0.5846386", "0.58226556", "0.5820407", "0.579071", "0.5779552", "0.57209665", "0.56953245", "0.55701774", "0.5533987", "0.55329186", "0.5532366", "0.55137295", "0.55114585", "0.54997784" ]
0.83679694
0
Returns the total for the given tenant.
def get_total(self, **kwargs):
    LOG.warning('WARNING: /v1/report/total/ endpoint is deprecated, '
                'please use /v1/report/summary instead.')
    authorized_args = [
        'begin', 'end', 'tenant_id', 'service', 'all_tenants']
    url = self.get_url('total', kwargs, authorized_args)
    return self.api_client.get(url).json()
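The snippet delegates to a `get_url` helper that evidently keeps only the allow-listed keyword arguments and encodes them into a query string. The helper below is a hypothetical reconstruction of that behavior, not the real client's code; the path layout and the handling of unset values are assumptions.

```python
from urllib.parse import urlencode

def get_url(endpoint, kwargs, authorized_args):
    # Drop anything not on the allow-list (and unset values) before encoding.
    params = {k: v for k, v in kwargs.items()
              if k in authorized_args and v is not None}
    query = urlencode(params)
    return f"{endpoint}?{query}" if query else endpoint

print(get_url("total",
              {"begin": "2024-01-01", "tenant_id": "t1", "bogus": 1},
              ["begin", "end", "tenant_id", "service", "all_tenants"]))
# -> total?begin=2024-01-01&tenant_id=t1
```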
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def table_total(self):\n total = 0.00\n\n for customer in self.customers:\n total = total + customer.get_total()\n\n return total", "def get_total_paid(self):\n return sum(self.paid)", "def get_total(self):\n\n base_price = 5\n total = (1 + int(self.tax)) * int(self.qty) * base_price\n\n return total", "def GetTotal(self):\n return(self.total)", "def get_tenant_usage(self, tenant_id):\n return self._get(_quota.TenantUsage, tenant_id)", "async def get_total(self, payload: str = None):\n if payload is None:\n raise RequiredPayload(\"You must provide at least one payload.\")\n data = await self._send_payload(payload)\n return data['total']", "def _get_sum_total(\n self, cr, uid, brw, operand, number_month=None,\n one_per=False, bag=None, context=None):\n context = context and dict(context) or {}\n res = 0\n\n # If the report is two or twelve columns, will choose the field needed\n # to make the sum\n if context.get('whole_fy', False) or one_per:\n field_name = 'ytd'\n else:\n field_name = 'period_%s' % str(number_month)\n\n # It takes the sum of the total_ids & operand_ids\n for ttt in getattr(brw, operand):\n res += bag[ttt.id].get(field_name, 0.0)\n return res", "def get_total_trans(all_customers_data, trans_column):\n return all_customers_data.select(trans_column).distinct().count()", "def get_total(self):\n\n base_price = self.get_base_price()\n\n if self.species == \"Christmas\":\n base_price = base_price * 1.5\n\n total = (1 + self.tax) * self.qty * base_price\n\n return total", "def get_transaction_totals(self, params=None):\n return self.get(f\"{self.gateway_path}/totals\", params)", "def sum_transactions(profile):\n total = 0\n try:\n transactions = profile['$properties']['$transactions']\n for t in transactions:\n total = total + t['$amount']\n except KeyError:\n pass\n return {'Revenue': total}", "def get_balance(self, acc: Account) -> Decimal:\n return sum_queryset(self.get_entries(acc))", "def get_total_spent() -> int:\n return base.Balances(balance_of).get(bank_account)", "def get_total_redeem(self):\n total = 0\n for redeem in self.get_redeems():\n total += redeem.get_total()\n return total", "def get_total(self):\n\n base_price = self.get_base_price()\n if self.species == \"christmas melon\":\n base_price = base_price * 1.5\n\n total = ((1 + self.tax) * self.qty * base_price)\n\n return total", "def amount(self):\n return self.subtotal + self.tax_subtotal + self.shipping", "def get_totals(self):\n return self._get('app_totals')", "def total(self):\n if self.dynamic:\n self._update_db_obj()\n return self._db_obj.total", "def get_total(self):\n\n base_price=5\n if self.species == \"Christmas\":\n base_price=1.5*base_price\n \n total = (1 + self.tax) * self.qty * base_price\n\n if self.order_type==\"international\" and self.qty<10:\n total+=3\n\n return total", "def total_spent(self):\n total_sum = Order.objects.filter(\n email=self.email).aggregate(\n Sum('total_price')\n ).get('total_price__sum')\n return round(total_sum, 4) if total_sum else 0", "def _total_d(self):\n debit = 0.0\n for l in self.data:\n debit += l['debit']\n self.t_credit += l['credit']\n self.t_balance += l['balance']\n return debit", "def get_total(self):\n\n base_price = 5\n \n if self.species == \"Christmas melon\":\n base_price = base_price * 1.5 \n\n total = (1 + self.tax) * self.qty * base_price \n\n if self.order_type == \"international\" and self.qty>10:\n total += 3\n\n\n return total", "def total(self, desired_period: int = 12):\n self._trigger_gather()\n result = Decimal(0)\n for item in 
self.elements:\n result += item.income.amount(desired_period)\n return(Decimal(result))", "def total(self, desired_period: int = 12):\n self._trigger_gather()\n result = Decimal(0)\n for item in self.elements:\n result += item.income.amount(desired_period)\n return(Decimal(result))", "def total(self) -> float:\n return self._total", "def get_total(self):\n\n self.base_price = self.get_base_price()\n\n if self.species == \"christmas melon\":\n self.base_price = self.base_price * 1.5\n\n total = (1 + self.tax) * self.qty * self.base_price\n return total", "def total_spent(self):\n\n approved_jobs = self.approved_jobs()\n expenses = self.expenses()\n\n total = 0\n for job in approved_jobs:\n total += job.total_paid\n\n for expense in expenses:\n total += expense.amount\n\n return float(round(total, 2))", "def get_total(self):\n # method on the class DomesticMelonOrder\n base_price = 5\n\n if self.species == \"Christmas melons\":\n base_price = base_price * 1.5\n\n total = (1 + self.tax) * self.qty * base_price\n\n return total", "def compute_total_paid(self):\n total = 0.0\n for line in self.loan_ids:\n if line.pay:\n total += line.amount\n self.total_paid = total", "def total_paid(self) -> Decimal:\n return self.total_principal + self.total_interest" ]
[ "0.62531126", "0.6162233", "0.61108077", "0.60956305", "0.6016143", "0.5912477", "0.5870869", "0.5847878", "0.5786993", "0.57543814", "0.57539463", "0.5750175", "0.57361287", "0.57264996", "0.5717916", "0.5709568", "0.5695177", "0.5694699", "0.5693841", "0.56890893", "0.5667125", "0.5651598", "0.56277114", "0.56277114", "0.5623779", "0.560435", "0.5599567", "0.5587492", "0.55768496", "0.55704266" ]
0.6835537
0
Sets the ssc id (default is sscmainnet1)
def set_id(self, ssc_id):
    self.ssc_id = ssc_id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def srs_id(self, srs_id):\n self.logger.debug(\"In 'srs_id' setter.\")\n\n if len(srs_id) < 3:\n raise Exception(\"SRS ID is too short, must be more than 3 characters.\")\n\n self._srs_id = srs_id", "def nucleus_security_id(self, nucleus_security_id):\n\n self._nucleus_security_id = nucleus_security_id", "def sso_id(self, sso_id):\n\n self._sso_id = sso_id", "def set_id(zsocket):\n identity = f\"{randint(0, 0x10000):04x}-{randint(0, 0x10000):04x}\"\n zsocket.setsockopt_string(zmq.IDENTITY, identity)", "def set_id(self, uid):\n self.nccl_id = uid\n return self.nccl_id", "def set_network_id(self, sNetworkId):\n\t\tcall_sdk_function('PrlVirtNet_SetNetworkId', self.handle, sNetworkId)", "def vcn_id(self, vcn_id):\n self._vcn_id = vcn_id", "def sid(self, sid):\n self._sid = sid", "def cisid(self):\n return self._cisid", "def _set_id(self, value):\n pass", "def slb_id(self) -> str:\n return pulumi.get(self, \"slb_id\")", "def set_cid(self, cid):\n self.__cid = cid", "def set_cid(self, cid):\n self.__cid = cid", "def setSnr(tel, snr):\n simuConfig[\"SNRS\"] = snr", "def set_id(self, id):\n self.id = id\n print(\"self id = \" + str(self.id))", "def pcie_id(self, pcie_id: int):\r\n self._pcie_id = pcie_id", "def id(self): \n if self.cloudnet:\n return self.cloudnet.id\n else:\n return None", "def iccid(self, iccid):\n\n self._iccid = iccid", "def stp_id(self, stp_id):\n\n self._stp_id = stp_id", "def setId(self, *args):\n return _libsbml.Port_setId(self, *args)", "def _set_id(self):\n raise NotImplementedError()", "def _setintermediary_institution_56C(self, val):\n self.swift_obj.IntermediaryInstitution_C = val\n self.swift_obj.IntermediaryInstitution_C.swiftTag = '56C'", "def setId(self, *args):\n return _libsbml.Compartment_setId(self, *args)", "def set_sessid(sessid):\n filename = path.join(path.expanduser('~'), '.profrc')\n config = configparser.ConfigParser()\n config.read(filename)\n config.set('DEFAULT', 'Session', sessid)\n with open(filename, 'w') as configfile:\n print(\"write a new sessid\")\n config.write(configfile)", "def set_SiteID(self, value):\n super(GetCategoriesInputSet, self)._set_input('SiteID', value)", "def psid(self, psid):\n\n self._psid = psid", "def sportsbook_id(self, sportsbook_id):\n\n self._sportsbook_id = sportsbook_id", "def set_course_id(self, course_id):\n self.ID = course_id", "def idToMQTTClientID(id:str, isCSE:bool=True) -> str:\n\treturn f'{\"C::\" if isCSE else \"A::\"}{id.lstrip(\"/\")}'", "def set_client_id(self):\n data = self.receive() # deserialized data\n client_id = data['clientid'] # extracts client id from data\n self.client_id = client_id # sets the client id to this client\n print(\"Successfully connected to server: \" + self.userInfo['host'] + \" / \" + str(self.userInfo['port']))\n print(\"Your client info is:\\n\" + \"Client Name: \" + self.userInfo['name'] + \"\\nClient ID: \" + str(client_id))" ]
[ "0.6161884", "0.6093355", "0.6029974", "0.6004457", "0.5978946", "0.5875039", "0.58659774", "0.5729421", "0.56342816", "0.56043476", "0.55065644", "0.55062157", "0.55062157", "0.5500351", "0.54839903", "0.5438406", "0.5390628", "0.53811723", "0.53690624", "0.5362894", "0.5361214", "0.5350322", "0.53434384", "0.53271437", "0.53215814", "0.5282866", "0.52812934", "0.5275254", "0.5272448", "0.5251385" ]
0.8160854
0
Changes the wallet account
def change_account(self, account):
    check_account = Account(account, steem_instance=self.steem)
    self.account = check_account["name"]
    self.refresh()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setaccount(self, vergeaddress, account):\n return self.proxy.setaccount(vergeaddress, account)", "def set_account(self, account: str):\n ret = self._call_txtrader_api('set_account', {'account': account})\n if ret:\n self.account = account\n return ret", "def put_account(self, account):\n \n pass", "def account(self, account: str):\n self._account = account", "def update(self, account):\n model = models.load('Account', account)\n return self.client.update_account(model=model)", "def account(self, account):\n\n self._account = account", "def account(self, account):\n\n self._account = account", "def account(self, account):\n\n self._account = account", "def account(self, account):\n\n self._account = account", "def change_password(change_account):\n change_data(change_account, changed_data='password')", "def setAccount(self, account_id):\n self.data_struct['_setAccount'] = account_id", "def save_accounts(account):\n account.save_account()", "def save_accounts(account):\n account.save_account()", "def save(self, *args, **kwargs):\n wallet = self.wallet.withdraw(self.value)\n super(Payment, self).save(*args, **kwargs)", "def onAccountUpdate(self, data):\n pass", "def save_account(self):\n Credential.account_list.append(self)", "def account_amount(self, account_amount):\n\n self._account_amount = account_amount", "def cmd_account_change_settings(client, args):\n fields = data_fields(args, client.allowed_account_fields)\n account_settings = client.change_account_settings(args.user, fields)\n generate_output({'account_settings': account_settings})", "def account_balance(self, account_balance):\n\n self._account_balance = account_balance", "def account_balance(self, account_balance):\n\n self._account_balance = account_balance", "def __update_accounts(self):\n\t\tfor acct in self.wallet:\n\t\t\tif len(get_unspent(acct[\"address\"], self.testnet))!=0:\n\t\t\t\tacct[\"status\"] = \"in use\"\n\t\t\telse:\n\t\t\t\tspent = get_spent(acct[\"address\"], self.testnet)\n\t\t\t\tconfirm = (s[\"confirmations\"] >= 6 for s in spent)\n\t\t\t\tif len(spent) > 0 and all(confirm):\n\t\t\t\t\tacct[\"status\"] = \"used\"\n\t\t\t\telif len(spent) > 0:\n\t\t\t\t\tacct[\"status\"] = \"in use\"\n\t\tself.header[\"LAST_UPDATE_TIME\"] = str(round(time.time()))\n\t\toutput = [self.header, *self.wallet]\n\t\twith open(self.filepath, 'w+') as f:\n\t\t\tjson.dump(output, f)", "def update_account(self, account, new_password, include_address=True, include_id=True):\n if account not in self.accounts:\n raise ValueError('Account not managed by account service')\n if account.locked:\n raise ValueError('Cannot update locked account')\n if account.path is None:\n raise ValueError('Account not stored on disk')\n assert os.path.isabs(account.path)\n\n # create new account\n log.debug('creating new account')\n new_account = Account.new(new_password, key=account.privkey, uuid=account.uuid)\n new_account.path = account.path\n\n # generate unique path and move old keystore file there\n backup_path = account.path + '~'\n i = 1\n while os.path.exists(backup_path):\n backup_path = backup_path[:backup_path.rfind('~') + 1] + str(i)\n i += 1\n assert not os.path.exists(backup_path)\n log.info('moving old keystore file to backup location', **{'from': account.path,\n 'to': backup_path})\n try:\n shutil.move(account.path, backup_path)\n except:\n log.error('could not backup keystore, stopping account update',\n **{'from': account.path, 'to': backup_path})\n raise\n\n assert os.path.exists(backup_path)\n assert not 
os.path.exists(new_account.path)\n account.path = backup_path\n\n # remove old account from manager (not from disk yet) and add new account\n self.accounts.remove(account)\n assert account not in self.accounts\n try:\n self.add_account(new_account, include_address, include_id)\n except:\n log.error('adding new account failed, recovering from backup')\n shutil.move(backup_path, new_account.path)\n self.accounts.append(account)\n self.accounts.sort(key=lambda account: account.path)\n raise\n\n assert os.path.exists(new_account.path)\n assert new_account in self.accounts\n\n # everything was successful (we are still here), so delete old keystore file\n log.info('deleting backup of old keystore', path=backup_path)\n try:\n os.remove(backup_path)\n except:\n log.error('failed to delete no longer needed backup of old keystore',\n path=account.path)\n raise\n\n # set members of account to values of new_account\n account.keystore = new_account.keystore\n account.path = new_account.path\n assert account.__dict__ == new_account.__dict__\n # replace new_account by old account in account list\n self.accounts.append(account)\n self.accounts.remove(new_account)\n self.accounts.sort(key=lambda account: account.path)\n log.debug('account update successful')", "def updateAccountBalance(self):\n account = self.tdameritrade.getAccount()\n\n liquidation_value = float(\n account[\"securitiesAccount\"][\"currentBalances\"][\"liquidationValue\"])\n\n available_for_trading = float(\n account[\"securitiesAccount\"][\"currentBalances\"][\"cashAvailableForTrading\"])\n\n self.users.update_one({\"Name\": self.user[\"Name\"]}, {\"$set\": {\n f\"Accounts.{self.account_id}.Account_Balance\": liquidation_value, f\"Accounts.{self.account_id}.Available_For_Trading\": available_for_trading}})", "def update_account_data(self):\n self.ensure_one()\n getattr(self, '%s_update_account_data' % self.provider, lambda: None)()", "def set_balance(self, user, to):\n to_exec = \"UPDATE users SET balance = %s WHERE snowflake_pk = %s\"\n self.__cursor.execute(to_exec, (to, user.id,))\n self.__connection.commit()", "def test_client_bank_account_update(self):\n pass", "def account_name(self, account_name):\n\n self._account_name = account_name", "def account_name(self, account_name):\n\n self._account_name = account_name", "def withdraw_money():\n print(\"\\n\")\n print(messages.account_credentials)\n u_id = pyip.inputInt(\"Your Id: \", greaterThan=0)\n password = pyip.inputPassword(\"Your Password: \")\n\n credentials = {\"id\":u_id, \"password\":password}\n result = BankOperationsBackend.withdraw_money(credentials)\n start_again() if result else BankOperationsUi.withdraw_money()", "def deposit_money():\n print(\"\\n\")\n print(messages.account_credentials)\n u_id = pyip.inputInt(\"Your Id: \", greaterThan=0)\n password = pyip.inputPassword(\"Your Password: \")\n\n credentials = {\"id\":u_id, \"password\":password}\n result = BankOperationsBackend.deposit_money(credentials)\n start_again() if result else BankOperationsUi.deposit_money()" ]
[ "0.71350896", "0.7134241", "0.6935623", "0.6772499", "0.67456084", "0.6619017", "0.6619017", "0.6619017", "0.6619017", "0.6570755", "0.6416708", "0.6350566", "0.6350566", "0.63418996", "0.63365614", "0.6236376", "0.62000555", "0.61877316", "0.61869353", "0.61869353", "0.61117", "0.6090022", "0.60401297", "0.6025502", "0.60042876", "0.6000379", "0.59565043", "0.59565043", "0.5954411", "0.5937062" ]
0.7437371
0
Transfer a token to another account.
def transfer(self, to, amount, symbol, memo=""): token_in_wallet = self.get_token(symbol) if token_in_wallet is None: raise TokenNotInWallet("%s is not in wallet." % symbol) if float(token_in_wallet["balance"]) < float(amount): raise InsufficientTokenAmount("Only %.3f in wallet" % float(token_in_wallet["balance"])) token = Token(symbol, api=self.api) quant_amount = token.quantize(amount) if quant_amount <= decimal.Decimal("0"): raise InvalidTokenAmount("Amount to transfer is below token precision of %d" % token["precision"]) check_to = Account(to, steem_instance=self.steem) contract_payload = {"symbol":symbol.upper(),"to":to,"quantity":str(quant_amount),"memo":memo} json_data = {"contractName":"tokens","contractAction":"transfer", "contractPayload":contract_payload} tx = self.steem.custom_json(self.ssc_id, json_data, required_auths=[self.account]) return tx
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def token_transfer(ctx, amount, to):\n ocean = ctx.obj['ocean']\n account = ocean.account\n ocean.tokens.transfer(to, int(amount), account)\n echo({\n 'amount': amount,\n 'from': {\n 'address': account.address,\n 'balance': Token.get_instance().get_token_balance(account.address),\n },\n 'to': {\n 'address': to,\n 'balance': Token.get_instance().get_token_balance(to),\n }\n })", "def make_transfer(cls, token: str, from_: str, to_: str, amount: int):\n src_wallet = Wallet.select( # pragma: no branch\n lambda w: w.address == from_ and w.user.token == token\n ).first()\n if not src_wallet:\n raise WalletNotFound(f'Wallet {from_} not found or is not yours')\n tgt_wallet = Wallet.select( # pragma: no branch\n lambda w: w.address == to_\n ).first()\n if not tgt_wallet:\n raise WalletNotFound(f'Wallet {to_} not found')\n fee = None\n to_withdraw = Decimal(str(amount))\n if src_wallet.user != tgt_wallet.user:\n fee = Decimal('0.015') * amount\n to_withdraw = amount + fee\n if src_wallet.balance < to_withdraw:\n raise NotEnoughFunds()\n src_wallet.balance = src_wallet.balance - to_withdraw\n tgt_wallet.balance = tgt_wallet.balance + amount\n extra = {}\n if fee:\n collect_transaction_fee(fee)\n extra['fee'] = fee\n cls(src_wallet=src_wallet, tgt_wallet=tgt_wallet, amount=amount, **extra)", "def test_transfer(chain, token, shareholder1, boogieman):\n\n set_state(chain, token, canTransferFlag=True)\n token.transact({\"from\": shareholder1}).transfer(boogieman, 4000)\n assert token.call().balanceOf(shareholder1) == 0\n assert token.call().balanceOf(boogieman) == 4000", "async def transfer(self, code: str, amount, fromAccount, toAccount, params={}):\n # transferring between derivatives wallet and regular wallet is not documented in their API\n # however we support it in CCXT(from just looking at web inspector)\n await self.load_markets()\n accountsByType = self.safe_value(self.options, 'v2AccountsByType', {})\n fromId = self.safe_string(accountsByType, fromAccount)\n if fromId is None:\n keys = list(accountsByType.keys())\n raise ArgumentsRequired(self.id + ' transfer() fromAccount must be one of ' + ', '.join(keys))\n toId = self.safe_string(accountsByType, toAccount)\n if toId is None:\n keys = list(accountsByType.keys())\n raise ArgumentsRequired(self.id + ' transfer() toAccount must be one of ' + ', '.join(keys))\n currency = self.currency(code)\n fromCurrencyId = self.convert_derivatives_id(currency, fromAccount)\n toCurrencyId = self.convert_derivatives_id(currency, toAccount)\n requestedAmount = self.currency_to_precision(code, amount)\n # self request is slightly different from v1 fromAccount -> from\n request = {\n 'amount': requestedAmount,\n 'currency': fromCurrencyId,\n 'currency_to': toCurrencyId,\n 'from': fromId,\n 'to': toId,\n }\n response = await self.privatePostAuthWTransfer(self.extend(request, params))\n #\n # [\n # 1616451183763,\n # \"acc_tf\",\n # null,\n # null,\n # [\n # 1616451183763,\n # \"exchange\",\n # \"margin\",\n # null,\n # \"UST\",\n # \"UST\",\n # null,\n # 1\n # ],\n # null,\n # \"SUCCESS\",\n # \"1.0 Tether USDt transfered from Exchange to Margin\"\n # ]\n #\n error = self.safe_string(response, 0)\n if error == 'error':\n message = self.safe_string(response, 2, '')\n # same message v1\n self.throw_exactly_matched_exception(self.exceptions['exact'], message, self.id + ' ' + message)\n raise ExchangeError(self.id + ' ' + message)\n return self.parse_transfer(response, currency)", "def send_and_receive(self, token):\n self._send_token(token)\n return 
self._receive_token()", "async def transfer(self, ctx: commands.Context, to: discord.Member, amount: int):\r\n from_ = ctx.author\r\n currency = await bank.get_currency_name(ctx.guild)\r\n\r\n try:\r\n await bank.transfer_credits(from_, to, amount)\r\n except (ValueError, errors.BalanceTooHigh) as e:\r\n return await ctx.send(str(e))\r\n\r\n await ctx.send(\r\n _(\"{user} transferred {num} {currency} to {other_user}\").format(\r\n user=from_.display_name,\r\n num=humanize_number(amount),\r\n currency=currency,\r\n other_user=to.display_name,\r\n )\r\n )", "def rule_transfer(self, st_acct, st_acct2, st_idx):\n if self.active_token_ids.get(st_acct):\n # choose from the caller's valid NFT token IDs, if there are any\n idx = int(st_idx * len(self.active_token_ids[st_acct]))\n token_id = self.active_token_ids[st_acct][idx]\n self.swap.transferFrom(st_acct, st_acct2, token_id, {\"from\": st_acct})\n self.active_token_ids[st_acct].remove(token_id)\n self.active_token_ids.setdefault(st_acct2, []).append(token_id)\n else:\n # if the caller does not own any NFTs, choose from any token ID\n token_ids = self._all_token_ids()\n idx = int(st_idx * len(token_ids))\n token_id = token_ids[idx]\n with brownie.reverts():\n self.swap.transferFrom(st_acct, st_acct2, token_id, {\"from\": st_acct})", "def transfer(self, amount):\n if not self.destination.can_accept_credit(amount):\n raise ValueError(\"Destination account can not accept a credit of {0}\".format(amount))\n self.source.debit(amount)\n self.destination.credit(amount)", "def transfer_to(self, amount, another_user):\n if self.__balance >= amount and self.__is_logged_in:\n self.__balance = float(Decimal(str(self.__balance - amount)))\n another_user.deposit(amount)\n self.register_operation(self.ACTIONS['TRANSFERING'], amount, another_user)\n return True\n\n return False", "def transfer(self, giver, receiver, amount):\n \n # Try to withdraw the full amount from the giver.\n # This will be either the amount or the giver's full balance.\n # Whatever the result, deposit it to the receiver's account.\n receiver.account.deposit(\n giver.account.withdraw(amount)\n )", "def exchange_tokens(self):\n raise NotImplementedError()", "def transfer(self, request, *args, **kwargs):\n origin_account = self.get_object()\n destiny_account = request.data.get(\"id_conta\", None)\n amount = request.data.get(\"valor\", None)\n account_serializer = self.get_serializer()\n\n try:\n transfer = account_serializer.transfer(origin_account, destiny_account, amount)\n except ObjectDoesNotExist as obj:\n return Response({\"detail\": \"Could not transfer the amount: Destiny account does not exist.\",\n \"status_code\": status.HTTP_404_NOT_FOUND}, status=status.HTTP_404_NOT_FOUND)\n except ValueError as ve:\n return Response({\"detail\": \"Could not transfer the amount: {0}.\".format(ve),\n \"status_code\": status.HTTP_400_BAD_REQUEST}, status=status.HTTP_400_BAD_REQUEST)\n\n return Response(transfer)", "def transfer(self, origin, dest, amount):\n or_elem = 0\n de_elem = 0\n for elem in self.account:\n if origin == elem.id or origin == elem.name:\n or_elem = elem\n if dest == elem.id or dest == elem.name:\n de_elem = elem\n if or_elem == 0 or de_elem == 0:\n print(\"Couldn't find account.\")\n return False\n if self.corrupted(or_elem) or self.corrupted(de_elem):\n print(\"Corrupted account.\")\n return False\n if amount <= 0 or or_elem.value < amount:\n print(\"Invalid amount.\")\n return False\n or_elem.transfer(-amount)\n de_elem.transfer(amount)\n print(\"Transfer successful.\")\n 
return True", "def exchange_token(self, code):\n access_token_url = OAUTH_ROOT + '/access_token'\n params = {\n 'client_id': self.client_id,\n 'client_secret': self.client_secret,\n 'redirect_uri': self.redirect_uri,\n 'code': code,\n }\n resp = requests.get(access_token_url, params=params)\n if not resp.ok:\n raise MixcloudOauthError(\"Could not get access token.\")\n return resp.json()['access_token']", "def test_transfer_bypass_token(chain, token, carrier, shareholder1, boogieman):\n\n set_state(chain, token, canTransferFlag=True)\n with pytest.raises(ValueError):\n # This call must always come from token contract\n carrier.transact().transfer(shareholder1, boogieman, True)", "def _update_token(token):\n session.token = token", "def token_id_to(self, token_id_to):\n\n self._token_id_to = token_id_to", "def create_token_account(sender, created, instance, **kwargs):\n list_of_models = (\"Person\", \"Company\")\n if sender.__name__ in list_of_models:\n if created:\n # create user's token and reimbursement accounts.\n create_limited_credit_account(\n user=instance, account_type=django_settings.TOKEN_ACCOUNT\n )\n create_limited_credit_account(\n user=instance, account_type=django_settings.REIMBURSEMENT_ACCOUNT\n )\n action.send(instance, verb=u\"joined stageroute.\")\n UserPreference.objects.create(user=instance)\n else:\n if not Action.objects.filter(\n target_object_id=instance.id,\n description=\"Updated Profile\",\n timestamp__gte=datetime.now(tz=timezone.utc) - timedelta(minutes=2),\n ).exists():\n action.send(\n instance,\n verb=u\"updated the profile.\",\n description=u\"Updated Profile\",\n )\n\n if instance.is_profile_approved and instance.preferences.email_notification:\n context = {\"first_name\": instance.first_name}\n ProfileApprovedEmailNotification(instance.email, context=context).send()", "def save_token(self, token, request):\n client = request.client\n if request.user:\n user_id = request.user.pk\n else:\n user_id = client.user_id\n item = self.token_model(\n client_id=client.client_id,\n user_id=user_id,\n **token\n )\n item.save()\n return item", "def transaction_to_player(self, origin, amount, receiver):\r\n print(\"Transfering\", amount, origin, \"->\", receiver)\r\n try:\r\n origin.transfer(-amount)\r\n except:\r\n # the origin will became bankrupt, so declare it and remove it from the game\r\n origin.set_bankrupt(self, receiver)\r\n self.remove_player(origin)\r\n return\r\n\r\n try:\r\n receiver.transfer(amount)\r\n except:\r\n # the receiver will became bankrupt, so declare it\r\n receiver.set_bankrupt(self, origin)\r\n self.remove_player(receiver)", "def use_account(self, token, url=QE_URL, **kwargs):\n credentials = Credentials(token, url, **kwargs)\n\n self._append_account(credentials)", "def save_bearer_token(self, token, request, *args, **kwargs):\n log.debug('Save bearer token %r', token)\n self._tokensetter(token, request, *args, **kwargs)\n return request.client.default_redirect_uri", "def set_transfer(to, amount):\n balance = ebb.balanceOf(to)\n ebb.approve(accounts[0], balance, {\"from\": to})\n ebb.transferFrom(to, accounts[0], balance, {\"from\": _cfg.OWNER})\n assert ebb.balanceOf(to) == 0\n ebb.transfer(to, Cent(amount), {\"from\": _cfg.OWNER})", "def test_access_account_info_with_token(self):\n\n print(\" --------------------------- Test 6 - Access Account Information ----------------------------\")\n\n user_id = uuid.uuid4()\n password = \"my-precious\"\n currency = \"EUR\"\n\n register_user(user_id, password, currency)\n response = 
login_user(user_id, password)\n\n self.assertTrue(response.json()['message']['auth_token'])\n\n auth_token = response.json()['message']['auth_token']\n headers = {'Content-Type': \"application/json\", 'Authorization': auth_token}\n\n data = \"{\\\"amount\\\" : 20.0}\"\n requests.post('http://192.168.85-208/account/amount', headers=headers, data=data)\n requests.post('http://192.168.85-208/account/amount', headers=headers, data=data)\n requests.post('http://192.168.85-208/account/amount', headers=headers, data=data)\n\n # Get the buyer account information to check if the money comes in\n response = requests.get('http://0.0.0.0:5000/account', headers=headers)\n print(json.dumps(response.json()['message'], indent=4))", "def exchange_code(self, code):\n data = {\n 'client_id': self.client_id,\n 'client_secret': self.client_secret,\n 'grant_type': 'authorization_code',\n 'code': code,\n 'redirect_uri': self.redirect_uri,\n 'scope': 'identify'\n }\n\n headers = {\n 'Content-Type': 'application/x-www-form-urlencoded'\n }\n\n access_token = self.http_client.post(\n f'{self.api_endpoint}/oauth2/token', headers, data=data)\n return access_token", "def transfer_money(request):\n source = Account.objects.get(pk=int(request.POST.get('source-id', False)))\n destination = Account.objects.get(pk=int(request.POST.get('destination-id', False)))\n amount = float(request.POST.get('amount', False))\n enough_cash = source.available_cash >= amount\n if enough_cash:\n source.available_cash -= amount\n source.save()\n destination.available_cash += amount\n destination.save()\n messages.success(request, 'OK 200: Transfer successfully executed.')\n else:\n messages.error(request, f'Error 400: Tried to transfer {amount} from {source.name}, but only had {source.available_cash} available.')\n \n transaction = Transaction(description=f\"Transfer from {source.name} to {destination.name}.\", success=enough_cash, cash_amount=amount, source_account=source, \n destination_account=destination)\n transaction.save()\n\n return redirect('overview')", "def renew_token(cls, token_obj: \"AuthToken\") -> None:\n token_obj.renew_token(renewed_by=cls)", "def put_account(self, account):\n \n pass", "def transfer(self, amount, target):\n\n connection = sqlite3.connect('/home/BorneAgain/Desktop/flasktest/accounts.db')\n\n cursor = connection.cursor()\n\n cursor.execute(\"\"\"select * from accounts where name=?\"\"\", (target, ))\n\n if len(cursor.fetchall()) > 0:\n\n self.withdraw(amount)\n\n cursor.execute(\"\"\"update accounts set amount=amount+? where name=?\"\"\", (amount, target))\n \n connection.commit()\n\n return cursor.fetchall()\n \n else:\n\n return None", "def login_token(self, token):\n self.token = token # this will also set the refresh_token to None" ]
[ "0.7828118", "0.61128455", "0.60857874", "0.60298026", "0.60056615", "0.597364", "0.59008574", "0.5854642", "0.5806969", "0.5676092", "0.55882776", "0.5564915", "0.5542804", "0.55134624", "0.548216", "0.54781884", "0.54602224", "0.5428062", "0.54226655", "0.5394105", "0.53359425", "0.52895635", "0.5279014", "0.5272988", "0.5272267", "0.5269836", "0.52688", "0.5254994", "0.52482355", "0.5245763" ]
0.6379537
1
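A hedged usage sketch for the transfer method above. It assumes the surrounding steemengine-style Wallet class (the method references self.steem, self.api and self.ssc_id); the package path, key, account names and token symbol are placeholders rather than values from the record:

from beem import Steem
from steemengine.wallet import Wallet  # assumed package layout

stm = Steem(keys=["<active-wif>"])  # placeholder key
wallet = Wallet("demo-sender", steem_instance=stm)
# The checks above raise TokenNotInWallet, InsufficientTokenAmount or
# InvalidTokenAmount before anything is broadcast.
tx = wallet.transfer("demo-receiver", "1.000", "DEMO", memo="test transfer")
print(tx)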
Issues a specific token amount.
def issue(self, to, amount, symbol):
    token = Token(symbol, api=self.api)
    if token["issuer"] != self.account:
        raise TokenIssueNotPermitted("%s is not the issuer of token %s" % (self.account, symbol))
    if token["maxSupply"] == token["supply"]:
        raise MaxSupplyReached("%s has reached is maximum supply of %d" % (symbol, token["maxSupply"]))
    quant_amount = token.quantize(amount)
    if quant_amount <= decimal.Decimal("0"):
        raise InvalidTokenAmount("Amount to issue is below token precision of %d" % token["precision"])
    check_to = Account(to, steem_instance=self.steem)
    contract_payload = {"symbol":symbol.upper(),"to":to,"quantity":str(quant_amount)}
    json_data = {"contractName":"tokens","contractAction":"issue",
                 "contractPayload":contract_payload}
    tx = self.steem.custom_json(self.ssc_id, json_data, required_auths=[self.account])
    return tx
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_token(self, amount):\n self.M += amount", "def token_transfer(ctx, amount, to):\n ocean = ctx.obj['ocean']\n account = ocean.account\n ocean.tokens.transfer(to, int(amount), account)\n echo({\n 'amount': amount,\n 'from': {\n 'address': account.address,\n 'balance': Token.get_instance().get_token_balance(account.address),\n },\n 'to': {\n 'address': to,\n 'balance': Token.get_instance().get_token_balance(to),\n }\n })", "def taker(self, amount, token):\n if self.pair.get_base_token() == token:\n return self.buy(amount)\n\n if self.pair.get_quote_token() == token:\n return self.sell(amount)", "def add_tokens(self, units, amount):\r\n\t\tself.total_units_consumed+= units\r\n\t\tself.total_amount_spent+= amount\r\n\t\tformatted_units= format(self.total_units_consumed, \".2f\")\r\n\t\tprint(f\"\\nAdded {units}.\\nTotal units bought {formatted_units}\")", "def mint(amount: int) -> int:\n global total_supply\n\n _assert_is_bank(context.sender)\n total_supply = base.mint(balance_of, total_supply, context.sender, amount)\n return total_supply", "def grant(to_address: str, amount: int) -> int:\n _assert_is_bank(context.sender)\n\n mint(amount)\n transfer(to_address, amount)\n return total_supply", "async def money(ctx):\n pass", "def remove_token(self, amount):\n self.M -= amount", "def mint(_to: address, _amount: uint256) -> bool:\n\n assert msg.sender == self.owner, \"Access is denied.\"\n assert self.totalSupply + _amount <= self.maximumSupply, \"You cannot print those many tokens.\"\n assert not self.mintingFinished, \"Minting cannot be performed anymore.\"\n\n self.totalSupply += _amount\n self.balances[_to] += _amount\n\n log.Mint(_to, _amount)\n log.Transfer(ZERO_ADDRESS, _to, _amount)\n\n return True", "async def _pay_money(ctx, user : discord.Member, amount : int):\n if amount<0:\n await bot.reply(\"You can't pay someone a negative amount!\")\n elif user==ctx.message.author:\n await bot.reply(\"You can't pay yourself!\")\n else:\n await transfer(bot, ctx.message.author, user, amount)", "def set_amount(self, amount):\n self.amount = amount", "def add(self, amount):\n self.amount += amount", "def increment(self, amount):\n pass", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "def amount(self, amount):\n\n self._amount = amount", "async def deposit(ctx, money:int):\n author = ctx.message.author\n if str(author) in settings.BOT_ADMIN:\n database.add_pokedollars(author, money)\n await ctx.send(\"funds deposited\")\n else:\n await ctx.send(\"You are not the bot admin. 
Go awai.\")", "def pay(self, amt: float):\n self._money += amt", "def deposit(account, amount):\n pass", "def withdraw(account, amount):\n pass", "def post_amount_input(message, bot):\n # print(message.text)\n try:\n chat_id = message.chat.id\n amount_entered = message.text\n if amount_entered=='Cancel':\n raise Exception(\"Cancelling record!!\")\n amount_value = validate_entered_amount(amount_entered) # validate\n if amount_value == 0: # cannot be $0 spending\n raise Exception(\"Spent amount has to be a non-zero number.\")\n\n user_bills['cost'] = float(amount_value)\n # print(user_bills)\n # print(user_bills['cost'])\n\n user_bills['timestamp'] = datetime.now()\n # print(user_bills['timestamp'])\n # print(count)\n # print(user_çcbills['number'])\n\n user_history = db.user_bills.find({'user_telegram_id' : message.chat.id})\n maximum = 0\n for rec in user_history:\n maximum = max(maximum, rec['number'])\n # print(maximum)\n # print('done')\n\n # global count_\n user_bills['number'] = maximum+1\n # count_ += 1\n\n get_sharing_details(message, bot)\n\n except Exception as e:\n bot.reply_to(message,str(e))\n display_text = \"\"\n for c in commands: # generate help text out of the commands dictionary defined at the top\n display_text += \"/\" + c + \": \"\n display_text += commands[c] + \"\\n\"\n bot.send_message(chat_id, 'Please select a menu option from below:')\n bot.send_message(chat_id, display_text)", "def transfer(self, to, amount, symbol, memo=\"\"):\r\n token_in_wallet = self.get_token(symbol)\r\n if token_in_wallet is None:\r\n raise TokenNotInWallet(\"%s is not in wallet.\" % symbol)\r\n if float(token_in_wallet[\"balance\"]) < float(amount):\r\n raise InsufficientTokenAmount(\"Only %.3f in wallet\" % float(token_in_wallet[\"balance\"]))\r\n token = Token(symbol, api=self.api)\r\n quant_amount = token.quantize(amount)\r\n if quant_amount <= decimal.Decimal(\"0\"):\r\n raise InvalidTokenAmount(\"Amount to transfer is below token precision of %d\" % token[\"precision\"])\r\n check_to = Account(to, steem_instance=self.steem)\r\n contract_payload = {\"symbol\":symbol.upper(),\"to\":to,\"quantity\":str(quant_amount),\"memo\":memo}\r\n json_data = {\"contractName\":\"tokens\",\"contractAction\":\"transfer\",\r\n \"contractPayload\":contract_payload}\r\n tx = self.steem.custom_json(self.ssc_id, json_data, required_auths=[self.account])\r\n return tx", "def fund(node_index, amount):\n node = Node.from_index(node_index)\n sending_node = Node.from_index(1)\n destination_address = address(node)\n run_lncli(sending_node, f'sendcoins {destination_address} {amount}')", "def deposit(self, amount):\n self.balance += amount" ]
[ "0.7084912", "0.60524404", "0.5868739", "0.5844773", "0.5841103", "0.5838695", "0.5806996", "0.5803967", "0.5790781", "0.5707415", "0.56977123", "0.55994946", "0.5595049", "0.5540172", "0.5540172", "0.5540172", "0.5540172", "0.5540172", "0.5540172", "0.5540172", "0.5540172", "0.5540172", "0.55262923", "0.5505913", "0.5465304", "0.5417496", "0.5416542", "0.5410527", "0.54030025", "0.5382772" ]
0.7078184
1
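A matching sketch for issue: it only succeeds when the calling account is the token's issuer and the maximum supply has not yet been reached. Same assumptions and placeholder names as the transfer sketch above:

from beem import Steem
from steemengine.wallet import Wallet  # assumed, as above

stm = Steem(keys=["<issuer-active-wif>"])  # placeholder key
wallet = Wallet("demo-issuer", steem_instance=stm)
# Raises TokenIssueNotPermitted unless "demo-issuer" issued DEMO,
# and MaxSupplyReached once supply == maxSupply.
tx = wallet.issue("demo-receiver", "100.000", "DEMO")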
Returns the buy book for the wallet account. When symbol is set, the order book from the given token is shown.
def get_buy_book(self, symbol=None, limit=100, offset=0):
    if symbol is None:
        buy_book = self.api.find("market", "buyBook", query={"account": self.account}, limit=limit, offset=offset)
    else:
        buy_book = self.api.find("market", "buyBook", query={"symbol": symbol, "account": self.account}, limit=limit, offset=offset)
    return buy_book
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_ticker_book(self, symbol: Symbol):\n api_params = {\n \"symbol\": symbol.value\n }\n\n return self.request.get(path='/ticker/book', params=api_params)", "def book_ticker(self, symbol=''):\n params = {\n 'symbol': symbol,\n }\n return self._quote_get('ticker/bookTicker', params=params)", "def get_order_book(self, tickerSymbol):\n return", "def fetch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):\n self.load_markets()\n market = self.market(symbol)\n request = {\n 'pair': market['id'],\n }\n orderbook = self.publicGetExchangesPairOrderbook(self.extend(request, params))\n return self.parse_order_book(orderbook, symbol)", "async def fetch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):\n await self.load_markets()\n market = self.market(symbol)\n request = {\n 'pair': market['id'],\n }\n response = await self.publicGetPairDepth(self.extend(request, params))\n orderbook = self.safe_value(response, 'data', {})\n timestamp = self.safe_integer(orderbook, 'timestamp')\n return self.parse_order_book(orderbook, market['symbol'], timestamp)", "async def fetch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):\n await self.load_markets()\n market = self.market(symbol)\n request = {\n 'symbol': market['id'],\n }\n if limit is not None:\n request['depth'] = limit\n response = await self.publicGetOrderBookL2(self.extend(request, params))\n result = {\n 'symbol': symbol,\n 'bids': [],\n 'asks': [],\n 'timestamp': None,\n 'datetime': None,\n 'nonce': None,\n }\n for i in range(0, len(response)):\n order = response[i]\n side = 'asks' if (order['side'] == 'Sell') else 'bids'\n amount = self.convert_from_raw_quantity(symbol, self.safe_string(order, 'size'))\n price = self.safe_number(order, 'price')\n # https://github.com/ccxt/ccxt/issues/4926\n # https://github.com/ccxt/ccxt/issues/4927\n # the exchange sometimes returns null price in the orderbook\n if price is not None:\n result[side].append([price, amount])\n result['bids'] = self.sort_by(result['bids'], 0, True)\n result['asks'] = self.sort_by(result['asks'], 0)\n return result", "def get_market_orderbook(self, market):\n return self.__call__('market', 'getmarketorderbook',\n {'marketname': market})", "def create_get_order_book_ticker_request(self, symbol: Optional[str] = None) -> Request:", "def get_sell_book(self, symbol=None, limit=100, offset=0): \r\n if symbol is None:\r\n sell_book = self.api.find(\"market\", \"sellBook\", query={\"account\": self.account}, limit=limit, offset=offset)\r\n else:\r\n sell_book = self.api.find(\"market\", \"sellBook\", query={\"symbol\": symbol, \"account\": self.account}, limit=limit, offset=offset)\r\n return sell_book", "def public_order_book(self, market_symbol, depth=25):\n return self.get(f'markets/{market_symbol}/orderbook', {\n 'depth': depth\n })", "async def fetch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):\n await self.load_markets()\n precision = self.safe_value(self.options, 'precision', 'R0')\n market = self.market(symbol)\n request = {\n 'symbol': market['id'],\n 'precision': precision,\n }\n if limit is not None:\n request['len'] = limit # 25 or 100\n fullRequest = self.extend(request, params)\n orderbook = await self.publicGetBookSymbolPrecision(fullRequest)\n timestamp = self.milliseconds()\n result = {\n 'symbol': market['symbol'],\n 'bids': [],\n 'asks': [],\n 'timestamp': timestamp,\n 'datetime': self.iso8601(timestamp),\n 'nonce': None,\n }\n priceIndex = 1 if (fullRequest['precision'] == 'R0') 
else 0\n for i in range(0, len(orderbook)):\n order = orderbook[i]\n price = self.safe_number(order, priceIndex)\n signedAmount = self.safe_string(order, 2)\n amount = Precise.string_abs(signedAmount)\n side = 'bids' if Precise.string_gt(signedAmount, '0') else 'asks'\n result[side].append([price, self.parse_number(amount)])\n result['bids'] = self.sort_by(result['bids'], 0, True)\n result['asks'] = self.sort_by(result['asks'], 0)\n return result", "def order_book_fetch(self, symbol):\n pass", "def create_get_order_book_request(self, symbol: str,\n limit: Optional[int] = None\n ) -> Request:", "def get_orderbook(self, pair='XBTZAR'):\n\n data = {'pair': pair}\n query_string = build_query_string(data)\n\n r = requests.get(build_api_call(self.base_url, None, 'orderbook', query_string))\n if r.status_code == 200:\n return r.json()", "async def watch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):\n await self.load_markets()\n market = self.market(symbol)\n symbol = market['symbol']\n instrumentName = market['id']\n if market['spot']:\n instrumentName = market['baseId'] + '-' + market['quoteId']\n url = self.urls['api']['ws']\n params = self.omit(params, 'type')\n messageHash = 'orderbook:' + symbol\n subscribe = {\n 'jsonrpc': '2.0',\n 'id': self.request_id(),\n 'method': '/public/subscribe',\n 'params': {\n 'channels': [\n 'book.' + instrumentName + '.raw',\n ],\n },\n }\n request = self.deep_extend(subscribe, params)\n orderbook = await self.watch(url, messageHash, request, messageHash)\n return orderbook.limit()", "def order_book_fetch(self, symbol):\n orderbook = self.orderbooks[symbol]\n asks = [[float(price), float(stats[0]) * float(stats[1])] for price, stats in orderbook['asks'].items()]\n bids = [[float(price), float(stats[0]) * float(stats[1])] for price, stats in orderbook['bids'].items()]\n return asks, bids, orderbook", "async def watch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):\n await self.load_markets()\n market = self.market(symbol)\n symbol = market['symbol']\n name = 'orderbook'\n messageHash = name + '_' + market['id'].lower()\n url = self.urls['api']['ws'] + messageHash + '/' + self.options['uuid']\n client = self.client(url)\n subscription = {\n 'name': name,\n 'symbol': symbol,\n 'messageHash': messageHash,\n 'method': self.handle_order_book,\n }\n if not (messageHash in client.subscriptions):\n self.orderbooks[symbol] = self.order_book({})\n client.subscriptions[messageHash] = subscription\n options = self.safe_value(self.options, 'fetchOrderBookSnapshot', {})\n delay = self.safe_integer(options, 'delay', self.rateLimit)\n # fetch the snapshot in a separate async call after a warmup delay\n self.delay(delay, self.fetch_order_book_snapshot, client, subscription)\n orderbook = await self.watch(url, messageHash, None, messageHash, subscription)\n return orderbook.limit()", "def get_mytrade(self, symbol):\n payload = {'symbol': symbol}\n return self.signed_request('GET', '/api/v3/myTrades', **payload)", "def returnOrderBook(self, currency_pair=\"all\", depth=50):\n pass", "def books(Symbol='tBTCUSD', Precision='P0', **params):\n endpoint = f'book/{Symbol}/{Precision}'\n return request(authenticate=False, version=2, endpoint=endpoint, method='GET', query_params=params)", "def get_chain_info(self, symbol: str): \n return self.trader.fetch_chain_info(symbol)", "def query_symbol_bars(self, symbol: str):\n return self._call_txtrader_api('query_symbol_bars', {'symbol': symbol})", "def order_book(symbol: str,\n 
number_of_data_points: int = 1,\n exchange: str = CRYPTO_EXCHANGE,\n rate_limit: bool = True):\n try:\n check_exchange_existence(exchange=exchange)\n return asyncio.get_event_loop().run_until_complete(\n getOrderBook(symbol=symbol,\n number_of_data_points=number_of_data_points,\n exchange=exchange,\n rate_limit=rate_limit))\n except Exception as exception:\n logger.error('Oops! An error Occurred ⚠️')\n raise exception", "def buy(self, currency_pair, rate, amount):\n return self.api_query('buy', {\"CurrencyPair\": currency_pair, \"rate\": rate, \"amount\": amount})", "def open_buy(self, symbol, price):\n\n\t\ttransaction = {\n\t\t\t\"tradeTransInfo\": {\n\t\t\t\t\"cmd\": xAPIConnector.TransactionSide.BUY,\n\t\t\t\t\"order\": 0,\n\t\t\t\t\"price\": price,\n\t\t\t\t\"symbol\": symbol,\n\t\t\t\t\"type\": xAPIConnector.TransactionType.ORDER_OPEN,\n\t\t\t\t\"volume\": 1\n\t\t\t}\n\t\t}\n\t\tresponse = self.command_execute('tradeTransaction', transaction)\n\t\tprint('Buy ', symbol, ' for ', price, ', status: ', response['status'])", "def get_option_market_data(self, symbol: str): \n return self.trader.fetch_option_market_data(symbol)", "def marketBuy(self, currency_pair, amount):\n # calcular o rate num 'for'\n asks = self.rOrderBook(currency_pair=currency_pair, field='asks')\n list_resp = []\n for ask in asks:\n if ask[1] < amount:\n bought = self.limitBuy(currency_pair, rate=ask[0], amount=ask[1], ioc=True)\n list_resp.append(bought)\n amount -= ask[1]\n elif ask[1] >= amount:\n bought = self.limitBuy(currency_pair, rate=ask[0], amount=amount, ioc=True)\n list_resp.append(bought)\n amount -= amount\n break\n return list_resp", "def buy(self, symbol: str=None, quantity: int=0, in_force: str='gtc', extended: bool=False):\n return self.trader.buy(symbol, quantity, in_force, extended)", "def buy(self, amount):\n trades = []\n buy_amount = 0\n precision = pow(10, self.pair.get_quote_token().get_decimals() - self.pair.get_base_token().get_decimals())\n for i in range(len(self.book[Trade.WAY_SELL])):\n offer = self.book[Trade.WAY_SELL][i]\n amount_quote = offer.get_quote_amount()\n amount_base = offer.get_base_amount()\n price = offer.get_price()\n\n if amount_base >= amount:\n tmp = int(\"%d\" % (amount / price * precision))\n trade = Trade(self.pair, Trade.WAY_BUY, price, amount, tmp, time.time(), fee_currency=self.pair.get_exchange().get_fee_token())\n buy_amount = buy_amount + trade.get_amount_quote()\n trades.append(trade)\n return trades, int(buy_amount)\n\n '''\n Is the offered amount less than needed, you can only buy the offered amount and continue with next offer.\n '''\n trade = Trade(self.pair, Trade.WAY_BUY, price, amount_base, amount_quote, time.time(), fee_currency=self.pair.get_exchange().get_fee_token())\n buy_amount = buy_amount + trade.get_amount_quote()\n amount = amount - amount_base\n trades = trades + [trade]\n\n '''\n Not enough volume or amount to high\n '''\n raise KeyError(\"Not enough offers in orderbook. Low volume or amount to high.\")", "def returnOrderBook(self, limit=25):\n orders = self.dpay.rpc.get_order_book(limit, api=\"market_history\")\n r = {\"asks\": [], \"bids\": []}\n for side in [\"bids\", \"asks\"]:\n for o in orders[side]:\n r[side].append({\n 'price': float(o[\"price\"]),\n 'bbd': o[\"bbd\"] / 10 ** 3,\n 'dpay': o[\"dpay\"] / 10 ** 3,\n })\n return r" ]
[ "0.66823006", "0.65527743", "0.6475742", "0.6460369", "0.62905794", "0.62434053", "0.62104696", "0.62011075", "0.6194737", "0.6161263", "0.61371", "0.6124734", "0.6092546", "0.5844744", "0.58292174", "0.5801955", "0.57990044", "0.57884", "0.5781246", "0.55208474", "0.5463246", "0.5435673", "0.54326564", "0.5427458", "0.5424794", "0.5414975", "0.5350721", "0.5345467", "0.53378224", "0.5310597" ]
0.7860041
0
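get_buy_book is a thin wrapper over api.find against the market contract's buyBook table. A hedged sketch of the equivalent raw query — the Api import is an assumption about where self.api comes from, and the result field names are guesses rather than documented output:

from steemengine.api import Api  # assumed home of the object behind self.api

api = Api()
open_buys = api.find(
    "market", "buyBook",
    query={"symbol": "DEMO", "account": "demo-account"},  # placeholder names
    limit=100, offset=0,
)
for order in open_buys:
    print(order.get("price"), order.get("quantity"))  # assumed field names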
Returns the sell book for the wallet account. When symbol is set, the order book from the given token is shown.
def get_sell_book(self, symbol=None, limit=100, offset=0):
    if symbol is None:
        sell_book = self.api.find("market", "sellBook", query={"account": self.account}, limit=limit, offset=offset)
    else:
        sell_book = self.api.find("market", "sellBook", query={"symbol": symbol, "account": self.account}, limit=limit, offset=offset)
    return sell_book
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_buy_book(self, symbol=None, limit=100, offset=0):\r\n if symbol is None:\r\n buy_book = self.api.find(\"market\", \"buyBook\", query={\"account\": self.account}, limit=limit, offset=offset)\r\n else:\r\n buy_book = self.api.find(\"market\", \"buyBook\", query={\"symbol\": symbol, \"account\": self.account}, limit=limit, offset=offset)\r\n return buy_book", "def get_ticker_book(self, symbol: Symbol):\n api_params = {\n \"symbol\": symbol.value\n }\n\n return self.request.get(path='/ticker/book', params=api_params)", "def book_ticker(self, symbol=''):\n params = {\n 'symbol': symbol,\n }\n return self._quote_get('ticker/bookTicker', params=params)", "def fetch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):\n self.load_markets()\n market = self.market(symbol)\n request = {\n 'pair': market['id'],\n }\n orderbook = self.publicGetExchangesPairOrderbook(self.extend(request, params))\n return self.parse_order_book(orderbook, symbol)", "async def fetch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):\n await self.load_markets()\n market = self.market(symbol)\n request = {\n 'symbol': market['id'],\n }\n if limit is not None:\n request['depth'] = limit\n response = await self.publicGetOrderBookL2(self.extend(request, params))\n result = {\n 'symbol': symbol,\n 'bids': [],\n 'asks': [],\n 'timestamp': None,\n 'datetime': None,\n 'nonce': None,\n }\n for i in range(0, len(response)):\n order = response[i]\n side = 'asks' if (order['side'] == 'Sell') else 'bids'\n amount = self.convert_from_raw_quantity(symbol, self.safe_string(order, 'size'))\n price = self.safe_number(order, 'price')\n # https://github.com/ccxt/ccxt/issues/4926\n # https://github.com/ccxt/ccxt/issues/4927\n # the exchange sometimes returns null price in the orderbook\n if price is not None:\n result[side].append([price, amount])\n result['bids'] = self.sort_by(result['bids'], 0, True)\n result['asks'] = self.sort_by(result['asks'], 0)\n return result", "async def fetch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):\n await self.load_markets()\n market = self.market(symbol)\n request = {\n 'pair': market['id'],\n }\n response = await self.publicGetPairDepth(self.extend(request, params))\n orderbook = self.safe_value(response, 'data', {})\n timestamp = self.safe_integer(orderbook, 'timestamp')\n return self.parse_order_book(orderbook, market['symbol'], timestamp)", "async def fetch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):\n await self.load_markets()\n precision = self.safe_value(self.options, 'precision', 'R0')\n market = self.market(symbol)\n request = {\n 'symbol': market['id'],\n 'precision': precision,\n }\n if limit is not None:\n request['len'] = limit # 25 or 100\n fullRequest = self.extend(request, params)\n orderbook = await self.publicGetBookSymbolPrecision(fullRequest)\n timestamp = self.milliseconds()\n result = {\n 'symbol': market['symbol'],\n 'bids': [],\n 'asks': [],\n 'timestamp': timestamp,\n 'datetime': self.iso8601(timestamp),\n 'nonce': None,\n }\n priceIndex = 1 if (fullRequest['precision'] == 'R0') else 0\n for i in range(0, len(orderbook)):\n order = orderbook[i]\n price = self.safe_number(order, priceIndex)\n signedAmount = self.safe_string(order, 2)\n amount = Precise.string_abs(signedAmount)\n side = 'bids' if Precise.string_gt(signedAmount, '0') else 'asks'\n result[side].append([price, self.parse_number(amount)])\n result['bids'] = self.sort_by(result['bids'], 0, True)\n result['asks'] = 
self.sort_by(result['asks'], 0)\n return result", "def get_market_orderbook(self, market):\n return self.__call__('market', 'getmarketorderbook',\n {'marketname': market})", "def order_book_fetch(self, symbol):\n pass", "def get_order_book(self, tickerSymbol):\n return", "def order_book_fetch(self, symbol):\n orderbook = self.orderbooks[symbol]\n asks = [[float(price), float(stats[0]) * float(stats[1])] for price, stats in orderbook['asks'].items()]\n bids = [[float(price), float(stats[0]) * float(stats[1])] for price, stats in orderbook['bids'].items()]\n return asks, bids, orderbook", "def public_order_book(self, market_symbol, depth=25):\n return self.get(f'markets/{market_symbol}/orderbook', {\n 'depth': depth\n })", "def marketSell(self, currency_pair, amount):\n # calcular o rate num 'for'\n bids = rOrderBook(currency_pair=currency_pair, field='bids')\n list_resp = []\n for bid in bids:\n if bid[1] < amount:\n sold = self.limitSell(currency_pair, rate=bid[0], amount=bid[1], ioc=True)\n list_resp.append(sold)\n amount -= bid[0]\n elif bid[1] >= amount:\n sold = self.limitSell(currency_pair, rate=bid[0], amount=amount, ioc=True)\n list_resp.append(sold)\n amount -= amount\n break\n return list_resp", "def orderSell(self, rate = None, amount = None):\r\n\t\treturn OrderBuy(self.pair, rate, amount)", "def book_for_sale(self):\n try:\n return self.book_set.filter(book_type=get_model('books', 'Book').TO_SELL)[0]\n except:\n None", "def create_get_order_book_ticker_request(self, symbol: Optional[str] = None) -> Request:", "def get_orderbook(self, pair='XBTZAR'):\n\n data = {'pair': pair}\n query_string = build_query_string(data)\n\n r = requests.get(build_api_call(self.base_url, None, 'orderbook', query_string))\n if r.status_code == 200:\n return r.json()", "async def watch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):\n await self.load_markets()\n market = self.market(symbol)\n symbol = market['symbol']\n instrumentName = market['id']\n if market['spot']:\n instrumentName = market['baseId'] + '-' + market['quoteId']\n url = self.urls['api']['ws']\n params = self.omit(params, 'type')\n messageHash = 'orderbook:' + symbol\n subscribe = {\n 'jsonrpc': '2.0',\n 'id': self.request_id(),\n 'method': '/public/subscribe',\n 'params': {\n 'channels': [\n 'book.' 
+ instrumentName + '.raw',\n ],\n },\n }\n request = self.deep_extend(subscribe, params)\n orderbook = await self.watch(url, messageHash, request, messageHash)\n return orderbook.limit()", "def create_get_order_book_request(self, symbol: str,\n limit: Optional[int] = None\n ) -> Request:", "def get_mytrade(self, symbol):\n payload = {'symbol': symbol}\n return self.signed_request('GET', '/api/v3/myTrades', **payload)", "async def watch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):\n await self.load_markets()\n market = self.market(symbol)\n symbol = market['symbol']\n name = 'orderbook'\n messageHash = name + '_' + market['id'].lower()\n url = self.urls['api']['ws'] + messageHash + '/' + self.options['uuid']\n client = self.client(url)\n subscription = {\n 'name': name,\n 'symbol': symbol,\n 'messageHash': messageHash,\n 'method': self.handle_order_book,\n }\n if not (messageHash in client.subscriptions):\n self.orderbooks[symbol] = self.order_book({})\n client.subscriptions[messageHash] = subscription\n options = self.safe_value(self.options, 'fetchOrderBookSnapshot', {})\n delay = self.safe_integer(options, 'delay', self.rateLimit)\n # fetch the snapshot in a separate async call after a warmup delay\n self.delay(delay, self.fetch_order_book_snapshot, client, subscription)\n orderbook = await self.watch(url, messageHash, None, messageHash, subscription)\n return orderbook.limit()", "def returnOrderBook(self, currency_pair=\"all\", depth=50):\n pass", "def sell(self, amount):\n trades = []\n sell_amount = 0\n precision = pow(10, self.pair.get_base_token().get_decimals() - self.pair.get_quote_token().get_decimals())\n for i in range(len(self.book[Trade.WAY_BUY])):\n offer = self.book[Trade.WAY_BUY][i]\n amount_quote = offer.get_quote_amount()\n amount_base = offer.get_base_amount()\n price = offer.get_price()\n\n if amount_quote >= amount:\n tmp = amount * price * precision\n tmp = int(tmp)\n trade = Trade(self.pair, Trade.WAY_SELL, price, tmp, amount, time.time(), fee_currency=self.pair.get_exchange().get_fee_token())\n sell_amount = sell_amount + trade.get_amount_base()\n trades.append(trade)\n return trades, int(sell_amount)\n\n '''\n Is the offered amount less than needed, you can only buy the offered amount and continue\n '''\n trade = Trade(self.pair, Trade.WAY_SELL, price, amount_base, amount_quote, time.time(), fee_currency=self.pair.get_exchange().get_fee_token())\n amount = amount - amount_quote\n sell_amount = sell_amount + trade.get_amount_base()\n trades = trades + [trade]\n\n '''\n Not enough volume or amount to high\n '''\n raise KeyError(\"Not enough offers in orderbook. 
Low volume or amount to high.\")", "def sell(self, currency_pair, rate, amount):\n return self.api_query('sell', {\"currencyPair\": currency_pair, \"rate\": rate, \"amount\": amount})", "def get_order_book(self, pair):\r\n method = self.public_endpoints['order_book']['method']\r\n url = self.base_url + self.public_endpoints['order_book']['url'].format(pairId=pair)\r\n req = requests.request(method, url)\r\n res = req.json()\r\n\r\n if res['success'] == True:\r\n return res[\"result\"]\r\n else:\r\n return res", "def latest_L2_order_book_entry(symbol: str,\n exchange: str = CRYPTO_EXCHANGE,\n rate_limit: bool = True):\n try:\n check_exchange_existence(exchange=exchange)\n response = asyncio.get_event_loop().run_until_complete(\n getOrderBookL2(symbol=symbol,\n number_of_data_points=1,\n exchange=exchange,\n rate_limit=rate_limit))\n latest_orderbook_entry_dict = {}\n latest_orderbook_entry_dict['symbol'] = symbol\n latest_orderbook_entry_dict['ask'] = response['asks'][0][0] if len(\n response['asks']) > 0 else None\n latest_orderbook_entry_dict['asksize'] = response['asks'][0][1] if len(\n response['asks']) > 0 else None\n latest_orderbook_entry_dict['bid'] = response['bids'][0][0] if len(\n response['bids']) > 0 else None\n latest_orderbook_entry_dict['bidsize'] = response['bids'][0][1] if len(\n response['bids']) > 0 else None\n latest_orderbook_entry_dict['datetime'] = response['datetime']\n latest_orderbook_entry_dict['nonce'] = response['nonce']\n return latest_orderbook_entry_dict\n except Exception as exception:\n logger.error('Oops! An error Occurred ⚠️')\n raise exception", "def get_option_market_data(self, symbol: str): \n return self.trader.fetch_option_market_data(symbol)", "def sell(self, bar, volume):\n self.place(Order(symbol=bar.symbol,\n volume=volume,\n price=bar.close,\n transaction=TransactionType.SELL,\n timestamp=bar.timestamp))", "def latest_order_book_entry(symbol: str,\n exchange: str = CRYPTO_EXCHANGE,\n rate_limit: bool = True):\n try:\n check_exchange_existence(exchange=exchange)\n response = asyncio.get_event_loop().run_until_complete(\n getOrderBook(symbol=symbol,\n number_of_data_points=1,\n exchange=exchange,\n rate_limit=rate_limit))\n latest_orderbook_entry_dict = {}\n latest_orderbook_entry_dict['symbol'] = symbol\n latest_orderbook_entry_dict['ask'] = response['asks'][0][0] if len(\n response['asks']) > 0 else None\n latest_orderbook_entry_dict['asksize'] = response['asks'][0][1] if len(\n response['asks']) > 0 else None\n latest_orderbook_entry_dict['bid'] = response['bids'][0][0] if len(\n response['bids']) > 0 else None\n latest_orderbook_entry_dict['bidsize'] = response['bids'][0][1] if len(\n response['bids']) > 0 else None\n latest_orderbook_entry_dict['datetime'] = response['datetime']\n latest_orderbook_entry_dict['nonce'] = response['nonce']\n return latest_orderbook_entry_dict\n except Exception as exception:\n logger.error('Oops! An error Occurred ⚠️')\n raise exception", "def books(Symbol='tBTCUSD', Precision='P0', **params):\n endpoint = f'book/{Symbol}/{Precision}'\n return request(authenticate=False, version=2, endpoint=endpoint, method='GET', query_params=params)" ]
[ "0.71708095", "0.6858757", "0.6845278", "0.6777183", "0.6576272", "0.64677733", "0.64433914", "0.6294736", "0.62651837", "0.6260835", "0.59918576", "0.5936571", "0.5870502", "0.5838228", "0.58012784", "0.57994854", "0.57797", "0.57177216", "0.57014185", "0.56959134", "0.5679543", "0.56587106", "0.5597469", "0.55661094", "0.55264837", "0.5512766", "0.54996645", "0.5485841", "0.5476252", "0.5420937" ]
0.782693
0
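Because both book queries page with limit/offset, a caller can walk the whole sell side in fixed-size chunks. A hedged paging sketch, reusing the wallet instance assumed in the earlier sketches:

offset = 0
while True:
    page = wallet.get_sell_book(symbol="DEMO", limit=100, offset=offset)
    if not page:
        break
    for order in page:
        print(order)
    offset += 100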
List a folder. Return a dict mapping unicode filenames to FileMetadata|FolderMetadata entries.
def list_folder(dbx, folder, subfolder):
    path = '/%s/%s' % (folder, subfolder.replace(os.path.sep, '/'))
    while '//' in path:
        path = path.replace('//', '/')
    path = path.rstrip('/')
    try:
        with stopwatch('list_folder'):
            res = dbx.files_list_folder(path)
    except dropbox.exceptions.ApiError as err:
        print('Folder listing failed for', path, '-- assumed empty:', err)
        return {}
    else:
        rv = {}
        for entry in res.entries:
            rv[entry.name] = entry
        return rv
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def filelist(folder):\n file_dict={}\n folderlist = glob.glob(os.getcwd()+\"/\"+folder+\"/*\")\n for i in tqdm(folderlist):\n filelist = glob.glob(i+\"/*\")\n filename = i.rsplit(\"/\")[-1]\n file_dict[filename]= filelist\n\n return file_dict", "def ListFolder(self, path): # real signature unknown; restored from __doc__\n pass", "def list_folder(self, prefix=\"\", root=\"\", delimiter=\"/\"):\n LOGGER.info(\"listing: (%s, %s)\", root, prefix)\n abspath = self.make_path(prefix)\n LOGGER.info(\"listing abs: %s\", abspath)\n absroot = self.make_path(root)\n starts = len(absroot)\n files = []\n folders = []\n if os.path.isdir(abspath):\n with os.scandir(abspath) as item:\n for entry in item:\n is_file = entry.is_file()\n size = entry.stat().st_size if is_file else None\n mime = (\n mimetypes.guess_type(entry.path) if is_file else None\n )\n if entry.name[0] != \".\":\n item = {\n \"name\": entry.name,\n \"path\": entry.path[starts:],\n \"file\": is_file,\n \"size\": size,\n \"type\": mime,\n }\n if is_file:\n files.append(item)\n else:\n folders.append(item)\n return {\"files\": files, \"folders\": folders}", "async def _list_folder(self, folder_id: int) -> Tuple[List[Dict[str, Any]], int]:\n if not self.api_key:\n raise NotAuthenticated(\"You need to pass an API key\")\n url = urljoin(self.API, \"folders/\")\n headers = {\"X-API-KEY\": self.api_key}\n data = {}\n if folder_id:\n data = {\"folder_id\": folder_id}\n async with self.session() as session:\n async with session.get(url, params=data, headers=headers) as resp:\n result = await resp.json()\n return result, resp.status", "def read_from_folder(folder):\n\n raw_dict = dict()\n\n for file in os.listdir(folder):\n raw = read_raw_from_file(os.path.join(folder, file))\n raw_dict[file] = raw\n\n return raw_dict", "def parse_folder(self, path):\n\n for filename in os.listdir(path):\n self.parse_file(os.path.join(path, filename), filename)\n return self.country_dict, self.hre_dict, self.name_dict", "def dropbox_list_folder(path):\n #social_auth = user.social_auth.filter(provider=\"dropbox-oauth2\").get()\n session = requests.Session()\n session.headers.update({\n \"Authorization\": \"Bearer {token}\".format(token='Ss5giw2X76IAAAAAAAAUNpW5cbpxT1pHzTL--XHaHW2QLer1iP-CCCqzqi3Mn2jQ'),\n \"Content-Type\": \"application/json\",\n })\n response = session.post(\n url=\"https://api.dropboxapi.com/2/files/list_folder\",\n json={\"path\": path},\n )\n response.raise_for_status()\n content = response.json()\n for entry in content['entries']:\n yield undotted_keys(entry)\n\n while content['has_more']:\n response = session.post(\n url=\"https://api.dropboxapi.com/2/files/list_folder/continue\",\n json={\"cursor\": content[\"cursor\"]},\n )\n response.raise_for_status()\n content = response.json()\n for entry in content['entries']:\n yield undotted_keys(entry)", "def listFolders(folderRoot):\n return os.listdir(folderRoot)", "def get_directory(self, directory: str) -> List[Dict]:\n raise NotImplementedError", "def get_folder_list():\n if exists_key_store('folders:list'):\n return get_key_store('folders:list')\n else:\n # initialize folder list with root (All)\n set_key_store('folders:counter', 0)\n rpush_key_store('folders:list', {'id': 0, 'parent': -1, 'name': 'All'})\n return get_key_store('folders:list')", "def extract_folder_file_structure() -> Dict[str, List[str]]:\n folders_and_files = {}\n for path_to_folder in glob.glob(f\"{ZULIPTERMINAL}/**/\", recursive=True):\n complete_directory_path = Path(path_to_folder)\n if complete_directory_path.name in 
FOLDERS_TO_EXCLUDE:\n continue\n relative_directory_path = complete_directory_path.relative_to(ROOT_DIRECTORY)\n if str(relative_directory_path) not in DESC_FOR_NO_FILE_FOLDERS:\n files_in_directory = [\n file.name\n for file in complete_directory_path.glob(\"*.py\")\n if file.name != \"__init__.py\"\n ]\n folders_and_files[str(relative_directory_path)] = files_in_directory\n return folders_and_files", "def list_folder(self, c_folder_or_c_path):\n raise NotImplementedError", "def ls(self, folder_id: int = -1) -> list:\n print('ls', folder_id)\n if folder_id == -1:\n folder_id = self.default_dir\n url = 'https://webapi.115.com/files?aid=1&cid={}&o=user_ptime&asc=0&offset=0&show_dir=1&limit=115&code=&scid=' \\\n '&snap=0&natsort=1&custom_order=2&source=&format=json&type=&star=&is_q=&is_share='.format(folder_id)\n result = self.s.get(url, headers={'Referer': referer['115'].format(self.default_dir)}).json()\n if result['errNo'] == 0:\n data = result['data']\n return data", "def list_dir(request, file):\n\n\t#determine the parent location to list files and folders\n\tif file is None:\n\t\tfile = '/'\n\telse:\n\t\tfile = '/' + file\n\n\tparent = file\n\n\t#create dictionary to be returned\n\tdata = {}\n\tdata[\"id\"] = file\n\tdata[\"label\"] = 'name'\n\tdata[\"children\"] = []\n\tdata[\"name\"] = parent\n\n\tF = []\n\tD = []\n\n\t#attempt to list, unless permissions prohibit then return no children\n\ttry:\n\t\t#insert names of files and directories into respective arrays\n\t\tfor file in os.listdir(parent):\n\t\t\tfilepath = parent + file\n\t\t\tif os.path.isfile(filepath):\n\t\t\t\tif not file.startswith('.'):\n\t\t\t\t\tF.append(file)\n\t\t\telif os.path.isdir(filepath):\n\t\t\t\tD.append(file)\n\n\t\t#sort files and directories arrays\n\t\tF.sort()\n\t\tD.sort()\n\n\t\t#insert new item for each file and directory into children\n\t\tfor name in F:\n\t\t\tdata[\"children\"].append({ \"type\": \"file\",\n\t\t\t\t\t\t \"id\": parent + name,\n\t\t\t\t\t\t \"name\": name })\n\n\t\tfor name in D:\n\t\t\tdata[\"children\"].append({ \"type\": \"folder\",\n\t\t\t\t\t\t \"id\": parent + name,\n\t\t\t\t\t\t \"name\": name,\n\t\t\t\t\t\t \"children\": True })\n\texcept:\n\t\tdata[\"children\"] = []\n\n\treturn HttpResponse(str(json.dumps(data)),\n\t\t\t content_type=\"application/json\")", "def list_directory_files(directory):\n fs_ = fs.open_fs(directory)\n file_list = []\n for file_name in fs_.walk.files():\n file_details = fs_.getinfo(file_name, namespaces=['details'])\n file_list.append({'name': file_name.lstrip('/'),\n 'last-modified': file_details.modified.\n strftime(WORKFLOW_TIME_FORMAT),\n 'size': file_details.size})\n return file_list", "def parse_folder(self, path):\n\n data = []\n for filename in os.listdir(path):\n data.append(self.parse_file(os.path.join(path, filename), filename))\n return data", "def parse(cls, raw_folder: str) -> Dict[str, Any]:\n folder_path = os.path.abspath(raw_folder)\n data = dict()\n files = os.listdir(folder_path)\n for file in files:\n if is_ignored(file):\n continue\n try:\n file = os.path.join(raw_folder, file)\n datum = cls.process_file(file)\n except FileNotCompatible:\n continue\n\n _, kwrd = os.path.split(file)\n kwrd = os.path.splitext(kwrd)[0]\n data[kwrd] = datum\n\n return data", "def list_files(self) -> Dict[str, str]:\n try:\n results = self._service.files().list(\n q='trashed=false',\n pageSize=10,\n fields=\"nextPageToken, files(id, name)\"\n ).execute()\n except HttpError as e:\n logger.error(\"Failed to list all files in Drive. 
%s\", e)\n raise DriveServiceError\n\n items = results.get('files', [])\n\n return {i['id']: i['name'] for i in items}", "def folder_name(self): \n folders = []\n for folder in self.folders:\n folders.append(folder)\n return folders", "def list_files_in_directory(self):\n lesson_file_dict = dict()\n lesson_file_dict[\"files\"] = []\n\n directory_list = listdir(self.sub_dir)\n for directory in directory_list:\n if isfile(join(self.sub_dir, directory)):\n lesson_file_dict[\"files\"].append(directory)\n\n return lesson_file_dict", "def parse_folder(file_folder: str) -> Tuple[list, list, list]:\n\n raw_files = [\n _\n for _ in os.listdir(file_folder)\n if _.lower().endswith(\".raw\") or _.lower().endswith(\".d\") or _.lower().endswith(\".mzml\")\n ]\n fasta_files = [_ for _ in os.listdir(file_folder) if _.lower().endswith(\".fasta\")]\n db_files = [\n _ for _ in os.listdir(file_folder) if _.lower().endswith(\".db_data.hdf\")\n ]\n\n return raw_files, fasta_files, db_files", "def get_list(profiles_folder, logger):\n profile_list = []\n with scandir(profiles_folder) as it:\n for entry in it:\n if entry.is_file():\n filepath = profiles_folder + entry.name\n profile = json_from_file(filepath, logger)\n if profile is not None:\n try:\n profile_list.append({\"filepath\":filepath, \"name\":profile[\"name\"], \"description\":profile[\"description\"]})\n except AttributeError:\n logger.error(\"Missing attributes in \" + filepath)\n logger.error(str(profile))\n return profile_list", "def show_kml_list():\n out = []\n\n for filename in os.listdir(settings.KML_OUTPUT_DIR):\n path = os.path.join(settings.KML_OUTPUT_DIR, filename)\n if os.path.isdir(path):\n continue\n f = open(path)\n content = f.read(300)\n f.close()\n name = KML_NAME_RE.search(content)\n if not name:\n continue\n out.append((name.group(1), filename))\n\n return {'items': sorted(out, cmp=lambda a, b: dumb_czech_cmp(a, b)), 'MEDIA_URL': settings.MEDIA_URL}", "def process_folder(root, path=\"\"):\n myDict = {}\n if path:\n if root.cd(path):\n for key in ROOT.gDirectory.GetListOfKeys():\n filterKey(root, key, path, myDict, \"__List\")\n else:\n for key in ROOT.gDirectory.GetListOfKeys():\n mypath = ROOT.gDirectory.GetPathStatic()\n filterKey(root, key, mypath, myDict, \"\")\n ROOT.gDirectory.cd(mypath)\n return myDict", "def list_dir(self, path):", "def get_folder_list(args):\n\tif not args.folders:\n\t\treturn None\n\n\tif os.path.isfile(args.folders):\n\t\treturn [x.strip() for x in list(open(args.folders, 'r'))]\n\n\telse:\n\t\treturn [x.strip() for x in args.folders.split(',')]", "def list_directory2(self, mdir, limit=None, marker=None):\n log.debug('ListDirectory %r', mdir)\n\n query = {}\n if limit:\n query[\"limit\"] = limit\n if marker:\n query[\"marker\"] = marker\n\n res, content = self._request(mdir, \"GET\", query=query)\n if res[\"status\"] != \"200\":\n raise errors.MantaAPIError(res, content)\n lines = content.splitlines(False)\n dirents = []\n for line in lines:\n if not line.strip():\n continue\n try:\n dirents.append(json.loads(line))\n except ValueError:\n raise errors.MantaError('invalid directory entry: %r' % line)\n return res, dirents", "def _get_file_list(folder):\n tree = [x for x in os.walk(folder)]\n files = [os.path.join(t[0], y) for t in tree for y in t[2]]\n return [os.path.relpath(x, start=folder)\n for x in files if x != LOCAL_METADATA_FILE]", "def ls(self):\n files = self.drive.files().list().execute().get(\"files\", [])\n for f in files:\n print(f[\"name\"], f[\"mimeType\"])", "def get_files_in_folder(self, 
folder_id: str) -> list:\n if folder_id:\n response = self.service.files().list(\n q=f\"parents = '{folder_id}'\",\n spaces='drive',\n fields='nextPageToken, files(id, name, kind, mimeType, trashed, createdTime, owners)',\n pageToken=None).execute()\n else:\n response = self.service.files().list(\n # q=f\"parents = '{folder_id}'\",\n spaces='drive',\n fields='nextPageToken, files(id, name, kind, mimeType, trashed, createdTime, owners)',\n pageToken=None).execute()\n items = response.get('files', [])\n\n folder_list = []\n for item in items:\n zen = item['owners']\n folder_details = {\n 'owner_name': zen[0]['displayName'],\n 'owner_kind': zen[0]['kind'],\n 'fileid': item['id'],\n 'filename': item['name'],\n 'file_kind': item['kind'],\n 'mime_type': item['mimeType'],\n 'trashed': item['trashed'],\n 'created_time': item['createdTime']\n }\n folder_list.append(folder_details)\n return folder_list" ]
[ "0.7113107", "0.6950174", "0.68306595", "0.68007314", "0.65157694", "0.6244095", "0.6208136", "0.6185927", "0.61017084", "0.606622", "0.6065129", "0.60376316", "0.6015897", "0.601543", "0.6008996", "0.60082483", "0.5999453", "0.58966297", "0.58930933", "0.58663845", "0.5864652", "0.5860993", "0.5857441", "0.58420587", "0.5839492", "0.5828738", "0.5820965", "0.5816325", "0.5788886", "0.57711667" ]
0.71694803
0
Checks if this operator is a comparison operator.
def is_comparison_op(self): return self.value in ["=", "!=", "<", "<=", ">", ">="]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def isOperator(self):\n return _libsbml.ASTNode_isOperator(self)", "def isOperator(self, *args):\n return _libsbml.ASTBasePlugin_isOperator(self, *args)", "def is_comparison(node):\n return isinstance(node, Comparison)", "def __eq__(self, other: 'OperatorConfig'):\n operator_name = self.operator_name == other.operator_name\n return (self.params == other.params\n and operator_name)", "def is_valid_operator(self, operator):\n if operator in self.operators_dict.keys():\n return True\n else:\n return False", "def __eq__(self, other):\n if self.op is None:\n if all(x.isdigit() for x in self.version):\n return self.relative_eq(other)\n elif 'x' in self.version:\n return self.compare_wild_card(other)\n elif self.version.startswith('*'):\n return True\n elif '~' == self.op:\n return self.compare_approximate(other)\n elif '<=' == self.op:\n return self.relative_lte(other)\n elif '<' == self.op:\n return self.relative_lt(other)\n elif '>=' == self.op:\n return self.relative_gte(other)\n elif '>' == self.op:\n return self.relative_gt(other)\n elif '^' == self.op:\n return self.compare_compatible(other)\n raise Exception(\n \"invalid comparison between {0} {1}\".format(self, other)\n )", "def isoperator(token):\n\n # Token is an operator\n return token and token.lower() in Token.OPERATORS", "def is_operator(self, symbol: str) -> bool:\n return symbol in self.operators", "def is_operator(operator):\n\t\tlist_of_operators = [\"+\", \"-\", \"*\", \"/\"]\n\t\treturn operator in list_of_operators", "def __equals__(self, to_compare):\n try:\n # Try to compare - this likely fails when it is compared to a non\n # ActuatorControlTarget object\n return \\\n (self.group == to_compare.group) and \\\n (self.controls == to_compare.controls)\n\n except AttributeError:\n return False", "def __equals__(self, to_compare):\n try:\n # Try to compare - this likely fails when it is compared to a non\n # ActuatorOutputStatus object\n return \\\n (self.active == to_compare.active) and \\\n (self.actuator == to_compare.actuator)\n\n except AttributeError:\n return False", "def operator(self) -> Optional[LogicalOperator]:\n return self.__operator", "def __eq__(self, other):\n return (other is self) or (isinstance(other, Expr)\n and self.op == other.op and self.args == other.args)", "def __eq__(self, other):\n \n equals = (len(self.orbital_operators) == len(other.orbital_operators)) and (self.orbital_operators == other.orbital_operators).all() and (self.orbital_labels == other.orbital_labels).all() and (self.op_type == other.op_type)\n \n return equals", "def __eq__(self, other: 'LTL'):\n if self.formula == other.formula:\n return True\n implied_a = self >= other\n implied_b = self <= other\n return implied_a and implied_b", "def compare(value1, operation, value2):\n if operation == None:\n return False\n \n operation = operation.upper()\n\n if operation in [\"=\", \"==\", \"EQ\"]:\n return ObjectComparator.are_equal(value1, value2)\n if operation in [\"!=\", \"<>\", \"NE\"]:\n return ObjectComparator.are_not_equal(value1, value2)\n if operation in [\"<\", \"LT\"]:\n return ObjectComparator.less(value1, value2)\n if operation in [\"<=\", \"LE\"]:\n return ObjectComparator.are_equal(value1, value2) or ObjectComparator.less(value1, value2)\n if operation in [\">\", \"GT\"]:\n return ObjectComparator.more(value1, value2)\n if operation in [\">=\", \"GE\"]:\n return ObjectComparator.are_equal(value1, value2) or ObjectComparator.more(value1, value2)\n if operation == \"LIKE\":\n return ObjectComparator.match(value1, value2)\n\n 
return True", "def is_operator(formula):\n return is_binary_operator(formula) or isinstance(formula, Not)", "def is_operator(obj):\n return isinstance(obj, Token) and obj[0] not in '/01234567890+-.<[('", "def __eq__(self, other) -> bool:\n if other is None or not isinstance(other, Graph):\n name = other.name if other else None\n print(f'{name} is not a Graph object.')\n return False\n\n def match(op1: Operator, op2: Operator) -> bool:\n if not op1.equals(op2):\n print(f'{op1.name} is different.')\n return False\n\n # check input nodes and further\n for i1, i2 in zip(op1.input_ops.values(), op2.input_ops.values()):\n if not match(i1, i2):\n return False\n return True\n\n for o1, o2 in zip(self.get_outputs(), other.get_outputs()):\n if not match(o1, o2):\n return False\n return True", "def compare(self, operator, value, **kw):\n\n return operator(self.comparator, value)", "def __eq__(self, other):\n if not isinstance(other, UserOperatorsDataForSearch):\n return False\n\n return self.to_dict() == other.to_dict()", "def __gt__(self, other):\n return self.__cmp__(other) > 0", "def __equals__(self, to_compare):\n try:\n # Try to compare - this likely fails when it is compared to a non\n # GpsInfo object\n return \\\n (self.num_satellites == to_compare.num_satellites) and \\\n (self.fix_type == to_compare.fix_type)\n\n except AttributeError:\n return False", "def _cmp_(self, other):\n if(not isinstance(other, VVHarmonicWeakMaassForms)):\n return False\n eq = (self.multiplier() == other.WR) and (self._weight_rat == other._weight_rat)\n eq = eq and (self.prec == other.prec) and (self._sym_type == other._sym_type)\n eq = eq and (self._is_dual_rep == other._is_dual_rep)\n return eq", "def __equals__(self, to_compare):\n try:\n # Try to compare - this likely fails when it is compared to a non\n # RcStatus object\n return \\\n (self.was_available_once == to_compare.was_available_once) and \\\n (self.is_available == to_compare.is_available) and \\\n (self.signal_strength_percent == to_compare.signal_strength_percent)\n\n except AttributeError:\n return False", "def __eq__(self, other: t.Any) -> bool:\n return self._op_bool('__eq__', other)", "def __equals__(self, to_compare):\n try:\n # Try to compare - this likely fails when it is compared to a non\n # Battery object\n return \\\n (self.voltage_v == to_compare.voltage_v) and \\\n (self.remaining_percent == to_compare.remaining_percent)\n\n except AttributeError:\n return False", "def operator(self):\n return self.__operator", "def comparator(self) -> Operator:\n return self.__comparator", "def __eq__(self, other):\n return isinstance(other, self.__class__)" ]
[ "0.7321015", "0.72628", "0.68260705", "0.66146785", "0.65818447", "0.6483728", "0.6332287", "0.62300766", "0.61627215", "0.6155684", "0.61439794", "0.6050088", "0.5994527", "0.5971696", "0.5937225", "0.59370035", "0.5907571", "0.58884007", "0.5831833", "0.58108664", "0.58076423", "0.5801536", "0.5793793", "0.5787748", "0.5778381", "0.5769103", "0.5729804", "0.57267493", "0.57165", "0.5696777" ]
0.7520363
0
Checks if this operator is an arithmetic operator.
def is_arithmetic_op(self): return self.value in ["+", "-"]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def isOperator(self):\n return _libsbml.ASTNode_isOperator(self)", "def is_operator(operator):\n\t\tlist_of_operators = [\"+\", \"-\", \"*\", \"/\"]\n\t\treturn operator in list_of_operators", "def isOperator(self, *args):\n return _libsbml.ASTBasePlugin_isOperator(self, *args)", "def isoperator(token):\n\n # Token is an operator\n return token and token.lower() in Token.OPERATORS", "def _is_arithmetic(self, words):\n if words[0] in ['add', 'sub', 'neg', 'eq', 'gt', 'lt', 'and', 'or', 'not']:\n if len(words) != 1:\n raise SyntaxError(\"File line {}: Invalid number of arguments for C_ARITHMETIC command.\".format(self._file_line))\n return True\n else:\n return False", "def is_operator(obj):\n return isinstance(obj, Token) and obj[0] not in '/01234567890+-.<[('", "def is_operator(formula):\n return is_binary_operator(formula) or isinstance(formula, Not)", "def is_valid_operator(self, operator):\n if operator in self.operators_dict.keys():\n return True\n else:\n return False", "def is_arithmetic(type):\n return is_integral(type) or is_floating_point(type)", "def is_operator(self, string):\n if string in '+-/*^()':\n return string\n else:\n return False", "def is_operator(t_char):\r\n eax = 1\r\n if ord(t_char) == 42:\r\n # prodotto *\r\n eax = 0\r\n \r\n if ord(t_char) == 43:\r\n # somma +\r\n eax = 0\r\n \r\n if ord(t_char) == 45:\r\n # sottrazione -\r\n eax = 0\r\n \r\n if ord(t_char) == 47:\r\n # divisione /\r\n eax = 0\r\n \r\n return eax", "def isOperand(self, token):\n if len(token) == 1:\n if token in self.operands:\n return True\n elif len(token) > 1:\n validChars = self.operands + '+-'\n for eachChar in token:\n if eachChar not in validChars:\n return False\n return True", "def is_unary_operator(oper):\n # definition:\n # memeber in class\n # ret-type operator symbol()\n # ret-type operator [++ --](int)\n # globally\n # ret-type operator symbol( arg )\n # ret-type operator [++ --](X&, int)\n symbols = ['!', '&', '~', '*', '+', '++', '-', '--']\n if not isinstance(oper, calldef.operator_t):\n return False\n if oper.symbol not in symbols:\n return False\n if isinstance(oper, calldef.member_operator_t):\n if 0 == len(oper.arguments):\n return True\n elif oper.symbol in ['++', '--'] and \\\n isinstance(oper.arguments[0].type, cpptypes.int_t):\n return True\n else:\n return False\n else:\n if 1 == len(oper.arguments):\n return True\n elif oper.symbol in ['++', '--'] \\\n and 2 == len(oper.arguments) \\\n and isinstance(oper.arguments[1].type, cpptypes.int_t):\n # may be I need to add additional check whether first argument is\n # reference or not?\n return True\n else:\n return False", "def is_operator(self, symbol: str) -> bool:\n return symbol in self.operators", "def raise_OperatorError(self):\n\n operators = ['%', '*', '**', '-', '+', '/', '//']\n\n if self.operator not in operators:\n print(\n f\"OperatorError: {repr(self.operator)}, is not known, use any of {repr(operators)}\")\n return False\n\n return True", "def check_number_operands(operator: loxtoken.Token, op1: Any, op2: Any = None) -> bool:\n if op2 is None:\n if isinstance(op1, float):\n return True\n raise_error(LoxRuntimeError, operator, \"Operand must be a number.\")\n else:\n if isinstance(op1, float) and isinstance(op2, float):\n return True\n raise_error(LoxRuntimeError, operator, \"Both operands must be a number.\")\n return False", "def _is_unary_op(op):\n if op.type == TokenType.BitwiseNot:\n return True\n return False", "def is_operator(node):\n return node.startswith('$')", "def isOperator(user):\n return 
isUserType(user, Operator)", "def isOp(self):\n return True", "def evaluate(self, operand: object) -> bool:\n pass", "def operator_present(input_str): # HELPER\n operator_list = ['+','-','*','/','**','<<','>>']\n\n if input_str in operator_list:\n return True\n else: return False", "def do_is(op_left, op_right):\n if isa(op_left, float) and isa(op_right, float):\n return op_left == op_right\n return op_left is op_right", "def test_operator(self):\n\n tokens = list(Lexer(\"+-*/^%\").generate_tokens())\n answer = [Token(TokenType.PLUS),\n Token(TokenType.MINUS),\n Token(TokenType.MULTIPLY),\n Token(TokenType.DIVIDE),\n Token(TokenType.EXPONENT),\n Token(TokenType.MODULO)]\n self.assertEqual(tokens, answer)", "def test_arithmetic(self):\n for test in [\n TypeTest(sir.BinOpCode(name = 'OP_ADD', left = sir.Int(5), right = sir.Int(6)), SymbolType.Integer),\n TypeTest(sir.BinOpCode(name = 'OP_ADD', left = sir.Bytes('05'), right = sir.Bytes('06')), SymbolType.Integer),\n ]:\n self._test(test)", "def is_binary_operator(oper):\n # definition:\n # memeber in class\n # ret-type operator symbol(arg)\n # globally\n # ret-type operator symbol( arg1, arg2 )\n symbols = [\n ',', '()', '[]', '!=', '%', '%=', '&', '&&', '&=', '*', '*=', '+',\n '+=', '-', '-=', '->', '->*', '/', '/=', '<', '<<', '<<=', '<=', '=',\n '==', '>', '>=', '>>', '>>=', '^', '^=', '|', '|=', '||']\n if not isinstance(oper, calldef.operator_t):\n return False\n if oper.symbol not in symbols:\n return False\n if isinstance(oper, calldef.member_operator_t):\n if 1 == len(oper.arguments):\n return True\n else:\n return False\n else:\n if 2 == len(oper.arguments):\n return True\n else:\n return False", "def is_operand(t_char):\r\n eax = 0 # parto dicendo che e' un numero\r\n\r\n if ord(t_char) < 48:\r\n # non e' numero (ascii < carattere \"0\")\r\n eax = 1\r\n \r\n if ord(t_char) > 57:\r\n # non e' numero (ascii > carattere \"9\")\r\n eax = 1\r\n\r\n return eax", "def operator(self):\n col = self.pos\n operators = [\"||\", \"&&\", \">>\", \"<<\", \"!=\", \">=\", \"<=\", \"==\", \"##\"] + \\\n [\"-\", \"+\", \"!\", \"*\", \"/\", \"|\", \"&\", \"^\", \"<\", \">\", \"?\", \":\", \"~\", \"#\", \"=\", \"%\"]\n try:\n index = self.match_any(operators)\n\n op = Operator(self.line, col, self.prev_white, operators[index])\n return op\n except TokenError:\n self.pos = col\n raise TokenError(\"Invalid operator.\")", "def is_comparison_op(self):\r\n return self.value in [\"=\", \"!=\", \"<\", \"<=\", \">\", \">=\"]", "def is_operator(cls, method_name):\n try:\n getattr(cls, method_name)\n except Exception:\n return False\n return Scenario.meta(cls, \"operator\", method_name, default=False)" ]
[ "0.72987086", "0.7168655", "0.70389915", "0.6944585", "0.6697497", "0.66260475", "0.6606036", "0.6585253", "0.65331554", "0.6493527", "0.64781654", "0.6464818", "0.64532834", "0.64515954", "0.63138485", "0.6309521", "0.6233353", "0.6156094", "0.6153355", "0.61440074", "0.60181165", "0.5999424", "0.59664017", "0.587087", "0.58584976", "0.5831183", "0.58126354", "0.57841915", "0.57698786", "0.5723662" ]
0.8123092
0
The function receives the service, the city, and the date.
def get_precio_de_servicio(**kwargs): servicio = kwargs.get("servicio") ciudad = kwargs.get("ciudad") if ciudad and servicio: fecha = kwargs.get("fecha") if 'fecha' in kwargs else datetime.date.today() precios = [precio for precio in PrecioDeServicio.objects.filter(servicio=servicio).filter(ciudad=ciudad).order_by( '-inicio_de_vigencia') if not precio.fin_de_vigencia] if precios: for p in precios: if p.inicio_de_vigencia <= fecha: return p else: # If no prices are registered in the given city, search across all cities precios = [precio for precio in PrecioDeServicio.objects.filter(servicio=servicio).order_by( '-inicio_de_vigencia') if not precio.fin_de_vigencia] if precios: for p in precios: if p.inicio_de_vigencia <= fecha: return p
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getIntervenciones():", "def getStatVentesJour(self, in_data):\n try:\n date_jour = in_data['date_jour']\n dt = dateutil.parser.parse(date_jour)\n except:\n out_data = {\n 'success': False\n }\n return out_data\n localtime = dt.astimezone (pytz.timezone('Europe/Paris'))\n debut = datetime(localtime.year, localtime.month, localtime.day)\n\n commandes=[]\n ventes=[]\n ca = 0\n nb_commandes = 0\n nb_souscriptions = 0\n for heure in range(0,24) :\n time_debut = debut + timedelta(hours=heure)\n timestamp = calendar.timegm(time_debut.timetuple()) * 1000\n time_fin = time_debut + timedelta(hours=1)\n # ch_list = CommandeHistory.objects.filter(etat='PAY',date__gte=time_debut, date__lt=time_fin)\n c_list = Commande.objects.filter(etat='PAY',date__gte=time_debut,date__lt=time_fin).distinct()\n total_euros = 0\n total_commandes = 0\n total_souscriptions = 0\n for commande in c_list:\n total_euros += commande.montant\n for souscription in commande.souscription_set.all():\n total_souscriptions += souscription.quantite\n total_commandes += 1\n ca += total_euros\n nb_souscriptions += total_souscriptions\n nb_commandes += total_commandes\n commandes.append([timestamp,total_commandes])\n ventes.append([timestamp,total_euros])\n\n serie_list = [\n {\n 'label': \"commandes\",\n 'data': commandes,\n 'yaxis': 1\n },\n {\n 'label': \"€\",\n 'data': ventes,\n 'yaxis': 2\n }\n ]\n\n options = {\n \"series\": {\n \"lines\": {\n \"show\": True,\n \"fill\": True\n },\n \"points\": { \"show\": True }\n },\n 'axisLabels': {\n 'show': True\n },\n \"xaxis\": {\n \"mode\": \"time\",\n \"timeformat\": \"%Hh\"\n },\n \"yaxes\": [\n {\n 'axisLabel': 'commandes',\n \"tickColor\":[\"#fff\"],\n \"tickDecimals\": 0,\n \"min\":0\n },\n {\n 'axisLabel': \"CA\",\n \"position\": \"right\",\n \"tickDecimals\": 0,\n \"min\":0\n }\n ],\n \"grid\": {\n \"hoverable\": True,\n \"borderWidth\": 1,\n \"markings\": [ { \"yaxis\": { \"from\": 0, \"to\": 300 }, \"color\": \"#fff\" }]\n },\n \"colors\": [\"rgb(138,75,117)\", \"rgb(71,160,62)\"],\n \"tooltip\":True,\n \"tooltipOpts\": {\n \"content\": \"%x : %y %s\"\n },\n \"legend\": {\n \"show\": True,\n \"labelFormatter\": None, # null or (fn: string, series object -> string)\n #\"labelBoxBorderColor\": color,\n #noColumns: number\n #'position': \"ne\" or \"nw\" or \"se\" or \"sw\"\n #margin: number of pixels or [x margin, y margin]\n #backgroundColor: null or color\n #backgroundOpacity: number between 0 and 1\n #container: null or jQuery object/DOM element/jQuery expression\n #sorted: null/false, true, \"ascending\", \"descending\", \"reverse\", or a comparator\n }\n };\n\n\n out_data = {\n 'success': True,\n 'souscriptions': serie_list,\n 'options': options,\n 'ca':ca,\n 'nb_commandes':nb_commandes,\n 'nb_souscriptions':nb_souscriptions\n }\n return out_data", "def getStatVentesMois(self, in_data):\n\n try:\n date_debut = in_data['date_debut']\n dt_debut = dateutil.parser.parse(date_debut)\n date_fin = in_data['date_fin']\n dt_fin = dateutil.parser.parse(date_fin)\n except:\n out_data = {\n 'success': False\n }\n return out_data\n\n local_dt_debut = dt_debut.astimezone (pytz.timezone('Europe/Paris'))\n debut = datetime(local_dt_debut.year, local_dt_debut.month, local_dt_debut.day)\n local_dt_fin = dt_fin.astimezone (pytz.timezone('Europe/Paris'))\n fin = datetime(local_dt_fin.year, local_dt_fin.month, local_dt_fin.day) + timedelta(days=1)\n\n commandes=[]\n ventes=[]\n day = 0\n stop = False\n ca = 0\n nb_commandes = 0\n nb_souscriptions = 0\n while not stop :\n time_debut = debut + 
timedelta(days=day)\n timestamp = calendar.timegm(time_debut.timetuple()) * 1000\n time_fin = time_debut + timedelta(days=1)\n c_list = Commande.objects.filter(etat='PAY',date__gte=time_debut,date__lt=time_fin).distinct()\n # ch_list = CommandeHistory.objects.filter(etat='PAY',date__gte=time_debut, date__lt=time_fin)\n total_euros = 0\n total_souscriptions = 0\n total_commandes = 0\n\n for commande in c_list:\n total_euros += commande.montant\n for souscription in commande.souscription_set.all():\n total_souscriptions += souscription.quantite\n total_commandes += 1\n\n ca+=total_euros\n nb_souscriptions+=total_souscriptions\n nb_commandes+=total_commandes\n commandes.append([timestamp,total_commandes])\n ventes.append([timestamp,total_euros])\n day += 1\n if (debut + timedelta(days=day))>=fin:\n stop=True\n\n serie_list = [\n {\n 'label': \"commandes\",\n 'data': commandes,\n 'yaxis': 1\n },\n {\n 'label': \"€\",\n 'data': ventes,\n 'yaxis': 2\n }\n ]\n\n options = {\n \"series\": {\n \"lines\": {\n \"show\": True,\n \"fill\": True\n },\n \"points\": { \"show\": True }\n },\n 'axisLabels': {\n 'show': True\n },\n \"xaxis\": {\n \"mode\": \"time\",\n \"timeformat\": \"%e %b\",\n \"monthNames\": [\"jan\", \"fev\", \"mar\", \"avr\", \"mai\", \"juin\", \"juil\", \"aout\", \"sept\", \"oct\", \"nov\", \"dec\"]\n },\n \"yaxes\": [\n {\n 'axisLabel': 'commandes',\n \"tickColor\":[\"#fff\"],\n \"tickDecimals\": 0,\n \"min\":0\n },\n {\n 'axisLabel': \"CA\",\n \"position\": \"right\",\n \"tickColor\":[\"#fff\"],\n \"tickDecimals\": 0,\n \"min\":0\n }\n ],\n \"grid\": {\n \"hoverable\": True,\n \"borderWidth\": 1\n },\n \"colors\": [\"rgb(138,75,117)\", \"rgb(71,160,62)\"],\n \"tooltip\":True,\n \"tooltipOpts\": {\n \"content\": \"%x : %y %s\"\n },\n \"legend\": {\n \"show\": True,\n \"labelFormatter\": None, # null or (fn: string, series object -> string)\n #\"labelBoxBorderColor\": color,\n #noColumns: number\n #'position': \"ne\" or \"nw\" or \"se\" or \"sw\"\n #margin: number of pixels or [x margin, y margin]\n #backgroundColor: null or color\n #backgroundOpacity: number between 0 and 1\n #container: null or jQuery object/DOM element/jQuery expression\n #sorted: null/false, true, \"ascending\", \"descending\", \"reverse\", or a comparator\n }\n };\n\n\n out_data = {\n 'success': True,\n 'souscriptions': serie_list,\n 'options': options,\n 'ca':ca,\n 'nb_commandes':nb_commandes,\n 'nb_souscriptions':nb_souscriptions\n }\n return out_data", "def getDate(sock):\n months = {\n \"english\": [\n \"January\",\n \"February\",\n \"March\",\n \"April\",\n \"May\",\n \"June\",\n \"July\",\n \"August\",\n \"September\",\n \"October\",\n \"November\",\n \"December\",\n ],\n \"maori\": [\n \"Kohitatea\",\n \"Hui-tanguru\",\n \"Poutu ̄-te-rangi\",\n \"Paenga-whawha\",\n \"Haratua\",\n \"Pipiri\",\n \"Hongongoi\",\n \"Here-turi-koka\",\n \"Mahuru\",\n \"Whiringa-a-nuku\",\n \"Whiringa-a-rangi\",\n \"Hakihea\",\n ],\n \"german\": [\n \"Januar\",\n \"Februar\",\n \"Marz\",\n \"April\",\n \"Mai\",\n \"Juni\",\n \"Juli\",\n \"August\",\n \"September\",\n \"Oktober\",\n \"November\",\n \"Dezember\",\n ],\n }\n\n MagicNo = 0x497E .to_bytes(2, \"big\")\n PacketType = 0x0002 .to_bytes(2, \"big\")\n if sock is s_english:\n LanguageCode = 0x0001\n flag = \"english\"\n elif sock is s_maori:\n LanguageCode = 0x0002\n flag = \"maori\"\n elif sock is s_german:\n LanguageCode = 0x0003\n flag = \"german\"\n date = datetime.datetime.today()\n LanguageCode = LanguageCode.to_bytes(2, \"big\")\n year = date.year.to_bytes(2, 
\"big\")\n language_months = months[flag]\n chosen_month = language_months[(date.month - 1)]\n month = date.month.to_bytes(1, \"big\")\n day = date.day.to_bytes(1, \"big\")\n hour = date.hour.to_bytes(1, \"big\")\n minute = date.minute.to_bytes(1, \"big\")\n if flag == \"english\":\n text = \"Today's date is {} {}, {}\".format(chosen_month, date.day, date.year)\n elif flag == \"maori\":\n text = \"Ko te ra o tenei ra ko {} {}, {}\".format(\n chosen_month, date.day, date.year\n )\n else:\n text = \"Heute ist der {} {}, {}\".format(chosen_month, date.day, date.year)\n\n lengthNow = len(text)\n length = lengthNow.to_bytes(1, \"big\")\n\n bytelist = [\n MagicNo,\n PacketType,\n LanguageCode,\n year,\n month,\n day,\n hour,\n minute,\n length,\n ]\n\n out = bytearray()\n\n for byteset in bytelist:\n out += byteset\n\n out.extend(text.encode(\"utf-8\"))\n\n return out", "def adiciona_servico(self, dados={}):\n\n self.user = self.uPersistencia.buscarUsuario(\n id_usuario=dados['id_usuario'])\n\n if not self.user.getId():\n return {'status': False,\n 'msg': 'usuario nao existe',\n 'dados': dados}\n elif self.user.getIdTipo() != 2:\n # apenas usuarios do tipo prestador (2) podem adicionar\n # servicos\n return {'status': False,\n 'msg': 'adicionar servico apenas valido para prestadores',\n 'dados': dados}\n else:\n # instancia da model de servicos\n self.srv = BuscaServicos()\n\n if self.srv.busca_servico_usuario(id_usuario=self.user.getId(),\n id_servico=dados['id_servico']):\n # usuario ja presta o servico solicitado para cadastrar.\n # entao retornamos msg de operacao nao concluida\n return {'status': 0,\n 'msg': (('%s (%d) ja presta esse servico,' +\n ' operacao cancelada.') % (self.getNome(),\n self.getId())),\n 'dados': dados}\n\n res = self.user.adicionar_servico(id_servico=dados['id_servico'])\n\n return {'status': (res and 1 or 0),\n 'msg': (res and 'servico cadastrado com sucesso' or\n 'servico nao cadastrado'),\n 'dados': dados}", "def on_btnAltaOtroServicio_clicked(self,widget):\n try:\n\n codigoReservaServicio=variables.lblCodigoReservaServicio.get_text()\n if (variables.lblCodigoReservaServicio.get_text() != \"\"):\n if(variables.entTipoServicio.get_text()!=\"\"):\n if (variables.entPrecioServicio.get_text() != \"\"):\n existeOtroServicio = False\n for registro in variables.listFactura:\n if registro[0] == variables.entTipoServicio.get_text():\n existeOtroServicio = True\n if existeOtroServicio == False:\n precio = str(variables.entPrecioServicio.get_text())\n concepto = str(variables.entTipoServicio.get_text())\n datos = (codigoReservaServicio, concepto, precio)\n funciones_servicios.insertarServicio(datos)\n\n else:\n variables.mensajeError = \"No puedes insertar otro servicio extra con el mismo nombre\"\n variables.vError.show()\n variables.entTipoServicio.set_text(\"\")\n variables.entPrecioServicio.set_text(\"\")\n\n else:\n variables.mensajeError = \"Debes insertar un precio servicio\"\n variables.vError.show()\n\n else:\n variables.mensajeError = \"Debes insertar un tipo de servicio\"\n variables.vError.show()\n funciones_servicios.listadoServicio(variables.listServicios,codigoReservaServicio)\n funciones_factura.listadoServicios(variables.listFactura,variables.codr,variables.lblHabitacionServicio.get_text())\n funciones_factura.calcularPreciosServicios()\n\n else:\n variables.mensajeError = \"Debes seleccionar un codigo de reserva\"\n variables.vError.show()\n\n except Exception as e:\n print(\"Error alta servicio\")\n print(e)", "def _process_info(resp: suds.sudsobject) -> 
dict:\n last = resp.ultimoValor\n return dict(fonte = str(resp.fonte),\n gestor = str(resp.gestorProprietario),\n freq = str(resp.periodicidadeSigla),\n nome = str(resp.nomeCompleto),\n number = int(resp.oid),\n final = dt(last.ano, last.mes, last.dia))", "def getVentasPendientes(self, idVenta=\"\", idCliente=\"\", idAbono=\"\",fecha=\"\"):\n if idVenta == \"\" and idCliente == \"\" and idAbono == \"\" and fecha == \"\":\n return self.conexion.ejecutarSQL(\"select v.id,v.fecha,v.total,tP.tipo,c.id, c.nombres || ' ' || c.primer_apellido from ventas v, tipoPagos tP, clientes c \\\n where v.id_TipoPago=tP.id and v.id_Cliente=c.id and v.estado='Pendiente'\")\n elif idVenta != \"\":\n return self.conexion.ejecutarSQL(\"select v.id,v.fecha,v.total,tP.tipo,c.id, c.nombres || ' ' || c.primer_apellido from ventas v, tipoPagos tP, clientes c \\\n where v.id_TipoPago=tP.id and v.id_Cliente=c.id and \\\n v.estado='Pendiente' and v.id=%s\"%(idVenta))\n elif idCliente != \"\":\n return self.conexion.ejecutarSQL(\"select v.id,v.fecha,v.total,tP.tipo,c.id, c.nombres || ' ' || c.primer_apellido from ventas v, tipoPagos tP, clientes c \\\n where v.id_TipoPago=tP.id and v.id_Cliente=c.id and \\\n v.estado='Pendiente' and v.id_cliente='%s'\"%(idCliente))\n\telif idAbono != \"\":\n\t return self.conexion.ejecutarSQL(\"select v.id,v.fecha,v.total,tP.tipo,c.id, c.nombres || ' ' || c.primer_apellido from ventas v, tipoPagos tP, clientes c, Abonos a\\\n where v.id_TipoPago=tP.id and v.id_Cliente=c.id and v.estado='Pendiente'\\\n\t and a.id_venta=v.id and a.id=%s\"%(idAbono))\n\telif fecha != \"\":\n\t return self.conexion.ejecutarSQL(\"select v.id,v.fecha,v.total,tP.tipo,c.id, c.nombres || ' ' || c.primer_apellido from ventas v, tipoPagos tP, clientes c \\\n where v.id_TipoPago=tP.id and v.id_Cliente=c.id and v.estado='Pendiente' and\\\n\t v.fecha between '%s' and '%s'\"%(fecha[0],fecha[1]))", "def getStatVentesAnnee(self, in_data):\n\n try:\n date_debut = in_data['date_debut']\n dt_debut = dateutil.parser.parse(date_debut)\n date_fin = in_data['date_fin']\n dt_fin = dateutil.parser.parse(date_fin)\n except:\n out_data = {\n 'success': False\n }\n return out_data\n\n local_dt_debut = dt_debut.astimezone (pytz.timezone('Europe/Paris'))\n debut = datetime(local_dt_debut.year, local_dt_debut.month,1)\n local_dt_fin = dt_fin.astimezone (pytz.timezone('Europe/Paris'))\n fin = datetime(local_dt_fin.year, local_dt_fin.month,1) + relativedelta(months=+1)\n\n commandes=[]\n ventes=[]\n month = 0\n stop = False\n ca = 0\n nb_commandes = 0\n nb_souscriptions = 0\n while not stop :\n time_debut = debut + relativedelta(months=+month)\n timestamp = calendar.timegm(time_debut.timetuple()) * 1000\n time_fin = time_debut + relativedelta(months=+1)\n # ch_list = CommandeHistory.objects.filter(etat='PAY',date__gte=time_debut, date__lt=time_fin)\n c_list = Commande.objects.filter(etat='PAY',date__gte=time_debut,date__lt=time_fin).distinct()\n total_euros = 0\n total_souscriptions = 0\n total_commandes = 0\n for commande in c_list:\n total_euros += commande.montant\n for souscription in commande.souscription_set.all():\n total_souscriptions += souscription.quantite\n total_commandes += 1\n\n ca+=total_euros\n nb_souscriptions+=total_souscriptions\n nb_commandes+=total_commandes\n commandes.append([timestamp,total_commandes])\n ventes.append([timestamp,total_euros])\n month += 1\n if (debut + relativedelta(months=+month))>=fin:\n stop=True\n\n serie_list = [\n {\n 'label': \"commandes\",\n 'data': commandes,\n 'yaxis': 1\n },\n {\n 
'label': \"€\",\n 'data': ventes,\n 'yaxis': 2\n }\n ]\n\n options = {\n \"series\": {\n \"lines\": {\n \"show\": True,\n \"fill\": True\n },\n \"points\": { \"show\": True }\n },\n 'axisLabels': {\n 'show': True\n },\n \"xaxis\": {\n \"mode\": \"time\",\n \"timeformat\": \"%b %y\",\n \"monthNames\": [\"jan\", \"fev\", \"mar\", \"avr\", \"mai\", \"juin\", \"juil\", \"aout\", \"sept\", \"oct\", \"nov\", \"dec\"]\n },\n \"yaxes\": [\n {\n 'axisLabel': 'commandes',\n \"tickColor\":[\"#fff\"],\n \"tickDecimals\": 0,\n \"min\":0\n },\n {\n 'axisLabel': \"CA\",\n \"position\": \"right\",\n \"tickDecimals\": 0,\n \"min\":0\n }\n ],\n \"grid\": {\n \"hoverable\": True,\n \"borderWidth\": 1\n },\n \"colors\": [\"rgb(138,75,117)\", \"rgb(71,160,62)\"],\n \"tooltip\":True,\n \"tooltipOpts\": {\n \"content\": \"%x : %y %s\"\n },\n \"legend\": {\n \"show\": True,\n \"labelFormatter\": None, # null or (fn: string, series object -> string)\n #\"labelBoxBorderColor\": color,\n #noColumns: number\n #'position': \"ne\" or \"nw\" or \"se\" or \"sw\"\n #margin: number of pixels or [x margin, y margin]\n #backgroundColor: null or color\n #backgroundOpacity: number between 0 and 1\n #container: null or jQuery object/DOM element/jQuery expression\n #sorted: null/false, true, \"ascending\", \"descending\", \"reverse\", or a comparator\n }\n };\n\n\n out_data = {\n 'success': True,\n 'souscriptions': serie_list,\n 'options': options,\n 'ca':ca,\n 'nb_commandes':nb_commandes,\n 'nb_souscriptions':nb_souscriptions\n }\n return out_data", "def pacMare(date, estac):\n monthList = [\"JAN\", \"FEV\", \"MAR\", \"ABR\", \"MAI\", \"JUN\", \"JUL\",\n \"AGO\", \"SET\", \"OUT\", \"NOV\", \"DEZ\"]\n an = date.year\n Mesl = date.month\n strmes = monthList[Mesl-1]\n di = date.day\n data1 = \"%s/%s/%s\" %(di, Mesl, an)\n\n DT = 1\n HI = -3\n d0 = 1\n\n estacoes = Estacao()\n constantes = Constantes()\n cadastro = Cadastro()\n combinacoes = Combinacoes()\n\n f = estacoes.data['name'].index(estac)\n Cod = estacoes.data['ID'][f]\n LA1 = estacoes.data['latG'][f]\n LA2 = estacoes.data['latM'][f]\n LO1 = estacoes.data['lonG'][f]\n LO2 = estacoes.data['lonM'][f]\n nc = estacoes.data['ncomp'][f]\n NM = estacoes.data['nm'][f]\n fu = estacoes.data['fuso'][f]\n ca = estacoes.data['carta'][f]\n hemlat = estacoes.data['hemlat'][f]\n hemlon = estacoes.data['hemlon'][f]\n \n infoList = []\n lat = base10Tobase60(lat=base60Tobase10(LA1, LA2, hemlat))\n lon = base10Tobase60(lon=base60Tobase10(LO1, LO2, hemlon))\n latSTR = u\"Lat: %s\" % lat\n lonSTR = u\"Lon: %s\" % lon\n ncSTR = u\"Componentes: %s\" %(nc)\n nmSTR = u\"Nível Médio: %s cm\" %(int(NM))\n fuSTR = u\"Fuso: - %sh\" %(int(fu))\n caSTR = u\"Número Carta: %s\" %(ca)\n\n infoList.append(latSTR)\n infoList.append(lonSTR)\n infoList.append(ncSTR)\n infoList.append(nmSTR)\n infoList.append(fuSTR)\n infoList.append(caSTR)\n\n f = constantes.data['ID'].index(Cod)\n ai = constantes.data['const'][ f:f+nc ]\n h = constantes.data['amp'][ f:f+nc ]\n G = constantes.data['phase'][ f:f+nc ]\n HH = h[:]\n GG = G[:]\n\n MK, constID = [],[]\n for k in range(nc):\n f = cadastro.data['const'].index(ai[k])\n MK.append(cadastro.data['M'][f])\n constID.append(cadastro.data['cod'][f])\n MK = str2int(MK)\n constID = str2int(constID)\n\n BB, CC = [],[]\n for k in range(nc):\n f = combinacoes.data['ID'].index(constID[k])\n aux = combinacoes.data['subs'][ f: f+MK[k] ]\n aux = str2float(aux)\n BB.append(aux)\n aux = combinacoes.data['comb'][ f: f+MK[k] ]\n aux = str2float(aux)\n CC.append(aux)\n\n cdat = 
open(web2pyPath + \"modules/data/Vdata.txt\")\n V = []\n for line in cdat.readlines():\n line2 = line.strip('\\r\\n').split(',')\n line2 = str2float(line2)\n V.append(line2)\n\n D = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]\n n = 30\n\n # calculo dos elementos astronomicos\n MB = float(an % 4)\n MC = float(an % 100)\n MD = float(an % 400)\n dd = float(di)\n\n if MB == 0 and MC != 0 or MD == 0:\n D[2] = 29\n\n i1 = float(an / 100)\n i2 = i1 - 19\n if i2 != 0:\n t1 = i2\n j1 = abs(i2)\n c3 = j1 / i2\n t2 = t1 * t1 * c3\n c1 = int(j1 * 0.75 + 0.5) * c3\n else:\n t1 = 0.\n t2 = 0.\n c1 = 0.\n\n s0 = 277.0224 + 307.8831 * t1 - 0.0011 * t2 - 13.1764 * c1\n h0 = 280.1895 + 0.7689 * t1 + 0.0003 * t2 - 0.9856 * c1\n p0 = 334.3853 + 109.034 * t1 - 0.0103 * t2 - 0.1114 * c1\n nl = 100.7902 + 134.142 * t1 - 0.0021 * t2 - 0.053 * c1\n P1 = 281.2208 + 1.7192 * t1 + 0.00045 * t2 - 0.000047 * c1\n\n for i in range(Mesl):\n di = float(di + D[i])\n\n # bug de 2001\n if an <= 2000:\n di = di - 1 \n\n IA = i1 * 100\n BI = an - IA\n\n AI = int((BI - 1) * 0.25); AI = float(AI)\n if MD == 0: AI = AI + 1\n AD = AI + di\n N2 = n * DT * 0.5\n AV = N2\n SN = AV / 10000\n b = [None]\n b.append( s0 + 129.38481 * BI + 13.1764 * AD )\n b.append( h0 - 0.23872 * BI + 0.98565 * AD )\n b.append( p0 + 40.66249 * BI + 0.1114 * AD )\n b.append(None)\n b.append( nl + 19.32818 * BI + 0.05295 * AD )\n b.append( P1 + 0.01718 * BI + 0.000047 * AD )\n b[0] = b[2] - b[1]\n b[4] = 90.\n b.append( b[3] + N2 * 0.00464183 )\n b.append( b[5] + N2 * 0.00220641 )\n b.append( b[6] + N2 * 0.00000196 )\n\n a = [ [0.,1.,0.], [0.,2.,0.], [0.,3.,0.], [0.,0.,2.], [0.,1.,2.], [1.,0.,-1.], \n [2.,-1.,-1.], [2.,-1.,0.], [2.,-1.,1.], [2.,0.,0.], [2.,1.,0.], \n [2.,2.,0.], [2.,3.,0.] ]\n\n b[0] = b[0] + HI * 14.49205211\n b[1] = b[1] + HI * 0.54902653\n b[2] = b[2] + HI * 0.0410686\n b[3] = b[3] + HI * 0.00464183\n b[5] = b[5] + HI * 0.00220641\n b[6] = b[6] + HI * 0.00000196\n\n z, Q = [], []\n for i in range(13):\n s = 0.\n for J in range(3):\n s = s + a[i][J] * b[J + 7]\n \n XX = s * 0.017453\n z.append(np.cos(XX))\n Q.append(np.sin(XX))\n\n W = []\n for i in range(37):\n WQ = 0.\n for J in range(5):\n WQ = WQ + V[i][J] * b[J]\n \n if i == 13 or i == 30:\n W.append( WQ + b[9] )\n elif i == 17 or i == 32:\n W.append( WQ - b[9] )\n else:\n W.append(WQ)\n\n F, U = [], []\n for k in range(38):\n F.append(None) # apenas para facilitar a copia do codigo em VB\n U.append(None) # depois, ambos serao popped-up\n z.insert(0, None) # idem\n Q.insert(0, None) # idem\n\n F[1] = 1\n F[2] = 1\n F[3] = 1 - 0.0307 * z[1] + 0.0007 * z[2] - 0.0534 * z[10] - 0.0218 * z[11] - 0.0059 * z[12]\n F[4] = 1 + 0.4142 * z[1] + 0.0377 * z[2] - 0.0008 * z[3] - 0.0028 * z[8] + 0.0431 * z[10] - 0.0023 * z[11]\n F[5] = 1 + 0.4141 * z[1] + 0.0384 * z[2] - 0.003 * z[7] - 0.003 * z[9] + 0.0179 * z[10] - 0.004 * z[12] - 0.0017 * z[13]\n F[6] = 1 + 0.1885 * z[1] - 0.0063 * z[2] - 0.0063 * z[12]\n F[7] = 1 + 0.1884 * z[1] - 0.0061 * z[2] - 0.0087 * z[10]\n F[8] = 1 + 0.1884 * z[1] - 0.0057 * z[2] + 0.0007 * z[6] - 0.0028 * z[10] - 0.0039 * z[12] - 0.0007 * z[13]\n F[9] = 1 + 0.1881 * z[1] - 0.0058 * z[2] - 0.0576 * z[10] + 0.0175 * z[11]\n F[10] = 1 + 0.1885 * z[1] - 0.0058 * z[2] + 0.0001 * z[8] - 0.0054 * z[10] - 0.001 * z[11]\n F[11] = 1 - 0.2454 * z[1] - 0.0142 * z[2] + 0.0445 * z[10]\n F[12] = 1 + 0.1714 * z[1] - 0.0054 * z[2] + 0.3596 * z[10] + 0.0664 * z[11] - 0.0057 * z[12]\n F[13] = 1 + 0.1905 * z[1]\n F[14] = 1 - 0.0078 * z[1]\n F[15] = 1 - 0.0112 * z[1] + 0.0007 * 
z[2] - 0.0004 * z[4] - 0.0015 * z[10] - 0.0003 * z[11]\n F[16] = 1\n F[17] = 1 + 0.1158 * z[1] - 0.0029 * z[2] + 0.0001 * z[11]\n F[18] = 1 + 0.019 * z[1]\n F[19] = 1 - 0.0384 * z[1] - 0.0185 * z[2] + 0.0132 * z[4] + 0.0105 * z[8] + 0.0344 * z[10]\n F[20] = 1 + 0.1676 * z[1] + 0.03 * z[11]\n F[21] = 1 + 0.1685 * z[1] - 0.0047 * z[2] - 0.0152 * z[10] - 0.0098 * z[11] - 0.0057 * z[12]\n F[22] = 1 + 0.6398 * z[1] + 0.1342 * z[2] + 0.008500001 * z[3] + 0.0296 * z[8] + 0.1496 * z[10] - 0.0037 * z[11]\n F[23] = 1 - 0.0337 * z[1]\n F[24] = 1 - 0.0374 * z[1] - 0.061 * z[12]\n F[25] = 1 - 0.0375 * z[1]\n F[26] = 1 - 0.0373 * z[1] + 0.0004 * z[2] + 0.0007 * z[6] - 0.0039 * z[12]\n F[27] = 1 - 0.0373 * z[1] + 0.0042 * z[10] - 0.0036 * z[11]\n F[28] = 1 - 0.0373 * z[1] + 0.0004 * z[2] + 0.0005 * z[10] - 0.0001 * z[11]\n F[29] = 1 - 0.0448 * z[1]\n F[30] = 1 - 0.0367 * z[1] + 0.0047 * z[8] - 0.2505 * z[10] - 0.1102 * z[11] - 0.0156 * z[12]\n F[31] = 1\n F[32] = 1 - 0.0022 * z[1]\n F[33] = 1 - 0.2535 * z[4] + 0.0141 * z[5]\n F[34] = 1 + 0.2852 * z[1] + 0.0324 * z[2]\n F[35] = 1 + 0.4389 * z[1] + 0.0487 * z[2] + 0.0487 * z[10] + 0.065 * z[11]\n F[36] = 1 + 0.4168 * z[1] + 0.0466 * z[2] - 0.078 * z[10]\n F[37] = 1 - 0.0564 * z[1]\n\n U[1] = 0\n U[2] = 0\n U[3] = 0.0007 * Q[1] - 0.0008 * Q[2] - 0.0534 * Q[10] - 0.0218 * Q[11] - 0.0059 * Q[12]\n U[4] = 0.4142 * Q[1] + 0.0377 * Q[2] - 0.0008 * Q[3] + 0.0027 * Q[8] - 0.0432 * Q[10] + 0.0022 * Q[11]\n U[5] = 0.4142 * Q[1] + 0.0384 * Q[2] + 0.003 * Q[7] + 0.003 * Q[9] - 0.018 * Q[10] - 0.004 * Q[12] - 0.0017 * Q[13]\n U[6] = -0.1885 * Q[1] + 0.0062 * Q[2] + 0.0062 * Q[12]\n U[7] = -0.1884 * Q[1] + 0.006 * Q[2] - 0.0087 * Q[10]\n U[8] = -0.1884 * Q[1] + 0.0057 * Q[2] - 0.0008 * Q[6] - 0.0028 * Q[10] + 0.0039 * Q[12] + 0.0007 * Q[13]\n U[9] = -0.1882 * Q[1] + 0.0057 * Q[2] - 0.0576 * Q[10] + 0.0175 * Q[11]\n U[10] = -0.1885 * Q[1] + 0.0057 * Q[2] + 0.0001 * Q[8] - 0.0064 * Q[10] - 0.001 * Q[11]\n U[11] = -0.1886 * Q[1] - 0.0142 * Q[2] - 0.0446 * Q[10]\n U[12] = -0.2294 * Q[1] - 0.3596 * Q[10] - 0.0665 * Q[11] + 0.0057 * Q[12]\n U[13] = 0.246 * Q[1]\n U[14] = 0.0077 * Q[1]\n U[15] = 0.0111 * Q[1] - 0.0008 * Q[2] - 0.0004 * Q[4] - 0.0015 * Q[10] - 0.0003 * Q[11]\n U[16] = 0\n U[17] = 0.1554 * Q[1] - 0.003 * Q[2] - 0.0002 * Q[11]\n U[18] = 0.019 * Q[1]\n U[19] = -0.0384 * Q[1] - 0.0185 * Q[2] - 0.0132 * Q[4] - 0.0106 * Q[8] - 0.0344 * Q[10]\n U[20] = 0.231 * Q[1] - 0.03 * Q[11]\n U[21] = 0.2274 * Q[1] - 0.0047 * Q[2] - 0.0152 * Q[10] - 0.0098 * Q[11] - 0.0057 * Q[12]\n U[22] = 0.6398 * Q[1] + 0.1342 * Q[2] - 0.0296 * Q[8] - 0.1497 * Q[10] + 0.0037 * Q[11]\n U[23] = 0.0373 * Q[1]\n U[24] = 0.0373 * Q[1] + 0.006 * Q[12]\n U[25] = 0.0373 * Q[1] - 0.0005 * Q[2] - 0.0008 * Q[6] + 0.0039 * Q[12]\n U[26] = 0.0373 * Q[1] - 0.0005 * Q[2] - 0.0008 * Q[6] + 0.0039 * Q[12]\n U[27] = 0.0373 * Q[1] + 0.0042 * Q[10] + 0.0036 * Q[11]\n U[28] = 0.0373 * Q[1] - 0.0005 * Q[2] + 0.0005 * Q[9] + 0.0001 * Q[11]\n U[29] = 0.0487 * Q[1]\n U[30] = 0.0366 * Q[1] + 0.0047 * Q[8] - 0.2505 * Q[9] - 0.1102 * Q[11]\n U[31] = 0\n U[32] = -0.0022 * Q[1]\n U[33] = -0.2535 * Q[4] + 0.0141 * Q[5]\n U[34] = 0.3108 * Q[1] + 0.0324 * Q[2]\n U[35] = 0.4389 * Q[1] + 0.0487 * Q[2] - 0.0488 * Q[9] - 0.065 * Q[11]\n U[36] = 0.4542 * Q[1] + 0.0466 * Q[2] - 0.0078 * Q[10]\n U[37] = 0.0563 * Q[1]\n\n z.pop(0)\n Q.pop(0)\n F.pop(0)\n U.pop(0)\n AV = n * DT * 0.5\n\n for i in range(37):\n XX = F[i]\n YY = U[i]\n F[i] = np.sqrt( XX ** 2 + YY ** 2 )\n U[i] = W[i] + np.arctan(YY / XX) * 57.29578\n U[i] = U[i] - 
int(U[i] / 360) * 360\n if U[i] < 0: U[i] = U[i] + 360\n\n\n # calculo das alturas\n HC, GC = [],[]\n for k in range(110):\n HC.append(0)\n GC.append(0)\n\n for i in range(nc):\n s = 0.\n WQ = 0.\n T = 1.\n\n for J in range(MK[i]):\n jj = int(BB[i][J])\n kk = CC[i][J]\n T = T * F[jj-1] ** abs(kk)\n s = s + U[jj-1] * kk\n WQ = WQ + V[jj-1][5] * kk\n ZQ = s\n \n h[i] = T * h[i]\n s = s - G[i]\n if s < 0: s = s + 360.\n G[i] = s\n try: \n W[i] = WQ * DT\n except IndexError:\n W.append( WQ * DT )\n HC[i] = T * HC[i]\n ZQ = ZQ - GC[i]\n if ZQ < 0: ZQ = ZQ + 360.\n GC[i] = ZQ\n\n x, Y2, y = [],[],[]\n MM = 0\n for i in range(n):\n s = 0.\n ZQ = 0.\n\n for j in range(nc):\n AA = G[j] * 0.017453\n s = s + h[j] * np.cos(AA)\n G[j] = G[j] + W[j]\n AC = GC[j] * 0.017453\n ZQ = ZQ + HC[j] * np.cos(AC)\n GC[j] = GC[j] + W[j]\n\n x.append(s + NM)\n Y2.append(x[i])\n y.append(ZQ + MM)\n\n x = np.array(x, dtype=np.float32)\n x = x/100.\n h = x[3:-3]\n hours = np.arange(24)\n years, months, days = 0*hours+an, 0*hours+Mesl, 0*hours+int(dd)\n time = []\n for year, month, day, hour in zip(years, months, days, hours):\n time.append( dt.datetime(year, month, day, hour) )\n\n time = mpldates.date2num(time)\n time2 = np.linspace(time[0], time[-1], 500)\n\n interp = interp1d(time, h, kind='cubic')\n h2 = interp(time2)\n\n dh = np.gradient(h2)\n dhSign = dh > 0\n # gathering pairs\n pairs = []\n for k in range(len(dh)-1):\n pairs.append([dhSign[k], dhSign[k+1]])\n\n f = []\n for k in range(len(pairs)):\n if pairs[k] == [True, False] or pairs[k] == [False, True]:\n f.append(k)\n\n datas = mpldates.num2date(time2[f])\n hora = []\n for data in datas:\n hora.append(\"%02i:%02i\" %(data.hour, data.minute))\n altura = h2[f]\n altura = ['%.1f' % a for a in altura]\n\n return infoList, hora, altura, time2, h2", "def dibujo(self, req):\n\n gridmap_points = rospy.ServiceProxy('gridmap_points', GridmapPoints)\n\n resp = gridmap_points()\n rospy.loginfo('Corriendo servicio gridmap points\"{}\"'.format(resp.points))\n\n rospy.loginfo('Corriendo servicio dibujo')\n gridmap_preprocesado, w, h = self.get_graph(resp.points)\n\n self.vrep_to_gridmap(req.inicio, w, h)\n\n self.ruta = []\n\n derecha, izquierda, up, down, nodo = self.center()\n if req.figura == 'pato':\n self.pato(derecha, izquierda, up, down, nodo)\n elif req.figura == 'pez':\n self.pez(derecha, izquierda, up, down, nodo)\n elif req.figura == 'jirafa':\n self.jirafa(derecha, izquierda, up, down, nodo)\n elif req.figura == 'gato':\n self.gato(derecha, izquierda, up, down, nodo)\n else:\n rospy.logerr('Metodo desconocido \"{}\"'.format(req.metodo))\n return None\n\n self.save_ruta(gridmap_preprocesado)\n\n response = DibujoResponse()\n response.rutax, response.rutay = self.gridmap_to_vrep(w, h)\n return response", "def on_btnBajaServicio_clicked(self, widget):\n try:\n codigoReservaServicio = variables.lblCodigoReservaServicio.get_text()\n if(variables.codigoServicio!= \"\"):\n funciones_servicios.eliminarServicio(variables.codigoServicio)\n funciones_servicios.listadoServicio(variables.listServicios, codigoReservaServicio)\n funciones_factura.listadoServicios(variables.listFactura,variables.codr,variables.lblHabitacionServicio.get_text())\n funciones_factura.calcularPreciosServicios()\n else:\n variables.mensajeError = \"Debes seleccionar un servicio\"\n variables.vError.show()\n\n\n except:\n print (\"error baja servicio\")", "def __busca_notas(self, tipo_busca, intervalo_inicial, intervalo_final, serie, \r\n\t\t chave_acesso):\r\n info_consulta = \"11\" # padrao 
1\r\n\tresposta = \" \"*231 # padrao 230\r\n\t#resposta = None \r\n\r\n status = self.dll.rRetornarInformacao_NFCe_Daruma(tipo_busca, \r\n intervalo_inicial, intervalo_final, serie, chave_acesso, \r\n\t info_consulta, resposta) \r\n\tif status !=1:\r\n\t if status == -1:\r\n\t\traise Exception(\"-1: Erro encontrado na execucao do metodo\")\r\n elif status == -2:\r\n\t\traise Exception(\"-2: Chave Invalida\")\r\n\t elif status == -3:\r\n\t\traise Exception(\"-3: Falha no schema XML.\")\r\n\t elif status == -4:\r\n\t\traise Exception(\"-4: XML fora do padrao\")\r\n\t elif status == -5:\r\n\t\traise Exception(\"-5: Erro generico\")\r\n\t elif status == -8:\r\n\t\traise Exception(\"-8: Usuario nao Autorizado\")\r\n elif status == -9:\r\n\t\traise Exception(\"-9: Usuario nao Licenciado\")\r\n\t elif status == -10:\r\n\t\traise Exception(\"-10: Documento e Ambiente nao identificados\")\r\n\t elif status == -13:\r\n\t\traise Exception(\"-13: Tipo de Documento nao identificado\")\r\n elif status == -14:\r\n\t\traise Exception(\"-14: Erro retornado pelo WebService.\")\r\n elif status == -52:\r\n\t\traise Exception(\"-52: Erro ao gravar em arquivo temporario\")\r\n elif status == -99:\r\n\t\traise Exception(\"-99: Parametros invalidos ou ponteiro nulo de pametros\")\r\n elif status == -99:\r\n\t\traise Exception(\"-103: Nao foram encontradas as DLLs auxiliaes\")\r\n\t else:\r\n\t\traise Exception(\"Erro ao executar o metodo Retornar Informacao.\")", "def get_service(self):", "def __get_data(self):\n ips = self.server.JUGADORES.keys()\n convida = list(ips)\n retorno = \"\"\n for ip in ips:\n nick = self.server.JUGADORES[ip]['nick']\n tanque = self.server.JUGADORES[ip]['path']\n energia = self.server.JUGADORES[ip]['energia']\n vidas = self.server.JUGADORES[ip]['vidas']\n puntos = self.server.JUGADORES[ip]['puntos']\n posicion = self.server.JUGADORES[ip]['pos']\n bala = self.server.JUGADORES[ip]['bala']\n\n datos = \"%s,%s,%s,%s,%s,%s,%s,%s\" % (ip, nick, tanque,\n posicion, vidas, energia, puntos, bala)\n\n explosion = self.server.JUGADORES[ip]['explosiones'].get(\n self.client_address[0], False)\n if explosion:\n datos = \"%s,%s\" % (datos, explosion)\n del(self.server.JUGADORES[ip][\n 'explosiones'][self.client_address[0]])\n\n retorno = \"%s%s||\" % (retorno, datos)\n if vidas == 0:\n convida.remove(ip)\n\n if len(ips) > 1 and len(convida) == 1:\n return \"END\"\n else:\n return retorno.strip()", "def getCoeficienteEstabilizacionDeReferencia(self):\n #Obtener la url de descarga del cvs\n urlPackage=\"https://datos.gob.ar/api/3/action/package_show?id=sspm-cer-uva-uvi\"\n s=requests.get(urlPackage).content\n objJson = json.loads(s)\n resultado = objJson['result']['resources']\n selector = 0\n ultimoResultado = resultado[selector]\n urlDescarga = ultimoResultado['url']\n descripcion = ultimoResultado['description']\n print(\"Descargando: {}\".format(descripcion))\n print(\"Archivo: {}\".format(urlDescarga))\n \n #Descargar la url con cvs y generar pandas dataframe\n contenidoCVS = requests.get(urlDescarga).content\n flujoCVS = io.StringIO(contenidoCVS.decode('utf-8'))\n df_temp = pd.read_csv(flujoCVS)\n \n #transform string to datetime\n df_temp['indice_tiempo'] = pd.to_datetime(df_temp['indice_tiempo'], format='%Y-%m-%d', errors='ignore')\n df_temp['indice_tiempo'] = df_temp['indice_tiempo'].dt.date\n #set index\n df_temp.set_index('indice_tiempo', inplace=True)\n \n return df_temp", "def le_infotudo(info_file):\n \n infoarq = open(info_file, 'r')\n infodata = infoarq.read()\n infoarq.close()\n \n 
info_temp = infodata.split('\\n')\n \n # ... lendo data de inicio da simulacao\n info_date = info_temp[5]\n info_date = info_date.split(' ')\n \n lista = list()\n for i in range(len(info_date)):\n if info_date[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_date[j]\n \n dstart = datetime(int(info_date[2]), int(info_date[1]), int(info_date[0]), int(info_date[3]))\n \n # ... lendo nt e dt\n info_timestep = info_temp[8]\n info_timestep = info_timestep.split(' ')\n \n lista = list()\n for i in range(len(info_timestep)):\n if info_timestep[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_timestep[j]\n \n nt = info_timestep[0]\n dt = info_timestep[1]\n dt = dt.split(\".\")\n dt = dt[0]\n \n # ... lendo nc\n info_nc = info_temp[11]\n info_nc = info_nc.split(' ')\n \n lista = list()\n for i in range(len(info_nc)):\n if info_nc[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_nc[j]\n \n nc = info_nc[0]\n \n return int(nc), int(nt), int(dt), dstart", "def le_infotudo(info_file):\n \n infoarq = open(info_file, 'r')\n infodata = infoarq.read()\n infoarq.close()\n \n info_temp = infodata.split('\\n')\n \n # ... lendo data de inicio da simulacao\n info_date = info_temp[5]\n info_date = info_date.split(' ')\n \n lista = list()\n for i in range(len(info_date)):\n if info_date[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_date[j]\n \n dstart = datetime(int(info_date[2]), int(info_date[1]), int(info_date[0]), int(info_date[3]))\n \n # ... lendo nt e dt\n info_timestep = info_temp[8]\n info_timestep = info_timestep.split(' ')\n \n lista = list()\n for i in range(len(info_timestep)):\n if info_timestep[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_timestep[j]\n \n nt = info_timestep[0]\n dt = info_timestep[1]\n dt = dt.split(\".\")\n dt = dt[0]\n \n # ... lendo nc\n info_nc = info_temp[11]\n info_nc = info_nc.split(' ')\n \n lista = list()\n for i in range(len(info_nc)):\n if info_nc[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_nc[j]\n \n nc = info_nc[0]\n \n return int(nc), int(nt), int(dt), dstart", "def le_infotudo(info_file):\n \n infoarq = open(info_file, 'r')\n infodata = infoarq.read()\n infoarq.close()\n \n info_temp = infodata.split('\\n')\n \n # ... lendo data de inicio da simulacao\n info_date = info_temp[5]\n info_date = info_date.split(' ')\n \n lista = list()\n for i in range(len(info_date)):\n if info_date[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_date[j]\n \n dstart = datetime(int(info_date[2]), int(info_date[1]), int(info_date[0]), int(info_date[3]))\n \n # ... lendo nt e dt\n info_timestep = info_temp[8]\n info_timestep = info_timestep.split(' ')\n \n lista = list()\n for i in range(len(info_timestep)):\n if info_timestep[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_timestep[j]\n \n nt = info_timestep[0]\n dt = info_timestep[1]\n dt = dt.split(\".\")\n dt = dt[0]\n \n # ... lendo nc\n info_nc = info_temp[11]\n info_nc = info_nc.split(' ')\n \n lista = list()\n for i in range(len(info_nc)):\n if info_nc[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_nc[j]\n \n nc = info_nc[0]\n \n return int(nc), int(nt), int(dt), dstart", "def le_infotudo(info_file):\n \n infoarq = open(info_file, 'r')\n infodata = infoarq.read()\n infoarq.close()\n \n info_temp = infodata.split('\\n')\n \n # ... 
lendo data de inicio da simulacao\n info_date = info_temp[5]\n info_date = info_date.split(' ')\n \n lista = list()\n for i in range(len(info_date)):\n if info_date[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_date[j]\n \n dstart = datetime(int(info_date[2]), int(info_date[1]), int(info_date[0]), int(info_date[3]))\n \n # ... lendo nt e dt\n info_timestep = info_temp[8]\n info_timestep = info_timestep.split(' ')\n \n lista = list()\n for i in range(len(info_timestep)):\n if info_timestep[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_timestep[j]\n \n nt = info_timestep[0]\n dt = info_timestep[1]\n dt = dt.split(\".\")\n dt = dt[0]\n \n # ... lendo nc\n info_nc = info_temp[11]\n info_nc = info_nc.split(' ')\n \n lista = list()\n for i in range(len(info_nc)):\n if info_nc[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_nc[j]\n \n nc = info_nc[0]\n \n return int(nc), int(nt), int(dt), dstart", "def le_infotudo(info_file):\n \n infoarq = open(info_file, 'r')\n infodata = infoarq.read()\n infoarq.close()\n \n info_temp = infodata.split('\\n')\n \n # ... lendo data de inicio da simulacao\n info_date = info_temp[5]\n info_date = info_date.split(' ')\n \n lista = list()\n for i in range(len(info_date)):\n if info_date[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_date[j]\n \n dstart = datetime(int(info_date[2]), int(info_date[1]), int(info_date[0]), int(info_date[3]))\n \n # ... lendo nt e dt\n info_timestep = info_temp[8]\n info_timestep = info_timestep.split(' ')\n \n lista = list()\n for i in range(len(info_timestep)):\n if info_timestep[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_timestep[j]\n \n nt = info_timestep[0]\n dt = info_timestep[1]\n dt = dt.split(\".\")\n dt = dt[0]\n \n # ... lendo nc\n info_nc = info_temp[11]\n info_nc = info_nc.split(' ')\n \n lista = list()\n for i in range(len(info_nc)):\n if info_nc[i] == '':\n lista.append(i)\n \n for j in reversed(lista):\n del info_nc[j]\n \n nc = info_nc[0]\n \n return int(nc), int(nt), int(dt), dstart", "def print_facturas(self, data):\n #pruebas = self.env['calling'].search([('state', '=', 'active')])\n # self.nuevo = self.env['account.invoice'].search([('type','=','out invoice')])\n\n\n if self.date_from and self.date_to:\n fecha_inicio = self.date_from\n fecha_fin = self.date_to\n\n if datetime.strptime(fecha_inicio, DATE_FORMAT) >= datetime.strptime(fecha_fin, DATE_FORMAT):\n raise ValidationError('Advertencia! La fecha de inicio no puede ser superior a la fecha final')\n\n fecha_actual = str(date.today())\n if datetime.strptime(fecha_inicio, DATE_FORMAT) > datetime.strptime(fecha_actual, DATE_FORMAT):\n raise ValidationError('Advertencia! La fecha de inicio no puede ser mayor a la fecha actual')\n elif datetime.strptime(fecha_fin, DATE_FORMAT) > datetime.strptime(fecha_actual, DATE_FORMAT):\n raise ValidationError('Advertencia! 
La fecha de final no puede ser mayor a la fecha actual')\n\n calling_obj = self.env['calling']\n calling_ids = calling_obj.search(\n [('calling_date', '>=', fecha_inicio), ('calling_date', '<=', fecha_fin)])\n if calling_ids:\n ids = []\n for id in calling_ids:\n ids.append(id.id)\n datas = self.read(self.ids)[0]\n data = {\n 'ids': ids,\n 'model': 'report.tys_calling.report_services_sede',\n 'form': {\n 'datas': datas,\n 'date_from': self.date_from,\n 'date_to': self.date_to,\n 'sede': self.sede.id,\n 'all':self.all,\n },\n 'context': self._context\n }\n return self.env.ref('tys_calling.report_services_for_sede').report_action(self, data=data, config=False)\n else:\n raise ValidationError('Advertencia! No existen llamadas entre las fechas seleccionadas')\n\n\n # use `module_name.report_id` as reference.\n # `report_action()` will call `get_report_values()` and pass `data` automatically.", "def get_curso_fecha_inicio(request):\n if request.method == 'GET':\n serializer = CursoSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n if \"curso\" in serializer.validated_data:\n try:\n curso = Curso.nodes.get(nombre__icontains=serializer.validated_data[\"curso\"])\n resp = {\"nombre_curso\": curso.__dict__[\"nombre\"], \"fecha_inicio\": curso.__dict__[\"fecha_inicio\"]}\n return JsonResponse(resp, status=status.HTTP_200_OK)\n except Curso.DoesNotExist:\n print(\"Cant find (CURSO) \", serializer.validated_data[\"curso\"])\n return JsonResponse({\"error\": \"(CURSO) \" + serializer.validated_data[\"curso\"] + \" not found \"},\n status=status.HTTP_404_NOT_FOUND)\n if \"codigo\" in serializer.validated_data:\n try:\n curso = Curso.nodes.get(cod__icontains=serializer.validated_data[\"codigo\"])\n resp = {\"nombre_curso\": curso.__dict__[\"nombre\"], \"fecha_inicio\": curso.__dict__[\"fecha_inicio\"]}\n return JsonResponse(resp, status=status.HTTP_200_OK)\n except Curso.DoesNotExist:\n print(\"Cant find (CURSO) \", serializer.validated_data[\"codigo\"])\n return JsonResponse({\"error\": \"(CURSO) \" + serializer.validated_data[\"codigo\"] + \" not found \"},\n status=status.HTTP_404_NOT_FOUND)\n if \"synonym\" in serializer.validated_data:\n try:\n sinonimo = Sinonimo.nodes.get(sinonimo__icontains=serializer.validated_data[\"synonym\"])\n print(sinonimo.__dict__[\"curso\"][\"nombre\"])\n resp = {\"nombre_curso\": \"WIP\"}\n # resp = {\"nombre_curso\": curso.__dict__[\"nombre\"], \"fecha_inicio\": curso.__dict__[\"fecha_inicio\"]}\n return JsonResponse(resp, status=status.HTTP_200_OK)\n except Curso.DoesNotExist:\n print(\"Cant find (CURSO) \", serializer.validated_data[\"synonym\"])\n return JsonResponse({\"error\": \"(CURSO) \" + serializer.validated_data[\"synonym\"] + \" not found \"},\n status=status.HTTP_404_NOT_FOUND)\n return JsonResponse(serializer.errors, status=status.HTTP_404_NOT_FOUND)", "def generate_request(params):\n \n headers = {'Bmx-Token': myToken}\n \n\n url_udis=\"https://www.banxico.org.mx/SieAPIRest/service/v1/series/{series}/datos/{fecha_i}/{fecha_f}\".format(\n fecha_i=params['fecha_ini'],\n fecha_f=params.get(\"fecha_fin\"),\n series=params.get(\"serie\")\n )\n \n \n response = requests.get(url_udis, headers=headers) \n \n if response.status_code == 200:\n return response.json()", "def deleta_servico(self, dados={}):\n\n self.user = self.uPersistencia.buscarUsuario(\n id_usuario=dados['id_usuario'])\n\n if not self.user.getId():\n return {'status': False,\n 'msg': 'usuario nao existe',\n 'dados': dados}\n elif self.user.getIdTipo() != 2:\n # apenas 
usuarios do tipo prestador (2) podem adicionar\n # servicos\n return {'status': False,\n 'msg': 'deletar servicos valido apenas para prestadores',\n 'dados': dados}\n else:\n res = self.user.deletar_servico(id_servico=dados['id_servico'])\n\n return {'status': (res and 1 or 0),\n 'msg': (res and 'servico deletado com sucesso' or\n 'servico nao cadastrado'),\n 'dados': dados}", "def collect_data():\n\n \"Aqui va el codigo de alberto para recoger los datos que puede venir en forma de diccionario\"\n #TODO: Función para recoger los datos de los bms y meterlos en diccionarios (Alberto jr.)\n\n bms1 = dict()\n bms2 = dict()\n bms3 = dict()\n general = dict()\n\n\n # Ejemplos de datos para meter en los diccionarios\n\n temperature = 35.5\n voltage1 = 15.2\n voltage2 = 14.8\n date = time.strftime(\"%Y-%m-%d\") # Current date\n t = time.strftime(\"%H:%M:%S\") # Current time\n\n return bms1, bms2, bms3, general", "def getCambiosQafectanCaja(self, fechaInicio, fechaFin, usuarioColaborador=\"\"):\n\tif usuarioColaborador == \"\" and fechaInicio == \"\" and fechaFin == \"\":\n\t return self.conexion.ejecutarSQL(\"\"\"select c.id, c.fecha, c.hora, c.codigo_Producto_entra, c.codigo_Producto_sale, c.id_Venta, c.excedente, c.usuario_Colaborador\n from cambios c, ventas v\n where c.id_Venta = v.id\n and c.fecha != v.fecha\"\"\")\n elif usuarioColaborador == \"\":\n return self.conexion.ejecutarSQL(\"\"\"select c.id, c.fecha, c.hora, c.codigo_Producto_entra, c.codigo_Producto_sale, c.id_Venta, c.excedente, c.usuario_Colaborador\n from cambios c, ventas v\n where c.id_Venta = v.id\n and c.fecha != v.fecha\n and c.fecha between '%s' and '%s'\"\"\" %(fechaInicio,fechaFin))\n else:\n return self.conexion.ejecutarSQL(\"\"\"select c.id, c.fecha, c.hora, c.codigo_Producto_entra, c.codigo_Producto_sale, c.id_Venta, c.excedente, c.usuario_Colaborador\n from cambios c, ventas v\n where c.id_Venta = v.id\n and c.fecha != v.fecha\n and c.fecha between '%s' and '%s'\n and c.usuario_Colaborador = '%s'\"\"\" %(fechaInicio,fechaFin,usuarioColaborador))", "def __init__(self, periodo, reunion, sesion, tipo_sesion, fecha):\n self.periodo = periodo\n self.reunion = reunion\n self.sesion = sesion\n self.tipo_sesion = tipo_sesion\n self.fecha = fecha\n\n self.html_version_taquigrafica = None\n self.dialogo = None\n self.intervenciones = []\n self.intervenciones_por_diputado = {}", "def getVentas(self, fechaInicio, fechaFin, usuarioColaborador=\"\"):\n if usuarioColaborador == \"\":\n return self.conexion.ejecutarSQL(\"select v.id, v.fecha, v.hora, v.subtotal, v.totalIVA, v.total, v.estado, v.usuario_Colaborador, \\\n v.id_Cliente, v.id_TipoPago, tP.tipo from ventas v, tipoPagos tP where v.id_tipoPago=tP.id \\\n and v.fecha between '%s' and '%s'\" %(fechaInicio,fechaFin))\n else:\n return self.conexion.ejecutarSQL(\"select v.id, v.fecha, v.hora, v.subtotal, v.totalIVA, v.total, v.estado, v.usuario_Colaborador, \\\n v.id_Cliente, v.id_TipoPago, tP.tipo from ventas v, tipoPagos tP where v.id_tipoPago=tP.id \\\n and v.fecha between '%s' and '%s' \\\n and usuario_colaborador='%s'\" %(fechaInicio,fechaFin,usuarioColaborador))", "def getAbonos(self, fechaInicio, fechaFin, usuarioColaborador=\"\"):\n\tif usuarioColaborador == \"\" and fechaInicio == \"\" and fechaFin == \"\":\n\t return self.conexion.ejecutarSQL(\"select a.id, a.fecha, a.hora, a.valor, a.id_venta, a.usuario_Colaborador, a.id_TipoPago, tP.tipo\\\n from abonos a, tipoPagos tP where a.id_tipoPago=tP.id\")\n elif usuarioColaborador == \"\":\n return 
self.conexion.ejecutarSQL(\"select a.id, a.fecha, a.hora, a.valor, a.id_venta, a.usuario_Colaborador, a.id_TipoPago, tP.tipo\\\n from abonos a, tipoPagos tP where a.id_tipoPago=tP.id \\\n and a.fecha between '%s' and '%s'\" %(fechaInicio,fechaFin))\n else:\n return self.conexion.ejecutarSQL(\"select a.id, a.fecha, a.hora, a.valor, a.id_venta, a.usuario_Colaborador, a.id_TipoPago, tP.tipo\\\n from abonos a, tipoPagos tP where a.id_tipoPago=tP.id \\\n and a.fecha between '%s' and '%s' \\\n and usuario_colaborador='%s'\" %(fechaInicio,fechaFin,usuarioColaborador))" ]
[ "0.5977341", "0.56048363", "0.55636686", "0.55508226", "0.5490343", "0.5456728", "0.5362048", "0.5352262", "0.5350753", "0.5344096", "0.53391474", "0.53209996", "0.5319985", "0.53018814", "0.52821594", "0.5279239", "0.52485096", "0.52485096", "0.52485096", "0.52485096", "0.52485096", "0.5239139", "0.522316", "0.5221975", "0.5207311", "0.52023226", "0.517194", "0.5167027", "0.5134588", "0.5111634" ]
0.6173133
0
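The document above finishes by handing a `data` dict to `report_action()` and notes that `get_report_values()` receives it. A minimal sketch of that consuming side, assuming an Odoo 11+ style report `AbstractModel` (where the hook is spelled `_get_report_values`); everything here besides the `report.tys_calling.report_services_sede` name and the `calling` model is an assumption:

```python
# Minimal sketch, assuming the Odoo 11+ report API; the exact hook name
# varies across Odoo versions, so treat this as illustrative.
from odoo import api, models


class ReportServicesSede(models.AbstractModel):
    # _name must be 'report.' + the QWeb template's external id,
    # matching the 'model' key built in the wizard above.
    _name = 'report.tys_calling.report_services_sede'

    @api.model
    def _get_report_values(self, docids, data=None):
        # `data` is the dict the wizard passed to report_action().
        docs = self.env['calling'].browse(data['ids'])
        return {
            'doc_ids': data['ids'],
            'doc_model': 'calling',
            'docs': docs,
            'data': data['form'],
        }
```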
Transform PoseStamped detection into base_link frame. Returns PoseStamped in base_link frame.
def _transform_to_base_link(self, detection, timeout=3.0): self.swarmie.xform.waitForTransform( self.rovername + '/base_link', detection.pose.header.frame_id, detection.pose.header.stamp, rospy.Duration(timeout) ) return self.swarmie.xform.transformPose(self.rovername + '/base_link', detection.pose)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def makePoseStampedFromGraspFrame(self, graspFrame):\n iiwaLinkEEFrame = self.getIiwaLinkEEFrameFromGraspFrame(graspFrame)\n poseDict = spartanUtils.poseFromTransform(iiwaLinkEEFrame)\n poseMsg = rosUtils.ROSPoseMsgFromPose(poseDict)\n poseStamped = geometry_msgs.msg.PoseStamped()\n poseStamped.pose = poseMsg\n poseStamped.header.frame_id = \"base\"\n\n return poseStamped", "def transform_to_object_frame(self, pose: Pose,\n bullet_object: 'bullet_world.Object', link_name: str = None) -> Union[Pose, None]:\n if link_name:\n target_frame = bullet_object.get_link_tf_frame(link_name)\n else:\n target_frame = bullet_object.tf_frame\n return self.transform_pose(pose, target_frame)", "def annotated_frame(self, original_frame):\n frame = original_frame.copy()\n\n if self.pupils_located:\n color = (0, 255, 0)\n x_left, y_left = self.pupil_left_coords()\n x_right, y_right = self.pupil_right_coords()\n cv2.line(frame, (x_left - 5, y_left), (x_left + 5, y_left), color)\n cv2.line(frame, (x_left, y_left - 5), (x_left, y_left + 5), color)\n cv2.line(frame, (x_right - 5, y_right), (x_right + 5, y_right), color)\n cv2.line(frame, (x_right, y_right - 5), (x_right, y_right + 5), color)\n\n return frame", "def annotated_frame(self):\n frame = self.frame.copy()\n if self.pupils_located:\n color = (0, 255, 0)\n x_left, y_left = self.pupil_left_coords()\n x_right, y_right = self.pupil_right_coords()\n # cv2.line(frame, (x_left - 5, y_left), (x_left + 5, y_left), color)\n # cv2.line(frame, (x_left, y_left - 5), (x_left, y_left + 5), color)\n # cv2.line(frame, (x_right - 5, y_right), (x_right + 5, y_right), color)\n # cv2.line(frame, (x_right, y_right - 5), (x_right, y_right + 5), color)\n return frame", "def add_pose(self):\n base_pose = PoseStamped()\n try:\n # Convert pose to base frame\n self.data['poses']['marker'].header.stamp = \\\n self.tfl.getLatestCommonTime(self.params['world'],\n self.data['poses']\n ['marker'].header.frame_id)\n base_pose = self.tfl.transformPose(self.params['world'],\n self.data['poses']['marker'])\n except (TfE, LookupException, ConnectivityException):\n Me.error_message(\"Error transforming pose \" +\n self.data['poses']['marker'].header.frame_id)\n\n self.data['poses']['path'].header.frame_id = self.params['world']\n self.data['poses']['path'].header.stamp = Time.now()\n self.data['poses']['path'].poses.append(deepcopy(base_pose.pose))\n\n Me.info_message(self.data['poses']['path'])\n return", "def _get_pointing_pose(self, point_entity):\n # type: (Entity, Arm) -> FrameStamped\n # Compute the frame w.r.t. 
base link\n fs_robot = point_entity.pose.projectToFrame(self.robot.base_link_frame, self.robot.tf_buffer)\n\n # Set the orientation to unity\n fs_robot.frame.M = kdl.Rotation()\n\n return fs_robot", "def laserLinkTransformPub(self, timestamp):\n\n ts_msg = TransformStamped()\n ts_msg.header.stamp = timestamp\n ts_msg.header.frame_id = self.base_frame\n ts_msg.child_frame_id = self.scan_frame\n ts_msg.transform.translation.x = self.car_config[\"scan_dist_to_base\"]\n ts_msg.transform.rotation.w = 1\n self.br.sendTransform(ts_msg)", "def transform_pose(self, pose: Pose, target_frame: str) -> Union[Pose, None]:\n copy_pose = pose.copy()\n copy_pose.header.stamp = rospy.Time(0)\n if not self.canTransform(target_frame, pose.frame, rospy.Time(0)):\n rospy.logerr(\n f\"Can not transform pose: \\n {pose}\\n to frame: {target_frame}.\\n Maybe try calling 'update_transforms_for_object'\")\n return\n new_pose = super().transformPose(target_frame, copy_pose)\n\n copy_pose.pose = new_pose.pose\n copy_pose.header.frame_id = new_pose.header.frame_id\n copy_pose.header.stamp = rospy.Time.now()\n\n return Pose(*copy_pose.to_list(), frame=new_pose.header.frame_id)", "def transform(self, passed_stamped_pose):\n # Creating / Updating transform with latest translation and rotation.\n transform = TransformStamped()\n transform.header = rospy.get_rostime()\n transform.transform.translation = Point(self.translation[0],self.translation[1], 0.0)\n transform.transform.rotation = Quaternion(self.rotation[0],self.rotation[1],self.rotation[2],self.rotation[3])\n\n # pose = PoseStamped(passed_stamped_pose.header, passed_stamped_pose.pose)\n pose = tf2_geometry_msgs.do_transform_pose(passed_stamped_pose, transform)\n \n return pose", "def make_transform(parent_frame: str, child_frame: str) -> TransformStamped:\n out = TransformStamped()\n out.header.frame_id = parent_frame\n out.child_frame_id = child_frame\n # some random data\n out.transform.rotation.w = 1\n out.transform.translation.x = 0.1\n\n return out", "def calculate_frame(self):\n frame = self.stream.read()\n self.keypoints, self.image = self.openpose.forward(frame, True)", "def make_pose(self, position, orientation, frame):\n\n pose = PoseStamped()\n pose.header.frame_id = frame\n pose.pose.position.x = position[0]\n pose.pose.position.y = position[1]\n pose.pose.position.z = position[2]\n pose.pose.orientation.w = orientation[0]\n pose.pose.orientation.x = orientation[1]\n pose.pose.orientation.y = orientation[2]\n pose.pose.orientation.z = orientation[3]\n return pose", "def get_goal_ee_pose(self):\n #self.target_endpoint = #magic tf call that I can add ie the pose of the palm from camera aruco detection\n while True:\n try:\n translation, rotation = self.listener.lookupTransform('world_frame', 'palm_frame_camera', rospy.Time()) # ee_frame_camera_flipped\n break # once the transform is obtained move on\n except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):\n continue # if it fails try again\n point = [translation[0], translation[1], translation[2]]\n self.target_endpoint = np.array(point)\n # rospy.logerr(self.target_endpoint)", "def scan_received(self, msg):\n # print msg\n if not(self.initialized):\n # wait for initialization to complete\n return\n\n if not(self.tf_listener.canTransform(self.base_frame,msg.header.frame_id,msg.header.stamp)):\n # need to know how to transform the laser to the base frame\n # this will be given by either Gazebo or neato_node\n return\n\n if 
not(self.tf_listener.canTransform(self.base_frame,self.odom_frame,msg.header.stamp)):\n # need to know how to transform between base and odometric frames\n # this will eventually be published by either Gazebo or neato_node\n return\n\n # print 'msg.header.frame_id', msg.header.frame_id\n # calculate pose of laser relative ot the robot base\n p = PoseStamped(header=Header(stamp=rospy.Time(0),\n frame_id=msg.header.frame_id))\n self.laser_pose = self.tf_listener.transformPose(self.base_frame,p)\n\n # find out where the robot thinks it is based on its odometry\n # listener.getLatestCommonTime(\"/base_link\",object_pose_in.header.frame_id)\n # p = PoseStamped(header=Header(stamp=msg.header.stamp,\n p = PoseStamped(header=Header(stamp=self.tf_listener.getLatestCommonTime(self.base_frame, self.map_frame),\n # p = PoseStamped(header=Header(stamp=rospy.Time.now(),\n frame_id=self.base_frame),\n pose=Pose())\n # p_aux = PoseStamped(header=Header(stamp=self.tf_listener.getLatestCommonTime(\"/base_link\",\"/map\"),\n p_aux = PoseStamped(header=Header(stamp=self.tf_listener.getLatestCommonTime(self.odom_frame, self.map_frame),\n # p_aux = PoseStamped(header=Header(stamp=rospy.Time.now(),\n frame_id=self.odom_frame),\n pose=Pose())\n odom_aux = self.tf_listener.transformPose(self.map_frame, p_aux)\n odom_aux_xy_theta = convert_pose_to_xy_and_theta(odom_aux.pose)\n # print 'odom_aux_xy_theta', odom_aux_xy_theta\n\n self.odom_pose = self.tf_listener.transformPose(self.odom_frame, p)\n # print 'self.odom_pose', self.odom_pose\n # (trans, root) = self.tf_listener.lookupTransform(self.odom_frame, self.base_frame, rospy.Time(0))\n # self.odom_pose = trans\n # print trans, root\n new_odom_xy_theta = convert_pose_to_xy_and_theta(self.odom_pose.pose)\n # new_odom_xy_theta = convert_pose_to_xy_and_theta(self.laser_pose.pose)\n xy_theta_aux = (new_odom_xy_theta[0]+odom_aux_xy_theta[0], \n new_odom_xy_theta[1]+odom_aux_xy_theta[1], new_odom_xy_theta[2])\n self.xy_theta_aux = xy_theta_aux\n\n if not(self.particle_cloud):\n self.initialize_particle_cloud(xy_theta_aux)\n self.current_odom_xy_theta = new_odom_xy_theta\n\n elif (math.fabs(new_odom_xy_theta[0] - self.current_odom_xy_theta[0]) > self.linear_mov or\n math.fabs(new_odom_xy_theta[1] - self.current_odom_xy_theta[1]) > self.linear_mov or\n math.fabs(new_odom_xy_theta[2] - self.current_odom_xy_theta[2]) > self.angular_mov):\n\n self.update_particles_with_odom(msg)\n self.update_particles_with_laser(msg)\n self.resample_particles()\n\n self.publish_particles(msg)", "def build_pose_stamped_msg(self):\n \n # Hand first\n ps_msg = PoseStamped()\n ps_msg.header.stamp = rospy.Time.now()\n ps_msg.header.frame_id = FRAME_ID\n \n if not DEBUG_TEST:\n position = self.hand.palm_position\n\n # Set position values in the message\n for j, attr in enumerate(POS_ATTRIBUTES):\n val = getattr(position, attr)\n setattr(ps_msg.pose.position, attr, val) \n \n # Get pose\n direction = self.hand.direction\n normal = self.hand.palm_normal\n\n # Get orientation values from hand vectors\n roll = normal.roll\n pitch = normal.pitch\n yaw = direction.yaw\n\n else:\n ((x, y, z), (pitch, yaw, roll)) = self.test_pose()\n ps_msg.pose.position.x = x\n ps_msg.pose.position.y = y\n ps_msg.pose.position.z = z\n \n # Convert RPY to Quaternion \n quaternion = transformations.quaternion_from_euler(roll, pitch, yaw)\n \n # Set orientation quaternion in the message\n ps_msg.pose.orientation.x = quaternion[0]\n ps_msg.pose.orientation.y = quaternion[1]\n ps_msg.pose.orientation.z = quaternion[2]\n 
ps_msg.pose.orientation.w = quaternion[3]\n \n # return the PoseStamped messages\n print ps_msg\n return ps_msg", "def process_frame(self, frame):\n\t\treturn frame", "def _preprocess(self, ob):\n # Take the max over prev and current frames.\n if self.last_frame is not None:\n ob_comb = np.maximum(ob, self.last_frame)\n else:\n ob_comb = ob\n self.last_frame = ob\n\n # Convert to YUV, extract Y, resize, and crop.\n r, g, b = ob_comb[:, :, 0], ob_comb[:, :, 1], ob_comb[:, :, 2]\n y = 0.299 * r + 0.587 * g + 0.114 * b\n y_resized = cv2.resize(y, (84, 110), interpolation=cv2.INTER_LINEAR)\n y_cropped = y_resized[13:-13, :]\n return y_cropped", "def project_roi(self, roi, frame_id=None):\n response = self.project_rois(rois=[roi]).points[0]\n\n # Convert to VectorStamped\n result = VectorStamped(x=response.point.x, y=response.point.y, z=response.point.z,\n frame_id=response.header.frame_id)\n\n # If necessary, transform the point\n if frame_id is not None:\n print(\"Transforming roi to {}\".format(frame_id))\n result = result.projectToFrame(frame_id=frame_id, tf_listener=self.tf_listener)\n\n # Return the result\n return result", "def inception_v4_base(sample_shape, final_endpoint='Inception/Mixed_7d',\n aux_endpoint='Inception/Mixed_6e'):\n name = 'InceptionV4'\n end_points = {}\n net = ffnet.FeedForwardNet()\n\n def final_aux_check(block_name):\n if block_name == final_endpoint:\n return True\n if block_name == aux_endpoint:\n aux = aux_endpoint + '-aux'\n end_points[aux] = net.add(Split(aux, 2))\n return False\n\n # 299 x 299 x 3\n blk = name + '/Conv2d_1a_3x3'\n net.add(Conv2D(blk, 32, 3, 2, border_mode='VALID', use_bias=False,\n input_sample_shape=sample_shape))\n net.add(BatchNormalization('%s/BatchNorm' % blk))\n end_points[blk] = net.add(Activation('%s/relu' % blk))\n if final_aux_check(blk):\n return net, end_points\n\n # 149 x 149 x 32\n blk = name + '/Conv2d_2a_3x3'\n end_points[blk] = conv2d(net, blk, 32, 3, border_mode='VALID')\n if final_aux_check(blk):\n return net, end_points\n\n # 147 x 147 x 32\n blk = name + '/Conv2d_2b_3x3'\n end_points[blk] = conv2d(net, blk, 64, 3)\n if final_aux_check(blk):\n return net, end_points\n\n # 147 x 147 x 64\n blk = name + '/Mixed_3a'\n s = net.add(Split('%s/Split' % blk, 2))\n br0 = net.add(MaxPooling2D('%s/Branch_0/MaxPool_0a_3x3' % blk, 3, 2,\n border_mode='VALID'), s)\n br1 = conv2d(net, '%s/Branch_1/Conv2d_0a_3x3' % blk, 96, 3, 2,\n border_mode='VALID', src=s)\n end_points[blk] = net.add(Concat('%s/Concat' % blk, 1), [br0, br1])\n if final_aux_check(blk):\n return net, end_points\n\n # 73 x 73 x 160\n blk = name + '/Mixed_4a'\n s = net.add(Split('%s/Split' % blk, 2))\n br0 = conv2d(net, '%s/Branch_0/Conv2d_0a_1x1' % blk, 64, 1, src=s)\n br0 = conv2d(net, '%s/Branch_0/Conv2d_1a_3x3' % blk, 96, 3,\n border_mode='VALID')\n br1 = conv2d(net, '%s/Branch_1/Conv2d_0a_1x1' % blk, 64, 1, src=s)\n br1 = conv2d(net, '%s/Branch_1/Conv2d_0b_1x7' % blk, 64, (1, 7))\n br1 = conv2d(net, '%s/Branch_1/Conv2d_0c_7x1' % blk, 64, (7, 1))\n br1 = conv2d(net, '%s/Branch_1/Conv2d_1a_3x3' % blk, 96, 3,\n border_mode='VALID')\n end_points[blk] = net.add(Concat('%s/Concat' % blk, 1), [br0, br1])\n if final_aux_check(blk):\n return net, end_points\n\n # 71 x 71 x 192\n blk = name + '/Mixed_5a'\n s = net.add(Split('%s/Split' % blk, 2))\n br0 = conv2d(net, '%s/Branch_0/Conv2d_1a_3x3' % blk, 192, 3, 2,\n border_mode='VALID', src=s)\n br1 = net.add(MaxPooling2D('%s/Branch_1/MaxPool_1a_3x3' % blk, 3, 2,\n border_mode='VALID'), s)\n end_points[blk] = 
net.add(Concat('%s/Concat' % blk, 1), [br0, br1])\n if final_aux_check(blk):\n return net, end_points\n\n # 35 x 35 x 384\n # 4 x Inception-A blocks\n for idx in range(4):\n blk = name + '/Mixed_5' + chr(ord('b') + idx)\n end_points[blk] = block_inception_a(blk, net)\n if final_aux_check(blk):\n return net, end_points\n\n # 35 x 35 x 384\n # Reduction-A block\n blk = name + '/Mixed_6a'\n end_points[blk] = block_reduction_a(blk, net)\n if final_aux_check(blk):\n return net, end_points[blk], end_points\n\n # 17 x 17 x 1024\n # 7 x Inception-B blocks\n for idx in range(7):\n blk = name + '/Mixed_6' + chr(ord('b') + idx)\n end_points[blk] = block_inception_b(blk, net)\n if final_aux_check(blk):\n return net, end_points\n\n # 17 x 17 x 1024\n # Reduction-B block\n blk = name + '/Mixed_7a'\n end_points[blk] = block_reduction_b(blk, net)\n if final_aux_check(blk):\n return net, end_points\n\n # 8 x 8 x 1536\n # 3 x Inception-C blocks\n for idx in range(3):\n blk = name + '/Mixed_7' + chr(ord('b') + idx)\n end_points[blk] = block_inception_c(blk, net)\n if final_aux_check(blk):\n return net, end_points\n\n assert final_endpoint == blk, \\\n 'final_enpoint = %s is not in the net' % final_endpoint", "def get_frame(self, frame: int) -> BaseImage:\n return self.sequence[frame]", "def get_image_pose_blob(im, target_scale, target_max_size, entry):\n processed_im, im_scale = prep_im_for_blob(\n im, cfg.PIXEL_MEANS, target_scale, target_max_size\n )\n ###### load pose data from .bin file\n if 'LIP_val' in cfg.TEST.DATASETS[0]:\n# pred_pose_data = '/home/gaomingda/Downloads/gaomingda/dataset/LIP_JPP_pred_pose/val'\n# pose_line_data = '/home/gaomingda/Downloads/gaomingda/dataset/LIP_JPP_pose_edge/val'\n pred_pose_data = '/home/gaomingda/datasets/lip_body25/val_images'\n if 'LIP_test' in cfg.TEST.DATASETS[0]:\n pred_pose_data = '/home/gaomingda/Downloads/gaomingda/dataset/lip_body25/testing_images'\n if 'ATR' in cfg.TEST.DATASETS[0]:\n# pred_pose_data = '/home/gaomingda/Downloads/gaomingda/dataset/ATR_JPP_pred_pose'\n# pred_pose_data = '/home/gaomingda/Downloads/gaomingda/dataset/ATR_JPP_crop_pred_pose'\n pred_pose_data = '/home/gaomingda/Downloads/gaomingda/dataset/ATR_openpose'\n# pred_pose_data = '/home/gaomingda/Downloads/gaomingda/dataset/LIP_JPP_pred_pose/val'\n pred_pose_path = os.path.join(pred_pose_data, 'heatmap', entry['id']+'.bin')\n pred_ = np.fromfile(pred_pose_path, dtype=np.float32)\n if 'LIP' in cfg.TEST.DATASETS[0]:\n pred_ = pred_.reshape(48, 48, 26)\n else:\n pred_ = pred_.reshape(48, 48, 26)\n choose_flag = -1\n if choose_flag==0:\n # select 0-16 channel\n print(\"select 0-16 channel\")\n pred_ = pred_[:, :, 0:16]\n if choose_flag==1:\n # arm 2,3,4,5,6,7\n print(\"set arm ids pose to 0\")\n arm_ids = [2,3,4,5,6,7]\n for arm_id in arm_ids:\n pred_[:, :, arm_id] = 0\n if choose_flag==2:\n print(\"set leg ids pose to 0\")\n leg_ids = [9,10,12,13]\n for arm_id in leg_ids:\n pred_[:, :, arm_id] = 0\n if choose_flag==3:\n print(\"set foot ids pose to 0\")\n foot_ids = [11,22,23,24,14,19,20,21]\n for arm_id in foot_ids:\n pred_[:, :, arm_id] = 0\n # pose line\n# pred_pose_path = os.path.join(pose_line_data, entry['id']+'.bin')\n# pose_line = np.fromfile(pred_pose_path, dtype=np.float32)\n# pose_line = pose_line.reshape(48, 48)\n pose_line = np.zeros((48, 48), dtype=np.float32)\n \n blob, pose_blob, pose_line_blob = im_list_to_blob_andPose(processed_im, np.expand_dims(pred_, axis=0), \n np.expand_dims(pose_line, axis=0))\n \n height, width = blob.shape[2], blob.shape[3]\n im_info = 
np.hstack((height, width, im_scale))[np.newaxis, :]\n return blob, im_scale, im_info.astype(np.float32), pose_blob, pose_line_blob", "def publish_pose(self):\n pose_msg = PoseWithCovarianceStamped()\n pose_msg.header.stamp = self.current_frame.time\n pose_msg.header.frame_id = \"map\"\n pose_msg.pose.pose = g2r(self.current_frame.pose3)\n\n cov = 1e-4 * np.identity(6, np.float32)\n # FIXME Use cov in current_frame\n cov[np.ix_((0, 1, 5), (0, 1, 5))] = self.current_keyframe.transf_cov\n pose_msg.pose.covariance = cov.ravel().tolist()\n self.pose_pub.publish(pose_msg)\n\n o2m = self.current_frame.pose3.compose(self.current_frame.dr_pose3.inverse())\n o2m = g2r(o2m)\n p = o2m.position\n q = o2m.orientation\n self.tf.sendTransform(\n (p.x, p.y, p.z),\n [q.x, q.y, q.z, q.w],\n self.current_frame.time,\n \"odom\",\n \"map\",\n )\n\n odom_msg = Odometry()\n odom_msg.header = pose_msg.header\n odom_msg.pose.pose = pose_msg.pose.pose\n odom_msg.child_frame_id = \"base_link\"\n odom_msg.twist.twist = self.current_frame.twist\n self.odom_pub.publish(odom_msg)", "def get_ef_pose(pose_listener): \n if LOCAL_TEST: # dummy\n return np.array([[-0.1915, 0.8724, -0.4498, 0.6041],\n [ 0.7355, 0.4309, 0.5228, -0.0031],\n [ 0.6499, -0.2307, -0.7242, 0.3213],\n [ 0., 0., 0., 1. ]])\n else:\n base_frame = 'measured/base_link'\n target_frame = 'measured/panda_hand'\n try:\n tf_pose = pose_listener.lookupTransform(base_frame, target_frame, rospy.Time(0))\n pose = make_pose(tf_pose)\n except (tf2_ros.LookupException,\n tf2_ros.ConnectivityException,\n tf2_ros.ExtrapolationException):\n pose = None\n print('cannot find end-effector pose')\n sys.exit(1)\n return pose", "def createROSPose(position, rotation, frame_id='/base'):\n header = Header(stamp=rospy.Time.now(), frame_id=frame_id)\n stampedPose = PoseStamped(header=header)\n stampedPose.pose.position.x = position[0]\n stampedPose.pose.position.y = position[1]\n stampedPose.pose.position.z = position[2]\n stampedPose.pose.orientation.x = rotation[0]\n stampedPose.pose.orientation.y = rotation[1]\n stampedPose.pose.orientation.z = rotation[2]\n stampedPose.pose.orientation.w = rotation[3]\n return stampedPose", "def process_pipeline(frame, keep_state=True):\n\n global line_lt, line_rt, processed_frames\n\n # undistort the image using coefficients found in calibration\n undistorted_img = undistort(frame, mtx, dist)\n\n # binarize the frame and highlight lane lines\n binarized_img = binarize(undistorted_img)\n\n # perspective transform to obtain bird's eye view\n birdeye_img, matrix, inversed_matrix = birdeye(binarized_img, visualise=False)\n\n # 2 order polynomial curve fit onto lane lines found\n if processed_frames > 0 and keep_state and line_lt.detected and line_rt.detected:\n find_lane_by_previous_fits(birdeye_img, line_lt, line_rt, visualise=False)\n else:\n find_lane_by_sliding_windows(birdeye_img, line_lt, line_rt, n_windows=9, visualise=False)\n\n # compute offset in meter from center of the lane\n offset_meter = offset_from_lane_center(line_lt, line_rt, frame_width=frame.shape[1])\n\n # draw the surface enclosed by lane lines back onto the original frame\n blend_on_road = draw_back_onto_the_road(undistorted_img, inversed_matrix, line_lt, line_rt, keep_state)\n mean_curvature_meter = np.mean([line_lt.curvature_meter, line_rt.curvature_meter])\n font = cv2.FONT_HERSHEY_SIMPLEX\n cv2.putText(blend_on_road, 'Curvature radius: {:.02f}m'.format(mean_curvature_meter), (60, 60), font, 1,\n (255, 255, 255), 2)\n cv2.putText(blend_on_road, 'Offset from center: 
{:.02f}m'.format(offset_meter), (60, 90), font, 1,\n (255, 255, 255), 2)\n\n processed_frames += 1\n\n return blend_on_road", "def captureNextFrame(self):\r\n mainls = []\r\n\r\n\r\n ret, readFrame = self.capture.read()\r\n\r\n if (ret == True):\r\n self.currentFrame = cv2.cvtColor(readFrame, cv2.COLOR_BGR2RGB)\r\n self.faceDetection(self.currentFrame)\r\n self.currentFrame = self.bbFrame", "def join_anchor_base_score(anchor_df, base_df):\n joined_df = (anchor_df.merge(base_df, how='inner', left_on=['target_guide', 'condition'],\n right_on=['anchor_guide', 'condition'], suffixes=['', '_target'])\n .drop('anchor_guide_target', axis=1))\n return joined_df", "def convert_to_point_cloud2(cloud):\n header = Header()\n header.frame_id = \"base_link\"\n header.stamp = rospy.Time.now()\n return point_cloud2.create_cloud_xyz32(header, cloud)", "def get_original_frame(self, frame):\n if self._pad_top is not None:\n frame = frame[self._pad_top:frame.shape[0] - self._pad_bottom,\n self._pad_left:frame.shape[1] - self._pad_right]\n if self._scale_factor is not None and self._scale_factor != 1:\n frame = cv2.resize(frame,\n (int(frame.shape[1] / self._scale_factor),\n int(frame.shape[0] / self._scale_factor)))\n return frame", "def callback(self, pose_array):\n with self._lock:\n pose_array_msg = PoseArray()\n\n # Camera frame to tag frame(s)\n if (len(pose_array.transforms)==0):\n self._pose_detections = None\n self._tag_pose_pub.publish(pose_array_msg)\n return\n\n pose_detections = np.zeros((len(pose_array.transforms),3))\n for i in range(len(pose_array.transforms)):\n pose_msg = Pose()\n tag_id = pose_array.transforms[i].fiducial_id\n\n transform_cam2tag = pose_array.transforms[i].transform\n # print \"transform_cam2tag = \", transform_cam2tag\n poselist_cam2tag = transform2poselist(transform_cam2tag)\n poselist_base2tag = transformPose(self._lr, poselist_cam2tag, 'camera', 'base_link')\n poselist_tag2base = invPoselist(poselist_base2tag)\n # print \"poselist_tag2base = \", poselist_tag2base\n poselist_map2base = transformPose(self._lr, poselist_tag2base, 'apriltag_'+str(tag_id), 'map')\n # print \"poselist_map2base = \", poselist_map2base\n pubFrame(self._br, pose = poselist_map2base, frame_id = '/base_link', parent_frame_id = '/map')\n\n robot_pose3d = lookupTransform(self._lr, '/map', '/base_link')\n robot_position2d = robot_pose3d[0:2]\n robot_yaw = tf.transformations.euler_from_quaternion(robot_pose3d[3:7])[2]\n robot_pose2d = robot_position2d + [robot_yaw]\n pose_detections[i] = np.array(robot_pose2d)\n\n pose_msg.position.x = robot_pose3d[0]\n pose_msg.position.y = robot_pose3d[1]\n pose_msg.orientation.x = robot_pose3d[3]\n pose_msg.orientation.y = robot_pose3d[4]\n pose_msg.orientation.z = robot_pose3d[5]\n pose_msg.orientation.w = robot_pose3d[6]\n pose_array_msg.poses.append(pose_msg)\n \n self._tag_pose_pub.publish(pose_array_msg)\n self._pose_detections = pose_detections" ]
[ "0.56539935", "0.53922534", "0.5311693", "0.5271331", "0.52625394", "0.5036834", "0.49239218", "0.4911125", "0.4782191", "0.47520468", "0.4737187", "0.47025004", "0.4676885", "0.46542126", "0.46408707", "0.45992538", "0.45896745", "0.4587845", "0.457565", "0.45676693", "0.45565298", "0.45305592", "0.45067796", "0.44943294", "0.4472259", "0.44641", "0.4440728", "0.44283178", "0.44266585", "0.4398477" ]
0.6804362
0
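The `_transform_to_base_link` document above wraps a `tf.TransformListener` held by the rover object. The same pattern in self-contained form, assuming a running ROS 1 system whose TF tree connects the two frames; the `camera_link` frame name is illustrative:

```python
# Standalone sketch of the tf (ROS 1) waitForTransform/transformPose pattern.
import rospy
import tf
from geometry_msgs.msg import PoseStamped

rospy.init_node('transform_to_base_link_example')
listener = tf.TransformListener()

pose = PoseStamped()
pose.header.frame_id = 'camera_link'   # illustrative source frame
pose.header.stamp = rospy.Time(0)      # rospy.Time(0) = latest available
pose.pose.orientation.w = 1.0

# Block until the transform exists (or raise after the timeout), then apply it.
listener.waitForTransform('base_link', pose.header.frame_id,
                          pose.header.stamp, rospy.Duration(3.0))
pose_in_base = listener.transformPose('base_link', pose)
```

Note that `waitForTransform` raises `tf.Exception` on timeout, which is why callers such as the document above typically pass an explicit timeout rather than blocking forever.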
Transform PoseStamped detection into /odom frame. Returns PoseStamped in /odom frame.
def _transform_to_odom(self, detection, timeout=3.0): self.swarmie.xform.waitForTransform( self.rovername + '/odom', detection.pose.header.frame_id, detection.pose.header.stamp, rospy.Duration(timeout) ) return self.swarmie.xform.transformPose(self.rovername + '/odom', detection.pose)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fix_map_to_odom_transform(self, msg):\n (translation, rotation) = convert_pose_inverse_transform(self.robot_pose)\n p = PoseStamped(pose=convert_translation_rotation_to_pose(translation, rotation),\n header=Header(stamp=msg.header.stamp, frame_id=self.base_frame))\n self.odom_to_map = self.tf_listener.transformPose(self.odom_frame, p)\n (self.translation, self.rotation) = convert_pose_inverse_transform(self.odom_to_map.pose)", "def build_pose_stamped_msg(self):\n \n # Hand first\n ps_msg = PoseStamped()\n ps_msg.header.stamp = rospy.Time.now()\n ps_msg.header.frame_id = FRAME_ID\n \n if not DEBUG_TEST:\n position = self.hand.palm_position\n\n # Set position values in the message\n for j, attr in enumerate(POS_ATTRIBUTES):\n val = getattr(position, attr)\n setattr(ps_msg.pose.position, attr, val) \n \n # Get pose\n direction = self.hand.direction\n normal = self.hand.palm_normal\n\n # Get orientation values from hand vectors\n roll = normal.roll\n pitch = normal.pitch\n yaw = direction.yaw\n\n else:\n ((x, y, z), (pitch, yaw, roll)) = self.test_pose()\n ps_msg.pose.position.x = x\n ps_msg.pose.position.y = y\n ps_msg.pose.position.z = z\n \n # Convert RPY to Quaternion \n quaternion = transformations.quaternion_from_euler(roll, pitch, yaw)\n \n # Set orientation quaternion in the message\n ps_msg.pose.orientation.x = quaternion[0]\n ps_msg.pose.orientation.y = quaternion[1]\n ps_msg.pose.orientation.z = quaternion[2]\n ps_msg.pose.orientation.w = quaternion[3]\n \n # return the PoseStamped messages\n print ps_msg\n return ps_msg", "def publish_pose(self):\n pose_msg = PoseWithCovarianceStamped()\n pose_msg.header.stamp = self.current_frame.time\n pose_msg.header.frame_id = \"map\"\n pose_msg.pose.pose = g2r(self.current_frame.pose3)\n\n cov = 1e-4 * np.identity(6, np.float32)\n # FIXME Use cov in current_frame\n cov[np.ix_((0, 1, 5), (0, 1, 5))] = self.current_keyframe.transf_cov\n pose_msg.pose.covariance = cov.ravel().tolist()\n self.pose_pub.publish(pose_msg)\n\n o2m = self.current_frame.pose3.compose(self.current_frame.dr_pose3.inverse())\n o2m = g2r(o2m)\n p = o2m.position\n q = o2m.orientation\n self.tf.sendTransform(\n (p.x, p.y, p.z),\n [q.x, q.y, q.z, q.w],\n self.current_frame.time,\n \"odom\",\n \"map\",\n )\n\n odom_msg = Odometry()\n odom_msg.header = pose_msg.header\n odom_msg.pose.pose = pose_msg.pose.pose\n odom_msg.child_frame_id = \"base_link\"\n odom_msg.twist.twist = self.current_frame.twist\n self.odom_pub.publish(odom_msg)", "def fix_map_to_odom_transform(self, robot_pose, timestamp):\n (translation, rotation) = \\\n self.convert_pose_inverse_transform(robot_pose)\n p = PoseStamped(\n pose=self.convert_translation_rotation_to_pose(translation,\n rotation),\n header=Header(stamp=timestamp, frame_id='base_link'))\n self.tf_listener.waitForTransform('base_link',\n 'odom',\n timestamp,\n rospy.Duration(2.0)) # Extended duration due to tf timeout error\n self.odom_to_map = self.tf_listener.transformPose('odom', p)\n (self.translation, self.rotation) = \\\n self.convert_pose_inverse_transform(self.odom_to_map.pose)", "def make_pose(self, position, orientation, frame):\n\n pose = PoseStamped()\n pose.header.frame_id = frame\n pose.pose.position.x = position[0]\n pose.pose.position.y = position[1]\n pose.pose.position.z = position[2]\n pose.pose.orientation.w = orientation[0]\n pose.pose.orientation.x = orientation[1]\n pose.pose.orientation.y = orientation[2]\n pose.pose.orientation.z = orientation[3]\n return pose", "def _publish_odometry(self):\n # only publish if we 
have a subscriber\n if self._odom_pub.get_num_connections() == 0:\n return\n\n now = rospy.Time.now()\n odom = Odometry()\n odom.header.frame_id = self._odom_frame\n odom.header.stamp = now\n odom.child_frame_id = self._footprint_frame\n odom.pose.pose.position.x = self._cozmo.pose.position.x * 0.001\n odom.pose.pose.position.y = self._cozmo.pose.position.y * 0.001\n odom.pose.pose.position.z = self._cozmo.pose.position.z * 0.001\n q = quaternion_from_euler(.0, .0, self._cozmo.pose_angle.radians)\n odom.pose.pose.orientation.x = q[0]\n odom.pose.pose.orientation.y = q[1]\n odom.pose.pose.orientation.z = q[2]\n odom.pose.pose.orientation.w = q[3]\n odom.pose.covariance = np.diag([1e-2, 1e-2, 1e-2, 1e3, 1e3, 1e-1]).ravel()\n odom.twist.twist.linear.x = self._lin_vel\n odom.twist.twist.angular.z = self._ang_vel\n odom.twist.covariance = np.diag([1e-2, 1e3, 1e3, 1e3, 1e3, 1e-2]).ravel()\n self._odom_pub.publish(odom)", "def broadcast_odom_frame(self, pose, current_time):\n x, y, th, vx, vy, vth = pose\n odom_quat = tf.transformations.quaternion_from_euler(0, 0, th)\n\n transform_stamped = TransformStamped()\n transform_stamped.header.stamp = current_time\n transform_stamped.header.frame_id = 'odom'\n transform_stamped.child_frame_id = 'base_link'\n\n transform_stamped.transform.translation.x = x\n transform_stamped.transform.translation.y = y\n transform_stamped.transform.translation.z = 0.0\n\n transform_stamped.transform.rotation.x = odom_quat[0]\n transform_stamped.transform.rotation.y = odom_quat[1]\n transform_stamped.transform.rotation.z = odom_quat[2]\n transform_stamped.transform.rotation.w = odom_quat[3]\n\n self.odom_frame_broadcaster.sendTransform(transform_stamped)", "def _pose_from_odom(self, odom): \n pose = odom.pose.pose.position\n return [pose.x, pose.y, pose.z]", "def makePoseStampedFromGraspFrame(self, graspFrame):\n iiwaLinkEEFrame = self.getIiwaLinkEEFrameFromGraspFrame(graspFrame)\n poseDict = spartanUtils.poseFromTransform(iiwaLinkEEFrame)\n poseMsg = rosUtils.ROSPoseMsgFromPose(poseDict)\n poseStamped = geometry_msgs.msg.PoseStamped()\n poseStamped.pose = poseMsg\n poseStamped.header.frame_id = \"base\"\n\n return poseStamped", "def get_odom(tf_listener, odom_frame, base_frame):\n try:\n (trans, rot) = tf_listener.lookupTransform(odom_frame, base_frame, rospy.Time(0))\n except (tf.Exception, tf.ConnectivityException, tf.LookupException):\n rospy.loginfo(\"TF Exception\")\n return\n\n return (Point(*trans), Quaternion(*rot))", "def createROSPose(position, rotation, frame_id='/base'):\n header = Header(stamp=rospy.Time.now(), frame_id=frame_id)\n stampedPose = PoseStamped(header=header)\n stampedPose.pose.position.x = position[0]\n stampedPose.pose.position.y = position[1]\n stampedPose.pose.position.z = position[2]\n stampedPose.pose.orientation.x = rotation[0]\n stampedPose.pose.orientation.y = rotation[1]\n stampedPose.pose.orientation.z = rotation[2]\n stampedPose.pose.orientation.w = rotation[3]\n return stampedPose", "def publish_odometry(self, odom_combined):\n\n if not self.last_time:\n # set up initial times and pose\n rospy.loginfo(\"Setting up initial position\")\n self.last_time, self.last_x, y, self.last_theta = self.current_pose(odom_combined)\n return\n\n # publish to the /odom topic\n odom = Odometry()\n odom.header.stamp = rospy.Time.now()\n odom.header.frame_id = \"/base_link\"\n odom.pose = odom_combined.pose\n\n current_time, x, y, theta = self.current_pose(odom_combined)\n dt = current_time - self.last_time\n dt = dt.to_sec()\n d_x = x - self.last_x\n 
d_theta = theta - self.last_theta\n odom.twist.twist = Twist(Vector3(d_x/dt, 0, 0), Vector3(0, 0, d_theta/dt))\n\n self.odom_publisher.publish(odom)\n\n self.last_time, self.last_x, self.last_theta = current_time, x, theta", "def readOdom(msg):\n global pose\n global xPosition\n global yPosition\n global theta\n global odom_list\n global odom_tf\n try:\n pose = msg.pose\n geo_quat = pose.pose.orientation\n q = [geo_quat.x, geo_quat.y, geo_quat.z, geo_quat.w]\n odom_tf.sendTransform((pose.pose.position.x, pose.pose.position.y, 0), \n (pose.pose.orientation.x, pose.pose.orientation.y,pose.pose.orientation.z,pose.pose.orientation.w),rospy.Time.now(),\"base_footprint\",\"odom\")\n #Convert transform to global usable coordinates (x, y, theta)\n (trans, rot) = odom_list.lookupTransform('map', 'base_footprint', rospy.Time(0))\n roll, pitch, yaw = euler_from_quaternion(rot)\n theta = yaw * (180.0/math.pi)\n xPosition = trans[0]\n yPosition = trans[1]\n except:\n print \"waiting\"", "def transform_pose(self, pose: Pose, target_frame: str) -> Union[Pose, None]:\n copy_pose = pose.copy()\n copy_pose.header.stamp = rospy.Time(0)\n if not self.canTransform(target_frame, pose.frame, rospy.Time(0)):\n rospy.logerr(\n f\"Can not transform pose: \\n {pose}\\n to frame: {target_frame}.\\n Maybe try calling 'update_transforms_for_object'\")\n return\n new_pose = super().transformPose(target_frame, copy_pose)\n\n copy_pose.pose = new_pose.pose\n copy_pose.header.frame_id = new_pose.header.frame_id\n copy_pose.header.stamp = rospy.Time.now()\n\n return Pose(*copy_pose.to_list(), frame=new_pose.header.frame_id)", "def annotated_frame(self):\n frame = self.frame.copy()\n if self.pupils_located:\n color = (0, 255, 0)\n x_left, y_left = self.pupil_left_coords()\n x_right, y_right = self.pupil_right_coords()\n # cv2.line(frame, (x_left - 5, y_left), (x_left + 5, y_left), color)\n # cv2.line(frame, (x_left, y_left - 5), (x_left, y_left + 5), color)\n # cv2.line(frame, (x_right - 5, y_right), (x_right + 5, y_right), color)\n # cv2.line(frame, (x_right, y_right - 5), (x_right, y_right + 5), color)\n return frame", "def test_metadata_to_odom_0(self):\n data = ET.parse(\"data/metadata_0.xml\")\n data_str = ET.tostring(data.getroot())\n\n dict = tesse_ros_bridge.utils.parse_metadata(data_str)\n\n odom = tesse_ros_bridge.utils.metadata_to_odom(dict, 0, \"f1\", \"f2\")\n self.assertEqual(odom.header.stamp, 0)\n self.assertEqual(odom.header.frame_id, \"f1\")\n self.assertEqual(odom.child_frame_id, \"f2\")\n self.assertEqual(odom.pose.pose.position.x, dict['position'][0])\n self.assertEqual(odom.pose.pose.position.y, dict['position'][1])\n self.assertEqual(odom.pose.pose.position.z, dict['position'][2])\n self.assertEqual(odom.pose.pose.orientation.x, dict['quaternion'][0])\n self.assertEqual(odom.pose.pose.orientation.y, dict['quaternion'][1])\n self.assertEqual(odom.pose.pose.orientation.z, dict['quaternion'][2])\n self.assertEqual(odom.pose.pose.orientation.w, dict['quaternion'][3])\n self.assertEqual(odom.twist.twist.linear.x, dict['velocity'][0])\n self.assertEqual(odom.twist.twist.linear.y, dict['velocity'][1])\n self.assertEqual(odom.twist.twist.linear.z, dict['velocity'][2])\n self.assertEqual(odom.twist.twist.angular.x, dict['ang_vel'][0])\n self.assertEqual(odom.twist.twist.angular.y, dict['ang_vel'][1])\n self.assertEqual(odom.twist.twist.angular.z, dict['ang_vel'][2])", "def o3dpc_to_rospc(o3dpc, frame_id=None, stamp=None):\n\n cloud_npy = np.asarray(copy.deepcopy(o3dpc.points))\n is_color = o3dpc.colors\n \n\n 
n_points = len(cloud_npy[:, 0])\n if is_color:\n data = np.zeros(n_points, dtype=[\n ('x', np.float32),\n ('y', np.float32),\n ('z', np.float32),\n ('rgb', np.uint32)\n ])\n else:\n data = np.zeros(n_points, dtype=[\n ('x', np.float32),\n ('y', np.float32),\n ('z', np.float32)\n ])\n data['x'] = cloud_npy[:, 0]\n data['y'] = cloud_npy[:, 1]\n data['z'] = cloud_npy[:, 2]\n \n if is_color:\n rgb_npy = np.asarray(copy.deepcopy(o3dpc.colors))\n rgb_npy = np.floor(rgb_npy*255) # nx3 matrix\n rgb_npy = rgb_npy[:, 0] * BIT_MOVE_16 + rgb_npy[:, 1] * BIT_MOVE_8 + rgb_npy[:, 2] \n rgb_npy = rgb_npy.astype(np.uint32)\n data['rgb'] = rgb_npy\n\n rospc = ros_numpy.msgify(PointCloud2, data)\n if frame_id is not None:\n rospc.header.frame_id = frame_id\n\n if stamp is None:\n rospc.header.stamp = rospy.Time.now()\n else:\n rospc.header.stamp = stamp\n rospc.height = 1\n rospc.width = n_points\n rospc.fields = []\n rospc.fields.append(PointField(\n name=\"x\",\n offset=0,\n datatype=PointField.FLOAT32, count=1))\n rospc.fields.append(PointField(\n name=\"y\",\n offset=4,\n datatype=PointField.FLOAT32, count=1))\n rospc.fields.append(PointField(\n name=\"z\",\n offset=8,\n datatype=PointField.FLOAT32, count=1)) \n\n if is_color:\n rospc.fields.append(PointField(\n name=\"rgb\",\n offset=12,\n datatype=PointField.UINT32, count=1)) \n rospc.point_step = 16\n else:\n rospc.point_step = 12\n \n rospc.is_bigendian = False\n rospc.row_step = rospc.point_step * n_points\n rospc.is_dense = True\n return rospc", "def transform(self, passed_stamped_pose):\n # Creating / Updating transform with latest translation and rotation.\n transform = TransformStamped()\n transform.header = rospy.get_rostime()\n transform.transform.translation = Point(self.translation[0],self.translation[1], 0.0)\n transform.transform.rotation = Quaternion(self.rotation[0],self.rotation[1],self.rotation[2],self.rotation[3])\n\n # pose = PoseStamped(passed_stamped_pose.header, passed_stamped_pose.pose)\n pose = tf2_geometry_msgs.do_transform_pose(passed_stamped_pose, transform)\n \n return pose", "def odom_callback(self, msg):\n self.odom = quat_to_euler(msg.pose.pose.orientation)\n self.odom = rectify_angle_pi(self.odom)", "def serialize(self, buff):\n try:\n _x = self\n buff.write(_get_struct_2i3I().pack(_x.manip_return_code, _x.object_grabber_return_code, _x.des_gripper_pose.header.seq, _x.des_gripper_pose.header.stamp.secs, _x.des_gripper_pose.header.stamp.nsecs))\n _x = self.des_gripper_pose.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.Struct('<I%ss'%length).pack(length, _x))\n _x = self\n buff.write(_get_struct_7di3I().pack(_x.des_gripper_pose.pose.position.x, _x.des_gripper_pose.pose.position.y, _x.des_gripper_pose.pose.position.z, _x.des_gripper_pose.pose.orientation.x, _x.des_gripper_pose.pose.orientation.y, _x.des_gripper_pose.pose.orientation.z, _x.des_gripper_pose.pose.orientation.w, _x.object_finder_return_code, _x.object_pose.header.seq, _x.object_pose.header.stamp.secs, _x.object_pose.header.stamp.nsecs))\n _x = self.object_pose.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.Struct('<I%ss'%length).pack(length, _x))\n _x = self\n buff.write(_get_struct_7d().pack(_x.object_pose.pose.position.x, _x.object_pose.pose.position.y, _x.object_pose.pose.position.z, _x.object_pose.pose.orientation.x, _x.object_pose.pose.orientation.y, 
_x.object_pose.pose.orientation.z, _x.object_pose.pose.orientation.w))\n except struct.error as se: self._check_types(struct.error(\"%s: '%s' when writing '%s'\" % (type(se), str(se), str(locals().get('_x', self)))))\n except TypeError as te: self._check_types(ValueError(\"%s: '%s' when writing '%s'\" % (type(te), str(te), str(locals().get('_x', self)))))", "def __toPoseSpace(self, pose):\n poseSpacePath = pose.replace(\".psd\", \".png\")\n log('copying to pose space')\n FlixNuke().toPose(pose, poseSpacePath)\n\n return poseSpacePath", "def annotated_frame(self, original_frame):\n frame = original_frame.copy()\n\n if self.pupils_located:\n color = (0, 255, 0)\n x_left, y_left = self.pupil_left_coords()\n x_right, y_right = self.pupil_right_coords()\n cv2.line(frame, (x_left - 5, y_left), (x_left + 5, y_left), color)\n cv2.line(frame, (x_left, y_left - 5), (x_left, y_left + 5), color)\n cv2.line(frame, (x_right - 5, y_right), (x_right + 5, y_right), color)\n cv2.line(frame, (x_right, y_right - 5), (x_right, y_right + 5), color)\n\n return frame", "def human_readable_pose2d(self, pose):\n\n\t\t# create a quaternion from the pose\n\t\tquaternion = (\n\t\tpose.orientation.x,\n\t\tpose.orientation.y,\n\t\tpose.orientation.z,\n\t\tpose.orientation.w\n\t\t)\n\n\t\t# convert quaternion rotation to euler rotation\n\t\troll, pitch, yaw = euler_from_quaternion(quaternion)\n\n\t\tresult = (\n\t\tpose.position.x, # x position\n\t\tpose.position.y, # y position\n\t\tyaw # theta angle\n\t\t)\n\n\t\treturn result", "def handle_pose(msg):\n global sensor_cfg\n global no_position\n global body_frame\n global frame_cfg\n\n quat = np.array([msg.pose.orientation.x, msg.pose.orientation.y, msg.pose.orientation.z, msg.pose.orientation.w])\n pos = np.array([msg.pose.position.x*1000, msg.pose.position.y*1000, msg.pose.position.z*1000])\n\n if position_mode == \"zero_pos\":\n pos = np.array([0, 0, 0])\n elif position_mode == \"relative\":\n pos = pos - parent_position\n\n br = tf.TransformBroadcaster()\n\n br.sendTransform(pos,\n quat,\n msg.header.stamp,\n body_frame,\n msg.header.frame_id)\n\n for k in frame_cfg:\n br.sendTransform(np.array([float(x) for x in frame_cfg[k][\"position\"].split(\" \")]),\n np.array([float(x) for x in frame_cfg[k][\"pose\"].split(\" \")]),\n rospy.Time.now(),\n k,\n body_frame)\n\n for k in sensor_cfg:\n br.sendTransform(np.array([float(x) for x in sensor_cfg[k][\"position\"].split(\" \")]),\n np.array([float(x) for x in sensor_cfg[k][\"pose\"].split(\" \")]),\n rospy.Time.now(),\n k,\n body_frame)\n\n for k in thruster_cfg:\n br.sendTransform(np.array([float(x) for x in sensor_cfg[k][\"position\"].split(\" \")]),\n np.array([float(x) for x in sensor_cfg[k][\"pose\"].split(\" \")]),\n rospy.Time.now(),\n k,\n body_frame)", "def reformat_pose_to_dict(self, now_pose):\n # now_pose è un dict in particolare { pose : [ {position : [{x : value , y:value , z:value} ] } , {orientation : [] } }\n # devo convertire i quaternioni in amgoli di eulero...estrarre i quaternioni da pose_now e convertirli in angoli RPY\n\n lato_corto_2 = 1.65 #1.45 # offset parcheggio\n \n #correggo gli offset x centrare le macchine nei parcheggi\n\n if abs(round(now_pose.position.x,2)) == 22.45:\n if now_pose.position.x < 0 :\n now_pose.position.x+=lato_corto_2\n now_pose.position.y-=0.4\n else :\n now_pose.position.x-=lato_corto_2\n now_pose.position.y+=0.4\n \n if abs(round(now_pose.position.y,2)) == 22.45:\n if now_pose.position.y < 0 :\n now_pose.position.y+=lato_corto_2\n now_pose.position.x+=0.4\n else :\n 
now_pose.position.y-=lato_corto_2\n now_pose.position.x-=0.4\n\n # correggo la z per renderla uguale all'asfalto che viene spownata nel mondo\n\n offset_asfalto = 0.3\n\n x = now_pose.position.x\n y = now_pose.position.y\n z = now_pose.position.z + offset_asfalto\n\n q1 = now_pose.orientation.x\n q2 = now_pose.orientation.y\n q3 = now_pose.orientation.z\n q4 = now_pose.orientation.w\n\n\n # converto i quaternioni in angoli di rulero RPY in radianti\n orientation_list = [q1,q2,q3,q4]\n\n euler = euler_from_quaternion( orientation_list )\n roll = euler[0]\n pitch = euler[1]\n yaw = round(euler[2],2) + np.pi\n\n\n # creo la lista dei parametri che mi servono nel campo pose:[] del file .yaml\n\n lista_parametri = [x ,y ,z ,roll ,pitch ,yaw ]\n\n # creo un dict con tutti i campi di cui ho bisogno nel file .yaml\n # settare le chiavi 'name' , ' type ' , 'package' , ' pose ' secondo le proprie necessità\n # i due stili sono equivalenti : usare quello preferito\n \"\"\"\n {\"name\" : \"park1\" , \n \"type\" : \"sdf\" , \n \"package\" : \"object_spawner\" , \n \"pose \":self.seq(lista_parametri) \n }\n \n \"\"\"\n lista_veicoli = ['macchina','pickup','ferrari','prius_hybrid','car_lexus','car_polo','car_volvo','car_golf']\n num_veicoli = 1\n\n #modificare qui implementando una funzione randomica se si vogliono piu veicoli casuali spawnati\n elemento_lista = {'name' : lista_veicoli[3],\n 'type': 'sdf',\n 'package': 'object_spawner',\n 'pose': self.seq( x , y , z , roll , pitch , yaw)}\n #\"\"\"\n # elemento_lista = {'name' : 'ferrari',\n # 'type': 'urdf',\n # 'package': 'autopark',\n # 'pose': self.seq( x , y , z , roll , pitch , yaw)}\n\n return elemento_lista", "def callback(self, pose_array):\n with self._lock:\n pose_array_msg = PoseArray()\n\n # Camera frame to tag frame(s)\n if (len(pose_array.transforms)==0):\n self._pose_detections = None\n self._tag_pose_pub.publish(pose_array_msg)\n return\n\n pose_detections = np.zeros((len(pose_array.transforms),3))\n for i in range(len(pose_array.transforms)):\n pose_msg = Pose()\n tag_id = pose_array.transforms[i].fiducial_id\n\n transform_cam2tag = pose_array.transforms[i].transform\n # print \"transform_cam2tag = \", transform_cam2tag\n poselist_cam2tag = transform2poselist(transform_cam2tag)\n poselist_base2tag = transformPose(self._lr, poselist_cam2tag, 'camera', 'base_link')\n poselist_tag2base = invPoselist(poselist_base2tag)\n # print \"poselist_tag2base = \", poselist_tag2base\n poselist_map2base = transformPose(self._lr, poselist_tag2base, 'apriltag_'+str(tag_id), 'map')\n # print \"poselist_map2base = \", poselist_map2base\n pubFrame(self._br, pose = poselist_map2base, frame_id = '/base_link', parent_frame_id = '/map')\n\n robot_pose3d = lookupTransform(self._lr, '/map', '/base_link')\n robot_position2d = robot_pose3d[0:2]\n robot_yaw = tf.transformations.euler_from_quaternion(robot_pose3d[3:7])[2]\n robot_pose2d = robot_position2d + [robot_yaw]\n pose_detections[i] = np.array(robot_pose2d)\n\n pose_msg.position.x = robot_pose3d[0]\n pose_msg.position.y = robot_pose3d[1]\n pose_msg.orientation.x = robot_pose3d[3]\n pose_msg.orientation.y = robot_pose3d[4]\n pose_msg.orientation.z = robot_pose3d[5]\n pose_msg.orientation.w = robot_pose3d[6]\n pose_array_msg.poses.append(pose_msg)\n \n self._tag_pose_pub.publish(pose_array_msg)\n self._pose_detections = pose_detections", "def getOdom(self, msg):\n\n self.pos = msg.pose.pose.position\n self.ori = msg.pose.pose.orientation\n \n\n self.controller()", "def process_odom(self, msg):\n if 
(self.last_odom != None and\n msg.south_to_north_position != self.last_odom.south_to_north_position):\n delta = msg.south_to_north_position - self.last_odom.south_to_north_position\n self.pf.predict(delta)\n self.last_odom = msg", "def yolo_test_video(self):\n # Open the input video, blocking call\n inputVideo = cv2.VideoCapture(self.inputFile)\n\t\t\n # Get infomration about the input video\n codec = int(inputVideo.get(cv2.CAP_PROP_FOURCC))\n fps = int(inputVideo.get(cv2.CAP_PROP_FPS))\n frameWidth = int(inputVideo.get(cv2.CAP_PROP_FRAME_WIDTH))\n frameHeight = int(inputVideo.get(cv2.CAP_PROP_FRAME_HEIGHT))\n\n # Open the output stream\n outputVideo = cv2.VideoWriter(self.outputFile,\n codec,\n fps,\n (frameWidth,frameHeight))\n frameIndex = inputVideo.get(cv2.CAP_PROP_POS_FRAMES)\n totalFrames = inputVideo.get(cv2.CAP_PROP_FRAME_COUNT)\n \t \n\tavgGrabTime = 0\n\tavgYoloTime = 0\n\tavgWriteTime = 0\n \n # For each frame in the video\n while True:\n \n startTime = time.time()\n \n # Calculate the time it takes to grab a frame\n startGrabTime = time.time()\n grabbed, frame = inputVideo.read()\n endGrabTime = time.time() \n\t avgGrabTime+=(endGrabTime-startGrabTime)\n\t \n\n if grabbed:\n\t\t\n # Calculate the time it takes to run YOLO pipeline \n\t\tstartYoloTime = time.time()\n annotatedFrame, predictedObjects = self.detect_from_image(frame)\n\t\tendYoloTime = time.time()\n\t\tavgYoloTime+= ( endYoloTime - startYoloTime)\n\n frameIndex = inputVideo.get(cv2.CAP_PROP_POS_FRAMES)\n \t\n\t\tcurrentTime = time.time()\n\t\telapsedTime = currentTime - startTime\n\t\tcurrentFPS = (1)/elapsedTime \n\t\t \t\n #cv2.rectangle(annotatedFrame, (0, 0), (30, 30), (0,0,0), -1)\n cv2.putText(\n annotatedFrame, 'FPS' + ': %.2f' % currentFPS,\n (15, 15), cv2.FONT_HERSHEY_SIMPLEX, 0.5,\n (255, 255, 255), 2\n )\n\t\t\n # Calculate the time it takes to write an annotated frame to video\n\t\tstartWriteTime = time.time()\n outputVideo.write(annotatedFrame)\n\t\tendWriteTime = time.time()\n\t\tavgWriteTime +=(endWriteTime - startWriteTime)\n\t\n else:\n inputVideo.set(cv2.CAP_PROP_POS_FRAMES, frameIndex-1)\n cv2.waitKey(100)\n\n if frameIndex==totalFrames:\n break\n\t\t\n inputVideo.release()\n outputVideo.release()\n cv2.destroyAllWindows()\n \n avgGrabTime/=totalFrames\n avgYoloTime/=totalFrames\n avgWriteTime/=totalFrames\n\n if self.verbose:\n print ('Average time for extracting compressed video frame : %.3f' %avgGrabTime)\n print ('Average time for YOLO object detection : %.3f' %avgYoloTime )\n print ('Average time for writing frame to video : %.3f' %avgWriteTime)", "def get_pose_of_model(self, robot_name):\n pose_now = self.gz_model_obj.get_model_pose(robot_name)\n \n return pose_now" ]
[ "0.61992025", "0.5914986", "0.5914425", "0.583971", "0.58341193", "0.5548618", "0.553005", "0.5486628", "0.5481845", "0.54486555", "0.5400877", "0.52915186", "0.52890486", "0.526186", "0.5221597", "0.51837945", "0.51579654", "0.515357", "0.5136488", "0.5136463", "0.50342727", "0.5002903", "0.4989187", "0.49648914", "0.49562496", "0.49545184", "0.49518532", "0.49491552", "0.49214053", "0.48878562" ]
0.678048
0
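`_transform_to_odom` repeats the same pattern with a different target frame. With the newer tf2 API the lookup collapses into a single `buffer.transform()` call; a minimal sketch, again assuming ROS 1 and an illustrative source frame:

```python
# Sketch of the tf2 equivalent. Importing tf2_geometry_msgs registers
# PoseStamped conversions, which buffer.transform() needs.
import rospy
import tf2_ros
import tf2_geometry_msgs  # noqa: F401
from geometry_msgs.msg import PoseStamped

rospy.init_node('transform_to_odom_example')
buffer = tf2_ros.Buffer()
listener = tf2_ros.TransformListener(buffer)  # keeps the buffer filled

pose = PoseStamped()
pose.header.frame_id = 'camera_link'  # illustrative source frame
pose.header.stamp = rospy.Time(0)
pose.pose.orientation.w = 1.0

# Waits up to 3 s for the transform, then returns the pose in /odom.
pose_in_odom = buffer.transform(pose, 'odom', rospy.Duration(3.0))
```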
Returns True if it looks like the rover is inside the ring of home tags and needs to get out! Returns False if no home tags in view.
def is_inside_home_ring(self, detections): YAW_THRESHOLD = 1.3 # radians see_home_tag = False good_orientations = 0 bad_orientations = 0 for detection in detections: if detection.id == 256: see_home_tag = True home_detection = self._transform_to_base_link(detection) quat = [home_detection.pose.orientation.x, home_detection.pose.orientation.y, home_detection.pose.orientation.z, home_detection.pose.orientation.w] _r, _p, y = tf.transformations.euler_from_quaternion(quat) y -= math.pi / 2 y = angles.normalize_angle(y) if abs(y) < YAW_THRESHOLD: bad_orientations += 1 else: good_orientations += 1 if not see_home_tag: return False return bad_orientations >= good_orientations
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sees_home_tag(self):\n detections = self.swarmie.get_latest_targets().detections\n\n for detection in detections:\n if detection.id == 256:\n return True\n\n return False", "def goal_occupied(self, view):\n for line in view.obstacles:\n if linesegdist2(line.p1, line.p2, self.goal) < self.radius ** 2:\n return True\n\n for p in view.pedestrians:\n if p.velocity.length2() == 0.0:\n if p.position.distance_to2(self.goal) < p.radius:\n return True\n\n return False", "def ishome(self) -> bool:\n pass", "def ishome(self):\n return self._plrevgeoloc.isHome", "def someone_home(self) -> bool:\n return self._someone_home", "def isCurrentPlayerHome(self):\r\n \r\n #creates corresponding starting and ending points for each player\r\n if self.getTurn() == RED:\r\n start = 0\r\n end = 18\r\n else:\r\n start = 6\r\n end = 24\r\n \r\n #checks whether the current player has checkers on corresponding points\r\n for i in range(start, end):\r\n if self.points[i].getTeam() == self.getTurn():\r\n return False\r\n \r\n return True", "def is_home(self):\n return bool([\n device for device in self.devices\n if device.is_home and device.is_phone\n ])", "def active(self):\n return self.home is not None and self.away is not None and self.winner is None", "def is_in_box_rt(self, rt):\n regions = self.boxes_rt.at(rt)\n if len(regions) > 0:\n return True\n else:\n return False", "def is_in_hotspot(self):\r\n in_hotspot = False\r\n hotspots = parser.parse_hotspot_bed()\r\n \r\n if hotspots.get(self.chrom): \r\n chrom_hotspots = hotspots[self.chrom]\r\n \r\n for interval in chrom_hotspots: \r\n if interval[0] <= self.pos <= interval[1]:\r\n in_hotspot = True\r\n break\r\n \r\n return in_hotspot", "def is_home_page(self):\n return not self.title and self.category is None", "def has_geotag(self):\n place = self.place\n # TODO: Add the feature where it considers a tweet to be geotagged\n # if the user place coordinates have been encoded by PlaceInfo().\n # user_coords = self.metadata.user_place_coordinates\n return place is not None", "def can_be_displayed_on_homepage(self):\n\n return self.filter(is_spotlighted=True).has_logo()", "def isHomePage(self):\n home = self.getHome(self.url)\n if home == self.url:\n return True\n if home == self.url + '/':\n return True\n return False", "def is_hometown(town):\n if town == 'orlando':\n is_hometown = True\n else:\n is_hometown = False\n return is_hometown", "def _is_safe_to_back_up(self):\n # Only back up if we're far enough away from home for it\n # to be safe. 
Don't want to back up into the nest!\n home_loc = self.swarmie.get_home_odom_location()\n current_loc = self.swarmie.get_odom_location().get_pose()\n dist = math.sqrt((home_loc.x - current_loc.x) ** 2\n + (home_loc.y - current_loc.y) ** 2)\n if dist > 1.5:\n return True\n\n angle_to_home = self.get_angle_to_face_point(home_loc)\n if abs(angle_to_home) < math.pi / 2:\n return True\n\n return False", "def is_island(self):\n return bool(not self.children.exists() and not self.parents.exists())", "def is_unrolled_out_leaf(self, game):\n return self._plays[game] == 0", "def is_game_over(self):\n return self.state.all_avatars_placed() and self.state.is_game_over()", "def is_unhappy(self):\n #checked!#\n ###your code here###\n same=0\n for i in self.home.neighbors:\n if i.occupant!=None:\n if i.occupant.group==self.group:\n same+=1\n happniess=float(same)/len(self.home.neighbors)\n if happniess<self.happiness_threshold:\n return True\n else:\n return False", "def is_home(self):\n return self.last_seen.seconds / 60 <= 2 and self.last_seen.days == 0", "def contains(self, or_index):\n oring = self.ring_list[or_index]\n\n # take a test point from somewhere in the middle of the open ring\n pt = oring.path.point(0.5)\n\n if self.is_core:\n if oring.maxR > self.outer.maxR:\n return False\n return path_encloses_pt(pt, self.outside_point, self.outer.path)\n\n if oring.maxR > self.outer.maxR or oring.minR < self.inner.minR:\n return False\n return path_encloses_pt(pt, self.outside_point, self.outer.path) and \\\n not path_encloses_pt(pt, self.outside_point, self.inner.path)", "def has_root_lanes(self):\n return flask.request.library.has_root_lanes", "def isPlayed(self):\n return bool(self.viewedLeafCount == self.leafCount)", "def isPlayed(self):\n return bool(self.viewedLeafCount == self.leafCount)", "def is_viewed(self):\n return self.has_label(VIEWED_LABEL)", "def isTopHomePage(self):\n domain = self.getDomain()\n if self.url == \"http://\" + domain + \"/\":\n return True\n if self.url == \"http://www.\" + domain + \"/\":\n return True\n if self.url == \"http://\" + domain:\n return True\n if self.url == \"http://www.\" + domain:\n return True\n return False", "def is_overlappedFootprint(self, footprint):\n if footprint.width == 0 or footprint.height == 0 or footprint.popularity <= 1:\n return False\n for corner in footprint.corners:\n if self.is_point_in(corner):\n return True\n for corner in self.corners:\n if footprint.is_point_in(corner):\n return True\n return False", "def check_boundary(self):\n turtle_position = self.turtle.position()\n if turtle_position[0] > self.screen_width/2 - 40 and int(self.turtle.heading()) == 0:\n return False\n if turtle_position[0] < -self.screen_width/2 + 40 and int(self.turtle.heading()) == 180:\n return False\n if turtle_position[1] > self.screen_height/2 - 40 and int(self.turtle.heading()) == 90:\n return False\n if turtle_position[1] < -self.screen_height/2 + 40 and int(self.turtle.heading()) == 270:\n return False\n return True", "def is_inside(self, p) -> bool:\r\n h = self.wedge\r\n inside = False\r\n if lefton(h, p):\r\n while not h.nexthedge is self.wedge:\r\n h = h.nexthedge\r\n if not lefton(h, p):\r\n return False\r\n return True\r\n else:\r\n return False" ]
[ "0.6877612", "0.6524369", "0.6232681", "0.62293905", "0.6129121", "0.5960962", "0.5907052", "0.58036876", "0.5741693", "0.57036555", "0.56856495", "0.5676682", "0.56460345", "0.5643292", "0.56271076", "0.5600498", "0.5594986", "0.55924743", "0.5587293", "0.55797714", "0.55694693", "0.55673957", "0.5535312", "0.5531818", "0.5531818", "0.5531225", "0.55263484", "0.5491185", "0.5469587", "0.54392993" ]
0.70185405
0
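The record above leans on two pieces of external machinery, tf.transformations and angles.normalize_angle. Below is a minimal, ROS-free sketch of the same yaw test, assuming the standard ZYX yaw-from-quaternion formula; the pi/2 offset and the 1.3 rad threshold are taken from the document, while the sample quaternion is made up for illustration.

import math

YAW_THRESHOLD = 1.3  # radians, same constant as in is_inside_home_ring()

def yaw_from_quaternion(x, y, z, w):
    # Standard ZYX-convention yaw extraction from a unit quaternion.
    return math.atan2(2.0 * (w * z + x * y), 1.0 - 2.0 * (y * y + z * z))

def normalize_angle(a):
    # Wrap into (-pi, pi], mirroring angles.normalize_angle().
    return math.atan2(math.sin(a), math.cos(a))

def is_bad_orientation(qx, qy, qz, qw):
    y = yaw_from_quaternion(qx, qy, qz, qw) - math.pi / 2
    return abs(normalize_angle(y)) < YAW_THRESHOLD

# A tag rotated 90 degrees about z (yaw = pi/2) lands at y == 0 after the
# offset, so it counts as a "bad" (facing-the-wrong-way) orientation:
print(is_bad_orientation(0.0, 0.0, math.sin(math.pi / 4), math.cos(math.pi / 4)))  # True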
The safe driving pathway is defined to be the space between two lines running parallel to the rover's x-axis, at y=0.33m and y=-0.33m. Driving between these lines gives the wheels about 10cm of clearance on either side. These lines also conveniently intersect the upper-left and upper-right corners of the camera's field of view, but are not visible in the frame. So, to avoid driving over a cube, the rover should make sure it drives within these lines (drives without a tag in view). This function helps decide which direction to turn and how far to drive to get a tag out of view and go around it efficiently. Returns the angle the rover should turn in order to get the tag out of its field of view. | tag> .|<path_edge_point | / . | | / . | | / . |< path edge | | <theta, the angle to turn. Rover would turn left (+theta) | __|_. | in this instance. | []| |. |[] | | | . | | | |rover| | | []|_____|[] | | |+x +y____|
def _get_angle_and_dist_to_avoid(self, detection, direction='left'):
    OVERSHOOT_DIST = 0.20  # meters, distance to overshoot target by

    base_link_pose = self._transform_to_base_link(detection)
    radius = math.sqrt(base_link_pose.pose.position.x ** 2
                       + base_link_pose.pose.position.y ** 2)

    tag_point = Point(x=base_link_pose.pose.position.x,
                      y=base_link_pose.pose.position.y)

    path_edge_point = Point()
    # Solve for x given the radius and y-coord of a point on a circle.
    # Just set x to zero if the radius is too small (if the tag is too
    # close to the rover). Protects math.sqrt() from evaluating a
    # negative number.
    if radius > Planner.PATHWAY_EDGE_DIST:
        path_edge_point.x = math.sqrt(radius ** 2
                                      - Planner.PATHWAY_EDGE_DIST ** 2)
    else:
        path_edge_point.x = 0
    path_edge_point.y = Planner.PATHWAY_EDGE_DIST
    if direction == 'left':
        path_edge_point.y *= -1

    return (-self._angle_between(tag_point, path_edge_point),
            path_edge_point.x + OVERSHOOT_DIST)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _compute_connection(current_waypoint, next_waypoint, threshold=35):\n n = next_waypoint.transform.rotation.yaw\n n = n % 360.0\n\n c = current_waypoint.transform.rotation.yaw\n c = c % 360.0\n\n diff_angle = (n - c) % 180.0\n if diff_angle < threshold or diff_angle > (180 - threshold):\n return RoadOption.STRAIGHT\n elif diff_angle > 90.0:\n return RoadOption.LEFT\n else:\n return RoadOption.RIGHT", "def GeneratePath(path_name, file_name, waypoints, settings, reverse=False, heading_overide=False, headingValue=0.0):\r\n # Generate the path using pathfinder.\r\n info, trajectory = pf.generate(waypoints, settings.order, settings.samples, settings.period,\r\n settings.maxVelocity, settings.maxAcceleration, settings.maxJerk)\r\n\r\n # Modify the path for the differential drive based on the calibrated wheelbase\r\n modifier = pf.modifiers.TankModifier(trajectory).modify(ROBOT_WHEELBASE_FT)\r\n\r\n # Ge the left and right trajectories...left and right are reversed\r\n rightTrajectory = modifier.getLeftTrajectory()\r\n leftTrajectory = modifier.getRightTrajectory()\r\n\r\n # Grab the position, velocity + acceleration for feed-forward, heading, and duration\r\n path = {\"left\": [], \"right\": []}\r\n headingOut = []\r\n for i in range(len(leftTrajectory)):\r\n if not reverse:\r\n if heading_overide:\r\n heading = headingValue\r\n elif abs(pf.r2d(leftTrajectory[i].heading)) > 180:\r\n heading = -(pf.r2d(leftTrajectory[i].heading) - 360)\r\n else:\r\n heading = -pf.r2d(leftTrajectory[i].heading)\r\n else:\r\n if heading_overide:\r\n heading = headingValue\r\n else:\r\n heading = pf.r2d(leftTrajectory[i].heading) - 180\r\n\r\n headingOut.append(heading)\r\n path[\"left\"].append([leftTrajectory[i].position * 4096 / # Position: CTRE SRX Mag encoder: 4096 units per rotation\r\n (ROBOT_WHEEL_DIAMETER_FT * math.pi), # Voltage / Feed-Forward\r\n CalculateFeedForwardVoltage(True,\r\n leftTrajectory[i].velocity,\r\n leftTrajectory[i].acceleration),\r\n heading / 360, # Pigeon IMU setup for 3600 units per rotation\r\n int(leftTrajectory[i].dt * 1000)]) # Duration\r\n path[\"right\"].append([rightTrajectory[i].position * 4096 /\r\n (ROBOT_WHEEL_DIAMETER_FT * math.pi),\r\n CalculateFeedForwardVoltage(False,\r\n rightTrajectory[i].velocity,\r\n rightTrajectory[i].acceleration),\r\n heading / 360,\r\n int(rightTrajectory[i].dt * 1000)])\r\n\r\n # Dump the path into a pickle file which will be read up later by the RoboRIO robot code\r\n with open(os.path.join(path_name, file_name+\".pickle\"), \"wb\") as fp:\r\n pickle.dump(path, fp)\r\n\r\n # Plot the data for review\r\n x = list(i * (settings.period) for i, _ in enumerate(leftTrajectory))\r\n\r\n plt.figure()\r\n # plt.plot(aspect=0.5)\r\n plt.title(\"Trajectory\")\r\n drawField(plt)\r\n plt.plot([segment.y for segment in leftTrajectory],\r\n [segment.x for segment in leftTrajectory],\r\n marker='.', color='b')\r\n plt.plot([segment.y for segment in rightTrajectory],\r\n [segment.x for segment in rightTrajectory],\r\n marker='.', color='r')\r\n plt.gca().set_yticks(np.arange(0, 30.1, 1.0), minor=True)\r\n plt.gca().set_yticks(np.arange(0, 30.1, 3))\r\n plt.gca().set_xticks(np.arange(0, 27.1, 1.0), minor=True)\r\n plt.gca().set_xticks(np.arange(0, 27.1, 3))\r\n plt.grid(which='minor', color='grey', linestyle='--', alpha=0.25)\r\n plt.grid(which='major', color='grey', linestyle='-', alpha=0.75)\r\n\r\n # Plot the heading\r\n plt.figure()\r\n plt.title(\"Heading\")\r\n plt.plot(x, headingOut, marker='.')\r\n\r\n # Plot the velocity and acceleration and look 
for any discontinuities\r\n plt.figure()\r\n plt.subplot(2, 1, 1)\r\n plt.title(\"Velocity\")\r\n plt.plot(x, [segment.velocity for segment in leftTrajectory], marker='.', color='b')\r\n plt.plot(x, [segment.velocity for segment in rightTrajectory], marker='.', color='r')\r\n plt.yticks(np.arange(0, DRIVETRAIN_MAX_VELOCITY + 0.1, 1.0))\r\n plt.grid()\r\n plt.subplot(2, 1, 2)\r\n plt.title(\"Acceleration\")\r\n plt.plot(x, [segment.acceleration for segment in leftTrajectory], marker='.', color='b')\r\n plt.plot(x, [segment.acceleration for segment in rightTrajectory], marker='.', color='r')\r\n plt.yticks(np.arange(-DRIVETRAIN_MAX_ACCELERATION, DRIVETRAIN_MAX_ACCELERATION + 1.1, 2.0))\r\n plt.grid()\r\n plt.tight_layout()\r\n plt.show()", "def path_to_command_thymio(path):\n\n current_x = path[0][0]\n current_y = path[1][0]\n\n next_x = path[0][1]\n next_y = path[1][1]\n\n # next-prev\n delta_x = path[0][1] - path[0][0]\n delta_y = path[1][1] - path[1][0]\n\n # delat_x = 0 and delta_y = -/+ 1 (or delat_x = -/+ 1 and delta_y = 0): go straight\n turn = STRAIGHT\n\n # delat_x = -1 and delta_y = 1 (or delat_x = 1 and delta_y = -1): turn to the right\n if delta_x * delta_y < 0:\n turn = RIGHT\n\n # delat_x = -1 and delta_y = -1 (or delat_x = 1 and delta_y = 1): turn to the left\n if delta_x * delta_y == 1:\n turn = LEFT\n\n new_path = np.array([path[0][1:], path[1][1:]])\n\n return turn, new_path", "def left_or_right(self):\n #traversal\n left_total = 0\n left_count = 0\n right_total = 0\n right_count = 0\n self.scan()\n for ang, dist in self.scan_data.items():\n if ang < self.MIDPOINT:\n right_total += dist\n right_count += 1\n print(\"Angle: %d // dist: %d // right_count: %d\" % (ang, dist, right_count))\n else:\n left_total += dist\n left_count += 1\n left_avg = left_total / left_count\n right_avg = right_total / right_count\n if left_avg > right_avg:\n self.turn_by_deg(-45)\n else:\n self.turn_by_deg(45)\n # if robot is facing the wrong way it will turn it around\n self.exit_bias()", "def GenerateTalonMotionProfileArcPath(path_name, file_name, waypoints, settings, reverse=False,\r\n heading_overide=False, headingValue=0.0):\r\n # Generate the path using pathfinder.\r\n info, trajectory = pf.generate(waypoints, settings.order, settings.samples, settings.period,\r\n settings.maxVelocity, settings.maxAcceleration, settings.maxJerk)\r\n\r\n # Modify the path for the differential drive based on the calibrated wheelbase\r\n modifier = pf.modifiers.TankModifier(trajectory).modify(ROBOT_WHEELBASE_FT)\r\n\r\n # Ge the left and right trajectories\r\n leftTrajectory = modifier.getLeftTrajectory()\r\n rightTrajectory = modifier.getRightTrajectory()\r\n\r\n # Grab the position, velocity + acceleration for feed-forward, heading, and duration. Apply the proper conversions for the position,\r\n # feed-forward, and heading. 
The headings from pathfinder will likely be fixed\r\n path = {\"left\": [], \"right\": []}\r\n headings = {\"left\": [], \"right\": []}\r\n for i in range(len(leftTrajectory)):\r\n heading = pf.r2d(leftTrajectory[i].heading)\r\n if heading_overide:\r\n heading = headingValue\r\n else:\r\n if not reverse:\r\n if pf.r2d(leftTrajectory[i].heading) > 180:\r\n heading = pf.r2d(leftTrajectory[i].heading) - 360\r\n else:\r\n heading = -(pf.r2d(leftTrajectory[i].heading) - 180)\r\n \r\n \r\n headings[\"left\"].append(heading)\r\n path[\"left\"].append([leftTrajectory[i].position * 4096 / # Position: CTRE SRX Mag encoder: 4096 units per rotation\r\n (ROBOT_WHEEL_DIAMETER_FT * math.pi), # Voltage / Feed-Forward\r\n CalculateFeedForwardVoltage(True,\r\n leftTrajectory[i].velocity,\r\n leftTrajectory[i].acceleration),\r\n 3600 * heading / 360, # Pigeon IMU setup for 3600 units per rotation\r\n int(leftTrajectory[i].dt * 1000)]) # Duration\r\n heading = pf.r2d(rightTrajectory[i].heading)\r\n if heading_overide:\r\n heading = headingValue\r\n else:\r\n if not reverse:\r\n if pf.r2d(rightTrajectory[i].heading) > 180:\r\n heading = pf.r2d(rightTrajectory[i].heading) - 360\r\n else:\r\n heading = -(pf.r2d(rightTrajectory[i].heading) - 180)\r\n\r\n headings[\"right\"].append(heading)\r\n path[\"right\"].append([rightTrajectory[i].position * 4096 /\r\n (ROBOT_WHEEL_DIAMETER_FT * math.pi),\r\n CalculateFeedForwardVoltage(False,\r\n rightTrajectory[i].velocity,\r\n rightTrajectory[i].acceleration),\r\n 3600 * heading / 360,\r\n int(rightTrajectory[i].dt * 1000)])\r\n\r\n # Dump the path into a pickle file which will be read up later by the RoboRIO robot code\r\n with open(os.path.join(path_name, file_name+\".pickle\"), \"wb\") as fp:\r\n pickle.dump(path, fp)\r\n\r\n # Plot the X,Y points to see if the paths go where desired\r\n x = list(i * (settings.period) for i, _ in enumerate(leftTrajectory))\r\n plt.figure()\r\n plt.title(\"Trajectory\")\r\n drawField(plt)\r\n # Pathfinder +X is forward and +Y is right, flip axis for easier viewing also flip the label of the trajectory sides. 
The velocity and heading\r\n # plots are the gold standards for direction.\r\n plt.plot([-segment.y for segment in leftTrajectory],\r\n [segment.x for segment in leftTrajectory],\r\n marker='.', color='b')\r\n plt.plot([-segment.y for segment in rightTrajectory],\r\n [segment.x for segment in rightTrajectory],\r\n marker='.', color='r')\r\n plt.gca().set_yticks(np.arange(0, 30.1, 1.0), minor=True)\r\n plt.gca().set_yticks(np.arange(0, 30.1, 3))\r\n plt.gca().set_xticks(np.arange(0, 27.1, 1.0), minor=True)\r\n plt.gca().set_xticks(np.arange(0, 27.1, 3))\r\n plt.grid(which='minor', color='grey', linestyle='--', alpha=0.25)\r\n plt.grid(which='major', color='grey', linestyle='-', alpha=0.75)\r\n\r\n # Plot the heading data in degrees and look for any discontinuities\r\n plt.figure()\r\n plt.title(\"Heading\")\r\n plt.plot(x, headings[\"left\"], marker='.', color='b')\r\n plt.plot(x, headings[\"right\"], marker='.', color='r')\r\n\r\n # Plot the velocity and acceleration and look for any discontinuities\r\n plt.figure()\r\n plt.subplot(2, 1, 1)\r\n plt.title(\"Velocity\")\r\n plt.plot(x, [segment.velocity for segment in leftTrajectory], marker='.', color='b')\r\n plt.plot(x, [segment.velocity for segment in rightTrajectory], marker='.', color='r')\r\n plt.yticks(np.arange(0, DRIVETRAIN_MAX_VELOCITY + 0.1, 1.0))\r\n plt.grid()\r\n plt.subplot(2, 1, 2)\r\n plt.title(\"Acceleration\")\r\n plt.plot(x, [segment.acceleration for segment in leftTrajectory], marker='.', color='b')\r\n plt.plot(x, [segment.acceleration for segment in rightTrajectory], marker='.', color='r')\r\n plt.yticks(np.arange(-DRIVETRAIN_MAX_ACCELERATION, DRIVETRAIN_MAX_ACCELERATION + 1.1, 2.0))\r\n plt.grid()\r\n plt.tight_layout()\r\n plt.show()", "def _angle_of_attack(self, rel_wind, blade_chord):\n # blade_chord_vector - (relative_wind + pi)\n # rel_oposite = rel_wind.rotated(math.pi)\n aoa_rad = rel_wind.theta - blade_chord.theta\n aoa_rad = vec.normalize_angle(aoa_rad)\n aoa_360 = aoa_rad * 360 / math.tau\n return aoa_rad, aoa_360", "def getEdgeAngle():\n '''\n returns angle a\n a\n ◿\n b c\n '''\n ANGLE_OFFSET = 8 # How far off the angle measurements are in degrees.\n THRESHOLD = 220 # How much light must be reflected to 'notice' the desk.\n angle = 0\n while angle < panTilt.TLT_RANGE:\n angle += 1\n panTilt.tilt(int(angle))\n deskDetected = ir.readWithDelay()\n # print \"Angle:\", angle + ANGLE_OFFSET, \", ir reading:\", deskDetected\n if deskDetected > THRESHOLD or angle == panTilt.TLT_RANGE:\n # print \"-----------------------\"\n break # Break out of looking downwards loop\n panTilt.up() # Look up again\n return 90 - angle - ANGLE_OFFSET", "def _go_around(self, angle, dist):\n ignore = Obstacle.IS_SONAR\n if self.avoid_targets is True:\n ignore |= Obstacle.TAG_TARGET\n elif self.avoid_home is True:\n # Need to ignore both for this because target tags are likely to\n # be in view inside the home nest.\n ignore |= Obstacle.TAG_TARGET | Obstacle.TAG_HOME\n\n cur_heading = self.swarmie.get_odom_location().get_pose().theta\n turn_result = self.swarmie.set_heading(\n cur_heading + angle,\n ignore=ignore,\n throw=False\n )\n drive_result = self.swarmie.drive(dist,\n ignore=Obstacle.SONAR_BLOCK,\n throw=False)\n\n return turn_result, drive_result", "def get_direction(x0, y0, x1, y1):\n\n # same point special case\n if x0 == x1 and y0 == y1:\n return on\n\n # vertical special cases\n if x0 == x1:\n if y1 > y0:\n return back\n else:\n return front\n\n slope = float(y1 - y0)/(x1 - x0)\n if x1 > x0:\n if slope < -2:\n return 
front\n elif slope < -.5:\n return front_right\n elif slope < .5:\n return right\n elif slope < 2:\n return back_right\n else:\n return back\n else:\n if slope < -2:\n return back\n elif slope < -.5:\n return back_left\n elif slope < .5:\n return left\n elif slope < 2:\n return front_left\n else:\n return front", "def compute_angle(x1, y1, x2, y2):\n # +ve y-axis is downwards, hence the negative sign\n theta = np.arctan2(x2 - x1, -(y2 - y1))\n angle_from_goal = np.rad2deg(theta)\n # The top-down view is transposed.\n if (angle_from_goal > 157.5) or (angle_from_goal <= -157.5):\n spat_rel = \"above\"\n elif (angle_from_goal > -157.5) and (angle_from_goal <= -112.5):\n spat_rel = \"above-right\"\n elif (angle_from_goal > -112.5) and (angle_from_goal <= -67.5):\n spat_rel = \"right\"\n elif (angle_from_goal > -67.5) and (angle_from_goal <= -22.5):\n spat_rel = \"below-right\"\n elif (angle_from_goal > -22.5) and (angle_from_goal <= 22.5):\n spat_rel = \"below\"\n elif (angle_from_goal > 22.5) and (angle_from_goal <= 67.5):\n spat_rel = \"below-left\"\n elif (angle_from_goal > 67.5) and (angle_from_goal <= 112.5):\n spat_rel = \"left\"\n elif (angle_from_goal > 112.5) and (angle_from_goal <= 157.5):\n spat_rel = \"above-left\"\n return spat_rel", "def four_wheel_drive(x, y, heading, speed, length, steering_angle, gas, brake, gas_to_acc=1, brake_to_acc=1):\n\n return x, y, heading, speed", "def _calc_side(self):\n\n # Calculation of the side of the car with respect to the trajectory\n next_index = self.index + 1\n\n if next_index == len(self.x_trajectory):\n next_index = self.index\n\n trajectory_vector = ((self.x_trajectory[next_index]\n - self.x_trajectory[self.index]),\n (self.y_trajectory[next_index]\n - self.y_trajectory[self.index]))\n\n x_diff = self.x - self.x_trajectory[self.index]\n y_diff = self.y - self.y_trajectory[self.index]\n\n ugv_vector = (x_diff, y_diff)\n\n vector_z = ugv_vector[0] * trajectory_vector[1] \\\n - ugv_vector[1] * trajectory_vector[0]\n\n if vector_z >= 0:\n\n # It is in the right side\n self.sign = 1\n\n else:\n\n # It is in the left side\n self.sign = -1\n\n return self.sign", "def sweep(self, angle=math.pi/4, dist=0.3,\n ignore=Obstacle.PATH_IS_CLEAR, throw=False):\n start_heading = self.swarmie.get_odom_location().get_pose().theta\n ignore |= Obstacle.SONAR_BLOCK # always ignore this one too\n\n try:\n self.swarmie.set_heading(start_heading - angle, ignore=ignore)\n self.swarmie.drive(dist, ignore=ignore)\n self.swarmie.drive(-dist, ignore=ignore)\n self.swarmie.set_heading(start_heading + angle, ignore=ignore)\n self.swarmie.drive(dist, ignore=ignore)\n self.swarmie.drive(-dist, ignore=ignore)\n self.swarmie.set_heading(start_heading, ignore=ignore)\n # self.swarmie.timed_drive(time, linear, -angular, ignore=ignore)\n # self.swarmie.timed_drive(time, -linear, angular, ignore=ignore)\n\n # physical rover doesn't go left as well\n # if not self.swarmie.simulator_running():\n # angular *= 1.5\n # linear *= 1.2\n # self.swarmie.timed_drive(time, linear, angular, ignore=ignore)\n # self.swarmie.timed_drive(time, -linear, -angular, ignore=ignore)\n\n except HomeException:\n if throw:\n raise\n return MoveResult.OBSTACLE_HOME\n except TagException:\n if throw:\n raise\n return MoveResult.OBSTACLE_TAG\n except ObstacleException:\n if throw:\n raise\n return MoveResult.OBSTACLE_SONAR\n\n return MoveResult.SUCCESS", "def R_will_change_direction(point0, point1, point2):\n\n x0, y0 = point0[0], point0[1]\n x1, y1 = point1[0], point1[1]\n x2, y2 = point2[0], 
point2[1]\n\n try:\n m1 = (x1 - x2) / (y2 - y1)\n m2 = (y2 - y1) / (x2 - x1)\n x3 = ((m2 * x1) - (m1 * x0) - y1 + y0) / (m2 - m1)\n y3 = m1 * (x3 - x0) + y0\n except ZeroDivisionError:\n (x3, y3) = (x0, y1) if y1 == y2 else (x1, y0)\n\n return ((min(x1, x2) <= x3 <= max(x1, x2)) and (min(y1, y2) <= y3 <= max(y1, y2))), (x3, y3)", "def two_wheel_drive(x, y, heading, speed, length, steering_angle, gas, brake, gas_to_acc=1, brake_to_acc=1):\n\n front_wheel_x = x + length / 2 * math.cos(heading)\n front_wheel_y = y + length / 2 * math.sin(heading)\n back_wheel_x = x - length / 2 * math.cos(heading)\n back_wheel_y = y - length / 2 * math.sin(heading)\n\n speed += (\n gas * gas_to_acc * sim_c.DT - (\n brake * brake_to_acc * sim_c.DT) - road_c.DRAG_COEF * speed * sim_c.DT)\n speed = speed if speed > 0 else 0\n\n # update wheel positions\n front_wheel_x += speed * c.DT * math.cos(heading + steering_angle)\n front_wheel_y += speed * c.DT * math.sin(heading + steering_angle)\n back_wheel_x += speed * c.DT * math.cos(heading)\n back_wheel_y += speed * c.DT * math.sin(heading)\n\n # update car position and heading\n x = (front_wheel_x + back_wheel_x) / 2\n y = (front_wheel_y + back_wheel_y) / 2\n heading = math.atan2((front_wheel_y - back_wheel_y), (front_wheel_x - back_wheel_x))\n\n return x, y, heading, speed", "def turn_angle_of_seg(self, rec_seg=None):\r\n if rec_seg is None:\r\n rec_seg = self.rec_seg\r\n nseg = len(rec_seg)\r\n ndetects = nseg+1\r\n turn_angle = np.zeros(ndetects, np.float64)\r\n for nd in range(1,ndetects-2):\r\n seg1 = rec_seg[nd-1]\r\n seg2 = rec_seg[nd]\r\n dy = np.sin(seg1.head - seg2.head)\r\n dx = np.cos(seg1.head - seg2.head)\r\n turn_angle[nd] = math.atan2(dy, dx)\r\n\r\n return turn_angle", "def cutDownAngle_def(state, raySortie, rayInter):\n position = state.my_goal\n diff = state.ball_pos - position\n diff.norm = max(min(raySortie, diff.norm - rayInter), 20.)\n position += diff\n return goTo(state,position)", "def getDistanceAndRotationToEdge(l, f, r):\n\n if DEBUG:\n print \"lfr:\", l,\",\",f,\",\",r\n\n # Maths help from: http://xaktly.com/MathNonRightTrig.html\n # - Specfically the law of cosines, but at least one of their\n # examples is wrong, but methods are correct... sigh.\n #\n # For triangle with forward length, shortest of\n # left and right length, and desk edge as sides...\n #\n # f = forward distance length\n # l = left distance length\n # r = right distance length\n # e = length of desk edge between left and right views\n # s = shortest of left and right distance length\n # v = \"view\" angle of how much robot looks left or right\n # g = angle between f and e\n # d = distance between robot and edge of desk\n # a = angle between the way the robot is facing and edge of desk\n # (i.e. if the robot is facing directly towards edge it's 0)\n # (in radians or degrees?..)\n #\n # e² = f² + s² - 2 * f * s * cos(v)\n # g = sin⁻¹ * (s * sin(v) / e)\n # d = f * sin(g)\n # a = 180 - 90 - g (minus or positive depending on if s is left or right)\n\n # Figure out if the edge of the desk is more to the right or left\n # s = min(l, r) <-- Used to use this, but need additional things.\n\n # r | l | s\n # x | x | ?\n # 1 | 1 | ? Logic table for _r_ight, _l_eft, and output\n # 0 | 0 | ? 
_s_hortest distances from robot to desk edge\n # x | 0 | l\n # 1 | x | r x = None\n # 0 | 1 | r 1 = arbitrary high-ish value\n # x | 1 | l 0 = arbitrary low-ish value\n # 1 | 0 | l\n # 0 | x | r\n\n # Distance to right and left are missing?\n if r is None and l is None:\n if DEBUG:\n print \"INFO: Skipping edge calcs because of missing distances.\"\n return int(round(f)), 0\n\n # Distance to right and left identical?\n elif r == l:\n if DEBUG:\n print \"INFO: Skipping edge calcs because of identical distances.\"\n # This is unlikely-ish because l, f, r are floats...\n #\n # r < f r > f\n # ◆ | or ◼\n # ____➘| __🠛__\n #\n return int(round(min(r, f))), 0\n\n # Figure out if _l_eft or _r_ight is the shorter distance\n else:\n if r is None:\n s = l\n direction = -1\n elif l is None:\n s = r\n direction = 1\n elif l < r:\n s = l\n direction = -1\n elif r < l :\n s = r\n direction = 1\n\n cosV = math.cos(math.radians(45))\n sinV = math.sin(math.radians(45))\n\n e = f**2 + s**2 - 2 * f * s * cosV\n e = math.sqrt(e)\n g = math.degrees(math.asin(s * sinV / e))\n d = f * math.sin(math.radians(g)) # Switching degrees/radians f'debugging\n a = (90 - g) * direction\n '''\n # Debug stuff\n print \"f =\", f\n print \"l =\", l\n print \"r =\", r\n print \"e =\", e\n print \"s =\", s\n print \"v =\", 45\n print \"g =\", g\n print \"d =\", d\n print \"a =\", a\n '''\n\n distance = int(round(d))\n rotation = int(round(a))\n\n if DEBUG:\n print \"Distance to edge:\", str(distance) + \"cm\"\n print \"Rotation to edge:\", str(rotation) + \"°\"\n\n return distance, rotation", "def get_direction(self):\n is_direction_correct = False\n while not is_direction_correct:\n direction = random.randint(0, 2)\n if direction == 0:\n self.turtle.left(90)\n elif direction == 1:\n self.turtle.right(90)\n else:\n self.turtle.right(0)\n is_direction_correct = self.check_boundary()", "def cutDownAngle_gk(state, raySortie):\n position = state.my_goal\n diff = state.ball_pos - position\n diff.norm = raySortie\n position += diff\n return goTo(state,position)", "def Oneside( x, y0, y1, r):\n\n true = 1\n size_x = np.shape( x )\n if not size_x: size_x = [0]\n\n if size_x[ 0 ] == 0:\n if x == 0: return x\n elif abs( x ) >= r: return Arc( x, y0, y1, r )\n yh = sqrt( r*r - x*x )\n if ( y0 <= -yh ):\n if ( y1 <= -yh ) : return Arc( x, y0, y1, r )\n elif ( y1 <= yh ) : return Arc( x, y0, -yh, r ) \\\n + Chord( x, -yh, y1 )\n else : return Arc( x, y0, -yh, r ) \\\n + Chord( x, -yh, yh ) + Arc( x, yh, y1, r )\n \n elif ( y0 < yh ):\n if ( y1 <= -yh ) : return Chord( x, y0, -yh ) \\\n + Arc( x, -yh, y1, r )\n elif ( y1 <= yh ) : return Chord( x, y0, y1 )\n else : return Chord( x, y0, yh ) + Arc( x, yh, y1, r )\n\n else :\n if ( y1 <= -yh ) : return Arc( x, y0, yh, r ) \\\n + Chord( x, yh, -yh ) + Arc( x, -yh, y1, r )\n elif ( y1 <= yh ) : return Arc( x, y0, yh, r ) + Chord( x, yh, y1 )\n else : return Arc( x, y0, y1, r )\n\n else :\n ans2 = x\n t0 = where( x == 0)[0]\n count = len(t0)\n if count == len( x ): return ans2\n\n ans = x * 0\n yh = x * 0\n to = where( abs( x ) >= r)[0]\n tocount = len(to)\n ti = where( abs( x ) < r)[0]\n ticount = len(ti)\n if tocount != 0: ans[ to ] = Arc( x[to], y0[to], y1[to], r )\n if ticount == 0: return ans\n \n yh[ ti ] = sqrt( r*r - x[ti]*x[ti] )\n \n t1 = where( np.less_equal(y0[ti],-yh[ti]) )[0]\n count = len(t1)\n if count != 0:\n i = ti[ t1 ]\n\n t2 = where( np.less_equal(y1[i],-yh[i]))[0]\n count = len(t2)\n if count != 0:\n j = ti[ t1[ t2 ] ]\n ans[j] = Arc( x[j], y0[j], y1[j], r )\n\n t2 = where( ( 
greater(y1[i],-yh[i]) ) &\n ( less_equal(y1[i],yh[i]) ))[0]\n count = len(t2)\n if count != 0:\n j = ti[ t1[ t2 ] ]\n ans[j] = Arc( x[j], y0[j], -yh[j], r ) \\\n + Chord( x[j], -yh[j], y1[j] )\n\n t2 = where( greater(y1[i], yh[i]) )[0]\n count = len(t2)\n\n if count != 0:\n j = ti[ t1[ t2 ] ]\n ans[j] = Arc( x[j], y0[j], -yh[j], r ) \\\n + Chord( x[j], -yh[j], yh[j] ) \\\n + Arc( x[j], yh[j], y1[j], r )\n \n t1 = where( ( greater(y0[ti],-yh[ti]) ) & \n ( less(y0[ti],yh[ti]) ))[0] \n count = len(t1)\n if count != 0:\n i = ti[ t1 ]\n\n t2 = where( np.less_equal(y1[i],-yh[i]))[0]\n count = len(t2)\n if count != 0:\n j = ti[ t1[ t2 ] ]\n ans[j] = Chord( x[j], y0[j], -yh[j] ) \\\n + Arc( x[j], -yh[j], y1[j], r )\n \n\n t2 = where( ( greater(y1[i], -yh[i]) ) & \n ( less_equal(y1[i], yh[i]) ))[0]\n count = len(t2)\n\n if count != 0:\n j = ti[ t1[ t2 ] ]\n ans[j] = Chord( x[j], y0[j], y1[j] )\n\n t2 = where( greater(y1[i], yh[i]))[0]\n count = len(t2)\n if count != 0:\n j = ti[ t1[ t2 ] ]\n ans[j] = Chord( x[j], y0[j], yh[j] ) \\\n + Arc( x[j], yh[j], y1[j], r )\n\n t1 = where( greater_equal(y0[ti], yh[ti]))[0] \n count = len(t1)\n if count != 0:\n i = ti[ t1 ]\n\n t2 = where ( np.less_equal(y1[i], -yh[i]))[0] \n count = len(t2)\n if count != 0:\n j = ti[ t1[ t2 ] ]\n ans[j] = Arc( x[j], y0[j], yh[j], r ) \\\n + Chord( x[j], yh[j], -yh[j] ) \\\n + Arc( x[j], -yh[j], y1[j], r )\n\n t2 = where( ( greater(y1[i], -yh[i]) ) & \n ( less_equal(y1[i], yh[i]) ))[0]\n count = len(t2)\n if count != 0:\n j = ti[ t1[ t2 ] ]\n ans[j] = Arc( x[j], y0[j], yh[j], r ) \\\n + Chord( x[j], yh[j], y1[j] )\n\n t2 = where( greater(y1[i], yh[i]))[0]\n count = len(t2)\n if count != 0:\n j = ti[ t1[ t2 ] ]\n ans[j] = Arc( x[j], y0[j], y1[j], r )\n\n return ans", "def direction_coordinates(self, gc_lines):\n lins = [(_line[0][mid], _line[0][mid + 1], _line[1][mid], _line[1][mid + 1])\n for _line, mid in zip(gc_lines, [len(_line[0]) // 2 for _line in gc_lines])\n if len(_line[0]) > 2]\n lens = [np.hypot(_line[0][0] - _line[0][-1], _line[0][0] - _line[0][-1]) * 110.\n for _line in gc_lines\n if len(_line[0]) > 2]\n lins = [(x0 * np.cos(np.deg2rad(np.mean([y0, y1]))), x1 * np.cos(np.deg2rad(np.mean([y0, y1]))), y0, y1)\n for x0, x1, y0, y1 in lins]\n lins = [_x for _x, _l in zip(lins, lens) if _l > 10]\n\n direction = [(0.5 * (x0 + x1), 0.5 * (y0 + y1), x1 - x0, y1 - y0) for x0, x1, y0, y1 in lins]\n direction = [(_u, _v, _x / np.hypot(_x, _y), _y / np.hypot(_x, _y))\n for _u, _v, _x, _y in direction]\n los = [rotate_point(point[2:], -self.dsbObsAngleAzimuth.value()) for point in direction]\n\n dist = 1.\n tp_dir = (np.array(los).T * dist).T\n\n tps = [(x0, y0, x0 + tp_x, y0 + tp_y) for\n ((x0, y0, _, _), (tp_x, tp_y)) in zip(direction, tp_dir)]\n tps = [[(x0 / np.cos(np.deg2rad(y0)), y0), (x1 / np.cos(np.deg2rad(y0)), y1)] for (x0, y0, x1, y1) in tps]\n return tps", "def direction(self):\r\n return 180 - atan2(self.x, self.y)*180/pi", "def cutDownAngle(state, raySortie, rayInter):\n position = state.my_goal\n diff = state.ball_pos - position\n diff.norm = max(raySortie, diff.norm - rayInter)\n position += diff\n return goTo(state,position)", "def right(self, param):\n\t\tglobal estop_flag, move_state\n\t\t#If input angle is zero, set angle to default\n\t\tif param:\n\t\t\tangle = param\n\t\telse:\n\t\t\tangle = riu.default_angle\n\n\t\tsignal.alarm(0) #Disable timer interrupt for the duration of the movement\n\t\t#safely grab current yaw\n\t\twith self.move_state_lock:\n\t\t\tcurrent_yaw = (math.degrees(move_state['yaw']) + 
360) % 360\n\t\t#Set goal to yaw+angle. Add 360 then mod to account for negative angles but avoid going over 360\n\t\tgoal = (current_yaw - angle + 360) % 360\n\t\tif self.angle_lock:\n\t\t\tif goal >= 315 and goal < 45:\n\t\t\t\tgoal = self.zeroed_angle\n\t\t\telif goal >= 45 and goal < 135:\n\t\t\t\tgoal = self.zeroed_angle + 90\n\t\t\telif goal >= 135 and goal < 225:\n\t\t\t\tgoal = self.zeroed_angle + 180\n\t\t\telif goal >= 225 and goal < 315:\n\t\t\t\tgoal = self.zeroed_angle + 270\n\t\tgoal = goal % 360\n\t\thalf_goal = (current_yaw - angle/2 + 360) % 360\n\t\thalfway_flag = False #used to flag if we've already sent out a halfway message\n\t\t#Anonymous function that calculates the current clockwise distance to the goal\n\t\tchkdist = lambda pos, goal: round(pos - goal + 360 * (goal > pos), 1)\n\t\t#Gets current distance and initially sets previous distance = distance\n\t\tdistance = chkdist(current_yaw, goal)\n\t\tprev_dist = distance\n\t\t\"\"\"Continues to move while absolute distance is not within angular_error and clockwise\n\t\tdistance is not increasing. NOTE: absolute distance is the shortest distance in either direction,\n\t\twhile clockwise distance is the distance using only clockwise movement.\n\t\tThe angular_error condition was added because the movements tended to end within the first few \n\t\tcycles due to some float error. With the error condition, the movement can only end when inside\n\t\tat least the general area of the goal.\"\"\"\n\t\twhile distance <= prev_dist or self.get_abs_dist(current_yaw, goal) > riu.angular_error:\n\t\t\tif estop_flag:\n\t\t\t\tself.publisher.publish(Mover.stop_msg)\n\t\t\telse:\n\t\t\t\t#Build and publish right turn message\n\t\t\t\ttwist_msg = Twist()\n\t\t\t\ttwist_msg.angular.z = -1 * riu.turn_rate\n\t\t\t\tself.publisher.publish(twist_msg)\n\t\t\t\t#If distance to goal is less than half the initial distance, publish the half done message\n\t\t\t\tif distance <= half_goal and not halfway_flag:\n\t\t\t\t\thalfway_flag = True\n\t\t\t\t\tself.status_pub.publish(String(\"half\"))\n\t\t\t\t#Update current position\n\t\t\t\twith self.move_state_lock:\n\t\t\t\t\tcurrent_yaw = (math.degrees(move_state['yaw']) + 360) % 360\n\t\t\t\t#Update previous distance, then update distance based on current position\n\t\t\t\tprev_dist = distance\n\t\t\t\tdistance = chkdist(current_yaw, goal)\n\t\t\trospy.sleep(.2)\n\t\t#After loop end, send stop message and send done message to cmd_queue\t\n\t\tself.publisher.publish(Mover.stop_msg)\n\t\tself.status_pub.publish(String(\"done\"))\n\t\tsignal.alarm(Mover.ready_message_interval) #Restart timer", "def navToPose(goal):\n #compute angle required to make straight-line move to desired pose\n global xPosition\n global yPosition\n global theta\n #capture desired x and y positions\n desiredY = goal.pose.position.y\n desiredX = goal.pose.position.x\n #capture desired angle\n quat = goal.pose.orientation\n q = [quat.x, quat.y, quat.z, quat.w]\n roll, pitch, yaw = euler_from_quaternion(q)\n desiredT = yaw * (180.0/math.pi)\n #compute distance to target\n distance = math.sqrt(math.pow((desiredX - xPosition), 2) + math.pow((desiredY - yPosition), 2))\n adjustedX = goal.pose.position.x - xPosition\n adjustedY = goal.pose.position.y - yPosition\n print goal.pose.position.x, goal.pose.position.y\n print xPosition, yPosition\n print adjustedX, adjustedY\n #compute initial turn amount\n initialTurn = (math.atan2(adjustedY, adjustedX) * (180 / math.pi)) - theta\n\n print \"moving from (\" + str(xPosition) + \", \" + 
str(yPosition) + \") @ \" + str(theta) + \" degrees\"\n print \"moving to (\" + str(desiredX) + \", \" + str(desiredY) + \") @ \" + str(desiredT) + \" degrees\"\n print \"distance: \" + str(distance) + \", initial turn: \" + str(initialTurn)\n rotateDegrees(initialTurn)\n driveSmooth(0.25, distance)\n rospy.sleep(2)\n finalTurn = desiredT - theta\n rotateDegrees(finalTurn)", "def virtual_distance(self):\n conflict_zone_radio = 384.0\n path_width = 172.0\n right_turn_radio = path_width / 4.0\n left_turn_radio = 3 * path_width / 4.0\n initial_straight_section = conflict_zone_radio - path_width / 2.0\n if self.get_intention() == \"s\":\n virtual_distance_value = self.get_virtual_x_position()\n elif self.get_intention() == \"r\":\n # Calculate real virtual distance\n if self.get_virtual_x_position() <= initial_straight_section:\n virtual_distance_value = self.get_virtual_x_position()\n elif self.get_virtual_y_position() > -right_turn_radio:\n virtual_distance_value = (\n initial_straight_section + atan(\n (\n self.get_virtual_x_position() -\n initial_straight_section\n ) / (right_turn_radio + self.get_virtual_y_position())\n ) * right_turn_radio\n )\n else:\n virtual_distance_value = (\n initial_straight_section + pi * right_turn_radio / 2.0 -\n self.get_virtual_y_position() - right_turn_radio\n )\n\n a = path_width / 2.0\n b = right_turn_radio + path_width / 4.0\n c = pi * right_turn_radio / 2.0\n # Scale virtual distance\n if virtual_distance_value <= initial_straight_section + c:\n virtual_distance_value *= (\n (initial_straight_section + a + b) /\n (initial_straight_section + c)\n )\n else:\n virtual_distance_value += a + b - c\n\n else:\n # Calculate real virtual distance\n if self.get_virtual_x_position() <= initial_straight_section:\n virtual_distance_value = self.get_virtual_x_position()\n elif self.get_virtual_y_position() < left_turn_radio:\n virtual_distance_value = (\n initial_straight_section + atan(\n (\n self.get_virtual_x_position() -\n initial_straight_section\n ) / (\n left_turn_radio -\n self.get_virtual_y_position()\n )\n ) * left_turn_radio\n )\n else:\n virtual_distance_value = (\n initial_straight_section + pi * left_turn_radio / 2 +\n self.get_virtual_y_position() - left_turn_radio\n )\n\n a = path_width / 2\n b = right_turn_radio + path_width / 4\n c = pi * left_turn_radio / 2\n # Scale virtual distance\n if virtual_distance_value <= initial_straight_section + c:\n virtual_distance_value *= (\n (initial_straight_section + a + b) /\n (initial_straight_section + c)\n )\n else:\n virtual_distance_value += a + b - c\n\n return virtual_distance_value", "def driveStraight(self, speed, distance):\n origin = copy.deepcopy(self._current) #hint: use this\n\n\n q = [origin.orientation.x,\n origin.orientation.y,\n origin.orientation.z,\n origin.orientation.w] # quaternion nonsense\n\n xOrigin=self._current.position.x\n yOrigin=self._current.position.y\n atTarget=False\n\n move_msg=Twist()\n move_msg.linear.x=speed\n move_msg.angular.z=0\n\n stop_msg=Twist()\n stop_msg.linear.x=0\n stop_msg.linear.z=0\n\n currentDistance=0\n #for extra credit ramp speed from 0 to speed and from speed to 1/4 speed when past half way\n vel=0\n\n while(not atTarget and not rospy.is_shutdown()):\n if(currentDistance>=distance):\n print('driveStraight: stoped')\n atTarget=True\n self._vel_pub.publish(stop_msg)\n else:\n print('driveStraight: moving')\n origin=copy.deepcopy(self._current)\n xCurrent=self._current.position.x\n yCurrent=self._current.position.y\n 
currentDistance=math.sqrt(math.pow((xCurrent-xOrigin),2)+math.pow((yCurrent-yOrigin),2))\n self._vel_pub.publish(move_msg)\n print('current x: '+str(xCurrent)+'current y: '+str(yCurrent)+'origin x: '+str(xOrigin)+'origin y:'+str(yOrigin))\n print('\\n distance: '+str(currentDistance))\n # rospy.sleep(.15)", "def driveStraight(speed, distance):\n global pose\n\n initialX = pose.pose.position.x\n initialY = pose.pose.position.y\n\n atTarget = False\n while (not atTarget and not rospy.is_shutdown()):\n currentX = pose.pose.position.x\n currentY = pose.pose.position.y\n currentDistance = math.sqrt(math.pow((currentX - initialX), 2) + math.pow((currentY - initialY), 2))\n if (currentDistance >= distance):\n atTarget = True\n sendMoveMsg(0, 0)\n else:\n sendMoveMsg(speed, 0)\n rospy.sleep(0.15)", "def _getDirection(coord1, coord2):\n x1, y1 = coord1\n x2, y2 = coord2\n\n if x1 == x2 and y1 == y2:\n return None # two coordinates are the same.\n elif x1 == x2 and y1 > y2:\n return UP\n elif x1 == x2 and y1 < y2:\n return DOWN\n elif x1 > x2 and y1 == y2:\n return LEFT\n elif x1 < x2 and y1 == y2:\n return RIGHT\n\n slope = float(y2 - y1) / float(x2 - x1)\n\n # Figure out which quadrant the line is going in, and then\n # determine the closest direction by calculating the slope\n if x2 > x1 and y2 < y1: # up right quadrant\n if slope > -0.4142:\n return RIGHT # slope is between 0 and 22.5 degrees\n elif slope < -2.4142:\n return UP # slope is between 67.5 and 90 degrees\n else:\n return UPRIGHT # slope is between 22.5 and 67.5 degrees\n elif x2 > x1 and y2 > y1: # down right quadrant\n if slope > 2.4142:\n return DOWN\n elif slope < 0.4142:\n return RIGHT\n else:\n return DOWNRIGHT\n elif x2 < x1 and y2 < y1: # up left quadrant\n if slope < 0.4142:\n return LEFT\n elif slope > 2.4142:\n return UP\n else:\n return UPLEFT\n elif x2 < x1 and y2 > y1: # down left quadrant\n if slope < -2.4142:\n return DOWN\n elif slope > -0.4142:\n return LEFT\n else:\n return DOWNLEFT" ]
[ "0.6105278", "0.6062355", "0.5997403", "0.59659237", "0.5943051", "0.590268", "0.5894837", "0.5735727", "0.5719809", "0.56404775", "0.56403416", "0.55594426", "0.5553458", "0.55498064", "0.5519736", "0.54981464", "0.5494342", "0.54870874", "0.54864216", "0.54541856", "0.543696", "0.5434749", "0.54170513", "0.5401848", "0.53986424", "0.5370052", "0.5369047", "0.5360154", "0.5351597", "0.5342681" ]
0.6467077
0
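Read together, the query and document above describe a right-triangle construction: the tag sits on a circle of radius sqrt(x^2 + y^2) around the rover, and the turn target is where that circle crosses a pathway edge line. A self-contained sketch of that arithmetic follows, assuming PATHWAY_EDGE_DIST is 0.33 m (inferred from the docstring's y=0.33m / y=-0.33m lines; the class constant itself is not shown here) and assuming _angle_between returns the signed angle from its first argument to its second.

import math

PATHWAY_EDGE_DIST = 0.33  # m, assumed from the docstring's +/-0.33 m lines
OVERSHOOT_DIST = 0.20     # m, same constant as in the document

def angle_between(p, q):
    # Signed angle from vector p to vector q, wrapped into (-pi, pi].
    a = math.atan2(q[1], q[0]) - math.atan2(p[1], p[0])
    return math.atan2(math.sin(a), math.cos(a))

def angle_and_dist_to_avoid(tag_x, tag_y, direction='left'):
    radius = math.hypot(tag_x, tag_y)
    # x-coordinate of the point where the circle through the tag crosses
    # the edge line y = +/-PATHWAY_EDGE_DIST (zero if the tag is too close).
    if radius > PATHWAY_EDGE_DIST:
        edge_x = math.sqrt(radius ** 2 - PATHWAY_EDGE_DIST ** 2)
    else:
        edge_x = 0.0
    edge_y = -PATHWAY_EDGE_DIST if direction == 'left' else PATHWAY_EDGE_DIST
    return (-angle_between((tag_x, tag_y), (edge_x, edge_y)),
            edge_x + OVERSHOOT_DIST)

# Tag 1 m ahead and 0.1 m off-center: turn ~0.43 rad and drive ~1.15 m.
print(angle_and_dist_to_avoid(1.0, 0.1, direction='left'))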
Get the angle required to turn and face a detection to put it in the center of the camera's field of view.
def get_angle_to_face_detection(self, detection):
    base_link_pose = self._transform_to_base_link(detection)
    radius = math.sqrt(base_link_pose.pose.position.x ** 2
                       + base_link_pose.pose.position.y ** 2)

    tag_point = Point(x=base_link_pose.pose.position.x,
                      y=base_link_pose.pose.position.y)

    center_of_view_point = Point()
    # solve for x given the radius and y-coord of a point on a circle
    # y-coord is zero in this special case
    center_of_view_point.x = math.sqrt(radius ** 2)
    center_of_view_point.y = 0

    return -self._angle_between(tag_point, center_of_view_point)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def angle(self) -> float:\n ...", "def angle(self) -> int:", "def get_angle_to_face_point(self, point):\n start = self.swarmie.get_odom_location().get_pose()\n return angles.shortest_angular_distance(\n start.theta,\n math.atan2(point.y - start.y, point.x - start.x)\n )", "def angle(self):\n return atan2(self.v.y, self.v.x)", "def angle(self):\n return arccos(dot((self.a - self.o) / self.r, (self.b - self.o) / self.r))", "def get_angle_of_view(focal_length, crop_factor=FULL_FRAME):\n d = 36 # mm, full-frame\n d /= crop_factor\n alpha = 2 * math.atan(d/(2*focal_length))\n return alpha", "def angle(self):\n return 0", "def angle(self):\r\n return self.model.angle", "def angle(self):\n return angle(self.force, self.forceXYZ, self.excited_axis,\n self.distance, self.distanceXYZ)", "def getAngle(self):\n return self.vector.angle", "def detector_angle(self, angle):\n self.rotate_rad(-radians(angle))", "def fingerAbsoluteAngle(this):\n\t\tif not this.finger: return None\n\t\t\n\t\t_180 = math.radians(180.0)\n\t\tfinger = this.finger.x * this._FOV.x\n\t\tO = this._SPACE.o * this._FOV.x\n\t\t\n\t\t# Angle calculé à partir des coordonnées de la caméra\n\t\tif this._POS.x: offset = math.atan(this._POS.y / this._POS.x)\n\t\telif this._POS.y > 0: offset = math.radians(90.0)\n\t\telif this._POS.y < 0: offset = math.radians(-90.0)\n\t\telse: offset = 0\n\t\t\t\n\t\treturn (_180 + offset) - (O - finger)", "def compute_steering_angle(self, frame):\n preprocessed = img_preprocess(frame)\n X = np.asarray([preprocessed])\n #steering_angle = self.model.predict(X)[0]\n steering_angle = self.model(X, training=False)[0]\n\n logging.debug('new steering angle: %s' % steering_angle)\n return int(steering_angle + 0.5) # round the nearest integer", "def _angle_of_attack(self, rel_wind, blade_chord):\n # blade_chord_vector - (relative_wind + pi)\n # rel_oposite = rel_wind.rotated(math.pi)\n aoa_rad = rel_wind.theta - blade_chord.theta\n aoa_rad = vec.normalize_angle(aoa_rad)\n aoa_360 = aoa_rad * 360 / math.tau\n return aoa_rad, aoa_360", "def get_angle(self):\n return self.bot_client.send_command(_Command.GetAngle)", "def avl_angle(self):\n dif_height = (self.heights[5] - self.heights[7])\n dif_position = (self.positions[0][7] - self.positions[0][5])\n angle = atan(dif_height / dif_position) / 1.5 * 180 / pi\n return angle", "def angle(self):\n v = self.p1 - self.p0\n return atan2(v.y, v.x)", "def computeYaw(Vx, Vy):\n #print(Vx, Vy)\n if Vx > 0:\n if Vy > 0:\n angle = (math.degrees(math.atan2(Vy,Vx)))#+ how far it is from the x axis)\n #print(angle)\n return angle \n elif Vy < 0:\n angle = (math.degrees(math.atan2(Vy,Vx)) )#- how far from x axis)\n #print(angle)\n return angle\n else:\n #print(math.degrees(math.atan2(Vy,Vx)))\n return math.degrees(math.atan2(Vy,Vx))", "def get_x_y_from_center(center, angle):\n print \"center\", center\n size_of_img = (640, 480)\n alpha_x = angle + (center[1] - 0.5 * size_of_img[1]) * camera_y_angle / size_of_img[1] \n alpha_y = (center[0] - 0.5 * size_of_img[0]) * camera_x_angle / size_of_img[0] \n print \"angle y :\", alpha_y\n delta_x = height / math.tan(math.radians(alpha_x))\n d = math.sqrt(delta_x ** 2 + height ** 2)\n delta_y = d * math.sin(math.radians(alpha_y))\n return round(delta_x), round(delta_y)", "def orientation(cnt):\n\t(x,y), (MA, ma), angle = cv2.fitEllipse(cnt)\n\treturn angle", "def get_angle_info(self):\n return", "def __calculate_angle(self):\r\n mouse_x, mouse_y = pygame.mouse.get_pos()\r\n rel_x, rel_y = mouse_x - self.x, mouse_y - self.y\r\n angle = (180 / 
PI) * -atan2(rel_y, rel_x) - 90\r\n self.set_angle(angle)", "def get_angle(self, point_x, point_y):\n angle = atan2(point_y - self.player_y, point_x - self.player_x)\n # print(f\"point_x {point_x} point_y {point_x} angle {angle}\")\n return angle", "def convergence_angle(self):\n return np.arctan2(self.radius, self.focal_length)", "def determine_rotation_angle(self, landmarks):\n lp = landmarks['left-eye-center-pos']\n rp = landmarks['right-eye-center-pos']\n return angle_between_points(lp, rp)", "def angle(self) -> float:\n return self._angle", "def angle(self) -> float:\n return self._angle", "def steps_to_angle():\n pass", "def angle(self):\n return self._angle", "def angle(self):\n return self._angle" ]
[ "0.694608", "0.6675294", "0.6667947", "0.65525675", "0.65257376", "0.65173125", "0.649658", "0.64952534", "0.64086175", "0.64077455", "0.6364776", "0.63421", "0.63389915", "0.6318293", "0.6277705", "0.6262226", "0.625976", "0.6244727", "0.6241997", "0.62134", "0.6200563", "0.6196278", "0.61937404", "0.6192471", "0.6187019", "0.61467", "0.61467", "0.61302185", "0.61294335", "0.61294335" ]
0.7480732
0
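Because center_of_view_point is (radius, 0), the geometry in this record collapses: the angle between the tag vector and a point dead ahead at the same radius is just the tag's bearing. A tiny sketch verifying that equivalence with plain floats, again assuming _angle_between returns the signed angle from its first argument to its second:

import math

def get_angle_to_face(tag_x, tag_y):
    # Mirrors get_angle_to_face_detection(): the "center of view" point is
    # (radius, 0), whose bearing is always zero.
    radius = math.hypot(tag_x, tag_y)
    angle_tag = math.atan2(tag_y, tag_x)
    angle_center = math.atan2(0.0, radius)  # always 0.0
    return -(angle_tag - angle_center)

# For a tag 1 m ahead and 0.2 m to the side, both forms agree:
print(get_angle_to_face(1.0, 0.2))  # -0.1973...
print(-math.atan2(0.2, 1.0))        # -0.1973...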
Turn and face the home tag nearest the center of view if we see one. Does nothing if no home tag is seen.
def face_home_tag(self):
    home_detections = self._sort_home_tags_nearest_center(
        self.swarmie.get_latest_targets().detections
    )
    if len(home_detections) > 0:
        angle = self.get_angle_to_face_detection(home_detections[0])
        current_heading = self.swarmie.get_odom_location().get_pose().theta
        self.swarmie.set_heading(
            current_heading + angle,
            ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION
        )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def go_home(self):\n self.set_all_positions([0]*self.nleaflets)", "def home(self):\n self.goto(0, 0)\n self.setheading(0)", "def set_home_position(self, lat, lon, alt):\n pass", "def set_home_locations(self):\n self.swarmie.set_home_gps_location(self.swarmie.get_gps_location())\n\n current_location = self.swarmie.get_odom_location()\n current_pose = current_location.get_pose()\n home_odom = Location(current_location.Odometry)\n\n detections = self.swarmie.get_latest_targets().detections\n try:\n for detection in detections:\n if detection.id == 256:\n see_home_tag = True\n home_detection = self._transform_to_odom(detection)\n\n quat = [home_detection.pose.orientation.x,\n home_detection.pose.orientation.y,\n home_detection.pose.orientation.z,\n home_detection.pose.orientation.w]\n _r, _p, yaw = tf.transformations.euler_from_quaternion(\n quat\n )\n yaw += math.pi / 2\n\n home_odom.Odometry.pose.pose.position.x = float(\n home_detection.pose.position.x + 0.5 * math.cos(yaw)\n )\n home_odom.Odometry.pose.pose.position.y = float(\n home_detection.pose.position.y + 0.5 * math.sin(yaw)\n )\n self.swarmie.set_home_odom_location(home_odom)\n return\n\n except tf.Exception:\n pass # use backup below\n\n # project home_odom location 50cm in front of rover's current location\n home_odom.Odometry.pose.pose.position.x = (\n current_pose.x + 0.5 * math.cos(current_pose.theta)\n )\n home_odom.Odometry.pose.pose.position.y = (\n current_pose.y + 0.5 * math.sin(current_pose.theta)\n )\n self.swarmie.set_home_odom_location(home_odom)\n return", "def sees_home_tag(self):\n detections = self.swarmie.get_latest_targets().detections\n\n for detection in detections:\n if detection.id == 256:\n return True\n\n return False", "def go_home(self):\n self.move_wl(0)", "def home(self):\n self.initial_offset = 0", "def go_home(self):\n self.set_jpos(self._home_position, wait=True)", "def home(self):\n\n # Report the current homing state\n\n print(\"Current homing state: {}\".format(self.robot.homed()))\n\n # Home each joint, even if it is already reported homed, except j4 which auto-homes with j3, and any other unhomable joint.\n\n for joint in self.JOINTS[0:4]: \n print(\"Homing {}\".format(joint))\n self.robot.home(joint)\n\n print(\"Homed all joints\")\n\n self.goToPose(self.HOME_POSE)\n\n return", "def go_home(node):\n if node.attr('t').isSettable():\n node.setAttr('t', (0, 0, 0))\n if node.attr('r').isSettable():\n node.setAttr('r', (0, 0, 0))\n if node.attr('s').isSettable():\n node.setAttr('s', (1, 1, 1))", "def _sort_home_tags_nearest_center(self, detections):\n sorted_detections = []\n\n for detection in detections:\n if detection.id == 256:\n sorted_detections.append(detection)\n\n return sorted(sorted_detections,\n key=lambda x: abs(x.pose.pose.position.x))", "def home(self):\n self.goto(0, 0)", "def is_inside_home_ring(self, detections):\n YAW_THRESHOLD = 1.3 # radians\n see_home_tag = False\n good_orientations = 0\n bad_orientations = 0\n\n for detection in detections:\n if detection.id == 256:\n see_home_tag = True\n home_detection = self._transform_to_base_link(detection)\n\n quat = [home_detection.pose.orientation.x,\n home_detection.pose.orientation.y,\n home_detection.pose.orientation.z,\n home_detection.pose.orientation.w]\n _r, _p, y = tf.transformations.euler_from_quaternion(quat)\n y -= math.pi / 2\n y = angles.normalize_angle(y)\n\n if abs(y) < YAW_THRESHOLD:\n bad_orientations += 1\n else:\n good_orientations += 1\n\n if not see_home_tag:\n return False\n\n return bad_orientations >= 
good_orientations", "def goHome():\n\t#Go to pod home\n\tif screen.lastScreen in screen.protectedScreens:\n\t\tpodScreen.show()\n\telse:\n\t\tsplashScreen.show()", "def home(self, max_dist=150, reset_pos=True): \n while not self.lim_cw:\n self.move_cm(True, max_dist, velocity=1)\n if reset_pos:\n self.step_position = 0\n self.homed = True", "def home(self):\n self.__send_short(self.MGMSG_MOT_MOVE_HOME, self.__chan, 0x00)", "def __add_homes(self):\n for home in self.__positions_of_homes:\n self.__grid[home[0]][home[1]][\"humans\"] = math.floor(\n self.__number_of_humans / self.__number_of_homes\n )", "def isCurrentPlayerHome(self):\r\n \r\n #creates corresponding starting and ending points for each player\r\n if self.getTurn() == RED:\r\n start = 0\r\n end = 18\r\n else:\r\n start = 6\r\n end = 24\r\n \r\n #checks whether the current player has checkers on corresponding points\r\n for i in range(start, end):\r\n if self.points[i].getTeam() == self.getTurn():\r\n return False\r\n \r\n return True", "def home(self):\n self.command(self.LCD_RETURNHOME)\n self._cursor_pos = (0, 0)\n self._msleep(2)", "def go_home(self):\n command = _build_robovac_command(RobovacModes.WORK, RobovacCommands.GO_HOME)\n message = self._build_command_user_data_message(command)\n\n self._send_packet(message, False)", "def minusToHome():\n\tif (not checkMotorsInPosition(-134.76, -34.197)):\n\t\treturn\n\n\tmoveMotor(dktheta, 0)\n\tmoveMotor(dkappa, 0)\n\tsimpleLog(\"Done\")", "def is_home_page_displayed(self):\n return self", "def press_home(self):\n # each test case 1st check for the stop button flag\n if not self.stopLoop:\n # get time\n ts = datetime.datetime.now().strftime(self.tsFormat)\n # Create label\n x = Label(\n self.testFrame, text=f'{ts} - Press Home',\n background=self.bgChooser(),\n foreground=\"#a5120d\",\n font=self.boldFont, anchor='w')\n x.pack(fill=X)\n # add counter for BG\n self.bgCounter += 1\n # allow window to catch up\n self.tkRoot.update()\n self.update_scrollbar()\n time.sleep(1)\n # Automation Script below --------------------\n\n self.tv.press_rc_key(self.rc.HOME)\n\n # Automation Script above --------------------\n\n # revert label color to black\n x.config(foreground=\"#000\", font=self.mainFont)\n self.LabelLists.append(x)\n else:\n print(\"stopping test\")", "def home( self ):\n\t\tself.command( LCD_RETURNHOME ) # set cursor position to zero\n\t\tsleep_us( 2000 ) # this command takes a long time!", "def gohome(self):\n raise Exception(\"Not implemented\")", "def go_to_center(self) -> int:\n\n self.bot.set_ball_color((0, 242, 255))\n # Compute distance Grid and shortest path\n grid = self.bfs()\n path = self.shortest_path(grid)\n\n # For each node\n for node in path:\n # Get points near it (front one is not needed)\n back_point = self.tile_in_the_direction((self.direction + 2) % 4)\n right_point = self.tile_in_the_direction((self.direction + 1) % 4)\n left_point = self.tile_in_the_direction((self.direction - 1) % 4)\n\n # Go to the next node in path\n if node == right_point:\n self.turn_right()\n elif node == left_point:\n self.turn_left()\n elif node == back_point:\n self.turn_left()\n self.turn_left()\n self.go_forward()\n self.bot.set_ball_color((0, 255, 0))\n\n # Refresh screen until user exits\n self.refresh_screen(self.img)\n self.user_pressed_exit(0)\n return SimulationRunStatus.STOP_SIMULATION", "def home(self, home):\n if home is None:\n raise ValueError(\"Invalid value for `home`, must not be `None`\") # noqa: E501\n\n self._home = home", "def gohome(turtle):\n 
turtle.penup()\n turtle.goto(0,0)\n turtle.pendown()", "def ishome(self) -> bool:\n pass", "def get_angle_and_dist_to_escape_home(self, detections):\n OVERSHOOT_DIST = 0.4 # meters, distance to overshoot target by\n result = {\n 'angle': sys.maxint,\n 'dist': None\n }\n see_home_tag = False\n\n for detection in detections:\n if detection.id == 256:\n see_home_tag = True\n home_detection = self._transform_to_base_link(detection)\n\n quat = [home_detection.pose.orientation.x,\n home_detection.pose.orientation.y,\n home_detection.pose.orientation.z,\n home_detection.pose.orientation.w]\n _r, _p, y = tf.transformations.euler_from_quaternion(quat)\n y -= math.pi / 2\n y = angles.normalize_angle(y)\n\n if abs(y) < result['angle']:\n result['angle'] = y\n result['dist'] = \\\n (math.sqrt(home_detection.pose.position.x ** 2\n + home_detection.pose.position.y **2)\n + OVERSHOOT_DIST)\n\n if not see_home_tag:\n # doesn't make sense to turn or drive if no home tags were seen\n return 0, 0\n\n return result['angle'], result['dist']" ]
[ "0.6119827", "0.5982294", "0.5946176", "0.5887", "0.5835729", "0.5829487", "0.58237636", "0.56908655", "0.5689949", "0.5625944", "0.56022465", "0.55240464", "0.5506971", "0.54984504", "0.5482635", "0.54312795", "0.52614295", "0.5238637", "0.522511", "0.5208262", "0.51757735", "0.5166348", "0.51126224", "0.50926375", "0.508763", "0.5073995", "0.50602746", "0.5059401", "0.50467926", "0.5045871" ]
0.78024226
0
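One detail worth calling out in face_home_tag above: the ignore argument reads as a bitmask, so IS_SONAR | IS_VISION suppresses both obstacle classes during the turn. A toy illustration with made-up flag values (the real constants live in the rover codebase's Obstacle class and are not shown here):

IS_SONAR = 0b01   # hypothetical value, for illustration only
IS_VISION = 0b10  # hypothetical value, for illustration only

ignore = IS_SONAR | IS_VISION
print(bin(ignore))              # 0b11 -> both obstacle classes masked out
print(bool(ignore & IS_SONAR))  # True
print(bool(ignore & IS_VISION)) # True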
Returns true if the rover can see a home tag. Returns false otherwise.
def sees_home_tag(self):
    detections = self.swarmie.get_latest_targets().detections

    for detection in detections:
        if detection.id == 256:
            return True

    return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ishome(self) -> bool:\n pass", "def someone_home(self) -> bool:\n return self._someone_home", "def ishome(self):\n return self._plrevgeoloc.isHome", "def is_home(self):\n return bool([\n device for device in self.devices\n if device.is_home and device.is_phone\n ])", "def is_home_page(self):\n return not self.title and self.category is None", "def isHomePage(self):\n home = self.getHome(self.url)\n if home == self.url:\n return True\n if home == self.url + '/':\n return True\n return False", "def can_tag(self):\n try:\n self.cork.require(role='beta-archivist')\n return True\n except Exception:\n return False", "def is_home(self):\n return self.last_seen.seconds / 60 <= 2 and self.last_seen.days == 0", "def isTopHomePage(self):\n domain = self.getDomain()\n if self.url == \"http://\" + domain + \"/\":\n return True\n if self.url == \"http://www.\" + domain + \"/\":\n return True\n if self.url == \"http://\" + domain:\n return True\n if self.url == \"http://www.\" + domain:\n return True\n return False", "def is_inside_home_ring(self, detections):\n YAW_THRESHOLD = 1.3 # radians\n see_home_tag = False\n good_orientations = 0\n bad_orientations = 0\n\n for detection in detections:\n if detection.id == 256:\n see_home_tag = True\n home_detection = self._transform_to_base_link(detection)\n\n quat = [home_detection.pose.orientation.x,\n home_detection.pose.orientation.y,\n home_detection.pose.orientation.z,\n home_detection.pose.orientation.w]\n _r, _p, y = tf.transformations.euler_from_quaternion(quat)\n y -= math.pi / 2\n y = angles.normalize_angle(y)\n\n if abs(y) < YAW_THRESHOLD:\n bad_orientations += 1\n else:\n good_orientations += 1\n\n if not see_home_tag:\n return False\n\n return bad_orientations >= good_orientations", "def is_hometown(town):\n if town == 'orlando':\n is_hometown = True\n else:\n is_hometown = False\n return is_hometown", "def homekit_enabled(self) -> bool:\n return bool(self._device_info[\"HomeKit\"])", "def home(event: EventType, widget: WidgetType) -> bool:\n return event.key == _locals.K_HOME", "def can_be_displayed_on_homepage(self):\n\n return self.filter(is_spotlighted=True).has_logo()", "def is_home_page_displayed(self):\n return self", "def get_home_state(self):\n raw_status = self.get_raw_status()\n is_home = raw_status & self.STATUS_HOMED\n is_homing = raw_status & self.STATUS_HOMING\n if is_homing:\n return 2\n if not is_home:\n return 1\n return 0", "def active(self):\n return self.home is not None and self.away is not None and self.winner is None", "def is_viewed(self):\n return self.has_label(VIEWED_LABEL)", "def is_on_home_page(self):\n current_url_path = urlparse(self.driver.current_url).path\n if current_url_path == \"/opencart.com/\":\n return True\n return False", "def isCurrentPlayerHome(self):\r\n \r\n #creates corresponding starting and ending points for each player\r\n if self.getTurn() == RED:\r\n start = 0\r\n end = 18\r\n else:\r\n start = 6\r\n end = 24\r\n \r\n #checks whether the current player has checkers on corresponding points\r\n for i in range(start, end):\r\n if self.points[i].getTeam() == self.getTurn():\r\n return False\r\n \r\n return True", "def _is_safe_to_back_up(self):\n # Only back up if we're far enough away from home for it\n # to be safe. 
Don't want to back up into the nest!\n home_loc = self.swarmie.get_home_odom_location()\n current_loc = self.swarmie.get_odom_location().get_pose()\n dist = math.sqrt((home_loc.x - current_loc.x) ** 2\n + (home_loc.y - current_loc.y) ** 2)\n if dist > 1.5:\n return True\n\n angle_to_home = self.get_angle_to_face_point(home_loc)\n if abs(angle_to_home) < math.pi / 2:\n return True\n\n return False", "def inHome(resname):\n prv_filename = os.path.join(os.getenv(\"HOME\"), \".aphla\", resname)\n if os.path.exists(prv_filename):\n return True\n else:\n return False", "def include_up_hosts(nmap_host):\n if nmap_host.status == 'up':\n return True\n return False", "def is_armed_home(self):\n return self in (\n ArmingState.ARMED_STAY,\n ArmingState.ARMED_STAY_PROA7,\n ArmingState.ARMED_STAY_BYPASS,\n ArmingState.ARMED_STAY_BYPASS_PROA7,\n ArmingState.ARMED_STAY_INSTANT,\n ArmingState.ARMED_STAY_INSTANT_PROA7,\n ArmingState.ARMED_STAY_INSTANT_BYPASS,\n ArmingState.ARMED_STAY_INSTANT_BYPASS_PROA7,\n ArmingState.ARMED_STAY_NIGHT,\n ArmingState.ARMED_STAY_NIGHT_BYPASS_PROA7,\n ArmingState.ARMED_STAY_NIGHT_INSTANT_PROA7,\n ArmingState.ARMED_STAY_NIGHT_INSTANT_BYPASS_PROA7,\n ArmingState.ARMED_STAY_OTHER,\n )", "def needs_home(self):\r\n return not bool(self.__lib.CC_CanMoveWithoutHomingFirst(self.__serno))", "def is_valid(self):\n if self.user_tag:\n return self.interface.is_tag_available(self.user_tag)\n return True", "def can_exist_outside_of_game(self):\n return True", "def can_exist_outside_of_game(self):\n return True", "def gethooverable(self):\n try:\n return self.hooverable\n except:\n return False", "def is_selected(self):\r\n if hasattr(self, 'name') and self.name == 'home':\r\n return False\r\n if self.opt:\r\n return request.params.get(self.opt, '') in self.aliases\r\n else:\r\n stripped_path = request.path.rstrip('/').lower()\r\n ustripped_path = _force_unicode(stripped_path)\r\n if stripped_path == self.bare_path:\r\n return True\r\n if stripped_path in self.aliases:\r\n return True" ]
[ "0.7205497", "0.70651335", "0.69522494", "0.65864754", "0.656828", "0.6561121", "0.64363885", "0.63901484", "0.6355829", "0.62823164", "0.6169507", "0.6133922", "0.604066", "0.6013944", "0.58709437", "0.5862436", "0.58027184", "0.57587904", "0.5700835", "0.5693258", "0.56464624", "0.56381655", "0.5551063", "0.5534678", "0.5533781", "0.55296534", "0.5476415", "0.5476415", "0.54675865", "0.54665905" ]
0.75401014
0
Sort home tags (id == 256) in view by their distance from the center of the camera's field of view.
def _sort_home_tags_nearest_center(self, detections):
    sorted_detections = []

    for detection in detections:
        if detection.id == 256:
            sorted_detections.append(detection)

    return sorted(sorted_detections,
                  key=lambda x: abs(x.pose.pose.position.x))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def face_home_tag(self):\n home_detections = self._sort_home_tags_nearest_center(\n self.swarmie.get_latest_targets().detections\n )\n if len(home_detections) > 0:\n angle = self.get_angle_to_face_detection(home_detections[0])\n current_heading = self.swarmie.get_odom_location().get_pose().theta\n self.swarmie.set_heading(\n current_heading + angle,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION\n )", "def calculate_postions_in_camera_frame(self):\n\n # Get the different distances\n self.calculate_all_distances()\n\n # The only position for this distance is centered in front of the camera\n self.close_positions_camera = [[self.close_distance, 0, 0]]\n\n # Calculate the dimensions of the field of view for the medium distance\n fov_height = self.fov_height_for_distance(self.medium_distance)\n fov_width = self.fov_width_for_distance(self.medium_distance)\n\n # Calculate the positions for the first row\n self.medium_positions_camera.append(\n [self.medium_distance, -(fov_width / 2 - self.caltab_width / 2), fov_height / 2 - self.caltab_height / 2])\n self.medium_positions_camera.append(\n [self.medium_distance, fov_width / 2 - self.caltab_width / 2, fov_height / 2 - self.caltab_height / 2])\n\n # Calculate the positions for the second row\n self.medium_positions_camera.append(\n [self.medium_distance, -(fov_width / 2 - self.caltab_width / 2),\n -(fov_height / 2 - self.caltab_height / 2)])\n self.medium_positions_camera.append(\n [self.medium_distance, fov_width / 2 - self.caltab_width / 2, -(fov_height / 2 - self.caltab_height / 2)])\n\n # Now get the dimensions of the field of view for the far distance\n fov_height = self.fov_height_for_distance(self.far_distance)\n fov_width = self.fov_width_for_distance(self.far_distance)\n\n # Calculate the positions for the first row\n self.far_positions_camera.append(\n [self.far_distance, -(fov_width / 2 - self.caltab_width / 2), fov_height / 2 - self.caltab_height / 2])\n self.far_positions_camera.append([self.far_distance, 0, fov_height / 2 - self.caltab_height / 2])\n self.far_positions_camera.append(\n [self.far_distance, fov_width / 2 - self.caltab_width / 2, fov_height / 2 - self.caltab_height / 2])\n\n # Calculate the positions for the second row\n self.far_positions_camera.append(\n [self.far_distance, -(fov_width / 2 - self.caltab_width / 2), 0])\n self.far_positions_camera.append([self.far_distance, 0, 0])\n self.far_positions_camera.append(\n [self.far_distance, fov_width / 2 - self.caltab_width / 2, 0])\n\n # Calculate the positions for the third row\n self.far_positions_camera.append(\n [self.far_distance, -(fov_width / 2 - self.caltab_width / 2), -(fov_height / 2 - self.caltab_height / 2)])\n self.far_positions_camera.append(\n [self.far_distance, 0, -(fov_height / 2 - self.caltab_height / 2)])\n self.far_positions_camera.append(\n [self.far_distance, fov_width / 2 - self.caltab_width / 2, -(fov_height / 2 - self.caltab_height / 2)])", "def get_angle_and_dist_to_escape_home(self, detections):\n OVERSHOOT_DIST = 0.4 # meters, distance to overshoot target by\n result = {\n 'angle': sys.maxint,\n 'dist': None\n }\n see_home_tag = False\n\n for detection in detections:\n if detection.id == 256:\n see_home_tag = True\n home_detection = self._transform_to_base_link(detection)\n\n quat = [home_detection.pose.orientation.x,\n home_detection.pose.orientation.y,\n home_detection.pose.orientation.z,\n home_detection.pose.orientation.w]\n _r, _p, y = tf.transformations.euler_from_quaternion(quat)\n y -= math.pi / 2\n y = 
angles.normalize_angle(y)\n\n if abs(y) < result['angle']:\n result['angle'] = y\n result['dist'] = \\\n (math.sqrt(home_detection.pose.position.x ** 2\n + home_detection.pose.position.y **2)\n + OVERSHOOT_DIST)\n\n if not see_home_tag:\n # doesn't make sense to turn or drive if no home tags were seen\n return 0, 0\n\n return result['angle'], result['dist']", "def _sort_tags_left_to_right(self, detections, id=0):\n BLOCK_IN_CLAW_DIST = 0.22 # meters\n sorted_detections = []\n\n for detection in detections:\n if (detection.id == id and\n detection.pose.pose.position.z > BLOCK_IN_CLAW_DIST):\n sorted_detections.append(detection)\n\n return sorted(sorted_detections, key=lambda x: x.pose.pose.position.x)", "def sorted_objects(detected_objects, keyname):\n\n if detected_objects.get(keyname):\n return sorted(detected_objects[keyname],\n key=lambda u: math.sqrt(u['norm_pos'][0] ** 2 + u['norm_pos'][1] ** 2))\n else:\n return []", "def split_by_home(matches, team_id):\n\n sorted_matches = {\n \"home\": [],\n \"away\": []\n }\n\n for match_id, match in matches.items():\n if match.hometeam.team_id == team_id:\n sorted_matches[\"home\"].append(match_id)\n elif match.awayteam.team_id == team_id:\n sorted_matches[\"away\"].append(match_id)\n\n return sorted_matches", "def _relative_5prime_pos(self, gRNAHit_objs) -> float:\n return sum((hit.range[0] if hit.target.sense != '-'\n else (hit.target_len - hit.range[1]))\n for hit in gRNAHit_objs)/len(gRNAHit_objs)", "def run_sort_home_by_score(self):\n self.homes = self.python_sort(self.homes)", "def sortDistance(netlist):\n netlist_dictionary = {}\n for i in range(len(netlist)):\n start = chips[netlist[i][0]]\n end = chips[netlist[i][1]]\n\n delta_x = abs(start[0]-end[0])\n delta_y = abs(start[1]-end[1])\n distance = delta_x + delta_y\n\n netlist_dictionary[(netlist[i][0], netlist[i][1])] = distance\n\n sorted_dictionary = sorted(netlist_dictionary.items(), key=operator.itemgetter(1))\n sorted_netlist = []\n for j in range(len(sorted_dictionary)):\n sorted_netlist.append(sorted_dictionary[j][0])\n\n return sorted_netlist", "def distances(self):", "def expected_distances_for_tag_location(self, x, y, include_margin=False):\n\n distances = {}\n\n for anchor_id in self.anchor_ids:\n ax, ay = self.anchors[anchor_id]\n distances[anchor_id] = math.hypot(ax - x, ay - y)\n \n return distances", "def sort_objects_from_viewworld(self, viewworld):\n opaque_objects = []\n transparent_objects = []\n centers = []\n for guid in self.objects:\n obj = self.objects[guid]\n if isinstance(obj, BufferObject):\n if obj.opacity * self.opacity < 1 and obj.bounding_box_center is not None:\n transparent_objects.append(obj)\n centers.append(transform_points_numpy([obj.bounding_box_center], obj.matrix)[0])\n else:\n opaque_objects.append(obj)\n if transparent_objects:\n centers = transform_points_numpy(centers, viewworld)\n transparent_objects = sorted(zip(transparent_objects, centers), key=lambda pair: pair[1][2])\n transparent_objects, _ = zip(*transparent_objects)\n return opaque_objects + list(transparent_objects)", "def objs_sort_by_center(objs, target=0):\n sorted = []\n centers = []\n for i in objs:\n if target == 0:\n centers.append((i['bbox'][0] + i['bbox'][2]) / 2.0)\n elif target == 1:\n centers.append((i['bbox'][1] + i['bbox'][3]) / 2.0)\n centers_idx = np.argsort(np.asarray(centers))\n\n for i in centers_idx:\n sorted.append(objs[i])\n \n return sorted", "def galaxy_positions():\n hdulist1 = pf.open(source+'/kids_data/KiDS_DR3.1_G9_ugri_shear.fits')\n '''\n hdulist2 = 
pf.open('../kids_data/KiDS_DR3.1_G12_ugri_shear.fits')\n hdulist3 = pf.open('../kids_data/KiDS_DR3.1_G15_ugri_shear.fits')\n hdulist4 = pf.open('../kids_data/KiDS_DR3.1_G23_ugri_shear.fits')\n hdulist5 = pf.open('../kids_data/KiDS_DR3.1_GS_ugri_shear.fits')\n '''\n ra = hdulist1[1].data['RAJ2000'][:sample]\n dec = hdulist1[1].data['DECJ2000'][:sample]\n global maxra\n maxra = max(ra)\n global minra\n minra = min(ra)\n global maxdec\n maxdec = max(dec)\n global mindec\n mindec = min(dec)\n global bsize\n bsize = abs(max(maxra, maxdec) - min(mindec, minra))\n coords = np.column_stack([ra, dec])\n global SIZE\n SIZE = len(coords)\n print(maxra, maxdec, minra, mindec, SIZE)\n ctree = cKDTree(coords)\n return ctree", "def nearby_sort(self, idx):\n start = max(0, idx - int(self.bin_size / 2))\n stop = min(idx + int(self.bin_size / 2), len(self.nums))\n self.nums[start: stop] = sorted(self.nums[start: stop])\n return stop", "def sortRegioni(tupla):\n\t\n\treturn int(tupla[0])", "def _layout(self):\n top = 2.0\n y = 0.0\n x = 0.0\n maxend = 0.0\n for track in self._tracks:\n track.set_view(self.view.species, self.view.seqname, \n self.view.start, self.view.end)\n tracksize = track.get_size()\n y -= tracksize[1]\n track.set_pos(track.pos_offset[0] + x, track.pos_offset[1] + y)\n self.size = [self.view.end - self.view.start + 1, 0 - y]", "def getFeaturedLocation(guide):\n photos = guide.photos.all()\n\n x = 0\n y = 0\n z = 0\n\n size = 0\n\n for photo in photos:\n if photo.latitude:\n lat = radians(float(photo.latitude))\n lon = radians(float(photo.longitude))\n x += cos(lat) * cos(lon)\n y += cos(lat) * sin(lon)\n z += sin(lat)\n size+=1\n\n if size is 0:\n return None\n\n x = float(x / size)\n y = float(y / size)\n z = float(z / size)\n\n return {\n 'latitude': degrees(atan2(z, sqrt(x * x + y * y))),\n 'longitude': degrees(atan2(y, x))\n }\n # return atan2(z, sqrt(x * x + y * y)), atan2(y, x)\n\n\n\n # for photo in photos:\n # if photo.latitude:\n # return {\n # 'latitude': photo.latitude,\n # 'longitude': photo.longitude\n # }\n\n # return None", "def _get_sort_key(self) -> np.array:\n data = self.reader.GetOutput()\n raw_cell_coords = np.empty((data.GetNumberOfCells(), 3))\n for i in range(data.GetNumberOfCells()):\n cell_corners = vtk_to_numpy(data.GetCell(i).GetPoints().GetData())\n raw_cell_coords[i] = np.array(\n [cell_corners[:, n].mean() for n in range(cell_corners.shape[1])]\n )\n\n cell_coords = np.array(\n [tuple(line) for line in raw_cell_coords],\n dtype=[(\"r\", \"f4\"), (\"phi\", \"f4\"), (\"z\", \"f4\")],\n )\n return cell_coords.argsort(order=[\"r\", \"phi\"])", "def sort_animals(all_animals):\n def get_key(a):\n return a.row + 0.001 * a.col\n\n all_animals.sort(key=get_key)", "def quicksort_from_pos(dataset, lat, lng) -> List[dict]:\n\tdist_from_x = calculateDistance(lat, lng)\n\tadd_dist_to_dataset(dataset, dist_from_x)\n\treturn quicksort(dataset, \"dist\")", "def distance_transform(img):\n dist_transform = cv2.distanceTransform(img, cv2.DIST_L2, 5)\n return dist_transform", "def car_positions(car_sectors, car_laps):\n car_sector_and_lap = [0] * 6\n # calculate all cars' total positions\n for i in range(6):\n car_sector_and_lap[i] = car_laps[i] * 1000 + car_sectors[i] \n # Sort the cars so that car at index 0 is the first car in the race\n sorted_cars = [i[0] for i in sorted(enumerate(car_sector_and_lap), key=lambda x:x[1])]\n sorted_cars.reverse()\n return sorted_cars", "def find_top(self, gravityDir):\n dirs = [\"-X\", \"+X\", \"-Y\", \"+Y\", \"-Z\", \"+Z\"]\n for dir in 
dirs:\n argsval = [\"NAME:\" + dir + \" Padding Data\", \"Value:=\", \"0\"]\n args = [\n \"NAME:AllTabs\",\n [\n \"NAME:Geometry3DCmdTab\",\n [\"NAME:PropServers\", \"Region:CreateRegion:1\"],\n [\"NAME:ChangedProps\", argsval],\n ],\n ]\n self.modeler.oeditor.ChangeProperty(args)\n oBoundingBox = self.modeler.get_model_bounding_box()\n if gravityDir < 3:\n return oBoundingBox[gravityDir + 3]\n else:\n return oBoundingBox[gravityDir - 3]", "def _calculate_distances(boxes, homography):\n pos_markers = []\n pix_markers = []\n for box in boxes:\n (pt1_w, pt1_h), (pt2_w, pt2_h) = box\n\n pix_marker = ((pt1_w + pt2_w) // 2, max(pt1_h, pt2_h))\n pix_markers.append(pix_marker)\n\n pos_marker = np.array(pix_marker).reshape(\n 1, 1, 2).astype(\"float32\")\n pos_marker = cv2.perspectiveTransform(\n pos_marker, homography).squeeze()\n pos_markers.append(pos_marker)\n\n if len(pos_markers) <= 1:\n return np.array([]), np.array([])\n\n distances = pdist(np.array(pos_markers))\n return pix_markers, distances", "def _sort_phot(self, verbose=False):\n if hasattr(self, \"data\") and hasattr(self, \"data_filters\"):\n ## This looks fugly.\n newkeys = np.array([i for i in self.data_filters.keys()])[np.argsort([self.data_filters[i].lambda_effective.value for i in self.data_filters])]\n\n sorted_data = OrderedDict()\n sorted_data_filters = OrderedDict()\n\n for newkey in newkeys:\n\n if verbose: print(newkey)\n\n sorted_data[newkey] = self.data[newkey]\n sorted_data_filters[newkey] = self.data_filters[newkey]\n\n self.data = sorted_data\n self.data_filters = sorted_data_filters\n\n else:\n warnings.warn(\"Doesn't seem to be any data here (empty self.data)\")\n pass", "def get_distances_to_pose(self, x, y):\n\n distances = list()\n for node in self.top_map.nodes:\n distance = dict()\n distance['node'] = node\n distance['dist'] = math.hypot((x - node.pose.position.x), (y - node.pose.position.y))\n distances.append(distance)\n return sorted(distances, key=lambda k: k['dist'])", "def _sort_measurements(self):\n if self._unsorted:\n sorted_ndxs = np.argsort(self._angles)\n self._distances = self._distances[sorted_ndxs]\n self._angles = self._angles[sorted_ndxs]\n self._intensities = self._intensities[sorted_ndxs]\n self._error_codes = self._error_codes[sorted_ndxs]\n self._unsorted = False", "def get_distance(film_coordinates, latitude, longitude):\n film_distance = []\n for film in film_coordinates.keys():\n user_coordinates = (latitude, longitude)\n film_coord = (film[0], film[1])\n\n distance = great_circle(user_coordinates, film_coord).kilometers\n film_distance.append((distance, film[0], film[1], film_coordinates[film]))\n\n film_distance.sort(key=lambda x: x[0])\n return film_distance[:10]", "def array_sort():\n to_concat = []\n for centroid_rgb, cluster in itertools.izip(centroids_rgb, self.clusters):\n # no need to revisit ratio\n new_idxed_arr = tf.concat(1,[tf.slice(cluster, [0,0], [-1,2]),\n tf.tile(tf.expand_dims(\n tf.constant(centroid_rgb), 0),\n multiples=[len(cluster.eval()), 1])])\n to_concat.append(new_idxed_arr)\n\n concated = tf.concat(0, to_concat)\n sorted_arr = np.array(sorted(concated.eval().tolist()), dtype=np.uint8)[:, 2:]\n\n new_img = Image.fromarray(sorted_arr.reshape([self.m, self.n, self.chann]))\n if save:\n new_img.save(outfile, format=format_)\n os.popen(\"open '{}'\".format(outfile))\n else:\n new_img.show()" ]
[ "0.5568948", "0.54662734", "0.5292728", "0.51869404", "0.5136833", "0.49867502", "0.49770924", "0.4963197", "0.49262795", "0.492238", "0.49045837", "0.48945516", "0.48619252", "0.48006356", "0.47847188", "0.478009", "0.4780042", "0.47667208", "0.47607234", "0.47496408", "0.47424608", "0.4727751", "0.47256538", "0.4724849", "0.47151798", "0.47101268", "0.47058228", "0.46877226", "0.46871188", "0.46770746" ]
0.74201065
0
Sort tags in view from left to right (by their x position in the camera frame). Removes/ignores tags close enough to the camera that they are likely a block in the claw.
def _sort_tags_left_to_right(self, detections, id=0):
    BLOCK_IN_CLAW_DIST = 0.22  # meters
    sorted_detections = []

    for detection in detections:
        if (detection.id == id and
                detection.pose.pose.position.z > BLOCK_IN_CLAW_DIST):
            sorted_detections.append(detection)

    return sorted(sorted_detections, key=lambda x: x.pose.pose.position.x)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _sort_home_tags_nearest_center(self, detections):\n sorted_detections = []\n\n for detection in detections:\n if detection.id == 256:\n sorted_detections.append(detection)\n\n return sorted(sorted_detections,\n key=lambda x: abs(x.pose.pose.position.x))", "def sort(self):\n \n ct=[]\n rt=[]\n wr=[]\n # search for tags that aren't in the right position\n for i in range(len(self.contigs)):\n c = self.contigs[i]\n if c.wa:\n if not self.wa:\n self.wa=[]\n self.wa.extend(c.wa)\n if c.ct:\n newcts=[ct_tag for ct_tag in c.ct if ct_tag.name!=c.name]\n map(self.contigs[i].ct.remove,newcts)\n ct.extend(newcts)\n for j in range(len(c.reads)):\n r = c.reads[j]\n if r.rt:\n newrts=[rt_tag for rt_tag in r.rt if rt_tag.name!=r.rd.name]\n map(self.contigs[i].reads[j].rt.remove,newrts)\n rt.extend(newrts)\n if r.wr:\n newwrs=[wr_tag for wr_tag in r.wr if wr_tag.name!=r.rd.name]\n map(self.contigs[i].reads[j].wr.remove,newwrs)\n wr.extend(newwrs)\n # now sort them into their proper place\n for i in range(len(self.contigs)):\n c = self.contigs[i]\n for ct_tag in ct:\n if ct_tag.name==c.name:\n if self.contigs[i].ct is None:\n self.contigs[i].ct=[]\n self.contigs[i].ct.append(ct_tag)\n if rt or wr:\n for j in range(len(c.reads)):\n r = c.reads[j]\n for rt_tag in rt:\n if rt_tag.name==r.rd.name:\n if self.contigs[i].reads[j].rt is None:\n self.contigs[i].reads[j].rt=[]\n self.contigs[i].reads[j].rt.append(rt_tag)\n for wr_tag in wr:\n if wr_tag.name==r.rd.name:\n if self.contigs[i].reads[j].wr is None:\n self.contigs[i].reads[j].wr=[]\n self.contigs[i].reads[j].wr.append(wr_tag)", "def sort_eyes(self):\n x1 = self.eyes[0][0]\n x2 = self.eyes[1][0]\n\n if x1 > x2:\n self.eyes.reverse()", "def sort(self, args):\n if not args:\n self.err_print('One argument required')\n return\n\n _key = args[0]\n cur = self.ui.leftwin.highlighted().data\n try:\n ind = song.tags.index(_key)\n cur.change_sort(ind)\n self.ui.rightwin.disp()\n except:\n self.err_print('\"{}\" is not a valid key to sort by'.format(_key))", "def process_tags(tags=list):\n new_tag_list = list()\n for tag in tags:\n new_tag = tag.replace(\"<\", \" \")\n new_tag = new_tag.replace(\">\", \" \")\n new_tag = new_tag.split()\n # sort elements by string length (this to avoid 'c' being checked before 'c++', etc)\n new_tag.sort(key=len, reverse=True)\n new_tag_list.append(new_tag)\n return new_tag_list", "def volume_sort(self):\n self.jobs_sorted = sorted(\n self.jobs,\n key=lambda job: (job['height'], job['width'] * job['height']),\n # key=lambda job: job['width'] * job['height'],\n reverse=True)", "def sort(self):\n self.fragment_list.sort()", "def sorted_tags(self):\n return sorted(self.tags, key=lambda x: x.name)", "def sort(self):\n # Base Case\n # If the robot has reached the end of the list and his light is off (no swaps have occurred),\n if self.can_move_right() == False and self.light_is_on() == False:\n return\n\n # Grab the first card\n self.swap_item()\n\n # While the robot is still able to move right,\n while self.can_move_right():\n\n # Move right\n self.move_right()\n\n # Compare the item in his hand to that in front of him\n # If the item in front of him is greater than what he is holding (-1), swap items\n if self.compare_item() == -1:\n # Swap the item\n self.swap_item()\n # Turn his light on to indicate that a swap has occured\n self.set_light_on()\n \n # Once the robot can no longer move right, he is at the end of the list and holding the largest value\n # Swap items\n self.swap_item()\n\n # Now the robot needs to traverse back to 
index 0, grabbing the smallest value as he goes\n # Follow the same logic as when he moved right with the largest value\n\n # If he hits a empty slot in the list, everything in front of it has been sorted\n # He doesn't need to sort anymore, he is holding the smallest value left to be sorted. \n # Put it in the blank spot and turn to move back in the other direction\n\n while self.compare_item() is not None:\n\n # Move left\n self.move_left()\n\n # Compare the item in his hand to that in front of him\n # If the item in front of him is less than what he is holding (1), swap items\n if self.compare_item() == 1:\n # Swap the item\n self.swap_item()\n # Turn his light on to indicate that a swap has occured\n self.set_light_on()\n \n # Once self.compare_item() is None, that means he is in front of a blank space\n # - everything to the left of the blank space has already been sorted\n # Deposit what he is holding\n self.swap_item()\n\n # Reset the light to the off position\n self.set_light_off()\n\n # Move one spot over to the right\n self.move_right()\n\n # Re-run the process all over again\n self.sort()", "def sort_cards(self):\n self.cards.sort(key=operator.attrgetter('persona', 'rank'))\n self.update_position()", "def contour_sort(l):\n length = len(l)\n if length <= 1:\n return l\n else:\n pivot = l.pop(int(length / 2))\n less, more = [], []\n for x in l:\n if cv2.contourArea(x) >= cv2.contourArea(pivot):\n less.append(x)\n else:\n more.append(x)\n return contour_sort(less) + [pivot] + contour_sort(more)", "def gallery_sort(request, item_container):\n\n return do_sort(request, item_container, 'pool', _(u'Bilder umordnen'))", "def _sort_phot(self, verbose=False):\n if hasattr(self, \"data\") and hasattr(self, \"data_filters\"):\n ## This looks fugly.\n newkeys = np.array([i for i in self.data_filters.keys()])[np.argsort([self.data_filters[i].lambda_effective.value for i in self.data_filters])]\n\n sorted_data = OrderedDict()\n sorted_data_filters = OrderedDict()\n\n for newkey in newkeys:\n\n if verbose: print(newkey)\n\n sorted_data[newkey] = self.data[newkey]\n sorted_data_filters[newkey] = self.data_filters[newkey]\n\n self.data = sorted_data\n self.data_filters = sorted_data_filters\n\n else:\n warnings.warn(\"Doesn't seem to be any data here (empty self.data)\")\n pass", "def sort_by_position(self):\n sorted_indx = np.argsort(self.vehicles.get_absolute_position(self.ids))\n sorted_ids = np.array(self.ids)[sorted_indx]\n return sorted_ids, None", "def sort(self):\n self.notes.sort()", "def sorted_objects(detected_objects, keyname):\n\n if detected_objects.get(keyname):\n return sorted(detected_objects[keyname],\n key=lambda u: math.sqrt(u['norm_pos'][0] ** 2 + u['norm_pos'][1] ** 2))\n else:\n return []", "def sort(self):\n self.cards.sort()", "def sort(self):\n self.cards.sort()", "def sort_filtered_contours(self):\r\n\r\n # Get the contours again\r\n invert = 255 - self.thresh_invert\r\n real_contours = cv2.findContours(invert, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\r\n real_contours = real_contours[0] if len(real_contours) == 2 else real_contours[1]\r\n\r\n # Make sure that they're within the correct range for size\r\n # If too small, it is probably noise; if too large, then should be things around the grid\r\n for i, c in enumerate(real_contours, 1):\r\n contour_area = cv2.contourArea(c)\r\n if self.min_cell_size < contour_area < self.max_cell_size:\r\n self.good_contours.append(c)\r\n\r\n # We assume a square board, so the number of rows/cols should be the square root of total 
contours/cells\r\n self.board_dimension = int(math.sqrt(len(self.good_contours)))\r\n\r\n # Sort the contours from top to bottom\r\n (half_sorted_contours, _) = contours.sort_contours(self.good_contours, method=\"top-to-bottom\")\r\n\r\n # We then sort each row from left to right\r\n row = []\r\n for i, c in enumerate(half_sorted_contours, 1):\r\n row.append(c)\r\n if i % self.board_dimension == 0:\r\n (full_sorted_contours, _) = contours.sort_contours(row, method=\"left-to-right\")\r\n self.game_board_contours.append(full_sorted_contours)\r\n row = []", "def sort_views_by_relevance(self):\n window = sublime.active_window()\n\n # add the current view is the most relevant\n views = [self.view]\n try:\n # the second most relevant suggestions are from the indexed panels\n for panel_name in panel_state:\n panel = window.find_output_panel(panel_name)\n panel.file_name = lambda v=panel_name: v \n views.append(panel)\n except Exception as e:\n print('No panel', e)\n\n # the last but not least are the open views\n for view in window.views():\n if view is not self.view:\n views.append(view)\n\n return views", "def reorder( self ):\n self.sorted.sort(self.compareFunction)", "def sort(self):\n # sort the contents of the container alphabetically\n # this is done automatically whenever an item is added/removed from the Container\n self.items.sort(key=lambda item: item.name)", "def _avoid_tag(self, id=0, ignore=Obstacle.IS_SONAR):\n sorted_detections = self._sort_tags_left_to_right(\n self.swarmie.get_latest_targets().detections,\n id=id\n )\n\n # if count == 3: # last resort\n # self.current_state = Planner.STATE_DRIVE\n # angle = self._get_angle_to_face(point)\n # self.swarmie.turn(\n # angle,\n # ignore=Obstacle.TAG_TARGET,\n # throw=False\n # )\n # self.result = self.swarmie.drive(\n # .75,\n # ignore=Obstacle.TAG_TARGET,\n # throw=False\n # )\n\n if len(sorted_detections) == 0:\n # no tags in view anymore\n print(\"I can't see anymore tags, I'll try creeping\",\n \"and clearing.\")\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_DRIVE\n self.swarmie.drive(\n 0.1,\n ignore=Obstacle.SONAR_BLOCK,\n throw=False\n )\n drive_result = self.clear(math.pi / 8, ignore=ignore)\n\n else:\n left_angle, left_dist = \\\n self._get_angle_and_dist_to_avoid(\n sorted_detections[0],\n direction='left'\n )\n right_angle, right_dist = \\\n self._get_angle_and_dist_to_avoid(\n sorted_detections[-1],\n direction='right'\n )\n angle = left_angle\n dist = left_dist\n\n if (self.current_state == Planner.STATE_AVOID_LEFT or\n self.prev_state == Planner.STATE_AVOID_LEFT):\n # Keep going left. 
Should help avoid bouncing back\n # and forth between tags just out of view.\n print(\"I was turning left last time, so I'll keep\",\n \"it that way.\")\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_LEFT\n angle = left_angle\n dist = left_dist\n\n elif (self.current_state == Planner.STATE_AVOID_RIGHT or\n self.prev_state == Planner.STATE_AVOID_RIGHT):\n # Keep going right\n print(\"I was turning right last time, so I'll\",\n \"keep it that way.\")\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_RIGHT\n angle = right_angle\n dist = right_dist\n\n else:\n # pick whichever angle is shortest\n if abs(right_angle) < abs(left_angle):\n print('Right looks most clear, turning right.')\n # print('Right turn makes most sense, turning right.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_RIGHT\n angle = right_angle\n dist = right_dist\n else:\n print('Left looks most clear, turning left.')\n # print('Left turn makes most sense, turning left')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_LEFT\n\n _turn_result, drive_result = self._go_around(\n angle,\n dist\n )\n\n return drive_result", "def sort_words(boxes):\n mean_height = sum([y2 - y1 for _, y1, _, y2 in boxes]) / len(boxes)\n boxes.view('i8,i8,i8,i8').sort(order=['f1'], axis=0)\n current_line = boxes[0][1]\n lines = []\n tmp_line = []\n for box in boxes:\n if box[1] > current_line + mean_height:\n lines.append(tmp_line)\n tmp_line = [box]\n current_line = box[1]\n continue\n tmp_line.append(box)\n lines.append(tmp_line)\n\n for line in lines:\n line.sort(key=lambda box: box[0])\n\n return lines", "def sort_detected_champions_to_buy_by_position(\n ocr_results_sorted, champions_list_for_ocr_\n):\n\n logging.debug(\"Function sort_detected_champions_to_buy_by_position() called\")\n # sort from lowest width (left to right side)\n ocr_results_sorted = sorted(ocr_results_sorted, key=lambda x: x[0])\n sorted_champions_to_buy = []\n for text in ocr_results_sorted:\n for champ in champions_list_for_ocr_:\n if champ in text: # filters champion names\n sorted_champions_to_buy.append(champ)\n logging.info(\n \"from for loop in sort_detected_champions_to_buy_by_position()\"\n )\n logging.info(\"found %s\", champ)\n logging.info(\"return in sort_detected_champions_to_buy_by_position()\")\n logging.info(\"List of sorted champions to buy: %s\", sorted_champions_to_buy)\n\n logging.debug(\"Function sort_detected_champions_to_buy_by_position() end\")\n return sorted_champions_to_buy", "def oldsortslice(self):\n ...", "def sort_objects_from_viewworld(self, viewworld):\n opaque_objects = []\n transparent_objects = []\n centers = []\n for guid in self.objects:\n obj = self.objects[guid]\n if isinstance(obj, BufferObject):\n if obj.opacity * self.opacity < 1 and obj.bounding_box_center is not None:\n transparent_objects.append(obj)\n centers.append(transform_points_numpy([obj.bounding_box_center], obj.matrix)[0])\n else:\n opaque_objects.append(obj)\n if transparent_objects:\n centers = transform_points_numpy(centers, viewworld)\n transparent_objects = sorted(zip(transparent_objects, centers), key=lambda pair: pair[1][2])\n transparent_objects, _ = zip(*transparent_objects)\n return opaque_objects + list(transparent_objects)", "def reorder_examples(self):\n self.example_wise_shrink(Ordering, key=sort_key)", "def tags(self):\r\n if self.indexchanged or not self.sortedtags:\r\n self.indexchanged_tag = False\r\n self.sortedtags = 
sorted(self.get_tags())\r\n return self.sortedtags\r\n return self.sortedtags", "def sort_album(self):\n self.sort('album')" ]
[ "0.5948881", "0.59211606", "0.5505296", "0.54796255", "0.5224809", "0.51409453", "0.51296383", "0.51185745", "0.507329", "0.5059373", "0.50522685", "0.5020326", "0.49868953", "0.4957405", "0.4956648", "0.4945304", "0.49099445", "0.49099445", "0.49007258", "0.48856413", "0.48853442", "0.4875039", "0.48610586", "0.4856502", "0.48547003", "0.48392347", "0.48155767", "0.48131895", "0.48119643", "0.48098415" ]
0.68595314
0
Turn by 'angle' and then drive 'dist'.
def _go_around(self, angle, dist):
    ignore = Obstacle.IS_SONAR
    if self.avoid_targets is True:
        ignore |= Obstacle.TAG_TARGET
    elif self.avoid_home is True:
        # Need to ignore both for this because target tags are likely to
        # be in view inside the home nest.
        ignore |= Obstacle.TAG_TARGET | Obstacle.TAG_HOME

    cur_heading = self.swarmie.get_odom_location().get_pose().theta
    turn_result = self.swarmie.set_heading(
        cur_heading + angle,
        ignore=ignore,
        throw=False
    )
    drive_result = self.swarmie.drive(dist,
                                      ignore=Obstacle.SONAR_BLOCK,
                                      throw=False)

    return turn_result, drive_result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def turn(orient, order, dist):\n offset = dist//90\n if order == \"L\":\n offset = -offset\n return cardinals[(cardinals.index(orient) + offset) %\n len(cardinals)]", "def rot(wx, wy, order, dist):\n for _ in range(dist//90):\n if order == \"R\":\n wx, wy = wy, -wx\n elif order == \"L\":\n wx, wy = -wy, wx\n return wx, wy", "def turn_by(self, dangle, dt):\n # Don't turn too fast\n self.angle += np.clip(dangle, -dt * self.turning_rate, dt * self.turning_rate)\n\n # Keep angle in range [-pi, pi)\n self.angle = normalize_angle(self.angle)", "def steps_to_angle():\n pass", "def tf_dist2deg(dist):\n x_rad = tf_dist2rad(dist)\n return tf_rad2deg(x_rad)", "def orbit_rotate(center, obj, d_ang, dist = 0, ang = -20):\n if ang == -20:\n\n dx = obj.rect.centerx - center.rect.centerx\n dy = obj.rect.centery - center.rect.centery\n\n if dx > 0 and dy < 0:\n ang = abs(np.rad2deg(np.arctan(dx/dy)))\n elif dx < 0 and dy < 0:\n ang = abs(np.rad2deg(np.arctan(dy/dx)))\n elif dx > 0 and dy > 0:\n ang = abs(np.rad2deg(np.arctan(dy/dx)))\n elif dx < 0 and dy > 0:\n ang = abs(np.rad2deg(np.arctan(dx/dy)))\n else:\n ang = 90\n else:\n\n obj.orbit_ang += d_ang\n\n if obj.orbit_ang > 360:\n obj.orbit_ang += -360\n elif obj.orbit_ang < 0:\n obj.orbit_ang += 360\n\n ang = obj.orbit_ang\n\n if dist == 0:\n pass\n\n obj.rect.centerx = center.rect.centerx + dist*(np.sin(np.deg2rad(ang)))\n obj.rect.centery = center.rect.centery + dist*(np.cos(np.deg2rad(ang)))", "def point(pt, angle, dist):\n x, y = pt\n return dist * cos(angle) + x, dist * sin(angle) + y,", "def drive_distance(degrees, motor, gear_ratio): #TODO Finish documentation", "def turn_to(self, angle, dt):\n a = normalize_angle(angle - self.angle)\n self.turn_by(a, dt)", "def _angle_of_attack(self, rel_wind, blade_chord):\n # blade_chord_vector - (relative_wind + pi)\n # rel_oposite = rel_wind.rotated(math.pi)\n aoa_rad = rel_wind.theta - blade_chord.theta\n aoa_rad = vec.normalize_angle(aoa_rad)\n aoa_360 = aoa_rad * 360 / math.tau\n return aoa_rad, aoa_360", "def angle(self) -> float:\n ...", "def find_allowable_angle(self, dist: float) -> float:\n angle = math.atan(self.TRUE_TARGET_RADIUS / dist)\n # print(f\"angle tolerance +- {angle} true target radius{self.TRUE_TARGET_RADIUS}\")\n return angle", "def closer_angle(x, a, dir=0):\n if dir == 0:\n return a + smaller_angle(x-a)\n elif dir == 1:\n return a + (x-a)%(2*np.pi)\n elif dir == -1:\n return a + (x-a)%(2*np.pi) - 2*np.pi", "def gripper_distance(self, dist=None, arm='arms'):\n if arm == 'larm':\n joints = [self.l_gripper_l_finger_joint]\n elif arm == 'rarm':\n joints = [self.r_gripper_l_finger_joint]\n elif arm == 'arms':\n joints = [self.r_gripper_l_finger_joint,\n self.l_gripper_l_finger_joint]\n else:\n raise ValueError('Invalid arm arm argument. 
You can specify '\n \"'larm', 'rarm' or 'arms'.\")\n\n def _dist(angle):\n return 0.0099 * (18.4586 * np.sin(angle) + np.cos(angle) - 1.0101)\n\n if dist is not None:\n # calculate joint_angle from approximated equation\n max_dist = _dist(joints[0].max_angle)\n dist = max(min(dist, max_dist), 0)\n d = dist / 2.0\n angle = 2 * np.arctan(\n (9137 - np.sqrt(2)\n * np.sqrt(-5e9 * (d**2) - 5e7 * d + 41739897))\n / (5 * (20000 * d + 199)))\n for joint in joints:\n joint.joint_angle(angle)\n angle = joints[0].joint_angle()\n return _dist(angle)", "def cutDownAngle_gk(state, raySortie):\n position = state.my_goal\n diff = state.ball_pos - position\n diff.norm = raySortie\n position += diff\n return goTo(state,position)", "def detector_angle(self, angle):\n self.rotate_rad(-radians(angle))", "def angle(self) -> int:", "def changeDir(turn, angle):\n # Converts each argument to the corrent type\n turn = str(turn)\n angle = int(angle)\n if turn == 'L': # If Left, set the negative of the angle, and divide by 90 to get 3/2/1/0\n return int(-angle / 90)\n elif turn == 'R':\n return int(angle / 90) # If Left, set the negative of the angle, and divide by 90 to get 3/2/1/0", "def comp_angle_magnet(self):\n Rbo = self.get_Rbo()\n W0 = self.comp_W0m()\n Harc = self.comp_H_arc()\n if self.is_outwards():\n return float(2 * arctan(W0 / (2 * (Rbo + self.H1 - Harc))))\n else:\n return float(2 * arctan(W0 / (2 * (Rbo - self.H1 - Harc))))\n\n # if self.W0_is_rad:\n # return self.W0\n # else: # Convert W0 from m to rad\n # Rbo = self.get_Rbo()\n # return float(2 * arcsin(self.W0 / (2 * Rbo)))", "def angle(z):", "def turn(self,\n radius,\n angle,\n number_of_points=0.01,\n max_points=_max_points,\n final_width=None,\n final_distance=None,\n layer=0,\n datatype=0):\n exact = True\n if angle == 'r':\n delta_i = _halfpi\n delta_f = 0\n elif angle == 'rr':\n delta_i = _halfpi\n delta_f = -delta_i\n elif angle == 'l':\n delta_i = -_halfpi\n delta_f = 0\n elif angle == 'll':\n delta_i = -_halfpi\n delta_f = -delta_i\n elif angle < 0:\n exact = False\n delta_i = _halfpi\n delta_f = delta_i + angle\n else:\n exact = False\n delta_i = -_halfpi\n delta_f = delta_i + angle\n if self.direction == '+x':\n self.direction = 0\n elif self.direction == '-x':\n self.direction = numpy.pi\n elif self.direction == '+y':\n self.direction = _halfpi\n elif self.direction == '-y':\n self.direction = -_halfpi\n elif exact:\n exact = False\n self.arc(radius, self.direction + delta_i, self.direction + delta_f,\n number_of_points, max_points, final_width, final_distance,\n layer, datatype)\n if exact:\n self.direction = _directions_list[int(\n round(self.direction / _halfpi)) % 4]\n return self", "def getDistance(angle):\n\n panTilt.pan(angle)\n time.sleep(DELAY)\n wallDistance = getWallDistance()\n edgeDistance = getEdgeDistance() if wallDistance is None else None\n\n return wallDistance, edgeDistance", "def angle(self, dangle_deg: float) -> None:\n ...", "def transform_angle_by_quadrant(self, initial_angle, x_diff, y_diff):\n\t\tif x_diff > 0 and y_diff > 0:\n\t\t\tprint(\"p1 in quadrant: {}\".format(1))\n\t\t\t# Point B in quadrant 1..\n\t\t\treturn degrees(initial_angle)\n\t\telif x_diff < 0 and y_diff > 0:\n\t\t\tprint(\"p1 in quadrant: {}\".format(2))\n\t\t\t# Point B in quadrant 2..\n\t\t\treturn 180 - degrees(initial_angle)\n\t\telif x_diff < 0 and y_diff < 0:\n\t\t\tprint(\"p1 in quadrant: {}\".format(3))\n\t\t\t# Point B in quadrant 3..\n\t\t\treturn 180 + degrees(initial_angle)\n\t\telif x_diff > 0 and y_diff < 0:\n\t\t\tprint(\"p1 in 
quadrant: {}\".format(4))\n\t\t\t# Point B in quadrant 4..\n\t\t\treturn 360 - degrees(initial_angle)\n\t\telse:\n\t\t\traise \"Error occurred in basic_drive_3/transform_angle_by_quadrant func..\"", "def settle(self):\n if (self.angle >= self.max_angle) or (\n self.angle <= -self.max_angle\n ): # time to reverse\n print(\"reverse\", self.angle, self.max_angle)\n self.speed *= -0.9 # damped\n self.max_angle *= 0.9\n if self.speed > 0:\n self.angle = self.max_angle\n else:\n self.angle = -self.max_angle\n\n self.angle += radians(self.speed)\n print(self.angle, self.max_angle, self.speed)\n self.x = self.cx + self.length * sin(self.angle)\n self.y = self.cy + self.length * cos(self.angle)", "def mapping(ref, non_ref, probe):\r\n v1 = (ref[0]-non_ref[0], ref[1]-non_ref[1], ref[2]-non_ref[2])\r\n v2 = (ref[0]-probe[0], ref[1]-probe[1], ref[2]-probe[2])\r\n cosin = angle_between(v1,v2)\r\n dist = math.sqrt((probe[0]-ref[0])**2+(probe[1]-ref[1])**2+(probe[2]-ref[2])**2)*cosin\r\n return dist", "def angle(self, angle: int, time: int = 0, /) -> None:", "def compute_angle(self, direction):\n scaled_cosine = self.w1.dot(direction) # ||direction|| cos(theta)\n scaled_sine = self.w2.dot(direction) # ||direction|| sin(theta)\n return np.arctan2(scaled_sine, scaled_cosine)", "def turn(self, angle):\n self.logger.debug(\"turn \" + str(angle))", "def rotate(self,center, angle):\n \n self.coord = [x-np.repeat([[center[0],center[1]]],[x.shape[0]],axis = 0) for x in self.coord]\n\n alpha = angle\n R = np.array([[np.cos(alpha),-np.sin(alpha)],[np.sin(alpha),np.cos(alpha)]])\n \n for i in range(len(self.coord)):\n self.coord[i] = np.squeeze([np.dot([x],R) for x in self.coord[i]])\n\n self.coord = [x+np.repeat([[center[0],center[1]]],[x.shape[0]],axis = 0) for x in self.coord]\n\n return self" ]
[ "0.6508124", "0.62017006", "0.6161366", "0.6018408", "0.6000472", "0.59441423", "0.594048", "0.5903337", "0.58467317", "0.58111876", "0.5799023", "0.57754487", "0.57551074", "0.57502764", "0.57444197", "0.5687021", "0.5685811", "0.56826", "0.5680098", "0.5659688", "0.5634794", "0.5625283", "0.56157196", "0.560745", "0.5602156", "0.55984986", "0.5596326", "0.5586211", "0.5572513", "0.5565681" ]
0.64250773
1
Returns the angle you should turn in order to face a point.
def get_angle_to_face_point(self, point):
    start = self.swarmie.get_odom_location().get_pose()
    return angles.shortest_angular_distance(
        start.theta,
        math.atan2(point.y - start.y, point.x - start.x)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def angle(self) -> float:\n ...", "def angle(self) -> int:", "def get_angle(self, point_x, point_y):\n angle = atan2(point_y - self.player_y, point_x - self.player_x)\n # print(f\"point_x {point_x} point_y {point_x} angle {angle}\")\n return angle", "def angle(self):\n v = self.p1 - self.p0\n return atan2(v.y, v.x)", "def angle(self):\n return atan2(self.v.y, self.v.x)", "def angle(self):\n return 0", "def angle(self):\n return angle(self.force, self.forceXYZ, self.excited_axis,\n self.distance, self.distanceXYZ)", "def angle(self):\n return math.degrees(math.atan2(self[1], self[0]))", "def angle(self) -> float:\n return self._angle", "def angle(self) -> float:\n return self._angle", "def angle(self):\n return arccos(dot((self.a - self.o) / self.r, (self.b - self.o) / self.r))", "def get_angle(self, angle_):\n return self.two_pi * angle_", "def getAngle(self):\n x, y = self.components\n return math.atan2(y, x)", "def angle(self):\n return self._angle", "def angle(self):\n return self._angle", "def angle(self):\n return self._angle", "def angle(a: Point, b: Point) -> int:\n ang = math.degrees(math.atan2(b.y - a.y, b.x - a.x)) + 90\n return ang + 360 if ang < 0 else ang", "def angle_in_degrees(x, y):\n return math.atan2(y, x) / math.pi * 180", "def angle(point1, point2):\n return atan2(point2.y() - point1.y(), point2.x() - point1.x())", "def getAngle(self):\n return self.vector.angle", "def angle_from_point( x, img_width=640, fov_angle=44 ):\r\n return( -( ( img_width / 2 ) - x ) * fov_angle )", "def get_angle(self):\n return self.bot_client.send_command(_Command.GetAngle)", "def getAngle(self):\n return self._angle", "def angle(z):", "def angle(firstPoint, secondPoint):\n\txDiff = secondPoint.x - firstPoint.x\n\tyDiff = secondPoint.y - firstPoint.y\n\treturn math.degrees(math.atan2(yDiff, xDiff))", "def angle(p1, p2):\n return dot(p1, p2)", "def point_angle(cx, cy, px, py):\n return atan2(py - cy, px - cx)", "def get_angle(self):\n return self.__angle", "def calculate_angle(opp, adjacent):\n return math.degrees(math.atan((opp / adjacent)))", "def steps_to_angle():\n pass" ]
[ "0.7816527", "0.7762613", "0.7670193", "0.75328827", "0.7400363", "0.73841375", "0.73469996", "0.7288011", "0.72033334", "0.72033334", "0.7117065", "0.7116469", "0.70479256", "0.7037837", "0.7037837", "0.7037837", "0.7018325", "0.7009219", "0.6985138", "0.6954646", "0.6939814", "0.6913312", "0.6892019", "0.68844134", "0.6866497", "0.6844517", "0.68281", "0.6827185", "0.6802354", "0.6775192" ]
0.81196034
0
Have the rover turn to face the nearest block to it. Useful when exiting gohome (when going home without a block) or search. Does nothing if no blocks are seen, if there is a home tag closer to the rover than the nearest block, or if a sonar obstacle prevents the rover from making the turn. Catches any transform exception raised by Swarmie.get_nearest_block_location() and does nothing.
def face_nearest_block(self):
    try:
        block = self.swarmie.get_nearest_block_location(
            use_targets_buffer=True
        )
    except tf.Exception:
        # The caller should be about to exit with a normal exit code
        # after this call anyway, so the pickup behavior is launched.
        return

    if block is not None:
        angle = self.get_angle_to_face_point(block)
        self.swarmie.turn(angle, ignore=Obstacle.IS_VISION, throw=False)

    return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def best_last_option(self):\n \n # get essential values\n board = self.get_game_space()\n affinity = self.get_affinity()\n \n # pick the right check for the game we are playing\n if isinstance(board, Gomoku):\n \n # get all possible blocks to make a move in\n winning_blocks = board.get_winning_blocks(affinity)\n print('total winning blocks:'+str(len(winning_blocks)))\n best_blocks = []\n best_block = None\n\n # find the largest blocks to place a stone in\n for block in winning_blocks:\n if affinity == BLUE_TILE():\n if len(best_blocks) == 0: best_blocks.append(block)\n elif len(block.blue) > len(best_blocks[0].blue):\n best_blocks = []\n best_blocks.append(block)\n elif len(block.blue) == len(best_blocks[0].blue):\n best_blocks.append(block)\n elif affinity ==RED_TILE():\n if len(best_blocks) == 0: best_blocks.append(block)\n if len(block.red) > len(best_blocks[0].red):\n best_blocks = []\n best_blocks.append(block)\n elif len(block.red) == len(best_blocks[0].red):\n best_blocks.append(block)\n\n # find the best block to place a stone in\n for block in best_blocks:\n if best_block is None: best_block = block \n elif block.tiles[0][0] <= best_block.tiles[0][0]: \n if (block.tiles[0][1] != block.tiles[1][1]):\n if block.direction == 'vertical':\n if block.tiles[WINNING_ROW_SIZE()-1][1] >= best_block.tiles[WINNING_ROW_SIZE()-1][1]:\n if affinity == RED_TILE(): \n if len(block.red) >= len(best_block.red):\n print('considered block:'+str(block.tiles))\n best_block = block \n if affinity == BLUE_TILE(): \n if len(block.blue) >= len(best_block.blue):\n print('considered block:'+str(block.tiles))\n best_block = block\n else:\n if block.tiles[0][1] >= best_block.tiles[0][1]:\n if affinity == RED_TILE(): \n if len(block.red) >= len(best_block.red):\n print('considered block:'+str(block.tiles))\n best_block = block \n if affinity == BLUE_TILE(): \n if len(block.blue) >= len(best_block.blue):\n print('considered block:'+str(block.tiles))\n best_block = block \n else:\n if block.tiles[0][1] >= best_block.tiles[0][1] and block.tiles[1][0] <= best_block.tiles[1][0]:\n if affinity == RED_TILE(): \n if len(block.red) >= len(best_block.red):\n print('considered block:'+str(block.tiles))\n best_block = block \n if affinity == BLUE_TILE(): \n if len(block.blue) >= len(best_block.blue):\n print('considered block:'+str(block.tiles))\n best_block = block \n\n # find the best move to make out of the best block \n # print('best block:'+str(best_block.tiles))\n best_move = (7,-1)\n for tile_i in range(len(best_block.tiles)):\n tile = best_block.tiles[tile_i]\n next_tile = None\n prev_tile = None \n if tile_i+1 in range(len(best_block.tiles)):\n next_tile = best_block.tiles[tile_i+1]\n if tile_i-1 in range(len(best_block.tiles)):\n prev_tile = best_block.tiles[tile_i-1]\n if board.get_tile(tile[0],tile[1]) == BLANK_TILE():\n if prev_tile is not None and next_tile is None:\n if board.get_tile(prev_tile[0],prev_tile[1]) == affinity:\n if tile[0] <= best_move[0]: \n if tile[1] >= tile[1]:\n best_move = tile \n elif next_tile is not None and prev_tile is None:\n if board.get_tile(next_tile[0],next_tile[1]) == affinity:\n if tile[0] <= best_move[0]: \n if tile[1] >= tile[1]:\n best_move = tile \n elif next_tile is not None and prev_tile is not None:\n if board.get_tile(prev_tile[0],prev_tile[1]) == affinity or \\\n board.get_tile(next_tile[0],next_tile[1]) == affinity:\n if tile[0] <= best_move[0]: \n if tile[1] >= tile[1]:\n best_move = tile \n \n return best_move", "def blockDetector(self, frame):\n 
self.detectBlocksInDepthImage()\n self.block_pos = np.zeros((50,3))\n self.block_num = 0\n pixal_arm = np.zeros(2)\n if self.kinectCalibrated == True:\n # get current arm position\n real_arm_x, real_arm_y,_,_ = self.rexarm.get_wrist_pose()\n real_arm_x *= -1000\n real_arm_y *= 1000\n real_arm = np.array(([real_arm_x],[real_arm_y],[1]))\n \n # normalize arm_line vector\n arm_length = np.sqrt(real_arm_x**2 + real_arm_y**2)\n l = np.array([real_arm_x,real_arm_y])\n\n # find center of block_1_height and put them into block_position\n for cnt in self.block_contours:\n M = cv2.moments(cnt)\n if M['m00']>0:\n cx = int(M['m10']/M['m00'])\n cy = int(M['m01']/M['m00'])\n c = np.array([[cx],[cy]])\n # Eliminate Arm itself\n real_x = self.real_coord[cx][cy][0]\n real_y = self.real_coord[cx][cy][1]\n d = np.linalg.norm(np.cross(l, np.array([real_x,real_y])))/np.linalg.norm(l)\n \n d_to_arm = np.sqrt((real_x-real_arm_x)**2+(real_y-real_arm_y)**2)\n d_to_ori = np.sqrt(real_x**2 + real_y**2)\n\n \n if d > 2 and not(d_to_ori<arm_length and d_to_arm<arm_length):\n # Check if its in our ROI\n if self.real_coord[cx][cy][0]>self.real_points[0][0] and self.real_coord[cx][cy][0]<self.real_points[2][0]:\n if self.real_coord[cx][cy][1]<self.real_points[0][1] and self.real_coord[cx][cy][1]>self.real_points[2][1]:\n # points\n self.block_pos[self.block_num][0] = self.real_coord[cx][cy][0]\n self.block_pos[self.block_num][1] = self.real_coord[cx][cy][1]\n self.block_pos[self.block_num][2] = 1\n # orientation\n rect = cv2.minAreaRect(cnt)\n self.block_ori[self.block_num][0] = -rect[2]\n box = cv2.boxPoints(rect)\n box = np.int0(box)\n # detect color\n self.block_color[self.block_num] = self.colorDetector(cx,cy)\n # draw contours\n cv2.drawContours(self.currentDetectFrame,[box],0,(30,145,86),3)\n cv2.circle(self.currentDetectFrame,(cx,cy),5,(30,145,86),-1)\n self.block_num += 1\n\n # find centers of 2 blocks\n for cnt in self.block_contours_2:\n M = cv2.moments(cnt)\n if M['m00']>0:\n cx = int(M['m10']/M['m00'])\n cy = int(M['m01']/M['m00'])\n c = np.array([[cx],[cy]])\n # Eliminate Arm itself\n real_x = self.real_coord[cx][cy][0]\n real_y = self.real_coord[cx][cy][1]\n d = np.linalg.norm(np.cross(l, np.array([real_x,real_y])))/np.linalg.norm(l)\n \n d_to_arm = np.sqrt((real_x-real_arm_x)**2+(real_y-real_arm_y)**2)\n d_to_ori = np.sqrt(real_x**2 + real_y**2)\n \n \n if d > 2 and not(d_to_ori<arm_length and d_to_arm<arm_length):\n # Check if its in our ROI\n if self.real_coord[cx][cy][0]>self.real_points[0][0] and self.real_coord[cx][cy][0]<self.real_points[2][0]:\n if self.real_coord[cx][cy][1]<self.real_points[0][1] and self.real_coord[cx][cy][1]>self.real_points[2][1]:\n # points\n self.block_pos[self.block_num][0] = self.real_coord[cx][cy][0]\n self.block_pos[self.block_num][1] = self.real_coord[cx][cy][1]\n self.block_pos[self.block_num][2] = 2\n # orientation\n rect = cv2.minAreaRect(cnt)\n self.block_ori[self.block_num][0] = -rect[2]\n box = cv2.boxPoints(rect)\n box = np.int0(box)\n # detect color\n self.block_color[self.block_num] = self.colorDetector(cx,cy)\n # draw contours\n cv2.drawContours(self.currentDetectFrame,[box],0,(30,87,137),3)\n cv2.circle(self.currentDetectFrame,(cx,cy),5,(30,87,137),-1)\n self.block_num += 1 \n\n # find centers of 3 blocks\n for cnt in self.block_contours_3:\n M = cv2.moments(cnt)\n if M['m00']>0:\n cx = int(M['m10']/M['m00'])\n cy = int(M['m01']/M['m00'])\n c = np.array([[cx],[cy]])\n # Eliminate Arm itself\n real_x = self.real_coord[cx][cy][0]\n real_y = 
self.real_coord[cx][cy][1]\n d = np.linalg.norm(np.cross(l, np.array([real_x,real_y])))/np.linalg.norm(l)\n \n d_to_arm = np.sqrt((real_x-real_arm_x)**2+(real_y-real_arm_y)**2)\n d_to_ori = np.sqrt(real_x**2 + real_y**2)\n \n \n if d > 2 and not(d_to_ori<arm_length and d_to_arm<arm_length):\n # Check if its in our ROI\n if self.real_coord[cx][cy][0]>self.real_points[0][0] and self.real_coord[cx][cy][0]<self.real_points[2][0]:\n if self.real_coord[cx][cy][1]<self.real_points[0][1] and self.real_coord[cx][cy][1]>self.real_points[2][1]:\n # points\n self.block_pos[self.block_num][0] = self.real_coord[cx][cy][0]\n self.block_pos[self.block_num][1] = self.real_coord[cx][cy][1]\n self.block_pos[self.block_num][2] = 3\n # orientation\n rect = cv2.minAreaRect(cnt)\n self.block_ori[self.block_num][0] = -rect[2]\n box = cv2.boxPoints(rect)\n box = np.int0(box)\n # detect color\n self.block_color[self.block_num] = self.colorDetector(cx,cy)\n # draw contours\n cv2.drawContours(self.currentDetectFrame,[box],0,(204,6,6),3)\n cv2.circle(self.currentDetectFrame,(cx,cy),5,(204,6,6),-1)\n self.block_num += 1 \n self.block_pos[self.block_num:50] = 0\n self.block_ori[self.block_num:50] = 0\n\n return frame", "def _win_block_move(self, state):\n block_moves = []\n for i in range(3):\n side, index = self._check_triple(state[i])\n if index != -1:\n if side == self.side:\n return i, index\n else:\n block_moves.append((i, index))\n side, index = self._check_triple([state[0][i], state[1][i], state[2][i]])\n if index != -1:\n if side == self.side:\n return index, i\n else:\n block_moves.append((index, i))\n\n side, index = self._check_triple([state[0][0], state[1][1], state[2][2]])\n if index != -1:\n if side == self.side:\n return index, index\n else:\n block_moves.append((index, index))\n side, index = self._check_triple([state[0][2], state[1][1], state[2][0]])\n if index != -1:\n if side == self.side:\n return index, 2 - index\n else:\n block_moves.append((index, 2 - index))\n if len(block_moves) > 0:\n return random.choice(block_moves)\n else:\n return None", "def propogate(self):\r\n X = len(grid[0])\r\n Y = len(grid)\r\n for DIR in [[1,0], [-1,0], [0,1], [0,-1]]:\r\n target_x, target_y = self.block_loc[0]+DIR[0], self.block_loc[1]+DIR[1]\r\n if 0 <= target_x < X and 0 <= target_y < Y: #if inbounds:\r\n target_block = grid[target_y][target_x]\r\n if not target_block.collapsed: #only ping uncollapsed blocks\r\n self.send_update(target_block,DIR)\r\n return", "def get_best_block(self) -> Block:\n assert self.indexes is not None\n block_hash = self.indexes.height.get_tip()\n block = self.get_transaction(block_hash)\n assert isinstance(block, Block)\n assert block.get_metadata().validation.is_fully_connected()\n return block", "def make_move(self, board: Block) -> int:\n best_block = None\n best_move = None\n best_score = -(2 ** 5) # lower bound on the score.\n\n curr_score = self.goal.score(board)\n\n for _ in range(self.moves_to_check):\n\n temp_block = select_random_block(board)\n move = random.randint(0, 3) # 4 not included as no smash allowed\n perform_move(temp_block, move)\n new_score = self.goal.score(board)\n if (new_score - curr_score) >= best_score:\n best_block = temp_block\n best_move = move\n best_score = new_score - curr_score\n undo_move(temp_block, move)\n\n # Apply the visual changes on the board\n best_block.highlighted = True\n self.renderer.draw(board, self.id)\n pygame.time.wait(TIME_DELAY)\n perform_move(best_block, best_move)\n best_block.highlighted = False\n self.renderer.draw(board, 
self.id)\n\n return 0", "def _update_block_poses(self, find_moved=False):\n try:\n resp = self._get_block_poses_world()\n named_poses = resp.poses\n except:\n import sys\n print('Service call to get block poses failed. Exiting.')\n sys.exit()\n\n n_found = 0\n for pddl_block_name, pddl_block in self.pddl_block_lookup.items():\n for named_pose in named_poses:\n if named_pose.block_id == pddl_block_name.split('_')[-1]:\n pose = named_pose.pose.pose\n # Skip changes the pose of objects in storage.\n if pose.position.x < 0.05:\n continue\n n_found += 1\n position = (pose.position.x, pose.position.y, pose.position.z)\n orientation = (pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w)\n self.execute()\n pddl_block.set_base_link_pose((position, orientation))\n if not self.use_planning_server:\n self.plan()\n pddl_block.set_base_link_pose((position, orientation))\n\n if find_moved and n_found != len(self.moved_blocks):\n input('Could not find all the moved blocks. Please reposition blocks outside of the camera view and hit enter to continue.')\n self._update_block_poses(find_moved=True)\n return\n\n # After loading from vision, objects may be in collision. Resolve this.\n for _, pddl_block in self.pddl_block_lookup.items():\n if pb_robot.collisions.body_collision(pddl_block, self.table):\n print('Collision with table and block:', pddl_block.readableName)\n position, orientation = pddl_block.get_base_link_pose()\n stable_z = pb_robot.placements.stable_z(pddl_block, self.table)\n position = (position[0], position[1], stable_z)\n self.execute()\n pddl_block.set_base_link_pose((position, orientation))\n self.plan()\n pddl_block.set_base_link_pose((position, orientation))\n\n # Resolve from low to high blocks.\n current_poses = [b.get_base_link_pose() for b in self.pddl_blocks]\n block_ixs = range(len(self.pddl_blocks))\n block_ixs = sorted(block_ixs, key=lambda ix: current_poses[ix][0][2], reverse=False)\n for ix in range(len(block_ixs)):\n bottom_block = self.pddl_blocks[block_ixs[ix]]\n for jx in range(ix+1, len(block_ixs)):\n top_block = self.pddl_blocks[block_ixs[jx]]\n\n dist_moved = 0\n while pb_robot.collisions.body_collision(bottom_block, top_block):\n print('Collision with bottom %s and top %s:' % (bottom_block.readableName, top_block.readableName))\n position, orientation = top_block.get_base_link_pose()\n stable_z = position[2] + 0.001\n dist_moved += 0.001\n if self.real and dist_moved > 0.04:\n print(f\"Found blocks {bottom_block} and {top_block} in collision\")\n input(\"Manually move the blocks and press Enter to continue\")\n self._update_block_poses(find_moved=False)\n return\n position = (position[0], position[1], stable_z)\n self.execute()\n top_block.set_base_link_pose((position, orientation))\n self.plan()\n top_block.set_base_link_pose((position, orientation))", "def solve_block(self, center_cell: tuple[int, int], first_round=True):\n block = Block(self.field, center_cell)\n action = block.solve()\n if action == 'clear':\n self.clear_queue.add_batch(block.unknown_neighbors,\n emphasis=self.emphasis[\"add_batch\"],\n color=\"new_clear\")\n if not self.clear_queue.is_busy:\n self.clear_queue.is_busy = True\n self.process(self.clear_queue)\n elif action == 'flag':\n to_flag = Queue(field=self.field, color=\"to_flag\")\n for cell in block.unknown_neighbors:\n to_flag.append(cell)\n to_flag.direction = self.direction\n to_flag.re_orient()\n if self.emphasis[\"to_flag\"].is_checked:\n self.update()\n pause(self.emphasis[\"to_flag\"].pause_time)\n while 
to_flag:\n new_flag = to_flag[0]\n to_flag.remove(new_flag)\n self.toggle_flag(new_flag)\n elif first_round and center_cell not in self.hyper_queue:\n self.hyper_queue.append(center_cell)", "def test_returns_block() -> None:\n gr = BlobGoal(REAL_RED)\n gp = PerimeterGoal(MELON_MAMBO)\n board = one_block_four_children_(1)\n rp = RandomPlayer(0, gr)\n sp = SmartPlayer(1, gp, 10)\n rp._proceed = True\n sp._proceed = True\n move_block_rp = rp.generate_move(board)[2]\n move_block_sp = sp.generate_move(board)[2]\n assert move_block_rp == board or move_block_rp in board.children\n assert move_block_sp == board or move_block_sp in board.children", "def blockDetector(self):\n # img = cv2.cvtColor(self.VideoFrame, cv2.COLOR_RGB2HSV)\n # img = cv2.resize(img, (640, 480))\n # cv2.imwrite('hsv.jpg', img)\n # mask = None\n # for color in color_ranges:\n # mask = cv2.inRange(img, color_ranges[color][0], color_ranges[color][1])\n # cv2.imwrite('blockdetect.jpg', cv2.bitwise_and(img, img, mask=mask))\n blocks = self.detectBlocksInDepthImage()\n pick_up_locs = []\n for block in blocks:\n coords = block[0]\n u = (coords[0][0] + coords[2][0]) // 2\n v = (coords[0][1] + coords[2][1]) // 2\n self.VideoFrame = cv2.circle(self.VideoFrame,(u,v), 1, (0,0,0))\n d = self.DepthFrameRaw[u,v]\n d = self.convertDepthToSI(d)\n world_coords = self.calculatePixelToWorld(u, v, d)\n world_coords[2] = self.max_depth - d\n pick_up_locs.append(world_coords)\n return pick_up_locs", "def make_move(self, board: Block):\n # select a random block and highlight it.\n rand_block = select_random_block(board)\n rand_block.highlighted = True\n self.renderer.draw(board, self.id)\n pygame.time.wait(TIME_DELAY)\n choice = random.randint(0, 4)\n\n if rand_block.level == rand_block.max_depth or rand_block.level == 0:\n # Random player has chosen to smash an invalid block thus its move\n # is forfeited\n if choice == 4:\n pass\n else:\n perform_move(rand_block, choice)\n else:\n perform_move(rand_block, choice)\n rand_block.highlighted = False\n self.renderer.draw(board, self.id)\n return 0", "def block_edges(self, block_id, remap_local=...): # -> tuple[Unknown, Unknown, Unknown]:\n ...", "def getBlocks(self):\n blocks = self.getBlocksMsg(b'\\x00')\n last_locator = self.largeMessageControl(blocks, 'inv', 0)\n\n while last_locator[1] < TARGET_BLOCK:\n blocks = self.getBlocksMsg(bytearray.fromhex(convertLittleBig(last_locator[0])))\n last_locator = self.largeMessageControl(blocks, 'inv', last_locator[1])\n\n print('\\nSuccessfully found the Block #{}: {}'.format(TARGET_BLOCK, last_locator[0]))\n return last_locator[0]", "def get_blocking_entities_at_location(entities, destination_x, destination_y):\n for entity in entities:\n if entity.blocks and entity.x == destination_x and entity.y == destination_y:\n return entity\n\n return None", "def get_neighboring_block(self, instance):\n\n # Strip index. 
FIXME: Use a regex here.\n block_type = instance.split(\"[\", maxsplit=1)[0]\n\n # Check self\n if self.instance == instance:\n return self\n\n # Check children\n if instance in self.blocks:\n return self.blocks[instance]\n\n # Check parent and siblings\n if self.parent is not None:\n # Parent\n if self.parent.type == block_type:\n return self.parent\n\n # Siblings\n if instance in self.parent.blocks:\n return self.parent.blocks[instance]\n\n return None", "def pick_block(self):\n current_block = self.blocks[self.editor_cursor_position[1]][self.editor_cursor_position[0]]\n if current_block in self.available_block_types:\n self.current_block_type = self.available_block_types.index(current_block)", "def spider(row, col):\r\n global moves_left\r\n\r\n touching = count_mines(row, col)\r\n try:\r\n if mines[row][col] == 1: # don't spider from mine blocks!\r\n return\r\n except:\r\n pass\r\n \r\n if grid[row][col].ButtonColor[1] in (\"green\", \"red\"): # already discovered\r\n return\r\n \r\n # mark this block as discovered\r\n grid[row][col].update(str(touching), button_color=(\"white\",\"green\"))\r\n moves_left -= 1\r\n\r\n # check surrounding tiles\r\n for r,c in ((-1,-1),(-1,0),(-1,1),(0,-1),(0,1),(1,-1),(1,0),(1,1)):\r\n try:\r\n if row+r == -1 or col+c == -1:\r\n continue\r\n total = count_mines(row+r, col+c)\r\n # print(f\"Check {row+r}, {col+c}: {total} {grid[row+r][col+c].ButtonColor[1]}\")\r\n # check if already been coloured\r\n if grid[row+r][col+c].ButtonColor[1] == \"gray\":\r\n if total == 0:\r\n spider(row+r, col+c)\r\n else:\r\n grid[row+r][col+c].update(str(total), button_color=(\"white\",\"green\"))\r\n moves_left -= 1\r\n except IndexError:\r\n continue", "def possibleMove(self, dist, blockList):\r\n \r\n if self.orientation == \"v\":\r\n for block in blockList:\r\n if dist >= 0:\r\n for n in range(dist):\r\n for coords in self.getCoords():\r\n if ((coords[0], coords[1] + n) in\r\n block.getCoords()) and (block.getNum() !=\r\n self.getNum()):\r\n self.collidedPieces = 1\r\n else:\r\n for n in range(0, dist, -1):\r\n for coords in self.getCoords():\r\n if ((coords[0], coords[1] +n) in\r\n block.getCoords()) and (block.getNum() !=\r\n self.getNum()):\r\n self.collidedPieces = 1\r\n \r\n self.y += dist\r\n self.setCoords()\r\n \r\n elif self.orientation == \"h\":\r\n for block in blockList:\r\n if dist >= 0:\r\n for n in range(dist):\r\n for coords in self.getCoords():\r\n if ((coords[0] + n, coords[1]) in\r\n block.getCoords()) and (block.getNum() !=\r\n self.getNum()):\r\n self.collidedPieces = 1\r\n else:\r\n for n in range(0, dist, -1):\r\n for coords in self.getCoords():\r\n if ((coords[0] + n, coords[1]) in\r\n block.getCoords()) and(block.getNum() !=\r\n self.getNum()):\r\n self.collidedPieces = 1\r\n \r\n self.x += dist\r\n self.setCoords()", "def reconsiderblock(self, block_hash: str) -> None:\n return self.rpc_call(\"reconsiderblock\", block_hash)", "def mine(self):\n new_block = Block(self.block['timestamp'], self.block['car'],\n self.block['id'])\n # link the block to the previous block\n new_block.previous_hash = self._get_previous_hash()\n while True:\n # get a hash\n new_hash = new_block.get_hash()\n # check hash rules, in our case check if the hash starts with\n # self.difficulty number of zeroes\n if new_hash[0] != self.difficulty * \"0\":\n if self.new_block[\"block\"] is None:\n # the hash hasn't been found yet by any other process,\n # therefore increase the nonce and continue\n # miners will use a different mining mechanism in order\n # to increase the 
probability of finding a hash by\n # a different miner\n new_block.increment_nonce(self.id + 1)\n continue\n break\n break\n\n # NOTE: May happen that two processes find the hash at the same time,\n # because there is not a big difficulty, however, it's not a problem,\n # for sake of the demo it's fine\n\n if self.new_block[\"block\"] is None:\n # this process has found the hash first\n print(self.id, \" - the winner hash\", new_hash)\n new_block.hash = new_hash\n self.new_block[\"block\"] = new_block\n print(self.id, \" - mined the block\")\n else:\n # validate the block found by other process (miner)\n if self.new_block[\"validated\"] is not False:\n print(self.id, \" - validating\")\n # check block's validity\n valid = False\n if self.new_block[\"block\"].is_block_valid():\n # check blockchain's validity when we apply the newly\n # mined block\n if self.is_blockchain_valid(self.new_block[\"block\"]):\n valid = True\n self.new_block[\"validated\"] = valid\n else:\n # NOTE: this demo doesn't take into account the number of\n # miners who approved the block, the block will be rejected\n # if any of them rejected it\n # but usually just more than 50% of miners must approve\n print(self.id, \" - the block has been rejected by other miner\")", "def mine_block(self):\n if self.public_key == None:\n return None\n last_block = self.__chain[-1]\n hashed_block = hash_block(last_block)\n proof = self.proof_of_work()\n reward_transaction = Transaction(\n 'MINING', self.public_key, '', MINING_REWARD)\n\n copied_transactions = self.__open_transactions[:]\n for tx in copied_transactions:\n if not Wallet.verify_transaction(tx):\n return None\n copied_transactions.append(reward_transaction)\n\n copied_chipsactions = self.__open_chipsactions[:]\n for tx in copied_chipsactions:\n if not Wallet.verify_chipsaction(tx):\n return None\n\n copied_messsactions = self.__open_messsactions[:]\n for tx in copied_messsactions:\n if not Wallet.verify_messsaction(tx):\n return None\n\n block = Block(len(self.__chain), hashed_block,\n copied_transactions, copied_chipsactions, copied_messsactions, proof)\n self.__chain.append(block)\n self.__open_transactions = []\n self.__open_chipsactions = []\n self.__open_messsactions = []\n self.save_data()\n for node in self.__peer_nodes:\n url = 'http://{}/broadcast-block'.format(node)\n converted_block = block.__dict__.copy()\n converted_block['transactions'] = [\n tx.__dict__ for tx in converted_block['transactions']]\n converted_block['chipsactions'] = [\n tx.__dict__ for tx in converted_block['chipsactions']]\n converted_block['messsactions'] = [\n tx.__dict__ for tx in converted_block['messsactions']] \n try:\n response = requests.post(url, json={'block': converted_block})\n if response.status_code == 400 or response.status_code == 500:\n print('Block declined, needs resolving')\n if response.status_code == 409:\n self.resolve_conflicts = True\n except requests.exceptions.ConnectionError:\n continue\n return block", "def getBlock(self) -> ghidra.program.model.correlate.Block:\n ...", "def mine_block(self):\n if self.hosting_node == None:\n return None\n # Fetch the currently last block of the blockchain\n last_block = self.__chain[-1]\n print(last_block)\n # Hash the last block (to be able to compare it to the stored hash value)\n hashed_block = hash_block(last_block)\n proof = self.proof_of_work()\n # Miners should be rewarded, so let's create a reward transaction\n reward_transaction = Transfer(self.hosting_node, \"MINING\", MINING_REWARD)\n # Copy transaction instead of 
manipulating the original open_transactions list\n # This ensures that if for some reason the mining should fail, we don't have the reward transaction stored in the open transactions\n copied_transactions = self.__open_transfers[:]\n for tx in copied_transactions:\n if not Wallet.verify_transfer(tx):\n return None\n copied_transactions.append(reward_transaction)\n block = Block(len(self.__chain), hashed_block, copied_transactions, proof)\n self.__chain.append(block)\n self.__open_transfers = []\n self.save_data()\n return block", "def at_wall(self):\n\n wall_close = self.is_at_wall()\n \n # Decide which direction to go\n if wall_close:\n \n # Find the closest detected point\n dmin = self.distmax\n tmin = 0\n for i, d in enumerate(self.parameters.sensor_distances):\n if d < dmin:\n dmin = d\n tmin = self.parameters.sensor_poses[i].theta\n \n # Go that way\n if tmin > 0:\n self.parameters.direction = 'left'\n else:\n self.parameters.direction = 'right'\n \n # Notify the controller\n self.wall.set_parameters(self.parameters)\n \n # Save the closest we've been to the goal\n self.best_distance = self.distance_from_goal\n \n return wall_close", "def get_nearest_row(self):\n return (self.rect.top - (self.screen.get_height() // 12)) // self.maze.block_size", "def blocks_ahead_of_pacman(self, dx, dy):\n\n # Here's where we want to move to\n x = self.rect.x + dx\n y = self.rect.y + dy\n\n # Find integer block pos, using floor (so 4.7 becomes 4)\n # ix, iy = int(x // BLOCK_SIZE), int(y // BLOCK_SIZE)\n # # Remainder let's us check adjacent blocks\n # rx, ry = x % BLOCK_SIZE, y % BLOCK_SIZE\n\n # blocks = [world[iy][ix]]\n # if rx: blocks.append(world[iy][ix + 1])\n # if ry: blocks.append(world[iy + 1][ix])\n # if rx and ry: blocks.append(world[iy + 1][ix + 1])\n\n #return blocks\n return None", "def new_block(self, body_blocks, snake_head):\n\t\tx = randint(0, 35)\n\t\ty = randint(1, 26)\n\t\tself.rect.x = (25 * x) + 1\n\t\tself.rect.bottom = 25 * y\n\t\t\n\t\t# If new block is on snake, get new block\n\t\tif self.rect.x == snake_head.rect.x and self.rect.bottom == snake_head.rect.bottom:\n\t\t\tself.new_block(body_blocks, snake_head)\n\t\t\n\t\t# If new block is on any body block, get new block\n\t\tif body_blocks:\n\t\t\tfor i in range(len(body_blocks)):\n\t\t\t\tif self.rect.x == body_blocks[i].rect.x and self.rect.bottom == body_blocks[i].rect.bottom:\n\t\t\t\t\tself.new_block(body_blocks, snake_head)", "def moveBlock(self, block: ghidra.program.model.mem.MemoryBlock, newStartAddr: ghidra.program.model.address.Address, monitor: ghidra.util.task.TaskMonitor) -> None:\n ...", "def fall(self, current):\n grid = self.ids.grid\n children = grid.children\n\n # The block above the current one is the one with the higher id\n child_above = current + grid.cols\n\n # the top row of blocks starts with this id\n topmost = len(grid.children) - grid.cols\n\n if child_above > topmost:\n # We are in the top row, generate new coloured block\n colour = random.randint(0, len(colours)-1)\n children[current].background_color = get_color_from_hex(colours[colour][0])\n else:\n # Let the block on top of us fall down and do the same for the block above\n children[current].background_color = children[child_above].background_color\n self.fall(child_above)", "def blocks(self, game_object, rotation = 0):\n return self.filled(self.move(game_object.position, game_object.direction + rotation))" ]
[ "0.5944293", "0.57909685", "0.57898897", "0.57172054", "0.57123226", "0.5624876", "0.55736315", "0.55671346", "0.55591387", "0.55555564", "0.54708856", "0.5432141", "0.53805804", "0.5367929", "0.533513", "0.5331298", "0.5309979", "0.5303084", "0.52709633", "0.5264895", "0.52608275", "0.5258493", "0.5258328", "0.52364445", "0.523546", "0.5195814", "0.517537", "0.5174828", "0.5159627", "0.5150471" ]
0.8221664
0
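The block-detection snippets in the negatives above all reduce to the same OpenCV step: find contours in a thresholded image, take the centroid from image moments, and get the block's orientation from cv2.minAreaRect. The following is a minimal, self-contained sketch of just that step, not taken from the record itself; it assumes a pre-thresholded binary mask and OpenCV 4's two-value findContours return, and it deliberately omits the per-class state the snippets use (real_coord, block_pos, arm geometry, color lookup). The function name and min_area parameter are illustrative only.

import cv2
import numpy as np

def detect_blocks(mask, min_area=100.0):
    """Return (centroid, angle, box_corners) for each blob in a binary mask."""
    # OpenCV 4 returns (contours, hierarchy); OpenCV 3 returned three values.
    contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL,
                                   cv2.CHAIN_APPROX_SIMPLE)
    blocks = []
    for cnt in contours:
        m = cv2.moments(cnt)
        if m['m00'] < min_area:          # m00 is the contour area; skip noise
            continue
        cx = int(m['m10'] / m['m00'])    # centroid from image moments
        cy = int(m['m01'] / m['m00'])
        rect = cv2.minAreaRect(cnt)      # ((cx, cy), (w, h), angle)
        box = np.intp(cv2.boxPoints(rect))  # 4 integer corner points
        # The snippets above store -rect[2] as the block orientation.
        blocks.append(((cx, cy), -rect[2], box))
    return blocks

The oriented box (rather than an axis-aligned one) is what lets the snippets recover a grasp angle for the arm, which is why they all use minAreaRect instead of boundingRect.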
Get another nav plan and return the first waypoint. Try three times, incrementing self.tolerance by tolerance_step after a failure.
def _get_next_waypoint(self, tolerance_step):
    print('\nGetting new nav plan.')

    for i in range(4):
        try:
            self.plan = self.swarmie.get_plan(
                self.goal,
                tolerance=self.tolerance,
                use_home_layer=self.avoid_home
            )
            break  # plan received
        except rospy.ServiceException:
            print('ServiceException.')
            if i < 3:
                print('Expanding tolerance.')
                self.tolerance += tolerance_step
            else:
                raise  # tried 3 times, we give up

    print('Received nav plan.')
    pose = self.plan.plan.poses[0]

    return Point(x=pose.pose.position.x, y=pose.pose.position.y)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_find_closest_waypoints_nearest(self):\n planner = WaypointPlanner(make_example_base_waypoints())\n\n planner.position = Vector3(0, 0, 0)\n waypoints = planner.find_closest_waypoints(1)\n self.assertEqual(1, len(waypoints))\n self.assertEqual(0, waypoints[0].pose.pose.position.x)\n self.assertEqual(0, waypoints[0].pose.pose.position.y)\n self.assertEqual(0, waypoints[0].pose.pose.position.z)\n\n planner.position = Vector3(0.9, 0.9, 0)\n waypoints = planner.find_closest_waypoints(2)\n self.assertEqual(2, len(waypoints))\n self.assertEqual(1, waypoints[0].pose.pose.position.x)\n self.assertEqual(1, waypoints[0].pose.pose.position.y)\n self.assertEqual(2, waypoints[1].pose.pose.position.x)\n self.assertEqual(2, waypoints[1].pose.pose.position.y)\n\n # Check it wraps back around to the start.\n planner.position = Vector3(0.9, 0.9, 0)\n waypoints = planner.find_closest_waypoints(3)\n self.assertEqual(3, len(waypoints))\n self.assertEqual(1, waypoints[0].pose.pose.position.x)\n self.assertEqual(1, waypoints[0].pose.pose.position.y)\n self.assertEqual(2, waypoints[1].pose.pose.position.x)\n self.assertEqual(2, waypoints[1].pose.pose.position.y)\n self.assertEqual(0, waypoints[2].pose.pose.position.x)\n self.assertEqual(0, waypoints[2].pose.pose.position.y)", "def process_waypoint(self, waypoint: Waypoint) -> Union[Trip, None]:\n\n # ignore the first entry, just remember it for further compares\n if not self.prev_point:\n self.prev_point = waypoint\n return None\n\n if self.is_driving(self.prev_point, waypoint):\n if not self.start_point:\n # indicates trip start\n self.start_point = self.prev_point\n else:\n # indicates trip finish\n if self.start_point:\n d = self.calc_distance(self.start_point, self.prev_point)\n trip = Trip(d, self.start_point, self.prev_point)\n self.start_point = None\n return trip\n self.prev_point = waypoint\n return None", "def next_step(self, goal, traps=False): #TODO: test (maybe change to l1 dist?)\n kyu = PriorityQueue()\n kyu.put((0, self.player))\n came_from = {self.player: None}\n costs_agg = {self.player: 0}\n\n while not kyu.empty():\n curr = kyu.get()[1]\n if curr == goal: break\n\n for next in self.valid_neighbors(curr):\n new_cost = costs_agg[curr] + (5 if traps and self.traps[next] else 1)\n if next not in costs_agg.keys() or new_cost < costs_agg[next]:\n costs_agg[next] = new_cost\n kyu.put((new_cost + l2(next, goal), next))\n came_from[next] = curr\n \n if goal in came_from.keys():\n return came_from[goal]\n else:\n raise RuntimeWarning(\"no path between monster and player\")\n return goal", "def get_closest_waypoint(self, pose):\n #TODO implement - Done\n # Iterate the base_waypoints' x value with current position's x value and find the closest\n # match, and pick that waypoint location index. 
\n min_idx = 0\n min_dist = None\n cur_x = pose.position.x\n cur_y = pose.position.y\n if self.waypoints is not None:\n for i, wp in enumerate(self.waypoints):\n wp_x = wp.pose.pose.position.x\n wp_y = wp.pose.pose.position.y\n dist = np.sqrt((cur_x - wp_x)**2 + (cur_y - wp_y)**2)\n if min_dist is None or min_dist >= dist:\n min_dist = dist\n min_idx = i\n \n # check whether the identified index is behind the current position, if so, move it by 1 index\n # https://gamedev.stackexchange.com/questions/75072/how-can-i-compare-two-quaternions-for-logical-equality\n # rospy.logwarn('min_idx before = %d', min_idx)\n eps = 1e-12\n if self.waypoints is not None:\n q1 = self.waypoints[min_idx].pose.pose.orientation\n q2 = pose.orientation\n q1_a = np.array([q1.x, q1.y, q1.z, q1.w])\n q2_a = np.array([q2.x, q2.y, q2.z, q2.w])\n direction = abs(np.dot(q1_a, q2_a))\n #rospy.logwarn('calculated direction %f', direction)\n wp_x = self.waypoints[min_idx].pose.pose.position.x\n if direction > 1-eps:\n if wp_x < cur_x:\n min_idx += 1\n else:\n min_idx -= 1\n else:\n if wp_x < cur_x:\n min_idx -= 1\n else:\n min_idx += 1\n\n # rospy.logwarn('min_idx after = %d', min_idx)\n return min_idx", "def drive_to(self, goal, tolerance=0.0, tolerance_step=0.5,\n max_attempts=10, avoid_targets=True, avoid_home=False,\n use_waypoints=True, start_location=None,\n distance_threshold=None):\n print('\\nRequest received')\n self.fail_count = 0\n self.tolerance = tolerance\n\n self.avoid_targets = avoid_targets\n if avoid_targets is True and avoid_home is True:\n avoid_home = False\n self.avoid_home = avoid_home\n\n current_ignore = Obstacle.IS_SONAR\n if self.avoid_targets is True:\n current_ignore |= Obstacle.TAG_TARGET\n elif self.avoid_home is True:\n current_ignore |= Obstacle.TAG_HOME\n\n self.goal.x = goal.x\n self.goal.y = goal.y\n\n self.cur_loc = self.swarmie.get_odom_location()\n self.current_state = Planner.STATE_IDLE\n self.prev_state = Planner.STATE_IDLE\n\n while (not self.cur_loc.at_goal(self.goal,\n Planner.DISTANCE_OK + self.tolerance)\n and self.fail_count < max_attempts):\n\n\n if use_waypoints is True:\n # get new plan and try to drive to first point in it\n point = self._get_next_waypoint(tolerance_step)\n else:\n point = goal\n\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_DRIVE\n # Turn to approximate goal heading while ignoring sonar and tags\n # helps to prevent rover from trying to jump around obstacles\n # before it even starts along its new path\n self.result = self._face_point(\n point,\n ignore=current_ignore ^ Obstacle.IS_SONAR\n )\n\n if self.result == MoveResult.SUCCESS:\n self.result = self.swarmie.drive_to(\n point,\n ignore=Obstacle.SONAR_BLOCK,\n throw=False\n )\n\n if self.result == MoveResult.SUCCESS:\n # Success, we got to our waypoint, or got ourselves out of\n # whatever pickle we were just in.\n # Just get a new plan and drive to next point\n self.fail_count = 0\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_IDLE\n print('Successfully drove to first point in nav plan.')\n\n # otherwise, something went wrong or we found home\n elif self.result == MoveResult.OBSTACLE_HOME:\n self.set_home_locations()\n\n # get around the home tag obstacle\n count = 0\n\n # Give the rover 3 tries to avoid any tags nearby before\n # getting a new nav plan. 
MoveResult.OBSTACLE_SONAR takes\n # priority in the driver code, so it should be safe to continue\n # this loop if the MoveResult is just an OBSTACLE_HOME\n # self.fail_count may exceed limit here, but I'll let it go\n while count < 3 and self.result == MoveResult.OBSTACLE_HOME:\n print('\\nObstacle: Found Home.')\n count += 1\n self.fail_count += 1\n\n detections = self.swarmie.get_latest_targets().detections\n inside_home = self.is_inside_home_ring(detections)\n if inside_home:\n print('\\nGetting out of the home ring!!')\n angle, dist = self.get_angle_and_dist_to_escape_home(\n detections\n )\n self.swarmie.turn(\n angle,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION\n )\n self.result = self.swarmie.drive(\n dist,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION\n )\n\n if self.avoid_home is False:\n # turn back around\n self.swarmie.turn(\n math.pi,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION\n )\n print('Obstacle: Found Home.')\n return MoveResult.OBSTACLE_HOME\n else:\n if self.avoid_home is False:\n print('Obstacle: Found Home.')\n return MoveResult.OBSTACLE_HOME\n\n self.result = self._avoid_tag(id=256,\n ignore=current_ignore)\n\n elif self.result == MoveResult.OBSTACLE_TAG:\n # get around the tag obstacle\n count = 0\n\n # Give the rover 3 tries to avoid any tags nearby before\n # getting a new nav plan. MoveResult.OBSTACLE_SONAR takes\n # priority in the driver code, so it should be safe to continue\n # this loop if the MoveResult is just an OBSTACLE_TAG\n # self.fail_count may exceed limit here, but I'll let it go\n while count < 3 and self.result == MoveResult.OBSTACLE_TAG:\n print('\\nObstacle: Found a Tag.')\n\n if self.avoid_targets is False:\n if not self.sees_home_tag():\n return self.result\n\n count += 1\n self.fail_count += 1\n\n self.result = self._avoid_tag(id=0,\n ignore=current_ignore)\n\n elif self.result == MoveResult.OBSTACLE_SONAR:\n # Check for home and tag obstacles just to be safe, because\n # sonar MoveResults take priority, and would mask a home or\n # target tag in view.\n obstacle = self.swarmie.get_obstacle_condition()\n\n if (obstacle & Obstacle.TAG_HOME == Obstacle.TAG_HOME and\n self.avoid_home is False):\n self.set_home_locations()\n return MoveResult.OBSTACLE_HOME\n\n if (obstacle & Obstacle.TAG_TARGET == Obstacle.TAG_TARGET and\n self.avoid_targets is False):\n return MoveResult.OBSTACLE_TAG\n\n # get around the sonar obstacle\n self.fail_count += 1\n\n print('\\nObstacle: Sonar.')\n left_blocked, center_blocked, right_blocked = \\\n self._check_sonar_obstacles()\n\n if (not left_blocked and\n not center_blocked and not right_blocked):\n print('\\nFake sonar obstacle??')\n pass # 'fake' obstacle?\n\n elif not left_blocked and center_blocked and right_blocked:\n print('Left looks clear, turning left.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_LEFT\n self._go_around(math.pi / 4, 0.7)\n # self.swarmie.drive_to(point, throw=False)\n\n elif left_blocked and center_blocked and not right_blocked:\n print('Right looks clear, turning right.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_RIGHT\n self._go_around(-math.pi / 4, 0.7)\n # self.swarmie.drive_to(point, throw=False)\n\n elif left_blocked and not center_blocked and not right_blocked:\n print('Only left blocked, turning a little right.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_RIGHT\n self._go_around(-math.pi / 6, 0.6)\n # self.swarmie.drive_to(point, throw=False)\n\n elif not 
left_blocked and not center_blocked and right_blocked:\n print('Only right blocked, turning a little left.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_LEFT\n self._go_around(math.pi / 6, 0.6)\n # self.swarmie.drive_to(point, throw=False)\n\n else:\n print('Neither left or right look clear.')\n\n # Only back up if we're far enough away from home for it\n # to be safe. Don't want to back up into the nest!\n if self._is_safe_to_back_up():\n print('Backing up.')\n self.swarmie.drive(\n -0.3,\n ignore=Obstacle.IS_SONAR,\n throw=False\n )\n\n if (self.current_state == Planner.STATE_AVOID_RIGHT or\n self.prev_state == Planner.STATE_AVOID_RIGHT):\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_RIGHT\n self.clear(-math.pi / 4, ignore=current_ignore,\n reset_heading=False)\n self._go_around(-math.pi / 4, 0.75)\n\n else:\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_LEFT\n self.clear(math.pi / 4, ignore=current_ignore,\n reset_heading=False)\n self._go_around(math.pi / 4, 0.75)\n\n elif self.result == MoveResult.PATH_FAIL:\n # shit, hope we can back up if this ever happens\n self.fail_count += 1\n\n print('\\nPath Failure. Backing up.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_REVERSE\n self.swarmie.drive(\n -0.5,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION,\n throw=False\n )\n\n self.cur_loc = self.swarmie.get_odom_location()\n\n if self.fail_count >= max_attempts:\n print('Failed to drive to goal {} times.'.format(\n max_attempts)\n )\n raise PathException(MoveResult.PATH_FAIL)\n\n if start_location is not None:\n current_loc = self.cur_loc.get_pose()\n dist = math.sqrt((start_location.x - current_loc.x) ** 2\n + (start_location.y - current_loc.y) ** 2)\n if dist > distance_threshold:\n raise PathException(MoveResult.PATH_FAIL)\n\n print('Successfully executed nav plan.')\n return MoveResult.SUCCESS", "def next_move(self):\n\n # Calculate all paths to destination from current location and time.\n solution = self.calculate_best_solution((None, None), self.currentTurn, [self.character.path[-1]],\n self.character.spent)\n\n # Add travel weight to spent.\n if solution[1] is not None and solution[1][0] != solution[1][1]:\n self.character.spent += self.pekingMap.get_vertex(solution[1][0]).weight(solution[1][1])\n\n # Return next point in shortest path to location.\n if solution[1] is not None:\n return solution[1][1]\n\n return None", "def _update_next_waypoint(self):\n if not self.base_waypoints:\n #rospy.logwarn(\"Waypoints not updated: base_waypoints not available yet.\")\n return False\n\n if not self.current_pose:\n #rospy.logwarn(\"Waypoints not updated: current_pose not available yet.\")\n return False\n\n # Get ego car variables\n ego_x = self.current_pose.position.x\n ego_y = self.current_pose.position.y\n ego_theta = math.atan2(self.current_pose.orientation.y, self.current_pose.orientation.x)\n\n # If I do have a next_waypoint, I will start looking from it, and stop looking\n # as soon as get a local minimum. 
Otherwise I will do a full search across the whole track\n t = time.time()\n wp = None\n yaw = 0\n dist = 1000000 # Long number\n if self.next_waypoint:\n idx_offset = self.next_waypoint\n full_search = False\n else:\n idx_offset = 0\n full_search = True\n num_base_wp = len(self.base_waypoints)\n\n for i in range(num_base_wp):\n idx = (i + idx_offset)%(num_base_wp)\n wp_x = self.base_waypoints[idx].pose.pose.position.x\n wp_y = self.base_waypoints[idx].pose.pose.position.y\n wp_d = math.sqrt((ego_x - wp_x)**2 + (ego_y - wp_y)**2)\n\n if wp_d < dist:\n dist = wp_d\n wp = idx\n if debugging:\n # Angle betwee car heading and waypoint heading\n yaw = math.atan2(wp_y - ego_y, wp_x - ego_x) - ego_theta\n elif not full_search:\n # Local minimum. If the waypoint makes sense, just use it and break\n if dist < max_local_distance:\n break; # Found a point\n else:\n # Seem to have lost track. Do search again\n rospy.logwarn(\"Waypoint updater lost track (local min at %.1f m after %d waypoints). Going back to full search.\", dist, i+1)\n full_search = True\n\n if debugging:\n rospy.loginfo(\"New next wp [%d] -> (%.1f,%.1f) after searching %d points in %fs\", wp, dist * math.cos(yaw), dist * math.sin(yaw), i, time.time()-t)\n\n if wp is None:\n rospy.logwarn(\"Waypoint updater did not find a valid waypoint\")\n return False\n\n self.next_waypoint = wp\n return True", "def get_next_destination(self) -> Location:\n # Find index of current location in route\n i = 0\n while i < len(self.route):\n this_loc = self.route[i]\n #\n if i > 0 and this_loc == self.route[-1]:\n return None\n if this_loc == self.current_location:\n return self.route[i + 1]\n i += 1", "def steer(self, start, goal, eta=2):\n if self.planning_env.compute_distance(start, self.goal_config) < eta:\n return self.goal_config\n mode = 1\n if mode == 1:\n vec = np.array([goal[0]-start[0], goal[1]-start[1]])\n if np.linalg.norm(vec) != 0:\n normed_vec = vec/np.linalg.norm(vec)\n else:\n normed_vec = vec\n endpoint = np.array([start[0],start[1]]) + eta * normed_vec\n\n if mode == 2:\n endpoint = goal\n # print(start)\n # print(normed_vec)\n # print(endpoint)\n # endpoint = goal\n\n return tuple(endpoint)", "def getNextDest(self):\n\n if self.direction_forward:\n if len(self.destinations)-1 == self.current_loc: #if Autobuz reaches rightmost destination, it also takes a break and changes directions\n self.direction_forward = False #Autobuz changes direction\n self.updateOmLocation()\n return self.destinations[self.current_loc], (self.break_duration + self.trip_duration) #return destination reached and elapsed time\n \n else:\n self.current_loc += 1\n self.updateOmLocation()\n return self.destinations[self.current_loc], self.trip_duration\n \n else:\n if 0 == self.current_loc: #if Autobuz reaches leftmost destination, it also takes a break and changes directions\n self.direction_forward = True #Autobuz changes direction\n self.updateOmLocation()\n return self.destinations[self.current_loc], (self.break_duration + self.trip_duration)\n \n else:\n self.current_loc -= 1\n self.updateOmLocation()\n return self.destinations[self.current_loc], self.trip_duration", "def next_gps(self):\n \n return Waypoint(0.0, 0.0)", "def _get_trial_move(self):\n max_attempts = 1000\n one_type = self._only_one_type()\n if np.random.rand() < self._ins_prob or one_type:\n\n change = None\n count = 0\n while count < max_attempts:\n try:\n change = self.insert_trial_move()\n break\n except KeyError:\n pass\n count += 1\n\n count = 0\n while (not 
self._no_constraint_violations(change)\n and count < max_attempts):\n try:\n change = self.insert_trial_move()\n except KeyError:\n pass\n count += 1\n\n if count < max_attempts:\n return change\n elif one_type:\n msg = \"Could not find any insert moves in \"\n msg += \"{} attempts\\n\".format(max_attempts)\n msg += \"and it is not possible to perform swap moves\\n\"\n msg += \"as the atoms object has only one element type!\"\n raise RuntimeError(msg)\n\n change = self._swap_trial_move()\n count = 0\n while (not self._no_constraint_violations(change)\n and count < max_attempts):\n change = self._swap_trial_move()\n count += 1\n\n if count == max_attempts:\n msg = \"Did not manage to find a valid move \"\n msg += \"in {} attemps!\".format(max_attempts)\n raise RuntimeError(msg)\n return change", "def test_find_closest_waypoints_no_position(self):\n planner = WaypointPlanner(make_example_base_waypoints())\n self.assertIsNone(planner.find_closest_waypoints(1))", "def get_closest_waypoint(self, x, y):\n closest_idx = self.waypoint_tree.query([x, y])[1] # ckd tree (1st closest, idx)\n\n # Check if closest waypoint is ahead or behind vehicle\n closest_coord = self.waypoints_2d[closest_idx]\n prev_coord = self.waypoints_2d[closest_idx - 1]\n\n # Equation for hyperplane through closest_coors\n cl_vect = np.array(closest_coord)\n prev_vect = np.array(prev_coord)\n pos_vect = np.array([x, y])\n\n val = np.dot(cl_vect - prev_vect, pos_vect - cl_vect)\n # Car is ahead of the closest waypoint\n if val > 0:\n closest_idx = (closest_idx + 1) % len(self.waypoints_2d)\n\n return closest_idx", "def nextWaypoint(self, pose):\n #DONE implement\n location = pose.position\n dist = 100000.\n dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)\n nwp = 0\n for i in range(len(self.waypoints)):\n d1 = dl(location, self.waypoints[i].pose.pose.position)\n if dist > d1:\n nwp = i\n dist = d1\n x = self.waypoints[nwp].pose.pose.position.x\n y = self.waypoints[nwp].pose.pose.position.y\n heading = np.arctan2((y-location.y), (x-location.x))\n angle = np.abs(self.theta-heading)\n if angle > np.pi/4.:\n nwp += 1\n if nwp >= len(self.waypoints):\n nwp = 0\n return nwp", "def get_path(self, grid, start_wp, end_wp):\n # The open and closed sets\n openset = set()\n closedset = set()\n\n # Add the starting point to the open set\n openset.add(start_wp)\n\n # While the open set is not empty\n while openset:\n # Find the waypoint in the open set with the lowest G + H score\n current_wp = min(openset, key=lambda o: o.G + o.H)\n # Found the goal\n if current_wp == end_wp:\n path = []\n while current_wp.parent:\n path.append(current_wp)\n current_wp = current_wp.parent\n path.append(current_wp)\n print(\"Path found in {} moves: {}\".format(len(path), path))\n return path[::-1]\n\n # Remove the waypoint from the open set\n openset.remove(current_wp)\n # Add it to the closed set\n closedset.add(current_wp)\n\n # Generate children\n children = current_wp.generate_children(grid)\n\n for waypoint in children:\n # If it is already in the closed set, skip it\n if waypoint in closedset:\n continue\n # Otherwise if it is already in the open set\n if waypoint in openset:\n # Check if we beat the G score\n new_g = current_wp.G + 1\n\n if waypoint.G > new_g:\n # If so, update the waypoint to have a new parent\n waypoint.G = new_g\n waypoint.parent = current_wp\n else:\n # If it isn't in the open set, calculate the G and H score for the waypoint\n if waypoint.orientation != current_wp.orientation:\n waypoint.G = current_wp.G + 
1.5 # Avoiding zigzag move by increase the cost of a rotation\n else:\n waypoint.G = current_wp.G + 1\n\n waypoint.H = abs(waypoint.x - end_wp.x) + abs(waypoint.y - end_wp.y)\n # Set the parent to our current_wp\n waypoint.parent = current_wp\n # Add it to the set\n openset.add(waypoint)\n\n # If there is no solution\n return [start_wp, end_wp]", "def get_step():\n\n # Decide which direction to go and how far to go in that direction.\n direction = choice([1, -1])\n distance = choice([0, 1, 2, 3, 4, 5, 6, 7, 8])\n step = direction * distance\n\n # Reject moves that go nowhere.\n if step == 0:\n get_step()\n else:\n return step", "def get_best_roundtrip(self):\n out = min(self.outgoing_flights, key=lambda f: f.price)\n ret = min(self.return_flights, key=lambda f: f.price)\n\n return RoundTrip(out, ret)", "def get_local_plan(self, ind):\n size = len(self.global_plan.poses)\n if ind < 0 or ind >= size:\n raise ValueError(\"ind must be between 0 and %d\"%size)\n \n start = self.global_plan.poses[ind].pose\n local_path = Path()\n found_ind = None\n for i in range(ind, size):\n candidate = self.global_plan.poses[i].pose\n dist = self.calc_distance(start, candidate)\n if dist >= self.look_ahead_distance:\n break\n else:\n local_path.poses.append(candidate)\n found_ind = i\n\n return found_ind, local_path", "def get_closest_waypoint(self, pose):\n # Find the nearest waypoint\n closest_distance = float('inf')\n closest_waypoint = 0\n for i in range(len(self.waypoints)):\n this_distance = self.distance_to_position(self.waypoints, i, pose.position)\n if this_distance < closest_distance:\n closest_distance = this_distance\n closest_waypoint = i\n return closest_waypoint", "def _compute_connection(current_waypoint, next_waypoint, threshold=35):\n n = next_waypoint.transform.rotation.yaw\n n = n % 360.0\n\n c = current_waypoint.transform.rotation.yaw\n c = c % 360.0\n\n diff_angle = (n - c) % 180.0\n if diff_angle < threshold or diff_angle > (180 - threshold):\n return RoadOption.STRAIGHT\n elif diff_angle > 90.0:\n return RoadOption.LEFT\n else:\n return RoadOption.RIGHT", "def _distance_next(self):\n\n self.distance = 10\n\n # Here a set index to 0 if the car is finishing a lap\n # Also reset the farthest\n if self.index > (len(self.x_trajectory) - 6) and self.closed:\n self.index = 0\n self.farthest = -1\n self.laps += 1\n\n for w in range(self.index, self.index + 20):\n\n self.dist_point = math.sqrt((self.x_trajectory[w] - self.x)**2\n + (self.y_trajectory[w] - self.y)**2)\n\n if self.dist_point < self.distance:\n self.distance = self.dist_point\n self.index = w\n\n if w >= (len(self.x_trajectory) - 1):\n break\n\n self._calc_side()\n\n self.distance = self.distance * self.sign\n\n return self.distance", "def plan(cur_pos: Node, goal_pos: Node, cur_heading: float, new_obst_segments: [ObstacleSegment]) \\\n -> (NavMode, [Node]):\n global d_reach, v_followed, v_diff, d_followed_rel, obst_id_to_follow, bf_waypoint\n # find updated obstacle with obst_id_to_follow\n obst_to_follow = find_obst(obst_id_to_follow, new_obst_segments)\n\n # check if we lost the obstacle\n if obst_to_follow is None:\n logger.info(\"BF: Lost obstacle segment; End of Routine\")\n return NavMode.MTG, None\n else:\n # update BF Waypoint\n bf_waypoint.update(obst_to_follow)\n\n # Calculate d_reach, d_followed, v_diff, d_followed_rel\n v_diff = bf_waypoint.get_pos_change()\n d_reach = goal_pos.dist_2d(bf_waypoint.cur_pos)\n assert v_followed is not None and v_diff is not None\n\n v_followed_rel = v_followed + v_diff\n d_followed_rel 
= v_followed_rel.dist_2d(goal_pos)\n\n # if d_reach < d_followed_rel\n if d_followed_rel - d_reach > config_.D_TOL:\n # switch to MTG\n logger.info(\"BF: path length decreased by %.2f below original minimum; End of Routine\"\n % (d_followed_rel - d_reach))\n return NavMode.MTG, None\n else:\n # choose new BF waypoint\n bf_waypoint = find_new_bf_waypoint(obst_to_follow, cur_pos, None)\n if bf_waypoint is None:\n return NavMode.MTG, None\n\n # generate path to bfWaypoint\n path = [bf_waypoint.cur_pos, goal_pos]\n\n # update v_followed\n v_followed = v_followed_rel\n\n return NavMode.BF, path", "def getBestPath(self):\n if self._bestPathVertex.getNextWaypoint() is None:\n numWaypointsCompleted = len(self._waypoints)\n quality = 2\n if self._vertexQueue.isEmpty():\n quality += 1\n else:\n numWaypointsCompleted = self._bestPathVertex.getNextWaypoint().getIndex()\n quality = 1\n if self._vertexQueue.isEmpty():\n quality -= 1\n \n return outputPath.generatePath(self._bestPathVertex, self._params.waypointAcceptanceRadii, quality, numWaypointsCompleted)", "def find_closest_trajectory(cls, **kwargs):\n # if we can find an approximation that works to two\n # decimal places, just return that\n ideal_min_pitch = kwargs[\"pitch\"] - \\\n kwargs.get(\"ideal_min_pitch_differential\", cls.IDEAL_DIFFERENTIAL)\n ideal_max_pitch = kwargs[\"pitch\"] + \\\n kwargs.get(\"ideal_min_pitch_differential\", cls.IDEAL_DIFFERENTIAL)\n\n ideal_min_roll = kwargs[\"roll\"] - \\\n kwargs.get(\"ideal_min_roll_differential\", cls.IDEAL_DIFFERENTIAL)\n ideal_max_roll = kwargs[\"roll\"] + \\\n kwargs.get(\"ideal_min_roll_differential\", cls.IDEAL_DIFFERENTIAL)\n\n # find trajectories that we are good with even if they aren't the absolute\n # best\n ideal_trajectory = SolvedTrajectory.objects.filter(\n pitch__gt=ideal_min_pitch,\n roll__gt=ideal_min_roll\n ).filter(\n pitch__lt=ideal_max_pitch,\n roll__lt=ideal_max_roll)\n ideal_trajectory = ideal_trajectory.first()\n\n # if we found something in the ideal trajectory, just return that!\n if ideal_trajectory:\n best_trajectory = ideal_trajectory\n best_match_score = cls.get_match_score(\n best_trajectory, kwargs[\"pitch\"], kwargs[\"roll\"])\n\n # otherwise, we expand our filter and include more results\n else:\n\n # determine bounds on the pitch and the roll\n # of the trajectory we will return\n min_pitch = kwargs[\"pitch\"] - kwargs[\"min_pitch_differential\"]\n max_pitch = kwargs[\"pitch\"] + kwargs[\"min_pitch_differential\"]\n\n min_roll = kwargs[\"roll\"] - kwargs[\"min_roll_differential\"]\n max_roll = kwargs[\"roll\"] + kwargs[\"min_roll_differential\"]\n\n # determine the candidate trajectories\n candidate_trajectories = SolvedTrajectory.objects.filter(\n pitch__gt=min_pitch,\n roll__gt=min_roll\n ).filter(\n pitch__lt=max_pitch,\n roll__lt=max_roll\n )\n\n # determine the best match from what we have available\n best_trajectory = None\n best_match_score = float(\"inf\")\n\n for trajectory in candidate_trajectories:\n match_score = cls.get_match_score(\n trajectory, kwargs[\"pitch\"], kwargs[\"roll\"])\n\n if match_score < best_match_score:\n best_trajectory = trajectory\n best_match_score = match_score\n\n # calculate the norm of the deviation\n deviation = math.sqrt(best_match_score)\n return best_trajectory.file_name, deviation", "def getNextTarget(self):\r\n\r\n\t\tif self.pathToGoal == []:\r\n#\t\t\tprint \"\\tPath empty, finding a new one.\"\r\n\t\t\tself.decideOnGoal()\r\n\t\t\tself.calculateNewPath()\r\n\t\r\n\t\tself.currentTarget = self.pathToGoal.pop(0)", 
"def nearest_neighbor(self):\n steps = [{'Tour': [], 'Tourlength': 0}]\n tour = []\n original_nodes = self._datacontroller.get_data('nodes')\n nodes = copy.deepcopy(original_nodes)\n scale = self._datacontroller.get_data('scale')\n\n # Step 1: Get a tour start\n starts = [node for node in nodes if node.start]\n _start = 'Random from marked nodes'\n if not len(starts):\n starts = nodes\n _start = 'Random from all nodes'\n\n current = starts[randint(0, (len(starts) - 1))]\n while True:\n tour.append(current.nid)\n nodes.remove(current)\n steps.append(construct_step(tour, str(_start), 'random', original_nodes, scale))\n if not len(nodes):\n break\n current = nodes[tsputil.nearest_neighbor(nodes, current)[0]]\n tour.append(tour[0])\n steps.append(construct_step(tour, str(_start), 'random', original_nodes, scale))\n self._datacontroller.commit_change('pathsteps', steps)\n self._datacontroller.commit_change('path', steps[-1])", "def get_closest_waypoint(self, pose, waypoints):\n #TODO implement\n\n\tmin_dist = float(\"inf\")\n\tclosest_wp_idx = -1\t\n\n for idx, wp in enumerate(waypoints):\n\t\tdist = self.dist_to_point(pose, wp.pose.pose)\n\t\tif(dist < min_dist):\n\t\t\tmin_dist = dist\n\t\t\tclosest_wp_idx = idx\n\treturn closest_wp_idx", "def get_closest_waypoint(self, pose):\n wpclosestDist = sys.maxint\n for index in range(len(self.waypoints.waypoints)):\n wp = self.waypoints.waypoints[index]\n wpdist = self.calcDistance_PoseStamped(pose, wp.pose)\n if(wpclosestDist > wpdist):\n wpclosestDist = wpdist\n wpindex = index\n return wpindex", "def search_path(self):\n\n nodes = [self.start]\n final_node = None\n \n count = 0\n while True:\n count += 1\n\n if count % self.pick_target == 0:\n pick = self.goal.pos[:2]\n else:\n pick = self.car.random_pos()[:2]\n \n nearest = self.get_nearest_node(nodes, pick)\n\n if count % self.check_dubins == 0:\n solutions = self.dubins.find_tangents(nearest.pos, self.goal.pos)\n dubins_route, cost, valid = self.dubins.best_tangent(solutions)\n \n if valid:\n final_node = nearest\n break\n\n phi = self.get_steering_angle(nearest.pos, pick)\n pos = nearest.pos\n branch = [pos[:2]]\n \n for i in range(self.max_steps):\n pos = self.car.step(pos, phi)\n branch.append(pos[:2])\n \n # check safety of route-----------------------\n if phi == 0:\n safe = self.dubins.is_straight_route_safe(nearest.pos, pos)\n else:\n d, c, r = self.car.get_params(nearest.pos, phi)\n safe = self.dubins.is_turning_route_safe(nearest.pos, pos, d, c, r)\n # --------------------------------------------\n \n if not safe:\n continue\n \n new_node = Node(pos, phi, i+1)\n \n if new_node in nodes:\n continue\n \n new_node.branch = branch\n new_node.parent = nearest\n nodes.append(new_node)\n \n route = self.backtracking(final_node) + dubins_route\n path = self.car.get_path(self.car.start_pos, route)\n print('Total iteration:', count)\n \n return path, nodes" ]
[ "0.64032876", "0.61642146", "0.6126271", "0.6093539", "0.6021183", "0.5997461", "0.5867292", "0.5816609", "0.5783162", "0.57300097", "0.5674514", "0.56579065", "0.5624059", "0.556312", "0.5558917", "0.5478584", "0.54468936", "0.5426313", "0.53970754", "0.5375155", "0.53722984", "0.5358433", "0.5353667", "0.535331", "0.5339426", "0.5334664", "0.5317277", "0.5314425", "0.5314034", "0.52638066" ]
0.7916811
0
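The positive document in the record above is, at its core, a retry loop that widens the goal tolerance after each planning failure. Below is a generic, dependency-free sketch of that pattern, not part of the record: request_plan and PlanningFailed are placeholder names standing in for the ROS-specific swarmie.get_plan service proxy and rospy.ServiceException, not real APIs.

class PlanningFailed(Exception):
    """Stand-in for the transport error (rospy.ServiceException in the record)."""

def plan_with_expanding_tolerance(request_plan, goal, tolerance,
                                  tolerance_step, max_attempts=4):
    """Call request_plan, widening `tolerance` after each failure.

    Mirrors _get_next_waypoint above: earlier failures widen the
    tolerance and retry; only the final failed attempt re-raises.
    """
    for attempt in range(max_attempts):
        try:
            return request_plan(goal, tolerance=tolerance)
        except PlanningFailed:
            if attempt < max_attempts - 1:
                tolerance += tolerance_step  # relax the goal region and retry
            else:
                raise  # out of attempts; let the caller decide what to do

Note the asymmetry in the except branch: the tolerance is widened only when another attempt remains, so a success reports a plan at the loosest tolerance actually needed, and the terminal exception propagates after the last tolerance has been tried.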
Check sonar obstacles over a short period of time, hopefully to weed out some of the noise and let us continue driving if we stopped for a 'fake' obstacle.
def _check_sonar_obstacles(self):
    # TODO: what's a good number?
    BLOCKED_THRESHOLD = 0.7

    rate = rospy.Rate(10)  # 10 hz
    count = 10

    left = 0
    center = 0
    right = 0

    for i in range(count):
        obstacle = self.swarmie.get_obstacle_condition()

        if obstacle & Obstacle.SONAR_LEFT == Obstacle.SONAR_LEFT:
            left += 1
        if (obstacle & Obstacle.SONAR_CENTER
                == Obstacle.SONAR_CENTER):
            center += 1
        if obstacle & Obstacle.SONAR_RIGHT == Obstacle.SONAR_RIGHT:
            right += 1

        rate.sleep()

    left_blocked = left / count > BLOCKED_THRESHOLD
    center_blocked = center / count > BLOCKED_THRESHOLD
    right_blocked = right / count > BLOCKED_THRESHOLD

    return left_blocked, center_blocked, right_blocked
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_for_obstacles(self):\n obs = False\n obs_p = []\n for point in self.obstacles:\n if -0.15 <= point[1] <= 0.15: # robot is 178mm wide\n # Obstacles should be less than or equal to 0.2 m away before being detected\n if 0 <= point[0] <= .2:\n obs_p.append(point)\n obs = True\n if obs:\n pos = self.determine_pos_of_obstacle(obs_p)\n data = Obstacle()\n data.x = pos[0]\n data.y = pos[1]\n data.obstacle = True\n self.obs_pub.publish(data)", "def _detect_obstacles(self):\n def _distance(point, line_point1, line_point2):\n \"\"\"calcuate the distance between a point and a line\"\"\"\n vec1 = line_point1 - point\n vec2 = line_point2 - point\n distance = np.abs(np.cross(vec1,vec2)) / np.linalg.norm(line_point1-line_point2)\n return distance\n\n def _acute_angle(point, line_point1, line_point2):\n \"\"\"detetrmine if the point is whithin the boundary of the line through law of cosines\"\"\"\n base_line = np.linalg.norm(line_point1-line_point2)\n assert base_line > 0, \"check the library useage\"\n line1 = np.linalg.norm(point - line_point1)\n line2 = np.linalg.norm(point - line_point2)\n cos_angle_1 = (base_line**2 + line1**2 - line2**2)/(2*base_line*line1)\n cos_angle_2 = (base_line**2 + line2**2 - line1**2)/(2*base_line*line2)\n if cos_angle_1 * cos_angle_2 > 0:\n return True\n else:\n return False\n\n if self.obstacles != \"None\": # if user assigned some obstacles\n for line in self.env_config: \n line_point1, line_point2 = np.array(line[0]), np.array(line[1])\n point = np.array(self.state[:2])\n distance = _distance(point, line_point1, line_point2)\n acute_angle = _acute_angle(point, line_point1, line_point2)\n if distance <= 0.02 and acute_angle:\n self.adsorption = True\n break\n else:\n self.adsorption = False", "def _trace_route(self, debug=False, time=False):\n self.radius = 2\n self.threshold = 1\n\n obstacles = []\n for vehicle in self._world.get_actors().filter('vehicle.*'):\n #print(vehicle.bounding_box)\n # draw Box\n bb_points = TestAgent._create_bb_points(vehicle)\n global_points= TestAgent._vehicle_to_world(bb_points, vehicle)\n global_points /= global_points[3,:]\n\n my_bb_points = TestAgent._create_bb_points(self._vehicle)\n my_global_points = TestAgent._vehicle_to_world(my_bb_points, self._vehicle)\n\n my_global_points /= my_global_points[3,:]\n dist = np.sqrt((my_global_points[0,2]-global_points[0,2])**2 + (my_global_points[1,2]-global_points[1,2])**2 + (my_global_points[2,2]-global_points[2,2])**2)\n\n if 0<dist:\n vehicle_box = [global_points[0,0],global_points[1,0],global_points[0,1],global_points[1,1]]\n obstacles.append(vehicle_box)\n print(f'vehicle box: {vehicle_box}')\n\n print('number of near obstacles: ', len(obstacles))\n if len(obstacles) == 0:\n self.obstacles = np.array([[-1,-1,-1,-1]]).astype(np.float32)\n self.num_obs = self.num_obs = np.array([0]).astype(np.int32)\n else:\n self.obstacles = np.array(obstacles).astype(np.float32)\n self.num_obs = self.num_obs = np.array([self.obstacles.shape[0]]).astype(np.int32)\n\n iter_parameters = {'start':self.start, 'goal':self.goal, 'radius':self.radius, 'threshold':self.threshold, 'obstacles':self.obstacles, 'num_obs':self.num_obs}\n \n start_timer = timer()\n route = self.gmt_planner.run_step(iter_parameters, iter_limit=1000, debug=debug, time=time)\n end_timer = timer()\n print(\"elapsed time: \", end_timer-start_timer) \n\n if time:\n self.time_df = pd.DataFrame(self.gmt_planner.time_data)\n \n\n # trace_route = []\n # for r in route:\n # wp = carla.Transform(carla.Location(self.states[r][0].item(), 
self.states[r][1].item(), 1.2), carla.Rotation(roll=0,pitch=0, yaw=(self.states[r][2]*180/np.pi).item()))\n # trace_route.append(wp)\n # draw_route(self._vehicle.get_world(), trace_route)\n\n index = len(route)-1\n trace_route = []\n for i in range(len(route)-1):\n wp = self._map.get_waypoint(carla.Location(self.states[route[index]][0].item(), self.states[route[index]][1].item(), 1.2)) # , carla.Rotation(roll=0,pitch=0, yaw=(self.states[r][2]*180/np.pi).item()\n trace_route.append((wp,-1))\n index -= 1\n\n return trace_route", "def checkObstaclesAhead(ldr_compl,tireAngle, maxLen=0.3,threshold=2):\n #within the car-width and the maxLen\n # at 45 degrees shift real for 0.05m\n madeUpHeuristic = tireAngle*0.07/45 #shifts real-axis dependent on tire angle\n madeUpHeuristic2= abs(tireAngle*0.14/45) #at 45degrees append CAR_WIDTH with 0.15m\n obstacleIdx = (ldr_compl.imag<maxLen)*(abs(ldr_compl.real+madeUpHeuristic)<((CAR_WIDTH/100+madeUpHeuristic2)/2))\n if is_debugging:\n plt.plot(ldr_compl.real,ldr_compl.imag,'.')\n plt.show()\n print(sum(obstacleIdx))\n return sum(obstacleIdx)>threshold", "def doesArmTouchObstacles(armPos, obstacles):\n for i in range(len(armPos)):\n cur_arm = armPos[i]\n arm_x = [cur_arm[0][0],cur_arm[1][0]]\n arm_y = [cur_arm[0][1],cur_arm[1][1]]\n if (arm_x[0] != arm_x[1]):\n arm_a = (arm_y[1]-arm_y[0])/(arm_x[1]-arm_x[0])\n arm_b = arm_y[1]-arm_a*arm_x[1]\n for i in range(len(obstacles)):\n cur_obs = obstacles[i]\n x_range = np.linspace(arm_x[0],arm_x[1],1000)\n y_range = arm_a * x_range + arm_b\n for j in range(1000):\n cur_x = x_range[j]\n cur_y = y_range[j]\n if(((cur_y-cur_obs[1])**2 +(cur_x-cur_obs[0])**2) <= cur_obs[2]**2):\n return True\n if (arm_x[0] == arm_x[1]):\n for i in range(len(obstacles)):\n cur_obs = obstacles[i]\n y_range = np.linspace(arm_y[0],arm_y[1],1000)\n cur_x = arm_x[0]\n for j in range(1000):\n cur_y = y_range[j]\n if(((cur_y-cur_obs[1])**2 +(cur_x-cur_obs[0])**2) <= cur_obs[2]**2):\n return True\n\n\n #print(obstacles)\n\n return False", "def obstacle_count(self):\n for x in range(6):\n # do a scan of the area in front of the robot\n self.scan()\n\n \n see_an_object = False\n count = 0 \n # Do a scan and count the amount of objects in the way\n for angle in self.scan_data:\n dist = self.scan_data[angle]\n if dist < self.SAFE_DISTANCE and not see_an_object: \n see_an_object = True\n count += 1\n print(\"~~~ I SEE SOMETHING!! 
~~~\")\n elif dist > self.SAFE_DISTANCE and see_an_object:\n see_an_object = False\n print(\"I guess the object ended\") \n print(\"ANGLE: %d | DIST: %d\" % (angle, dist))\n self.turn_by_deg(90)\n print(\"\\nI saw %d objects\" % count)", "def obstacle_count(self):\n self.wide_scan()\n found_something = False\n counter = 0\n for distance in self.scan:\n if distance and distance < 200 and not found_something:\n found_something = True\n counter += 1\n print(\"Object # %d found, I think\" % counter)\n if distance and distance > 200 and found_something:\n found_something = False\n print(\"\\n----I SEE %d OBJECTS----\\n\" % counter)", "def check_time(self):\n while True:\n for name in self.neighbors:\n if not self.neighbors[name].is_killed:\n if not self.neighbors[name].update_ready and time.time() - self.neighbors[name].send_timer > self.timeout:\n self.neighbors[name].update_ready = True\n if time.time() - self.neighbors[name].kill_timer > 3 * self.timeout:\n self.neighbors[name].is_killed = True", "def obstacle_count(self):\n found_something = False\n count = 0\n starting_postion = self.get_heading()\n self.right(primary=60, counter=60)\n time.sleep(0.5)\n while self.get_heading() != starting_postion:\n if self.read_distance() < 250 and not found_something:\n found_something = True\n count += 1\n print (\"I found something\")\n elif self.read_distance() > 250 and found_something:\n found_something = False\n print(\"I have a clear view\")\n self.stop()\n\n print(\"I have found this many things: %d\" % count)\n return count", "def check_terminal(self,bump,DLightBump, AnalogBump, IR):\r\n terminal = False\r\n # signal returned from distance to obstacle /terminal 50 mm,5cm\r\n # by measurement, small obstacle (height = height of light bumper) in 2cm: signal 120 ~300\r\n # within 1cm >400\r\n # if big obstacle: (like a wall) at 2cm: 1300~1600\r\n # d_obs = 140\r\n d_obs = 500.0\r\n threshold = d_obs/self.max_strength\r\n obstacles = []\r\n Infra_Omi, Infra_L, Infra_R =IR\r\n\r\n L, FL, CL, CR, FR, R = AnalogBump\r\n prob_obs =np.array([L, FL, CL, CR, FR, R]).astype(float)\r\n strength = prob_obs/self.max_strength # maximum signal strength light bumper can receive\r\n for i in range(len(strength)):\r\n strength[i] = 1 if strength[i] >=threshold else 0\r\n\r\n cnt = strength.sum()\r\n if Infra_Omi!=0 or Infra_L!=0 or Infra_R!= 0:\r\n terminal =True\r\n x = int(self.Motion.x +d_obs)\r\n y = int(self.Motion.y)\r\n s = (x, y)\r\n obstacles.append(s)\r\n\r\n if bump != 0 or cnt >=1:\r\n terminal=True\r\n # stop immediately\r\n self.Roomba.Move(0,0)\r\n #-------------determine position of obstacles-------------\r\n l_bump = 1 if bump&2 !=0 else 0\r\n r_bump = 1 if bump& 1 !=0 else 0\r\n # Assume Left , right bumpers are at -45 degree, 45 degree\r\n # Then find the average degree of object:0, -45, 45 degree\r\n b_avg_angle = 45*(r_bump -l_bump)\r\n prob_obs /= (prob_obs.sum()+1.0)\r\n # average angles of obstacle detected by light bumper\r\n # [-90, -60,-30,30,60,90] are heading angles of 6 analog light bumper\r\n lb_avg_agl = np.dot(prob_obs,[-90, -60,-30,30,60,90])\r\n\r\n # if there are 2 obstacles\r\n if np.abs(lb_avg_agl - b_avg_angle)>=60 or (np.sign(lb_avg_agl) !=np.sign(b_avg_angle)):\r\n th = self.Motion.theta + lb_avg_agl\r\n x = self.Motion.x + d_obs * math.cos(th)\r\n y = self.Motion.y + d_obs * math.sin(th)\r\n x = int(x)\r\n y = int(y)\r\n s= (x,y)\r\n if obstacles.count(s) == 0:\r\n obstacles.append(s)\r\n\r\n th = self.Motion.theta + b_avg_angle\r\n x = self.Motion.x + d_obs * 
math.cos(th)\r\n y = self.Motion.y + d_obs * math.sin(th)\r\n x = int(x)\r\n y = int(y)\r\n s = (x,y)\r\n if obstacles.count(s) ==0:\r\n obstacles.append(s)\r\n\r\n else:\r\n # if there is 1 obstacle\r\n alg = (b_avg_angle+lb_avg_agl)/2.0\r\n th= self.Motion.theta+ alg\r\n x = self.Motion.x + d_obs * math.cos(th)\r\n y = self.Motion.y + d_obs * math.sin(th)\r\n x = int(x)\r\n y = int(y)\r\n s = (x,y)\r\n if obstacles.count(s) == 0:\r\n obstacles.append(s)\r\n\r\n # check if the obstacle is one of other agents\r\n for k in self.global_trans.keys():\r\n # Format: self.global_trans={id: (degree, [a,st,s_t+1])}\r\n states = self.global_trans[k][1]\r\n st = self.get_gridState(states[1])\r\n st1 = self.get_gridState(states[2])\r\n # if obstacles are other agents, remove them\r\n for o in obstacles:\r\n grid_o = self.get_gridState((o[0],o[1],th))\r\n if (grid_o[0],grid_o[1]) == (st[0],st[1]) or (grid_o[0],grid_o[1]) == (st1[0],st1[1]):\r\n obstacles.remove(o)\r\n return terminal, obstacles", "def _find_obstacle(self, obstacle_type='*traffic_light*'): \r\n obst = list()\r\n \r\n _actors = self._world.get_actors()\r\n _obstacles = _actors.filter(obstacle_type)\r\n\r\n\r\n for _obstacle in _obstacles:\r\n trigger = _obstacle.trigger_volume\r\n\r\n _obstacle.get_transform().transform(trigger.location)\r\n \r\n distance_to_car = trigger.location.distance(self._vehicle.get_location())\r\n\r\n a = np.sqrt(\r\n trigger.extent.x ** 2 +\r\n trigger.extent.y ** 2 +\r\n trigger.extent.z ** 2)\r\n b = np.sqrt(\r\n self._vehicle.bounding_box.extent.x ** 2 +\r\n self._vehicle.bounding_box.extent.y ** 2 +\r\n self._vehicle.bounding_box.extent.z ** 2)\r\n\r\n s = a + b + 10\r\n \r\n if distance_to_car <= s:\r\n # the actor is affected by this obstacle.\r\n obst.append(_obstacle)\r\n\r\n \"\"\"self._debug.draw_box(carla.BoundingBox(_obstacle.get_transform().location, carla.Vector3D(0.5,0.5,2)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,255,0,0),\r\n 0\r\n )\"\"\"\r\n \"\"\"self._debug.draw_box(carla.BoundingBox(trigger.location, carla.Vector3D(0.1,0.1,10)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n \r\n \"\"\"self._debug.draw_box(carla.BoundingBox(trigger.location, carla.Vector3D(0.1,0.1,2)),\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n \"\"\"self._debug.draw_box(trigger,\r\n _obstacle.get_transform().rotation, \r\n 0.05, \r\n carla.Color(255,0,0,0),\r\n 0\r\n )\"\"\"\r\n\r\n return obst", "def tick(self):\n time.sleep(self.sleep_time)\n self.time += 1\n print(\"[Turn \" + str(self.time) + \"] Tick tock...\")\n directions = [(0, -1), (1, -1), (1, 0), (1, 1), (0, 1), (-1, 1), (-1, 0), (-1, -1)]\n for i in range(len(self.robots)):\n self.robots[i][2] = (self.robots[i][2] + self.robots[i][3]) % 8\n self.robots[i][3] = 0\n self.robots[i][0] += directions[self.robots[i][2]][0]\n self.robots[i][1] += directions[self.robots[i][2]][1]\n if self.robots[i][0] < 0 or self.robots[i][0] >= self.width or \\\n self.robots[i][1] < 0 or self.robots[i][1] >= self.height:\n self.robots = []\n raise RobotWallCrashException # A robot crashed into a wall! Simulation over!\n for j in range(len(self.robots)):\n if i != j:\n if self.robots[i][0] == self.robots[j][0] and self.robots[i][1] == self.robots[j][1]:\n self.robots = []\n raise RobotCollisionException # A robot crashed into another robot! 
Simulation over!\n for j in range(len(self.items)):\n if self.robots[i][0] == self.items[j][0] and self.robots[i][1] == self.items[j][1]:\n if self.items[j][2] == 1:\n self.robots = []\n raise RobotFoundTreasureException # A robot found the treasure! You win!\n elif self.items[j][2] == 2:\n self.robots = []\n raise RobotObjectCrashException # A robot crashed into an object!\n if random.random() > self.reliability:\n print(\"*glug-glug-glug* Oil leak detected!\")\n self.items.append([self.robots[i][0], self.robots[i][1], 2])", "async def show_obstacles(canvas):\n\n while True:\n boxes = []\n\n for obstacle in obstacle_manager:\n boxes.append(obstacle.get_bounding_box())\n\n for x, y, frame in boxes:\n draw_frame(canvas, x, y, frame)\n\n await Sleep(1)\n\n for x, y, frame in boxes:\n draw_frame(canvas, x, y, frame, negative=True)", "def through_obstacle(line, obstacles):\r\n noofpoints = 20\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def check_obstacle_ahead(distances, angles, save_index = None, length_to_check = 250, half_robot_width = 180, threshold_low = 12, threshold_high = 100):\n distances = np.array(distances)\n angles = np.array(angles)\n # compute angle to differentiate regions\n delta = np.arctan(half_robot_width / length_to_check) * 57.3\n\n # compute the indices for the 2 regions of interest\n idcs1 = np.logical_or(np.logical_and(angles <= 90, angles >= delta), np.logical_and(angles >= 270, angles <= (360 - delta)))\n idcs2 = np.logical_or(np.logical_and(angles >= 0, angles <= delta), np.logical_and(angles <= 360, angles >= (360 - delta)))\n\n # compute the critical ditance for those regions (set of angles)\n critical_distance_1 = half_robot_width / np.abs(np.sin(angles[idcs1] / 57.3))\n critical_distance_2 = length_to_check / np.abs(np.cos(angles[idcs2] / 57.3))\n\n # compare actual distances with critical distances\n obstacles1 = distances[idcs1] < critical_distance_1\n obstacles2 = distances[idcs2] < critical_distance_2\n \n # save for debug\n if save_index is not None:\n np.save(\"/home/arthur/dev/ros/data/lidar/angles_{}.npy\".format(save_index), angles)\n np.save(\"/home/arthur/dev/ros/data/lidar/distances_{}.npy\".format(save_index), distances)\n \n # return true depending on the obstacle detection\n count = (np.count_nonzero(obstacles1) + np.count_nonzero(obstacles2))\n return count >= threshold_low and count <= threshold_high", "def checkObstacles(dist_compl, centerBoxCoordinate, box_width, box_height, threshold=2):\n # move the coordinate system to the center + box_height/2\n #plt.plot(dist_compl.real,dist_compl.imag,'g.')\n shift_dist_compl= dist_compl-( centerBoxCoordinate-np.array([box_height/2+0j]) )\n #plt.plot(dist_compl.real,dist_compl.imag,'r.')\n # now look in the box in front of you\n obstacleIdx = (shift_dist_compl.real<box_height)*(abs(shift_dist_compl.imag)<((box_width)))\n #plt.show()\n return sum(obstacleIdx)>threshold", "def hit_sonar(self, i):\n\n est_obstacle_dist_ = 1000\n closest_object = None\n\n # calculate sonar distance from the center of robot0\n sonar_distance = sqrt(self.sonar_coordinates[i][0]**2 +\n self.sonar_coordinates[i][1]**2) / 100\n # calculate angle of robot + sonar_angle\n sonar_robot_angle = self.x_prior[2] + self.sonar_thetas[i]\n\n # calculate predicted sonar position\n sonar_x = self.x_prior[0] + sonar_distance*sin(sonar_robot_angle)\n sonar_y = self.x_prior[1] + 
sonar_distance*cos(sonar_robot_angle)\n\n for object_loc in self.obstacle_locs:\n\n dist_x = object_loc[0] - sonar_x\n dist_y = object_loc[1] - sonar_y\n\n # distance between robot and obstacle\n estimated_robot_object_dist = sqrt(dist_x**2 + dist_y**2)\n\n # if obstacle is out of sensor range compare it with other obstacle\n if estimated_robot_object_dist > self.Rmax:\n continue\n\n angle = atan2(dist_y, dist_x)\n theta = -angle + sonar_robot_angle\n\n # if obstacle is outside sensor angle check other obstacles\n if theta > self.th3db:\n continue\n\n dist_obst_sonar_x = self.x_prior[0] - object_loc[0]\n dist_obst_sonar_y = self.x_prior[1] - object_loc[1]\n\n # measurement of i-th sonar\n est_obstacle_dist = sqrt(dist_obst_sonar_x**2 + dist_obst_sonar_y**2)\n\n # save closest obstacle\n if est_obstacle_dist < est_obstacle_dist_:\n est_obstacle_dist_ = est_obstacle_dist\n closest_object = object_loc\n\n # error_hack for object_loc sensor\n if not closest_object:\n closest_object = object_loc\n\n # returns estimated obstacle distance, sonar measurement and nearest obstacle location\n return(est_obstacle_dist_, self.sonardata[i], closest_object)", "def obstacle_prone_area(self,image):\r\n\r\n start_x=int(self.start[0])\r\n start_y=int(self.start[1])\r\n goal_x=int(self.goal[0])\r\n goal_y=int(self.goal[1])\r\n print(goal_x,goal_y)\r\n if (image[int(self.maximum_size-goal_x),int(goal_y),0]==0) or ((image[int(self.maximum_size-start_x),int(start_y),0]==0)):\r\n #print(1)\r\n return False\r\n else:\r\n #print(2)\r\n return True", "def obstacles_callback(self, data):\n obs_pos = [(obs.ObsPosition.x, obs.ObsPosition.y, obs.ObsPosition.z)\n for obs in data.obs]\n obs_yaw = np.array([obs.ObsTheta for obs in data.obs])\n if len(obs_pos)==0:\n self.obs_risk = 0.0\n self.min_obs_dist = self.detect_obstacle_range + 100.0\n else:\n disp_vec = np.array(obs_pos) - self.car_pos # displacement\n dist_obs = np.linalg.norm(disp_vec, axis=1) # obstacle distance\n # ego heading unit vector\n ego_hdg = (np.cos(self.car_euler[2]), np.sin(self.car_euler[2]), 0)\n # cosine of ego heading and obs displacment\n obs_cosine = np.dot(disp_vec, ego_hdg)/dist_obs\n # angle of obs displacement w.r.t ego heading\n obs_angle = np.arccos(obs_cosine)\n # raised cosine, 1.0 within a narrow angle ahead, quickly rolloff\n # to 0.0 as angle increases \n obs_rcos = self.raised_cosine(obs_angle, np.pi/24, np.pi/48)\n # distance risk is Laplacian normalized by detection rangei\n risk_dist = np.exp(-0.1*(dist_obs-self.detect_obstacle_range))\n # relative angle between headings of ego car and obs car\n # shifted by pi\n rel_angle = self.car_euler[2] - obs_yaw + np.pi\n rel_angle = (rel_angle + np.pi) % (2*np.pi) - np.pi\n collide_rcos = self.raised_cosine(rel_angle, np.pi/24, np.pi/48)\n # total directional obs risk is distance risk multiplied by\n # raised-cosied directional weight.\n self.obs_risk = np.sum(\n risk_dist * (obs_rcos+0.1) * (collide_rcos+0.1)\n )\n if np.isnan(self.obs_risk):\n self.obs_risk = 0.0\n # idx = np.argsort(dist_obs)[::]\n # minimum obs distance\n self.min_obs_dist = min(dist_obs)\n near_obs = True if self.min_obs_dist<self.detect_obstacle_range else False\n self.pub_obs_risk.publish(self.obs_risk)\n self.pub_nearest_obs.publish(near_obs)", "def avoidObstacles(inputArray):\n\n max_step = max(inputArray) + 1\n \n min_step = 1\n \n obstacles = set(inputArray)\n \n while min_step < max_step:\n steps = set(range(0, max_step + 1, min_step))\n \n collisions = len(set.intersection(steps, obstacles))\n \n if 
collisions < 1:\n return min_step\n \n else:\n min_step += 1\n \n return min_step", "def through_obstacle(line, obstacles):\r\n noofpoints = 100\r\n for i in range(noofpoints):\r\n if inside_obstacle((line[0]+(i*(line[2]-line[0])/noofpoints), line[1]+(i*(line[3]-line[1])/noofpoints)), obstacles) == 1:\r\n return 1\r\n return 0", "def process_obstacle(color, cx, cy, box, x, y, obj_length, obj_height, obj_depth,\n\t\t\t\t\t equi_diameter, obstacle_list, obstacle_lifetime, obstacle_id, visualize, send_data):\n\tcoords = list(depth_to_point_cloud_pos(cx, cy, obj_depth)) # convert obstacle depth to XYZ coordinate\n\n\t#theta = CameraPosition['azimuth'] * math.pi / 180 # get robot pitch angle in radians\n\t#coords[0] = CameraPosition['x'] - coords[0] * math.cos(theta) # convert relative obstacle position to global\n\t#coords[2] = CameraPosition['y'] + coords[2] * math.sin(theta)\n\tmm_diameter = equi_diameter * (1.0 / CameraParams['fx']) * obj_depth # convert pixel diameter to mm\n\n\tif 100 < mm_diameter < 400:\n\t\tnew_obstacle = True\n\t\tcurrent_obstacle = None\n\t\tfor obstacle in obstacle_list:\n\t\t\tx_match = abs(obstacle.x - coords[0]) < 0.3\n\t\t\ty_match = abs(obstacle.y - coords[2]) < 0.3\n\t\t\tz_match = abs(obstacle.z - coords[1]) < 0.5\n\t\t\tdiameter_match = abs(obstacle.diameter - mm_diameter) / 1000. < 0.5\n\t\t\tif x_match and y_match:\n\t\t\t\tobstacle.x = coords[0]\n\t\t\t\tobstacle.y = coords[2]\n\t\t\t\tobstacle.z = coords[1]\n\t\t\t\tobstacle.diameter = mm_diameter / 1000.\n\t\t\t\tnew_obstacle = False\n\t\t\t\tobstacle.lifetime = obstacle_lifetime\n\t\t\t\tif send_data:\n\t\t\t\t\tsend_obstacle_data(obstacle)\n\t\t\t\tcurrent_obstacle = Obstacle(obstacle.id,\n\t\t\t\t\t\t\t\t\t\t\tobstacle.x,\n\t\t\t\t\t\t\t\t\t\t\tobstacle.y,\n\t\t\t\t\t\t\t\t\t\t\tobstacle.z,\n\t\t\t\t\t\t\t\t\t\t\tobstacle.diameter,\n\t\t\t\t\t\t\t\t\t\t\tobstacle_lifetime)\n\t\t\t\tif obstacle.lifetime == 0:\n\t\t\t\t\tobstacle_list.remove(obstacle)\n\t\t\t\tbreak\n\t\tif new_obstacle:\n\t\t\tcurrent_obstacle = Obstacle(obstacle_id,\n\t\t\t\t\t\t\t\t\t\tcoords[0],\n\t\t\t\t\t\t\t\t\t\tcoords[2],\n\t\t\t\t\t\t\t\t\t\tcoords[1],\n\t\t\t\t\t\t\t\t\t\tmm_diameter / 1000.,\n\t\t\t\t\t\t\t\t\t\tobstacle_lifetime)\n\t\t\tobstacle_id += 1\n\t\t\tif send_data:\n\t\t\t\tsend_obstacle_data(current_obstacle)\n\t\t\tobstacle_list.append(current_obstacle)\n\n\t\tif visualize:\n\t\t\t# begin visualization\n\t\t\tcv2.drawContours(color, [box], 0, (0, 0, 255), 1)\n\t\t\tcv2.rectangle(color, (x, y), (x + obj_length, y + obj_height), (0, 255, 0), 2)\n\t\t\tfont = cv2.FONT_HERSHEY_SIMPLEX\n\t\t\tcv2.putText(color, 'id = %d' % current_obstacle.id, (cx, cy + 15), font, 0.4, (255, 0, 255),\n\t\t\t\t\t\t1, cv2.LINE_AA)\n\t\t\tcv2.putText(color, \"x = %.2f\" % coords[0], (cx, cy + 30), font, 0.4, (0, 0, 255), 1,\n\t\t\t\t\t\tcv2.LINE_AA)\n\t\t\tcv2.putText(color, \"y = %.2f\" % coords[2], (cx, cy + 45), font, 0.4, (0, 255, 0), 1,\n\t\t\t\t\t\tcv2.LINE_AA)\n\t\t\tcv2.putText(color, \"z = %.2f\" % (obj_depth / 1000), (cx, cy + 60), font, 0.4, (255, 0, 127),\n\t\t\t\t\t\t1, cv2.LINE_AA)\n\t\t\tcv2.putText(color, \"diameter = %.2f\" % (mm_diameter / 1000), (cx, cy + 75), font, 0.4,\n\t\t\t\t\t\t(255, 127, 0), 1,\n\t\t\t\t\t\tcv2.LINE_AA)\n\treturn obstacle_id", "def check_falling(self, obstacles):\n self.rect.move_ip((0, 1))\n if not pygame.sprite.spritecollideany(self, obstacles):\n if not self.climb:\n\t self.fall = True\n\n self.rect.move_ip((0, -1))", "def obstacles(self):\r\n\r\n #Radious arround the head\r\n limit_sight = 
self.snake_sight\r\n head = self.body[0].position\r\n binary_map_complete = self.complete_mapping()\r\n map_matrix = np.matrix(binary_map_complete)\r\n obstacles = []\r\n\r\n #limits in all directions\r\n left_x = head[0] - limit_sight\r\n right_x = head[0] + limit_sight\r\n up_y = head[1] - limit_sight\r\n down_y = head[1] + limit_sight\r\n\r\n #submatrix with limits size\r\n snake_sight = map_matrix[up_y:down_y+1, left_x:right_x+1]\r\n\r\n #Special cases where the snake approximates to the borders\r\n ##Corners\r\n if left_x < 0 and up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_x_matrix = map_matrix[0:down_y+1, interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], 0:right_x+1]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_x_y_matrix, interval_y_matrix]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n snake_sight = np.r_[temporal, snake_sight] \r\n return snake_sight\r\n \r\n if left_x < 0 and down_y > self.limits[1] - 1:\r\n snake_sight = map_matrix[up_y:self.limits[1], 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_x_matrix = map_matrix[up_y:self.limits[1], interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], 0:right_x+1]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_x_y_matrix, interval_y_matrix]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n snake_sight = np.r_[snake_sight, temporal]\r\n return snake_sight\r\n \r\n if right_x > self.limits[0]-1 and up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_x_matrix = map_matrix[0:down_y+1, interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:self.limits[0]]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_y_matrix, interval_x_y_matrix]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n snake_sight = np.r_[temporal, snake_sight]\r\n return snake_sight\r\n \r\n if right_x > self.limits[0]-1 and down_y > self.limits[1]-1:\r\n snake_sight = map_matrix[up_y:self.limits[1], left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_x_matrix = map_matrix[up_y:self.limits[1], interval_x[0]:interval_x[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:self.limits[0]]\r\n interval_x_y_matrix = map_matrix[interval_y[0]:interval_y[1], interval_x[0]:interval_x[1]]\r\n temporal = np.c_[interval_y_matrix, interval_x_y_matrix]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n snake_sight = np.r_[snake_sight, temporal]\r\n return snake_sight\r\n\r\n ##Middle\r\n if left_x < 0:\r\n snake_sight = map_matrix[up_y:down_y+1, 0:right_x+1]\r\n interval_x = [self.limits[0] + left_x, self.limits[0]]\r\n interval_x_matrix = map_matrix[up_y:down_y+1, interval_x[0]:interval_x[1]]\r\n snake_sight = np.c_[interval_x_matrix, snake_sight]\r\n return snake_sight\r\n\r\n if right_x > self.limits[0]-1:\r\n 
snake_sight = map_matrix[up_y:down_y+1, left_x:self.limits[0]]\r\n interval_x = [0, right_x - self.limits[0] + 1]\r\n interval_x_matrix = map_matrix[up_y:down_y+1, interval_x[0]:interval_x[1]]\r\n snake_sight = np.c_[snake_sight, interval_x_matrix]\r\n return snake_sight\r\n\r\n if up_y < 0:\r\n snake_sight = map_matrix[0:down_y+1, left_x:right_x+1]\r\n interval_y = [self.limits[1] + up_y, self.limits[1]]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:right_x+1]\r\n snake_sight = np.r_[interval_y_matrix, snake_sight]\r\n return snake_sight\r\n \r\n if down_y > self.limits[1]-1:\r\n snake_sight = map_matrix[up_y:self.limits[1], left_x:right_x+1]\r\n interval_y = [0, down_y - self.limits[1] + 1]\r\n interval_y_matrix = map_matrix[interval_y[0]:interval_y[1], left_x:right_x+1]\r\n snake_sight = np.r_[snake_sight, interval_y_matrix]\r\n return snake_sight\r\n\r\n return snake_sight", "def generate_possible_paths(self, obstacle):\n if self.does_uav_intersect_obstacle_vertically(obstacle, self.drone.get_point(), self.drone.get_waypoint_holder().get_current_waypoint()):\n if self.does_path_intersect_obstacle_2d(obstacle, self.drone.get_point(), self.drone.get_waypoint_holder().get_current_waypoint()):\n new_attempt_pos_points = [\n [obstacle.get_point()[0] + obstacle.get_radius(), obstacle.get_point()[1] + obstacle.get_radius(), self.drone.get_point()[2]],\n [obstacle.get_point()[0] - obstacle.get_radius(), obstacle.get_point()[1] - obstacle.get_radius(), self.drone.get_point()[2]],\n [obstacle.get_point()[0] + obstacle.get_radius(), obstacle.get_point()[1] - obstacle.get_radius(), self.drone.get_point()[2]],\n [obstacle.get_point()[0] - obstacle.get_radius(), obstacle.get_point()[1] + obstacle.get_radius(), self.drone.get_point()[2]],\n [obstacle.get_point()[0], obstacle.get_point()[1] + obstacle.get_radius(), obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)],\n [obstacle.get_point()[0], obstacle.get_point()[1] - obstacle.get_radius(), obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)],\n [obstacle.get_point()[0] + obstacle.get_radius(), obstacle.get_point()[1], obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)],\n [obstacle.get_point()[0] - obstacle.get_radius(), obstacle.get_point()[1], obstacle.get_height() + (Constants.STATIONARY_OBSTACLE_SAFETY_RADIUS * 2)]\n ]\n\n new_paths = []\n for new_pos_point in new_attempt_pos_points:\n if not self.does_path_intersect_obstacle_3d(obstacle, self.drone.get_point(), new_pos_point) and self.flight_boundary.is_point_in_bounds(new_pos_point):\n for recursive_new_pos_point in new_attempt_pos_points:\n if self.flight_boundary.is_point_in_bounds(recursive_new_pos_point) and abs(recursive_new_pos_point[2] - new_pos_point[2]) < 5:\n if recursive_new_pos_point[0] != new_pos_point[0] or recursive_new_pos_point[1] != new_pos_point[1]:\n if not self.does_path_intersect_obstacle_3d(obstacle, new_pos_point, recursive_new_pos_point) and not self.does_path_intersect_obstacle_3d(obstacle, recursive_new_pos_point, self.drone.get_waypoint_holder().get_current_waypoint()):\n new_paths.append([new_pos_point, recursive_new_pos_point])\n\n # Uncomment for DEBUGGING ONLY\n for path in new_paths:\n print(\"Point:\", str(path))\n\n return new_paths\n\n return []", "def test_too_far_scenario():\n start_too_far_scenario(config.MERAKI_CAMERAS[0][\"serial\"])\n time.sleep(WARN_EVENT_THRESHOLD)\n start_too_far_scenario(config.MERAKI_CAMERAS[0][\"serial\"])\n return \"ok\"", "def 
check_obstructed(r1,r2): \n \n if r1==r2:\n return False\n \n #Densely sample line connecting r1 and r2.\n #If any of those sampled points is inside the rectangle, then the \n #line of sight intersects the rectangle and the tower's view is\n #obstructed.\n NP = 1000\n sampled_x = np.linspace(r1[0],r2[0],NP)\n sampled_y = np.linspace(r1[1],r2[1],NP)\n for x,y,w,h in self.coordinates__obstacles:\n for pt in xrange(NP):\n if (sampled_x[pt] > x) and (sampled_x[pt] < x+w) and \\\n (sampled_y[pt] > y) and (sampled_y[pt] < y+h):\n return True\n return False", "def still_going(ball_stats):\n if ball_stats[3] <= 0: # if vy = vx = 0 we should stop\n return False\n\n if ball_stats[0] > 41 * 2.54 or ball_stats[0] < 0: # checking if we are out of the lane\n return False\n pins_loc = ORIG_PINS_LOC.copy()\n for p in pins_loc:\n if dist((ball_stats[0], ball_stats[1]), p) < R_BALL + R_PIN: # checking if we hit one of the balls\n return False\n return True", "async def show_obstacles(canvas):\n\n while True:\n boxes = []\n\n for obstacle in OBSTACLES:\n boxes.append(obstacle.dump_bounding_box())\n\n for row, column, frame in boxes:\n draw_frame(canvas, row, column, frame)\n\n await asyncio.sleep(0)\n\n for row, column, frame in boxes:\n draw_frame(canvas, row, column, frame, negative=True)", "def Continue():\n # adjust this to take as many steps as you need\n return warp.top.it <= 500" ]
[ "0.69928783", "0.6701212", "0.62877244", "0.6276357", "0.6274189", "0.61626023", "0.6113274", "0.61114275", "0.60970694", "0.6059443", "0.5980487", "0.5887921", "0.5880834", "0.58741724", "0.58602977", "0.58327454", "0.58186597", "0.58019084", "0.5795218", "0.5789762", "0.57829326", "0.57824415", "0.57789934", "0.5747132", "0.56626105", "0.5653088", "0.5651129", "0.56476825", "0.5646675", "0.5610755" ]
0.6939739
1
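A pattern worth pulling out of the obstacle-counting negatives above: each of them counts objects with a boolean latch that flips on threshold crossings, so a contiguous run of close readings is counted once. A minimal self-contained sketch of that latch (the threshold and the fake sweep below are illustrative values, not taken from any of the snippets):

def count_objects(scan, threshold_mm=200):
    """Count contiguous runs of readings closer than threshold_mm.

    scan is any iterable of range readings, with 0 meaning "no echo".
    The latch mirrors the found_something / see_an_object flags above:
    increment once on the leading edge of an object, reset once the
    readings open up again.
    """
    seeing_object = False
    count = 0
    for dist in scan:
        if dist and dist < threshold_mm and not seeing_object:
            seeing_object = True   # leading edge: new object
            count += 1
        elif dist and dist > threshold_mm and seeing_object:
            seeing_object = False  # trailing edge: object ended
    return count

# Example: two "objects" in a fake sweep
print(count_objects([900, 150, 120, 850, 0, 180, 940]))  # -> 2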
Helper to Planner.drive_to(). Make one attempt to get around a home or target tag.
def _avoid_tag(self, id=0, ignore=Obstacle.IS_SONAR): sorted_detections = self._sort_tags_left_to_right( self.swarmie.get_latest_targets().detections, id=id ) # if count == 3: # last resort # self.current_state = Planner.STATE_DRIVE # angle = self._get_angle_to_face(point) # self.swarmie.turn( # angle, # ignore=Obstacle.TAG_TARGET, # throw=False # ) # self.result = self.swarmie.drive( # .75, # ignore=Obstacle.TAG_TARGET, # throw=False # ) if len(sorted_detections) == 0: # no tags in view anymore print("I can't see anymore tags, I'll try creeping", "and clearing.") self.prev_state = self.current_state self.current_state = Planner.STATE_DRIVE self.swarmie.drive( 0.1, ignore=Obstacle.SONAR_BLOCK, throw=False ) drive_result = self.clear(math.pi / 8, ignore=ignore) else: left_angle, left_dist = \ self._get_angle_and_dist_to_avoid( sorted_detections[0], direction='left' ) right_angle, right_dist = \ self._get_angle_and_dist_to_avoid( sorted_detections[-1], direction='right' ) angle = left_angle dist = left_dist if (self.current_state == Planner.STATE_AVOID_LEFT or self.prev_state == Planner.STATE_AVOID_LEFT): # Keep going left. Should help avoid bouncing back # and forth between tags just out of view. print("I was turning left last time, so I'll keep", "it that way.") self.prev_state = self.current_state self.current_state = Planner.STATE_AVOID_LEFT angle = left_angle dist = left_dist elif (self.current_state == Planner.STATE_AVOID_RIGHT or self.prev_state == Planner.STATE_AVOID_RIGHT): # Keep going right print("I was turning right last time, so I'll", "keep it that way.") self.prev_state = self.current_state self.current_state = Planner.STATE_AVOID_RIGHT angle = right_angle dist = right_dist else: # pick whichever angle is shortest if abs(right_angle) < abs(left_angle): print('Right looks most clear, turning right.') # print('Right turn makes most sense, turning right.') self.prev_state = self.current_state self.current_state = Planner.STATE_AVOID_RIGHT angle = right_angle dist = right_dist else: print('Left looks most clear, turning left.') # print('Left turn makes most sense, turning left') self.prev_state = self.current_state self.current_state = Planner.STATE_AVOID_LEFT _turn_result, drive_result = self._go_around( angle, dist ) return drive_result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def target_tag(self):\n raise NotImplementedError", "def findTarget(self, initial_call):\n if self.vision.hasTarget():\n self.next_state(\"driveToTarget\")\n else:\n self.chassis.setOutput(self.SEARCH_SPEED, -self.SEARCH_SPEED)", "def become_target(self):\n\t\traise NotImplementedError", "def goto(vehicle, dNorth, dEast):\n goto_function = vehicle.simple_goto # can be changed\n currentLocation = vehicle.location.global_relative_frame\n targetLocation = get_location_metres(currentLocation, dNorth, dEast)\n targetDistance = get_distance_metres(currentLocation, targetLocation)\n goto_function(targetLocation)\n\n #Stop action if we are no longer in guided mode.\n while vehicle.mode.name == \"GUIDED\": \n remainingDistance = get_distance_metres(vehicle.location.global_relative_frame, targetLocation)\n #print \"Distance to target: \", remainingDistance\n if remainingDistance <= shared.WP_RADIUS: #Just below target, in case of undershoot.\n #print \"Reached target\"\n break;\n\n time.sleep(0.5)", "def gohome(self):\n raise Exception(\"Not implemented\")", "def move_to(self, target):\n # type: (RoomPosition) -> None\n hive = self.home.hive\n home = self.find_home()\n origin = self.find_origin()\n\n total_distance = hive.honey.find_path_length(origin, target, self.new_movement_opts())\n\n min_distance_from_home = Infinity\n min_distance_to_origin = Infinity\n min_distance_to_target = movement.chebyshev_distance_room_pos(self.members_movement_order()[0].pos, target)\n max_distance_to_target = -Infinity\n any_hostiles = False\n for member in self.members:\n distance_to_home = movement.chebyshev_distance_room_pos(member.pos, home)\n distance_to_origin = movement.chebyshev_distance_room_pos(member.pos, origin)\n distance_to_target = movement.chebyshev_distance_room_pos(member.pos, target)\n if distance_to_home < min_distance_from_home:\n min_distance_from_home = distance_to_home\n if distance_to_target > max_distance_to_target:\n max_distance_to_target = distance_to_target\n if distance_to_origin < min_distance_to_origin:\n min_distance_to_origin = distance_to_origin\n if len(member.room.find(FIND_HOSTILE_CREEPS)):\n any_hostiles = True\n\n if min_distance_to_origin > 100:\n mv_order = self.members_movement_order()\n self.set_origin(mv_order[len(mv_order) - 1].pos)\n if min_distance_from_home < 50 and (max_distance_to_target < total_distance / 2):\n self.log(\"move_to: chose stage 0 (minimum distance from home: {}, maximum distance from home: {},\"\n \" total distance: {})\"\n .format(min_distance_from_home, max_distance_to_target, total_distance))\n self.move_to_stage_0(target)\n elif min_distance_to_target < 300 and any_hostiles:\n self.move_to_stage_2(target)\n elif min_distance_to_target > 60 or max_distance_to_target > 200:\n # self.log(\"move_to: chose stage 1 (minimum distance from home: {}, total distance: {}, \"\n # \"minimum distance to target: {}, maximum distance to target: {})\"\n # .format(min_distance_from_home, total_distance,\n # min_distance_to_target, max_distance_to_target))\n self.move_to_stage_1(target, any_hostiles)\n else:\n # self.log(\"move_to: chose stage 2 (minimum distance from home: {}, total distance: {}, \"\n # \"minimum distance to target: {}, maximum distance to target: {})\"\n # .format(min_distance_from_home, total_distance,\n # min_distance_to_target, max_distance_to_target))\n self.move_to_stage_2(target)", "def is_gentarget(self, target):\r\n raise NotImplementedError", "def the(target: Target) -> \"SwitchTo\":\n return SwitchTo(target)", "def 
_move_home_strategy(self):\n\n if self.last_status == \"Fail\":\n result = \"turnRight\"\n else:\n result = self.move_toward(self._home)\n if result is None:\n result = \"drop\"\n\n assert result is not None\n return result", "def tidy_tags(self, tags):\n tags = tags.split()\n # add target tag if not a calibrator\n if not any(\"cal\" in tag for tag in tags):\n if \"target\" not in tags:\n tags.append(\"target\")\n return \" \".join(tags)", "def test_landing_page_tag(self, setup_landing, click, locate):\r\n locate.locate_text_part('Browse by tags')\r\n click.click_xpath(LocLandind.tag)\r\n locate.locate_text_part('Here we go with the icons related')\r\n import time\r\n time.sleep(5)\r\n locate.locate_xpath(LocLandind.icon_in_tag)", "def sees_home_tag(self):\n detections = self.swarmie.get_latest_targets().detections\n\n for detection in detections:\n if detection.id == 256:\n return True\n\n return False", "def targets_placeholder(self):", "def drive_to(self, goal, tolerance=0.0, tolerance_step=0.5,\n max_attempts=10, avoid_targets=True, avoid_home=False,\n use_waypoints=True, start_location=None,\n distance_threshold=None):\n print('\\nRequest received')\n self.fail_count = 0\n self.tolerance = tolerance\n\n self.avoid_targets = avoid_targets\n if avoid_targets is True and avoid_home is True:\n avoid_home = False\n self.avoid_home = avoid_home\n\n current_ignore = Obstacle.IS_SONAR\n if self.avoid_targets is True:\n current_ignore |= Obstacle.TAG_TARGET\n elif self.avoid_home is True:\n current_ignore |= Obstacle.TAG_HOME\n\n self.goal.x = goal.x\n self.goal.y = goal.y\n\n self.cur_loc = self.swarmie.get_odom_location()\n self.current_state = Planner.STATE_IDLE\n self.prev_state = Planner.STATE_IDLE\n\n while (not self.cur_loc.at_goal(self.goal,\n Planner.DISTANCE_OK + self.tolerance)\n and self.fail_count < max_attempts):\n\n\n if use_waypoints is True:\n # get new plan and try to drive to first point in it\n point = self._get_next_waypoint(tolerance_step)\n else:\n point = goal\n\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_DRIVE\n # Turn to approximate goal heading while ignoring sonar and tags\n # helps to prevent rover from trying to jump around obstacles\n # before it even starts along its new path\n self.result = self._face_point(\n point,\n ignore=current_ignore ^ Obstacle.IS_SONAR\n )\n\n if self.result == MoveResult.SUCCESS:\n self.result = self.swarmie.drive_to(\n point,\n ignore=Obstacle.SONAR_BLOCK,\n throw=False\n )\n\n if self.result == MoveResult.SUCCESS:\n # Success, we got to our waypoint, or got ourselves out of\n # whatever pickle we were just in.\n # Just get a new plan and drive to next point\n self.fail_count = 0\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_IDLE\n print('Successfully drove to first point in nav plan.')\n\n # otherwise, something went wrong or we found home\n elif self.result == MoveResult.OBSTACLE_HOME:\n self.set_home_locations()\n\n # get around the home tag obstacle\n count = 0\n\n # Give the rover 3 tries to avoid any tags nearby before\n # getting a new nav plan. 
MoveResult.OBSTACLE_SONAR takes\n # priority in the driver code, so it should be safe to continue\n # this loop if the MoveResult is just an OBSTACLE_HOME\n # self.fail_count may exceed limit here, but I'll let it go\n while count < 3 and self.result == MoveResult.OBSTACLE_HOME:\n print('\\nObstacle: Found Home.')\n count += 1\n self.fail_count += 1\n\n detections = self.swarmie.get_latest_targets().detections\n inside_home = self.is_inside_home_ring(detections)\n if inside_home:\n print('\\nGetting out of the home ring!!')\n angle, dist = self.get_angle_and_dist_to_escape_home(\n detections\n )\n self.swarmie.turn(\n angle,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION\n )\n self.result = self.swarmie.drive(\n dist,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION\n )\n\n if self.avoid_home is False:\n # turn back around\n self.swarmie.turn(\n math.pi,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION\n )\n print('Obstacle: Found Home.')\n return MoveResult.OBSTACLE_HOME\n else:\n if self.avoid_home is False:\n print('Obstacle: Found Home.')\n return MoveResult.OBSTACLE_HOME\n\n self.result = self._avoid_tag(id=256,\n ignore=current_ignore)\n\n elif self.result == MoveResult.OBSTACLE_TAG:\n # get around the tag obstacle\n count = 0\n\n # Give the rover 3 tries to avoid any tags nearby before\n # getting a new nav plan. MoveResult.OBSTACLE_SONAR takes\n # priority in the driver code, so it should be safe to continue\n # this loop if the MoveResult is just an OBSTACLE_TAG\n # self.fail_count may exceed limit here, but I'll let it go\n while count < 3 and self.result == MoveResult.OBSTACLE_TAG:\n print('\\nObstacle: Found a Tag.')\n\n if self.avoid_targets is False:\n if not self.sees_home_tag():\n return self.result\n\n count += 1\n self.fail_count += 1\n\n self.result = self._avoid_tag(id=0,\n ignore=current_ignore)\n\n elif self.result == MoveResult.OBSTACLE_SONAR:\n # Check for home and tag obstacles just to be safe, because\n # sonar MoveResults take priority, and would mask a home or\n # target tag in view.\n obstacle = self.swarmie.get_obstacle_condition()\n\n if (obstacle & Obstacle.TAG_HOME == Obstacle.TAG_HOME and\n self.avoid_home is False):\n self.set_home_locations()\n return MoveResult.OBSTACLE_HOME\n\n if (obstacle & Obstacle.TAG_TARGET == Obstacle.TAG_TARGET and\n self.avoid_targets is False):\n return MoveResult.OBSTACLE_TAG\n\n # get around the sonar obstacle\n self.fail_count += 1\n\n print('\\nObstacle: Sonar.')\n left_blocked, center_blocked, right_blocked = \\\n self._check_sonar_obstacles()\n\n if (not left_blocked and\n not center_blocked and not right_blocked):\n print('\\nFake sonar obstacle??')\n pass # 'fake' obstacle?\n\n elif not left_blocked and center_blocked and right_blocked:\n print('Left looks clear, turning left.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_LEFT\n self._go_around(math.pi / 4, 0.7)\n # self.swarmie.drive_to(point, throw=False)\n\n elif left_blocked and center_blocked and not right_blocked:\n print('Right looks clear, turning right.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_RIGHT\n self._go_around(-math.pi / 4, 0.7)\n # self.swarmie.drive_to(point, throw=False)\n\n elif left_blocked and not center_blocked and not right_blocked:\n print('Only left blocked, turning a little right.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_RIGHT\n self._go_around(-math.pi / 6, 0.6)\n # self.swarmie.drive_to(point, throw=False)\n\n elif not 
left_blocked and not center_blocked and right_blocked:\n print('Only right blocked, turning a little left.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_LEFT\n self._go_around(math.pi / 6, 0.6)\n # self.swarmie.drive_to(point, throw=False)\n\n else:\n print('Neither left or right look clear.')\n\n # Only back up if we're far enough away from home for it\n # to be safe. Don't want to back up into the nest!\n if self._is_safe_to_back_up():\n print('Backing up.')\n self.swarmie.drive(\n -0.3,\n ignore=Obstacle.IS_SONAR,\n throw=False\n )\n\n if (self.current_state == Planner.STATE_AVOID_RIGHT or\n self.prev_state == Planner.STATE_AVOID_RIGHT):\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_RIGHT\n self.clear(-math.pi / 4, ignore=current_ignore,\n reset_heading=False)\n self._go_around(-math.pi / 4, 0.75)\n\n else:\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_LEFT\n self.clear(math.pi / 4, ignore=current_ignore,\n reset_heading=False)\n self._go_around(math.pi / 4, 0.75)\n\n elif self.result == MoveResult.PATH_FAIL:\n # shit, hope we can back up if this ever happens\n self.fail_count += 1\n\n print('\\nPath Failure. Backing up.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_REVERSE\n self.swarmie.drive(\n -0.5,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION,\n throw=False\n )\n\n self.cur_loc = self.swarmie.get_odom_location()\n\n if self.fail_count >= max_attempts:\n print('Failed to drive to goal {} times.'.format(\n max_attempts)\n )\n raise PathException(MoveResult.PATH_FAIL)\n\n if start_location is not None:\n current_loc = self.cur_loc.get_pose()\n dist = math.sqrt((start_location.x - current_loc.x) ** 2\n + (start_location.y - current_loc.y) ** 2)\n if dist > distance_threshold:\n raise PathException(MoveResult.PATH_FAIL)\n\n print('Successfully executed nav plan.')\n return MoveResult.SUCCESS", "def homeToMinus57():\n\tif (not checkMotorsInPosition(0, 0)):\n\t\treturn\n\n\tmoveMotor(dkappa, 134.65)\n\tmoveMotor(dktheta, 31.785)\n\tsimpleLog(\"Done\")", "def targeted(self):\n\t\tpass", "def setTarget(t):\n global targetFolder\n if t[-1] != '/':\n t += '/'\n targetFolder = t", "def robot_is_willing_default(requester, action, ctxt) :\n if action.get_actor() == \"compliant robot\" :\n raise ActionHandled()", "def robot_is_wanting_default(giver, object, receiver, ctxt) :\n if receiver==\"compliant robot\" :\n raise ActionHandled()", "def create_onedrive_mounting_point():\n return None", "def can_fetch(self, useragent, url):\n target_url = url\n if self.root_path:\n target_url = re.sub(self.root_path, \"\", target_url)\n return super(Robot, self).can_fetch(useragent, target_url)", "def target(self):", "def do_go_home(self, *arg):\n if self.ready is False:\n if self.pocs.is_weather_safe() is False:\n self.do_power_down()\n\n return\n\n try:\n self.pocs.observatory.mount.slew_to_home()\n except Exception as e:\n print_warning('Problem slewing to home: {}'.format(e))", "def do_go_home(self, *arg):\n if self.ready is False:\n if self.pocs.is_weather_safe() is False:\n self.do_power_down()\n\n return\n\n try:\n self.pocs.observatory.mount.slew_to_home()\n except Exception as e:\n print_warning('Problem slewing to home: {}'.format(e))", "def target(self) -> Optional[str]:\n return pulumi.get(self, \"target\")", "def set_home_locations(self):\n self.swarmie.set_home_gps_location(self.swarmie.get_gps_location())\n\n current_location = 
self.swarmie.get_odom_location()\n current_pose = current_location.get_pose()\n home_odom = Location(current_location.Odometry)\n\n detections = self.swarmie.get_latest_targets().detections\n try:\n for detection in detections:\n if detection.id == 256:\n see_home_tag = True\n home_detection = self._transform_to_odom(detection)\n\n quat = [home_detection.pose.orientation.x,\n home_detection.pose.orientation.y,\n home_detection.pose.orientation.z,\n home_detection.pose.orientation.w]\n _r, _p, yaw = tf.transformations.euler_from_quaternion(\n quat\n )\n yaw += math.pi / 2\n\n home_odom.Odometry.pose.pose.position.x = float(\n home_detection.pose.position.x + 0.5 * math.cos(yaw)\n )\n home_odom.Odometry.pose.pose.position.y = float(\n home_detection.pose.position.y + 0.5 * math.sin(yaw)\n )\n self.swarmie.set_home_odom_location(home_odom)\n return\n\n except tf.Exception:\n pass # use backup below\n\n # project home_odom location 50cm in front of rover's current location\n home_odom.Odometry.pose.pose.position.x = (\n current_pose.x + 0.5 * math.cos(current_pose.theta)\n )\n home_odom.Odometry.pose.pose.position.y = (\n current_pose.y + 0.5 * math.sin(current_pose.theta)\n )\n self.swarmie.set_home_odom_location(home_odom)\n return", "def face_home_tag(self):\n home_detections = self._sort_home_tags_nearest_center(\n self.swarmie.get_latest_targets().detections\n )\n if len(home_detections) > 0:\n angle = self.get_angle_to_face_detection(home_detections[0])\n current_heading = self.swarmie.get_odom_location().get_pose().theta\n self.swarmie.set_heading(\n current_heading + angle,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION\n )", "def browse_target(self):\n return self.type in ('a', 's')", "def goToMyGoal(state):\n return goTo(state, state.my_goal)", "def _default_target(package):\n return package[package.rfind('/')+1:]" ]
[ "0.5209827", "0.51571554", "0.4894787", "0.47458228", "0.4700009", "0.4642209", "0.46304765", "0.458315", "0.45454666", "0.45043343", "0.4478969", "0.44567552", "0.44558376", "0.44286126", "0.44133204", "0.43926364", "0.4371317", "0.4370641", "0.43651053", "0.43491682", "0.4345482", "0.43398127", "0.43364996", "0.43364996", "0.43300092", "0.43274072", "0.43202025", "0.4315381", "0.42980915", "0.4278388" ]
0.51685894
1
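The core decision inside the _avoid_tag document above, reduced to a pure function. The state names and the example angles here are hypothetical; the point is the sticky left/right choice that keeps the rover from oscillating between tags at the edges of its camera view:

import math

AVOID_LEFT, AVOID_RIGHT = 'avoid_left', 'avoid_right'

def choose_avoidance(prev_state, left_angle, right_angle):
    """Pick a dodge direction the way _avoid_tag does.

    If the rover was already dodging one way, keep going that way;
    otherwise take whichever turn is shorter.
    """
    if prev_state == AVOID_LEFT:
        return AVOID_LEFT, left_angle
    if prev_state == AVOID_RIGHT:
        return AVOID_RIGHT, right_angle
    if abs(right_angle) < abs(left_angle):
        return AVOID_RIGHT, right_angle
    return AVOID_LEFT, left_angle

# Fresh encounter: the right turn is shorter, so dodge right ...
print(choose_avoidance(None, math.pi / 3, -math.pi / 6))
# ... and the next attempt sticks with that choice even though
# the left turn is now the shorter one.
print(choose_avoidance(AVOID_RIGHT, math.pi / 8, -math.pi / 2))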
Returns True if it's safe for the rover to back up. It is safe to back up if the rover is further than 1.5 meters from the current home location, or if the rover is within 1.5 meters of home but is facing home. In other words, it's not safe to back up if the rover is close to home and has its back to home.
def _is_safe_to_back_up(self): # Only back up if we're far enough away from home for it # to be safe. Don't want to back up into the nest! home_loc = self.swarmie.get_home_odom_location() current_loc = self.swarmie.get_odom_location().get_pose() dist = math.sqrt((home_loc.x - current_loc.x) ** 2 + (home_loc.y - current_loc.y) ** 2) if dist > 1.5: return True angle_to_home = self.get_angle_to_face_point(home_loc) if abs(angle_to_home) < math.pi / 2: return True return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_home(self):\n return self.last_seen.seconds / 60 <= 2 and self.last_seen.days == 0", "def one_step_back(self):\n if (self.row -1<0):\n return False\n elif (self.battery == 0):\n return False\n elif (self.maze[self.row - 1][self.column] == False):\n return False\n else:\n self.row -= 1\n self.battery -= 1\n return True", "def goUp(self):\n check = self.rover.moveUp()\n if check == True:\n self._checkPortal(self.getRoverLocation())", "def stale(self, now: datetime | None = None) -> bool:\n return (\n self.last_seen is None\n or (now or dt_util.utcnow()) - self.last_seen > self.consider_home\n )", "def check_floor(self):\r\n if self.current_floor > self.destination_floor:\r\n self.down = True\r\n elif self.current_floor < self.destination_floor:\r\n self.up = True", "def is_almost_active(self,\n env\n ):\n if not hasattr(self, \"tolerance\") or self.tolerance is None:\n return False\n c_value = self.get_value(env)\n flag = np.any(np.greater(c_value + self.tolerance, 0.))\n return bool(flag)", "def needs_home(self):\r\n return not bool(self.__lib.CC_CanMoveWithoutHomingFirst(self.__serno))", "def is_pwned(self) -> bool:\n return self.w3.balance(self.sender) > self.initial_balance", "def is_caught_up_well_enough_for_government_work():\n return config.CAUGHT_UP or (config.BLOCKCHAIN_SERVICE_LAST_BLOCK and config.CURRENT_BLOCK_INDEX >= config.BLOCKCHAIN_SERVICE_LAST_BLOCK - 1)", "def has_uav_reached_current_waypoint(self):\n return self.drone.has_reached_waypoint()", "def ishome(self):\n return self._plrevgeoloc.isHome", "def set_leave_home(self):\n all_lights = self.__try_to_get(self.bridge.lights)\n if not all_lights:\n return False\n\n for light in all_lights:\n if self.__try_to_get(light.name) in EXCLUDE_LIGHTS():\n continue\n self.__try_to_run(self._turn_off_light, [light.light_id])\n return True", "def isCurrentPlayerHome(self):\r\n \r\n #creates corresponding starting and ending points for each player\r\n if self.getTurn() == RED:\r\n start = 0\r\n end = 18\r\n else:\r\n start = 6\r\n end = 24\r\n \r\n #checks whether the current player has checkers on corresponding points\r\n for i in range(start, end):\r\n if self.points[i].getTeam() == self.getTurn():\r\n return False\r\n \r\n return True", "def is_up(self):\n \n return self.is_level('up')", "def backedout(self):\n return bool(self.backedoutby)", "def is_burrowed(self) -> bool:\n return self.proto.is_burrowed", "def _move_up(self) -> bool:\n current_agent_node = self._maze.get_player_node()\n\n if current_agent_node.y == 0:\n # Can't go up. 
Already on the top row\n return False\n else:\n next_node = self._maze.get_node_up(current_agent_node)\n return self._handle_movement(current_agent_node, next_node)", "def is_away_mode_on(self):\n return self._away", "def someone_home(self) -> bool:\n return self._someone_home", "def is_upper(self):\n return self.z < 0", "def isUp ( self ) :\n return not self.isDown()", "def see_behind(self):\n return True", "def service_recently_down(self):\n\n outage_history_start = self.local_tz.localize(datetime.now()) - timedelta(hours=3)\n\n return outage_history_start <= self.start_time <= self.local_tz.localize(datetime.now())", "def is_game_over(self):\r\n\r\n if self.winner != 0:\r\n return True\r\n\r\n return False", "def minusToHome():\n\tif (not checkMotorsInPosition(-134.76, -34.197)):\n\t\treturn\n\n\tmoveMotor(dktheta, 0)\n\tmoveMotor(dkappa, 0)\n\tsimpleLog(\"Done\")", "def game_over(self):\n return bool(self.last_round and self.last_player == self.current_player)", "def is_game_over(self):\n return self.state.all_avatars_placed() and self.state.is_game_over()", "def _wait_home_origin(xbee):\n util.log_info(\"Waiting HOME_ORIGIN.\")\n wait_count = 0\n while not shared.home_origin:\n time.sleep(1)\n wait_count = wait_count + 1\n \n if shared.status['command'] == 'EXIT':\n comm.xbee_broadcast(xbee, \"IFO,%s abort takeoff.\" % shared.AGENT_ID)\n util.log_info(\"'EXIT' received. Abort takeoff.\")\n return False\n \n if wait_count >= 10:\n wait_count = 0\n comm.xbee_broadcast(xbee, \"IFO,%s awaiting HOME_ORIGIN.\" % shared.AGENT_ID)\n\n return True", "def back(self):\r\n if self.phone.isFullBlackBox():\r\n self.__navigateToIdle()\r\n return True\r\n\r\n currentApplication = self.phone.uiState.getCurrentApplication()\r\n\r\n if currentApplication != 'evo-home':\r\n self.phone.comment('exit.back()')\r\n if currentApplication == 'ntf-drawer':\r\n self.__backToIdleWithBackPress()\r\n else:\r\n self.__backToIdleWithSwipe()\r\n self.phone.delay(300, False)\r\n self.phone.uiState.getCurrentState(True)\r\n else:\r\n self.phone.comment('exit.back() is not done for %s' % currentApplication)", "def set_current_location_as_home(self):\n response = False\n while (not response) and (not rospy.is_shutdown()):\n response = self._set_home_proxy(True, 0., 0., 0., 0.).success\n self._rate.sleep()\n if response:\n rospy.loginfo(\n '%s is setting current location as the new home ...' %\n self.namespace)\n return response" ]
[ "0.6026265", "0.58931726", "0.5807826", "0.5787228", "0.57717633", "0.5766224", "0.5710147", "0.5660825", "0.56111115", "0.5610963", "0.5589869", "0.55510205", "0.553162", "0.5524561", "0.5508406", "0.548923", "0.54795265", "0.5445605", "0.54263294", "0.5425697", "0.53875643", "0.53857386", "0.53799784", "0.5347402", "0.534619", "0.5339104", "0.5326671", "0.5316595", "0.5295292", "0.52909625" ]
0.8233782
0
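The same safety predicate as the document above, restated with plain (x, y, theta) tuples instead of the swarmie odometry objects, so the two clauses of the query are easy to check by hand (the tuple interface is an assumption made for this sketch):

import math

def is_safe_to_back_up(rover, home, safe_dist=1.5):
    """rover is (x, y, theta); home is (x, y).

    Backing up is safe when the rover is more than safe_dist metres
    from home, or when it is close but facing home, since reversing
    then moves it away from the nest.
    """
    rx, ry, theta = rover
    hx, hy = home
    if math.hypot(hx - rx, hy - ry) > safe_dist:
        return True
    bearing = math.atan2(hy - ry, hx - rx)
    # shortest signed angle between the heading and the bearing to home
    angle_to_home = math.atan2(math.sin(bearing - theta),
                               math.cos(bearing - theta))
    return abs(angle_to_home) < math.pi / 2

print(is_safe_to_back_up((0.0, 0.0, 0.0), (1.0, 0.0)))      # close, facing home -> True
print(is_safe_to_back_up((0.0, 0.0, math.pi), (1.0, 0.0)))  # close, back to home -> False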
Turn to face a point in the odometry frame. The rover will attempt to turn through the shortest angle to face the point; if that fails (sonar detects something in the way, or the rover sees a type of tag it wants to stop for), it may back up and try to turn in the opposite direction to face the point.
def _face_point(self, point, ignore=Obstacle.PATH_IS_CLEAR):
    print('Facing next point...')

    # Make sure all sonar sensors are never ignored together here
    if ignore & Obstacle.IS_SONAR == Obstacle.IS_SONAR:
        ignore ^= Obstacle.IS_SONAR
        ignore |= Obstacle.SONAR_BLOCK

    # Try turning in the shortest direction:
    turn_angle = self.get_angle_to_face_point(point)

    if turn_angle > 0:
        # turning left, pay attention to left sensor only
        cur_ignore = ignore | Obstacle.SONAR_CENTER | Obstacle.SONAR_RIGHT
    else:
        # turning right, pay attention to right sensor only
        cur_ignore = ignore | Obstacle.SONAR_CENTER | Obstacle.SONAR_LEFT

    drive_result = self.swarmie.turn(
        turn_angle,
        ignore=cur_ignore,
        throw=False
    )

    # Return if successful, or if rover stopped for a cube or home tag
    if drive_result != MoveResult.OBSTACLE_SONAR:
        print("Completed turn or found an important AprilTag.")
        return drive_result

    # If possible, back up and try same direction again.
    if self._is_safe_to_back_up():
        dist = -0.15
        if (self.prev_state == Planner.STATE_AVOID_LEFT
                or self.prev_state == Planner.STATE_AVOID_RIGHT
                or self.prev_state == Planner.STATE_AVOID_REVERSE):
            dist = -0.25
        self.swarmie.drive(dist, ignore=ignore | Obstacle.IS_SONAR)

        turn_angle = self.get_angle_to_face_point(point)
        if turn_angle > 0:
            cur_ignore = (ignore | Obstacle.SONAR_CENTER
                          | Obstacle.SONAR_RIGHT)
        else:
            cur_ignore = (ignore | Obstacle.SONAR_CENTER
                          | Obstacle.SONAR_LEFT)

        drive_result = self.swarmie.turn(
            turn_angle,
            ignore=cur_ignore,
            throw=False
        )

        if drive_result != MoveResult.OBSTACLE_SONAR:
            print("Completed turn or found an important AprilTag.")
            return drive_result

    # Last resort, try turning in the other direction.
    if self._is_safe_to_back_up():
        dist = -0.15
        if (self.prev_state == Planner.STATE_AVOID_LEFT
                or self.prev_state == Planner.STATE_AVOID_RIGHT
                or self.prev_state == Planner.STATE_AVOID_REVERSE):
            dist = -0.25
        self.swarmie.drive(dist, ignore=ignore | Obstacle.IS_SONAR)

        turn_angle = self.get_angle_to_face_point(point)

        # But don't bother if the rover is already mostly facing the
        # right direction.
        if abs(turn_angle) < math.pi / 2:
            print("Didn't make the whole turn, but I'm close enough.")
            return drive_result

        print("Trying to turn other way.")
        turn_angle = angles.two_pi_complement(turn_angle)
        turns = []

        if turn_angle > 0:
            cur_ignore = ignore | Obstacle.SONAR_CENTER | Obstacle.SONAR_RIGHT
        else:
            cur_ignore = ignore | Obstacle.SONAR_CENTER | Obstacle.SONAR_LEFT

        # Split turn angle into two steps if abs val is greater than PI.
        # The driver API only makes individual turns <= PI.
        if turn_angle >= math.pi:
            turns.append(9 * math.pi / 10)
            turns.append(turn_angle - (9 * math.pi / 10))
        elif turn_angle <= -math.pi:
            turns.append(-9 * math.pi / 10)
            turns.append(turn_angle + (9 * math.pi / 10))
        else:
            turns.append(turn_angle)

        for turn in turns:
            drive_result = self.swarmie.turn(
                turn,
                ignore=cur_ignore,
                throw=False
            )
            if drive_result != MoveResult.SUCCESS:
                return drive_result

    return drive_result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_angle_to_face_point(self, point):\n start = self.swarmie.get_odom_location().get_pose()\n return angles.shortest_angular_distance(\n start.theta,\n math.atan2(point.y - start.y, point.x - start.x)\n )", "def look_at(self, point, connector):\n\n\n try:\n point_camera = self.tfBuffer.transform(point, 'head')\n except (tf2.LookupException, tf2.ConnectivityException, tf2.ExtrapolationException) as e:\n rospy.loginfo(\"Waiting for transform... ({})\".format(e))\n return\n\n # Calculate the head joint angles and clip them to the right range\n angle_pan, angle_tilt = connector.head.get_motor_goals_from_point(point_camera.point)\n angle_pan = np.clip(np.rad2deg(angle_pan), connector.head.min_pan, connector.head.max_pan)\n angle_tilt = np.clip(np.rad2deg(angle_tilt), connector.head.min_tilt, connector.head.max_tilt)\n\n current_pan_pos, current_tilt_pos = connector.head.get_current_head_pos()\n if (abs(current_pan_pos - angle_pan) < connector.head.delta and\n abs(current_tilt_pos - angle_tilt) < connector.head.delta):\n # We reached the position\n if rospy.get_time() - self.position_reached_time > connector.head.wait_time:\n # We waited long enough, go back\n return self.pop()\n else:\n # Represent remaining wait time\n self.publish_debug_data(\"remaining_wait_time\",connector.head.wait_time - (rospy.get_time() - self.position_reached_time))\n\n else:\n # We haven't reached it\n # Update when we should reach it\n self.position_reached_time = rospy.get_time()\n connector.head.send_motor_goals(angle_pan, 30.0, angle_tilt, 30.0)\n\n # Represent remaining tilt\n self.publish_debug_data(\"remaining_tilt\",abs(current_pan_pos - angle_pan))\n self.publish_debug_data(\"remaining_pan\",abs(current_tilt_pos - angle_tilt))", "def startface(self):\n self.fan = (self.position.x,self.position.y,self.position.z)", "def go_to(self, msg):\n rospy.loginfo('starting go_to')\n #assuming msg is the click\n #rotate to face base of arrow\n quat_orig = msg.pose.orientation\n quat_list = [ quat_orig.x, quat_orig.y, quat_orig.z, quat_orig.w]\n #calculates roll, pitch, and yaw from quaternion\n (roll , pitch , yaw) = euler_from_quaternion( quat_list )\n\n self.gtx = msg.pose.position.x\n trans_x = self.gtx - self.px\n self.gty = msg.pose.position.y\n trans_y = self.gty - self.py\n\n rotation = math.atan2(trans_y, trans_x)\n self.rotate(rotation-self.pth, 0.5)\n\n #go-to translation position\n distance = math.sqrt((trans_x)*(trans_x) + (trans_y)*(trans_y))\n\n self.drive(distance, 0.2)\n\n #rotate to orientation of arrow\n self.gtth = yaw -self.pth\n #self.rotate(self.gtth, 0.5)", "def change_face(self, face):\n if self.face is not None:\n self.face.remove_point(self)\n\n self.face = face\n self.face.add_point(self)", "def Move180(self):\n if self.facing == 0:\n self.facing = 1\n self.x -= self.stepLeft\n elif self.facing == 1:\n self.facing = 2\n self.y -= self.stepUp\n elif self.facing == 2:\n self.facing = 3\n self.x += self.stepRight\n elif self.facing == 3:\n self.facing = 0\n self.y += self.stepDown", "def my_go_to_pose1(robot, x, y, angle_z):\n\t# ####\n\t# TODO: Implement a function that makes the robot move to a desired pose\n\t# using the my_drive_straight and my_turn_in_place functions. 
This should\n\t# include a sequence of turning in place, moving straight, and then turning\n\t# again at the target to get to the desired rotation (Approach 1).\n\t# ####\n\tfirstRotationInRadians = (0 if y == 0 else 90) if x == 0 else math.atan(y/x)\n\tfirstRotation = firstRotationInRadians * 360.0/ (2.0 * math.pi)\n\tmy_turn_in_place(robot, firstRotation, 30)\n\trobot.stop_all_motors()\n\t# robot.drive_wheels(0, 0, duration=1)\n\t# time.sleep(1)\n\tmy_drive_straight(robot, math.sqrt(x*x + y*y), (-1 if x < 0 else 1) * 30)\n\trobot.stop_all_motors()\n\t# robot.drive_wheels(0, 0, duration=1)\n\t# time.sleep(1)\n\tmy_turn_in_place(robot, angle_z - firstRotation , 30)\n\ttime.sleep(1)", "def my_go_to_pose1(robot, x, y, angle_z):\n # Assuming positive y is to the robot's left and positive x is the direction the robot is already facing\n hypotenuse = numpy.sqrt(x*x + y*y)\n angle_offset_of_target_point = numpy.arcsin(y/hypotenuse)*180/math.pi\n my_turn_in_place(robot, angle_offset_of_target_point , 30)\n my_drive_straight(robot, hypotenuse, 50)\n my_turn_in_place(robot, angle_z-angle_offset_of_target_point, 30)\n time.sleep(1)", "def faceTowards(self, target):\n current_tile = self.current_tile()\n if(target and current_tile):\n x_dist = target.coordinates()[0] - current_tile.coordinates()[0]\n if x_dist == 0: return\n self.direction_val = x_dist/abs(x_dist)\n #TEMP\n if self.direction_val == -1:\n self.direction_id = 'left'\n if self.direction_val == 1:\n self.direction_id = 'right'", "def turn_to_pivot(self, goal_pivot):\n\n\t\tgoal_pivot = self.check_pivot_bounds(goal_pivot)\n\n\t\tturn_angle = goal_pivot - self.current_pivot # determines direction to turn\n\t\tprint(\"Turning {} degrees..\".format(turn_angle))\n\n\t\trospy.sleep(1)\n\n\t\tif turn_angle < -self.min_pivot_tolerance:\n\t\t\tself.turn_left(goal_pivot) # start turning left\n\t\telif turn_angle > self.min_pivot_tolerance:\n\t\t\tself.turn_right(goal_pivot) # start turning right\n\t\telse:\n\t\t\tprint(\"Turn angle is zero, canceling turn request..\")\n\t\t\treturn # don't turn if angle is 0", "def goto_point(self,targetx,targety):\n #if point is 0,0, make 0.01,0.01 to avoid divide by 0\n if targetx == 0 and targety == 0:\n targetx = 0.01\n targety = 0.01\n self.targetdistance = math.sqrt((self.currentx-targetx)**2 + (self.currenty-targety)**2)\n self.targetangle = math.atan2(targety-self.currenty,targetx-self.currentx)\n self.angledifference = self.angle_diff(self.targetangle,self.orientation)\n if abs(self.angledifference) < .10:\n self.turnspeed = 0\n else:\n self.turnspeed = math.tanh(self.kturn*self.angledifference)\n self.speed = math.tanh(self.targetdistance*self.kspeed/self.angledifference)\n if self.speed < 0:\n self.speed = 0\n self.linearVector = Vector3(x=self.speed, y=0.0, z=0.0)\n self.angularVector = Vector3(x = 0.0, y = 0.0, z = self.turnspeed)\n # print \"currentx = \" + str(self.currentx)\n # print \"currenty = \" + str(self.currenty)\n # print \"orientation = \" + str(self.orientation)\n # print \"targetangle = \" + str(self.targetangle)\n # print \"angledifference = \" + str(self.angledifference)\n #print \"turnspeed = \" + str(self.turnspeed)\n #print \"speed = \" + str(self.speed)", "def turned(self,angle: \"radians to turn\") -> Position:\n return Position(self.x, self.y, self.facing + angle)", "def my_go_to_pose2(robot, x, y, angle_z):\n # ####\n # TODO: Implement a function that makes the robot move to a desired pose\n # using the robot.drive_wheels() function to jointly move and rotate the \n # robot to 
reduce distance between current and desired pose (Approach 2).\n # ####\n pass", "def face_marker(self, marker, speed):\n self.log.debug(\"Facing marker %s at %.1f%%\", marker, speed)\n if marker.rot_y < 0:\n self.wheels.left(marker.rot_y, speed)\n else:\n self.wheels.right(marker.rot_y, speed)\n return\n m_orient = abs(marker.orientation.rot_y)\n if m_orient < 0:\n self.log.debug(\"Going left then right\")\n turn_1 = self.wheels.left\n turn_2 = self.wheels.right\n else:\n self.log.debug(\"Going right then left\")\n turn_1 = self.wheels.right\n turn_2 = self.wheels.left\n deg1 = abs(marker.rot_y)\n deg2 = m_orient * 0.5\n dist = (0.5 * marker.dist) / abs(math.cos(math.radians(m_orient)))\n turn_1(deg1, speed)\n self.wheels.forward(dist, speed)\n turn_2(deg2, speed)", "def my_go_to_pose3(robot, x, y, angle_z):\n if(numpy.abs(angle_z)>90):\n my_turn_in_place(robot, angle_z, 30)\n my_go_to_pose2(robot, x, y, 0)\n else:\n my_go_to_pose2(robot, x, y, angle_z)", "def FaceToFace(\n movablePlane: str, fixedPlane: str, flip: Boolean, clearance: float\n ) -> \"Feature\":\n return Feature()", "def getGazeDirection(self,img, facebox):\n facebox_list = facebox.getList()\n \n #extrat face box and downsampling\n face_img = img[facebox_list[1]: facebox_list[3],facebox_list[0]: facebox_list[2]]\n face_img = cv2.resize(face_img, (128, 128))\n face_img = cv2.cvtColor(face_img, cv2.COLOR_BGR2RGB)\n\n #marks detection\n marks = self.mark_detector.detect_marks([face_img])\n \n #scale and move back marks in original image coordinate\n marks *= (facebox_list[2] - facebox_list[0])\n marks[:, 0] += facebox_list[0]\n marks[:, 1] += facebox_list[1]\n shape = marks.astype(np.uint)\n\n #TODO:consider different points for surgery masks\n image_points = np.array([\n shape[30], # Nose tip\n shape[8], # Chin\n shape[36], # Left eye left corner\n shape[45], # Right eye right corne\n shape[48], # Left Mouth corner\n shape[54] # Right mouth corner\n ], dtype=\"double\")\n \n if self.debug > 1:\n for p in image_points:\n cv2.circle(img, (int(p[0]), int(p[1])), 3, (0,0,255), -1)\n\n #Solving PnP\n dist_coeffs = np.zeros((4,1)) # Assuming no lens distortion\n (success, rotation_vector, translation_vector) = cv2.solvePnP(self.model_points, image_points, self.camera_matrix, dist_coeffs, flags=cv2.SOLVEPNP_UPNP)\n \n #Get a ortogal to tha face plane - x1 and x2 are two points definig the line in the projected space\n #TODO: remove and make a line going out from eyes\n \n \n # Calculate euler angle\n rotation_mat, _ = cv2.Rodrigues(rotation_vector)\n pose_mat = cv2.hconcat((rotation_mat, translation_vector))\n _, _, _, _, _, _, euler_angles = cv2.decomposeProjectionMatrix(pose_mat)\n\n\n x1, x2 = FaceMarksDetector.computeLineOfSigth(img, rotation_vector, translation_vector, self.camera_matrix)\n\n if self.debug > 0:\n #display the line\n cv2.line(img, tuple(x1), tuple(x2), (255, 255, 0), 2)\n\n for (x, y) in shape:\n cv2.circle(img, (x, y), 4, (255, 255, 0), -1)\n\n if self.debug > 0:\n self.mark_detector.draw_marks(img, marks, color=(0, 255, 0))\n \n return marks, (x1, x2) , euler_angles", "def seek_behaviour(self):\n x, y = (int (self.posicao[0]),int(self.posicao[1]))\n nx,ny = tuple(pontos[self.nextpoint])\n rot = self.rotacao\n direction = pontos[self.nextpoint]-self.posicao", "def follow(self):\n cone_ranges = self.ranges[self.cone_left:] + self.ranges[:self.cone_right]\n nearest_deg = 0\n nearest_deg_dist = self.follow_max + 1\n for i, x in enumerate(cone_ranges):\n if (x != 0) and (x < nearest_deg_dist):\n nearest_deg = i - 
(self.cone_width/2)\n nearest_deg_dist = x\n if nearest_deg_dist < self.follow_min:\n self.center(degree=nearest_deg)\n elif nearest_deg_dist < self.follow_max:\n follow_speed = (nearest_deg_dist - self.follow_min)/(self.follow_max - self.follow_min)\n self.center(speed=follow_speed, degree=nearest_deg)\n else:\n self.current_state = \"wait\"", "def steerright(self):\n self.direction = self.direction-self.steering\n if self.direction < 0:\n self.direction = 360-90\n self.image, self.rect = rot_center(self.image_orig,self.rect,self.direction)", "def viewFollowPoint(self,follow_point_msg):\n marker = Marker()\n marker.header.frame_id = self.veh_name\n marker.ns = self.veh_name + \"/follow_point\"\n marker.id = 0 \n marker.action = Marker.ADD\n marker.type = Marker.SPHERE\n marker.lifetime = rospy.Duration.from_sec(5.0)\n marker.pose.position.z = 0 \n marker.pose.position.x = follow_point_msg.x\n marker.pose.position.y = follow_point_msg.y\n marker.color.a = 1.0\n marker.scale.x = 0.1\n marker.scale.y = 0.1\n marker.scale.z = 0.1\n marker.color.r = 0 \n marker.color.g = 1 \n marker.color.b = 0 \n self.pub_follow_point.publish(marker)", "def forward_character():\r\n set_point(point()+1)", "def turn(self, dir):\n if dir.y < 0: # move up\n self.surf = self.image_dir['UP']\n if dir.y > 0: # move down\n self.surf = self.image_dir['DOWN']\n if dir.x < 0: # move left\n self.surf = self.image_dir['LEFT']\n if dir.x > 0: # move right\n self.surf = self.image_dir['RIGHT']\n\n return self.surf", "def steerleft(self):\n self.direction = self.direction+self.steering\n if self.direction > 360:\n self.direction = 0+90\n self.image, self.rect = rot_center(self.image_orig,self.rect,self.direction)", "def my_go_to_pose2(robot, x, y, angle_z):\n\t# ####\n\t# TODO: Implement a function that makes the robot move to a desired pose\n\t# using the robot.drive_wheels() function to jointly move and rotate the \n\t# robot to reduce distance between current and desired pose (Approach 2).\n\t# ####\n\t\n\tabsoluteTargetPosition = (robot.pose.position.x + x, robot.pose.position.y + y, robot.pose.rotation.angle_z.degrees + angle_z)\n\t\n\twhile(math.sqrt(x*x + y*y) > 50.0):\n\t\t# print(\"(x, y, angle_z) = (%i,%i,%i)\" % (x, y, angle_z))\n\t\tfirstRotationInRadians = (0 if y == 0 else 90) if x == 0 else math.atan(y/x)\n\t\tleftMotor = 10 * (2 * x - angle_z * (2 * math.pi / 360.0) * get_distance_between_wheels() * math.cos(firstRotationInRadians)) / (2 * get_front_wheel_radius() * math.cos(firstRotationInRadians))\n\t\trightMotor = 10 * (2 * x + angle_z * (2 * math.pi / 360.0) * get_distance_between_wheels() * math.cos(firstRotationInRadians)) / (2 * get_front_wheel_radius() * math.cos(firstRotationInRadians))\n\t\t# print(\"(leftMotor, rightMotor) = (%i,%i)\" % (leftMotor, rightMotor))\n\t\tangle_delta = get_front_wheel_radius() * (rightMotor - leftMotor) / get_distance_between_wheels()\n\t\tx_delta = get_front_wheel_radius() * math.cos(angle_z * 2.0 * math.pi / 360.0) * (leftMotor + rightMotor) / 2.0\n\t\ty_delta = get_front_wheel_radius() * math.sin(angle_z * 2.0 * math.pi / 360.0) * (leftMotor + rightMotor) / 2.0\n\t\t# print(\"angle_delta %i\" % angle_delta)\n\t\t# x = x - get_front_wheel_radius() * math.cos(angle_delta) * (leftMotor + rightMotor) / 2.0\n\t\t# y = y - get_front_wheel_radius() * math.sin(angle_delta) * (leftMotor + rightMotor) / 2.0\n\t\t# print(\"(x, y, angle_z) = (%i,%i,%i)\" % (x, y, angle_z))\n\t\t# angle_z = angle_z + angle_delta * (360.0/(2 * math.pi))\n\t\t# print(\"(x, y, angle_z) = (%i,%i,%i)\" 
% (x, y, angle_z))\n\t\trobot.drive_wheels(leftMotor, rightMotor, duration = 1)\n\t\trobot.stop_all_motors()\n\t\t# time.sleep(1)\n\t\tx = absoluteTargetPosition[0] - robot.pose.position.x\n\t\ty = absoluteTargetPosition[1] - robot.pose.position.y\n\t\tangle_z = absoluteTargetPosition[2] - robot.pose.rotation.angle_z.degrees\n\t\t# print(\"(x, y, angle_z) = (%i,%i,%i)\" % (x, y, angle_z))\n\t\trobot.stop_all_motors()\n\t\t# robot.drive_wheels(0,0)", "def my_go_to_pose3(robot, x, y, angle_z):\n\t# ####\n\t# TODO: Implement a function that makes the robot move to a desired pose\n\t# as fast as possible. You can experiment with the built-in Cozmo function\n\t# (cozmo_go_to_pose() above) to understand its strategy and do the same.\n\t# ####\n\tif angle_z > 90 or angle_z < -90: # if it's beyond a simple differential drive towards the pose (e.g. involves a turn at the end), then just drive to it and turn.\n\t\tmy_go_to_pose1(robot, x, y, angle_z)\n\t\t# my_go_to_pose1(robot, 0, 0, angle_z)\n\t\t# my_go_to_pose2(robot, x, y, 0)\n\telse:\n\t\tmy_go_to_pose2(robot, x, y, angle_z)", "def react_to_faces(faces):\n if (len(faces) == 1):\n # Get the location of the face (one of six positions)\n face_loc = get_location(faces[0].bbox, vision.VIDEO_SIZE)\n # Set the Raspimon pose\n if face_loc is not None:\n sense.set_pixels(VOLT_POSES[face_loc])", "def cozmo_turn_in_place(robot, angle, speed):\n\trobot.turn_in_place(degrees(angle), speed=degrees(speed)).wait_for_completed()", "def advance(self):\r\n #if see if the UFO is almost at the edge of the screen\r\n if (self.center.x >= SCREEN_WIDTH-20 or self.center.y >= SCREEN_HEIGHT-20):\r\n #if it is change the velocity to negative to reverse direction\r\n self.velocity.dx *= -2\r\n self.velocity.dy *= -2\r\n \r\n # set x equal to x plus dx\r\n self.center.x += self.velocity.dx\r\n # set y equal to y plus dy\r\n self.center.y += self.velocity.dy\r\n #draw the flying object at its new point.\r\n self.draw()", "def face(self, newdir):\n if not (newdir % 180 == self.dir % 180):\n self.dir = newdir" ]
[ "0.5929196", "0.58939385", "0.5709615", "0.5670963", "0.5666145", "0.564073", "0.562725", "0.5587449", "0.5580974", "0.55736095", "0.55411446", "0.55125284", "0.5490269", "0.5477188", "0.5470099", "0.54675", "0.54084307", "0.53805107", "0.5369936", "0.5335595", "0.53157187", "0.52868176", "0.52845556", "0.5271953", "0.5265581", "0.52586496", "0.52534384", "0.5249665", "0.5236653", "0.52337074" ]
0.7282826
0
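The go-to-pose snippets in the record above all decompose the motion the same way: rotate toward the target point, drive the straight-line distance, then rotate into the final heading (the "Approach 1" named in the comments). A minimal standalone sketch of that decomposition, with no robot API attached; it uses math.atan2, which is correct in every quadrant, where several snippets above guard math.atan(y/x) by hand:

import math

def turn_drive_turn(x, y, angle_z_deg):
    # Decompose a relative pose (x, y, final heading in degrees) into
    # (first rotation in degrees, straight-line distance, second rotation in degrees).
    first_turn = math.degrees(math.atan2(y, x))  # atan2 handles x == 0 and all quadrants
    distance = math.hypot(x, y)
    second_turn = angle_z_deg - first_turn
    return first_turn, distance, second_turn

# Example: a point 100 mm ahead and 100 mm to the left, ending rotated 90 degrees:
# turn_drive_turn(100.0, 100.0, 90.0) -> (45.0, 141.42..., 45.0)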
Try to get the rover to the goal location. Returns when at the goal or if the home target is found. !! avoid_targets and avoid_home shouldn't both be set to True. avoid_home will be set to False in this case. !!
def drive_to(self, goal, tolerance=0.0, tolerance_step=0.5, max_attempts=10, avoid_targets=True, avoid_home=False, use_waypoints=True, start_location=None, distance_threshold=None): print('\nRequest received') self.fail_count = 0 self.tolerance = tolerance self.avoid_targets = avoid_targets if avoid_targets is True and avoid_home is True: avoid_home = False self.avoid_home = avoid_home current_ignore = Obstacle.IS_SONAR if self.avoid_targets is True: current_ignore |= Obstacle.TAG_TARGET elif self.avoid_home is True: current_ignore |= Obstacle.TAG_HOME self.goal.x = goal.x self.goal.y = goal.y self.cur_loc = self.swarmie.get_odom_location() self.current_state = Planner.STATE_IDLE self.prev_state = Planner.STATE_IDLE while (not self.cur_loc.at_goal(self.goal, Planner.DISTANCE_OK + self.tolerance) and self.fail_count < max_attempts): if use_waypoints is True: # get new plan and try to drive to first point in it point = self._get_next_waypoint(tolerance_step) else: point = goal self.prev_state = self.current_state self.current_state = Planner.STATE_DRIVE # Turn to approximate goal heading while ignoring sonar and tags # helps to prevent rover from trying to jump around obstacles # before it even starts along its new path self.result = self._face_point( point, ignore=current_ignore ^ Obstacle.IS_SONAR ) if self.result == MoveResult.SUCCESS: self.result = self.swarmie.drive_to( point, ignore=Obstacle.SONAR_BLOCK, throw=False ) if self.result == MoveResult.SUCCESS: # Success, we got to our waypoint, or got ourselves out of # whatever pickle we were just in. # Just get a new plan and drive to next point self.fail_count = 0 self.prev_state = self.current_state self.current_state = Planner.STATE_IDLE print('Successfully drove to first point in nav plan.') # otherwise, something went wrong or we found home elif self.result == MoveResult.OBSTACLE_HOME: self.set_home_locations() # get around the home tag obstacle count = 0 # Give the rover 3 tries to avoid any tags nearby before # getting a new nav plan. MoveResult.OBSTACLE_SONAR takes # priority in the driver code, so it should be safe to continue # this loop if the MoveResult is just an OBSTACLE_HOME # self.fail_count may exceed limit here, but I'll let it go while count < 3 and self.result == MoveResult.OBSTACLE_HOME: print('\nObstacle: Found Home.') count += 1 self.fail_count += 1 detections = self.swarmie.get_latest_targets().detections inside_home = self.is_inside_home_ring(detections) if inside_home: print('\nGetting out of the home ring!!') angle, dist = self.get_angle_and_dist_to_escape_home( detections ) self.swarmie.turn( angle, ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION ) self.result = self.swarmie.drive( dist, ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION ) if self.avoid_home is False: # turn back around self.swarmie.turn( math.pi, ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION ) print('Obstacle: Found Home.') return MoveResult.OBSTACLE_HOME else: if self.avoid_home is False: print('Obstacle: Found Home.') return MoveResult.OBSTACLE_HOME self.result = self._avoid_tag(id=256, ignore=current_ignore) elif self.result == MoveResult.OBSTACLE_TAG: # get around the tag obstacle count = 0 # Give the rover 3 tries to avoid any tags nearby before # getting a new nav plan. 
MoveResult.OBSTACLE_SONAR takes # priority in the driver code, so it should be safe to continue # this loop if the MoveResult is just an OBSTACLE_TAG # self.fail_count may exceed limit here, but I'll let it go while count < 3 and self.result == MoveResult.OBSTACLE_TAG: print('\nObstacle: Found a Tag.') if self.avoid_targets is False: if not self.sees_home_tag(): return self.result count += 1 self.fail_count += 1 self.result = self._avoid_tag(id=0, ignore=current_ignore) elif self.result == MoveResult.OBSTACLE_SONAR: # Check for home and tag obstacles just to be safe, because # sonar MoveResults take priority, and would mask a home or # target tag in view. obstacle = self.swarmie.get_obstacle_condition() if (obstacle & Obstacle.TAG_HOME == Obstacle.TAG_HOME and self.avoid_home is False): self.set_home_locations() return MoveResult.OBSTACLE_HOME if (obstacle & Obstacle.TAG_TARGET == Obstacle.TAG_TARGET and self.avoid_targets is False): return MoveResult.OBSTACLE_TAG # get around the sonar obstacle self.fail_count += 1 print('\nObstacle: Sonar.') left_blocked, center_blocked, right_blocked = \ self._check_sonar_obstacles() if (not left_blocked and not center_blocked and not right_blocked): print('\nFake sonar obstacle??') pass # 'fake' obstacle? elif not left_blocked and center_blocked and right_blocked: print('Left looks clear, turning left.') self.prev_state = self.current_state self.current_state = Planner.STATE_AVOID_LEFT self._go_around(math.pi / 4, 0.7) # self.swarmie.drive_to(point, throw=False) elif left_blocked and center_blocked and not right_blocked: print('Right looks clear, turning right.') self.prev_state = self.current_state self.current_state = Planner.STATE_AVOID_RIGHT self._go_around(-math.pi / 4, 0.7) # self.swarmie.drive_to(point, throw=False) elif left_blocked and not center_blocked and not right_blocked: print('Only left blocked, turning a little right.') self.prev_state = self.current_state self.current_state = Planner.STATE_AVOID_RIGHT self._go_around(-math.pi / 6, 0.6) # self.swarmie.drive_to(point, throw=False) elif not left_blocked and not center_blocked and right_blocked: print('Only right blocked, turning a little left.') self.prev_state = self.current_state self.current_state = Planner.STATE_AVOID_LEFT self._go_around(math.pi / 6, 0.6) # self.swarmie.drive_to(point, throw=False) else: print('Neither left or right look clear.') # Only back up if we're far enough away from home for it # to be safe. Don't want to back up into the nest! if self._is_safe_to_back_up(): print('Backing up.') self.swarmie.drive( -0.3, ignore=Obstacle.IS_SONAR, throw=False ) if (self.current_state == Planner.STATE_AVOID_RIGHT or self.prev_state == Planner.STATE_AVOID_RIGHT): self.prev_state = self.current_state self.current_state = Planner.STATE_AVOID_RIGHT self.clear(-math.pi / 4, ignore=current_ignore, reset_heading=False) self._go_around(-math.pi / 4, 0.75) else: self.prev_state = self.current_state self.current_state = Planner.STATE_AVOID_LEFT self.clear(math.pi / 4, ignore=current_ignore, reset_heading=False) self._go_around(math.pi / 4, 0.75) elif self.result == MoveResult.PATH_FAIL: # shit, hope we can back up if this ever happens self.fail_count += 1 print('\nPath Failure. 
Backing up.') self.prev_state = self.current_state self.current_state = Planner.STATE_AVOID_REVERSE self.swarmie.drive( -0.5, ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION, throw=False ) self.cur_loc = self.swarmie.get_odom_location() if self.fail_count >= max_attempts: print('Failed to drive to goal {} times.'.format( max_attempts) ) raise PathException(MoveResult.PATH_FAIL) if start_location is not None: current_loc = self.cur_loc.get_pose() dist = math.sqrt((start_location.x - current_loc.x) ** 2 + (start_location.y - current_loc.y) ** 2) if dist > distance_threshold: raise PathException(MoveResult.PATH_FAIL) print('Successfully executed nav plan.') return MoveResult.SUCCESS
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _move_home_strategy(self):\n\n if self.last_status == \"Fail\":\n result = \"turnRight\"\n else:\n result = self.move_toward(self._home)\n if result is None:\n result = \"drop\"\n\n assert result is not None\n return result", "def search(world_state, robot_pose, goal_pose):\n if world_state.shape[0] == 0 or world_state.shape[1] == 0:\n print(\"Error, empty world_state!!!\")\n return None\n if not is_pos_valid(robot_pose, world_state.shape):\n print(\"Error, invalid robot_pose!!!\", robot_pose)\n return None\n if not is_pos_valid(goal_pose, world_state.shape):\n print(\"Error, invalid goal_pose!!!\", goal_pose)\n return None\n\n directions = [(-1, 0), (1, 0), (0, -1), (0, 1)] # orthogonal directions\n found = False\n\n x, y = robot_pose\n g = 0\n h = heuristic(robot_pose, goal_pose)\n f = g + h\n open = [[f, x, y]]\n came_from = {}\n came_from[robot_pose] = None\n cost_so_far = {}\n cost_so_far[robot_pose] = 0\n\n while open:\n open.sort() # sort based on f value\n current = open.pop(0)\n\n x, y = current[1:]\n g = cost_so_far[(x, y)]\n\n if (x, y) == goal_pose:\n found = True\n break\n else:\n # find available next positions\n for direction in directions:\n x2 = x + direction[0]\n y2 = y + direction[1]\n\n # check whether x2 and y2 are valid\n if not is_pos_valid((x2, y2), world_state.shape):\n continue\n\n g2 = g + 1\n if world_state[x2, y2] == 0 and ((x2, y2) not in cost_so_far or g2 < cost_so_far[(x2, y2)]):\n\n h2 = heuristic((x2, y2), goal_pose)\n f2 = g2 + h2\n open.append([f2, x2, y2])\n came_from[(x2, y2)] = (x, y)\n cost_so_far[(x2, y2)] = g2\n if found:\n path = [goal_pose]\n current = goal_pose\n while came_from[current]:\n current = came_from[current]\n path.append(current)\n\n path.reverse()\n return path\n\n else:\n return None", "def judge_goal(self):\n err_pos = math.sqrt((self.y_des - self.y)**2 +(self.x_des - self.x)**2)\n print(\"t= %s\" % rospy.get_time()+\"-----------\")\n print('destination position=['+str(self.x_des)+','+str(self.y_des)+\"]\")\n print('the current position=['+str(self.x)+','+str(self.y)+\"]\")\n print('the current yaw angle=['+str(self.yaw))\n print('distance to destination='+str(err_pos))\n\n if(err_pos < 0.8):\n print('reach goal!!!!!')\n self.goal_flag=1", "def search_paths_agent_to_goal(self, robot_x, robot_y, goal_x, goal_y, G, road_node_Nos, road_node_info,\n road_lines, road_directions, road_lines_num, node_edges):\n # add target node\n target_node_coordinate = np.zeros((1, 2))\n target_node_coordinate[0][0] = goal_x\n target_node_coordinate[0][1] = goal_y\n target_node = None\n\n for (key, value) in road_node_info.items():\n if math.sqrt((value[0]-target_node_coordinate[0][0])**2 + (value[1]-target_node_coordinate[0][1])**2) <= 0.01:\n target_node = key\n\n if target_node == 0:\n print(target_node)\n raise Exception(\"wrong target node\", target_node)\n\n # Check whether the robot is on the road node or not\n at_node = False\n for (key, value) in road_node_info.items():\n if key == 0:\n continue\n if value[0] == robot_x and value[1] == robot_y:\n at_node = True\n agent_node_No = key\n\n if at_node == False:\n # add agent node\n agent_node_No = 0\n agent_node_coordinate = np.zeros((1, 2))\n agent_node_coordinate[0][0] = robot_x\n agent_node_coordinate[0][1] = robot_y\n agent_node = dict(zip([agent_node_No], agent_node_coordinate))\n road_node_info.update(agent_node)\n\n # add node\n env_node_Nos = [agent_node_No] + road_node_Nos\n G.add_nodes_from(env_node_Nos)\n\n # add edges from agent to the nearest road line\n # calculate the 
distance from the agent to the lines\n agent_line_dist = []\n for i in range(road_lines_num):\n cross = (road_lines[i][2] - road_lines[i][0]) * (agent_node_coordinate[0][0] - road_lines[i][0]) \\\n + (road_lines[i][3] - road_lines[i][1]) * (agent_node_coordinate[0][1] - road_lines[i][1])\n if cross <= 0:\n agent_line_dist.append(np.sqrt((agent_node_coordinate[0][0] - road_lines[i][0]) ** 2\n + (agent_node_coordinate[0][1] - road_lines[i][1]) ** 2))\n continue\n\n d2 = (road_lines[i][2] - road_lines[i][0]) ** 2 + (road_lines[i][3] - road_lines[i][1]) ** 2\n if cross >= d2:\n agent_line_dist.append(np.sqrt((agent_node_coordinate[0][0] - road_lines[i][2]) ** 2\n + (agent_node_coordinate[0][1] - road_lines[i][3]) ** 2))\n continue\n r = cross / d2\n p0 = road_lines[i][0] + (road_lines[i][2] - road_lines[i][0]) * r\n p1 = road_lines[i][1] + (road_lines[i][3] - road_lines[i][1]) * r\n agent_line_dist.append(\n np.sqrt((agent_node_coordinate[0][0] - p0) ** 2 + (agent_node_coordinate[0][1] - p1) ** 2))\n\n # find the nearest line index\n agent_line_dist_shortest = float(\"inf\")\n agent_line_shortest_index = 0\n\n for index, item in enumerate(agent_line_dist):\n if item < agent_line_dist_shortest:\n agent_line_shortest_index = index\n agent_line_dist_shortest = item\n\n # find the shortest line's node\n agent_line_shortest_node0 = None\n agent_line_shortest_node1 = None\n\n for (key, value) in road_node_info.items():\n if value[0] == road_lines[agent_line_shortest_index][0] and value[1] == \\\n road_lines[agent_line_shortest_index][1]:\n agent_line_shortest_node0 = key\n if value[0] == road_lines[agent_line_shortest_index][2] and value[1] == \\\n road_lines[agent_line_shortest_index][3]:\n agent_line_shortest_node1 = key\n\n # add new edges from the agent node to road note\n if road_directions[agent_line_shortest_index] == 0:\n node_edges.append([agent_node_No, agent_line_shortest_node1, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node1][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node1][1] - agent_node_coordinate[0][1]) ** 2)}])\n elif road_directions[agent_line_shortest_index] == 1:\n node_edges.append([agent_node_No, agent_line_shortest_node0, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node0][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node0][1] - agent_node_coordinate[0][1]) ** 2)}])\n elif road_directions[agent_line_shortest_index] == 2:\n node_edges.append([agent_node_No, agent_line_shortest_node0, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node0][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node0][1] - agent_node_coordinate[0][1]) ** 2)}])\n node_edges.append([agent_node_No, agent_line_shortest_node1, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node1][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node1][1] - agent_node_coordinate[0][1]) ** 2)}])\n else:\n raise ValueError('wrong direction')\n\n G.add_edges_from(node_edges)\n simple_paths_list = list()\n if agent_node_No not in G or target_node not in G:\n has_path = False\n G.clear()\n else:\n if nx.has_path(G, source=agent_node_No, target=target_node):\n simple_paths = nx.shortest_simple_paths(G, source=agent_node_No, target=target_node, weight='len')\n\n for path in simple_paths:\n simple_paths_list.append(path)\n\n for path in simple_paths_list:\n if path[1] == agent_line_shortest_node1:\n path[0] = agent_line_shortest_node0\n elif path[1] == 
agent_line_shortest_node0:\n path[0] = agent_line_shortest_node1\n else:\n raise ValueError('First node Error!')\n\n remove_paths_list = list()\n for path in simple_paths_list:\n for path_rest in simple_paths_list[simple_paths_list.index(path) + 1:]:\n if path == path_rest[- len(path):]:\n remove_paths_list.append(path_rest)\n\n for remove_path in remove_paths_list:\n if remove_path in simple_paths_list:\n simple_paths_list.remove(remove_path)\n\n # Choose 1 simple paths\n if len(simple_paths_list) > 1:\n simple_paths_list = simple_paths_list[0:1]\n\n # remove edges from the agent node to road note\n if road_directions[agent_line_shortest_index] == 0:\n node_edges.remove([agent_node_No, agent_line_shortest_node1, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node1][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node1][1] - agent_node_coordinate[0][1]) ** 2)}])\n elif road_directions[agent_line_shortest_index] == 1:\n node_edges.remove([agent_node_No, agent_line_shortest_node0, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node0][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node0][1] - agent_node_coordinate[0][1]) ** 2)}])\n elif road_directions[agent_line_shortest_index] == 2:\n node_edges.remove([agent_node_No, agent_line_shortest_node0, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node0][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node0][1] - agent_node_coordinate[0][1]) ** 2)}])\n node_edges.remove([agent_node_No, agent_line_shortest_node1, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node1][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node1][1] - agent_node_coordinate[0][1]) ** 2)}])\n else:\n raise ValueError('wrong direction')\n\n has_path = True\n G.clear()\n else:\n # remove edges from the agent node to road note\n if road_directions[agent_line_shortest_index] == 0:\n node_edges.remove([agent_node_No, agent_line_shortest_node1, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node1][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node1][1] - agent_node_coordinate[0][1]) ** 2)}])\n elif road_directions[agent_line_shortest_index] == 1:\n node_edges.remove([agent_node_No, agent_line_shortest_node0, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node0][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node0][1] - agent_node_coordinate[0][1]) ** 2)}])\n elif road_directions[agent_line_shortest_index] == 2:\n node_edges.remove([agent_node_No, agent_line_shortest_node0, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node0][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node0][1] - agent_node_coordinate[0][1]) ** 2)}])\n node_edges.remove([agent_node_No, agent_line_shortest_node1, {'len': np.sqrt(\n (road_node_info[agent_line_shortest_node1][0] - agent_node_coordinate[0][0]) ** 2 + (\n road_node_info[agent_line_shortest_node1][1] - agent_node_coordinate[0][1]) ** 2)}])\n else:\n raise ValueError('wrong direction')\n\n has_path = False\n G.clear()\n else:\n G.add_edges_from(node_edges)\n simple_paths_list = list()\n # 判断站点是否在路网上\n if agent_node_No not in G or target_node not in G:\n has_path = False\n G.clear()\n else:\n # 判断站点和目标间是否存在路径\n if nx.has_path(G, source=agent_node_No, target=target_node):\n # 提取所有简单路径\n simple_paths = nx.shortest_simple_paths(G, source=agent_node_No, target=target_node, 
weight='len')\n\n for path in simple_paths:\n simple_paths_list.append(path)\n\n # 移除带有回环的路网\n remove_paths_list = list()\n for path in simple_paths_list:\n for path_rest in simple_paths_list[simple_paths_list.index(path) + 1:]:\n if path == path_rest[- len(path):]:\n remove_paths_list.append(path_rest)\n\n for remove_path in remove_paths_list:\n if remove_path in simple_paths_list:\n simple_paths_list.remove(remove_path)\n\n # 提取最多2条路径\n if len(simple_paths_list) > 2:\n simple_paths_list = simple_paths_list[0:2]\n\n # 确认存在路径\n has_path = True\n G.clear()\n else:\n # 不存在路径\n has_path = False\n G.clear()\n\n return simple_paths_list, has_path", "def make_goal(self):\n\n # TODO: prevent calculating positions too close to a wall\n # TODO: visualize this process better\n # TODO: publish goal in map frame to prevent errors if the robot moves in time\n global ms\n rospy.loginfo('Calculating navigation goal')\n\n if self.objective == 'discovery':\n dest = ms.dis_pt()\n elif self.objective == 'delivery':\n dest = ms.del_pt()\n\n # Transform can location into base_link\n pt = PointStamped(header=Header(stamp=rospy.Time(0), frame_id='map'), point=dest)\n self.destination = tf_listener.transformPoint(\"base_link\", pt).point # w.r.t self\n\n x, y = self.destination.x, self.destination.y\n theta = math.atan2(y, x)\n\n if self.objective == 'discovery':\n r = 1.0 # 1m back from target position\n elif self.objective == 'delivery':\n r = 0.5 # 0.5m back from target position, i.e. \"not eternally far away that it seems like a failure\"\n\n x -= r * math.cos(theta)\n y -= r * math.sin(theta)\n\n angle = Quaternion(0, 0, math.sin(theta / 2), math.cos(theta / 2))\n\n dest = PoseStamped(\n header=Header(frame_id='base_link'),\n pose=Pose(position=Point(x=x, y=y, z=0), orientation=angle))\n\n goal = MoveBaseGoal(target_pose=dest)\n\n return goal", "def search_path(self):\n\n nodes = [self.start]\n final_node = None\n \n count = 0\n while True:\n count += 1\n\n if count % self.pick_target == 0:\n pick = self.goal.pos[:2]\n else:\n pick = self.car.random_pos()[:2]\n \n nearest = self.get_nearest_node(nodes, pick)\n\n if count % self.check_dubins == 0:\n solutions = self.dubins.find_tangents(nearest.pos, self.goal.pos)\n dubins_route, cost, valid = self.dubins.best_tangent(solutions)\n \n if valid:\n final_node = nearest\n break\n\n phi = self.get_steering_angle(nearest.pos, pick)\n pos = nearest.pos\n branch = [pos[:2]]\n \n for i in range(self.max_steps):\n pos = self.car.step(pos, phi)\n branch.append(pos[:2])\n \n # check safety of route-----------------------\n if phi == 0:\n safe = self.dubins.is_straight_route_safe(nearest.pos, pos)\n else:\n d, c, r = self.car.get_params(nearest.pos, phi)\n safe = self.dubins.is_turning_route_safe(nearest.pos, pos, d, c, r)\n # --------------------------------------------\n \n if not safe:\n continue\n \n new_node = Node(pos, phi, i+1)\n \n if new_node in nodes:\n continue\n \n new_node.branch = branch\n new_node.parent = nearest\n nodes.append(new_node)\n \n route = self.backtracking(final_node) + dubins_route\n path = self.car.get_path(self.car.start_pos, route)\n print('Total iteration:', count)\n \n return path, nodes", "def search_best_goal_node(self):\n\n dist_to_goal_list = [self.calc_dist_to_goal(n.x, n.y) for n in self.node_list]\n goal_indexes = [\n dist_to_goal_list.index(i)\n for i in dist_to_goal_list\n if i <= self.expand_dis\n ]\n\n safe_goal_indexes = []\n for goal_index in goal_indexes:\n t_node = self.steer(self.node_list[goal_index], self.goal_node)\n if 
self.check_collision(t_node, self.obstacle_list):\n safe_goal_indexes.append(goal_index)\n\n if not safe_goal_indexes:\n return None\n\n min_cost = min([self.node_list[i].cost for i in safe_goal_indexes])\n for i in safe_goal_indexes:\n if self.node_list[i].cost == min_cost:\n return i\n\n return None", "def getGhostGoal(self, gameState):\n enemies = [gameState.getAgentState(i) for i in self.getOpponents(gameState)]\n ghost = [a for a in enemies if not a.isPacman and a.getPosition() != None]\n myPos = self.getCurrentObservation().getAgentState(self.index).getPosition()\n if len(ghost) > 0:\n dis = 9999\n nearestPacman = ghost[0]\n for p in ghost:\n temp = self.getMazeDistance(myPos, p.getPosition())\n if temp < dis:\n dis = temp\n nearestPacman = p\n return nearestPacman.getPosition(), dis\n else:\n return None, None", "def detected_goal(self):\n if self._home_goal.detected_entities:\n return team.Team.AWAY\n if self._away_goal.detected_entities:\n return team.Team.HOME\n return None", "def move_to(self, target):\n # type: (RoomPosition) -> None\n hive = self.home.hive\n home = self.find_home()\n origin = self.find_origin()\n\n total_distance = hive.honey.find_path_length(origin, target, self.new_movement_opts())\n\n min_distance_from_home = Infinity\n min_distance_to_origin = Infinity\n min_distance_to_target = movement.chebyshev_distance_room_pos(self.members_movement_order()[0].pos, target)\n max_distance_to_target = -Infinity\n any_hostiles = False\n for member in self.members:\n distance_to_home = movement.chebyshev_distance_room_pos(member.pos, home)\n distance_to_origin = movement.chebyshev_distance_room_pos(member.pos, origin)\n distance_to_target = movement.chebyshev_distance_room_pos(member.pos, target)\n if distance_to_home < min_distance_from_home:\n min_distance_from_home = distance_to_home\n if distance_to_target > max_distance_to_target:\n max_distance_to_target = distance_to_target\n if distance_to_origin < min_distance_to_origin:\n min_distance_to_origin = distance_to_origin\n if len(member.room.find(FIND_HOSTILE_CREEPS)):\n any_hostiles = True\n\n if min_distance_to_origin > 100:\n mv_order = self.members_movement_order()\n self.set_origin(mv_order[len(mv_order) - 1].pos)\n if min_distance_from_home < 50 and (max_distance_to_target < total_distance / 2):\n self.log(\"move_to: chose stage 0 (minimum distance from home: {}, maximum distance from home: {},\"\n \" total distance: {})\"\n .format(min_distance_from_home, max_distance_to_target, total_distance))\n self.move_to_stage_0(target)\n elif min_distance_to_target < 300 and any_hostiles:\n self.move_to_stage_2(target)\n elif min_distance_to_target > 60 or max_distance_to_target > 200:\n # self.log(\"move_to: chose stage 1 (minimum distance from home: {}, total distance: {}, \"\n # \"minimum distance to target: {}, maximum distance to target: {})\"\n # .format(min_distance_from_home, total_distance,\n # min_distance_to_target, max_distance_to_target))\n self.move_to_stage_1(target, any_hostiles)\n else:\n # self.log(\"move_to: chose stage 2 (minimum distance from home: {}, total distance: {}, \"\n # \"minimum distance to target: {}, maximum distance to target: {})\"\n # .format(min_distance_from_home, total_distance,\n # min_distance_to_target, max_distance_to_target))\n self.move_to_stage_2(target)", "def getBoarderGoal(self, gameState):\n boards = self.boarders\n myPos = self.getCurrentObservation().getAgentState(self.index).getPosition()\n if len(boards) > 0:\n dis = 9999\n nearestBoarder = boards[0]\n for b in 
boards:\n temp = self.getMazeDistance(myPos, b)\n if temp < dis:\n dis = temp\n nearestBoarder = b\n return nearestBoarder, dis\n else:\n return None, None", "def test_lands_on_goal_correctly():\n env = Four_Rooms_Environment(stochastic_actions_probability=0.0)\n env.reset()\n env.move_user(env.current_user_location, (3, 3))\n env.move_goal(env.current_goal_location, (2, 2))\n\n env.step(0)\n assert env.reward == env.step_reward_for_not_achieving_goal\n assert not env.done\n\n env.step(3)\n assert env.reward == env.reward_for_achieving_goal\n assert env.done\n\n env = Four_Rooms_Environment(stochastic_actions_probability=0.0)\n env.reset()\n env.move_user(env.current_user_location, (2, 3))\n env.move_goal(env.current_goal_location, (2, 8))\n for move in [2, 1, 1, 1, 1, 1, 0]:\n env.step(move)\n if move != 0:\n assert env.reward == env.step_reward_for_not_achieving_goal\n assert not env.done\n else:\n assert env.reward == env.reward_for_achieving_goal\n assert env.done", "def get_shortest_paths(distance_map: DistanceMap, agent_pos, agent_dir, max_depth: Optional[int] = None, agent_handle: Optional[int] = None) \\\n -> Dict[int, Optional[List[Waypoint]]]:\n shortest_paths = dict()\n\n def _shortest_path_for_agent(agent,agent_pos,agent_dir):\n if agent_pos is None :\n if agent.status == RailAgentStatus.READY_TO_DEPART:\n position = agent.initial_position\n elif agent.status == RailAgentStatus.ACTIVE:\n position = agent.position\n elif agent.status == RailAgentStatus.DONE:\n position = agent.target\n else:\n shortest_paths[agent.handle] = None\n return\n direction = agent.direction\n else :\n position = agent_pos\n direction = agent_dir \n shortest_paths[agent.handle] = []\n distance = math.inf\n depth = 0\n while (position != agent.target and (max_depth is None or depth < max_depth)):\n next_actions = get_valid_move_actions_(direction, position, distance_map.rail)\n best_next_action = None\n for next_action in next_actions:\n next_action_distance = distance_map.get()[\n agent.handle, next_action.next_position[0], next_action.next_position[\n 1], next_action.next_direction]\n if next_action_distance < distance:\n best_next_action = next_action\n distance = next_action_distance\n\n shortest_paths[agent.handle].append(Waypoint(position, direction))\n depth += 1\n\n # if there is no way to continue, the rail must be disconnected!\n # (or distance map is incorrect)\n if best_next_action is None:\n shortest_paths[agent.handle] = None\n return\n\n position = best_next_action.next_position\n direction = best_next_action.next_direction\n if max_depth is None or depth < max_depth:\n shortest_paths[agent.handle].append(Waypoint(position, direction))\n\n if agent_handle is not None:\n _shortest_path_for_agent(distance_map.agents[agent_handle],agent_pos,agent_dir)\n else:\n for agent in distance_map.agents:\n _shortest_path_for_agent(agent,agent_pos,agent_dir)\n\n return shortest_paths", "def find_goal(self):\n w, l, h = self.get_pos()\n gw, gl, gh = self.goal\n try:\n angle_deg = angle((w, l), (gw, gl))\n except ZeroDivisionError:\n if w > gw and l > gl:\n return 2\n elif w < gw and l < gl:\n return 5\n if -105 <= angle_deg <= -75:\n return 0\n elif -75 < angle_deg < 15:\n return 1\n elif -15 <= angle_deg <= 15:\n return 2\n elif 15 < angle_deg < 75:\n return 3\n elif 75 <= angle_deg <= 105:\n return 4\n else:\n return 5", "def spreadOutAndFindDot(self, gameState):\n # Here are some useful elements of the startState\n currentPosition = gameState.getPacmanPosition(self.index)\n foodList = 
gameState.getFood().asList()\n walls = gameState.getWalls()\n randomFood = []\n problem = []\n\n #problem = AnyFoodSearchProblem(gameState, self.index)\n\n # if min(manhattan(currentPosition, foodPosition) for foodPosition in food.asList()) > 10:\n # return [Directions.STOP]\n #print(\"self.targets = \", self.targets)\n if self.index == 0:\n TargetFood = ClosestFood(currentPosition, foodList)\n #self.targets.append(TargetFood)\n problem = PositionSearchProblem(gameState, 0, goal=TargetFood, start=currentPosition, warn=False, visualize=False)\n return search.aStarSearch(problem, manhattanHeuristic)\n if self.index == 1:\n TargetFood = ClosestFood(currentPosition, foodList)\n \"\"\"\n want to find a way to avoid both agents coming up with the same target. But the below doesn't work because\n each agent has their own self.targets. How to keep a common list of targets?\n \"\"\"\n # if TargetFood in self.targets:\n # tempFoodList = foodList.copy()\n # tempFoodList.pop(tempFoodList.index(TargetFood))\n # TargetFood = ClosestFood(currentPosition, tempFoodList)\n # self.targets.append(TargetFood)\n # else:\n # self.targets.append(TargetFood)\n problem = PositionSearchProblem(gameState, 1, goal=TargetFood, start=currentPosition, warn=False, visualize=False)\n return search.aStarSearch(problem, manhattanHeuristic)\n if self.index == 2:\n TargetFood = RandomFood(currentPosition, foodList)\n problem = PositionSearchProblem(gameState, 2, goal=TargetFood, start=currentPosition, warn=False, visualize=False)\n return search.aStarSearch(problem, manhattanHeuristic)\n if self.index == 3:\n TargetFood = RandomFood(currentPosition, foodList)\n problem = PositionSearchProblem(gameState, 3, goal=TargetFood, start=currentPosition, warn=False, visualize=False)\n return search.aStarSearch(problem, manhattanHeuristic)\n #return search.bfs(problem)\n\n #util.raiseNotDefined()", "def action(self):\n obs = self.observation\n\n action = None\n try:\n if SETTINGS_DEAD_CANT_THINK and obs.respawn_in > -1:\n self.debugMsg(\"Sleeping\")\n return (0,0,False)\n\n # Check if agent reached goal.\n if self.goal and point_dist(self.goal, obs.loc) < self.settings.tilesize:\n self.goal = None\n\n # If agent already has a goal\n # check if the motivation is still accurate\n if self.goal:\n self.validateMotivation()\n\n # Drive to where the user clicked\n if self.selected and self.observation.clicked:\n self.motivation = MOTIVATION_USER_CLICK\n self.goal = obs.clicked\n\n if self.goal is None:\n if self.strategy == STRATEGY_DEFENCE:\n action = self.action_defend()\n elif self.strategy == STRATEGY_OFFENCE:\n action = self.action_offence()\n else:\n action = self.action_normal()\n else:\n self.debugMsg(\"Goal already found: (%d,%d)\" % self.goal)\n except Exception:\n self.goal = None\n# self.debugMsg(\"Goal: %s, exception: %s\" % (self.goal, exp), True)\n \n if self.goal is None:\n self.goal = obs.loc\n\n self.updateTrendingSpot()\n if action is None:\n if self.goal == obs.loc:\n return (0,0,False)\n else:\n return self.getActionTriple()\n else:\n return action", "def find_good_paths(self):\n return self.robot_step((0,0),[])", "def is_at_goal(self):\n return self._current_loc.get_row() == BoardPath._goal_loc.get_row() and \\\n self._current_loc.get_column() == BoardPath._goal_loc.get_column()", "def agents_at_goal(self):\r\n return self.searchenv.conv.state_to_tile(self.searchstate.positions) == self.searchenv.goal_tile", "def set_home_locations(self):\n self.swarmie.set_home_gps_location(self.swarmie.get_gps_location())\n\n 
current_location = self.swarmie.get_odom_location()\n current_pose = current_location.get_pose()\n home_odom = Location(current_location.Odometry)\n\n detections = self.swarmie.get_latest_targets().detections\n try:\n for detection in detections:\n if detection.id == 256:\n see_home_tag = True\n home_detection = self._transform_to_odom(detection)\n\n quat = [home_detection.pose.orientation.x,\n home_detection.pose.orientation.y,\n home_detection.pose.orientation.z,\n home_detection.pose.orientation.w]\n _r, _p, yaw = tf.transformations.euler_from_quaternion(\n quat\n )\n yaw += math.pi / 2\n\n home_odom.Odometry.pose.pose.position.x = float(\n home_detection.pose.position.x + 0.5 * math.cos(yaw)\n )\n home_odom.Odometry.pose.pose.position.y = float(\n home_detection.pose.position.y + 0.5 * math.sin(yaw)\n )\n self.swarmie.set_home_odom_location(home_odom)\n return\n\n except tf.Exception:\n pass # use backup below\n\n # project home_odom location 50cm in front of rover's current location\n home_odom.Odometry.pose.pose.position.x = (\n current_pose.x + 0.5 * math.cos(current_pose.theta)\n )\n home_odom.Odometry.pose.pose.position.y = (\n current_pose.y + 0.5 * math.sin(current_pose.theta)\n )\n self.swarmie.set_home_odom_location(home_odom)\n return", "def _find_fastest_path(self):\n from simulator import Robot\n clone_robot = Robot(exploration_status=self._robot.exploration_status,\n facing=self._robot.facing,\n discovered_map=self._robot.discovered_map,\n real_map=[[0] * 15 for _ in range(20)])\n\n fastest_path_start_way_point = get_shortest_path_moves(clone_robot,\n start=(1, 1),\n goal=self._way_point)\n\n if fastest_path_start_way_point:\n for move in fastest_path_start_way_point:\n clone_robot.move_robot(move)\n\n before_way_point = previous_cell(clone_robot.center, clone_robot.facing)\n\n fastest_path_way_point_goal = get_shortest_path_moves(clone_robot,\n start=self._way_point,\n goal=(18, 13),\n before_start_point=before_way_point)\n\n return fastest_path_start_way_point + fastest_path_way_point_goal", "def check_reached_waypoint_goal(self):\n return self.control_instance.check_reached_waypoint_goal()", "def get_best_move_toward(self, target_ground):\n options_by_parent, cost_to_reach = self.a_star_search(start=self.ground, goal=target_ground)\n ground = target_ground\n\n # Can't reach it?\n if ground not in options_by_parent:\n return None\n\n while options_by_parent[ground] is not self.ground:\n ground = options_by_parent[ground]\n\n return ground.x - self.ground.x, ground.y - self.ground.y", "def determineNextMove(playerLocation, opponentLocation, coins):\n global packages, route_table, best_path, best_weight, route\n if len(best_path) == 0:\n current_package = packages.pop(0)\n exhaustive(current_package, playerLocation, [], 0, (route_table,dists))\n api.debug(best_path)\n return u.direction(playerLocation, best_path.pop(0))", "def check_goal(self):\n hero = self.objects[0]\n others = self.objects[1:]\n\n for other in others:\n if other.x == hero.x and other.y == hero.y:\n self.objects.remove(other)\n if other.reward == 1:\n self.objects.append(GameObject(self.__new_position(), 1,\n 1, 1, 1, \"goal\"))\n elif other.reward == -1:\n self.objects.append(GameObject(self.__new_position(), 1,\n 1, 0, -1, \"fire\"))\n return other.reward, False\n return 0.0, False", "def next_step(self, goal, traps=False): #TODO: test (maybe change to l1 dist?)\n kyu = PriorityQueue()\n kyu.put((0, self.player))\n came_from = {self.player: None}\n costs_agg = {self.player: 0}\n\n while not 
kyu.empty():\n curr = kyu.get()[1]\n if curr == goal: break\n\n for next in self.valid_neighbors(curr):\n new_cost = costs_agg[curr] + (5 if traps and self.traps[next] else 1)\n if next not in costs_agg.keys() or new_cost < costs_agg[next]:\n costs_agg[next] = new_cost\n kyu.put((new_cost + l2(next, goal), next))\n came_from[next] = curr\n \n if goal in came_from.keys():\n return came_from[goal]\n else:\n raise RuntimeWarning(\"no path between monster and player\")\n return goal", "def calculate_target_path(self):\n self.path = self.game.find_path(self, self.target)\n if not self.path:\n print(f\"{self.name} can't path to {self.target.name} {self.target.x}, {self.target.y}\")\n self.broken_target(self.target)\n self.target = None", "def heuristic(cell, goal):\n return math.hypot(goal.x - cell.x, goal.y - cell.y)", "def getSafeFoodGoal(self, gameState):\n food = self.safeFood\n # print(food)\n myPos = self.getCurrentObservation().getAgentState(self.index).getPosition()\n if len(food) > 0:\n dis = 9999\n nearestFood = food[0]\n for a in food:\n temp = self.getMazeDistance(myPos, a)\n if temp < dis:\n dis = temp\n nearestFood = a\n return nearestFood, dis\n else:\n return None, None", "def satisfied_waypoints(cls, home_pos, waypoints, uas_telemetry_logs):\n # Form utm for use as projection in distance calcluations.\n zone, north = distance.utm_zone(home_pos.latitude, home_pos.longitude)\n utm = distance.proj_utm(zone, north)\n\n # Reduce telemetry from telemetry to waypoint hits.\n # This will make future processing more efficient via data reduction.\n # While iterating, compute the best distance seen for feedback.\n best = {}\n hits = []\n for iu, start_log in enumerate(uas_telemetry_logs):\n end_log = None\n if iu + 1 < len(uas_telemetry_logs):\n end_log = uas_telemetry_logs[iu + 1]\n for iw, waypoint in enumerate(waypoints):\n dist = cls.closest_interpolated_distance(\n start_log, end_log, waypoint, utm)\n best[iw] = min(best.get(iw, dist), dist)\n score = cls.score_waypoint(dist)\n if score > 0:\n hits.append((iw, dist, score))\n # Remove redundant hits which wouldn't be part of best sequence.\n # This will make future processing more efficient via data reduction.\n hits = [\n max(g, key=lambda x: x[2])\n for _, g in itertools.groupby(hits, lambda x: x[0])\n ]\n\n # Find highest scoring sequence via dynamic programming.\n # Implement recurrence relation:\n # S(iw, ih) = s[iw, ih] + max_{k=[0,ih)} S(iw-1, k)\n dp = defaultdict(lambda: defaultdict(lambda: (0, None, None)))\n highest_total = None\n highest_total_pos = (None, None)\n for iw in xrange(len(waypoints)):\n for ih, (hiw, hdist, hscore) in enumerate(hits):\n # Compute score for assigning current hit to current waypoint.\n score = hscore if iw == hiw else 0.0\n # Compute best total score, which includes this match score and\n # best of all which could come before it.\n prev_iw = iw - 1\n total_score = score\n total_score_back = (None, None)\n if prev_iw >= 0:\n for prev_ih in xrange(ih + 1):\n (prev_total_score, _) = dp[prev_iw][prev_ih]\n new_total_score = prev_total_score + score\n if new_total_score > total_score:\n total_score = new_total_score\n total_score_back = (prev_iw, prev_ih)\n dp[iw][ih] = (total_score, total_score_back)\n # Track highest score seen.\n if total_score > highest_total:\n highest_total = total_score\n highest_total_pos = (iw, ih)\n # Traceback sequence to get scores and distance for score.\n scores = defaultdict(lambda: (0, None))\n cur_pos = highest_total_pos\n while cur_pos != (None, None):\n cur_iw, 
cur_ih = cur_pos\n hiw, hdist, hscore = hits[cur_ih]\n if cur_iw == hiw:\n scores[cur_iw] = (hscore, hdist)\n _, cur_pos = dp[cur_iw][cur_ih]\n\n # Convert to evaluation.\n waypoint_evals = []\n for iw, waypoint in enumerate(waypoints):\n score, dist = scores[iw]\n waypoint_eval = mission_pb2.WaypointEvaluation()\n waypoint_eval.id = iw\n waypoint_eval.score_ratio = score\n if dist is not None:\n waypoint_eval.closest_for_scored_approach_ft = dist\n if iw in best:\n waypoint_eval.closest_for_mission_ft = best[iw]\n waypoint_evals.append(waypoint_eval)\n return waypoint_evals" ]
[ "0.61134183", "0.59638315", "0.59265476", "0.5922117", "0.59131104", "0.5848963", "0.5831249", "0.58035004", "0.57902896", "0.57676834", "0.56982106", "0.56882787", "0.564525", "0.5587234", "0.5555771", "0.5531212", "0.5528642", "0.5487979", "0.5486528", "0.54817724", "0.54755807", "0.5470027", "0.54653347", "0.5460534", "0.54598325", "0.5454236", "0.544888", "0.54385984", "0.5438549", "0.54324484" ]
0.61140513
0
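The outer loop of drive_to above keeps replanning until the odometry pose is within Planner.DISTANCE_OK + tolerance of the goal, or until fail_count reaches max_attempts. The goal test itself reduces to a Euclidean distance check; a sketch under the assumption that DISTANCE_OK is a fixed constant (its actual value is not shown in this record):

import math

DISTANCE_OK = 0.3  # assumed value, for illustration only

def at_goal(cur_x, cur_y, goal_x, goal_y, tolerance=0.0):
    # True once the rover is close enough for drive_to to return SUCCESS.
    return math.hypot(goal_x - cur_x, goal_y - cur_y) <= DISTANCE_OK + tolerance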
Convenience wrapper around drive_to(). Drive the given distance while avoiding sonar and target obstacles. !! avoid_targets and avoid_home shouldn't both be set to True. avoid_home will be set to False in this case. !!
def drive(self, distance, tolerance=0.0, tolerance_step=0.5, max_attempts=10, avoid_targets=True, avoid_home=False, use_waypoints=True): self.cur_loc = self.swarmie.get_odom_location() start = self.cur_loc.get_pose() goal = Point() goal.x = start.x + distance * math.cos(start.theta) goal.y = start.y + distance * math.sin(start.theta) return self.drive_to( goal, tolerance=tolerance, tolerance_step=tolerance_step, max_attempts=max_attempts, avoid_targets=avoid_targets, avoid_home=avoid_home, use_waypoints=use_waypoints )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def drive_to(self, goal, tolerance=0.0, tolerance_step=0.5,\n max_attempts=10, avoid_targets=True, avoid_home=False,\n use_waypoints=True, start_location=None,\n distance_threshold=None):\n print('\\nRequest received')\n self.fail_count = 0\n self.tolerance = tolerance\n\n self.avoid_targets = avoid_targets\n if avoid_targets is True and avoid_home is True:\n avoid_home = False\n self.avoid_home = avoid_home\n\n current_ignore = Obstacle.IS_SONAR\n if self.avoid_targets is True:\n current_ignore |= Obstacle.TAG_TARGET\n elif self.avoid_home is True:\n current_ignore |= Obstacle.TAG_HOME\n\n self.goal.x = goal.x\n self.goal.y = goal.y\n\n self.cur_loc = self.swarmie.get_odom_location()\n self.current_state = Planner.STATE_IDLE\n self.prev_state = Planner.STATE_IDLE\n\n while (not self.cur_loc.at_goal(self.goal,\n Planner.DISTANCE_OK + self.tolerance)\n and self.fail_count < max_attempts):\n\n\n if use_waypoints is True:\n # get new plan and try to drive to first point in it\n point = self._get_next_waypoint(tolerance_step)\n else:\n point = goal\n\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_DRIVE\n # Turn to approximate goal heading while ignoring sonar and tags\n # helps to prevent rover from trying to jump around obstacles\n # before it even starts along its new path\n self.result = self._face_point(\n point,\n ignore=current_ignore ^ Obstacle.IS_SONAR\n )\n\n if self.result == MoveResult.SUCCESS:\n self.result = self.swarmie.drive_to(\n point,\n ignore=Obstacle.SONAR_BLOCK,\n throw=False\n )\n\n if self.result == MoveResult.SUCCESS:\n # Success, we got to our waypoint, or got ourselves out of\n # whatever pickle we were just in.\n # Just get a new plan and drive to next point\n self.fail_count = 0\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_IDLE\n print('Successfully drove to first point in nav plan.')\n\n # otherwise, something went wrong or we found home\n elif self.result == MoveResult.OBSTACLE_HOME:\n self.set_home_locations()\n\n # get around the home tag obstacle\n count = 0\n\n # Give the rover 3 tries to avoid any tags nearby before\n # getting a new nav plan. MoveResult.OBSTACLE_SONAR takes\n # priority in the driver code, so it should be safe to continue\n # this loop if the MoveResult is just an OBSTACLE_HOME\n # self.fail_count may exceed limit here, but I'll let it go\n while count < 3 and self.result == MoveResult.OBSTACLE_HOME:\n print('\\nObstacle: Found Home.')\n count += 1\n self.fail_count += 1\n\n detections = self.swarmie.get_latest_targets().detections\n inside_home = self.is_inside_home_ring(detections)\n if inside_home:\n print('\\nGetting out of the home ring!!')\n angle, dist = self.get_angle_and_dist_to_escape_home(\n detections\n )\n self.swarmie.turn(\n angle,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION\n )\n self.result = self.swarmie.drive(\n dist,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION\n )\n\n if self.avoid_home is False:\n # turn back around\n self.swarmie.turn(\n math.pi,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION\n )\n print('Obstacle: Found Home.')\n return MoveResult.OBSTACLE_HOME\n else:\n if self.avoid_home is False:\n print('Obstacle: Found Home.')\n return MoveResult.OBSTACLE_HOME\n\n self.result = self._avoid_tag(id=256,\n ignore=current_ignore)\n\n elif self.result == MoveResult.OBSTACLE_TAG:\n # get around the tag obstacle\n count = 0\n\n # Give the rover 3 tries to avoid any tags nearby before\n # getting a new nav plan. 
MoveResult.OBSTACLE_SONAR takes\n # priority in the driver code, so it should be safe to continue\n # this loop if the MoveResult is just an OBSTACLE_TAG\n # self.fail_count may exceed limit here, but I'll let it go\n while count < 3 and self.result == MoveResult.OBSTACLE_TAG:\n print('\\nObstacle: Found a Tag.')\n\n if self.avoid_targets is False:\n if not self.sees_home_tag():\n return self.result\n\n count += 1\n self.fail_count += 1\n\n self.result = self._avoid_tag(id=0,\n ignore=current_ignore)\n\n elif self.result == MoveResult.OBSTACLE_SONAR:\n # Check for home and tag obstacles just to be safe, because\n # sonar MoveResults take priority, and would mask a home or\n # target tag in view.\n obstacle = self.swarmie.get_obstacle_condition()\n\n if (obstacle & Obstacle.TAG_HOME == Obstacle.TAG_HOME and\n self.avoid_home is False):\n self.set_home_locations()\n return MoveResult.OBSTACLE_HOME\n\n if (obstacle & Obstacle.TAG_TARGET == Obstacle.TAG_TARGET and\n self.avoid_targets is False):\n return MoveResult.OBSTACLE_TAG\n\n # get around the sonar obstacle\n self.fail_count += 1\n\n print('\\nObstacle: Sonar.')\n left_blocked, center_blocked, right_blocked = \\\n self._check_sonar_obstacles()\n\n if (not left_blocked and\n not center_blocked and not right_blocked):\n print('\\nFake sonar obstacle??')\n pass # 'fake' obstacle?\n\n elif not left_blocked and center_blocked and right_blocked:\n print('Left looks clear, turning left.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_LEFT\n self._go_around(math.pi / 4, 0.7)\n # self.swarmie.drive_to(point, throw=False)\n\n elif left_blocked and center_blocked and not right_blocked:\n print('Right looks clear, turning right.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_RIGHT\n self._go_around(-math.pi / 4, 0.7)\n # self.swarmie.drive_to(point, throw=False)\n\n elif left_blocked and not center_blocked and not right_blocked:\n print('Only left blocked, turning a little right.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_RIGHT\n self._go_around(-math.pi / 6, 0.6)\n # self.swarmie.drive_to(point, throw=False)\n\n elif not left_blocked and not center_blocked and right_blocked:\n print('Only right blocked, turning a little left.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_LEFT\n self._go_around(math.pi / 6, 0.6)\n # self.swarmie.drive_to(point, throw=False)\n\n else:\n print('Neither left or right look clear.')\n\n # Only back up if we're far enough away from home for it\n # to be safe. Don't want to back up into the nest!\n if self._is_safe_to_back_up():\n print('Backing up.')\n self.swarmie.drive(\n -0.3,\n ignore=Obstacle.IS_SONAR,\n throw=False\n )\n\n if (self.current_state == Planner.STATE_AVOID_RIGHT or\n self.prev_state == Planner.STATE_AVOID_RIGHT):\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_RIGHT\n self.clear(-math.pi / 4, ignore=current_ignore,\n reset_heading=False)\n self._go_around(-math.pi / 4, 0.75)\n\n else:\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_LEFT\n self.clear(math.pi / 4, ignore=current_ignore,\n reset_heading=False)\n self._go_around(math.pi / 4, 0.75)\n\n elif self.result == MoveResult.PATH_FAIL:\n # shit, hope we can back up if this ever happens\n self.fail_count += 1\n\n print('\\nPath Failure. 
Backing up.')\n self.prev_state = self.current_state\n self.current_state = Planner.STATE_AVOID_REVERSE\n self.swarmie.drive(\n -0.5,\n ignore=Obstacle.IS_SONAR | Obstacle.IS_VISION,\n throw=False\n )\n\n self.cur_loc = self.swarmie.get_odom_location()\n\n if self.fail_count >= max_attempts:\n print('Failed to drive to goal {} times.'.format(\n max_attempts)\n )\n raise PathException(MoveResult.PATH_FAIL)\n\n if start_location is not None:\n current_loc = self.cur_loc.get_pose()\n dist = math.sqrt((start_location.x - current_loc.x) ** 2\n + (start_location.y - current_loc.y) ** 2)\n if dist > distance_threshold:\n raise PathException(MoveResult.PATH_FAIL)\n\n print('Successfully executed nav plan.')\n return MoveResult.SUCCESS", "def cozmo_drive_straight(robot, dist, speed):\n robot.drive_straight(distance_mm(dist), speed_mmps(speed)).wait_for_completed()", "def cozmo_drive_straight(robot, dist, speed):\n\trobot.drive_straight(distance_mm(dist), speed_mmps(speed)).wait_for_completed()", "def my_drive_straight(robot, dist, speed):\n t = (1/speed) * numpy.abs(dist) + 0.6 # Constant offset to make up for lack of accuracy\n speed = speed if dist > 0 else -speed\n robot.drive_wheels(speed, speed, duration=t)", "def my_drive_straight(robot, dist, speed):\n\t# ####\n\t# TODO: Implement your version of a driving straight function using the\n\t# robot.drive_wheels() function.\n\t# ####\n\ttimeToWait = dist / abs(speed)\n\trobot.drive_wheels(speed, speed, duration=timeToWait)\n\t# time.sleep(timeToWait)\n\trobot.stop_all_motors()\n\t# robot.drive_wheels(0, 0)", "def _go_around(self, angle, dist):\n ignore = Obstacle.IS_SONAR\n if self.avoid_targets is True:\n ignore |= Obstacle.TAG_TARGET\n elif self.avoid_home is True:\n # Need to ignore both for this because target tags are likely to\n # be in view inside the home nest.\n ignore |= Obstacle.TAG_TARGET | Obstacle.TAG_HOME\n\n cur_heading = self.swarmie.get_odom_location().get_pose().theta\n turn_result = self.swarmie.set_heading(\n cur_heading + angle,\n ignore=ignore,\n throw=False\n )\n drive_result = self.swarmie.drive(dist,\n ignore=Obstacle.SONAR_BLOCK,\n throw=False)\n\n return turn_result, drive_result", "def drive(self, distance):\n if random.uniform(0.0, 100.0) < self.reliability:\n distance_driven = super().drive(distance)\n else:\n distance_driven = 0\n\n return distance_driven", "def move_straight(robot, dist):\n journey = Journey(robot, distance=dist)\n journey.start()\n robot.position.move(dist)\n sleep(0.5)", "def smooth_drive(self, distance, linear_speed):\n ### EXTRA CREDIT\n # TODO\n pass # delete this when you implement your code", "def drive(self, distance=0):\n if random.uniform(1, 100) <= self.reliability:\n distance_driven = super().drive(distance)\n return distance_driven\n return 0", "def drive_vehicle(x_offset, y_offset, run_cmd, width, front_distance_sensor_1, front_distance_sensor_2):\n # Initialise the PCA9685 using the default address (0x40).\n pwm = Adafruit_PCA9685.PCA9685()\n\n # set number of pins for direction of drives\n left_fwd_pin_1 = 4\n left_fwd_pin_2 = 17\n left_bwd_pin_1 = 18\n left_bwd_pin_2 = 23\n\n right_fwd_pin_1 = 22\n right_fwd_pin_2 = 27\n right_bwd_pin_1 = 24\n right_bwd_pin_2 = 25\n\n GPIO.setup(left_fwd_pin_1, GPIO.OUT) # left forward 1 pin\n GPIO.setup(left_fwd_pin_2, GPIO.OUT) # left forward 2 pin\n GPIO.setup(left_bwd_pin_1, GPIO.OUT) # left backward 1 pin\n GPIO.setup(left_bwd_pin_2, GPIO.OUT) # left backward 2 pin\n\n GPIO.setup(right_fwd_pin_1, GPIO.OUT) # right forward 1 pin\n 
GPIO.setup(right_fwd_pin_2, GPIO.OUT) # right forward 2 pin\n GPIO.setup(right_bwd_pin_1, GPIO.OUT) # right backward 1 pin\n GPIO.setup(right_bwd_pin_2, GPIO.OUT) # right backward 2 pin\n\n left_fwd = True\n left_bwd = False\n\n right_fwd = True\n right_bwd = False\n\n last_left = False\n\n while True:\n try:\n # Take shortest distance measured by ultrasound\n if front_distance_sensor_1.value < front_distance_sensor_2.value:\n front_distance = front_distance_sensor_1.value\n else:\n front_distance = front_distance_sensor_2.value\n\n if front_distance < 5 or width.value > 450:\n # if we are facing some obstacle or object we are looking for is close > stop\n left_speed = 0\n right_speed = 0\n left_fwd = left_bwd = right_fwd = right_bwd = False\n else:\n right_fwd = True\n if x_offset.value == -10:\n if last_left:\n # no object is detected by camera\n left_speed = 0 # 0.6 * max_speed\n # left_fwd = False\n # left_bwd = True\n right_speed = 0.9 * max_speed\n else:\n left_speed = 0.9 * max_speed\n right_speed = 0\n elif -5 < x_offset.value < 0:\n # object is in left part of the screen\n left_speed = pow(abs(x_offset.value), 2) * max_speed\n right_speed = max_speed\n left_fwd = True\n left_bwd = False\n last_left = True\n elif x_offset.value > 0:\n # object is in right part of the screen\n left_speed = max_speed\n right_speed = pow(x_offset.value, 2) * max_speed\n left_fwd = True\n left_bwd = False\n last_left = False\n else:\n # object is in the middle\n left_speed = max_speed\n right_speed = max_speed\n left_fwd = True\n left_bwd = False\n \n print('Speeds: Left {} Right {} Run {}'.format(left_speed, right_speed, run_cmd.value))\n\n # Right drives\n pwm.set_pwm(0, 0, int(right_speed*run_cmd.value))\n pwm.set_pwm(1, 0, int(right_speed*run_cmd.value))\n GPIO.output(left_fwd_pin_1, left_fwd)\n GPIO.output(left_fwd_pin_2, left_fwd)\n GPIO.output(left_bwd_pin_1, left_bwd)\n GPIO.output(left_bwd_pin_2, left_bwd)\n\n # Left drives\n pwm.set_pwm(4, 0, int(left_speed*run_cmd.value))\n pwm.set_pwm(5, 0, int(left_speed*run_cmd.value))\n GPIO.output(right_fwd_pin_1, right_fwd)\n GPIO.output(right_fwd_pin_2, right_fwd)\n GPIO.output(right_bwd_pin_1, right_bwd)\n GPIO.output(right_bwd_pin_2, right_bwd)\n except KeyboardInterrupt:\n # Stop robot after keyboard interrupt\n GPIO.output(left_fwd_pin_1, False)\n GPIO.output(left_fwd_pin_2, False)\n GPIO.output(left_bwd_pin_1, False)\n GPIO.output(left_bwd_pin_2, False)\n GPIO.output(right_fwd_pin_1, False)\n GPIO.output(right_fwd_pin_2, False)\n GPIO.output(right_bwd_pin_1, False)\n GPIO.output(right_bwd_pin_2, False)\n GPIO.cleanup()", "def driveSmooth(speed, distance):\n global pose\n\n initialX = pose.pose.position.x\n initialY = pose.pose.position.y\n atTarget = False\n rampSpeed = 0.0\n sleepTime = 0.05\n rampPercentage = 0.3\n step = speed / ((rampPercentage * (distance / speed)) / sleepTime)\n print \"Step size: \" + str(step)\n while (not atTarget and not rospy.is_shutdown()):\n currentX = pose.pose.position.x\n currentY = pose.pose.position.y\n currentDistance = math.sqrt(math.pow((currentX - initialX), 2) + math.pow((currentY - initialY), 2))\n if (currentDistance >= distance):\n atTarget = True\n sendMoveMsg(0, 0)\n else:\n if ((distance - currentDistance) <= distance * rampPercentage and rampSpeed >= 0):\n rampSpeed -= step\n sendMoveMsg(rampSpeed, 0)\n elif ((distance - currentDistance) >= distance * (1.0 - rampPercentage) and rampSpeed <= speed):\n rampSpeed += step\n sendMoveMsg(rampSpeed, 0)\n else:\n sendMoveMsg(speed, 0)\n 
rospy.sleep(sleepTime)", "def move_to(self, target):\n # type: (RoomPosition) -> None\n hive = self.home.hive\n home = self.find_home()\n origin = self.find_origin()\n\n total_distance = hive.honey.find_path_length(origin, target, self.new_movement_opts())\n\n min_distance_from_home = Infinity\n min_distance_to_origin = Infinity\n min_distance_to_target = movement.chebyshev_distance_room_pos(self.members_movement_order()[0].pos, target)\n max_distance_to_target = -Infinity\n any_hostiles = False\n for member in self.members:\n distance_to_home = movement.chebyshev_distance_room_pos(member.pos, home)\n distance_to_origin = movement.chebyshev_distance_room_pos(member.pos, origin)\n distance_to_target = movement.chebyshev_distance_room_pos(member.pos, target)\n if distance_to_home < min_distance_from_home:\n min_distance_from_home = distance_to_home\n if distance_to_target > max_distance_to_target:\n max_distance_to_target = distance_to_target\n if distance_to_origin < min_distance_to_origin:\n min_distance_to_origin = distance_to_origin\n if len(member.room.find(FIND_HOSTILE_CREEPS)):\n any_hostiles = True\n\n if min_distance_to_origin > 100:\n mv_order = self.members_movement_order()\n self.set_origin(mv_order[len(mv_order) - 1].pos)\n if min_distance_from_home < 50 and (max_distance_to_target < total_distance / 2):\n self.log(\"move_to: chose stage 0 (minimum distance from home: {}, maximum distance from home: {},\"\n \" total distance: {})\"\n .format(min_distance_from_home, max_distance_to_target, total_distance))\n self.move_to_stage_0(target)\n elif min_distance_to_target < 300 and any_hostiles:\n self.move_to_stage_2(target)\n elif min_distance_to_target > 60 or max_distance_to_target > 200:\n # self.log(\"move_to: chose stage 1 (minimum distance from home: {}, total distance: {}, \"\n # \"minimum distance to target: {}, maximum distance to target: {})\"\n # .format(min_distance_from_home, total_distance,\n # min_distance_to_target, max_distance_to_target))\n self.move_to_stage_1(target, any_hostiles)\n else:\n # self.log(\"move_to: chose stage 2 (minimum distance from home: {}, total distance: {}, \"\n # \"minimum distance to target: {}, maximum distance to target: {})\"\n # .format(min_distance_from_home, total_distance,\n # min_distance_to_target, max_distance_to_target))\n self.move_to_stage_2(target)", "def drive_distance_all(distances, motors):\n return null", "def sweep(self, angle=math.pi/4, dist=0.3,\n ignore=Obstacle.PATH_IS_CLEAR, throw=False):\n start_heading = self.swarmie.get_odom_location().get_pose().theta\n ignore |= Obstacle.SONAR_BLOCK # always ignore this one too\n\n try:\n self.swarmie.set_heading(start_heading - angle, ignore=ignore)\n self.swarmie.drive(dist, ignore=ignore)\n self.swarmie.drive(-dist, ignore=ignore)\n self.swarmie.set_heading(start_heading + angle, ignore=ignore)\n self.swarmie.drive(dist, ignore=ignore)\n self.swarmie.drive(-dist, ignore=ignore)\n self.swarmie.set_heading(start_heading, ignore=ignore)\n # self.swarmie.timed_drive(time, linear, -angular, ignore=ignore)\n # self.swarmie.timed_drive(time, -linear, angular, ignore=ignore)\n\n # physical rover doesn't go left as well\n # if not self.swarmie.simulator_running():\n # angular *= 1.5\n # linear *= 1.2\n # self.swarmie.timed_drive(time, linear, angular, ignore=ignore)\n # self.swarmie.timed_drive(time, -linear, -angular, ignore=ignore)\n\n except HomeException:\n if throw:\n raise\n return MoveResult.OBSTACLE_HOME\n except TagException:\n if throw:\n raise\n return 
MoveResult.OBSTACLE_TAG\n except ObstacleException:\n if throw:\n raise\n return MoveResult.OBSTACLE_SONAR\n\n return MoveResult.SUCCESS", "def drive_distance(degrees, motor, gear_ratio): #TODO Finish documentation", "def driveStraight(speed, distance):\n global pose\n\n initialX = pose.pose.position.x\n initialY = pose.pose.position.y\n\n atTarget = False\n while (not atTarget and not rospy.is_shutdown()):\n currentX = pose.pose.position.x\n currentY = pose.pose.position.y\n currentDistance = math.sqrt(math.pow((currentX - initialX), 2) + math.pow((currentY - initialY), 2))\n if (currentDistance >= distance):\n atTarget = True\n sendMoveMsg(0, 0)\n else:\n sendMoveMsg(speed, 0)\n rospy.sleep(0.15)", "def spiral_search(self, start_distance, distance_step=0.5, num_legs=10,\n tolerance=0.0, tolerance_step=0.5, max_attempts=5,\n avoid_targets=True, avoid_home=False,\n use_waypoints=True):\n points = self._get_spiral_points(start_distance, distance_step,\n num_legs=num_legs)\n MAX_CONSECUTIVE_FAILURES = 3\n fail_count = 0\n\n start_loc = self.swarmie.get_odom_location().get_pose()\n\n for index, point in enumerate(points):\n # Stop if rover gets further away than it should currently be\n # from the center of the spiral. This can happen if the rover\n # starts following arena boundary walls.\n distance_threshold = (start_distance\n + (index+1) / 2.0 * distance_step)\n\n try:\n drive_result = self.drive_to(\n point,\n tolerance=tolerance,\n tolerance_step=tolerance_step,\n max_attempts=max_attempts,\n avoid_targets=avoid_targets,\n avoid_home=avoid_home,\n use_waypoints=use_waypoints,\n start_location=start_loc,\n distance_threshold=distance_threshold\n )\n if (drive_result == MoveResult.OBSTACLE_HOME\n or drive_result == MoveResult.OBSTACLE_TAG):\n return drive_result\n except PathException:\n fail_count += 1\n if fail_count >= MAX_CONSECUTIVE_FAILURES:\n raise\n\n return MoveResult.SUCCESS", "def drive(self, distance, linear_speed):\n current_pose = [self.px, self.py, self.pth]\n \tinitial_pose = current_pose\n # final pose is distance to be moved by the robot in the x direction\n \tdistance_traveled = 0\n \ttolerance = 0.01\n\n self.send_speed(linear_speed, 0.0)\n \twhile abs(distance-distance_traveled) > tolerance:\n current_pose = [self.px, self.py, self.pth]\n distance_traveled = math.sqrt((current_pose[0]-initial_pose[0])*(current_pose[0]-initial_pose[0])+(current_pose[1]-initial_pose[1])*(current_pose[1]-initial_pose[1]))\n #print(final_pose[0]-current_pose[0])\n \tself.send_speed(0.0,0.0)", "def drive(self, distance):\n if random.randrange(1, 100) > (100 - self.reliability):\n return super().drive(distance)\n else:\n return \"Car broke down and drove 0km\"", "async def move_to(self, distance: float, speed: float) -> None:\n time = self.time_for_distance(distance, speed)\n await self.rmotor.run(-speed, time)\n await self.lmotor.run(speed, time)", "def safe_goto(self, speed=1.0, distance=SAFETY_DISTANCE):\n\n moves = []\n point = copy.deepcopy(\n self.poses[self.selected_point].measured.position)\n orientation = self.poses[self.selected_point].measured.orientation\n point.z += distance\n move = Move()\n move.pose.position = point\n move.pose.orientation = orientation\n move.speed = speed\n moves.append(move)\n\n RobotControllerHandler().current_controller.set_speed(speed)\n RobotControllerHandler().current_controller.move_pose(moves)", "def driveStraight(self, speed, distance):\n origin = copy.deepcopy(self._current) #hint: use this\n\n\n q = [origin.orientation.x,\n 
origin.orientation.y,\n origin.orientation.z,\n origin.orientation.w] # quaternion nonsense\n\n xOrigin=self._current.position.x\n yOrigin=self._current.position.y\n atTarget=False\n\n move_msg=Twist()\n move_msg.linear.x=speed\n move_msg.angular.z=0\n\n stop_msg=Twist()\n stop_msg.linear.x=0\n stop_msg.linear.z=0\n\n currentDistance=0\n #for extra credit ramp speed from 0 to speed and from speed to 1/4 speed when past half way\n vel=0\n\n while(not atTarget and not rospy.is_shutdown()):\n if(currentDistance>=distance):\n print('driveStraight: stoped')\n atTarget=True\n self._vel_pub.publish(stop_msg)\n else:\n print('driveStraight: moving')\n origin=copy.deepcopy(self._current)\n xCurrent=self._current.position.x\n yCurrent=self._current.position.y\n currentDistance=math.sqrt(math.pow((xCurrent-xOrigin),2)+math.pow((yCurrent-yOrigin),2))\n self._vel_pub.publish(move_msg)\n print('current x: '+str(xCurrent)+'current y: '+str(yCurrent)+'origin x: '+str(xOrigin)+'origin y:'+str(yOrigin))\n print('\\n distance: '+str(currentDistance))\n # rospy.sleep(.15)", "def search_parking_lot(self):\n\n self.start_driving()\n self.velocity = 8\n self.distance = 250 # maximum searching distance\n self.angle = 1.5 # TODO\n self.drive_thread.reset()\n\n vacant_distance = 0\n\n while self.drive_thread.driven_distance < self.distance:\n time.sleep(0.1)\n\n if self.sensor_manager.right > 25:\n vacant_distance += 1\n else:\n vacant_distance = 0\n\n if vacant_distance >= 35:\n while self.sensor_manager.right > 25:\n time.sleep(0.1)\n\n distance_right = self.sensor_manager.right\n\n if 14 <= distance_right <= 18:\n self.angle = 0\n self.distance = 35\n self.drive_thread.reset()\n\n while self.drive_thread.driven_distance < self.distance:\n time.sleep(0.1)\n elif distance_right > 18:\n self.adjust_starting_position(\"left\")\n elif distance_right < 14:\n self.adjust_starting_position(\"right\")\n \n break\n\n self.stop_driving()", "def drive(self, distance):\n random_number = random.randint(0,101)\n if float(random_number)<self.reliability:\n distance_driven = super().drive(distance)\n\n else:\n distance_driven=0\n\n return distance_driven", "def move_dolly(self, distance: int, direction: int, time: int = None):\n\n self.__do_action(self.motor.move(direction, distance, time))", "def linear_move(self, dist):\n\t\tglobal estop_flag, move_state\n\t\tsignal.alarm(0) #Disable timer interrupt for the duration of the movement\n\t\thalfway_flag = False\n\t\t\n\t\twith self.move_state_lock:\n\t\t\tstart_x, start_y, start_z = move_state['x'], move_state['y'], move_state['z']\n\t\tcurrent_x = start_x\n\t\tcurrent_y = start_y\n\t\tcurrent_z = start_z\n\t\t#While the distance travelled is less than target distance\n\t\twhile math.sqrt((current_x - start_x)**2 + (current_y - start_y)**2 + (current_z - start_z)**2) < abs(dist):\n\t\t\t#Check if the emergency stop flag is set, if so, break the current loop and reset velocity\t\n\t\t\tif estop_flag:\n\t\t\t\tself.publisher.publish(Mover.stop_msg)\n\t\t\telse:\n\t\t\t\t#If the distance goal is negative, move backward\n\t\t\t\tif dist < 0:\n\t\t\t\t\t#Send negative velocity\n\t\t\t\t\ttwist_msg = Twist()\n\t\t\t\t\ttwist_msg.linear.x = -1 * riu.move_rate\n\t\t\t\t\tself.publisher.publish(twist_msg)\n\t\t\t\t#If distance goal is positive, move forward\n\t\t\t\telif dist > 0:\n\t\t\t\t\t#Send positive velocity\n\t\t\t\t\ttwist_msg = Twist()\n\t\t\t\t\ttwist_msg.linear.x = riu.move_rate\n\t\t\t\t\tself.publisher.publish(twist_msg)\n\t\t\t\t#Check if the current movement is half 
completed, if so, send a Half message and set flag to avoid message duplication\n\t\t\t\tif (math.sqrt((current_x - start_x)**2 + (current_y - start_y)**2 + (current_z - start_z)**2) >= abs(dist)/2\n\t\t\t\t\tand not halfway_flag):\n\t\t\t\t\thalfway_flag = True\n\t\t\t\t\tself.status_pub.publish(String(\"half\"))\n\t\t\t\t#update current_x, current_y, and current_z (using local variables to be thread safe)\n\t\t\t\twith self.move_state_lock:\n\t\t\t\t\tcurrent_x = move_state['x']\n\t\t\t\t\tcurrent_y = move_state['y']\n\t\t\t\t\tcurrent_z = move_state['z']\n\t\t\trospy.sleep(.2)\n\t\t\t\t\n\t\t#previously had while, finally block -> illegal syntax in python. Just moved to outside loop.\n\t\tself.publisher.publish(Mover.stop_msg)\n\t\tself.status_pub.publish(String(\"done\"))\n\t\tsignal.alarm(Mover.ready_message_interval)", "def drive(self, carstate):\n global SWARM\n if (TRAIN):\n command = self.trainDrive(carstate)\n elif (SWARM):\n # If first run, set teammate file name\n if (self.start):\n for f in os.listdir(\"./positions/\"):\n if str(self.port) not in f:\n self.helper_pos_filename = \"./positions/\" + f\n break\n self.start = False\n # If no teammate, don't use swarm\n if self.helper_pos_filename == \"./positions/pos\":\n SWARM = False\n self.model = TwoLayerNet(22, 15, 2, False)\n self.model.load_state_dict(torch.load(\"./models/model_without_blocking_22.pt\", map_location=lambda storage, loc: storage))\n np_car = self.carstateToNumpy(carstate, \"winner\")\n return self.modelDrive(self.model, carstate, np_car)\n\n # Write own position to file\n own_pos_file = open(self.own_pos_filename, \"w\")\n own_pos_file.write(str(carstate.race_position) + \" \\n\")\n own_pos_file.close()\n\n # Always assume winner strategy\n model = self.model_winner\n np_car = self.carstateToNumpy(carstate, \"winner\")\n\n # Check if teammate is ahead of behind\n helper_pos_file = open(self.helper_pos_filename, \"r\")\n line = helper_pos_file.read()\n helper_pos_file.close()\n # If teammate is ahead with at least 3 positions, become a helper\n if (line and int(line) < carstate.race_position - 3):\n model = self.model_helper\n np_car = self.carstateToNumpy(carstate, \"helper\")\n\n command = self.modelDrive(model, carstate, np_car)\n else:\n np_car = self.carstateToNumpy(carstate, \"winner\")\n command = self.modelDrive(self.model, carstate, np_car)\n\n\n # Set logging data\n if self.data_logger:\n self.data_logger.log(carstate, command)\n return command", "def move(self, distance: int, direction: float, max_steering=np.pi / 2):\n if direction > max_steering:\n direction = max_steering\n if direction < -max_steering:\n direction = -max_steering\n\n if distance < 0.0:\n distance = 0.0\n\n self.total_distance_covered += distance\n\n self.theta = (self.theta + direction) % (2.0 * np.pi)\n self.x = self.x + (np.cos(self.theta) * distance)\n self.y = self.y + (np.sin(self.theta) * distance)", "def cruise(self):\n while self.dist() > self.SAFE_STOP_DIST:\n time.sleep(.2)\n self.fwd()\n self.stop()" ]
[ "0.655253", "0.63666767", "0.63183516", "0.6304355", "0.62158734", "0.555457", "0.55458516", "0.5532839", "0.55063546", "0.5494884", "0.5476724", "0.5402915", "0.5317474", "0.5294457", "0.5273561", "0.5236973", "0.5223151", "0.51784474", "0.5162055", "0.5136079", "0.5125034", "0.5088057", "0.50812566", "0.5062895", "0.50302887", "0.5011895", "0.4923362", "0.48716697", "0.483576", "0.4826732" ]
0.7307606
0
Set the rover's home locations in the /odom and /map frames. Can be called manually, but is also called from Planner.drive_to() when a home tag is seen.
def set_home_locations(self):
    self.swarmie.set_home_gps_location(self.swarmie.get_gps_location())

    current_location = self.swarmie.get_odom_location()
    current_pose = current_location.get_pose()
    home_odom = Location(current_location.Odometry)

    detections = self.swarmie.get_latest_targets().detections
    try:
        for detection in detections:
            if detection.id == 256:
                see_home_tag = True
                home_detection = self._transform_to_odom(detection)

                quat = [home_detection.pose.orientation.x,
                        home_detection.pose.orientation.y,
                        home_detection.pose.orientation.z,
                        home_detection.pose.orientation.w]
                _r, _p, yaw = tf.transformations.euler_from_quaternion(
                    quat
                )
                yaw += math.pi / 2

                home_odom.Odometry.pose.pose.position.x = float(
                    home_detection.pose.position.x + 0.5 * math.cos(yaw)
                )
                home_odom.Odometry.pose.pose.position.y = float(
                    home_detection.pose.position.y + 0.5 * math.sin(yaw)
                )
                self.swarmie.set_home_odom_location(home_odom)
                return
    except tf.Exception:
        pass  # use backup below

    # project home_odom location 50cm in front of rover's current location
    home_odom.Odometry.pose.pose.position.x = (
        current_pose.x + 0.5 * math.cos(current_pose.theta)
    )
    home_odom.Odometry.pose.pose.position.y = (
        current_pose.y + 0.5 * math.sin(current_pose.theta)
    )
    self.swarmie.set_home_odom_location(home_odom)
    return
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_home_position(self, lat, lon, alt):\n pass", "def set_current_location_as_home(self):\n response = False\n while (not response) and (not rospy.is_shutdown()):\n response = self._set_home_proxy(True, 0., 0., 0., 0.).success\n self._rate.sleep()\n if response:\n rospy.loginfo(\n '%s is setting current location as the new home ...' %\n self.namespace)\n return response", "def home_folder(self, home_folder):\n\n self._home_folder = home_folder", "def go_home(self):\n self.set_all_positions([0]*self.nleaflets)", "def home(self, home):\n if home is None:\n raise ValueError(\"Invalid value for `home`, must not be `None`\") # noqa: E501\n\n self._home = home", "def set_ospl_home_bin(self, ospl_home_bin):\r\n self.ospl_home_bin = ospl_home_bin\r\n self.reset_ospl_command()", "def go_home(self):\n command = _build_robovac_command(RobovacModes.WORK, RobovacCommands.GO_HOME)\n message = self._build_command_user_data_message(command)\n\n self._send_packet(message, False)", "def home(self):\n self.__send_short(self.MGMSG_MOT_MOVE_HOME, self.__chan, 0x00)", "def go_home(self):\n self.set_jpos(self._home_position, wait=True)", "def goHome():\n\t#Go to pod home\n\tif screen.lastScreen in screen.protectedScreens:\n\t\tpodScreen.show()\n\telse:\n\t\tsplashScreen.show()", "def home(self):\n self.initial_offset = 0", "def _update_home_information(self, homes):\n\n if homes is not None and len(homes) > 0:\n self._home = homes[0]\n self.has_home = True\n self._update_horizon(max(abs(self._home[0]), abs(self._home[1])))\n if self.experimental_home is None:\n self.experimental_home = self._home\n else:\n if self.experimental_home not in self.last_scan['Home']:\n print self, self.experimental_home, \"is not in\", self.last_scan['Home']\n self.experimental_home = self._home\n else:\n self._home = self.experimental_home # Try some reckoning\n\n return", "def home(self):\n self.goto(0, 0)\n self.setheading(0)", "def go_home(self):\n self.move_wl(0)", "def move(self, new_home):\n #checked#\n ###your code here###\n if self.home!=None:\n self.home.occupant=None\n new_home.occupant=self\n self.home=new_home", "def home(self):\n self.goto(0, 0)", "def __add_homes(self):\n for home in self.__positions_of_homes:\n self.__grid[home[0]][home[1]][\"humans\"] = math.floor(\n self.__number_of_humans / self.__number_of_homes\n )", "def home( self ):\n\t\tself.command( LCD_RETURNHOME ) # set cursor position to zero\n\t\tsleep_us( 2000 ) # this command takes a long time!", "def go_home(node):\n if node.attr('t').isSettable():\n node.setAttr('t', (0, 0, 0))\n if node.attr('r').isSettable():\n node.setAttr('r', (0, 0, 0))\n if node.attr('s').isSettable():\n node.setAttr('s', (1, 1, 1))", "def home(self):\n self.command(self.LCD_RETURNHOME)\n self._cursor_pos = (0, 0)\n self._msleep(2)", "def home_team(self, home_team):\n\n self._home_team = home_team", "def home(self):\n\n # Report the current homing state\n\n print(\"Current homing state: {}\".format(self.robot.homed()))\n\n # Home each joint, even if it is already reported homed, except j4 which auto-homes with j3, and any other unhomable joint.\n\n for joint in self.JOINTS[0:4]: \n print(\"Homing {}\".format(joint))\n self.robot.home(joint)\n\n print(\"Homed all joints\")\n\n self.goToPose(self.HOME_POSE)\n\n return", "def home():\n G.DEVICE.home()", "def register_home(route):\n global _home\n _home = route", "def register_home(route):\n global _home\n _home = route", "def go_home(self):\r\n if self.home_url is not None:\r\n self.set_url(self.home_url)", "def 
home(self):\n self.command(_LCD_RETURNHOME)\n self._cursor_pos = (0, 0)\n time.sleep(2*MILLISECOND)", "def _setOceanLocation(self):\r\n\t\t## If the fluids_hrc exists\r\n\t\tif cmds.objExists('fluids_hrc'):\r\n\t\t\tif cmds.objExists('ocean_srf'):\r\n\t\t\t\tcmds.connectAttr('fluids_hrc.translateX', 'ocean_srf.translateX', f = True)\r\n\t\t\t\tcmds.connectAttr('fluids_hrc.translateZ', 'ocean_srf.translateZ', f = True)\r\n\t\t\telse:\r\n\t\t\t\tcmds.warning('MISSING ocean_srf node from scene....')\r\n\r\n\t\t\tif cmds.objExists('oceanPreviewPlane_prv'):\r\n\t\t\t\tcmds.connectAttr('fluids_hrc.translateX', 'oceanPreviewPlane_prv.translateX', f = True)\r\n\t\t\t\tcmds.connectAttr('fluids_hrc.translateZ', 'oceanPreviewPlane_prv.translateZ', f = True)\r\n\t\t\telse:\r\n\t\t\t\tcmds.warning('MISSING oceanPreviewPlane_prv node from scene....')\r\n\t\telse:\r\n\t\t\tcmds.warning('NO fluids_hrc FOUND! Can not move the ocean into final position. PLEASE CHECK FX PUBLISH NOW!')", "def homeDirectory(self, ignored_value):\n\t\tself.__homeDirectory = self._resolve_home_directory()", "def gohome(self):\n raise Exception(\"Not implemented\")" ]
[ "0.7108816", "0.65008163", "0.63759106", "0.633259", "0.6307917", "0.6304005", "0.62762004", "0.6232992", "0.61337197", "0.6027151", "0.60225916", "0.6018789", "0.5885093", "0.5872494", "0.58659697", "0.58089083", "0.5792628", "0.57654065", "0.5707072", "0.5704166", "0.56773156", "0.5647598", "0.55737865", "0.55560553", "0.55560553", "0.5504496", "0.55029947", "0.5492368", "0.54915476", "0.5413178" ]
0.75460696
0
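For reference, the fallback branch in set_home_locations above reduces to a short piece of planar geometry: place a point a fixed distance in front of a 2D pose along its heading. A minimal standalone sketch of that projection; Pose2D is a hypothetical stand-in for the ROS pose object, and the 0.5 m default matches the constant used in the method:

import math

class Pose2D:
    def __init__(self, x, y, theta):
        self.x, self.y, self.theta = x, y, theta

def project_ahead(pose, dist=0.5):
    # Place a point `dist` meters in front of the pose along its heading.
    return (pose.x + dist * math.cos(pose.theta),
            pose.y + dist * math.sin(pose.theta))

# project_ahead(Pose2D(1.0, 2.0, math.pi / 2)) -> (1.0, 2.5), up to float rounding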
Get a list of waypoints for the spiral search pattern. The waypoints are used as goals for Planner.drive_to().
def _get_spiral_points(self, start_distance, distance_step, num_legs=10):
    start_loc = self.swarmie.get_odom_location().get_pose()

    points = []
    distance = start_distance
    angle = math.pi / 2

    prev_point = Point()
    prev_point.x = start_loc.x + distance * math.cos(start_loc.theta)
    prev_point.y = start_loc.y + distance * math.sin(start_loc.theta)
    points.append(prev_point)

    # todo: is this big enough, or too big?
    for i in range(1, num_legs):
        if i % 2 == 0:
            distance += distance_step

        point = Point()
        point.x = prev_point.x + distance * math.cos(start_loc.theta + angle)
        point.y = prev_point.y + distance * math.sin(start_loc.theta + angle)

        points.append(point)
        prev_point = point
        angle += math.pi / 2

    return points
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def waypoints(self):\n\t\treturn [Star(star_id, galaxy=self.galaxy) for delay, star_id, order, num_ships in self.data.o]", "def spiral_search(self, start_distance, distance_step=0.5, num_legs=10,\n tolerance=0.0, tolerance_step=0.5, max_attempts=5,\n avoid_targets=True, avoid_home=False,\n use_waypoints=True):\n points = self._get_spiral_points(start_distance, distance_step,\n num_legs=num_legs)\n MAX_CONSECUTIVE_FAILURES = 3\n fail_count = 0\n\n start_loc = self.swarmie.get_odom_location().get_pose()\n\n for index, point in enumerate(points):\n # Stop if rover gets further away than it should currently be\n # from the center of the spiral. This can happen if the rover\n # starts following arena boundary walls.\n distance_threshold = (start_distance\n + (index+1) / 2.0 * distance_step)\n\n try:\n drive_result = self.drive_to(\n point,\n tolerance=tolerance,\n tolerance_step=tolerance_step,\n max_attempts=max_attempts,\n avoid_targets=avoid_targets,\n avoid_home=avoid_home,\n use_waypoints=use_waypoints,\n start_location=start_loc,\n distance_threshold=distance_threshold\n )\n if (drive_result == MoveResult.OBSTACLE_HOME\n or drive_result == MoveResult.OBSTACLE_TAG):\n return drive_result\n except PathException:\n fail_count += 1\n if fail_count >= MAX_CONSECUTIVE_FAILURES:\n raise\n\n return MoveResult.SUCCESS", "def search_for_plans(start, exits, pig_neighbours, moves, state, actions):\n goals = exits + pig_neighbours\n paths, _ = GamePlanner.astar_multi_search(start=start,\n goals=goals,\n state=state,\n actions=actions)\n plans = GamePlanner.paths_to_plans(paths=paths,\n exits=exits,\n pig_neighbours=pig_neighbours,\n moves=moves)\n return plans", "def spanning_squares(self):\n spanning = []\n for i in range(self.length):\n # Assume ACROSS and DOWN are the only valid directions\n if self.direction == \"ACROSS\":\n spanning.append((self.start_x + i, self.start_y))\n else:\n spanning.append((self.start_x, self.start_y + i))\n return spanning", "def GetSpiral(spiral):\r\n pass", "def get_path(self, grid, start_wp, end_wp):\n # The open and closed sets\n openset = set()\n closedset = set()\n\n # Add the starting point to the open set\n openset.add(start_wp)\n\n # While the open set is not empty\n while openset:\n # Find the waypoint in the open set with the lowest G + H score\n current_wp = min(openset, key=lambda o: o.G + o.H)\n # Found the goal\n if current_wp == end_wp:\n path = []\n while current_wp.parent:\n path.append(current_wp)\n current_wp = current_wp.parent\n path.append(current_wp)\n print(\"Path found in {} moves: {}\".format(len(path), path))\n return path[::-1]\n\n # Remove the waypoint from the open set\n openset.remove(current_wp)\n # Add it to the closed set\n closedset.add(current_wp)\n\n # Generate children\n children = current_wp.generate_children(grid)\n\n for waypoint in children:\n # If it is already in the closed set, skip it\n if waypoint in closedset:\n continue\n # Otherwise if it is already in the open set\n if waypoint in openset:\n # Check if we beat the G score\n new_g = current_wp.G + 1\n\n if waypoint.G > new_g:\n # If so, update the waypoint to have a new parent\n waypoint.G = new_g\n waypoint.parent = current_wp\n else:\n # If it isn't in the open set, calculate the G and H score for the waypoint\n if waypoint.orientation != current_wp.orientation:\n waypoint.G = current_wp.G + 1.5 # Avoiding zigzag move by increase the cost of a rotation\n else:\n waypoint.G = current_wp.G + 1\n\n waypoint.H = abs(waypoint.x - end_wp.x) + abs(waypoint.y - end_wp.y)\n 
# Set the parent to our current_wp\n waypoint.parent = current_wp\n # Add it to the set\n openset.add(waypoint)\n\n # If there is no solution\n return [start_wp, end_wp]", "def test_find_closest_waypoints_nearest(self):\n planner = WaypointPlanner(make_example_base_waypoints())\n\n planner.position = Vector3(0, 0, 0)\n waypoints = planner.find_closest_waypoints(1)\n self.assertEqual(1, len(waypoints))\n self.assertEqual(0, waypoints[0].pose.pose.position.x)\n self.assertEqual(0, waypoints[0].pose.pose.position.y)\n self.assertEqual(0, waypoints[0].pose.pose.position.z)\n\n planner.position = Vector3(0.9, 0.9, 0)\n waypoints = planner.find_closest_waypoints(2)\n self.assertEqual(2, len(waypoints))\n self.assertEqual(1, waypoints[0].pose.pose.position.x)\n self.assertEqual(1, waypoints[0].pose.pose.position.y)\n self.assertEqual(2, waypoints[1].pose.pose.position.x)\n self.assertEqual(2, waypoints[1].pose.pose.position.y)\n\n # Check it wraps back around to the start.\n planner.position = Vector3(0.9, 0.9, 0)\n waypoints = planner.find_closest_waypoints(3)\n self.assertEqual(3, len(waypoints))\n self.assertEqual(1, waypoints[0].pose.pose.position.x)\n self.assertEqual(1, waypoints[0].pose.pose.position.y)\n self.assertEqual(2, waypoints[1].pose.pose.position.x)\n self.assertEqual(2, waypoints[1].pose.pose.position.y)\n self.assertEqual(0, waypoints[2].pose.pose.position.x)\n self.assertEqual(0, waypoints[2].pose.pose.position.y)", "def spiral_search():\n #spiral inward to outward making a larger circle each pass (currently squares)\n #------------check the RSSI readings as it spins------------------\n #replace max rssi with new largest and record degrees coordinates\n rssi_max = -120\n max_x = 0\n max_y = 0\n\n count = 0\n while (count < 5):\n move(ccw_msg)\n time.sleep((.1+count))\n move(up_ccw_msg)\n time.sleep((.05+count))\n move(up_msg)\n time.sleep((.05+count))\n move(up_cw_msg)\n time.sleep((.05+count))\n move(cw_msg)\n time.sleep(2*(.1+count))\n move(down_cw_msg)\n time.sleep((.05*count))\n move(down_msg)\n time.sleep(2*(.05+(.05*count)))\n move(down_ccw_msg)\n time.sleep(.05*count)\n count+=1\n #this method isn't really ideal with using timer to determine movement length", "def get_neighbors(self):\n step_size = self.step_size\n return [\n SearchProblem(x, y, step_size, self.function)\n for x, y in (\n (self.x - step_size, self.y - step_size),\n (self.x - step_size, self.y),\n (self.x - step_size, self.y + step_size),\n (self.x, self.y - step_size),\n (self.x, self.y + step_size),\n (self.x + step_size, self.y - step_size),\n (self.x + step_size, self.y),\n (self.x + step_size, self.y + step_size),\n )\n ]", "def rrt_search(self):\n self.tree.AddVertex(self.start_config)\n self.tree.AddEdge(self.start_config, self.start_config)\n\n while True:\n x_new, x_nearest = self.new_and_near()\n if x_new is None:\n # print(\"it's None\")\n continue\n # connect shortest valid edge\n # print(\"new point\", x_new)\n self.connect_to_point(x_nearest, x_new)\n\n # probabilistically check if solution found\n if self.goal_config in self.tree.vertices:\n print(\"find it\")\n path = self.planning_env.reconstruct_path(self.tree.edges, self.start_config, self.goal_config)\n if path is not None:\n return path\n\n if self.name=='rrtstar' and self.tree.samples_taken > 10:\n return []\n # # check if can connect to goal after generating max_samples\n if self.tree.samples_taken >= self.tree.max_samples:\n return []", "def paths(p, q):\n if (p, q) == (0, 0):\n return [((0, 0),)]\n answer = list()\n if p > 0:\n 
west = paths(p - 1, q)\n for path in west:\n answer.append(path + ((p, q),))\n if q > 0:\n south = paths(p, q - 1)\n for path in south:\n answer.append(path + ((p, q),))\n return answer", "def get_directions():\n return [(1, 0), (0, 1), (-1, 0), (0, -1)]", "def _get_next_waypoint(self, tolerance_step):\n print('\\nGetting new nav plan.')\n\n for i in range(4):\n try:\n self.plan = self.swarmie.get_plan(\n self.goal,\n tolerance=self.tolerance,\n use_home_layer=self.avoid_home\n )\n break # plan received\n except rospy.ServiceException:\n print('ServiceException.')\n if i < 3:\n print('Expanding tolerance.')\n self.tolerance += tolerance_step\n else:\n raise # tried 3 times, we give up\n\n print('Received nav plan.')\n pose = self.plan.plan.poses[0]\n\n return Point(x=pose.pose.position.x, y=pose.pose.position.y)", "def optimized_travelling_salesman(points, start=None):\n if start is None:\n start = points[0]\n must_visit = points\n path = [start]\n must_visit.remove(start)\n while must_visit:\n nearest = min(must_visit, key=lambda x: distance(path[-1], x))\n path.append(nearest)\n must_visit.remove(nearest)\n return path", "def directions(self):\n return []", "def waypoints(t):\n global x\n xx = x + ((2 * PI)/t)\n yy = 2*(math.sin(xx))*(math.sin(xx/2))\n return [xx, yy]", "def get_shortest_path(self, r_start, r_goal):\n neighbors = [(0, 1), (0, -1), (1, 0), (-1, 0), (1, 1), (1, -1), (-1, 1), (-1, -1)]\n start = (int(r_start[0] / Map.RESOLUTION), int(r_start[1] / Map.RESOLUTION))\n goal = (int(r_goal[0] / Map.RESOLUTION), int(r_goal[1] / Map.RESOLUTION))\n close_set = set()\n came_from = {}\n gscore = {start: 0}\n fscore = {start: Map._heuristic(start, goal)}\n oheap = []\n\n heappush(oheap, (fscore[start], start))\n\n while oheap:\n current = heappop(oheap)[1]\n\n if current == goal:\n data = []\n while current in came_from:\n data.append((int((current[0] * Map.RESOLUTION) + (Map.RESOLUTION / 2)),\n int((current[1] * Map.RESOLUTION) + (Map.RESOLUTION / 2))))\n current = came_from[current]\n data.reverse()\n return data\n\n close_set.add(current)\n for i, j in neighbors:\n neighbor = current[0] + i, current[1] + j\n tentative_g_score = gscore[current] + Map._heuristic(current, neighbor)\n if 0 <= neighbor[0] < self.col_grid.shape[0]:\n if 0 <= neighbor[1] < self.col_grid.shape[1]:\n if self.col_grid[neighbor[0]][neighbor[1]] == 1:\n continue\n else:\n # array bound y walls\n continue\n else:\n # array bound x walls\n continue\n\n if neighbor in close_set and tentative_g_score >= gscore.get(neighbor, 0):\n continue\n if tentative_g_score < gscore.get(neighbor, 0) or neighbor not in [i[1] for i in oheap]:\n came_from[neighbor] = current\n gscore[neighbor] = tentative_g_score\n fscore[neighbor] = tentative_g_score + Map._heuristic(neighbor, goal)\n heappush(oheap, (fscore[neighbor], neighbor))\n\n return []", "def compute_waypoints(self, source_loc, destination_loc):\n start_waypoint = self._map.get_waypoint(\n source_loc,\n project_to_road=True,\n lane_type=carla.LaneType.Driving)\n end_waypoint = self._map.get_waypoint(\n destination_loc,\n project_to_road=True,\n lane_type=carla.LaneType.Driving)\n assert start_waypoint and end_waypoint, 'Map could not find waypoints'\n route = self._grp.trace_route(\n start_waypoint.transform.location,\n end_waypoint.transform.location)\n # TODO(ionel): The planner returns several options in intersections.\n # We always take the first one, but this is not correct.\n return deque([to_pylot_transform(waypoint[0].transform)\n for waypoint in route])", "def 
__routes(self, with_return):\n nonzeo_pois = list(filter(None, self.pois.keys()))\n\n for path in itertools.permutations(nonzeo_pois):\n steps = self.poi_distance(0, path[0])\n for i, j in zip(path, path[1:]):\n steps += self.poi_distance(i, j)\n if with_return:\n steps += self.poi_distance(path[-1], 0)\n yield steps", "def next_gps(self):\n \n return Waypoint(0.0, 0.0)", "def __get_neighbors(self, goal):\n neighbors = set()\n start = self.__get_position(0, self.puzzle)\n # start_x = start[0]\n # start_y = start[1]\n # Get the below neighbor.\n if(start[0] - 1 >= 0):\n temp = self.__swap(start[0], start[1], start[0] - 1, start[1])\n neighbors.add(State(temp, self.g + 1, 'D', goal))\n # Get the above neighbor\n if(start[0] + 1 <= len(self.puzzle) -1):\n temp = self.__swap(start[0], start[1], start[0] + 1, start[1])\n neighbors.add(State(temp, self.g + 1, 'U', goal))\n # Get the right neighbor\n if(start[1] - 1 >= 0):\n temp = self.__swap(start[0], start[1], start[0], start[1] - 1)\n neighbors.add(State(temp, self.g + 1, 'R', goal))\n # Get the left neighbor\n if(start[1] + 1 <= len(self.puzzle[0]) -1):\n temp = self.__swap(start[0], start[1], start[0], start[1] + 1)\n neighbors.add(State(temp, self.g + 1, 'L', goal))\n\n return neighbors", "def fermat_spiral_points(center, beam_diam, overlap, num_points):\n return spiral", "def paths(self, start):\n # This is probably a little slow\n tupadd = lambda p, v: (p[0] + v[0], p[1] + v[1])\n # First, we'll check adjacency moves.\n adj = [tupadd(start, v) for v in DIRECTIONS]\n yield from (p for p in adj if self.board(p) == 0)\n # Now we check repeated hops.\n # We do this by a breadth first search.\n\n #TODO: Consensus on legality of hopping back to start and \"skipping\"\n visited = set(adj)\n to_visit = [start]\n while len(to_visit):\n pt = to_visit.pop(0)\n if pt in visited:\n continue\n\n # We have to actually move a piece\n # But this stops us from considering \"start\" even if we can\n # make some hops and get back to start\n if pt is not start:\n yield pt\n \n visited.add(pt)\n # Compute the hop directions\n dirs = ((tupadd(pt, v), tupadd(pt, tupadd(v, v))) for v in DIRECTIONS)\n to_visit.extend(\n dest for over, dest in dirs\n if self.board(over) > 0\n and self.board(dest) == 0\n and dest not in visited\n and over != start\n )", "def get_routes(solution, routing, manager):\n # Get vehicle routes and store them in a two dimensional array whose\n # i,j entry is the jth location visited by vehicle i along its route.\n routes = []\n for route_nbr in range(routing.vehicles()):\n index = routing.Start(route_nbr)\n route = [manager.IndexToNode(index)]\n while not routing.IsEnd(index):\n index = solution.Value(routing.NextVar(index))\n route.append(manager.IndexToNode(index))\n routes.append(route)\n return routes", "def plan_path(self, start_point, end_point, map_obj):\n # STUFF FOR TESTING \n if self.enable_vis:\n marker = Marker()\n marker.header.frame_id = \"/map\"\n marker.type = marker.POINTS\n marker.action = marker.ADD\n \n marker.scale.x = 0.1\n marker.scale.y = 0.1\n self.vis_pub.publish(marker)\n \n exploration_bias = 1.0 - self.goal_bias\n final_node = None\n num_existing_path_points_added = 0\n \n self.rrt_star = RRTStar(Node(start_point))\n self.max_iterations = self.rrt_star.max_size\n while self.rrt_star.size <= self.max_iterations:\n p = np.random.uniform()\n if p < exploration_bias:\n \n x_rand = self.map.sample_free_space()\n else:\n if final_node is None:\n x_rand = end_point\n else:\n x_rand = 
self.branched_from_existing_path(\n final_node,\n depth_underestimate=num_existing_path_points_added\n )\n num_existing_path_points_added += 1\n\n x_nearest = self.rrt_star.nearest(x_rand) # Find the nearest node to x_rand\n\n path = self.map.generate_line_path(x_nearest.value, x_rand, eta=self.eta)\n if path is not None: # no obstacles between x_nearest and x_rand\n x_new = path[-1]\n X_nearby_connectable = self.find_nearby_connectable(x_nearest, x_new)\n\n cost_min, node_min = self.find_best_parent(X_nearby_connectable, x_new)\n\n X_nearby_connectable.remove(node_min) # Remove x_new's parent node from the list of nearby nodes so it is not considered for rewiring\n \n # Create the new node at x_new!\n node_new = self.rrt_star.add_config(node_min, x_new)\n \n if self.enable_vis:\n # FOR TESTING ONLY #\n # Code to publish marker for new node\n ###########################################################################################\n TEMP = Point()\n TEMP.x = x_new[0]\n TEMP.y = x_new[1]\n TEMP.z = .05\n marker.points.append(TEMP)\n \n TEMP = ColorRGBA()\n TEMP.r = 1\n TEMP.g = 0\n TEMP.b = 0\n TEMP.a = 1\n \n marker.colors.append(TEMP)\n \n self.vis_pub.publish(marker)\n ###########################################################################################\n\n self.rewire(cost_min, node_new, X_nearby_connectable)\n \n if np.allclose(node_new.value, end_point, .05, 0) and (final_node is None):#np.array_equal(node_new.value, end_point):\n final_node = node_new\n # reduce exploration bias so that we reinforce the existing path\n exploration_bias = .5\n if VERBOSE:\n print(\"Path found!!!!\")\n print(final_node.cost)\n if rospy.get_time() - self.start_time > self.time_thresh:\n if VERBOSE:\n print(self.rrt_star.size)\n break\n\n \n if final_node is not None:\n if self.enable_vis:\n marker = Marker()\n marker.header.frame_id = \"/map\"\n marker.type = marker.POINTS\n marker.action = marker.ADD\n \n marker.scale.x = 0.1\n marker.scale.y = 0.1\n marker.points = []\n marker.colors = []\n def recur(node):\n if self.enable_vis:\n TEMP = Point()\n TEMP.x = node.value[0]\n TEMP.y = node.value[1]\n TEMP.z = .05\n marker.points.append(TEMP)\n \n TEMP = ColorRGBA()\n TEMP.r = 1\n TEMP.g = 0\n TEMP.b = 0\n TEMP.a = 1\n \n marker.colors.append(TEMP)\n \n \n self.trajectory.points.append([node.value[0], node.value[1]])\n parent = node.parent\n if parent is not None:\n recur(parent)\n recur(final_node)\n self.trajectory.points.reverse()\n if self.enable_vis:\n self.vis_pub.publish(marker)\n if VERBOSE:\n print (final_node.depth)\n else:\n if VERBOSE:\n print(\"No path found! 
Please try again.\")\n \n \n \n # publish trajectory\n self.traj_pub.publish(self.trajectory.toPoseArray())\n\n # visualize trajectory Markers\n self.trajectory.publish_viz()", "def solve(self):\n return breadth_first_search(self) + [self.goal_url]", "def _get_directions(self, current_point, end_point):\n\n directions = self.planner.get_next_command(\n (current_point.location.x,\n current_point.location.y, 0.22),\n (current_point.orientation.x,\n current_point.orientation.y,\n current_point.orientation.z),\n (end_point.location.x, end_point.location.y, 0.22),\n (end_point.orientation.x, end_point.orientation.y, end_point.orientation.z))\n return directions", "def plan(self):\n return [(0, 0), (1, 0), (1, 1), (1, 2), (1, 3)]", "def prm_planning(start_x, start_y, goal_x, goal_y,\n obstacle_x_list, obstacle_y_list, robot_radius, *, rng=None):\n obstacle_kd_tree = KDTree(np.vstack((obstacle_x_list, obstacle_y_list)).T)\n\n sample_x, sample_y = sample_points(start_x, start_y, goal_x, goal_y,\n robot_radius,\n obstacle_x_list, obstacle_y_list,\n obstacle_kd_tree, rng)\n if show_animation:\n plt.plot(sample_x, sample_y, \".b\")\n\n road_map = generate_road_map(sample_x, sample_y,\n robot_radius, obstacle_kd_tree)\n\n rx, ry = dijkstra_planning(\n start_x, start_y, goal_x, goal_y, road_map, sample_x, sample_y)\n\n return rx, ry", "def solution(self):\n return [node.move for node in self.path()[1:]]" ]
[ "0.6662182", "0.6237596", "0.61242783", "0.60184735", "0.60100424", "0.595016", "0.5893749", "0.57909155", "0.5759986", "0.5754108", "0.5719062", "0.5698987", "0.56761587", "0.5652236", "0.5621764", "0.5616944", "0.56052303", "0.5590701", "0.5576818", "0.55699253", "0.55506504", "0.55230683", "0.5516882", "0.5505708", "0.5488444", "0.548566", "0.5481422", "0.5466503", "0.5461345", "0.54418874" ]
0.6771642
0
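The spiral geometry in _get_spiral_points above is easy to check in isolation: the first leg extends along the start heading, each later leg turns a further 90 degrees, and the leg length grows by distance_step once per two legs. A self-contained sketch of the same point generation, using plain (x, y) tuples in place of geometry_msgs Points:

import math

def spiral_points(x0, y0, theta0, start_distance, distance_step, num_legs):
    points = []
    distance = start_distance
    angle = 0.0
    x, y = x0, y0
    for i in range(num_legs):
        if i > 0 and i % 2 == 0:
            # The leg length grows every second leg, as in the method above.
            distance += distance_step
        x += distance * math.cos(theta0 + angle)
        y += distance * math.sin(theta0 + angle)
        points.append((x, y))
        angle += math.pi / 2  # each leg turns 90 degrees counterclockwise
    return points

# spiral_points(0, 0, 0, 1.0, 0.5, 3)
# -> [(1.0, 0.0), (1.0, 1.0), (-0.5, 1.0)], up to float rounding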
Render a mesh by casting an array of rays from an origin point, finding where each ray hits the mesh, and combining a color texture function with the surface normal at the collision point to color the resulting pixel.
def ray_cast(
    mesh: jnp.ndarray,
    origin: jnp.ndarray,
    directions: jnp.ndarray,
    color_fn: Callable[[jnp.ndarray], jnp.ndarray],
    batch_size: int = 128,
    bg_color: jnp.ndarray = None,
    ambient_light: float = 0.0,
) -> jnp.ndarray:
    # Normalize the directions so that dot products are meaningful.
    directions = directions / jnp.sqrt(jnp.sum(directions ** 2, axis=-1, keepdims=True))

    if bg_color is None:
        bg_color = jnp.array([0, 0, 0])

    @jax.jit
    def ray_color(sub_directions: jnp.ndarray) -> jnp.ndarray:
        collides, points, normals = jax.vmap(lambda d: mesh_ray_collision(mesh, origin, d))(
            sub_directions
        )
        colors = color_fn(points)
        light_dot_prods = jnp.sum(jnp.abs(normals * sub_directions), axis=-1, keepdims=True)
        colors = colors * (light_dot_prods * (1 - ambient_light) + ambient_light)
        return jnp.where(collides[:, None], colors, bg_color.astype(colors.dtype))

    outputs = []
    for i in range(0, len(directions), batch_size):
        outputs.append(ray_color(directions[i : i + batch_size]))
    return jnp.concatenate(outputs, axis=0)
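A minimal usage sketch for ray_cast above, assuming mesh, mesh_ray_collision, and a color_fn are defined as in the snippet. pinhole_directions, the 0.5 view extent, and the 64x64 resolution are illustrative assumptions, not part of the original code:

import jax.numpy as jnp

def pinhole_directions(width: int, height: int, extent: float = 0.5) -> jnp.ndarray:
    # One ray per pixel of a simple pinhole camera looking down +z.
    xs = jnp.linspace(-extent, extent, width)
    ys = jnp.linspace(-extent, extent, height)
    gx, gy = jnp.meshgrid(xs, ys)
    # ray_cast() normalizes directions itself, so unit length is not required here.
    return jnp.stack([gx, gy, jnp.ones_like(gx)], axis=-1).reshape(-1, 3)

# origin = jnp.array([0.0, 0.0, -2.0])
# dirs = pinhole_directions(64, 64)
# image = ray_cast(mesh, origin, dirs, color_fn).reshape(64, 64, 3)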
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def batch_mesh_contains_points(\n ray_origins, # point cloud as origin of rays\n obj_triangles,\n direction=torch.Tensor([0.4395064455, 0.617598629942, 0.652231566745]),\n):\n tol_thresh = 0.0000001\n batch_size = obj_triangles.shape[0]\n triangle_nb = obj_triangles.shape[1]\n point_nb = ray_origins.shape[1]\n\n # Batch dim and triangle dim will flattened together\n batch_points_size = batch_size * triangle_nb\n # Direction is random but shared\n v0, v1, v2 = obj_triangles[:, :, 0], obj_triangles[:, :, 1], obj_triangles[:, :, 2]\n # Get edges\n v0v1 = v1 - v0\n v0v2 = v2 - v0\n\n direction = direction.to(ray_origins.device)\n # Expand needed vectors\n batch_direction = direction.view(1, 1, 3).expand(batch_size, triangle_nb, 3)\n\n # Compute ray/triangle intersections\n pvec = torch.cross(batch_direction, v0v2, dim=2)\n dets = torch.bmm(\n v0v1.view(batch_points_size, 1, 3), pvec.view(batch_points_size, 3, 1)\n ).view(batch_size, triangle_nb)\n\n # Check if ray and triangle are parallel\n parallel = abs(dets) < tol_thresh\n invdet = 1 / (dets + 0.1 * tol_thresh)\n\n # Repeat mesh info as many times as there are rays\n triangle_nb = v0.shape[1]\n v0 = v0.repeat(1, point_nb, 1)\n v0v1 = v0v1.repeat(1, point_nb, 1)\n v0v2 = v0v2.repeat(1, point_nb, 1)\n hand_verts_repeated = (\n ray_origins.view(batch_size, point_nb, 1, 3)\n .repeat(1, 1, triangle_nb, 1)\n .view(ray_origins.shape[0], triangle_nb * point_nb, 3)\n )\n pvec = pvec.repeat(1, point_nb, 1)\n invdet = invdet.repeat(1, point_nb)\n tvec = hand_verts_repeated - v0\n u_val = (\n torch.bmm(\n tvec.view(batch_size * tvec.shape[1], 1, 3),\n pvec.view(batch_size * tvec.shape[1], 3, 1),\n ).view(batch_size, tvec.shape[1])\n * invdet\n )\n # Check ray intersects inside triangle\n u_correct = (u_val > 0) * (u_val < 1)\n qvec = torch.cross(tvec, v0v1, dim=2)\n\n batch_direction = batch_direction.repeat(1, point_nb, 1)\n v_val = (\n torch.bmm(\n batch_direction.view(batch_size * qvec.shape[1], 1, 3),\n qvec.view(batch_size * qvec.shape[1], 3, 1),\n ).view(batch_size, qvec.shape[1])\n * invdet\n )\n v_correct = (v_val > 0) * (u_val + v_val < 1)\n t = (\n torch.bmm(\n v0v2.view(batch_size * qvec.shape[1], 1, 3),\n qvec.view(batch_size * qvec.shape[1], 3, 1),\n ).view(batch_size, qvec.shape[1])\n * invdet\n )\n # Check triangle is in front of ray_origin along ray direction\n t_pos = t >= tol_thresh\n parallel = parallel.repeat(1, point_nb)\n # # Check that all intersection conditions are met\n try:\n not_parallel = 1 - parallel\n except:\n not_parallel = parallel==False\n final_inter = v_correct * u_correct * not_parallel * t_pos\n # Reshape batch point/vertices intersection matrix\n # final_intersections[batch_idx, point_idx, triangle_idx] == 1 means ray\n # intersects triangle\n final_intersections = final_inter.view(batch_size, point_nb, triangle_nb)\n # Check if intersection number accross mesh is odd to determine if point is\n # outside of mesh\n exterior = final_intersections.sum(2) % 2 == 0\n return exterior", "def intersect(self, ray):\n # TODO A5 (Step1) implement this function\n # Copy your implementation from A4\n # Then calculate uv coordinates, to be passed into the Hit initializer\n vs = self.vs\n\n a = vs[0][0] - vs[1][0]\n b = vs[0][1] - vs[1][1]\n c = vs[0][2] - vs[1][2]\n d = vs[0][0] - vs[2][0]\n e = vs[0][1] - vs[2][1]\n f = vs[0][2] - vs[2][2]\n\n ray_dir = ray.direction\n ray_orig = ray.origin\n\n g = ray_dir[0]\n h = ray_dir[1]\n i = ray_dir[2]\n j = vs[0][0] - ray_orig[0]\n k = vs[0][1] - ray_orig[1]\n l = vs[0][2] - 
ray_orig[2]\n\n M = a * (e * i - h * f) + b * (g * f - d * i) + c * (d * h - e * g)\n\n t = -(f * (a * k - j * b) + e * (j * c - a * l) + d *\n (b * l - k * c)) / M\n\n if (t < ray.start or t > ray.end):\n return no_hit\n\n gamma = (i * (a * k - j * b) + h * (j * c - a * l) + g *\n (b * l - k * c)) / M\n\n if (gamma < 0 or gamma > 1):\n return no_hit\n\n beta = (j * (e * i - h * f) + k * (g * f - d * i) +\n l * (d * h - e * g)) / M\n\n if (beta < 0 or beta > 1 - gamma):\n return no_hit\n\n P = ray_orig + t * ray_dir\n\n unit_normal = normalize(np.cross(vs[0] - vs[2], vs[1] - vs[2]))\n\n A = np.linalg.norm(np.cross(vs[1] - vs[0], vs[2] - vs[0])) / 2\n areaA = np.linalg.norm(np.cross(vs[1] - P, vs[2] - P)) / 2\n areaB = np.linalg.norm(np.cross(vs[0] - P, vs[2] - P)) / 2\n areaC = np.linalg.norm(np.cross(vs[0] - P, vs[1] - P)) / 2\n u = areaB / A\n v = areaC / A\n return Hit(t, P, unit_normal, vec([u, v]), self.material)", "def trace(self, ray): # type: (Ray) -> Vector\n hit_object = None\n t = numpy.inf\n\n for scene_object in self.scene.shapes:\n t0 = scene_object.intersect(ray)\n if t0 < t:\n t = t0\n hit_object = scene_object\n\n # if there were no intersections, then return the background colour\n if t == numpy.inf:\n return self.scene.camera.background\n\n hit_point = ray.origin + ray.direction * t\n normal = hit_object.normal(hit_point)\n luminance = 0.0\n\n # perform shading calculations\n for light in self.scene.lights:\n hit_point_to_light = (light.centre - hit_point).normal\n\n #check whether this light contributes to the shading\n in_shadow = False\n for shadower in self.scene.shapes:\n # we don't want to test against itself\n if shadower == hit_object:\n continue\n shadow_ray = Ray(hit_point + normal * 0.0001, hit_point_to_light)\n if shadower.intersect(shadow_ray) < numpy.inf:\n in_shadow = True\n break\n if in_shadow:\n continue\n\n # super simple lambertian lighting model\n luminance += hit_point_to_light.dot(normal) * light.power\n\n # calculate shaded colour - luminance may be over one if there are multiple light sources\n # normally this would be dealt with by HDR and tone mapping but is just clipped\n # in demo ray tracers\n object_colour = hit_object.material.colour * min(luminance, 1.0)\n\n # calculate reflection colour if material has reflectance\n if hit_object.material.reflectance != 0.0 and ray.depth != self.scene.camera.depth:\n reflected_direction = (ray.direction - normal * 2 * (ray.direction.dot(normal))).normal\n # we need to 'translate' the reflection vector away from the hitpoint otherwise\n # we risk intersecting the original hit point again which causes artifacts in the reflection\n reflected_ray = Ray(hit_point + reflected_direction * 0.0001, reflected_direction, ray.depth + 1)\n reflection_colour = self.trace(reflected_ray)\n\n # interpolate shaded colour and reflected colour based on reflectance\n return Vector(*[lerp(object_colour.data[i], reflection_colour.data[i], hit_object.material.reflectance) for i in range(3)])\n\n return object_colour", "def _trace_ray(self, ray, depth=0, max_depth=5):\n\n color = Color()\n\n if depth >= max_depth:\n return color\n\n intersection = self._get_intersection(ray)\n if intersection is None:\n return color\n\n obj, dist = intersection\n intersection_pt = ray.point_at_dist(dist)\n surface_norm = obj.surface_norm(intersection_pt)\n\n # ambient light\n # color += obj.material.color * obj.material.ambient\n\n point_on_plane = ray.origin + dist*ray.direction\n imgx = point_on_plane.x\n imgy = 
np.sqrt(point_on_plane.y*point_on_plane.y + point_on_plane.z*point_on_plane.z)\n\n\n '''\n # Nearest Texel\n int_imgx = int(round(imgx))\n int_imgy = int(round(imgy))\n if int_imgx == 512:\n int_imgx = 511\n if int_imgy == 512:\n int_imgy = 511\n color += Color(img[int_imgx, int_imgy, 0], img[int_imgx, int_imgy, 1], img[int_imgx, int_imgy, 2])\n '''\n\n\n # Bilinearly Interpolated Texel\n ceilx = int(math.ceil(imgx))\n ceily = int(math.ceil(imgy))\n floorx = int(math.floor(imgx))\n floory = int(math.floor(imgy))\n if ceilx >= 512:\n ceilx = 511\n if ceily >= 512:\n ceily = 511\n if floorx >= 512:\n floorx = 511\n if floory >= 512:\n floory = 511\n interpolate_x1 = (ceilx - imgx) * (img[ceilx, ceily]) + (imgx - floorx) * (img[floorx, ceily])\n interpolate_x2 = (ceilx - imgx) * (img[ceilx, floory]) + (imgx - floorx) * (img[floorx, floory])\n interpolate_y = (ceily - imgy) * interpolate_x1 + (imgy - floory) * interpolate_x2\n color += Color(interpolate_y[0], interpolate_y[1], interpolate_y[2])\n # print color\n\n\n '''\n # lambert shading\n for light in self.lights:\n pt_to_light_vec = (light - intersection_pt).normalize()\n pt_to_light_ray = Ray(intersection_pt, pt_to_light_vec)\n if self._get_intersection(pt_to_light_ray) is None:\n lambert_intensity = surface_norm * pt_to_light_vec\n if lambert_intensity > 0:\n color += obj.material.color * obj.material.lambert * \\\n lambert_intensity\n\n \n # specular (reflective) light\n reflected_ray = Ray(\n intersection_pt, ray.direction.reflect(surface_norm).normalize())\n color += self._trace_ray(reflected_ray, depth + 1) * \\\n obj.material.specular\n '''\n return color", "def mesh_ray_collision(mesh: jnp.ndarray, origin: jnp.ndarray, direction: jnp.ndarray):\n collides, positions, distances = jax.vmap(\n lambda t: _triangle_ray_collision(t, origin, direction)\n )(mesh)\n idx = jnp.argmin(jnp.where(collides, distances, jnp.inf))\n return (\n jnp.any(collides),\n positions[idx],\n _triangle_normal(mesh[idx]),\n )", "def intersect(self, ray):\n # TODO A5 (Step3 and Step4) implement this function\n # For step 4, check if uvs and normals are not None (respectively)\n # If so, then interpolate them\n\n # batch_intersect returns t, beta, gamma, i\n posns = self.posns\n uvs = self.uvs\n inds = self.inds\n normals = self.normals\n t, beta, gamma, i = batch_intersect(posns[inds[:, :]], ray)\n if (t == np.inf):\n return no_hit\n vs = posns[inds[i, :]]\n P = ray.origin + t * ray.direction\n\n if (t == np.inf):\n return no_hit\n else:\n\n alpha = 1 - beta - gamma\n\n if uvs is not None:\n\n uv0 = uvs[inds[i][0]]\n uv1 = uvs[inds[i][1]]\n uv2 = uvs[inds[i][2]]\n\n uv = alpha * uv0 + beta * uv1 + gamma * uv2\n\n else:\n\n A = np.linalg.norm(np.cross(vs[1] - vs[0], vs[2] - vs[0])) / 2\n areaA = np.linalg.norm(np.cross(vs[1] - P, vs[2] - P)) / 2\n areaB = np.linalg.norm(np.cross(vs[0] - P, vs[2] - P)) / 2\n areaC = np.linalg.norm(np.cross(vs[0] - P, vs[1] - P)) / 2\n u = areaB / A\n v = areaC / A\n uv = vec([u, v])\n\n if normals is not None:\n\n n0 = normals[inds[i][0]]\n n1 = normals[inds[i][1]]\n n2 = normals[inds[i][2]]\n\n unit_normal = normalize(alpha * n0 + beta * n1 + gamma * n2)\n\n else:\n unit_normal = normalize(np.cross(vs[0] - vs[2], vs[1] - vs[2]))\n\n return Hit(t, P, unit_normal, uv, self.material)", "def render_image(camera, scene, lights, nx, ny):\n # TODO A5 copy implementation from A4\n img = np.zeros((ny, nx, 3), np.float32)\n\n for x in range(0, nx):\n for y in range(0, ny):\n u = (x + 0.5) / nx\n v = (y + 0.5) / ny\n ray = 
camera.generate_ray((u, v))\n hit = scene.intersect(ray)\n img[y][x] = shade(ray, hit, scene, lights)\n\n return img", "def mesh_renderer(\n vertices,\n triangles,\n normals,\n diffuse_colors,\n camera_position,\n camera_lookat,\n camera_up,\n light_positions,\n light_intensities,\n image_width,\n image_height,\n specular_colors=None,\n shininess_coefficients=None,\n ambient_color=None,\n fov_y=40.0,\n near_clip=0.01,\n far_clip=10.0):\n if len(vertices.shape) != 3 or vertices.shape[-1] != 3:\n raise ValueError(\n \"Vertices must have shape [batch_size, vertex_count, 3].\")\n batch_size = vertices.shape[0]\n if len(normals.shape) != 3 or normals.shape[-1] != 3:\n raise ValueError(\n \"Normals must have shape [batch_size, vertex_count, 3].\")\n if len(light_positions.shape) != 3 or light_positions.shape[-1] != 3:\n raise ValueError(\n \"light_positions must have shape [batch_size, light_count, 3].\")\n if len(light_intensities.shape) != 3 or light_intensities.shape[-1] != 3:\n raise ValueError(\n \"light_intensities must have shape [batch_size, light_count, 3].\")\n if len(diffuse_colors.shape) != 3 or diffuse_colors.shape[-1] != 3:\n raise ValueError(\n \"diffuse_colors must have shape [batch_size, vertex_count, 3].\")\n if (ambient_color is not None and\n list(ambient_color.shape) != [batch_size, 3]):\n raise ValueError(\"ambient_color must have shape [batch_size, 3].\")\n if list(camera_position.shape) == [3]:\n camera_position = torch.unsqueeze(camera_position, 0).repeat(batch_size, 1)\n elif list(camera_position.shape) != [batch_size, 3]:\n raise ValueError(\n \"camera_position must have shape [batch_size, 3] or [3].\")\n if list(camera_lookat.shape) == [3]:\n camera_lookat = torch.unsqueeze(camera_lookat, 0).repeat(batch_size, 1)\n elif list(camera_lookat.shape) != [batch_size, 3]:\n raise ValueError(\n \"camera_lookat must have shape [batch_size, 3] or [3].\")\n if list(camera_up.shape) == [3]:\n camera_up = torch.unsqueeze(camera_up, 0).repeat(batch_size, 1)\n elif list(camera_up.shape) != [batch_size, 3]:\n raise ValueError(\"camera_up must have shape [batch_size, 3] or [3].\")\n if isinstance(fov_y, float):\n fov_y = torch.tensor(batch_size * [fov_y], dtype=torch.float32)\n elif len(fov_y.shape) == 0:\n fov_y = torch.unsqueeze(fov_y, 0).repeat(batch_size)\n elif list(fov_y.shape) != [batch_size]:\n raise ValueError(\"fov_y must be a float, a 0D tensor, or a 1D tensor \"\n \"with shape [batch_size].\")\n if isinstance(near_clip, float):\n near_clip = torch.tensor(batch_size * [near_clip], dtype=torch.float32)\n elif len(near_clip.shape) == 0:\n near_clip = torch.unsqueeze(near_clip, 0).repeat(batch_size)\n elif list(near_clip.shape) != [batch_size]:\n raise ValueError(\"near_clip must be a float, a 0D tensor, or a 1D \"\n \"tensor with shape [batch_size].\")\n if isinstance(far_clip, float):\n far_clip = torch.tensor(batch_size * [far_clip], dtype=torch.float32)\n elif len(far_clip.shape) == 0:\n far_clip = torch.unsqueeze(far_clip, 0).repeat(batch_size)\n elif list(far_clip.shape) != [batch_size]:\n raise ValueError(\"far_clip must be a float, a 0D tensor, or a 1D \"\n \"tensor with shape [batch_size].\")\n if specular_colors is not None and shininess_coefficients is None:\n raise ValueError(\n \"Specular colors were supplied without shininess coefficients.\")\n if shininess_coefficients is not None and specular_colors is None:\n raise ValueError(\n \"Shininess coefficients were supplied without specular colors.\")\n if specular_colors is not None:\n # Since a 0D float32 tensor is 
accepted, also accept a float.\n if isinstance(shininess_coefficients, float):\n shininess_coefficients = torch.tensor(\n shininess_coefficients, dtype=torch.float32)\n if len(specular_colors.shape) != 3:\n raise ValueError(\"The specular colors must have shape [batch_size, \"\n \"vertex_count, 3].\")\n if len(shininess_coefficients.shape) > 2:\n raise ValueError(\"The shininess coefficients must have shape at \"\n \"most [batch_size, vertex_count].\")\n # If we don't have per-vertex coefficients, we can just reshape the\n # input shininess to broadcast later, rather than interpolating an\n # additional vertex attribute:\n if len(shininess_coefficients.shape) < 2:\n vertex_attributes = torch.cat(\n [normals, vertices, diffuse_colors, specular_colors], 2)\n else:\n vertex_attributes = torch.cat(\n [\n normals, vertices, diffuse_colors, specular_colors,\n torch.unsqueeze(shininess_coefficients, 2)\n ], 2)\n else:\n vertex_attributes = torch.cat([normals, vertices, diffuse_colors], 2)\n\n camera_matrices = camera_utils.look_at(camera_position, camera_lookat,\n camera_up)\n\n perspective_transforms = camera_utils.perspective(\n image_width / image_height,\n fov_y,\n near_clip,\n far_clip)\n\n clip_space_transforms = torch.matmul(perspective_transforms, camera_matrices)\n\n pixel_attributes = rasterize(\n vertices, vertex_attributes, triangles,\n clip_space_transforms, image_width, image_height,\n [-1] * vertex_attributes.shape[2])\n\n # Extract the interpolated vertex attributes from the pixel buffer and\n # supply them to the shader:\n pixel_normals = torch.nn.functional.normalize(\n pixel_attributes[:, :, :, 0:3], p=2, dim=3)\n pixel_positions = pixel_attributes[:, :, :, 3:6]\n diffuse_colors = pixel_attributes[:, :, :, 6:9]\n if specular_colors is not None:\n specular_colors = pixel_attributes[:, :, :, 9:12]\n # Retrieve the interpolated shininess coefficients if necessary, or just\n # reshape our input for broadcasting:\n if len(shininess_coefficients.shape) == 2:\n shininess_coefficients = pixel_attributes[:, :, :, 12]\n else:\n shininess_coefficients = torch.reshape(\n shininess_coefficients, [-1, 1, 1])\n\n pixel_mask = (diffuse_colors >= 0.0).reduce(dim=3).type(torch.float32)\n\n renders = phong_shader(\n normals=pixel_normals,\n alphas=pixel_mask,\n pixel_positions=pixel_positions,\n light_positions=light_positions,\n light_intensities=light_intensities,\n diffuse_colors=diffuse_colors,\n camera_position=camera_position if specular_colors is not None else None,\n specular_colors=specular_colors,\n shininess_coefficients=shininess_coefficients,\n ambient_color=ambient_color)\n return renders", "def render_dof(scene, camera, HEIGHT=100, WIDTH=100, V_SAMPLES=6, H_SAMPLES=6):\n output = np.zeros((HEIGHT, WIDTH, RGB_CHANNELS), dtype=np.uint8)\n if not scene or scene.is_empty() or not camera or camera.inside(\n scene.objects\n ):\n print(\"Cannot generate an image\")\n return output\n total_samples = H_SAMPLES * V_SAMPLES\n # This is for showing progress %\n iterations = HEIGHT * WIDTH * total_samples\n step_size = np.ceil((iterations * PERCENTAGE_STEP) / 100).astype('int')\n counter = 0\n bar = Bar('Raytracing', max=100 / PERCENTAGE_STEP)\n # This is needed to use it in Git Bash\n bar.check_tty = False\n for j in range(HEIGHT):\n for i in range(WIDTH):\n color = np.array([0, 0, 0], dtype=float)\n lens_sample_offsets = []\n n0 = camera.n0\n n1 = camera.n1\n for n in range(V_SAMPLES):\n for m in range(H_SAMPLES):\n r0, r1 = np.random.random_sample(2)\n ap_sx = camera.lens_params.ap_sx\n 
ap_sy = camera.lens_params.ap_sy\n x_offset = ((r0 - 0.5) * m) / H_SAMPLES * ap_sx\n y_offset = ((r1 - 0.5) * n) / V_SAMPLES * ap_sy\n lens_sample_offsets.append((x_offset, y_offset))\n random_start = np.random.random_integers(0, total_samples - 1)\n for n in range(V_SAMPLES):\n for m in range(H_SAMPLES):\n r0, r1 = np.random.random_sample(2)\n x = i + ((float(m) + r0) / H_SAMPLES)\n y = HEIGHT - 1 - j + ((float(n) + r1) / V_SAMPLES)\n # Get x projected in view coord\n xp = (x / float(WIDTH)) * camera.scale_x\n # Get y projected in view coord\n yp = (y / float(HEIGHT)) * camera.scale_y\n pp = camera.p00 + xp * camera.n0 + yp * camera.n1\n npe = utils.normalize(pp - camera.position)\n sample_idx = n + m * H_SAMPLES - random_start\n x_offset, y_offset = lens_sample_offsets[sample_idx]\n ps = pp + x_offset * n0 + y_offset * n1\n fp = pp + npe * camera.lens_params.f\n director = utils.normalize(fp - ps)\n ray = Ray(ps, director)\n\n color += raytrace(ray, scene) / float(total_samples)\n counter += 1\n if counter % step_size == 0:\n bar.next()\n output[j][i] = color.round().astype(np.uint8)\n bar.finish()\n return output", "def render_mp(scene, camera, height, width):\n output = np.zeros((height, width, RGB_CHANNELS), dtype=np.uint8)\n if not scene or scene.is_empty() or not camera or camera.inside(\n scene.objects\n ):\n print(\"Cannot generate an image\")\n return output\n print(\"Creating rays...\")\n rays = create_rays(camera, height, width)\n pool = mp.Pool(mp.cpu_count())\n print(\"Shooting rays...\")\n ray_colors = pool.map(\n raytrace_mp_wrapper, [(ray, scene) for ray in rays]\n )\n pool.close()\n print(\"Arranging pixels...\")\n for j in range(height):\n for i in range(width):\n output[j][i] = ray_colors[i + j * width]\n return output", "def intersects(self, ray):\n theta = 45\n H = 512\n W = 512\n A = self.origin\n B = Point(W, A.y, A.z)\n C = Point(B.x, (int)(H * math.sin(theta * math.pi / 180)), (int)(H * math.cos(math.pi * theta / 180)))\n D = Point(A.x, (int)(H * math.sin(theta * math.pi / 180)), (int)(H * math.cos(math.pi * theta / 180)))\n vec3 = ray.direction * self.normal\n if vec3 != 0:\n vec1 = self.origin - ray.origin\n vec2 = vec1 * self.normal\n dist = vec2 / vec3\n if dist > 0:\n point_on_plane = ray.origin + dist * ray.direction\n if A.x <= point_on_plane.x <= B.x and A.y <= point_on_plane.y <= D.y and B.z <= point_on_plane.z <= C.z:\n #print A, B, C, D, point_on_plane\n return dist", "def intersect(self, ray):\n # TODO A5 (Step1) implement this function\n # Copy your implementation from A4\n # Then calculate uv coordinates, to be passed into the Hit initializer\n D = ray.direction\n E = ray.origin\n C = self.center\n R = self.radius\n B = 2*np.dot(D, E-C)\n A = np.dot(D, D)\n min_t = ray.start\n max_t = ray.end\n\n discriminant = B ** 2 - 4 * A * (np.dot(E-C, E-C)-R**2)\n\n if discriminant < 0:\n return no_hit\n\n t0 = (-1*B - np.sqrt(discriminant)) / (2*A)\n t1 = (-1*B + np.sqrt(discriminant)) / (2*A)\n\n if (t0 >= min_t and t0 <= max_t and t0 <= t1):\n t = t0\n elif (t1 >= min_t and t1 <= max_t):\n t = t1\n else:\n return no_hit\n\n P = E + t * D\n unit_normal = (P - C) / R\n d_hat = normalize(P - C)\n u = 0.5 + (np.arctan2(d_hat[0], d_hat[2])) / (2 * np.pi)\n v = 0.5 + (np.arcsin(d_hat[1])) / np.pi\n\n return Hit(t, P, unit_normal, vec([u, v]), self.material)", "def testComplexShading(self):\n\n model_transforms = camera_utils.euler_matrices(\n [[-20.0, 0.0, 60.0], [45.0, 60.0, 0.0]])[:, :3, :3]\n\n vertices_world_space = torch.matmul(\n 
torch.stack([self.cube_vertices, self.cube_vertices]),\n model_transforms.transpose())\n\n normals_world_space = torch.matmul(\n torch.stack([self.cube_normals, self.cube_normals]),\n model_transforms.transpose())\n\n # camera position:\n eye = torch.tensor([[0.0, 0.0, 6.0], [0.0, 0.2, 18.0]], dtype=torch.float32)\n center = torch.tensor([[0.0, 0.0, 0.0], [0.1, -0.1, 0.1]], dtype=torch.float32)\n world_up = torch.constant([[0.0, 1.0, 0.0], [0.1, 1.0, 0.15]], dtype=torch.float32)\n fov_y = torch.tensor([40.0, 13.3], dtype=torch.float32)\n near_clip = 0.1\n far_clip = 25.0\n image_width = 640\n image_height = 480\n light_positions = torch.tensor([[[0.0, 0.0, 6.0], [1.0, 2.0, 6.0]],\n [[0.0, -2.0, 4.0], [1.0, 3.0, 4.0]]])\n light_intensities = torch.tensor(\n [[[1.0, 1.0, 1.0], [1.0, 1.0, 1.0]],\n [[2.0, 0.0, 1.0], [0.0, 2.0, 1.0]]],\n dtype=torch.float32)\n vertex_diffuse_colors = torch.tensor(2*[[[1.0, 0.0, 0.0],\n [0.0, 1.0, 0.0],\n [0.0, 0.0, 1.0],\n [1.0, 1.0, 1.0],\n [1.0, 1.0, 0.0],\n [1.0, 0.0, 1.0],\n [0.0, 1.0, 1.0],\n [0.5, 0.5, 0.5]]],\n dtype=torch.float32)\n vertex_specular_colors = torch.tensor(2*[[[0.0, 1.0, 0.0],\n [0.0, 0.0, 1.0],\n [1.0, 1.0, 1.0],\n [1.0, 1.0, 0.0],\n [1.0, 0.0, 1.0],\n [0.0, 1.0, 1.0],\n [0.5, 0.5, 0.5],\n [1.0, 0.0, 0.0]]],\n dtype=torch.float32)\n shininess_coefficients = 6.0 * torch.ones([2, 8], dtype=torch.float32)\n ambient_color = torch.tensor([[0.0, 0.0, 0.0], [0.1, 0.1, 0.2]], dtype=torch.float32)\n renders = mesh_renderer.mesh_renderer(\n vertices_world_space,\n self.cube_triangles,\n normals_world_space,\n vertex_diffuse_colors,\n eye,\n center,\n world_up,\n light_positions,\n light_intensities,\n image_width,\n image_height,\n vertex_specular_colors,\n shininess_coefficients,\n ambient_color,\n fov_y,\n near_clip,\n far_clip)\n tonemapped_renders = torch.cat([\n mesh_renderer.tone_mapper(renders[:, :, :, 0:3], 0.7),\n renders[:, :, :, 3:4]\n ],\n dim=3)\n\n # Check that shininess coefficient broadcasting works by also rendering\n # with a scalar shininess coefficient, and ensuring the result is identical:\n broadcasted_renders = mesh_renderer.mesh_renderer(\n vertices_world_space,\n self.cube_triangles,\n normals_world_space,\n vertex_diffuse_colors,\n eye,\n center,\n world_up,\n light_positions,\n light_intensities,\n image_width,\n image_height,\n vertex_specular_colors,\n 6.0,\n ambient_color,\n fov_y,\n near_clip,\n far_clip)\n tonemapped_broadcasted_renders = torch.cat([\n mesh_renderer.tone_mapper(broadcasted_renders[:, :, :, 0:3], 0.7),\n broadcasted_renders[:, :, :, 3:4]\n ],\n dim=3)\n\n for image_id in range(renders.shape[0]):\n target_image_name = \"Colored_Cube_%i.png\" % image_id\n baseline_image_path = os.path.join(self.test_data_directory,\n target_image_name)\n test_utils.expect_image_file_and_render_are_near(\n self, baseline_image_path, tonemapped_renders[image_id, :, :, :])\n test_utils.expect_image_file_and_render_are_near(\n self, baseline_image_path, tonemapped_broadcasted_renders[image_id, :, :, :])", "def render_mp(scene, camera, height, width, rgb=False):\n output = np.zeros((height, width, RGB_CHANNELS), dtype=np.uint8)\n if not scene or not scene.objects or not camera or camera.inside(\n scene.objects\n ):\n print(\"Cannot generate an image\")\n return output\n print(\"Creating rays...\")\n rays = create_rays(camera, height, width)\n pool = mp.Pool(mp.cpu_count())\n print(\"Shooting rays...\")\n ray_colors = pool.map(\n raytrace_mp_wrapper, [(ray, scene, rgb) for ray in rays]\n )\n pool.close()\n print(\"Arranging 
pixels...\")\n for j in range(height):\n for i in range(width):\n output[j][i] = ray_colors[i + j * width]\n return output", "def intersect(self, ray):\n\n t = None\n hit = None\n angle = ray.dir.dot(self.norm)\n if angle != 0:\n t = (self.point - ray.start).dot(self.norm) / angle\n if angle < 0:\n hit = Hit(self, ray, t, float('inf'), self.norm, self.mat)\n else:\n hit = Hit(self, ray, float('-inf'), t, self.norm, self.mat)\n else:\n vector = unit(ray.start - self.point)\n if vector.dot(self.norm) < 0:\n hit = Hit(self, ray, float('-inf'), float('inf'), self.norm, self.mat)\n else:\n return None\n if (self.mat.texture is not None and not isninf(hit.entry)) > 0:\n hit.texCords = self.texCords(ray.pos(t))\n return hit", "def main(context, event):\n # get the context arguments\n scene = context.scene\n region = context.region\n rv3d = context.region_data\n coord = event.mouse_region_x, event.mouse_region_y\n\n # get the ray from the viewport and mouse\n view_vector = view3d_utils.region_2d_to_vector_3d(region, rv3d, coord)\n ray_origin = view3d_utils.region_2d_to_origin_3d(region, rv3d, coord)\n\n ray_target = ray_origin + view_vector\n\n def visible_objects_and_duplis():\n \"\"\"Loop over (object, matrix) pairs (mesh only)\"\"\"\n\n for obj in context.visible_objects:\n if obj.type == 'MESH':\n yield (obj, obj.matrix_world.copy())\n\n if obj.instance_type != 'NONE':\n obj.dupli_list_create(scene)\n for dob in obj.dupli_list:\n obj_dupli = dob.object\n if obj_dupli.type == 'MESH':\n yield (obj_dupli, dob.matrix.copy())\n\n obj.dupli_list_clear()\n\n def obj_ray_cast(obj, matrix):\n \"\"\"Wrapper for ray casting that moves the ray into object space\"\"\"\n\n # get the ray relative to the object\n matrix_inv = matrix.inverted()\n ray_origin_obj = matrix_inv @ ray_origin\n ray_target_obj = matrix_inv @ ray_target\n ray_direction_obj = ray_target_obj - ray_origin_obj\n\n # cast the ray\n success, location, normal, face_index = obj.ray_cast(ray_origin_obj, ray_direction_obj)\n\n if success:\n return location, normal, face_index\n else:\n return None, None, None\n\n # cast rays and find the closest object\n best_length_squared = -1.0\n best_obj = None\n\n for obj, matrix in visible_objects_and_duplis():\n if obj.type == 'MESH':\n hit, normal, face_index = obj_ray_cast(obj, matrix)\n if hit is not None:\n hit_world = matrix @ hit\n scene.cursor_location = hit_world\n length_squared = (hit_world - ray_origin).length_squared\n if best_obj is None or length_squared < best_length_squared:\n best_length_squared = length_squared\n best_obj = obj\n\n # now we have the object under the mouse cursor,\n # we could do lots of stuff but for the example just select.\n if best_obj is not None:\n best_obj.select_set(True)\n context.view_layer.objects.active = best_obj", "def imshow_mesh_3d(img, vertices, faces, camera_center, focal_length, colors=(76, 76, 204)):\n H, W, C = img.shape\n if not has_pyrender:\n warnings.warn('pyrender package is not installed.')\n return img\n if not has_trimesh:\n warnings.warn('trimesh package is not installed.')\n return img\n try:\n renderer = pyrender.OffscreenRenderer(viewport_width=W, viewport_height=H)\n except (ImportError, RuntimeError):\n warnings.warn('pyrender package is not installed correctly.')\n return img\n if not isinstance(colors, list):\n colors = [colors for _ in range(len(vertices))]\n colors = [color_val(c) for c in colors]\n depth_map = np.ones([H, W]) * np.inf\n output_img = img\n for idx in range(len(vertices)):\n color = colors[idx]\n color = [(c / 255.0) 
for c in color]\n color.append(1.0)\n vert = vertices[idx]\n face = faces[idx]\n material = pyrender.MetallicRoughnessMaterial(metallicFactor=0.2, alphaMode='OPAQUE', baseColorFactor=color)\n mesh = trimesh.Trimesh(vert, face)\n rot = trimesh.transformations.rotation_matrix(np.radians(180), [1, 0, 0])\n mesh.apply_transform(rot)\n mesh = pyrender.Mesh.from_trimesh(mesh, material=material)\n scene = pyrender.Scene(ambient_light=(0.5, 0.5, 0.5))\n scene.add(mesh, 'mesh')\n camera_pose = np.eye(4)\n camera = pyrender.IntrinsicsCamera(fx=focal_length[0], fy=focal_length[1], cx=camera_center[0], cy=camera_center[1], zfar=100000.0)\n scene.add(camera, pose=camera_pose)\n light = pyrender.DirectionalLight(color=[1.0, 1.0, 1.0], intensity=1)\n light_pose = np.eye(4)\n light_pose[:3, 3] = np.array([0, -1, 1])\n scene.add(light, pose=light_pose)\n light_pose[:3, 3] = np.array([0, 1, 1])\n scene.add(light, pose=light_pose)\n light_pose[:3, 3] = np.array([1, 1, 2])\n scene.add(light, pose=light_pose)\n color, rend_depth = renderer.render(scene, flags=pyrender.RenderFlags.RGBA)\n valid_mask = (rend_depth < depth_map) * (rend_depth > 0)\n depth_map[valid_mask] = rend_depth[valid_mask]\n valid_mask = valid_mask[:, :, None]\n output_img = valid_mask * color[:, :, :3] + (1 - valid_mask) * output_img\n return output_img", "def render(scene, camera, HEIGHT=100, WIDTH=100):\n output = np.zeros((HEIGHT, WIDTH, RGB_CHANNELS), dtype=np.uint8)\n if not scene or scene.is_empty() or not camera or camera.inside(\n scene.objects\n ):\n print(\"Cannot generate an image\")\n return output\n # This is for showing progress %\n iterations = HEIGHT * WIDTH\n step_size = np.ceil((iterations * PERCENTAGE_STEP) / 100).astype('int')\n counter = 0\n bar = Bar('Raytracing', max=100 / PERCENTAGE_STEP)\n # This is needed to use it in Git Bash\n bar.check_tty = False\n for j in range(HEIGHT):\n for i in range(WIDTH):\n x = i\n y = HEIGHT - 1 - j\n # Get x projected in view coord\n xp = (x / float(WIDTH)) * camera.scale_x\n # Get y projected in view coord\n yp = (y / float(HEIGHT)) * camera.scale_y\n pp = camera.p00 + xp * camera.n0 + yp * camera.n1\n npe = utils.normalize(pp - camera.position)\n ray = Ray(pp, npe)\n color = raytrace(ray, scene)\n output[j][i] = color.round().astype(np.uint8)\n counter += 1\n if counter % step_size == 0:\n bar.next()\n bar.finish()\n return output", "def Sphere_ExactSerendipityLagrangeQuad():\n\n mesh = Sphere_CubeToSerendipityLagrangeQuad(1)\n \n ################\n # Modifications for exact sphere\n ################\n # x=+1 side\n def posXvals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.ones(xi1.shape);yb=np.array(-xi1);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n x = xb*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n y = yb*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n z = zb*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n return np.vstack([x,y,z])\n def posXnormals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.ones(xi1.shape);yb=np.array(-xi1);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = -1.0 * xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dxdxi2 = 1.0 * xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-zb+2.0*yy*zb/3.0)\n dydxi1 = -1.0 * np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dydxi2 = 1.0 * yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-zb+2.0*xx*zb/3.0)\n dzdxi1 = -1.0 * zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * 
(-yb+2.0*xx*yb/3.0)\n dzdxi2 = 1.0 * np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n magnitude = np.sqrt(np.sum(J**2,axis=1))\n return J.T/magnitude\n def posXJ(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.ones(xi1.shape);yb=np.array(-xi1);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = -1.0 * xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dxdxi2 = 1.0 * xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-zb+2.0*yy*zb/3.0)\n dydxi1 = -1.0 * np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dydxi2 = 1.0 * yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-zb+2.0*xx*zb/3.0)\n dzdxi1 = -1.0 * zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n dzdxi2 = 1.0 * np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n return np.sqrt(np.sum(J**2,axis=1))\n mesh.eList[0].vals = posXvals\n mesh.eList[0].normals = posXnormals\n mesh.eList[0].J = posXJ\n \n def posYvals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.ones(xi1.shape);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n x = xb*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n y = yb*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n z = zb*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n return np.vstack([x,y,z])\n def posYnormals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.ones(xi1.shape);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-zb+2.0*yy*zb/3.0)\n dydxi1 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-xb+2.0*xb*zz/3.0)\n dydxi2 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-zb+2.0*xx*zb/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-xb+2.0*xb*yy/3.0)\n dzdxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n magnitude = np.sqrt(np.sum(J**2,axis=1))\n return J.T/magnitude\n def posYJ(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.ones(xi1.shape);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-zb+2.0*yy*zb/3.0)\n dydxi1 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-xb+2.0*xb*zz/3.0)\n dydxi2 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-zb+2.0*xx*zb/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-xb+2.0*xb*yy/3.0)\n dzdxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n return np.sqrt(np.sum(J**2,axis=1))\n mesh.eList[1].vals = posYvals\n mesh.eList[1].normals = posYnormals\n mesh.eList[1].J = posYJ\n \n # x=-1 side\n def negXvals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=-np.ones(xi1.shape);yb=np.array(xi1);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n x = xb*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n y = 
yb*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n z = zb*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n return np.vstack([x,y,z])\n def negXnormals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=-np.ones(xi1.shape);yb=np.array(xi1);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dxdxi2 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-zb+2.0*yy*zb/3.0)\n dydxi1 = 1.0*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dydxi2 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-zb+2.0*xx*zb/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n dzdxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n magnitude = np.sqrt(np.sum(J**2,axis=1))\n return J.T/magnitude\n def negXJ(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=-np.ones(xi1.shape);yb=np.array(xi1);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dxdxi2 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-zb+2.0*yy*zb/3.0)\n dydxi1 = 1.0*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dydxi2 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-zb+2.0*xx*zb/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n dzdxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n return np.sqrt(np.sum(J**2,axis=1))\n mesh.eList[2].vals = negXvals\n mesh.eList[2].normals = negXnormals\n mesh.eList[2].J = negXJ\n\n # y=-1 side\n def negYvals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(-xi1);yb=-np.ones(xi1.shape);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2;\n x = xb*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n y = yb*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n z = zb*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n return np.vstack([x,y,z])\n def negYnormals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(-xi1);yb=-np.ones(xi1.shape);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2; \n dxdxi1 = -1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = 1.0*0.5*xb*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5*(-zb+2.0*yy*zb/3.0)\n dydxi1 = -1.0*0.5*yb*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5*(-xb+2.0*xb*zz/3.0)\n dydxi2 = 1.0*0.5*yb*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5*(-zb+2.0*xx*zb/3.0)\n dzdxi1 = -1.0*0.5*zb*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5*(-xb+2.0*xb*yy/3.0)\n dzdxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0) \n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n magnitude = np.sqrt(np.sum(J**2,axis=1))\n return J.T/magnitude\n def negYJ(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(-xi1);yb=-np.ones(xi1.shape);zb=np.array(xi2);\n xx=xb**2;yy=yb**2;zz=zb**2; \n dxdxi1 = -1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = 1.0*0.5*xb*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5*(-zb+2.0*yy*zb/3.0)\n dydxi1 = -1.0*0.5*yb*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5*(-xb+2.0*xb*zz/3.0)\n dydxi2 = 1.0*0.5*yb*(1.0 - 
xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5*(-zb+2.0*xx*zb/3.0)\n dzdxi1 = -1.0*0.5*zb*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5*(-xb+2.0*xb*yy/3.0)\n dzdxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0) \n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n return np.sqrt(np.sum(J**2,axis=1))\n mesh.eList[3].vals = negYvals\n mesh.eList[3].normals = negYnormals\n mesh.eList[3].J = negYJ\n \n # z=+1 side\n def posZvals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1)\n yb=np.array(-xi2)\n zb=np.ones(xi1.shape)\n xx=xb**2;yy=yb**2;zz=zb**2;\n x = xb*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n y = yb*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n z = zb*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n return np.vstack([x,y,z])\n def posZnormals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.array(-xi2);zb=np.ones(xi1.shape);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = -1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dydxi1 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-xb+2.0*xb*zz/3.0)\n dydxi2 = -1.0*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-xb+2.0*xb*yy/3.0)\n dzdxi2 = -1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n magnitude = np.sqrt(np.sum(J**2,axis=1))\n return J.T/magnitude\n def posZJ(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.array(-xi2);zb=np.ones(xi1.shape);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = -1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dydxi1 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-xb+2.0*xb*zz/3.0)\n dydxi2 = -1.0*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-xb+2.0*xb*yy/3.0)\n dzdxi2 = -1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n return np.sqrt(np.sum(J**2,axis=1))\n mesh.eList[4].vals = posZvals\n mesh.eList[4].normals = posZnormals\n mesh.eList[4].J = posZJ\n \n # z=-1 side\n def negZvals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.array(xi2);zb=-np.ones(xi1.shape);\n xx=xb**2;yy=yb**2;zz=zb**2;\n x = xb*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n y = yb*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n z = zb*np.sqrt(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)\n return np.vstack([x,y,z])\n def negZnormals(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.array(xi2);zb=-np.ones(xi1.shape);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dydxi1 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-xb+2.0*xb*zz/3.0)\n dydxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * 
(-xb+2.0*xb*yy/3.0)\n dzdxi2 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n magnitude = np.sqrt(np.sum(J**2,axis=1))\n return J.T/magnitude\n def negZJ(xi1,xi2):\n xi1=np.asarray(xi1,np.float).reshape(-1,)\n xi2=np.asarray(xi2,np.float).reshape(-1,)\n xb=np.array(xi1);yb=np.array(xi2);zb=-np.ones(xi1.shape);\n xx=xb**2;yy=yb**2;zz=zb**2;\n dxdxi1 = 1.0*np.sqrt(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)\n dxdxi2 = 1.0*xb*0.5*(1.0 - yy/2.0 - zz/2.0 + yy*zz/3.0)**-0.5 * (-yb+2.0*yb*zz/3.0)\n dydxi1 = 1.0*yb*0.5*(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)**-0.5 * (-xb+2.0*xb*zz/3.0)\n dydxi2 = 1.0*np.sqrt(1.0 - xx/2.0 - zz/2.0 + xx*zz/3.0)\n dzdxi1 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-xb+2.0*xb*yy/3.0)\n dzdxi2 = 1.0*zb*0.5*(1.0 - xx/2.0 - yy/2.0 + xx*yy/3.0)**-0.5 * (-yb+2.0*xx*yb/3.0)\n J = np.array([[dxdxi1,dxdxi2],[dydxi1,dydxi2],[dzdxi1,dzdxi2]]).T\n J = np.cross(J[:,0,:],J[:,1,:])\n return np.sqrt(np.sum(J**2,axis=1))\n mesh.eList[5].vals = negZvals\n mesh.eList[5].normals = negZnormals\n mesh.eList[5].J = negZJ\n \n for e in mesh.eList:\n e.ExactElement = True\n \n return mesh", "def render_aa(scene, camera, HEIGHT=100, WIDTH=100, V_SAMPLES=4, H_SAMPLES=4):\n output = np.zeros((HEIGHT, WIDTH, RGB_CHANNELS), dtype=np.uint8)\n if not scene or scene.is_empty() or not camera or camera.inside(\n scene.objects\n ):\n print(\"Cannot generate an image\")\n return output\n total_samples = H_SAMPLES * V_SAMPLES\n # This is for showing progress %\n iterations = HEIGHT * WIDTH * total_samples\n step_size = np.ceil((iterations * PERCENTAGE_STEP) / 100).astype('int')\n counter = 0\n bar = Bar('Raytracing', max=100 / PERCENTAGE_STEP)\n # This is needed to use it in Git Bash\n bar.check_tty = False\n for j in range(HEIGHT):\n for i in range(WIDTH):\n color = np.array([0, 0, 0], dtype=float)\n for n in range(V_SAMPLES):\n for m in range(H_SAMPLES):\n r0, r1 = np.random.random_sample(2)\n # Floats x, y inside the image plane grid\n x = i + ((float(m) + r0) / H_SAMPLES)\n y = HEIGHT - 1 - j + ((float(n) + r1) / V_SAMPLES)\n # Get x projected in view coord\n xp = (x / float(WIDTH)) * camera.scale_x\n # Get y projected in view coord\n yp = (y / float(HEIGHT)) * camera.scale_y\n pp = camera.p00 + xp * camera.n0 + yp * camera.n1\n npe = utils.normalize(pp - camera.position)\n ray = Ray(pp, npe)\n\n color += raytrace(ray, scene) / float(total_samples)\n counter += 1\n if counter % step_size == 0:\n bar.next()\n output[j][i] = color.round().astype(np.uint8)\n bar.finish()\n return output", "def render(scene, camera, height, width, rgb=False):\n output = np.zeros((height, width, RGB_CHANNELS), dtype=np.uint8)\n if not scene or not scene.objects or not camera or camera.inside(\n scene.objects\n ):\n print(\"Cannot generate an image\")\n return output\n # This is for showing progress %\n iterations = height * width\n step_size = np.ceil((iterations * PERCENTAGE_STEP) / 100).astype('int')\n counter = 0\n bar = Bar('Raytracing', max=100 / PERCENTAGE_STEP, suffix='%(percent)d%%')\n for j in range(height):\n for i in range(width):\n x = i\n y = height - 1 - j\n # Get x projected in view coord\n xp = (x / float(width)) * camera.scale_x\n # Get y projected in view coord\n yp = (y / float(height)) * camera.scale_y\n pp = camera.p00 + xp * camera.n0 + yp * camera.n1\n npe = utils.normalize(pp - camera.position)\n ray = Ray(pp, npe)\n color = raytrace(ray, scene, rgb)\n output[j][i] = 
color\n counter += 1\n if counter % step_size == 0:\n bar.next()\n bar.finish()\n return output", "def illuminate(self, ray, hit, scene):\n # TODO A5 copy implementation from A4 and modify\n # material parameters need to be looked up by the uv's at the intersection point\n l = self.position - hit.point\n epsilon = 0.000001\n point = hit.point + l*epsilon\n shadow_ray = Ray(point, l, epsilon, 1)\n\n if (scene.intersect(shadow_ray).t > 1):\n\n # diffuse shading\n intensity = self.intensity\n position = self.position\n normal = hit.normal\n dist_to_source = np.linalg.norm(hit.point - position)\n diffuse_coeff = hit.material.lookup(hit.material.k_d, hit)\n v = (-1) * normalize(ray.direction)\n light_ray = normalize(position - hit.point)\n specular_coeff = hit.material.lookup(hit.material.k_s, hit)\n p = hit.material.lookup(hit.material.p, hit)\n\n # diffuse shading\n # diffuse_output = diffuse_coeff * (np.maximum(0, np.dot(normal, light_ray)) / (dist_to_source ** 2)) * intensity\n # specular shading\n shade_ray = Ray(hit.point, light_ray, epsilon)\n if (scene.intersect(shade_ray).t == np.inf):\n h = (v + light_ray) / np.linalg.norm(v + light_ray)\n specular_output = (diffuse_coeff + specular_coeff * ((np.dot(normal, h)) ** p)) * (\n np.maximum(0, np.dot(normal, light_ray)) / (dist_to_source ** 2)) * intensity\n return specular_output\n\n return vec([0, 0, 0])", "def pick(self, start, direction, mat):\n new_mat = np.dot(\n np.dot(mat, self.translation_matrix),\n np.linalg.inv(self.scaling_matrix)\n )\n\n results = self.aabb.ray_hit(start, direction, mat)\n return results", "def surfaceRender(nodal_mesh, focus, ax=None):\n\t# If no axes were passed, generate new set of axes\n\tif not ax:\n\t\tfig = mplt.figure()\n\t\tax = fig.add_subplot(111, projection='3d')\n\n\t# Sort the mesh by first 3 columns\n\tnodal_mesh = nodal_mesh[nodal_mesh[:, 0].argsort()]\n\tnodal_mesh = nodal_mesh[nodal_mesh[:, 1].argsort(kind='mergesort')]\n\tnodal_mesh = nodal_mesh[nodal_mesh[:, 2].argsort(kind='mergesort')]\n\t\n\t# Set up number of divisions and calculate e for each division (as a ratio)\n\tnum_div = 20\n\te = [i/num_div for i in range(num_div + 1)]\n\t# Convert angular values from degrees to radians\n\trads = math.pi/180\n\tnodal_mesh[:, 1:3] *= rads\n\t# Store the shapes and sizes of the mesh values\n\tm = nodal_mesh.shape[0]\n\tsize_nodal_nu = np.where(nodal_mesh[:, 2] == 0)[0].size\n\tsize_nodal_phi = m/size_nodal_nu\n\t# Get the mu and theta values from the mesh\n\tnodal_nu = nodal_mesh[:size_nodal_nu, 1]\n\tnodal_phi = nodal_mesh[::size_nodal_nu, 2]\n\t# Convert apex node from prolate to cartesian, then plot with scatter\n\tif min(nodal_nu) == 0:\n\t\tx, y, z = mathhelper.prolate2cart(nodal_mesh[0, 0], nodal_mesh[0, 1], nodal_mesh[0, 2], focus)\n\t\tax.scatter(z, y, -x)\n\t\tstart_nu = 1\n\telse:\n\t\tstart_nu = 0\n\t# Plot circumferential element boundaries\n\tfor i in range(start_nu, size_nodal_nu):\n\t\tfor j in range(int(size_nodal_phi)):\n\t\t\t# Define nodal values for interpolation\n\t\t\tif j == size_nodal_phi-1:\n\t\t\t\tind0 = i\n\t\t\t\tp0 = 2*math.pi\n\t\t\telse:\n\t\t\t\tind0 = (j+1)*size_nodal_nu + i\n\t\t\t\tp0 = nodal_phi[j+1]\n\t\t\tind1 = (j)*size_nodal_nu + i\n\t\t\tp1 = nodal_phi[j]\n\t\t\t# Get mu and dM/dm1\n\t\t\tm0 = nodal_mesh[ind0, 0]\n\t\t\tdm0 = nodal_mesh[ind0, 3]\n\t\t\tm1 = nodal_mesh[ind1, 0]\n\t\t\tdm1 = nodal_mesh[ind1, 3]\n\t\t\t# Convert to cartesian\n\t\t\tn0x, n0y, n0z = mathhelper.prolate2cart(nodal_mesh[ind0, 0], nodal_mesh[ind0, 1], nodal_mesh[ind0, 2], 
focus)\n\t\t\t# Plot the node\n\t\t\tax.scatter(n0z, n0y, -n0x)\n\t\t\t# Plot the arc segments\n\t\t\tfor k in range(2, len(e)):\n\t\t\t\t# Determine starting point to use\n\t\t\t\tif k == 2:\n\t\t\t\t\tpt_x, pt_y, pt_z = n0x, n0y, n0z\n\t\t\t\telse:\n\t\t\t\t\tpt_x, pt_y, pt_z = x_here, y_here, z_here\n\t\t\t\t# Get lambda\n\t\t\t\thm0 = 1 - 3*(e[k]**2) + 2*(e[k]**3)\n\t\t\t\thdm0 = e[k]*(e[k] - 1)**2\n\t\t\t\thm1 = (e[k]**2)*(3 - 2*e[k])\n\t\t\t\thdm1 = (e[k]**2)*(e[k] - 1)\n\t\t\t\tm = hm0 * m0 + hdm0 * dm0 + hm1 * m1 + hdm1 * dm1\n\t\t\t\t# Get theta\n\t\t\t\tp_here = p0 - e[k]*(p0 - p1)\n\t\t\t\t# Convert to cartesian\n\t\t\t\tx_here, y_here, z_here = mathhelper.prolate2cart(m, nodal_nu[i], p_here, focus)\n\t\t\t\t# Create vectors\n\t\t\t\tx = np.append(pt_x, x_here)\n\t\t\t\ty = np.append(pt_y, y_here)\n\t\t\t\tz = np.append(pt_z, z_here)\n\t\t\t\t# Plot segments\n\t\t\t\tax.plot(z, y, -x, 'k-.')\n\t# Plot longitudinal element boundaries\n\tfor i in range(int(size_nodal_phi)):\n\t\tfor j in range(size_nodal_nu-1):\n\t\t\t# Define nodal values needeed for interpolation\n\t\t\tind0 = i*size_nodal_nu + j\n\t\t\tind1 = ind0 + 1\n\t\t\tn0 = nodal_nu[j]\n\t\t\tn1 = nodal_nu[j+1]\n\t\t\t# Get lambda and dL/de2\n\t\t\tm0 = nodal_mesh[ind0, 0]\n\t\t\tdm0 = nodal_mesh[ind0, 4]\n\t\t\tm1 = nodal_mesh[ind1, 0]\n\t\t\tdm1 = nodal_mesh[ind1, 4]\n\t\t\t# Convert nodal points to cartesian\n\t\t\tn0x, n0y, n0z = mathhelper.prolate2cart(nodal_mesh[ind0, 0], nodal_mesh[ind0, 1], nodal_mesh[ind0, 2], focus)\n\t\t\t# Plot arc\n\t\t\tfor k in range(2, len(e)):\n\t\t\t\t# Determine point to use\n\t\t\t\tif k == 2:\n\t\t\t\t\tpt_x, pt_y, pt_z = n0x, n0y, n0z\n\t\t\t\telse:\n\t\t\t\t\tpt_x, pt_y, pt_z = x_here, y_here, z_here\n\t\t\t\t# Get lambda\n\t\t\t\thm0 = 1 - 3*(e[k]**2) + 2*(e[k]**3)\n\t\t\t\thdm0 = e[k]*(e[k] - 1)**2\n\t\t\t\thm1 = (e[k]**2)*(3 - 2*e[k])\n\t\t\t\thdm1 = (e[k]**2)*(e[k] - 1)\n\t\t\t\tm = hm0 * m0 + hdm0 * dm0 + hm1 * m1 + hdm1 * dm1\n\t\t\t\t# Get nu\n\t\t\t\tn_here = n0 + e[k]*(n1-n0)\n\t\t\t\t# Convert to cartesian\n\t\t\t\tx_here, y_here, z_here = mathhelper.prolate2cart(m, n_here, nodal_phi[i], focus)\n\t\t\t\t# Append the vectors for plotting\n\t\t\t\tx = np.append(pt_x, x_here)\n\t\t\t\ty = np.append(pt_y, y_here)\n\t\t\t\tz = np.append(pt_z, z_here)\n\t\t\t\t# Plot the segment\n\t\t\t\tax.plot(z, y, -x, 'k-.')\n\t\t\t\t\n\treturn(ax)", "def rayIntersection(self, ray):\n\n rotVect = ray.mDirection #math3d.VectorN(math.cos(num), - math.sin(num), 0)\n\n # this give all the lines (red green and blue at the moment)\n tankPos = math3d.VectorN(ray.mOrigin[0], ray.mOrigin[1], 0)\n linkPos = math3d.VectorN(200,200,0)\n v = linkPos - tankPos\n added = (tankPos + getPara(v, rotVect) + getPerp(v, rotVect))\n added2 = tankPos + getPara(v, rotVect) #If the magnitude of this is minus the sphere origin is less than the radius you're in the sphere\n added3 = tankPos + getPerp(v, rotVect)\n added4 = tankPos + rotVect.normalized() * 200 #this is get point only change 200 to dist\n\n\n test = added2 - self.mCenter #checks if in center\n\n\n if test.magnitude() <= self.mRadius:\n green = added2 - ray.mOrigin #this is Qpara\n thing = (self.mSRadius - test.magnitude()**2) ** 0.5\n t = (green.magnitude() - thing)\n print(green.magnitude() - thing)\n return t\n else:\n return None\n\n #print(test.magnitude(), self.mRadius)\n #print(green.magnitude(), \"green\")", "def intersectsRay(self, ray):\n pass", "def render(self):\n\n pixels = [\n [Color() for _ in range(self.width)] for _ in 
range(self.height)]\n\n for y in range(self.height):\n for x in range(self.width):\n ray_direction = Point(x, y) - self.camera\n ray = Ray(self.camera, ray_direction)\n pixels[y][x] = self._trace_ray(ray)\n\n return pixels", "def intersect(self, rays): \n result = {}\n \n if bool(self._merged):\n result[\"x\"], result[\"y\"], result[\"z\"], result[\"valid\"], result[\"ray_u\"], \\\n result[\"trig_u\"], result[\"trig_v\"], result[\"gather_ray\"], \\\n result[\"gather_trig\"] = self._intersection(\n rays[\"x_start\"],\n rays[\"y_start\"],\n rays[\"z_start\"],\n rays[\"x_end\"],\n rays[\"y_end\"],\n rays[\"z_end\"],\n self._merged[\"xp\"],\n self._merged[\"yp\"],\n self._merged[\"zp\"],\n self._merged[\"x1\"],\n self._merged[\"y1\"],\n self._merged[\"z1\"],\n self._merged[\"x2\"],\n self._merged[\"y2\"],\n self._merged[\"z2\"],\n self.intersect_epsilion,\n self.size_epsilion,\n self.ray_start_epsilion\n )\n \n result[\"norm\"] = tf.gather(\n self._merged[\"norm\"],\n result[\"gather_trig\"]\n )\n \n return result", "def obtain_depth(self):\n self.z_buffer = image(self.image_plane.width, self.image_plane.height)\n for j in range(self.image_plane.height):\n for i in range(self.image_plane.width):\n single_point = None\n for ray_tracing in self.ray_tracer:\n ray_tracing.ray_direction(i, j)\n ray_tracing.sphere_to_ray()\n ray_tracing.ray_sphere_intersection()\n ray_tracing.hit_pos()\n hit_point = ray_tracing.getHit()\n\n if single_point is None:\n single_point = hit_point\n elif single_point is not None and single_point.z > hit_point.z:\n single_point = hit_point\n self.z_buffer.setColor(single_point, i, j)", "def __init__(self, shape, pts, texcoords, faces, normals=None, smooth=True):\r\n super(Buffer, self).__init__()\r\n\r\n # Uniform variables all in one array!\r\n self.unib = (c_float * 12)(0.0, 0.0, 0.0,\r\n 0.5, 0.5, 0.5,\r\n 1.0, 1.0, 0.0,\r\n 0.0, 0.0, 0.0)\r\n \"\"\" pass to shader array of vec3 uniform variables:\r\n\r\n ===== ============================ ==== ==\r\n vec3 description python\r\n ----- ---------------------------- -------\r\n index from to\r\n ===== ============================ ==== ==\r\n 0 ntile, shiny, blend 0 2\r\n 1 material 3 5\r\n 2 umult, vmult, point_size 6 8\r\n 3 u_off, v_off (only 2 used) 9 10\r\n ===== ============================ ==== ==\r\n \"\"\"\r\n #self.shape = shape\r\n self.textures = []\r\n pts = np.array(pts, dtype=float)\r\n texcoords = np.array(texcoords, dtype=float)\r\n faces = np.array(faces)\r\n\r\n if normals == None: #i.e. 
normals will only be generated if explictly None\r\n LOGGER.debug('Calculating normals ...')\r\n\r\n normals = np.zeros(pts.shape, dtype=float) #empty array rights size\r\n\r\n fv = pts[faces] #expand faces with x,y,z values for each vertex\r\n #cross product of two edges of triangles\r\n fn = np.cross(fv[:][:][:,1] - fv[:][:][:,0], fv[:][:][:,2] - fv[:][:][:,0])\r\n fn = Utility.normalize_v3(fn)\r\n normals[faces[:,0]] += fn #add up all normal vectors for a vertex\r\n normals[faces[:,1]] += fn\r\n normals[faces[:,2]] += fn\r\n normals = Utility.normalize_v3(normals)\r\n else:\r\n normals = np.array(normals)\r\n \r\n # keep a copy for speeding up the collision testing of ElevationMap\r\n self.vertices = pts\r\n self.normals = normals\r\n self.tex_coords = texcoords\r\n self.indices = faces\r\n self.material = (0.5, 0.5, 0.5, 1.0)\r\n\r\n # Pack points,normals and texcoords into tuples and convert to ctype floats.\r\n n_verts = len(pts)\r\n if len(texcoords) != n_verts:\r\n if len(normals) != n_verts:\r\n self.N_BYTES = 12 # only use pts\r\n self.array_buffer = c_floats(pts.reshape(-1).tolist())\r\n else:\r\n self.N_BYTES = 24 # use pts and normals\r\n self.array_buffer = c_floats(np.concatenate((pts, normals),\r\n axis=1).reshape(-1).tolist())\r\n else:\r\n self.N_BYTES = 32 # use all three NB doesn't check that normals are there\r\n self.array_buffer = c_floats(np.concatenate((pts, normals, texcoords),\r\n axis=1).reshape(-1).tolist())\r\n\r\n self.ntris = len(faces)\r\n self.element_array_buffer = c_shorts(faces.reshape(-1))\r\n from pi3d.Display import Display\r\n self.disp = Display.INSTANCE # rely on there always being one!\r" ]
[ "0.6239282", "0.62272376", "0.6221398", "0.6195056", "0.619256", "0.6127605", "0.60318893", "0.59569705", "0.5896775", "0.58099455", "0.5767439", "0.5720655", "0.57185787", "0.5693293", "0.5657191", "0.56316084", "0.5596479", "0.5596469", "0.5510855", "0.5493783", "0.54878277", "0.5426589", "0.5409757", "0.5367993", "0.5366896", "0.5349162", "0.5281349", "0.52581763", "0.52386487", "0.5150513" ]
0.6649916
0
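The render_aa and render_dof negatives in the record above share one core pattern: each pixel averages a V_SAMPLES x H_SAMPLES grid of jittered sub-samples. A minimal JAX sketch of that stratified accumulation follows; `trace(x, y)` is a hypothetical stand-in for shooting one ray through image-plane point (x, y) and shading it, and is not part of the dataset:

```python
import jax
import jax.numpy as jnp

def pixel_color(key, i, j, trace, h_samples=4, v_samples=4):
    """Average a stratified, jittered grid of sub-pixel samples.

    Mirrors the render_aa loop above: one uniform jitter pair per
    sub-pixel cell, each sample weighted by 1 / total_samples.
    """
    total = h_samples * v_samples
    # All jitter pairs at once instead of one np.random call per sample.
    r = jax.random.uniform(key, (v_samples, h_samples, 2))
    n, m = jnp.meshgrid(jnp.arange(v_samples), jnp.arange(h_samples),
                        indexing="ij")
    # Sub-pixel coordinates: cell corner plus jitter, scaled into [0, 1).
    xs = i + (m + r[..., 0]) / h_samples
    ys = j + (n + r[..., 1]) / v_samples
    colors = jax.vmap(jax.vmap(trace))(xs, ys)  # shape (v_samples, h_samples, 3)
    return colors.reshape(total, 3).mean(axis=0)
```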
Produce a grid of ray directions to use for ray_cast().
def ray_grid(
    x: jnp.ndarray,
    y: jnp.ndarray,
    z: jnp.ndarray,
    resolution: int,
):
    scales = jnp.linspace(-1.0, 1.0, num=resolution)
    x_vecs = x * scales[None, :, None]
    y_vecs = y * scales[:, None, None]
    results = (x_vecs + y_vecs + z).reshape([-1, x.shape[0]])
    return jax.vmap(_normalize)(results)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def direct(sun_pos, grid):\n\n # for each pixel at top of grid pass sun rays in\n for i in xrange(grid.gr.shape[0]):\n \"\"\"\n Make an array starting at loc\n \"\"\"\n xpos = i * grid.xres\n ypos = grid.zres * grid.zsize\n pos = np.array(xpos, ypos)\n direction = pos - sun_pos / np.norm(pos - sun_pos) # this location minus \n r = ray(pos, direction)\n \"\"\"\n The ray now travels down through the canopy being\n altered by transmission and reflectance\n\n amount of scattering vs absorption is determined by leaf area density\n\n \"\"\"", "def get_rays(nray, rho, rho_skip):\n aoff = 1/2/np.pi\n rays = list()\n for i in range(nray):\n rays.append(np.zeros((rho - rho_skip,2)).astype(np.float32))\n for j in range(rho_skip, RHO):\n [x, y] = pol2cart(2*np.pi*i/nray+aoff, j)\n x = round(x)\n y = round(y)\n rays[i][j - rho_skip, :] = [x, y]\n return rays", "def get_directions():\n return [(1, 0), (0, 1), (-1, 0), (0, -1)]", "def test_compute_pixel_rays() -> None:\n u = 12\n v = 2\n img_w = 20\n img_h = 10\n fx = 10\n fy = 10\n\n ray_dir = _compute_pixel_ray_direction(u, v, fx, fy, img_w, img_h)\n\n gt_ray_dir: NDArrayFloat = np.array([2.0, -3.0, 10.0])\n gt_ray_dir /= np.linalg.norm(gt_ray_dir)\n\n assert np.allclose(gt_ray_dir, ray_dir)", "def raytrace(pos1: tuple, pos2: tuple) -> list:\n x0, y0 = pos1\n x1, y1 = pos2\n tiles = []\n dx = abs(x1 - x0)\n dy = abs(y1 - y0)\n x, y = x0, y0\n n = 1 + dx + dy\n x_inc = 1 if x1 > x0 else -1\n y_inc = 1 if y1 > y0 else -1\n error = dx - dy\n dx *= 2\n dy *= 2\n\n while n > 0:\n tiles.append((x, y))\n if error > 0:\n x += x_inc\n error -= dy\n else:\n y += y_inc\n error += dx\n n -= 1\n return tiles", "def make_grid(dataset):\n top_left_lat = dataset[\"a\"][0]\n top_left_lng = dataset[\"a\"][1]\n top_right_lng = dataset[\"c\"][1]\n bot_left_lat = dataset[\"b\"][0]\n\n lng_row = []\n lat_col = []\n i = top_left_lng\n while i < top_right_lng:\n lng_row.append(round(i, 5))\n i += step\n j = bot_left_lat\n while j < top_left_lat:\n lat_col.append(round(j, 5))\n j += step\n out_grid = []\n for i in lat_col:\n row = []\n for j in lng_row:\n row.append(\"{0}:{1}:0\".format(i, j))\n out_grid.append(row)\n return out_grid", "def rays(self):\n pixels = np.array([\n [u, v, 1.]\n for u, v in product(range(self.width), range(self.height))\n ], dtype=np.int32).T\n rays = project(self.camera.P_pinv, pixels)\n\n return self._camera.center, rays.T", "def generate_all_locations(grid, shape):", "def _build_point_grid(n_per_side: int) -> np.ndarray:\n offset = 1 / (2 * n_per_side)\n points_one_side = np.linspace(offset, 1 - offset, n_per_side)\n points_x = np.tile(points_one_side[None, :], (n_per_side, 1))\n points_y = np.tile(points_one_side[:, None], (1, n_per_side))\n points = np.stack([points_x, points_y], axis=-1).reshape(-1, 2)\n return points", "def get_directions(board_ndim):\n directions = [\n [[0 for _ in range(board_ndim)] for _ in range(2)]\n for _ in range(board_ndim)\n ]\n for ind in range(board_ndim):\n directions[ind][0][ind] = 1\n directions[ind][1][ind] = -1\n return directions", "def test_compute_pixel_ray_directions_vectorized() -> None:\n fx = 10\n fy = 10\n\n # dummy 2d coordinates in the image plane.\n uv: NDArrayInt = np.array([[12, 2], [12, 2], [12, 2], [12, 2]])\n\n # principal point is at (10,5)\n img_w = 20\n img_h = 10\n\n pinhole_camera = _create_pinhole_camera(\n fx_px=fx,\n fy_px=fy,\n cx_px=img_w / 2,\n cy_px=img_h / 2,\n height_px=img_h,\n width_px=img_w,\n cam_name=\"ring_front_center\", # dummy name\n )\n ray_dirs = 
pinhole_camera.compute_pixel_ray_directions(uv)\n\n gt_ray_dir: NDArrayFloat = np.array([2, -3, 10.0])\n gt_ray_dir /= np.linalg.norm(gt_ray_dir)\n\n for i in range(4):\n assert np.allclose(gt_ray_dir, ray_dirs[i])", "def make_grid(self, nx, ny):\n nx_vec = np.arange(nx)\n ny_vec = np.arange(ny)\n yv, xv = np.meshgrid(ny_vec, nx_vec)\n grid = np.stack((yv, xv), axis=2)\n grid = grid.reshape(1, 1, ny, nx, 2)\n return grid", "def _get_movements_8n():\n s2 = math.sqrt(2)\n return [(1, 0, 1.0),\n (0, 1, 1.0),\n (-1, 0, 1.0),\n (0, -1, 1.0),\n (1, 1, s2),\n (-1, 1, s2),\n (-1, -1, s2),\n (1, -1, s2)]", "def traverse_grid(self, start_cell, direction, num_steps):\n elements = []\n\n for step in range(num_steps):\n row = start_cell[0] + step * direction[0]\n col = start_cell[1] + step * direction[1]\n elements.append(self._grid[row][col])\n\n return elements", "def hexgrid(self):\n n = self.n * 2\n vectors = []\n for u in range(-n, n+1):\n us = [u] * (2*n+1)\n if u < 0:\n vectors.extend(zip(us, range(-n-u, n+1), range(-n, n+u+1)))\n else:\n vectors.extend(zip(us, range(-n, n-u+1), range(-n+u, n+1)))\n return vectors", "def light_source_directions():\n L = np.array([[-0.06059872, -0.44839055, 0.8917812],\n [-0.05939919, -0.33739538, 0.93948714],\n [-0.05710194, -0.21230722, 0.97553319],\n [-0.05360061, -0.07800089, 0.99551134],\n [-0.04919816, 0.05869781, 0.99706274],\n [-0.04399823, 0.19019233, 0.98076044],\n [-0.03839991, 0.31049925, 0.9497977],\n [-0.03280081, 0.41611025, 0.90872238],\n [-0.18449839, -0.43989616, 0.87889232],\n [-0.18870114, -0.32950199, 0.92510557],\n [-0.1901994, -0.20549935, 0.95999698],\n [-0.18849605, -0.07269848, 0.97937948],\n [-0.18329657, 0.06229884, 0.98108166],\n [-0.17500445, 0.19220488, 0.96562453],\n [-0.16449474, 0.31129005, 0.93597008],\n [-0.15270716, 0.4160195, 0.89644202],\n [-0.30139786, -0.42509698, 0.85349393],\n [-0.31020115, -0.31660118, 0.89640333],\n [-0.31489186, -0.19549495, 0.92877599],\n [-0.31450962, -0.06640203, 0.94692897],\n [-0.30880699, 0.06470146, 0.94892147],\n [-0.2981084, 0.19100538, 0.93522635],\n [-0.28359251, 0.30729189, 0.90837601],\n [-0.26670649, 0.41020998, 0.87212122],\n [-0.40709586, -0.40559588, 0.81839168],\n [-0.41919869, -0.29999906, 0.85689732],\n [-0.42618633, -0.18329412, 0.88587159],\n [-0.42691512, -0.05950211, 0.90233197],\n [-0.42090385, 0.0659006, 0.90470827],\n [-0.40860354, 0.18720162, 0.89330773],\n [-0.39141794, 0.29941372, 0.87013988],\n [-0.3707838, 0.39958255, 0.83836338],\n [-0.499596, -0.38319693, 0.77689378],\n [-0.51360334, -0.28130183, 0.81060526],\n [-0.52190667, -0.16990217, 0.83591069],\n [-0.52326874, -0.05249686, 0.85054918],\n [-0.51720021, 0.06620003, 0.85330035],\n [-0.50428312, 0.18139393, 0.84427174],\n [-0.48561334, 0.28870793, 0.82512267],\n [-0.46289771, 0.38549809, 0.79819605],\n [-0.57853599, -0.35932235, 0.73224555],\n [-0.59329349, -0.26189713, 0.76119165],\n [-0.60202327, -0.15630604, 0.78303027],\n [-0.6037003, -0.04570002, 0.7959004],\n [-0.59781529, 0.06590169, 0.79892043],\n [-0.58486953, 0.17439091, 0.79215873],\n [-0.56588359, 0.27639198, 0.77677747],\n [-0.54241965, 0.36921337, 0.75462733],\n [0.05220076, -0.43870637, 0.89711304],\n [0.05199786, -0.33138635, 0.9420612],\n [0.05109826, -0.20999284, 0.97636672],\n [0.04919919, -0.07869871, 0.99568366],\n [0.04640163, 0.05630197, 0.99733494],\n [0.04279892, 0.18779527, 0.98127529],\n [0.03870043, 0.30950341, 0.95011048],\n [0.03440055, 0.41730662, 0.90811441],\n [0.17290651, -0.43181626, 0.88523333],\n [0.17839998, -0.32509996, 
0.92869988],\n [0.18160174, -0.20480196, 0.96180921],\n [0.18200745, -0.07490306, 0.98044012],\n [0.17919505, 0.05849838, 0.98207285],\n [0.17329685, 0.18839658, 0.96668244],\n [0.1649036, 0.30880674, 0.93672045],\n [0.1549931, 0.41578148, 0.89616009],\n [0.28720483, -0.41910705, 0.8613145],\n [0.29740177, -0.31410186, 0.90160535],\n [0.30420604, -0.1965039, 0.9321185],\n [0.30640529, -0.07010121, 0.94931639],\n [0.30361153, 0.05950226, 0.95093613],\n [0.29588748, 0.18589214, 0.93696036],\n [0.28409783, 0.30349768, 0.90949304],\n [0.26939905, 0.40849857, 0.87209694],\n [0.39120402, -0.40190413, 0.8279085],\n [0.40481085, -0.29960803, 0.86392315],\n [0.41411685, -0.18590756, 0.89103626],\n [0.41769724, -0.06449957, 0.906294],\n [0.41498764, 0.05959822, 0.90787296],\n [0.40607977, 0.18089099, 0.89575537],\n [0.39179226, 0.29439419, 0.87168279],\n [0.37379609, 0.39649585, 0.83849122],\n [0.48278794, -0.38169046, 0.78818031],\n [0.49848546, -0.28279175, 0.8194761],\n [0.50918069, -0.1740934, 0.84286803],\n [0.51360856, -0.05870098, 0.85601427],\n [0.51097962, 0.05899765, 0.8575658],\n [0.50151639, 0.17420569, 0.84742769],\n [0.48600297, 0.28260173, 0.82700506],\n [0.46600106, 0.38110087, 0.79850181],\n [0.56150442, -0.35990283, 0.74510586],\n [0.57807114, -0.26498677, 0.77176147],\n [0.58933134, -0.1617086, 0.7915421],\n [0.59407609, -0.05289787, 0.80266769],\n [0.59157958, 0.057798, 0.80417224],\n [0.58198189, 0.16649482, 0.79597523],\n [0.56620006, 0.26940003, 0.77900008],\n [0.54551481, 0.36380988, 0.7550205]], dtype=float)\n return L", "def indices_grid(frame_len, frame_step, num_frames):\n indices = np.tile(np.arange(0, frame_len), (num_frames, 1)) + \\\n np.tile(np.arange(0, num_frames * frame_step, frame_step), (frame_len, 1)).T\n indices = np.array(indices, dtype=np.int32)\n return indices", "def _prepare_grid(self):\n draw_grid = list()\n for x in range(len(self._grid) + len(self._grid) + 1):\n if x % 2 == 0:\n draw_grid.append([self._walk_area_color if x % 2 != 0 else self._wall_color\n for x in range(len(self._grid) + len(self._grid) + 1)])\n else:\n draw_grid.append([self._walk_area_color\n for _ in range(len(self._grid) + len(self._grid) + 1)])\n\n draw_grid = self._draw_walls(draw_grid)\n draw_grid = self._draw_treasures(draw_grid)\n draw_grid = self._draw_border(draw_grid)\n return draw_grid", "def get_movements_8n():\n s2 = math.sqrt(2)\n return [(1, 0, 1.0),\n (0, 1, 1.0),\n (-1, 0, 1.0),\n (0, -1, 1.0),\n (1, 1, s2),\n (-1, 1, s2),\n (-1, -1, s2),\n (1, -1, s2)]", "def cast_rays(pos):\n global POLYGONS\n dtheta = 0.01\n coll = []\n for vertex in POLYGONS: \n dx = vertex[0] - pos[0]\n dy = vertex[1] - pos[1]\n angle = math.atan2(dy,dx)\n rays = (Ray(pos,angle2pointer(angle-dtheta)) , Ray(pos,angle2pointer(angle)) , Ray(pos,angle2pointer(angle+dtheta)))\n opts = (rays[0].cast(), rays[1].cast(), rays[2].cast())\n if opts[0] != None:\n coll.append(( angle-dtheta, (int(opts[0][0]),int(opts[0][1])) ))\n if opts[1] != None:\n coll.append(( angle, (int(opts[1][0]),int(opts[1][1])) ))\n if opts[2] != None:\n coll.append(( angle+dtheta, (int(opts[2][0]),int(opts[2][1])) ))\n shader_vertices = [x[1] for x in sorted(coll)]\n return shader_vertices", "def createGrid(nx, ny, include_center = False):\n direction = 0\n positions = []\n if (nx > 1) or (ny > 1):\n half_x = int(nx/2)\n half_y = int(ny/2)\n for i in range(-half_y, half_y+1):\n for j in range(-half_x, half_x+1):\n if ((i==0) and (j==0)) and not include_center:\n continue\n else:\n if ((direction%2)==0):\n 
positions.append([j,i])\n else:\n positions.append([-j,i])\n direction += 1\n return positions", "def _generate_grid(self, xyz, dx, dy):\n\n origin = np.amin(xyz,0)\n extent = np.amax(xyz,0)-origin\n ncells = (np.amax(xyz,0)-origin)//[dx,dy,1]\n\n # Account for remainder\n origin += [(extent[0] % dx) / 2, (extent[1] % dy) / 2, 0]\n\n xbnds = np.linspace(0, ncells[0] * dx, ncells[0] + 1)\n ybnds = np.linspace(0, ncells[1] * dy, ncells[1] + 1)\n\n return origin, xbnds, ybnds, extent[2]", "def wallsAndGates(self, rooms: List[List[int]]) -> None:\n if rooms==[]: return\n xcord=len(rooms)\n ycord=len(rooms[0])\n indexstack=[(i,j) for i in range(len(rooms)) for j in range(len(rooms[0])) if rooms[i][j] == 0]\n direction=[(0,1),(1,0),(0,-1),(-1,0)]\n gatenum=1\n while indexstack != []:\n newindex=[]\n for item in indexstack:\n for mapdir in direction:\n xpoint=item[0]+mapdir[0]\n ypoint=item[1]+mapdir[1]\n if 0<=xpoint <len(rooms) and 0<=ypoint<len(rooms[0]):\n if rooms[xpoint][ypoint]==pow(2,31)-1:\n rooms[xpoint][ypoint]=gatenum\n newindex.append((xpoint,ypoint))\n indexstack=newindex\n gatenum+=1\n ''''\n for item in index_0:\n for mapdir in direction:\n xpoint=item[0]+mapdir[0]\n ypoint=item[1]+mapdir[1]\n if xpoint <len(rooms) and ypoint<len(rooms[0]):\n if rooms[xpoint][ypoint]==pow(2,31)-1:\n rooms[xpoint][ypoint]=1\n index_1.append((xpoint,ypoint))\n for item in index_1:\n for mapdir in direction:\n xpoint=item[0]+mapdir[0]\n ypoint=item[1]+mapdir[1]\n if xpoint <len(rooms) and ypoint<len(rooms[0]):\n if rooms[xpoint][ypoint]==pow(2,31)-1:\n rooms[xpoint][ypoint]=2\n index_2.append((xpoint,ypoint))\n for item in index_2:\n for mapdir in direction:\n xpoint=item[0]+mapdir[0]\n ypoint=item[1]+mapdir[1]\n if xpoint <len(rooms) and ypoint<len(rooms[0]):\n if rooms[xpoint][ypoint]==pow(2,31)-1:\n rooms[xpoint][ypoint]=3\n index_3.append((xpoint,ypoint))\n for item in index_3:\n for mapdir in direction:\n xpoint=item[0]+mapdir[0]\n ypoint=item[1]+mapdir[1]\n if xpoint <=len(rooms) and ypoint<=len(rooms[0]):\n if rooms[xpoint][ypoint]==pow(2,31)-1:\n rooms[xpoint][ypoint]=4\n #index_3.append((xpoint,ypoint))'''", "def compute_walls(grid):\n\n\tnum_tiles = len(grid)\n\tedge_size = int(math.sqrt(num_tiles))\n\tradius = int((edge_size - 1) / 2)\n\n\tleft_wall = 0\n\tright_wall = 0\n\ttop_wall = 0\n\tbottom_wall = 0\n\n\tfor i in range((radius * edge_size), ((radius * edge_size) + radius)):\n\t\t# '(radius * edge_size)' is the position of the tile in the left middle.\n\t\tif grid[i] == Tile.WALL:\n\t\t\tleft_wall += 1\n\n\tfor i in range(((radius * edge_size) + radius + 1), ((radius + 1) * edge_size)):\n\t\tif grid[i] == Tile.WALL:\n\t\t\tright_wall += 1\n\n\tfor i in range(radius, (radius * edge_size), edge_size): # 'radius' is the position of the tile in the top middle.\n\t\tif grid[i] == Tile.WALL:\n\t\t\ttop_wall += 1\n\n\tfor i in range(((radius + 1) * edge_size) + radius, num_tiles, edge_size):\n\t\tif grid[i] == Tile.WALL:\n\t\t\tbottom_wall += 1\n\n\treturn (left_wall, right_wall, top_wall, bottom_wall)", "def rays(self):\n try:\n return self._rays\n except:\n self._rays = [list(x) for x in self.ray_generator()]\n return self._rays", "def direction_list(self, direction):\n direction_indexes = []\n \n if direction == UP:\n for index in range(self.grid_width):\n direction_indexes.append((0, index))\n elif direction == DOWN:\n for index in range(self.grid_width):\n direction_indexes.append((self.grid_height - 1, index))\n elif direction == LEFT:\n for index in range(self.grid_height):\n 
direction_indexes.append((index , 0))\n elif direction == RIGHT:\n for index in range(self.grid_height):\n direction_indexes.append(((index), self.grid_width - 1))\n\n return direction_indexes", "def init_grid(self):\n self.pts = np.array(\n np.meshgrid(\n np.arange(self.net_dim[0]) + 1,\n np.arange(self.net_dim[1]) + 1\n )\n ).reshape(2, np.prod(self.net_dim)).T\n if self.topo == \"hexagonal\":\n self.pts[:, 0] = self.pts[:, 0] + .5 * (self.pts[:, 1] % 2)\n self.pts[:, 1] = np.sqrt(3) / 2 * self.pts[:, 1]", "def get_grid_distances(self, num):\n every = int(360 / num)\n return self.lidar_absolute[0:-1:every]", "def neighbour_directions(index, grid_size):\n #4 corner\n if index == 0:\n directions = ['down','right','down-right']\n return directions_to_int(index,grid_size,directions)\n if index == grid_size*grid_size-1:\n directions = ['up','left','up-left']\n return directions_to_int(index,grid_size,directions)\n if index == grid_size-1:\n directions = ['down','left','down-left']\n return directions_to_int(index,grid_size,directions)\n if index == grid_size*grid_size-grid_size:\n directions = ['up','right','up-right']\n return directions_to_int(index,grid_size,directions)\n \n # 4 edge \n if index< grid_size:\n directions = ['down','right','left','down-right','down-left']\n return directions_to_int(index,grid_size,directions)\n if index > grid_size*grid_size-grid_size:\n directions = ['up','right','left','up-right','up-left']\n return directions_to_int(index,grid_size,directions)\n if index%grid_size == 0:\n directions = ['up','down','right','up-right','down-right']\n return directions_to_int(index,grid_size,directions)\n if (index+1)%grid_size == 0:\n directions = ['up','down','left','up-left','down-left']\n return directions_to_int(index,grid_size,directions)\n \n #middle\n return directions_to_int(index,grid_size,DIRECTIONS)", "def flat_2D_grid(bounds, dx, dy):\n x = np.arange(bounds[0], bounds[1] + dx, dx)\n y = np.arange(bounds[2], bounds[3] + dy, dy)\n x_grid, y_grid = np.meshgrid(x, y)\n x_grid, y_grid = x_grid.flatten(), y_grid.flatten()\n\n return pd.DataFrame({'x': x_grid,\n 'y': y_grid,\n 'masked': np.zeros(x_grid.size, dtype='bool')})" ]
[ "0.62193435", "0.594625", "0.5943372", "0.58991474", "0.5760594", "0.575585", "0.574361", "0.5715618", "0.5703676", "0.56958956", "0.5657632", "0.5629972", "0.55971795", "0.55454016", "0.5531384", "0.55228245", "0.5484227", "0.5473436", "0.5425375", "0.54226977", "0.5401635", "0.5391887", "0.53911763", "0.5388682", "0.538265", "0.5335023", "0.5328408", "0.53243357", "0.5320082", "0.5319663" ]
0.62440854
0
Shoot a ray from an origin point in a given direction and find the first place it intersects a mesh, if anywhere.
def mesh_ray_collision(mesh: jnp.ndarray, origin: jnp.ndarray, direction: jnp.ndarray): collides, positions, distances = jax.vmap( lambda t: _triangle_ray_collision(t, origin, direction) )(mesh) idx = jnp.argmin(jnp.where(collides, distances, jnp.inf)) return ( jnp.any(collides), positions[idx], _triangle_normal(mesh[idx]), )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def intersects(self, ray):\n theta = 45\n H = 512\n W = 512\n A = self.origin\n B = Point(W, A.y, A.z)\n C = Point(B.x, (int)(H * math.sin(theta * math.pi / 180)), (int)(H * math.cos(math.pi * theta / 180)))\n D = Point(A.x, (int)(H * math.sin(theta * math.pi / 180)), (int)(H * math.cos(math.pi * theta / 180)))\n vec3 = ray.direction * self.normal\n if vec3 != 0:\n vec1 = self.origin - ray.origin\n vec2 = vec1 * self.normal\n dist = vec2 / vec3\n if dist > 0:\n point_on_plane = ray.origin + dist * ray.direction\n if A.x <= point_on_plane.x <= B.x and A.y <= point_on_plane.y <= D.y and B.z <= point_on_plane.z <= C.z:\n #print A, B, C, D, point_on_plane\n return dist", "def pick(self, start, direction, mat):\n new_mat = np.dot(\n np.dot(mat, self.translation_matrix),\n np.linalg.inv(self.scaling_matrix)\n )\n\n results = self.aabb.ray_hit(start, direction, mat)\n return results", "def shoot_ray(self, origin_row, origin_column):\n\n # get the the square object at row x column\n origin = self._board.get_board_square((origin_row, origin_column))\n\n # check that it is a valid \"edge\" to send a ray from\n origin_check = origin.is_edge()\n\n # if it's not then return false\n if origin_check == False:\n return False\n\n # if we pass the origin check create shoot a new Ray.Ray object from row x column\n new_ray = Ray.Ray(origin_row, origin_column)\n\n # let the square we shot from know its an orign square\n origin.set_originating_ray(new_ray)\n # Deduct 1 from the score since we now have on exit point\n self.set_score(-1)\n\n # while the ray object has a direction (will be set to none when it reaches an endpoint)\n # send it to the helper function that will move it\n while new_ray.get_direction() != None:\n self.move_ray(new_ray)\n\n # if we hit an exit point (other than through reflection) deduct the point for that\n terminus = new_ray.get_terminal_location()\n # check the the terminal point is an edge (hitting an atom returns none as terminus)\n\n if terminus != None:\n # check that the terminus is not a reflection, which shouldn't be counted twice\n terminal_square = self._board.get_board_square(terminus)\n terminal_square.set_terminating_ray(new_ray)\n if terminus != (origin_row, origin_column):\n self.set_score(-1)\n\n return terminus", "def intersection(self, ray):\n d_proj = self._normal.dot(ray.d)\n if abs(d_proj) < bounds.too_small:\n return -1.0\n s_proj = (self._origin - ray.o).dot(self._normal)\n if d_proj * s_proj < 0.0:\n # ray going away from plane\n return -1.0\n else:\n return s_proj / d_proj", "def shoot_ray(self, row, col):\n # Uses validate method to check if row,col are legal for ray entrance location\n if not self.valid_ray(row, col):\n return False\n # creates ray object from row, col integers\n ray = Ray(row, col)\n # checks if atom is in front of entrance position\n if not ray.can_continue(self.get_a_locations()):\n self.mark_portal(ray.get_start())\n if self.get_score() <= 0:\n self.change_state(\"LOST\")\n return None\n # while there is no atom in front of ray and ray will not exit board --\n while ray.can_continue(self.get_a_locations()):\n ray.check_diags(self.get_a_locations())\n # moves ray forward one space\n ray.advance()\n # if ray will exit board by advancing --\n if not ray.on_board():\n # adjusts score if entrance/exit do not match prior entrances/exits\n self.mark_portal(ray.get_start(), ray.get_pos())\n # changes state to lose if score is now <= 0\n if self.get_score() <= 0:\n self.change_state(\"LOST\")\n # returns tuple of exit location\n return 
tuple(ray.get_pos())\n # if ray is blocked by atom --\n if not ray.no_atom(self.get_a_locations()):\n # changes state to lost if score is now <= 0\n self.mark_portal(ray.get_start())\n if self.get_score() <= 0:\n self.change_state(\"LOST\")\n return None", "def rayIntersection(self, ray):\n #t = \"what we are trying to find\"\n l = -ray.mDirection\n l0 = ray.mOrigin\n n = self.mNormal\n p0 = self.mDistance * n\n #p = l0 + l * t\n\n if l.dot(n) > 0:\n v = p0 - l0\n t = -(v.dot(n) / l.dot(n))\n return t\n\n else:\n return None", "def IsPointInsideMesh(MeshObj, PointInObjectSpace):\n #direction is irellevant unless mesh is REALLY wierd shaped\n direction = mathutils.Vector((1,0,0)) \n epsilon = direction * 1e-6 \n count = 0 \n result, PointInObjectSpace, normal, index = MeshObj.ray_cast(PointInObjectSpace, direction) \n while result: \n count += 1 \n result, PointInObjectSpace, normal, index = MeshObj.ray_cast(PointInObjectSpace + epsilon, direction) \n return (count % 2) == 1", "def intersect(self, ray):\n # TODO A5 (Step1) implement this function\n # Copy your implementation from A4\n # Then calculate uv coordinates, to be passed into the Hit initializer\n D = ray.direction\n E = ray.origin\n C = self.center\n R = self.radius\n B = 2*np.dot(D, E-C)\n A = np.dot(D, D)\n min_t = ray.start\n max_t = ray.end\n\n discriminant = B ** 2 - 4 * A * (np.dot(E-C, E-C)-R**2)\n\n if discriminant < 0:\n return no_hit\n\n t0 = (-1*B - np.sqrt(discriminant)) / (2*A)\n t1 = (-1*B + np.sqrt(discriminant)) / (2*A)\n\n if (t0 >= min_t and t0 <= max_t and t0 <= t1):\n t = t0\n elif (t1 >= min_t and t1 <= max_t):\n t = t1\n else:\n return no_hit\n\n P = E + t * D\n unit_normal = (P - C) / R\n d_hat = normalize(P - C)\n u = 0.5 + (np.arctan2(d_hat[0], d_hat[2])) / (2 * np.pi)\n v = 0.5 + (np.arcsin(d_hat[1])) / np.pi\n\n return Hit(t, P, unit_normal, vec([u, v]), self.material)", "def intersect(self, ray):\n\n t = None\n hit = None\n angle = ray.dir.dot(self.norm)\n if angle != 0:\n t = (self.point - ray.start).dot(self.norm) / angle\n if angle < 0:\n hit = Hit(self, ray, t, float('inf'), self.norm, self.mat)\n else:\n hit = Hit(self, ray, float('-inf'), t, self.norm, self.mat)\n else:\n vector = unit(ray.start - self.point)\n if vector.dot(self.norm) < 0:\n hit = Hit(self, ray, float('-inf'), float('inf'), self.norm, self.mat)\n else:\n return None\n if (self.mat.texture is not None and not isninf(hit.entry)) > 0:\n hit.texCords = self.texCords(ray.pos(t))\n return hit", "def rayIntersection(self, ray):\n\n rotVect = ray.mDirection #math3d.VectorN(math.cos(num), - math.sin(num), 0)\n\n # this give all the lines (red green and blue at the moment)\n tankPos = math3d.VectorN(ray.mOrigin[0], ray.mOrigin[1], 0)\n linkPos = math3d.VectorN(200,200,0)\n v = linkPos - tankPos\n added = (tankPos + getPara(v, rotVect) + getPerp(v, rotVect))\n added2 = tankPos + getPara(v, rotVect) #If the magnitude of this is minus the sphere origin is less than the radius you're in the sphere\n added3 = tankPos + getPerp(v, rotVect)\n added4 = tankPos + rotVect.normalized() * 200 #this is get point only change 200 to dist\n\n\n test = added2 - self.mCenter #checks if in center\n\n\n if test.magnitude() <= self.mRadius:\n green = added2 - ray.mOrigin #this is Qpara\n thing = (self.mSRadius - test.magnitude()**2) ** 0.5\n t = (green.magnitude() - thing)\n print(green.magnitude() - thing)\n return t\n else:\n return None\n\n #print(test.magnitude(), self.mRadius)\n #print(green.magnitude(), \"green\")", "def batch_mesh_contains_points(\n 
ray_origins, # point cloud as origin of rays\n obj_triangles,\n direction=torch.Tensor([0.4395064455, 0.617598629942, 0.652231566745]),\n):\n tol_thresh = 0.0000001\n batch_size = obj_triangles.shape[0]\n triangle_nb = obj_triangles.shape[1]\n point_nb = ray_origins.shape[1]\n\n # Batch dim and triangle dim will flattened together\n batch_points_size = batch_size * triangle_nb\n # Direction is random but shared\n v0, v1, v2 = obj_triangles[:, :, 0], obj_triangles[:, :, 1], obj_triangles[:, :, 2]\n # Get edges\n v0v1 = v1 - v0\n v0v2 = v2 - v0\n\n direction = direction.to(ray_origins.device)\n # Expand needed vectors\n batch_direction = direction.view(1, 1, 3).expand(batch_size, triangle_nb, 3)\n\n # Compute ray/triangle intersections\n pvec = torch.cross(batch_direction, v0v2, dim=2)\n dets = torch.bmm(\n v0v1.view(batch_points_size, 1, 3), pvec.view(batch_points_size, 3, 1)\n ).view(batch_size, triangle_nb)\n\n # Check if ray and triangle are parallel\n parallel = abs(dets) < tol_thresh\n invdet = 1 / (dets + 0.1 * tol_thresh)\n\n # Repeat mesh info as many times as there are rays\n triangle_nb = v0.shape[1]\n v0 = v0.repeat(1, point_nb, 1)\n v0v1 = v0v1.repeat(1, point_nb, 1)\n v0v2 = v0v2.repeat(1, point_nb, 1)\n hand_verts_repeated = (\n ray_origins.view(batch_size, point_nb, 1, 3)\n .repeat(1, 1, triangle_nb, 1)\n .view(ray_origins.shape[0], triangle_nb * point_nb, 3)\n )\n pvec = pvec.repeat(1, point_nb, 1)\n invdet = invdet.repeat(1, point_nb)\n tvec = hand_verts_repeated - v0\n u_val = (\n torch.bmm(\n tvec.view(batch_size * tvec.shape[1], 1, 3),\n pvec.view(batch_size * tvec.shape[1], 3, 1),\n ).view(batch_size, tvec.shape[1])\n * invdet\n )\n # Check ray intersects inside triangle\n u_correct = (u_val > 0) * (u_val < 1)\n qvec = torch.cross(tvec, v0v1, dim=2)\n\n batch_direction = batch_direction.repeat(1, point_nb, 1)\n v_val = (\n torch.bmm(\n batch_direction.view(batch_size * qvec.shape[1], 1, 3),\n qvec.view(batch_size * qvec.shape[1], 3, 1),\n ).view(batch_size, qvec.shape[1])\n * invdet\n )\n v_correct = (v_val > 0) * (u_val + v_val < 1)\n t = (\n torch.bmm(\n v0v2.view(batch_size * qvec.shape[1], 1, 3),\n qvec.view(batch_size * qvec.shape[1], 3, 1),\n ).view(batch_size, qvec.shape[1])\n * invdet\n )\n # Check triangle is in front of ray_origin along ray direction\n t_pos = t >= tol_thresh\n parallel = parallel.repeat(1, point_nb)\n # # Check that all intersection conditions are met\n try:\n not_parallel = 1 - parallel\n except:\n not_parallel = parallel==False\n final_inter = v_correct * u_correct * not_parallel * t_pos\n # Reshape batch point/vertices intersection matrix\n # final_intersections[batch_idx, point_idx, triangle_idx] == 1 means ray\n # intersects triangle\n final_intersections = final_inter.view(batch_size, point_nb, triangle_nb)\n # Check if intersection number accross mesh is odd to determine if point is\n # outside of mesh\n exterior = final_intersections.sum(2) % 2 == 0\n return exterior", "def getClosestPointFromLine(origin, ray, point):\n # calculate the difference vector\n delta = point-origin\n # norm the ray\n ray /= np.linalg.norm(ray, axis=-1)[..., None]\n # calculate the scale product\n factor = np.sum(ray*delta, axis=-1)\n try:\n return origin + factor[:, None] * ray\n except IndexError:\n return origin + factor * ray", "def intersect(self, ray):\n # TODO A5 (Step1) implement this function\n # Copy your implementation from A4\n # Then calculate uv coordinates, to be passed into the Hit initializer\n vs = self.vs\n\n a = vs[0][0] - vs[1][0]\n b = 
vs[0][1] - vs[1][1]\n c = vs[0][2] - vs[1][2]\n d = vs[0][0] - vs[2][0]\n e = vs[0][1] - vs[2][1]\n f = vs[0][2] - vs[2][2]\n\n ray_dir = ray.direction\n ray_orig = ray.origin\n\n g = ray_dir[0]\n h = ray_dir[1]\n i = ray_dir[2]\n j = vs[0][0] - ray_orig[0]\n k = vs[0][1] - ray_orig[1]\n l = vs[0][2] - ray_orig[2]\n\n M = a * (e * i - h * f) + b * (g * f - d * i) + c * (d * h - e * g)\n\n t = -(f * (a * k - j * b) + e * (j * c - a * l) + d *\n (b * l - k * c)) / M\n\n if (t < ray.start or t > ray.end):\n return no_hit\n\n gamma = (i * (a * k - j * b) + h * (j * c - a * l) + g *\n (b * l - k * c)) / M\n\n if (gamma < 0 or gamma > 1):\n return no_hit\n\n beta = (j * (e * i - h * f) + k * (g * f - d * i) +\n l * (d * h - e * g)) / M\n\n if (beta < 0 or beta > 1 - gamma):\n return no_hit\n\n P = ray_orig + t * ray_dir\n\n unit_normal = normalize(np.cross(vs[0] - vs[2], vs[1] - vs[2]))\n\n A = np.linalg.norm(np.cross(vs[1] - vs[0], vs[2] - vs[0])) / 2\n areaA = np.linalg.norm(np.cross(vs[1] - P, vs[2] - P)) / 2\n areaB = np.linalg.norm(np.cross(vs[0] - P, vs[2] - P)) / 2\n areaC = np.linalg.norm(np.cross(vs[0] - P, vs[1] - P)) / 2\n u = areaB / A\n v = areaC / A\n return Hit(t, P, unit_normal, vec([u, v]), self.material)", "def ray_intersect_triangle(origin, direction, triangle, use_planes=False):\n origin = np.array(origin)\n direction = np.array(direction)\n if len(direction.shape) == 1:\n direction = direction.reshape(1, *direction.shape)\n return_single = True\n else:\n return_single = False\n triangle = np.array(triangle)\n if len(triangle.shape) == 2:\n triangle = triangle.reshape(1, *triangle.shape)\n\n v0 = triangle[..., 0, :]\n v1 = triangle[..., 1, :]\n v2 = triangle[..., 2, :]\n u = v1 - v0\n v = v2 - v0\n normal = np.cross(u, v)\n b = np.inner(normal, direction)\n a = my_inner(normal[..., None, :], v0[..., None, :] - origin[None, ..., :])\n\n rI = a / b\n # ray is parallel to the plane\n rI[(b == 0.0)*(a != 0.0)] = np.nan\n # ray is parallel and lies in the plane\n rI[(b == 0.0)*(a == 0.0)] = 0\n\n # check whether the intersection is behind the origin of the ray\n rI[rI < 0.0] = np.nan\n\n if not use_planes:\n w = origin + rI[..., None] * direction - v0[..., None, :]\n denom = my_inner(u, v) * my_inner(u, v) - my_inner(u, u) * my_inner(v, v)\n\n si = (my_inner(u, v)[..., None] * my_inner(w, v[..., None, :]) - my_inner(v, v)[..., None] * my_inner(w, u[..., None, :])) / denom[:, None]\n rI[((si < 0)+(si > 1.0)).astype(bool)] = np.nan\n\n ti = (my_inner(u, v)[..., None] * my_inner(w, u[..., None, :]) - my_inner(u, u)[..., None] * my_inner(w, v[..., None, :])) / denom[:, None]\n rI[((ti < 0.0) + (si + ti > 1.0)).astype(bool)] = np.nan\n\n def nanargmin(a, axis):\n from numpy.lib.nanfunctions import _replace_nan\n a, mask = _replace_nan(a, np.inf)\n res = np.argmin(a, axis=axis)\n return res\n\n index = nanargmin(rI, axis=0)\n rI = rI[index, np.arange(len(index))]\n point = origin + rI[..., None] * direction\n\n if return_single:\n return point[0]\n return point", "def intersectRay(self, ray):\n # Ray Tracing from the Ground Up, pg. 
367\n a, b, c, d = self.a[0] - self.b[0], self.a[0] - self.c[0], ray.d[0], self.a[0] - ray.o[0]\n e, f, g, h = self.a[1] - self.b[1], self.a[1] - self.c[1], ray.d[1], self.a[1] - ray.o[1]\n i, j, k, L = self.a[2] - self.b[2], self.a[2] - self.c[2], ray.d[2], self.a[2] - ray.o[2]\n\n m, n, p = f * k - g * j, h * k - g * L, f * L - h * j\n q, s = g * i - e * k, e * j - f * i\n\n denom = a * m + b * q + c * s\n if denom < self.kEpsilon:\n return None\n\n inv_denom = 1.0 / denom\n\n e1 = d * m - b * n - c * p\n beta = e1 * inv_denom\n\n if 1.0 < beta or beta < 0.0:\n return None\n\n r = e * L - h * i\n e2 = a * n + d * q + c * r\n gamma = e2 * inv_denom\n\n if 1.0 < gamma or gamma < 0.0:\n return None\n\n e3 = a * p - b * r + d * s\n t = e3 * inv_denom\n\n if t < self.kEpsilon:\n return None\n\n return t", "def intersects(self, ray):\n sphere_to_ray = ray.origin - self.center\n a = 1\n b = 2 * ray.direction.dot_product(sphere_to_ray)\n c = sphere_to_ray.dot_product(sphere_to_ray) - self.radius * self.radius\n discriminant = b * b - 4 * a * c\n\n if discriminant >= 0:\n dist = (-b - sqrt(discriminant)) / 2\n if dist > 0:\n return dist\n\n return None", "def intersects(self, ray):\n\n sphere_to_ray = ray.origin - self.origin\n b = 2 * ray.direction * sphere_to_ray\n c = sphere_to_ray ** 2 - self.radius ** 2\n discriminant = b ** 2 - 4 * c\n\n if discriminant >= 0:\n dist = (-b - math.sqrt(discriminant)) / 2\n if dist > 0:\n return dist", "def origin_is_inside_hitbox(self, hitbox):\n if self.hitdetection.accurate:\n max_x = max(hitbox, key = lambda index: abs(index[0]))[0]\n max_y = max(hitbox, key = lambda index: abs(index[1]))[1]\n \n m = max(max_x, max_y)\n \n num_intersections = 0\n for i in range(0, len(hitbox), 1):\n if self.hitdetection.module.does_intersect([[m, m], [0, 0]], [hitbox[i], hitbox[(i + 1) % len(hitbox)]]):\n num_intersections += 1\n return [False, True][num_intersections % 2]\n else:\n has_smaller = False\n has_bigger = False\n for hx, hy in hitbox:\n if hx > 0 and hy > 0:\n has_bigger = True\n if hx < 0 and hy < 0:\n has_smaller = True\n return has_smaller and has_bigger", "def intersectsRay(self, ray):\n pass", "def getIntersection(self, ray):\n pass", "def obj_ray_cast(obj, matrix):\r\n \r\n # get the ray relative to the object\r\n matrix_inv = matrix.inverted()\r\n ray_origin_obj = matrix_inv * ray_origin\r\n ray_target_obj = matrix_inv * ray_target\r\n ray_direction_obj = ray_target_obj - ray_origin_obj\r\n \r\n # cast the ray\r\n success, location, normal, face_index = obj.ray_cast(ray_origin_obj, ray_direction_obj)\r\n \r\n if success:\r\n return location, normal, face_index\r\n else:\r\n return None, None, None", "def hit(bx, by, r, px, py,h):\n if bx >= px:\n distance = bx - px\n else:\n distance = px - bx\n if py<=by and by<=py+h and distance <= r:\n return True\n else:\n return False", "def obj_ray_cast(obj, matrix):\n\n # get the ray relative to the object\n matrix_inv = matrix.inverted()\n ray_origin_obj = matrix_inv * ray_origin\n ray_target_obj = matrix_inv * ray_target\n ray_direction_obj = ray_target_obj - ray_origin_obj\n\n # cast the ray\n success, location, normal, face_index = obj.ray_cast(ray_origin_obj, ray_direction_obj)\n\n if success:\n return location, normal, face_index\n else:\n return None, None, None", "def obj_ray_cast(obj, matrix):\n\n # get the ray relative to the object\n matrix_inv = matrix.inverted()\n ray_origin_obj = matrix_inv @ ray_origin\n ray_target_obj = matrix_inv @ ray_target\n ray_direction_obj = ray_target_obj - 
ray_origin_obj\n\n # cast the ray\n success, location, normal, face_index = obj.ray_cast(ray_origin_obj, ray_direction_obj)\n\n if success:\n return location, normal, face_index\n else:\n return None, None, None", "def obj_ray_cast(obj, matrix):\n\n # get the ray relative to the object\n matrix_inv = matrix.inverted()\n ray_origin_obj = matrix_inv * ray_origin\n ray_target_obj = matrix_inv * ray_target\n ray_direction_obj = ray_target_obj - ray_origin_obj\n \n # cast the ray\n success, location, normal, face_index = obj.ray_cast(ray_origin_obj, ray_direction_obj)\n\n if success:\n return location, normal, face_index, ray_target\n else:\n return None, None, None, ray_target", "def intersect(self, ray):\n # TODO A5 (Step3 and Step4) implement this function\n # For step 4, check if uvs and normals are not None (respectively)\n # If so, then interpolate them\n\n # batch_intersect returns t, beta, gamma, i\n posns = self.posns\n uvs = self.uvs\n inds = self.inds\n normals = self.normals\n t, beta, gamma, i = batch_intersect(posns[inds[:, :]], ray)\n if (t == np.inf):\n return no_hit\n vs = posns[inds[i, :]]\n P = ray.origin + t * ray.direction\n\n if (t == np.inf):\n return no_hit\n else:\n\n alpha = 1 - beta - gamma\n\n if uvs is not None:\n\n uv0 = uvs[inds[i][0]]\n uv1 = uvs[inds[i][1]]\n uv2 = uvs[inds[i][2]]\n\n uv = alpha * uv0 + beta * uv1 + gamma * uv2\n\n else:\n\n A = np.linalg.norm(np.cross(vs[1] - vs[0], vs[2] - vs[0])) / 2\n areaA = np.linalg.norm(np.cross(vs[1] - P, vs[2] - P)) / 2\n areaB = np.linalg.norm(np.cross(vs[0] - P, vs[2] - P)) / 2\n areaC = np.linalg.norm(np.cross(vs[0] - P, vs[1] - P)) / 2\n u = areaB / A\n v = areaC / A\n uv = vec([u, v])\n\n if normals is not None:\n\n n0 = normals[inds[i][0]]\n n1 = normals[inds[i][1]]\n n2 = normals[inds[i][2]]\n\n unit_normal = normalize(alpha * n0 + beta * n1 + gamma * n2)\n\n else:\n unit_normal = normalize(np.cross(vs[0] - vs[2], vs[1] - vs[2]))\n\n return Hit(t, P, unit_normal, uv, self.material)", "def intersect(self, ray):\n # TODO A5 copy your implementation from A4\n surfaces = self.surfs\n\n min_t = np.inf\n i = no_hit\n\n for s in surfaces:\n intersect = s.intersect(ray)\n if (intersect.t < min_t):\n min_t = intersect.t\n i = intersect\n return i", "def random_lookat_ray(goal, radius, variance, fov):\n theta1 = 2.*np.pi*np.random.uniform(-fov, fov)\n theta2 = np.arccos(1 - np.random.uniform(0, fov)**2)\n r = radius + variance*np.random.uniform(0,1.)\n x = r*np.cos(theta1)*np.sin(theta2)\n y = r*np.sin(theta1)*np.sin(theta2)\n z = r*np.cos(theta2)\n R = goal[:3,:3]\n point = goal[:3,3] + np.dot(R, np.array([x,y,z]))\n # Find the direction\n direction = -np.dot(R, np.array([x,y,z]))\n direction = tr.unit_vector(direction)\n return orpy.Ray(point, direction)", "def next_hit(self, ray):\n hit_candidates = [(i.time_to_bound(ray), i) for i in self._bounds]\n try:\n # WARNING - A hard cut on 'times' smaller than 10^-9 is made to exclude\n # a beam reinteracting with the same barrier. 
This cuts out any legitimate\n # interactions closer than 1nm of the beam position.\n return (sorted([(time, surface) for time, surface in hit_candidates\n if time is not None and time > 1e-9 and all(\n [b.contains(ray.propagate(time).position) for b in self._bounds\n if b is not surface])])[0])\n except IndexError:\n return None", "def shoot_ray(self, entry_x, entry_y):\r\n\r\n # check to make sure entry_x and entry_y are valid\r\n if (entry_x in [0, 9] or entry_y in [0, 9]) and \\\r\n self._board.get_board_item(entry_x, entry_y) != \"o\":\r\n\r\n exit_tup = self._board.find_exit(entry_x, entry_y)\r\n # returned 0 if hit\r\n if exit_tup == 0:\r\n # decrement entry only if not visited\r\n marker = self.get_hit_marker()\r\n circle_tuple = self.calculate_entry_exit(entry_y, entry_x)\r\n marker.update_center(circle_tuple)\r\n points = self._player.add_entry_exit((entry_x, entry_y), marker,\r\n (entry_x, entry_y))\r\n self._stats.dec_player_score(points)\r\n return \"Hit\"\r\n elif exit_tup == 1:\r\n # decrement entry only if not visited\r\n marker = self.get_reflect_marker()\r\n circle_tuple = self.calculate_entry_exit(entry_y, entry_x)\r\n marker.update_center(circle_tuple)\r\n points = self._player.add_entry_exit((entry_x, entry_y), marker,\r\n (entry_x, entry_y))\r\n\r\n self._stats.dec_player_score(points)\r\n\r\n return \"reflect\"\r\n else:\r\n # decrement both entry and exit if not already visited\r\n marker = self.get_color_marker()\r\n exit_x, exit_y = exit_tup\r\n circle_entry = self.calculate_entry_exit(entry_y, entry_x)\r\n circle_exit = self.calculate_entry_exit(exit_y, exit_x)\r\n marker.update_center(circle_entry, circle_exit)\r\n points = self._player.add_entry_exit((entry_x, entry_y),\r\n marker, exit_tup)\r\n\r\n self._stats.dec_player_score(points)\r\n return exit_tup\r\n else:\r\n # returns false if the shoot_ray point is invalid\r\n return \"Bad shot\"" ]
[ "0.6328977", "0.6311272", "0.63029695", "0.61903316", "0.6177152", "0.61375874", "0.60943496", "0.60207874", "0.59388417", "0.5893612", "0.58412987", "0.57528913", "0.5724966", "0.5704529", "0.5689389", "0.567179", "0.56663096", "0.56171393", "0.5606271", "0.5578516", "0.55750734", "0.5554849", "0.55313087", "0.54844004", "0.5474557", "0.54509246", "0.54385555", "0.54316527", "0.5394709", "0.53854597" ]
0.7033451
0
Reads in a .haml file and outputs a list.
def readHaml(fileName: str) -> List[Tuple[str, float]]: outList = list() with open(fileName, 'r') as fileIn: for line in fileIn: val = (line.split()[0], float(line.split()[1])) outList.append(val) return outList
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_yaml(yfile):\n try:\n f0 = get_fileobj(yfile)\n context_items = [ConTextItem(literal=d[\"Lex\"],\n category=d[\"Type\"],\n re=r\"%s\"%d[\"Regex\"],\n rule=d[\"Direction\"],\n comments=d[\"Comments\"]) for d in yaml.load_all(f0)]\n except FileNotFoundError:\n context_items = []\n finally:\n f0.close()\n return context_items", "def extract_haml(fileobj, keywords, comment_tags, options):\n\n import haml\n from mako import lexer, parsetree\n from mako.ext.babelplugin import extract_nodes \n\n encoding = options.get('input_encoding', options.get('encoding', None))\n template_node = lexer.Lexer(haml.preprocessor(fileobj.read()), input_encoding=encoding).parse()\n for extracted in extract_nodes(template_node.get_children(), keywords, comment_tags, options):\n yield extracted", "def load_dataset(file_handle) -> list:\n output = []\n lines = file_handle.readlines()\n name = None\n for line in lines:\n line = line.replace(\"\\n\", \"\")\n if line.startswith(\">\"):\n if name:\n output.append(sequence)\n name = line[1:]\n sequence = \"\"\n else:\n sequence += line\n\n if name:\n output.append(sequence)\n \n return output", "def read_file(self, file_descriptor):\n parsers = [pyocr.builders._WordHTMLParser(), pyocr.builders._LineHTMLParser()]\n html_str = file_descriptor.read()\n\n for p in parsers:\n p.feed(html_str)\n if len(p.boxes) > 0:\n last_box = p.boxes[-1]\n if last_box.content == pyocr.util.to_unicode(\"\"):\n # some parser leave an empty box at the end\n p.boxes.pop(-1)\n return p.boxes\n return []", "def read_file(filename) -> List[Todo]:\n with pathlib.Path(filename).expanduser().open('r') as fp:\n return [Todo(_id, line) for _id, line in enumerate(fp)]", "def load_gas_list(path):\n with open(path, 'r') as f:\n return yaml.load(f)", "def read_file(filename):\n contents, labels = [], []\n with open_file(filename) as f:\n for line in f:\n try:\n label,content = line.strip().split('\\t')\n contents.append(list(content))\n labels.append(label)\n except:\n pass\n return contents,labels", "def loadblogs(filename):\n\n stream = file(filename, 'r')\n data = yaml.load_all(stream)\n return data", "def read_words(filename):\n # load assets\n word_file = urllib2.urlopen(filename)\n \n # read in files as string\n words = word_file.read()\n \n # template lines and solution lines list of line string\n word_list = words.split('\\n')\n print \"Loaded a dictionary with\", len(word_list), \"words\"\n return word_list", "def read_list(file_name):\n with open(file_name, 'r') as f:\n text = f.read().splitlines()\n return text", "def get_yaml(path):\n end = False\n yaml = \"\"\n num = 0\n\n with open(path, 'r') as f:\n\n for line in f.readlines():\n if line.strip() == '---':\n if end:\n break\n else:\n end = True\n continue\n else:\n num += 1\n\n yaml += line\n\n return yaml, num", "def _epub2thtml(self, filename):\n book = epub.read_epub(filename)\n chapters = []\n for item in book['get_items']():\n if item.get_type() == ebooklib.ITEM_DOCUMENT:\n chapters.append(item.get_content())\n return chapters", "def load_input(filename: str) -> list:\n\n text_stream = io.open(filename, 'r', encoding='utf-8', errors='ignore', newline='\\n')\n \"\"\" Calls Python's io function to read the file with the specified name.\"\"\"\n\n initial_state = []\n for i in range(0, 4):\n initial_state.append(list(map(int, text_stream.readline().rstrip().split(' '))))\n \"\"\" The rstrip method removes all trailing whitespace of the string. 
The split \n method uses the given character as the delimiter to break down the string and \n return a list of the substrings. The map function takes that list, converts \n the substrings into integers and returns a map object, which is eventually \n converted into a list by the exterior call to the list function. \"\"\"\n\n \"\"\" A state is represented as a multi-layer list. The first layer contains \n the four rows, each of which is a second layer that consists of four tiles. \"\"\"\n\n blank_line = text_stream.readline()\n \"\"\" In the input file, there is a blank line in between the two states.\"\"\"\n\n goal_state = []\n for i in range(0, 4):\n goal_state.append(list(map(int, text_stream.readline().rstrip().split(' '))))\n \"\"\" The construct of this part is identical to the one above. \"\"\"\n\n text_stream.close()\n\n ret = [initial_state, goal_state]\n \"\"\" Returns the two lists that represent the initial and goal states, \n respectively. \"\"\"\n return ret", "def load_wordlist(filename):\n with open(filename) as f:\n \tdata = f.read().splitlines()\n return data", "def read_words(filename):\n # load assets\n word_file = urlopen(filename)\n \n # read in files as string\n words = word_file.read()\n \n # template lines and solution lines list of line string\n # if the input value is '\\n' then TypeError: a bytes-like object is required, not 'str'\n word_list = words.split(b'\\n')\n word_list = [word.decode('ascii') for word in word_list]\n print(\"Loaded a dictionary with\", len(word_list), \"words\")\n return word_list", "def loadListFromFile (filename):\n retval = []\n filename = os.path.expanduser (filename)\n if not os.path.exists (filename):\n print(\"Error: file '%s' does not exist.\"%(filename))\n raise RuntimeError(\"Bad filename\")\n source = open (filename, 'r') \n for line in source.readlines():\n line = re.sub (r'#.+$', '', line) # remove comment characters\n line = line.strip()\n if len (line):\n retval.append (line)\n source.close()\n return retval", "def read_file(path):\n with open(path) as _file:\n _list = _file.readlines()\n return _list", "def load_input(filepath: str) -> list:\n lines = []\n with open(filepath, \"r\", encoding=\"utf-8\") as file:\n for line in file.readlines():\n lines.append(line.strip())\n return lines", "def read_language(filehandle):\n from bs4 import BeautifulSoup as bs\n soup = bs(filehandle.read())\n primary_name = soup.find(\"meta\", property=\"og:title\")[\"content\"]\n alternate_names = soup.find(\"div\", class_=\"field-name-field-alternate-names\" ).find(\"div\", class_=[\"field-item\", \"even\"]).string.split(\", \")\n classification = soup.find(\"div\", class_=\"field-name-language-classification-link\").find(\"div\", class_=[\"field-item\", \"even\"]).string.split(\", \")\n dialects = soup.find(\"div\", class_=\"field-name-field-dialects\" ).find(\"div\", class_=[\"field-item\", \"even\"]).p.get_text()\n return ([unicode(primary_name)]+alternate_names, classification, dialects)", "def read_games_list(file_path):\n input_data = \"\"\n if os.path.isfile(file_path):\n f = open(file_path, \"r\")\n input_data = f.readlines()\n f.close()\n return input_data", "def loadList(file_name):\n with open(file_name) as f:\n l = [line.strip() for line in f]\n return l", "def read_data(input_file):\n\n def process_line(labels, words):\n l = ' '.join([label for label in labels if len(label) > 0])\n w = ' '.join([word for word in words if len(word) > 0])\n lines.append((l, w))\n words = []\n labels = []\n return words, labels, lines\n\n rf = 
open(input_file, 'r')\n lines = [];\n words = [];\n labels = []\n for line in rf:\n word = line.strip().split(' ')[0]\n label = line.strip().split(' ')[-1]\n # here we dont do \"DOCSTART\" check\n\n if len(line.strip()) == 0: # and words[-1] == '.'\n words, labels, lines = process_line(labels, words)\n words.append(word)\n labels.append(label)\n rf.close()\n return lines", "def load_description():\n with open('description.txt') as description:\n return [line.strip() for line in description]", "def read_file(self) -> PSMList:\n return PSMList(psm_list=[psm for psm in self])", "def readTotitle(fh):\n preLines = []\n while True:\n l = fh.readline().strip()\n if l.startswith('>'):\n return (preLines,l)\n elif l == '':\n return preLines,None\n else:\n preLines.append(l)", "def process_file(filename, skip_header=True):\n hist = {}\n fp = file(filename)\n fullwordlist=[]\n # if skip_header:\n # skip_gutenberg_header(fp)\n\n for line in fp:\n holder=process_line(line,hist)\n #print holder\n fullwordlist.extend(holder)\n return fullwordlist", "def convert_input_to_list():\n\n f = open('pizza_source.txt', 'r')\n file_to_list = f.read().split('\\n')\n\n return file_to_list", "def _read_list_from_file(self, filename):\n # Open script for reading.\n try:\n fh = open(filename, \"r\")\n return fh.readlines()\n except IOError as e:\n self._logger.exception(\"Cannot open file: \" + filename)\n # Pass exception up so anyone trying to add a response list\n # from a script knows it didn't work.\n raise", "def input_data(self):\n return read_yaml(self.file_path)", "def readPipelines(pipelines=default_pipelines):\n with open(pipelines, 'r') as handle:\n return yaml.load(handle, Loader=yaml.FullLoader)" ]
[ "0.6141074", "0.61266136", "0.59853286", "0.59826833", "0.59287184", "0.5864094", "0.58362484", "0.5695049", "0.5677694", "0.5672521", "0.5662456", "0.56329674", "0.5599225", "0.5592709", "0.55906165", "0.55665773", "0.5560554", "0.5542891", "0.5525169", "0.55054325", "0.55008876", "0.54987997", "0.549857", "0.54569703", "0.54475033", "0.54294", "0.5423137", "0.5420085", "0.54188615", "0.54157656" ]
0.6980631
0
Given the path to the root directory of the KITTI road dataset, it creates a dictionary of the data as numpy arrays. data_dir = directory containing `testing` and `training` subdirectories
def create_data_dict(data_dir, img_size=[25, 83]): print("Creating data dictionary") print("- Using data at:", data_dir) # Directories imgs_dir = os.path.join(data_dir, "training/image_2") labels_dir = os.path.join(data_dir, "training/gt_image_2") print("- Getting list of files") # Only get the label files for road (not lane) label_files = glob.glob(os.path.join(labels_dir, "*_road_*.png")) # Create corresponding list of training image files img_files = list(map(lambda f: os.path.basename(f).replace("_road", ""), label_files)) img_files = list(map(lambda f: os.path.join(imgs_dir, f), img_files)) # absolute path n_samples = len(img_files) print("- Encountered {} samples".format(n_samples)) est_filesize = (n_samples*np.prod(img_size)*(3+1))/1e6 print("- Estimated output filesize: {:0.3f} MB + overhead".format(est_filesize)) data = {} data["X_train"] = np.empty([n_samples]+img_size+[3], dtype=np.uint8) data["Y_train"] = np.empty([n_samples]+img_size, dtype=np.uint8) print("- Processing image files") for i in range(n_samples): label_img = scipy.misc.imread(label_files[i]) input_img = scipy.misc.imread(img_files[i]) # PRERPOCESS THE IMAGES label_img = scipy.misc.imresize(label_img, img_size) input_img = scipy.misc.imresize(input_img, img_size) # PROCESSING LABEL IMAGE # Only one channel, (1=road, 0=not road) non_road_class = np.array([255,0,0]) label_img = (1-np.all(label_img==non_road_class, axis=2, keepdims=False)).astype(np.uint8) # Place the images into the data arrays data["X_train"][i] = input_img data["Y_train"][i] = label_img print("- Shuffling the data") np.random.seed(seed=128) ids = list(np.random.permutation(n_samples)) data["X_train"] = data["X_train"][ids] data["Y_train"] = data["Y_train"][ids] print("- Done!") return data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def buildDataMap(dataDir,trDir=None,trExt=None):\n \n dataMap = {}\n \n if not trDir:\n trDir = 'TrainingObjects/'\n if not trExt:\n trExt = 'TrainingObject.h5'\n\n dataMap['object'] = {'{}{}'.format(dataDir,trDir): trExt}\n\n dataMap['midline'] = {'{}Midlines/'.format(dataDir) : \n 'Midline_Indices.mat'}\n dataMap['matching'] = {'{}MatchingLibraries/Test/MatchingMatrices/'.format(dataDir) : \n 'MatchingMatrix.0.05.Frequencies.mat'}\n \n return dataMap", "def load_data(data_dir):\n train_dir = data_dir + '/train'\n valid_dir = data_dir + '/valid'\n test_dir = data_dir + '/test'\n \n for directory in [train_dir, valid_dir, test_dir]:\n if not os.path.isdir(directory):\n raise IOError(\"Directory \" + directory + \" does not exist\")\n \n # Define transforms for the training, validation, and testing sets\n train_transforms = transforms.Compose([transforms.RandomRotation(30),\n transforms.RandomHorizontalFlip(),\n transforms.Resize(255),\n transforms.CenterCrop(224),\n transforms.ToTensor(),\n transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])])\n data_transforms = transforms.Compose([transforms.Resize(255),\n transforms.CenterCrop(224),\n transforms.ToTensor()])\n \n # Load the datasets with ImageFolder\n train_datasets = datasets.ImageFolder(train_dir, transform=train_transforms)\n valid_datasets = datasets.ImageFolder(valid_dir, transform=data_transforms)\n test_datasets = datasets.ImageFolder(test_dir, transform=data_transforms)\n \n # Using the image datasets and the trainforms, define the dataloaders\n trainloader = torch.utils.data.DataLoader(train_datasets, batch_size=64, shuffle=True)\n validloader = torch.utils.data.DataLoader(valid_datasets, batch_size=32, shuffle=True)\n testloader = torch.utils.data.DataLoader(test_datasets, batch_size=32, shuffle=True)\n \n return {\n 'datasets': {\n 'train': train_datasets,\n 'valid': valid_datasets,\n 'test': test_datasets\n },\n 'loader': {\n 'train': trainloader,\n 'valid': validloader,\n 'test': testloader\n }\n }", "def get_data(folder_name):\n train_data = {}\n for study_id, study_path in sorted(get_studies(folder_name)):\n train_data[study_id] = get_slices(study_id, study_path)\n return train_data", "def data():\n print (\"&\")\n res = {}\n\t\n # Load Data\n with open(DATA_PATH_TRAIN, 'rb') as f:\n data = pickle.load(f)\n\t\t\n for d in data:\n for j in range(len(d)):\n if not d[j][\"addinfo\"][\"path\"] in res:\n res[d[j][\"addinfo\"][\"path\"]] = {}\n d[j][\"environment\"][\"text\"] = d[j][\"addinfo\"][\"text\"]\n res[d[j][\"addinfo\"][\"path\"]][d[j][\"addinfo\"][\"line\"]] = d[j][\"environment\"]\n\t \t\n with open(DATA_PATH_TEST, 'rb') as f:\n data = pickle.load(f)\n\t\t\n for d in data:\n for j in range(len(d)):\n if not d[j][\"addinfo\"][\"path\"] in res:\n res[d[j][\"addinfo\"][\"path\"]] = {}\n d[j][\"environment\"][\"text\"] = d[j][\"addinfo\"][\"text\"]\n res[d[j][\"addinfo\"][\"path\"]][d[j][\"addinfo\"][\"line\"]] = d[j][\"environment\"]\n\t\t\t\n with open('tasks/env/data/data.json', 'w') as outfile:\n json.dump(res, outfile)", "def create_dicts():\n load_data_for_dict('data/atis/train/seq.in', 'data/atis/voc/vocabulary.json')\n load_data_for_dict('data/atis/valid/seq.in', 'data/atis/voc/vocabulary.json')\n load_data_for_dict('data/atis/test/seq.in', 'data/atis/voc/vocabulary.json') \n load_data_for_dict('data/atis/train/seq.out', 'data/atis/voc/slot_vocabulary.json')", "def load_data(folder_path: str) -> dict:\n # iterate over all files in each dataset folder\n data_dict = dict()\n for filename 
in os.listdir(folder_path):\n print(f'{strftime(\"%a, %d %b %Y %H:%M:%S\", gmtime())} load {filename} from {folder_path}')\n if filename == '.DS_Store':\n continue\n # connect all part of files of the same dataset\n file_path = os.path.join(folder_path, filename)\n file = joblib.load(file_path)\n data_dict[filename.split('.', 1)[0]] = file\n\n len_df = pd.DataFrame(data=data_dict[f'branches_lengths_list'],\n index=data_dict[f'branch_comments_embedded_text_df'].index)\n data_dict['len_df'] = len_df\n\n return data_dict", "def get_data_heuristics(rootdir, img_path, datasetnames, heuristicnames):\n datasets = {}\n\n print('Loading: ' + str(len(datasetnames)) + ' datasets')\n for dataset in tqdm(datasetnames):\n time.sleep(0.1)\n\n # Ground truth images (mask image of expert)\n images_gt = load_scans(rootdir + dataset + '/crop_gt')\n images_gt = sitk.GetArrayFromImage(images_gt)\n\n # Heuristic model images (predictions of models)\n images_models = {}\n for model in heuristicnames:\n image = load_data_pickle(img_path, dataset=dataset, filename=model)\n images_models.update({model: image})\n\n # Save images in datasets dictionary\n datasets.update({dataset: {'gt':images_gt, 'models':images_models}})\n\n print(\"dataset created\")\n return datasets", "def KittiTestDataset(test_root_path):\n \n names = os.listdir(test_root_path)\n dataset = [[os.path.join(test_root_path, name)] for name in names]\n \n return dataset", "def _load_data(name):\n filename = os.path.join(os.path.abspath(os.path.dirname(__file__)),\n 'data', name)\n with np.load(filename) as f:\n return dict(f.items())", "def get_training_data_directories():\n\n\tmapping = dict()\n\tmapping[ASCause.apsp] = directories.training_cause_apsp\n\tmapping[ASCause.bl] = directories.training_cause_bl\n\tmapping[ASCause.ce] = directories.training_cause_ce\n\tmapping[ASCause.dfl] = directories.training_cause_dfl\n\tmapping[ASCause.lrssi] = directories.training_cause_lrssi\n\tmapping[ASCause.pscan_assoc] = directories.training_cause_pscan_assoc\n\tmapping[ASCause.pscan_unassoc] = directories.training_cause_pscan_unassoc\n\tmapping[ASCause.pwr_state] = directories.training_cause_pwr_state\n\treturn mapping", "def get_data(name):\n data = collections.OrderedDict()\n dirname = os.path.join(test_dir, *name.split('_'))\n\n if not os.path.isdir(dirname):\n raise ValueError(\"data directory '{}' does not exist\".format(dirname))\n\n for each in get_test_files(dirname):\n if os.path.isdir(each):\n continue\n\n fname = os.path.basename(each)\n if fname.startswith('.'):\n continue\n\n test_name, ext = os.path.splitext(fname)\n data.setdefault(test_name, FileTestData(name=test_name, path=dirname))\n\n # could setattr\n attr = ext[1:]\n if ext == '.expected':\n with open(each) as fobj:\n data[test_name].expected = json.load(fobj, object_hook=json_hook)\n else:\n with open(each) as fobj:\n setattr(data[test_name], attr, fobj.read())\n\n return data", "def load_data(paths):\n result = dict()\n for split in [\"train\", \"valid\", \"test\"]:\n fname = split + \".p\"\n path = paths[fname]\n with open(path, \"rb\") as f:\n data = pickle.load(f)\n result[split] = (data[\"features\"], data[\"labels\"])\n return result", "def agg_paths(dataset_root_dir):\n root_dir_p = Path(dataset_root_dir)\n experiment_dirs = sorted([e for e in root_dir_p.iterdir()])\n \n left_paths=[]\n right_paths=[]\n occl_paths=[]\n disparity_paths=[]\n depth_paths=[] \n calib_paths=[]\n sample_name=[]\n\n for experiment in experiment_dirs:\n modality_gt_dirs = sorted([gt_dir for gt_dir in 
experiment.iterdir() if 'Ground_truth' in gt_dir.name])\n for modality_gt_dir in modality_gt_dirs:\n occl_paths.extend(sorted([p.resolve()\n for p in (modality_gt_dir/'OcclusionL').iterdir()]))\n disparity_paths.extend(sorted([p.resolve()\n for p in (modality_gt_dir/'Disparity').iterdir()]))\n depth_paths.extend(sorted([p.resolve()\n for p in (modality_gt_dir/'DepthL').iterdir()]))\n \n \n left_paths.extend(sorted([(experiment/'Left_rectified'/p.name).resolve()\n for p in (modality_gt_dir/'OcclusionL').iterdir()]))\n right_paths.extend(sorted([(experiment/'Right_rectified'/p.name).resolve()\n for p in (modality_gt_dir/'OcclusionL').iterdir()]))\n calib_paths.extend(sorted([(experiment/'Rectified_calibration'/(p.stem+'.json')).resolve()\n for p in (modality_gt_dir/'OcclusionL').iterdir()]))\n sample_name.extend(sorted([(p.parents[2]).name+' - ' + (p.parents[1]).name.split('_')[-1]+ ' - '+p.stem for p in (modality_gt_dir/'Disparity').iterdir()])) \n return {'left': left_paths,\n 'right': right_paths,\n 'occ': occl_paths,\n 'disparity': disparity_paths,\n 'depth': depth_paths,\n 'calib': calib_paths,\n 'name': sample_name}", "def create_train_test_val_dirs(root_dir):\n try:\n # Create training data directories\n os.makedirs(root_dir + '/train')\n os.makedirs(root_dir + '/train/CoregisteredBlurryImages')\n os.makedirs(root_dir + '/train/ClearImages')\n os.makedirs(root_dir + '/train/Masks')\n\n # Create validation data directories\n os.makedirs(root_dir + '/val')\n os.makedirs(root_dir + '/val/CoregisteredBlurryImages')\n os.makedirs(root_dir + '/val/ClearImages')\n os.makedirs(root_dir + '/val/Masks')\n\n # Create testing data directories\n os.makedirs(root_dir + '/test')\n os.makedirs(root_dir + '/test/CoregisteredBlurryImages')\n os.makedirs(root_dir + '/test/ClearImages')\n os.makedirs(root_dir + '/test/Masks')\n\n except OSError as e:\n if e.errno != errno.EEXIST:\n raise", "def create_data_folders() -> None:\n if not os.path.exists(\"data/save\"):\n os.mkdir(\"./data\")\n os.mkdir(\"./data/save\")\n if not os.path.exists(\"data/critics\"):\n os.mkdir(\"./data/critics\")\n if not os.path.exists('data/policies/'):\n os.mkdir('data/policies/')\n if not os.path.exists('data/results/'):\n os.mkdir('data/results/')", "def create_station_dics(data_directories):\n \n files_all = {} \n for k,v in data_directories.items() :\n files = os.listdir(v)\n \n for f in files:\n station = f.split('_')[0] \n if station not in files_all.keys():\n files_all[station] = {}\n \n if k == 'ncar': # separating ncar temperature and wind files \n if 'trhc' in f:\n k = 'ncar_t'\n elif 'windc' in f:\n k = 'ncar_w'\n files_all[station][k] = ''\n files_all[station][k] = v + '/' + f # compelte path to the netCDF file \n\n #print('check') \n \n \n return files_all", "def load_data(tetrode_number=TETRODE_NUMBER):\n print(\"Loading data...\")\n X_train, X_valid, X_test, y_train_labels, y_valid_labels, y_test_labels = formatData(tetrode_number,BASENAME,CONV)\n print(\"Done!\")\n\n X_train = X_train.reshape(X_train.shape[0],1,X_train.shape[1],X_train.shape[2])\n X_valid = X_valid.reshape(X_valid.shape[0],1,X_valid.shape[1],X_valid.shape[2])\n X_test = X_test.reshape(X_test.shape[0],1,X_test.shape[1],X_test.shape[2])\n\n\n y_train = X_train\n y_valid = X_valid\n y_test = X_test\n\n r={}\n for x,y in zip(X_test,y_test_labels):\n # print(\"x: {}\".format(x))\n # print(\"y: {}\".format(y))\n _y = list(y)\n if int(_y.index(1.0)) not in r:\n r[int(_y.index(1.0))]=[x]\n else:\n r[int(_y.index(1.0))].append(x)\n\n for key in r:\n 
r[key] = np.asarray(r[key])\n\n\n return dict(\n X_train=X_train,\n y_train=y_train,\n X_valid=X_valid,\n y_valid=y_valid,\n X_test=X_test,\n y_test=y_test,\n labeled_test=r,\n caswells_dim = y_train_labels.shape[-1],\n num_examples_train=X_train.shape[0],\n num_examples_valid=X_valid.shape[0],\n num_examples_test=X_test.shape[0],\n input_shape=X_train.shape,\n output_dim=y_train.shape[-1],\n )", "def _init_data_dir(parent_dir: Path, directories_to_include, files_to_include):\n data_dir = parent_dir / \"data\"\n data_dir.mkdir()\n\n for relative_directory_path in directories_to_include:\n (data_dir / relative_directory_path).mkdir(parents=True)\n\n for relative_file_path in files_to_include:\n (data_dir / relative_file_path).touch()", "def get_data_paths(directory: Optional[str] = None) -> DataPaths:\n if directory is None:\n directory = DATA_DIRECTORY\n\n os.makedirs(directory, exist_ok=True)\n\n node_data_path = os.path.join(directory, 'nodes.tsv')\n if not os.path.exists(node_data_path):\n logger.info(f'downloading {NODE_DATA_URL}')\n urlretrieve(NODE_DATA_URL, node_data_path)\n\n edge_data_path = os.path.join(directory, 'edges.sif.gz')\n if not os.path.exists(edge_data_path):\n logger.info(f'downloading {EDGE_DATA_URL}')\n urlretrieve(EDGE_DATA_URL, edge_data_path)\n\n transformed_features_path = os.path.join(directory, 'transformed-features.tsv.bz2')\n if not os.path.exists(transformed_features_path):\n logger.info(f'downloading {TRANSFORMED_FEATURES_URL}')\n urlretrieve(TRANSFORMED_FEATURES_URL, transformed_features_path)\n\n validate_data_path = os.path.join(directory, 'validation-statuses.tsv')\n if not os.path.exists(validate_data_path):\n logger.info(f'downloading {VALIDATE_DATA_URL}')\n urlretrieve(VALIDATE_DATA_URL, validate_data_path)\n\n symptomatic_data_path = os.path.join(directory, 'probabilities.tsv')\n if not os.path.exists(symptomatic_data_path):\n logger.info(f'downloading {SYMPTOMATIC_DATA_URL}')\n urlretrieve(SYMPTOMATIC_DATA_URL, symptomatic_data_path)\n\n repurpose_data_path = os.path.join(directory,'repurpose_overlap.json')\n if not os.path.exists(repurpose_data_path):\n logger.info(f'downloading {REPURPOSE_DATA_URL}')\n urlretrieve(REPURPOSE_DATA_URL, repurpose_data_path)\n\n repo_data_path = os.path.join(directory, 'repo_data.csv')\n if not os.path.exists(repo_data_path):\n logger.info(f'downloading {REPO_DATA_URL}')\n urlretrieve(REPO_DATA_URL, repo_data_path)\n\n permutation_directory = os.path.join(directory, \"permutations\")\n os.makedirs(permutation_directory, exist_ok=True)\n\n permutation_paths = []\n for i in range(5):\n permutation_data_path = os.path.join(permutation_directory, PERMUTATION_DATA_FILE_FMT.format(i + 1))\n if not os.path.exists(permutation_data_path):\n url = PERMUTATION_DATA_URL_FMT.format(i + 1)\n logger.info(f'downloading {url}')\n urlretrieve(url, permutation_data_path)\n permutation_paths.append(permutation_data_path)\n data_edge2vec_path = os.path.join(directory, 'data_edge2vec')\n\n return DataPaths(\n node_data_path=node_data_path,\n edge_data_path=edge_data_path,\n transformed_features_path=transformed_features_path,\n validate_data_path=validate_data_path,\n symptomatic_data_path=symptomatic_data_path,\n permutation_paths=permutation_paths,\n data_edge2vec_path=data_edge2vec_path,\n repurpose_data_path = repurpose_data_path,\n repo_data_path = repo_data_path\n )", "def get_training_data(data_dir):\n data = []\n for label in labels:\n path = os.path.join(data_dir, label)\n class_num = labels.index(label)\n img_set = 
os.listdir(path)\n n = len(img_set)\n for i in range(n):\n try:\n img = img_set[i]\n img_arr = cv2.imread(os.path.join(path, img))\n resized_arr = cv2.resize(img_arr, (img_size, img_size)) # Reshaping images to preferred size\n data.append([resized_arr, class_num])\n if i % 100 == 0:\n print(\"Processing images: {}/{}\".format(i + 1, n))\n except Exception as e:\n print(e)\n return np.array(data)", "def prepare_jsonlbpe_data(data_dir, train_data_file, dev_data_file, vocab_file):\n if not gfile.Exists(data_dir):\n gfile.MkDir(data_dir)\n\n # Get wmt data to the specified directory.\n train_path = get_qa_set(data_dir, train_data_file)\n dev_path = get_qa_set(data_dir, dev_data_file)\n\n # Create vocabularies of the appropriate sizes.\n vocab_path = os.path.join(data_dir, \"vocab.txt\")\n create_vocabulary(vocab_path, vocab_file)\n\n # Create token ids for the training data.\n src_train_ids_path = train_path + \".src.ids\"\n targ_train_ids_path = train_path + \".targ.ids\"\n data_to_token_ids(train_path + \".src\", src_train_ids_path, vocab_path)\n data_to_token_ids(train_path + \".targ\", targ_train_ids_path, vocab_path)\n\n # Create token ids for the development data.\n src_dev_ids_path = dev_path + \".src.ids\"\n targ_dev_ids_path = dev_path + \".targ.ids\"\n data_to_token_ids(dev_path + \".src\", src_dev_ids_path, vocab_path)\n data_to_token_ids(dev_path + \".targ\", targ_dev_ids_path, vocab_path)\n\n return (src_train_ids_path, targ_train_ids_path,\n src_dev_ids_path, targ_dev_ids_path,\n vocab_path)", "def create_dicts(dataset, sub_data_folder, num_words=30000):\n sbts, nodes, _, comms = Utils.split_sbts_nodes_comms(dataset['train'])\n print(\"sbt[0]: \", sbts[0])\n print(\"nodes[0]: \", nodes[0])\n print(\"comms[0]: \", comms[0])\n Utils.create_dict(sub_data_folder, \"sbts\", sbts, num_words)\n Utils.create_dict(sub_data_folder, \"nodes\", nodes, num_words)\n Utils.create_dict(sub_data_folder, \"comms\", comms, num_words)", "def load_data_files() -> Dict[str, Path]:\n default_path = paths.MISCELLANEOUS_DIRECTORY / \"portfolio\"\n custom_exports = (\n get_current_user().preferences.USER_PORTFOLIO_DATA_DIRECTORY / \"optimization\"\n )\n data_files = {}\n for directory in [default_path, custom_exports]:\n for file_type in [\"xlsx\", \"ini\"]:\n for filepath in Path(directory).rglob(f\"*.{file_type}\"):\n if filepath.is_file() and \"example\" not in filepath.name:\n data_files[filepath.name] = filepath\n\n return data_files", "def load_training_data(vocab, directory):\n top_level = os.listdir(directory)\n dataset = []\n for d in top_level:\n if d[-1] == '/':\n label = d[:-1]\n subdir = d\n else:\n label = d\n subdir = d+\"/\"\n files = os.listdir(directory+subdir)\n for f in files:\n bow = create_bow(vocab, directory+subdir+f)\n dataset.append({'label': label, 'bow': bow})\n return dataset", "def create_folders():\n if not os.path.exists(\"data/train-npy/\"):\n os.makedirs(\"data/train-npy/\")\n if not os.path.exists(\"data/test-npy/\"):\n os.makedirs(\"data/test-npy/\")\n if not os.path.exists(\"data/valid-npy/\"):\n os.makedirs(\"data/valid-npy/\")", "def __load_data(self, input_directory):\n print(\"Loading data...\")\n self.training, self.validation, self.testing = (\n tuple(\n numpy.load(\n os.path.join(input_directory, '{}_{}.npy'.format(x, y))\n )\n for y in ('data', 'labels')\n )\n for x in ('training', 'validation', 'testing')\n )\n self.report['data_directory'] = input_directory\n self.report['images_training'] = len(self.training[1])\n self.report['images_validation'] = 
len(self.validation[1])\n self.report['images_testing'] = len(self.validation[1])", "def _set_dirs(self, datafolder):\n self.List_of_dir = []\n self.List_of_files = dict()\n folders = os.listdir(datafolder)\n folders.sort()\n for i in folders:\n if os.path.isdir(os.path.join(datafolder,i)) and i != '.ipynb_checkpoints': # ignore .ipynb_checkpoints, allowing the generator to work in Amazon\n self.List_of_dir.append(os.path.join(datafolder,i))\n self.List_of_files[os.path.join(datafolder,i)]=[]\n for file in os.listdir(os.path.join(datafolder, i, 'Input')):\n if file.split('.')[-1] == 'hdf5':\n self.List_of_files[os.path.join(datafolder,i)].append(file.split('.')[-2])\n self._nb_dir = len(self.List_of_dir)", "def gen_data_dir(img_dir, id_label_dict, num_class, shuffle=True):\n img_file_path = gen_img_files(img_dir, shuffle)\n return gen_data_file(img_file_path, id_label_dict, num_class)", "def construct_data(paths=DEFAULT_PATHS, use_saved=True):\n if not verify_paths(paths):\n raise FileNotFoundError('Some of the required data files could not be '\n 'found. Before running the project, run '\n '`setup.sh` to create/download them.')\n\n # Paths to save or load the constructed datasets from\n saved_train = os.path.join(paths['dir_output'], 'train.pk')\n saved_test = os.path.join(paths['dir_output'], 'test.pk')\n\n # Load the data if possible\n if (os.path.exists(saved_train) and os.path.exists(saved_test)\n and use_saved):\n print('Found existing saved dataset; loading it...')\n with open(saved_train, mode='rb') as train_file:\n train = pickle.load(train_file)\n with open(saved_test, mode='rb') as test_file:\n test = pickle.load(test_file)\n return train, test\n\n print('Constructing dataset...')\n\n # Read in the .csv files and create DataFrames for train, test observations\n depths = pd.read_csv(paths['df_depths'], index_col='id')\n train = pd.read_csv(paths['df_train'], index_col='id', usecols=[0])\n train = train.join(depths)\n test = depths[~depths.index.isin(train.index)].copy()\n\n # (Training images)\n print('Reading training images...')\n path = paths['dir_train_images'] + '{}.png'\n train['image'] = [read_image(path.format(img))\n for img in tqdm(train.index)]\n\n # (Training masks)\n print('Reading training masks...')\n path = paths['dir_train_masks'] + '{}.png'\n train['mask'] = [read_image(path.format(img)) for img in tqdm(train.index)]\n\n # (Testing images)\n print('Reading test images...')\n path = paths['dir_test_images'] + '{}.png'\n test['image'] = [read_image(path.format(img)) for img in tqdm(test.index)]\n\n # Calculate the coverage for the training images\n # Then, bin the images into discrete classes corresponding to coverage\n train['coverage'] = train['mask'].map(np.sum) / pow(101, 2)\n train['cov_class'] = train['coverage'].map(\n lambda cov: np.int(np.ceil(cov * 10)))\n\n # Write to file\n print('Saving the constructed dataset...')\n try:\n with open(saved_train, mode='wb') as train_file:\n pickle.dump(train, train_file)\n with open(saved_test, mode='wb') as test_file:\n pickle.dump(test, test_file)\n except OSError:\n print('Could not save the data due to an occasional Python bug on '\n 'some systems. :( If this is happening on macOS, try running on '\n 'Linux instead.')\n\n return train, test", "def build_filedic(data_path, lanczos_path):\n filedic = {'CERA': sorted(glob.glob(data_path + 'CERA20C/*.nc')),\n 'lanczos(CERA)': sorted(glob.glob(lanczos_path + 'CERA_7*.nc')),\n 'lanczos(20CR)': sorted(glob.glob(lanczos_path + '20CRv3_5*.nc'))}\n return filedic" ]
[ "0.6833727", "0.66777116", "0.6587898", "0.653809", "0.64276856", "0.64088523", "0.6406939", "0.6266207", "0.62603366", "0.62525475", "0.61507684", "0.61411524", "0.61401844", "0.61256236", "0.6117592", "0.6111499", "0.61099344", "0.6104643", "0.60664076", "0.60528195", "0.6051175", "0.60297865", "0.6006025", "0.6004446", "0.600181", "0.60001916", "0.5989986", "0.59867656", "0.5974116", "0.59688044" ]
0.7099108
0
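The negative snippets above all share the same read/resize/append image-loading pattern; a compact sketch of that pattern follows (cv2 and numpy assumed; the directory layout and default size are hypothetical):

```python
import os
import cv2
import numpy as np

def load_class_images(path, class_num, img_size=224):
    """Read every image under `path`, resize to a square, and tag it with its class id."""
    data = []
    for name in sorted(os.listdir(path)):
        img = cv2.imread(os.path.join(path, name))
        if img is None:  # unreadable file: skip instead of raising, like the try/except above
            continue
        data.append([cv2.resize(img, (img_size, img_size)), class_num])
    return np.array(data, dtype=object)  # dtype=object since each row mixes an array and an int
```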
Reject the indent form.
def reject(self):
    print("This form has been rejected. Current state:", self.state)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def leave(self):\n assert(self.indent > 0)\n self.indent -= 1", "def reject(self):\n pass", "def check_indent_allowed(self) -> bool:\n return False", "def check_indent_allowed(self) -> bool:\n return False", "def dedent(self):\n self.indent_level -= self.INDENT_STEP", "def dedent(self):\n self.indent_level -= self.INDENT_STEP", "def doDedent(context, match):\n\treturn True\n\tv = context.getVariables().getParent ()\n\ti = v.get(\"requiredIndent\") or 0\n\tv.set(\"requiredIndent\", i - 1)\n\treturn True", "def check_indent_allowed(self) -> bool:\n return True", "def __editUnindent(self):\n self.activeWindow().unindentLineOrSelection()", "def unindent(self):\r\n editor = self.get_current_editor()\r\n if editor is not None:\r\n editor.unindent()", "def on_buttonBox_rejected(self):\n self.reject()", "def _decreaseindentation(self):\n self._curindent = self._indentlist.pop()", "def test_reset_limit_on_indent(self):\n indenter = indent.Indenter()\n indenter.indentation = -2\n self.assertRaises(ValueError, indenter.indent)\n indenter.indentation = -1\n self.assertRaises(ValueError, indenter.indent)\n indenter.indentation = 0\n indenter.indent()\n indenter.indentation = +1\n indenter.indent()\n indenter.indentation = +2\n indenter.indent()", "def try_print_indent(self):\n if self.lasttoken[0] != lex.Token.NEWLINE:\n return\n\n if len(self.lasttoken[1]) > 0:\n self.buffer.scope_line(\"__io.write(u'\" + self.lasttoken[1] + \"')\")", "def test_incorrect_indent(self, x=1, y=2): # noqa: D207, D213, D407", "def visit(self, token: tokenize.TokenInfo) -> None:\n self._check_extra_indentation(token)", "def _reject_rendering(self):\n\n curItem = self.tree.focus()\n parent = self.tree.parent(curItem)\n\n categories = ['approved', 'conflicts', 'suggestions', 'unknown', \\\n 'cldr',]\n if parent is '':\n #skip it\n pass\n else:\n if parent not in categories:\n curTerm = parent\n category = self.tree.parent(parent)\n else:\n curTerm = curItem\n category = parent\n if category == 'approved':\n #move from approved to unknown, with rendering deleted\n self.tree.item(curTerm, \\\n values=[self.tree.item(curTerm)['values'][0], ''])\n self.tree.move(curTerm, 'unknown', 'end')\n pass\n elif category == 'sugestions':\n if curTerm != curItem:\n self.tree.delete(curItem)\n if len(self.tree.get_children(curTerm)) < 1:\n self.tree.move(curTerm, 'unknown', 'end')\n # move curTrem from suggestions to unknown\n else: #if curTerm == curItem:\n self.tree.delete(*self.tree.get_children(curTerm))\n self.tree.move(curTerm, 'unknown', 'end')\n pass\n elif category == 'conflicts':\n if curTerm != curItem:\n self.tree.delete(curItem)\n if len(self.tree.get_children(curTerm)) == 1:\n curItem = self.tree.get_children(curTerm)[0]\n va = self.tree.item(curTerm)['values']\n vb = self.tree.item(curItem)['values']\n self.tree.item(curTerm, values=[va[0], vb[1]])\n self.tree.item(curTerm, tags='approved')\n self.tree.move(curTerm, 'approved', 'end')\n pass\n elif category == 'unknown':\n #ignore\n pass\n elif category == 'cldr':\n #ignore\n pass\n else:\n messagebox.showerror('_reject_rendering', \\\n 'Unknown category {}.'.format(category))\n\n self._make_suggestions()\n \n self.tree.tag_configure('approved', background='palegreen')\n self.tree.tag_configure('conflict', background='bisque')\n self.tree.tag_configure('suggestions', background='lightblue')\n self.tree.tag_configure('unknown', background='whitesmoke')\n self.tree.tag_configure('cldr', background='violet')\n self.update()", "def test_reset_limit_on_dedent(self):\n indenter = 
indent.Indenter()\n indenter.indentation = -2\n self.assertRaises(ValueError, indenter.dedent)\n indenter.indentation = -1\n self.assertRaises(ValueError, indenter.dedent)\n indenter.indentation = 0\n self.assertRaises(ValueError, indenter.dedent)\n indenter.indentation = +1\n indenter.dedent()\n indenter.indentation = +2\n indenter.dedent()", "def reject(self):\r\n QtGui.QDialog.reject(self)", "def ignore(self):\n self.accepted = False", "def ignore(self):\n self.accepted = False", "def unindent(self):\n self.x_pos -= 10", "def enter(self):\n self.indent += 1", "def reject(self):\n self.selectedoptions = None\n QDialog.reject(self)", "def reset_indentation(self, amount):\n while self.result and self.result[-1][0] == INDENT:\n self.result.pop()\n self.result.append((INDENT, amount))", "def __exit__(exc_type, value, traceback):\n del exc_type\n del value\n del traceback\n\n IndentedLogger._indent_level -= 1\n\n if (IndentedLogger._indent_level == 0 and\n IndentedLogger._printed_on_secondary_indents):\n print_message(\"\\n\")\n IndentedLogger._printed_on_secondary_indents = False", "def reject(self):\n\n self.date_of_submission = None\n api.content.transition(obj=self,\n transition='proposal-transition-reject')", "def reject_appl(data, ind):\n global rejected\n global pending_sheet\n rejected.append_row(data)\n ind += 1\n pending_sheet.delete_rows(ind)\n print(colored('\\nApplication rejected.\\n', 'cyan', attrs=['bold']))", "def outdent(self, decrement=1):\n # decrease the indentation level\n self._level -= decrement\n # and adjust the margin filler\n self.leader = self._indenter * self._level\n # all done\n return self", "def __editIndent(self):\n self.activeWindow().indentLineOrSelection()" ]
[ "0.6469292", "0.6420912", "0.6316097", "0.6316097", "0.62897295", "0.62897295", "0.62538606", "0.61956406", "0.61297977", "0.61183536", "0.57835186", "0.5754162", "0.5709012", "0.5672997", "0.56495655", "0.5631409", "0.56085104", "0.5585363", "0.5583762", "0.54932153", "0.54932153", "0.5456679", "0.545577", "0.54281324", "0.5401182", "0.53736466", "0.5352133", "0.53355616", "0.52963114", "0.52866286" ]
0.65557706
0
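A minimal Python 3 sketch of the reject pattern in the record above; the surrounding form class, its state field, and the state transition are assumptions, since the original method only prints:

```python
class IndentForm:
    """Hypothetical indent form carrying a simple state string."""

    def __init__(self):
        self.state = "pending"

    def reject(self):
        self.state = "rejected"  # assumed transition; the original snippet only reports the state
        print("This form has been rejected. Current state:", self.state)

form = IndentForm()
form.reject()  # -> This form has been rejected. Current state: rejected
```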
scale crop the image to make every image of the same square size, H = W = crop_size
def _scale_and_crop(self, img, seg, crop_size):
    h, w = img.shape[0], img.shape[1]
    # if train:
    #     # random scale
    #     scale = random.random() + 0.5  # 0.5-1.5
    #     scale = max(scale, 1. * crop_size / (min(h, w) - 1))  # ??
    # else:
    #     # scale to crop size
    #     scale = 1. * crop_size / (min(h, w) - 1)
    scale = crop_size / min(h, w)
    if scale > 1:
        print('scale: ', scale)
        img = transform.rescale(img, scale, mode='reflect', order=1)  # order 1 is bilinear
        seg = transform.rescale(seg.astype(float), scale, mode='reflect', order=0)  # order 0 is nearest neighbor
    h_s, w_s = img.shape[0], img.shape[1]
    if self.validation or self.testing:
        # center crop
        x1 = (w_s - crop_size) // 2
        y1 = (h_s - crop_size) // 2
    else:
        # random crop
        x1 = random.randint(0, w_s - crop_size)
        y1 = random.randint(0, h_s - crop_size)
    img_crop = img[y1: y1 + crop_size, x1: x1 + crop_size, :]
    seg_crop = seg[y1: y1 + crop_size, x1: x1 + crop_size]
    return img_crop, seg_crop
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def crop_resize_image(image: np.ndarray, size) -> np.ndarray:\n width, height = image.size\n if width > height:\n left = (width - height) / 2\n right = width - left\n top = 0\n bottom = height\n else:\n top = (height - width) / 2\n bottom = height - top\n left = 0\n right = width\n image = image.crop((left, top, right, bottom))\n image = image.resize(size, Image.ANTIALIAS)\n return image", "def _image_resize_keep_ratio(self, image, max_w, max_h, crop=False):\n if crop:\n width, height = sizing.new_size_keep_aspect_ratio(image.size, (max_w, max_h), 'outer')\n image = image.resize((width, height), Image.ANTIALIAS)\n image = image.crop(sizing.new_size_by_croping(image.size, (max_w, max_h)))\n else:\n width, height = sizing.new_size_keep_aspect_ratio(image.size, (max_w, max_h), 'inner')\n image = image.resize((width, height), Image.ANTIALIAS)\n return image, image.size[0], image.size[1]", "def crop_to_square(self, image):\n orig_height, orig_width, orig_channels = image.shape\n if orig_height > orig_width:\n return image[:orig_width, ...]\n elif orig_height < orig_width:\n return image[:, :orig_height, ...]\n return image", "def _image_resize_keep_ratio(self, image, max_w, max_h, crop=False):\n raise NotImplementedError", "def resize_and_crop(img, savePath, size, crop_type='middle'):\n\t# If height is higher we resize vertically, if not we resize horizontally\n\n\t# Get current and desired ratio for the images\n\timg_ratio = img.size[0] / float(img.size[1])\n\tratio = size[0] / float(size[1])\n\tprint \"img_ratio = \" + str(img_ratio) + \". ratio = \" + str(ratio)\n\t#The image is scaled/cropped vertically or horizontally depending on the ratio\n\tif ratio > img_ratio:\n\t\timg = img.resize((size[0], int(round(size[0] * img.size[1] / img.size[0]))),\n\t\t Image.ANTIALIAS)\n\t\t# Crop in the top, middle or bottom\n\t\tif crop_type == 'top':\n\t\t\tbox = (0, 0, img.size[0], size[1])\n\t\telif crop_type == 'middle':\n\t\t\tbox = (0, int(round((img.size[1] - size[1]) / 2)), img.size[0],\n\t\t int(round((img.size[1] + size[1]) / 2)))\n\t\telif crop_type == 'bottom':\n\t\t\tbox = (0, img.size[1] - size[1], img.size[0], img.size[1])\n\t\telse :\n\t\t\traise ValueError('ERROR: invalid value for crop_type')\n\t\tprint \"ratio > img_ratio. Cropping box: \"\n\t\tprint box\n\t\timg = img.crop(box)\n\telif ratio < img_ratio:\n\t\timg = img.resize((int(round(size[1] * img.size[0] / img.size[1])), size[1]),\n\t\t Image.ANTIALIAS)\n\t\t# Crop in the top, middle or bottom\n\t\tif crop_type == 'top':\n\t\t\tbox = (0, 0, size[0], img.size[1])\n\t\telif crop_type == 'middle':\n\t\t\tbox = (int(round((img.size[0] - size[0]) / 2)), 0,\n\t\t int(round((img.size[0] + size[0]) / 2)), img.size[1])\n\t\telif crop_type == 'bottom':\n\t\t\tbox = (img.size[0] - size[0], 0, img.size[0], img.size[1])\n\t\telse :\n\t\t\traise ValueError('ERROR: invalid value for crop_type')\n\n\t\tprint \"ratio < img_ratio. 
Cropping box: \"\n\t\tprint box\n\t\timg = img.crop(box)\n\telse :\n\t\tprint \"so i guess the ratios are the same?\"\n\t\timg = img.resize((size[0], size[1]),\n\t Image.ANTIALIAS)\n\t # If the scale is the same, we do not need to crop\n\tprint \"Saving thumb to \" + savePath\n\timg.save(savePath, \"JPEG\", quality=thumbCompressQual)", "def _image_resize_keep_ratio(self, image, max_w, max_h, crop=False):\n inter = cv2.INTER_AREA\n height, width = image.shape[:2]\n\n source_aspect_ratio = float(width) / height\n target_aspect_ratio = float(max_w) / max_h\n\n if crop:\n if source_aspect_ratio <= target_aspect_ratio:\n h_cropped = int(width / target_aspect_ratio)\n x_offset = 0\n y_offset = int((float(height) - h_cropped) / 2)\n cropped = image[y_offset:(y_offset + h_cropped), x_offset:width]\n else:\n w_cropped = int(height * target_aspect_ratio)\n x_offset = int((float(width) - w_cropped) / 2)\n y_offset = 0\n cropped = image[y_offset:height, x_offset:(x_offset + w_cropped)]\n image = cv2.resize(cropped, (max_w, max_h), interpolation=inter)\n else:\n width, height = sizing.new_size_keep_aspect_ratio((width, height), (max_w, max_h), 'inner')\n image = cv2.resize(image, (width, height), interpolation=cv2.INTER_AREA)\n return image, image.shape[1], image.shape[0]", "def crop_resize_img(img_path, target_size, crop_amount):\n img = image.load_img(img_path)\n x = image.img_to_array(img)\n x = x[crop_amount:-crop_amount, crop_amount:-crop_amount, :]\n ximg = Image.fromarray(np.uint8(x))\n ximg_resize = ximg.resize((target_size[0], target_size[1]))\n x = image.img_to_array(ximg_resize)\n\n return x", "def crop_to_square(image):\n\n if image is None:\n return None\n w, h = (image.shape[1], image.shape[0])\n w = float(w)\n h = float(h)\n\n # only crop images automatically if the aspect ratio is not bigger than 2 or not smaller than 0.5\n aspectRatio = w / h\n if aspectRatio > 3 or aspectRatio < 0.3:\n return None\n if aspectRatio == 1.0:\n return image\n \n # the shortest edge is the edge of our new square. 
b is the other edge\n a = min(w, h)\n b = max(w, h)\n\n # get cropping position\n x = (b - a) / 2.0\n\n # depending which side is longer we have to adjust the points\n # Heigth is longer\n if h > w:\n upperLeft = (0, x) \n else:\n upperLeft = (x, 0)\n cropW = cropH = a \n return crop_image(image, upperLeft[0], upperLeft[1], cropW, cropH)", "def crop_resize(img, top, left, width, height, size, interpolation=Image.BILINEAR):\n assert _is_numpy_image(img), 'img should be PIL Image'\n img = crop(img, top, left, width, height)\n img = resize(img, size, interpolation)\n return img", "def img_preprocess(self, img, output_size=64):\n w, h = img.size\n if w > h:\n box_param = (int(w * 0.5 - h * 0.5), 0, int(w * 0.5 + h * 0.5), h)\n cropped = img.crop(box_param)\n else: # w < h\n box_param = (0, int(h * 0.5 - w * 0.5), w, int(h * 0.5 + w * 0.5))\n cropped = img.crop(box_param)\n\n resized = cropped.resize((output_size, output_size))\n resized = np.asarray(resized)\n\n return resized", "def scale_image(self, pixels, size):\n x_min, x_max = np.amin(pixels[:,0]), np.amax(pixels[:,0])\n y_min, y_max = np.amin(pixels[:,1]), np.amax(pixels[:,1])\n z_min, z_max = np.amin(pixels[:,2]), np.amax(pixels[:,2])\n \n pixels[:,0] -= x_min \n pixels[:,1] -= y_min\n pixels[:,2] -= z_min\n \n x_max -= x_min\n y_max -= y_min\n z_max -= z_min\n \n scale_factor = size / max(x_max, y_max, z_max) \n # All points are now between [0..max]\n\n pixels *= scale_factor\n return pixels", "def crop_and_resize(image, boxes, size):\n box_ind = keras.backend.zeros_like(boxes, tensorflow.int32)\n box_ind = box_ind[..., 0]\n box_ind = keras.backend.reshape(box_ind, [-1])\n\n boxes = keras.backend.reshape(boxes, [-1, 4])\n\n return tensorflow.image.crop_and_resize(image, boxes, box_ind, size)", "def resized_crop(img, i, j, h, w, size, interpolation=cv2.INTER_LINEAR):\n assert _is_numpy_image(img), 'img should be nparray Image'\n img = crop(img, i, j, h, w)\n img = resize(img, size, interpolation)\n return img", "def resized_crop(img, top, left, height, width, size, interpolation=cv2.INTER_LINEAR):\n assert _is_numpy(img), 'img should be Numpy Image'\n img = crop(img, top, left, height, width)\n img = resize(img, size, interpolation)\n return img", "def resize_and_crop(img_path, modified_path, size, crop_type='top'):\n # If height is higher we resize vertically, if not we resize horizontally\n img = Image.open(img_path)\n # Get current and desired ratio for the images\n img_ratio = img.size[0] / float(img.size[1])\n ratio = size[0] / float(size[1])\n #The image is scaled/cropped vertically or horizontally depending on the ratio\n if ratio > img_ratio:\n img = img.resize((size[0], size[0] * img.size[1] / img.size[0]),\n Image.ANTIALIAS)\n # Crop in the top, middle or bottom\n if crop_type == 'top':\n box = (0, 0, img.size[0], size[1])\n elif crop_type == 'middle':\n box = (0, (img.size[1] - size[1]) / 2, img.size[0], (img.size[1] + size[1]) / 2)\n elif crop_type == 'bottom':\n box = (0, img.size[1] - size[1], img.size[0], img.size[1])\n else :\n raise ValueError('ERROR: invalid value for crop_type')\n img = img.crop(box)\n elif ratio < img_ratio:\n img = img.resize((size[1] * img.size[0] / img.size[1], size[1]),\n Image.ANTIALIAS)\n # Crop in the top, middle or bottom\n if crop_type == 'top':\n box = (0, 0, size[0], img.size[1])\n elif crop_type == 'middle':\n box = ((img.size[0] - size[0]) / 2, 0, (img.size[0] + size[0]) / 2, img.size[1])\n elif crop_type == 'bottom':\n box = (img.size[0] - size[0], 0, img.size[0], img.size[1])\n else 
:\n raise ValueError('ERROR: invalid value for crop_type')\n img = img.crop(box)\n else :\n img = img.resize((size[0], size[1]),\n Image.ANTIALIAS)\n # If the scale is the same, we do not need to crop\n img.save(modified_path)", "def resize_images(imgs, size=(720, 720)):\n res = []\n for img in imgs:\n\n factor = max(1, size[0]/float(img.shape[0]), size[1]/float(img.shape[1]))\n if factor != 1: img = scale_image(img, factor)\n\n img = center_crop(img, size)\n res.append(img)\n\n return res", "def square_image(img):\r\n x,y = img.size\r\n while y > x:\r\n #slice 10px at a time until square\r\n slice_height = min(y - x, 10)\r\n\r\n bottom = img.crop((0, y - slice_height, x, y))\r\n top = img.crop((0, 0, x, slice_height))\r\n\r\n #remove the slice with the least entropy\r\n if image_entropy(bottom) < image_entropy(top):\r\n img = img.crop((0, 0, x, y - slice_height))\r\n else:\r\n img = img.crop((0, slice_height, x, y))\r\n\r\n x,y = img.size\r\n\r\n return img", "def crop_image(filename, n):\n image = SimpleImage(filename)\n width = image.width\n new_width = width - (2 * n)\n height = image.height\n new_height = height - (2 * n)\n image_crop_width = SimpleImage.blank(new_width, height)\n for y in range(height):\n for x in range(new_width):\n pixel = image.get_pixel((x + n), y)\n image_crop_width.set_pixel(x, y, pixel)\n image_crop_width.show()\n\n image_crop_height = SimpleImage.blank(width, new_height)\n for y in range(new_height):\n for x in range(width):\n pixel = image.get_pixel(x, y + n)\n image_crop_height.set_pixel(x, y, pixel)\n image_crop_height.show()\n\n image_crop_width_height = SimpleImage.blank(new_width, new_height)\n for y in range(new_height):\n for x in range(new_width):\n pixel = image.get_pixel(x + n, y + n)\n image_crop_width_height.set_pixel(x, y, pixel)\n image_crop_width_height.show()", "def resize_and_crop(img_path, modified_path, size, crop_type='top'):\n # If height is higher we resize vertically, if not we resize horizontally\n img = Image.open(img_path)\n # Get current and desired ratio for the images\n img_ratio = img.size[0] / float(img.size[1])\n ratio = size[0] / float(size[1])\n # The image is scaled/cropped vertically or horizontally depending on the ratio\n if ratio > img_ratio:\n img = img.resize((size[0], int(round(size[0] * img.size[1] / img.size[0]))),\n Image.ANTIALIAS)\n # Crop in the top, middle or bottom\n if crop_type == 'top':\n box = (0, 0, img.size[0], size[1])\n elif crop_type == 'middle':\n box = (0, int(round((img.size[1] - size[1]) / 2)), img.size[0],\n int(round((img.size[1] + size[1]) / 2)))\n elif crop_type == 'bottom':\n box = (0, img.size[1] - size[1], img.size[0], img.size[1])\n else:\n raise ValueError('ERROR: invalid value for crop_type')\n img = img.crop(box)\n elif ratio < img_ratio:\n img = img.resize((int(round(size[1] * img.size[0] / img.size[1])), size[1]),\n Image.ANTIALIAS)\n # Crop in the top, middle or bottom\n if crop_type == 'top':\n box = (0, 0, size[0], img.size[1])\n elif crop_type == 'middle':\n box = (int(round((img.size[0] - size[0]) / 2)), 0,\n int(round((img.size[0] + size[0]) / 2)), img.size[1])\n elif crop_type == 'bottom':\n box = (img.size[0] - size[0], 0, img.size[0], img.size[1])\n else:\n raise ValueError('ERROR: invalid value for crop_type')\n img = img.crop(box)\n else:\n img = img.resize((size[0], size[1]), Image.ANTIALIAS)\n # If the scale is the same, we do not need to crop\n img.save(modified_path)", "def resize_image(img, size):\n width = int(img.shape[1])\n height = int(img.shape[0])\n new_width = 
0\n crop = 0\n\n if width < height:\n new_width = size\n new_height = int((size * height) / width)\n crop = new_height - size\n img = cv2.resize(img, (new_width, new_height), 0, 0, cv2.INTER_CUBIC)\n half_crop = int(crop / 2)\n img = img[crop / 2:size + half_crop, :]\n else:\n new_height = size\n new_width = int((size * width) / height)\n crop = new_width - size\n img = cv2.resize(img, (new_width, new_height), 0, 0, cv2.INTER_CUBIC)\n half_crop = int(crop / 2)\n img = img[:, half_crop:size + half_crop]\n return img", "def five_crop(img, size):\n if isinstance(size, numbers.Number):\n size = (int(size), int(size))\n else:\n assert len(size) == 2, \"Please provide only two dimensions (h, w) for size.\"\n\n # w, h = img.size\n h, w = img.shape[:2]\n crop_h, crop_w = size\n if crop_w > w or crop_h > h:\n raise ValueError(\"Requested crop size {} is bigger than input size {}\".format(size,\n (h, w)))\n # img[i:i+h, j:j+w]\n # tl = img.crop((0, 0, crop_w, crop_h))\n tl = img[0:0+crop_h, 0:0+crop_w]\n # tr = img.crop((w - crop_w, 0, w, crop_h))\n tr = img[0:0+crop_h, w-crop_w:]\n # bl = img.crop((0, h - crop_h, crop_w, h))\n bl = img[h-crop_h:, 0:0+crop_w]\n # br = img.crop((w - crop_w, h - crop_h, w, h))\n br = img[h-crop_h:,w-crop_w:]\n center = center_crop(img, (crop_h, crop_w))\n return (tl, tr, bl, br, center)", "def crop_square(image, size):\n width, height = image.size\n top = random.randint(0, max(0, height-size))\n left = random.randint(0, max(0, width-size))\n bottom = min(top + size, height)\n right = min(left + size, width)\n\n return image.crop((left, top, right, bottom))", "def resize_img(self, filename: str, size: Tuple[int, int] = (299, 299)):\n img = Image.open(join(self.source_dir, filename))\n width, height = img.size\n orig_shape = np.array(img.size)\n wanted_shape = np.array(size)\n ratios = wanted_shape / orig_shape\n wanted_width, wanted_height = size\n ratio_w, ratio_h = wanted_width / width, wanted_height / height\n\n if np.alltrue(ratios > 1):\n # Both sides of the image are shorter than the desired dimension,\n # so take the side that's closer in size and enlarge the image\n # in both directions to make that one fit\n factor = min(ratio_h, ratio_w)\n img = img.resize((int(width * factor), int(height * factor)))\n\n # Now we have an image that's either larger than the desired shape\n # or at least one side matches the desired shape and we can resize\n # with contain\n cover = resizeimage.resize_contain(img, size)\n cover.save(join(self.dest_dir, filename), 'JPEG')", "def __call__(self, img):\n image_width, image_height = img.size\n image_short = min(image_width, image_height)\n\n crop_size = float(self.imgsize) / (self.imgsize + 32) * image_short\n\n crop_height, crop_width = crop_size, crop_size\n crop_top = int(round((image_height - crop_height) / 2.))\n crop_left = int(round((image_width - crop_width) / 2.))\n return img.crop((crop_left, crop_top, crop_left + crop_width, crop_top + crop_height))", "def __call__(self, img):\n image_width, image_height = img.size\n image_short = min(image_width, image_height)\n\n crop_size = float(self.imgsize) / (self.imgsize + 32) * image_short\n\n crop_height, crop_width = crop_size, crop_size\n crop_top = int(round((image_height - crop_height) / 2.))\n crop_left = int(round((image_width - crop_width) / 2.))\n return img.crop((crop_left, crop_top, crop_left + crop_width, crop_top + crop_height))", "def scale(img, scale):\n return resize(img, x_scale=scale, y_scale=scale)", "def _scale(self, image):\n\n if image.GetWidth() != 
self._width or image.GetHeight()!= self._height:\n image.Rescale(self._width, self._height)\n \n return image", "def Rescale(self):\r\n picWidth,picHeight = self.oldSize = self.GetSizeTuple()\r\n bitmap = self.scaled = self.bitmap\r\n if not bitmap: return\r\n imgWidth,imgHeight = bitmap.GetWidth(),bitmap.GetHeight()\r\n if self.scaling == 2 or (self.scaling == 1 and (imgWidth > picWidth or imgHeight > picHeight)):\r\n image = bitmap.ConvertToImage()\r\n factor = min(1.0*picWidth/imgWidth,1.0*picHeight/imgHeight)\r\n newWidth,newHeight = int(factor*imgWidth),int(factor*imgHeight)\r\n self.scaled = image.Scale(newWidth,newHeight).ConvertToBitmap()\r\n #self.scaled = image.Scale(newWidth,newHeight,wx.IMAGE_QUALITY_HIGH ).ConvertToBitmap()\r", "def resize_preserving_aspect_then_crop(self, image, new_shape):\n orig_shape = image.shape\n orig_height = orig_shape[0]\n orig_width = orig_shape[1]\n orig_chans = orig_shape[2]\n if orig_height > orig_width:\n scale = new_shape[0]/orig_width\n else:\n scale = new_shape[1]/orig_height\n new_height = int(orig_height * scale)\n new_width = int(orig_width * scale)\n # resize preserving aspect ratio\n image = transform.resize(image, [new_height, new_width, orig_chans], anti_aliasing=True,\n mode=\"reflect\")\n # crop to square\n image = self.crop_to_size(image, new_shape)\n # in case original image dim was less than new dim, expand\n image = transform.resize(image, new_shape, anti_aliasing=True,\n mode=\"reflect\")\n return image", "def image_crop_resize(image, crop_location, crop_size):\n with tf.name_scope('crop_image_from_xy'):\n s = image.get_shape().as_list()\n assert len(\n s\n ) == 4, \"Image needs to be of shape [all_jt, width, height, channel]\"\n #scale=crop_size/crop_size_best\n crop_location = tf.cast(crop_location, tf.float32)\n crop_size = tf.cast(crop_size, tf.float32)\n\n crop_size_best = tf.maximum(crop_location[1, 0] - crop_location[0, 0],\n crop_location[1, 1] - crop_location[0, 1])\n #[2,1]\n center = tf.cast(\n tf.expand_dims([(crop_location[1, 0] - crop_location[0, 0]) / 2,\n (crop_location[1, 1] - crop_location[0, 1]) / 2],\n -1), tf.float32)\n #note 此处切片之后 y/x变为了1维的\n y_min = tf.maximum(center[0] - crop_size_best // 2, 0.0)\n y_max = tf.minimum(y_min + crop_size_best, input_para['height'])\n x_min = tf.maximum(center[1] - crop_size_best // 2, 0.0)\n x_max = tf.minimum(x_min + crop_size_best, input_para['width'])\n boxes = tf.stack([\n y_min / (input_para['height'] - 1), x_min /\n (input_para['width'] - 1), y_max /\n (input_para['height'] - 1), x_max / (input_para['width'] - 1)\n ], -1)\n box_ind = tf.range(s[0])\n #先从原图像中提取box指定的crop_image再进行resize到crop_size\n image_cropped_and_resized = tf.image.crop_and_resize(\n image, boxes, box_ind, tf.cast([crop_size, crop_size], tf.int32))\n image_cropped_and_resized = tf.squeeze(image_cropped_and_resized)\n #[resized_height,resized_width,channels]\n return image_cropped_and_resized" ]
[ "0.72518253", "0.6984718", "0.6963812", "0.69593316", "0.6905565", "0.6862499", "0.68332845", "0.6787975", "0.6712533", "0.6682308", "0.6665915", "0.6658133", "0.6653898", "0.66323924", "0.6628663", "0.65988785", "0.658876", "0.65857023", "0.6571572", "0.65532595", "0.65094197", "0.650901", "0.64499", "0.64475894", "0.64475894", "0.64462864", "0.64457446", "0.6444487", "0.6431275", "0.6429858" ]
0.74396914
0
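A standalone usage sketch of the scale-then-crop logic from the record above, reduced to a single RGB image and a center crop (skimage and numpy assumed; `channel_axis` requires skimage >= 0.19):

```python
import numpy as np
from skimage import transform

def scale_and_center_crop(img, crop_size):
    """Upscale so the short side reaches crop_size, then take a center crop."""
    h, w = img.shape[:2]
    scale = crop_size / min(h, w)
    if scale > 1:  # only enlarge images whose short side is below crop_size
        img = transform.rescale(img, scale, mode='reflect', order=1,
                                channel_axis=-1)  # bilinear; keep RGB channels intact
    h, w = img.shape[:2]
    y1 = (h - crop_size) // 2
    x1 = (w - crop_size) // 2
    return img[y1:y1 + crop_size, x1:x1 + crop_size]

square = scale_and_center_crop(np.zeros((120, 200, 3)), 100)
assert square.shape[:2] == (100, 100)
```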
Create props suitable for plot panel
def create_plot_panel_props(prop_map):
    props = {}
    for k1, v in prop_map.items():
        v = {'edgecolor': v.get('color', None),
             'facecolor': 'none',
             'linewidth': v.get('width', None),
             'alpha': v.get('alpha', None)}
        for k2, _ in prop_map.items():
            if (k1, k2) not in props:
                props[k1, k2] = v
            if (k2, k1) not in props:
                props[k2, k1] = v
    return props
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def properties(self,prop):\n # The particulars of how they are stored and manipulated (e.g., do \n # we want an inventory internally) is not settled. I've used a\n # property dictionary for now.\n #\n # How these properties interact with a user defined style file is\n # even less clear.\n\n # Properties defined by plot\n self.subplot.set_xlabel(r\"$%s$\" % prop[\"xlabel\"])\n self.subplot.set_ylabel(r\"$%s$\" % prop[\"ylabel\"])\n self.subplot.set_title(prop[\"title\"])\n\n # Properties defined by user\n #self.axes.grid(True)", "def properties(self,prop):\r\n # The particulars of how they are stored and manipulated (e.g., do\r\n # we want an inventory internally) is not settled. I've used a\r\n # property dictionary for now.\r\n #\r\n # How these properties interact with a user defined style file is\r\n # even less clear.\r\n\r\n # Properties defined by plot\r\n self.xbox.set_text(r\"$%s$\" % prop[\"xlabel\"])\r\n self.ybox.set_text(r\"$%s$\" % prop[\"ylabel\"])\r\n self.tbox.set_text(r\"$%s$\" % prop[\"title\"])\r\n\r\n # Properties defined by user\r\n #self.axes.grid(True)\r", "def LoadProps( self, props_dict ):\n for k in ( 'timeValue', ):\n if k in props_dict:\n setattr( self, k, props_dict[ k ] )\n\n for k in ( 'dataSetSelections', ):\n if k in props_dict:\n cur_attr = props_dict[ k ]\n\tfor name in cur_attr.keys():\n\t cur_value = cur_attr[ name ]\n\t del cur_attr[ name ]\n\t cur_attr[ DataSetName( name ) ] = cur_value\n\t#end for name\n\n setattr( self, k, cur_attr )\n #end if k in props_dict\n #end for k\n\n super( PlotWidget, self ).LoadProps( props_dict )\n self.container.dataSetMenu.UpdateAllMenus()\n wx.CallAfter( self.UpdateState, replot = True )", "def SaveProps( self, props_dict, for_drag = False ):\n super( PlotWidget, self ).SaveProps( props_dict, for_drag = for_drag )\n\n for k in ( 'timeValue', ):\n props_dict[ k ] = getattr( self, k )\n\n for k in ( 'dataSetSelections', ):\n if hasattr( self, k ):\n cur_attr = getattr( self, k )\n\tif isinstance( cur_attr, dict ):\n\t for name in cur_attr.keys():\n\t if isinstance( name, DataSetName ):\n\t cur_value = cur_attr[ name ]\n\t del cur_attr[ name ]\n\t cur_attr[ name.name ] = cur_value\n\t #end for name\n\t#end if isinstance( cur_value, dict )\n\n\tprops_dict[ k ] = cur_attr\n #end if hasattr( self, k )\n #end for k", "def __init__(self, **kwargs):\n\n tmp = defs.copy()\n tmp.update(kwargs)\n\n for kw in tmp:\n setattr(self, kw, tmp[kw])\n \n if self.left is None:\n self.left = pl.rcParams['figure.subplot.left']\n if self.right is None:\n self.right = pl.rcParams['figure.subplot.right']\n if self.bottom is None:\n self.bottom = pl.rcParams['figure.subplot.bottom']\n if self.top is None:\n self.top = pl.rcParams['figure.subplot.top']\n \n self.l = self.left\n self.r = self.right\n self.b = self.bottom\n self.t = self.top \n \n self.square = self.dims[0] == self.dims[1]\n \n if (self.diagonal is not None) and not self.square:\n raise ValueError('Must have square matrix to use diagonal=True')\n\n self.dims = tuple(self.dims)\n self.J, self.K = self.dims # J = nrows, K = ncols\n self.nrows = self.J\n self.ncols = self.K\n \n if type(self.padding) is float:\n self.padding = tuple([self.padding]* 2)\n \n # Size of an individual panel (in inches)\n self.pane_size = np.array(self.figsize) * np.array([self.r-self.l, self.t-self.b])\n self.pane_size *= np.array(self.panel_size)\n\n # Now, figure out the size of the entire figure (in inches)\n self.panel_size = np.zeros(2)\n \n # After these two lines, self.panel_size is equal to the 
size of the\n # panel-filled area of the window (in inches)\n self.panel_size[0] = self.pane_size[0] * self.K + self.padding[0] * (self.K - 1)\n self.panel_size[1] = self.pane_size[1] * self.J + self.padding[1] * (self.J - 1) \n\n # Add empty area above/below and left/right of panel-filled area\n self.panel_size[0] += self.figsize[0] * (self.left + (1. - self.right))\n self.panel_size[1] += self.figsize[1] * (self.bottom + (1. - self.top))\n\n self.panel_size_rel = self.pane_size / self.panel_size\n\n self.share_x = self.padding[1] <= 0.2\n self.share_y = self.padding[0] <= 0.2 \n self.share_all = self.share_x and self.share_y\n\n self.dx = self.shift_x\n self.dy = self.shift_y\n\n # Create figure\n if type(self.fig) is not int:\n new_fig = False\n l, r = self.fig.subplotpars.left, self.fig.subplotpars.right\n b, t = self.fig.subplotpars.bottom, self.fig.subplotpars.top\n else:\n self.fig = pl.figure(self.fig, self.panel_size)\n new_fig = True\n\n # Adjust padding\n if self.preserve_margins:\n l = self.left * self.figsize[0] / self.panel_size[0]\n r = (self.left * self.figsize[0] + self.K * self.pane_size[0]) \\\n / self.panel_size[0]\n b = self.bottom * self.figsize[1] / self.panel_size[1]\n t = (self.bottom * self.figsize[1] + self.J * self.pane_size[1]) \\\n / self.panel_size[1]\n else:\n l, r, b, t = self.left, self.right, self.bottom, self.top\n \n self.fig.subplots_adjust(left=l, right=r, bottom=b, top=t, \n wspace=self.padding[0], hspace=self.padding[1])\n \n self.l, self.r, self.b, self.t = l, r, b, t\n\n # Important attributes for identifying individual panels\n self.N = int(np.prod(self.dims))\n self.elements = list(np.reshape(np.arange(self.N), self.dims))\n self.elements.reverse()\n self.elements = np.array(self.elements)\n\n # Dimensions of everything (in fractional units)\n #self.window = {'left': l, 'right': r, 'top': t, \n # 'bottom': b, 'pane': ((r-l) / float(dims[0]), (t-b) / float(dims[1]))}\n\n self.xaxes = self.elements[-1]\n self.yaxes = list(zip(*self.elements))[0] \n self.lowerleft = self.elements[-1][0]\n self.lowerright = self.elements[-1][-1]\n self.upperleft = self.elements[0][0]\n self.upperright = self.elements[0][-1]\n \n if self.square:\n self.diag = np.diag(self.elements) \n self.interior = list(self.elements.ravel())\n for element in self.diag:\n self.interior.remove(element)\n else:\n self.diag = None \n \n self.left = []\n self.right = []\n self.bottom = []\n self.top = []\n for i in range(self.N):\n k, j = self.axis_position(i) # col, row\n \n if j == 0:\n self.bottom.append(i)\n if j == self.nrows - 1:\n self.top.append(i) \n if k == 0:\n self.left.append(i)\n if k == self.ncols - 1:\n self.right.append(i) \n\n self.interior = []\n for i in range(self.N):\n if i in self.left:\n continue\n if i in self.bottom:\n continue\n \n self.interior.append(i)\n\n # Create subplots\n e_fl = self.elements.flatten()\n self.grid = [None for i in range(self.N)]\n for i in range(self.N): \n j, k = self.axis_position(i)\n \n if self.diagonal == 'lower':\n if k >= (self.dims[1] - j) and i not in self.diag:\n continue\n if self.diagonal == 'upper':\n if k < (self.dims[1] - j) and i not in self.diag:\n continue \n \n #if self.diagonal == 'lower' and j == k and (j, k) != (0, 0):\n # continue\n #if self.diagonal == 'upper' and j == k and (j, k) != (self.J-1, self.K-1):\n # continue\n \n if self.square:\n if i in self.diag and not self.keep_diagonal:\n continue\n \n if new_fig:\n self.grid[i] = AxisConstructor(self.fig, self.J, self.K, e_fl[i]+1)\n else:\n\n # col, row = j, 
k\n\n lef = l + j * self.panel_size_rel[0] \\\n + self.padding[0] + self.dx\n bot = b + k * self.panel_size_rel[1] \\\n + self.padding[1] + self.dy\n\n rect = [lef, bot, self.panel_size_rel[0], self.panel_size_rel[1]]\n\n self.grid[i] = self.fig.add_axes(rect)", "def generate_panel(self):\r\n \r\n self.PanelData = self.RawData.filter(['ID', 'X', 'Z', 'W', 'R', 'β', 'LFP', 'H'], axis=1)", "def set_plot_props(self):\n \n if self.type == \"gas\":\n self.marker = \"v\"\n self.color = \"cyan\"\n \n elif self.type == \"cluster\":\n self.marker = \"o\"\n self.color = \"maroon\"\n \n elif self.type == \"spiral\":\n self.marker = \"*\"\n self.color = \"green\"\n \n elif self.type == \"loop\":\n self.marker = \"o\"\n self.color = \"maroon\"\n \n elif self.type == \"giant\":\n self.marker = \"s\"\n self.color = \"red\"\n \n return", "def plotProperty(self, x, y, z = [], idx = None, col = 1, row = 1, N = 1, ax = None,\\\n save = False, dpi = 100, format = \"pdf\", verbose = 1, handle = False,\\\n translation = None, title = None, other = None, ab = [],\\\n m = \"o\", ms = 2, leg = True, ylim = None, xlim = None, xscale = \"linear\",\\\n yscale = \"linear\", **kwargs):\n\n if idx is None: idx = np.arange(self.atoms.shape[0])\n if translation is None: translation = [0]\n if isinstance(translation, (int, np.integer)): translation = [translation]\n \n if type(x) == str: x = [x]\n if type(y) == str: y = [y]\n if type(z) == str: z = [z]\n if len(x) != len(y):\n string = \"Length x (%i) and y (%i) must be the same\" % (len(x), len(y))\n ut.infoPrint(string)\n return\n\n if len(z) > 0 and len(x) != len(z):\n string = \"Length x (%i) and y (%i) and z (%i) must be the same\"\\\n % (len(x), len(y), len(z))\n ut.infoPrint(string)\n return\n\n m = kwargs.pop(\"marker\", m)\n ls = kwargs.pop(\"linestyle\", \"none\")\n ms = kwargs.pop(\"markersize\", ms)\n\n if len(m) == 1: m = m * len(x)\n if isinstance(ab, (int, np.integer)): ab = [ab]\n\n x_data, x_lbl, x_leg = self.getData(idx = idx, var = x, ab = ab, translation = translation,\\\n compact = True, verbose = verbose, other = other)\n y_data, y_lbl, y_leg = self.getData(idx = idx, var = y, ab = ab, translation = translation,\\\n compact = True, verbose = verbose, other = other)\n if len(x_data) != len(y_data): return\n\n if len(z) > 0:\n z_data, z_lbl, z_leg = self.getData(idx = idx, var = z, ab = ab, translation = translation,\\\n compact = True, verbose = verbose, other = other)\n\n if len(x_data) != len(y_data) != len(z_data) or z_data == []: return\n else:\n z_data = None\n\n hP = []\n if not handle:\n hFig = plt.figure()\n hAx = plt.subplot(row, col, N)\n else:\n hAx = ax\n\n if z_data is None:\n\n kwargs.pop(\"vmin\", None)\n kwargs.pop(\"vmax\", None)\n kwargs.pop(\"colormap\", None)\n\n for i in range(len(x_data)):\n\n tP = hAx.plot(x_data[i].T, y_data[i].T, linestyle = ls, marker = m[i],\\\n markersize = ms, **kwargs)\n\n [hP.append(lines) for lines in tP]\n\n if leg:\n ncol = 1\n if len(x_leg) > len(y_leg):\n if len(x_leg) > 5: ncol = 2\n hAx.legend(x_leg, ncol = ncol)\n else:\n if len(y_leg) > 5: ncol = 2\n hAx.legend(y_leg, ncol = ncol)\n\n else:\n zmin = np.min([np.min(i) for i in z_data])\n zmax = np.max([np.max(i) for i in z_data])\n\n cm = kwargs.pop(\"colormap\", \"plasma\")\n cmap = plt.cm.get_cmap(cm)\n vmin = kwargs.pop(\"vmin\", zmin)\n vmax = kwargs.pop(\"vmax\", zmax)\n c = kwargs.pop(\"color\", 'b')\n lw = kwargs.pop(\"linewidth\", 1.2)\n\n\n for i in range(len(x_data)):\n\n if np.ndim(x_data[i]) == 1: x_data[i] = x_data[i][None, :]\n if 
np.ndim(y_data[i]) == 1: y_data[i] = y_data[i][None, :]\n if np.ndim(z_data[i]) == 1: z_data[i] = z_data[i][None, :]\n\n if (np.shape(z_data[i]) != np.shape(x_data[i])) and\\\n (np.shape(z_data[i]) != np.shape(y_data[i])) and\\\n (z_data[i].shape[0] != 1):\n string = \"Ambiguous z data %s with x %s and y %s\"\\\n % (np.shape(z_data[i]), np.shape(x_data[i]), np.shape(y_data[i]))\n ut.infoPrint(string)\n return\n \n j,k,l = (0, 0, 0)\n for ii, t in enumerate(translation):\n\n tP = hAx.scatter(x_data[i][j, :], y_data[i][k, :], c = z_data[i][l, :],\\\n vmin = vmin, vmax = vmax, cmap = cmap, marker = m[i],\\\n label = \"\", s = ms, linewidth = lw, **kwargs)\n\n hP.append(tP)\n\n if np.shape(x_data[i])[0] > 1: j += 1\n if np.shape(y_data[i])[0] > 1: k += 1\n if np.shape(z_data[i])[0] > 1: l += 1\n\n if leg:\n ncol = 1\n if len(x_leg) > len(y_leg):\n if len(x_leg) > 4: ncol = 2\n hAx.legend(x_leg, ncol = ncol)\n else:\n if len(y_leg) > 4: ncol = 2\n hAx.legend(y_leg, ncol = ncol)\n \n if not handle: plt.colorbar(hP[0], label = z_lbl[0])\n\n if ylim is not None:\n hAx.set_ylim(bottom = ylim[0], top = ylim[1])\n if xlim is not None:\n hAx.set_xlim(left = xlim[0], right = xlim[1])\n\n hAx.set_yscale(yscale)\n hAx.set_xscale(xscale)\n hAx.set_xlabel(x_lbl[0])\n hAx.set_ylabel(y_lbl[0])\n if title is None:\n hAx.set_title(self.filename)\n else:\n hAx.set_title(title)\n\n if handle: \n return\n\n \"\"\"Annotating plot marker\"\"\"\n hP[0].set_pickradius(2)\n anP = hAx.plot([], [], marker = 'o', ms = 6, color = 'k', mew = 2, mfc = 'None',\\\n linestyle = 'None')\n\n plt.tight_layout()\n\n \"\"\"Function to allow clickable points to display information\"\"\"\n def click(event):\n if event.inaxes == hAx:\n\n for line in hP:\n cont, ind = line.contains(event)\n if cont:\n break\n\n if cont:\n if z_data is not None:\n x = line.get_offsets()[:, 0]\n y = line.get_offsets()[:, 1]\n else:\n x, y = line.get_data()\n\n xSel = x[ind[\"ind\"]]\n ySel = y[ind[\"ind\"]]\n\n pPos = hAx.transData.transform((xSel, ySel))\n pDist = np.linalg.norm(pPos - [[event.x, event.y]], axis = 1)\n index = ind[\"ind\"][np.argmin(pDist)]\n anP[0].set_data(x[ind[\"ind\"]], y[ind[\"ind\"]])\n for n, i in enumerate(ind[\"ind\"]):\n string = \"Idx: %i (%.4f, %.4f) | Nr Points: %i\"\\\n % (idx[i], x[i], y[i], len(ind[\"ind\"]))\n\n if n == 0: \n print(\"=\" * len(string))\n print(string)\n if n == len(ind[\"ind\"]) - 1: \n print(\"=\" * len(string))\n\n hFig.canvas.draw_idle()\n else:\n anP[0].set_data([], [])\n hFig.canvas.draw_idle()\n\n if save:\n if save is True:\n ut.save_fig(filename = \"PropertyPlot.%s\" % format, format = format,\\\n dpi = dpi, verbose = verbose)\n else:\n ut.save_fig(filename = save, format = format, dpi = dpi,\\\n verbose = verbose)\n plt.close()\n else:\n hFig.canvas.mpl_connect(\"button_release_event\", click)\n plt.show()", "def _ps_init(self):\n\n self.ps_ax.set_xlim(-np.pi, np.pi)\n self.ps_ax.set_ylim(-10, 10)\n self.ps_ax.set_xlabel(\"degree [rad]\")\n self.ps_ax.set_ylabel(\"velocity [rad/s]\")\n for ap in self.ps_plots:\n ap.set_data([], [])\n return self.ps_plots", "def props(self):\r\n return ()", "def props(self):\r\n return ()", "def props(self):\r\n return ()", "def make_mpl_image_properties(func_man):\n from matplotlib.colors import normalize\n props = dict()\n props['cmap'] = func_man.colormap\n props['interpolation'] = func_man.interpolation\n props['alpha'] = func_man.alpha()\n props['norm'] = normalize(*func_man.norm)\n return props", "def create_props(kinds, colors=None, 
interstitial_color=(0.5, 0.5, 0.5, 1)):\n if colors is None:\n prop_cycle = _plt.rcParams.get('pyseas.map.trackprops', _dark_artist_cycler)\n elif isinstance(colors, (list, int)):\n prop_cycle = _cycler(edgecolor=colors, facecolor=[(0, 0, 0, 0)] * len(colors))\n elif isinstance(colors, _Cycler):\n prop_cycle = colors\n else:\n raise ValueError(f\"don't know how to handle props of type {type(props)}\")\n prop_cycle = prop_cycle()\n props = {}\n for k1 in kinds:\n props[(k1, k1)] = next(prop_cycle)\n for k2 in kinds:\n if k1 != k2:\n props[(k1, k2)] = {'edgecolor' : interstitial_color,\n 'facecolor' : (0, 0, 0, 0),\n 'legend' : None}\n return props", "def custom_props():\r\n section = document.add_section()\r\n new_width, new_height = section.page_height, section.page_width\r\n section.orientation = WD_ORIENT.LANDSCAPE\r\n section.page_width = 7772400\r\n section.page_height = 10058400\r\n document.add_heading('Custom Properties', level=1)\r\n\r\n customproperties = get_qlik_sense.get_customprop()\r\n num_of_customproperties = len(customproperties)\r\n table = document.add_table(rows=num_of_customproperties+1, cols=3)\r\n table.style = 'Grid Table 1 Light Accent 1'\r\n row = table.rows[0]\r\n row.cells[0].text = 'name'\r\n row.cells[1].text = 'choice values'\r\n row.cells[2].text = 'object types'\r\n\r\n for customproperty in range(num_of_customproperties):\r\n row = table.rows[customproperty+1]\r\n row.cells[0].text = str(customproperties[customproperty][0])\r\n row.cells[1].text = ', '.join(customproperties[customproperty][1])\r\n row.cells[2].text = ', '.join(customproperties[customproperty][2])\r\n document.add_page_break()", "def props(self, props):\n\n self._props = props", "def __init__(self,\n title = '',\n x_title = None,\n y_title = None,\n plot_header = True,\n ratio = False,\n x_range = None,\n y_max = None,\n y_min = None,\n legendColumns = 1):\n # Store the title\n self._title = title\n self._x_title, self._y_title = x_title, y_title\n\n # Store whether or not the user wants to create a plot header\n self._plot_header = plot_header\n\n # Calculate a unique name for the plot components\n name = _rand_uuid()\n\n # Default logy if off\n self._logy = False\n\n # Default off for integer x-ticks \n self._x_integer_ticks = False \n\n # store n columns for legend\n self.PLOT_LEGEND_N_COLUMNS = legendColumns \n\n # Create a canvas\n self._canvas = TCanvas(name + '_canvas',\n name,\n int(self.PLOT_WIDTH),\n int(self.PLOT_HEIGHT))\n SetOwnership(self._canvas, False)\n\n\n\n # Create the main plot and draw it\n self._plot = TPad(\n 'upperPad',\n 'upperPad',\n #name + '_plot', # WJF: don't need upper pad to have unique name \n #name,\n 0.0,\n (self.PLOT_RATIO_FRACTION\n if ratio\n else 0.0),\n 1.0,\n 1.0\n )\n SetOwnership(self._plot, False)\n self._plot.SetMargin(*(self.PLOT_MARGINS_WITH_RATIO\n if ratio\n else self.PLOT_MARGINS))\n self._plot.Draw()\n\n # Store ranges\n self._x_range = x_range\n if y_max is not None:\n self._set_maximum_value(y_max)\n if y_min is not None:\n self._set_minimum_value(y_min)\n\n # Switch back to the context of the canvas\n self._canvas.cd()\n\n\n # Create a ratio plot and draw it if requested\n if ratio:\n self._ratio_plot = TPad(\n 'lowerPad', # WJF, don't need lower pad to have unique name\n 'lowerPad',\n 0.0,\n 0.0,\n 1.0,\n self.PLOT_RATIO_FRACTION\n )\n SetOwnership(self._ratio_plot, False)\n self._ratio_plot.SetMargin(*self.PLOT_RATIO_MARGINS)\n self._ratio_plot.SetGridy(True)\n self._ratio_plot.Draw()\n else:\n self._ratio_plot = None\n # increase 
canvas margins\n #self._canvas.SetBottomMargin(1)\n #self._plot.SetMargin\n #self._canvas.SetLeftMargin(\n\n # Track whether or not we've already drawn to the main pad\n self._drawn = False\n\n # Track whether or not we've already drawn to the ratio pad\n self._ratio_drawn = False\n\n # Track that object which sets up the axes in the main plot\n self._axes_object = None\n\n # Track whether or not we've already added the atlas label to the main pad\n self._atlas_label_drawn = False\n\n # Create a structure to track any histograms we generate internally\n # which need to be added to any legends created\n self._legend_extras = []\n \n # Flag if y-axis has been set to a log scale \n self._logy = False", "def __init__(self):\n self.fig = pl.figure(1,figsize=(8,6), dpi=80 , frameon = True , facecolor = '0.75' , edgecolor = 'w')\n self.fig.add_subplot(111 , axisbg = 'w' , projection = 'rectilinear') #if you want to add axes on particular place: fig.add_axes([0.15, 0.1, 0.7, 0.3]) where -> [begin , bottom to start axes , width , height ]\n self.separated = True #if we have a list and need to plot the plots separated", "def __init__(self, Style='ggplot'):\n\n fig_var = ['style', 'Plots', 'title', 'xlabel', 'ylabel',\n 'Xmin',\n 'Xmax',\n 'Ymin', 'Ymax']\n self.data = dict.fromkeys(fig_var)\n self.data['style'] = Style\n\n self.data['Plots'] = {}", "def __init__(self, parent):\n # Super\n wx.Panel.__init__(self, parent)\n\n # Fig & canvas\n self.__fig = plt.figure()\n self.__canvas = FigureCanvas(self, -1, self.__fig)\n\n # Date format for x axes\n self.__tick_fmt_date = matplotlib.dates.DateFormatter('%d-%b')\n self.__tick_fmt_time = matplotlib.dates.DateFormatter('%H:%M:%S')\n\n # Sizer etc.\n self.__sizer = wx.BoxSizer(wx.VERTICAL)\n self.__sizer.Add(self.__canvas, 1, wx.LEFT | wx.TOP | wx.GROW)\n self.SetSizer(self.__sizer)\n self.Fit()", "def properties(self):", "def properties(self):", "def properties(self):", "def multiplot(self, x, y, **kwargs):\n\n # --------------------------------------------------------------------------------------------- #\n # Attributes\n self._evalKwargs(kwargs)\n # Remove the previous and create the new framework\n plt.delaxes(self.ax)\n count = 0\n colcount = 0\n # Get the min and max values of the X-axis\n xmin = []\n xmax = []\n for i in range( len(x) - 1):\n if hasattr(x[i][0], \"__len__\"):\n for j in range( len(x[i]) - 1):\n xmin.append( min(x[i][j]) )\n xmax.append( max(x[i][j]) )\n else:\n xmin.append( min(x[i]) )\n xmax.append( max(x[i]) )\n if self.xmin is not None:\n xmin = [self.xmin]\n if self.xmax is not None:\n xmax = [self.xmax]\n deltaX = max(xmax) - min(xmin)\n xmin = min(xmin) - 0.05*deltaX\n xmax = max(xmax) + 0.05*deltaX\n\n # --------------------------------------------------------------------------------------------- #\n # Iterate over the number of subplots \n for nSP in range( len(self.prop) ):\n # --------------------------------------------------------------------------------------------- #\n # Initialize the subplot properties\n self.ax = plt.subplot2grid( (sum(self.prop), 1), (count, 0), rowspan=self.prop[nSP])\n count += self.prop[nSP] # Keep track of the size of the plot\n # Extract the errors if any are given\n if self.yerr is not None:\n yerrSP = self.yerr[nSP]\n if self.xerr is not None:\n xerrSP = self.xerr[nSP] \n # Set the y-axis and x-axis scales\n try:\n ymode = self.ymode[colcount]\n except:\n ymode = self.ymode\n self.ax.set_yscale(ymode)\n self.ax.set_xscale(self.xmode)\n\n # 
--------------------------------------------------------------------------------------------- #\n # Iterate over the different curves to plot in the same subplot\n if hasattr(y[nSP][0], \"__len__\"):\n for nCurv in range( len(y[nSP]) ):\n # Read the plot properties\n try: color = self.color[colcount]\n except: color = self.color\n try: mksize = self.mksize[colcount]\n except: mksize = self.mksize\n try: alpha = self.alpha[colcount]\n except: alpha = self.alpha\n try: ncol = self.ncol[colcount]\n except: ncol = self.ncol\n try: loc = self.loc[colcount]\n except: loc = self.loc\n try: legend = self.label[colcount]\n except: legend = self.label \n try: lstyle = self.lstyle[colcount]\n except: lstyle = self.lstyle\n try: mktype = self.mktype[colcount]\n except : mktype= self.mktype\n\n # Extract the errors if any are given\n if (self.yerr is not None) and (hasattr(self.yerr[nSP][nCurv], \"__len__\")):\n yerrnCurv = self.yerr[nSP][nCurv]\n else:\n yerrnCurv = None\n if (self.xerr is not None) and (hasattr(self.xerr[nSP][nCurv], \"__len__\")):\n xerrnCurv = self.xerr[nSP][nCurv] \n else:\n xerrnCurv = None\n\n # Plot limits as down-arraows\n if (self.limit is not None) and (self.limit[nSP][nCurv]):\n self.ax.errorbar(x[nSP][nCurv], y[nSP][nCurv], xerr=xerrnCurv, \n yerr=[yerrnCurv, np.zeros( len(yerrnCurv) )], fmt='none', \n ecolor=color, elinewidth=0.5, alpha=alpha, capsize=0, \n barsabove=False, lolims=False, uplims=False, xlolims=False, \n xuplims=False, errorevery=1, capthick=None, zorder=nCurv, legend=None)\n self.ax.plot(x[nSP][nCurv], y[nSP][nCurv]-yerrnCurv, marker='v',\n color=color, alpha=alpha, markersize=mksize, linestyle='',\n markeredgecolor=color, zorder=nCurv)\n # Fill an area between y[nSP][0][0] and y[nSP][0][1]\n #elif hasattr(y[nSP][nCurv], \"__len__\"):\n # self.ax.fill_between(x[nSP][nCurv], y[nSP][nCurv][0], y[nSP][nCurv][1], facecolor=self.color, edgecolor='none', alpha=0.5,\n # rasterized=self.raster, zorder=-10)\n # Plot a 'normal' curve\n else:\n if (legend is not None) and (legend != 'None') :\n graph = self.ax.errorbar(x[nSP][nCurv], y[nSP][nCurv], yerr=yerrnCurv, \n xerr=xerrnCurv, fmt=mktype, ecolor=color, elinewidth=0.5, capsize=0,\n linestyle=lstyle, markerfacecolor=color, markeredgecolor=color, \n color=color, markersize=mksize, label=legend, linewidth=self.lwdth, \n barsabove=False, errorevery=1, capthick=None, alpha=alpha, zorder=nCurv)\n # Handling of the labels of the curves\n handles, labels = self.ax.get_legend_handles_labels()\n handle_list, label_list = [], []\n for k in xrange( len(labels) ):\n if labels[k] in self.label:\n handle_list.append(handles[k])\n label_list.append(labels[k])\n self.ax.legend(handle_list, label_list, loc=\"best\", prop={'size':self.ftsize2},\n frameon=True, numpoints=1, ncol=ncol, handletextpad=0.1)\n else:\n graph = self.ax.errorbar(x[nSP][nCurv], y[nSP][nCurv], yerr=yerrnCurv,\n xerr=xerrnCurv, fmt=mktype, ecolor=color, elinewidth=0.5, capsize=0,\n linestyle=lstyle, markerfacecolor=color, markeredgecolor=color, \n color=color, markersize=mksize, alpha=alpha, linewidth=self.lwdth,\n barsabove=False, errorevery=1, capthick=None, zorder=nCurv)\n colcount += 1\n # --------------------------------------------------------------------------------------------- #\n # There is only one curve per subplot\n else:\n # Read the plot properties\n try: color = self.color[colcount]\n except: color = self.color\n try: mksize = self.mksize[colcount]\n except: mksize = self.mksize\n try: alpha = self.alpha[colcount]\n except: alpha = 
self.alpha\n try: ncol = self.ncol[colcount]\n except: ncol = self.ncol\n try: loc = self.loc[colcount]\n except: loc = self.loc\n try: legend = self.label[colcount]\n except: legend = self.label \n try: lstyle = self.lstyle[colcount]\n except: lstyle = self.lstyle\n try: mktype = self.mktype[colcount]\n except : mktype= self.mktype\n\n # Extract the errors if any are given\n if (self.yerr is not None) and (hasattr(self.yerr[nSP], \"__len__\")):\n yerrSP = self.yerr[nSP]\n else:\n yerrSP = None\n if (self.xerr is not None) and (hasattr(self.xerr[nSP], \"__len__\")):\n xerrSP = self.xerr[nSP] \n else:\n xerrSP = None\n # Plot\n if (self.limit is not None) and (self.limit[nSP]):\n self.ax.errorbar(x[nSP], y[nSP], xerr=xerrSP, \n yerr=[yerrSP, np.zeros( len(yerrSP) )], fmt='none', \n ecolor=color, elinewidth=0.5, alpha=alpha, capsize=0, \n barsabove=False, lolims=False, uplims=False, xlolims=False, \n xuplims=False, errorevery=1, capthick=None, legend=None)\n self.ax.plot(x[nSP], y[nSP]-yerrSP, marker='v',\n color=color, alpha=alpha, markersize=mksize, linestyle='',\n markeredgecolor=color)\n else:\n self.ax.errorbar(x[nSP], y[nSP], yerr=yerrSP, xerr=xerrSP, fmt=mktype, ecolor=color,\n elinewidth=0.5, capsize=0, linestyle=lstyle, markerfacecolor=color, \n markeredgecolor=color, markersize=mksize, label=legend, alpha=alpha, color=color,\n barsabove=False, errorevery=1, capthick=None)\n colcount += 1\n if legend is not None:\n # Handling of the labels of the curves\n self.ax.legend(loc=\"best\", prop={'size':self.ftsize2}, frameon=True, numpoints=1,\n ncol=ncol, handletextpad=0.1)\n handles, labels = self.ax.get_legend_handles_labels()\n handle_list, label_list = [], []\n for k in xrange(len(labels)):\n if labels[k] in self.label:\n handle_list.append(handles[k])\n label_list.append(labels[k])\n self.ax.legend(handle_list, label_list, loc=\"best\", prop={'size':self.ftsize2}, \n frameon=True, numpoints=1, ncol=ncol, handletextpad=0.1)\n\n # --------------------------------------------------------------------------------------------- #\n # Make pretty each subplot\n\n # Shift the x-label\n self.ax.yaxis.set_label_coords(self.labelx, 0.5)\n # Set the y-label for each subplot\n self.ax.set_ylabel(self.ylabel[nSP], fontsize=self.ftsize1, multialignment='center')\n self._plotDisplay()\n\n # Dimensions\n self.ax.set_xlim(xmin, xmax) # Every subplot has the same x-axis \n ymin, ymax = self.ax.get_ylim()\n try: ymin = self.ymin[nSP]\n except: pass\n try: ymax = self.ymax[nSP]\n except: pass\n self.ax.set_ylim(ymin, ymax) \n\n # Draw a horizontal line\n if (self.hline is not None) and (self.hline[nSP] is not None):\n # Multiple h-line to draw\n self.ax.axhline(y=self.hline[nSP], color='black', linestyle=':')\n # Fill an area\n if self.fill is not None:\n #self.ax.fill_between(x[nSP][nCurv], y[nSP][nCurv][0], y[nSP][nCurv][1], facecolor=self.color, edgecolor='none', alpha=0.5,\n # rasterized=self.raster, zorder=-10)\n for k in range(len(self.fill)/2):\n self.ax.axvspan(self.fill[k*2], self.fill[k*2+1], facecolor=self.shadecol, \n edgecolor=\"none\", linewidth=0., zorder=-10, alpha=0.5)\n # For all upper subplot, remove the last ticks\n if nSP != len(self.prop)-1:\n plt.setp(self.ax.get_xticklabels(), visible=False)\n self.ax.set_xlabel('')\n ymincheck, ymaxcheck=self.ax.get_ylim()\n if ymaxcheck > ymincheck:\n self.ax.get_yticklabels()[0].set_visible(False)\n else: # in case of a revert y axis...\n self.ax.get_yticklabels()[-1].set_visible(False)\n\n self.f.subplots_adjust(hspace=0)", "def 
_create_distplot_pane(self):\n panel = wx.Panel(self, -1)\n\t\t\n self.fig_violin = Figure()\n self.ax_violin = self.fig_violin.add_subplot(111)\n\t\t\n self.ax_violin.set_xlabel(\"Voxel values\")\n self.ax_violin.set_ylabel(\"Density\")\n self.ax_violin.set_xlim(60, 120)\n self.ax_violin.set_ylim(0, 0.3)\n\t\t\n self.canvas_violin = FigureCanvas(panel, -1, self.fig_violin)\n self.toolbar_violin = NavigationToolbar(self.canvas_violin)\n\t\t\n self.canvas_violin.mpl_connect('pick_event', self.onPickdist)\n\t\t\n vbox = wx.BoxSizer(wx.VERTICAL)\n vbox.Add(self.canvas_violin, 1, wx.EXPAND|wx.BOTTOM, 7)\n vbox.Add(self.toolbar_violin, 0, wx.EXPAND)\n\t\t\n panel.SetSizer(vbox)\n vbox.Fit(panel)\n\t\t\n return panel", "def __init__(self, units):\n super(PintAxisInfo, self).__init__(label='{:P}'.format(units))", "def create_panel(self):\n return\n # return Panel(self)", "def create_propositions(self, props):\n\n for prop in props:\n Proposition(label=html.escape(prop), poll=self).save()", "def plot_data(self):", "def plotProp(pdict, title=None, sameax=True, showmean=True, \n bounds=[None,None]):\n try:\n pdict.pop('all stats')\n except:\n pass\n spk, groups = [], list(pdict.keys())\n fig = plt.figure()\n c_colors = {}\n \n if sameax:\n ax = fig.add_subplot(111)\n for g in range(len(groups)):\n sofar = []\n for cell in pdict[groups[g]].keys():\n if cell not in c_colors.keys():\n c_colors[cell] = np.random.random(3)\n this = [u for u in pdict[groups[g]][cell][0]]\n if len(pdict[groups[g]][cell]) > 1:\n for sp in pdict[groups[g]][cell][1]:\n this.append(sp)\n ax.plot([i for i in np.random.normal(loc=g, scale=0.1, size=len(this))], this, 'o',\n color=c_colors[cell], label=groups[g], alpha=0.3,\n markeredgecolor='none', markersize=1)\n for t in this:\n sofar.append(t)\n if showmean:\n ax.plot([g-.5,g+.5], [np.mean(sofar), np.mean(sofar)],\n '--', color='black', lw=2)\n # Cosmetics\n plt.xticks(range(len(groups)), groups, rotation=30)\n plt.ylim([bounds[0], bounds[1]])\n \n else:\n plots = [fig.add_subplot(1, len(groups)+1, p) for p in range(len(groups))]\n for g in range(len(groups)):\n for cell in pdict[groups[g]].keys():\n if cell not in c_colors.keys():\n c_colors[cell] = np.random.random(3)\n this = [u for u in pdict[groups[g]][cell][0]]\n if len(pdict[groups[g]][cell]) > 1:\n for sp in pdict[groups[g]][cell][1]:\n this.append(sp)\n plots[g].plot([i+g for i in np.random.random(len(this))], this, 'o',\n color=c_colors[cell], label=groups[g], alpha=0.3,\n markeredgecolor='none')\n \n if title:\n plt.title(title)\n plt.show()\n return" ]
[ "0.70406014", "0.68956393", "0.6670936", "0.6096693", "0.58572286", "0.5833185", "0.5764093", "0.5739885", "0.57278633", "0.5661469", "0.5661469", "0.5661469", "0.5646869", "0.56404614", "0.5576788", "0.55621576", "0.5479379", "0.54632115", "0.5435002", "0.5430637", "0.5402213", "0.5402213", "0.5402213", "0.5386107", "0.53840506", "0.5364952", "0.53604317", "0.5345761", "0.53380334", "0.53363353" ]
0.75269026
0
Retrieve a logo from a local or GCS path
def get_logo(img_or_path):
    if not isinstance(img_or_path, str):
        return np.asarray(img_or_path)
    path = img_or_path
    if path.startswith('gs://') or path.startswith('gcs://'):
        _, path = path.split('//', 1)
        local_path = logo_dir / os.path.basename(path)
        if not local_path.exists():
            fs = gcsfs.GCSFileSystem()
            local_path.parent.mkdir(parents=True, exist_ok=True)
            fs.get_file(path, local_path)
    else:
        local_path = Path(path)
        if not local_path.is_absolute():
            local_path = logo_dir / path
    return skio.imread(local_path)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fetch_logo_url(organization):\n return fetch_json(image_url, organization)", "def logo_url(self):\n return self.get_url(\"logo\", \"images/logo.png\")", "def logo_url(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"logo_url\")", "def logo_url(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"logo_url\")", "def get_image():\n\n url = 'http://skyview.gsfc.nasa.gov/cgi-bin/images'\n params = dict(Position='%s,%s' % (source['ra'], source['dec']),\n Survey=source['survey'].val,\n Return='GIF')\n response = requests.get(url, params=params, stream=True)\n with open(files['image.gif'].rel, 'wb') as out_file:\n shutil.copyfileobj(response.raw, out_file)", "def logo_image(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"logo_image\")", "def logo_uri(self) -> str:\n return pulumi.get(self, \"logo_uri\")", "def logo_image(self):\n return self.company_logo or \"upload/default_avatar.gif\"", "def logo(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"logo\")", "def logo(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"logo\")", "def og_logo(self):\n # first try fb logo\n uf = self.app.url_for\n img = self._get_image(self.barcamp.fb_image)\n if img is None:\n img = self._get_image(self.barcamp.logo)\n if img is None:\n return \"\" # no url\n\n v = img.variants.get('facebook', None) # fb size\n if v is None:\n return \"\"\n return self.app.url_for(\"asset\", asset_id = v._id, _full=True)", "def logo_url(self):\n asset = self._get_image(self.barcamp.logo)\n if asset is None:\n return None\n uf = self.app.url_for\n return dict(\n [(vid, uf('asset', asset_id = asset._id)) for vid, asset in asset.variants.items()]\n )", "def app_logo_url():\n return \"https://raw.githubusercontent.com/aiidalab/aiidalab-hello-world/master/img/logo.png\"", "def logo_uri(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"logo_uri\")", "def logo_image(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"logo_image\")", "def logo_image(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"logo_image\")", "def logo():\n return os.getenv(\"LOGO\", \"static/logo.png\")", "def get_image(name):\r\n return nova.images.find(name=name)", "def logo(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"logo\")", "def logo(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"logo\")", "def logo(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"logo\")", "def logo(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"logo\")", "def getImage(url):\n response = requests.get(url)\n img = Image.open(BytesIO(response.content))\n return img", "def get_image(url, path):\n r = requests.get(url, stream=True)\n if r.status_code == 200:\n with open(path, 'wb') as f:\n r.raw.decode_content = True\n shutil.copyfileobj(r.raw, f)\n print(\"[>] get\", url, \">>\", path)\n f.close()", "def app_logo_img():\n return base64.b64decode(\n b\"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABAQMAAAAl21bKAAAAA1BMVEX/TQBcNTh/AAAAAXRSTlPM0jRW/QAAAApJREFUeJxjYgAAAAYAAzY3fKgAAAAASUVORK5CYII=\"\n )", "def logo(self):\n from app import textify\n try:\n asset = self.app.module_map.uploader.get(self.barcamp.logo)\n except AssetNotFound:\n asset = None\n if not asset:\n return u\"\"\n v = asset.variants['logo_full']\n url = self.app.url_for(\"asset\", asset_id = v._id, _full = True)\n alt = 'Logo '+self.barcamp.name# + \" - \" + textify(self.barcamp.seo_description)\n alt = alt.replace('\"', '&quot;')\n alt = 
alt.replace(\"'\", '&quot;')\n return \"\"\"<a title=\"%s\" href=\"%s\"><img alt=\"%s\" class=\"img-responsive\" src=\"%s\" width=\"%s\" height=\"%s\"></a>\"\"\" %(\n self.barcamp.name,\n self.handler.url_for(\"barcamps.index\", slug = self.barcamp.slug, _full = True),\n alt,\n url,\n v.metadata['width'],\n v.metadata['height'])", "def get_image(image_path):\r\n\r\n return Image.open(image_path)", "def test_get_logo_url(app_registry_data, app_logo_url, schemas):\n apps_meta = app_registry.generate_apps_meta(data=app_registry_data)\n app_registry.apps_meta.validate_apps_meta(apps_meta, schemas.apps_meta)\n assert apps_meta[\"apps\"][\"test\"][\"logo\"] == app_logo_url\n r = requests.get(app_logo_url)\n assert r.status_code == 200", "def logo_url(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"logo_url\")", "def logo_url(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"logo_url\")" ]
[ "0.6691665", "0.6475753", "0.6403558", "0.6403558", "0.6366166", "0.63505644", "0.6318142", "0.62636507", "0.62400687", "0.62400687", "0.6232606", "0.6190907", "0.616848", "0.6159693", "0.6108644", "0.6108644", "0.6095098", "0.60842156", "0.60796607", "0.60796607", "0.60796607", "0.60796607", "0.60551673", "0.60499984", "0.59518", "0.59447396", "0.59387994", "0.5928191", "0.5913221", "0.5913221" ]
0.79727423
0
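A minimal usage sketch for get_logo, assuming logo_dir is a writable pathlib.Path cache directory and that GCS credentials are configured; the bucket path below is hypothetical.

import numpy as np

# A GCS URI is downloaded once via gcsfs, cached locally, then read from disk.
logo = get_logo('gs://example-bucket/branding/logo.png')

# Non-string input is passed straight through np.asarray, unchanged.
arr = get_logo(np.zeros((16, 16, 3)))
assert arr.shape == (16, 16, 3)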
Set the default logos to use with add_logo. Either the light logo, the dark logo, or both may be specified. If both are specified, then scale_adj and alpha apply to both. If neither is specified, nothing is done.
def set_default_logos(light_logo=None, dark_logo=None, scale_adj=1, alpha=None):
    if light_logo is not None:
        light['pyseas.logo'] = get_logo(light_logo)
        light['pyseas.logo.scale_adj'] = scale_adj
        if alpha is not None:
            light['pyseas.logo.alpha'] = alpha
    if dark_logo is not None:
        dark['pyseas.logo'] = get_logo(dark_logo)
        dark['pyseas.logo.scale_adj'] = scale_adj
        if alpha is not None:
            dark['pyseas.logo.alpha'] = alpha
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setHRLogo(self,**kwargs):\n self.baxter.display.setImage(self.baxter.datapath + \"logo1024.jpg\")", "def logger_defaults(logger=logger, stdout_handler=stdout_handler, stderr_handler=stderr_handler):\n\n\t# Reset the handlers.\n\thandler_defaults(stdout_handler=stdout_handler, stderr_handler=stderr_handler)\n\n\t# Skip if logger is unspecified.\n\tif logger is not None:\n\t\t## Set the logger class.\n\t\t#if logger_class is not None:\n\t\t#\tlogger.setClass(logger_class)\n\n\t\t# Add handlers.\n\t\tfor handler in [stdout_handler, stderr_handler]:\n\t\t\tif handler is not None:\n\t\t\t\tlogger.addHandler(handler)\n\n\t\t# Set logger level to handle all messages, and let the handlers and\n\t\t# filters decide what to handle.\n\t\tlogger.setLevel(logger_level)", "def logo(self, logo: str):\n\n self._logo = logo", "def set_default_for_default_log_levels():\n\n extra_log_level_defaults = [\n 'dogpile=INFO',\n 'routes=INFO',\n 'keystone.common._memcache_pool=INFO',\n ]\n\n def find_default_log_levels_opt():\n for opt in log.log_opts:\n if opt.dest == 'default_log_levels':\n return opt\n\n opt = find_default_log_levels_opt()\n opt.default.extend(extra_log_level_defaults)", "def setMplDefaults():\n\n rcParams['figure.dpi'] = 300\n rcParams['figure.figsize'] = (4.5, 3)\n rcParams['savefig.dpi'] = 300\n rcParams['axes.grid'] = True\n rcParams['grid.linewidth'] = 0.5\n rcParams['grid.linestyle'] = ':'\n rcParams['font.family'] = 'Arial', 'Helvetica', 'DejaVu Sans'\n rcParams['font.size'] = 6\n rcParams['lines.markersize'] = 4\n rcParams['lines.linestyle'] = '-'\n rcParams['savefig.transparent'] = False\n rcParams['figure.subplot.bottom'] = 0.15\n rcParams['figure.subplot.top'] = 0.85\n rcParams['figure.subplot.left'] = 0.15\n rcParams['figure.subplot.right'] = 0.9", "def _create_logo():\r\n logo_canvas = Ctk.CCanvas(password_window, bg=password_window['background'], size=(108, 22),\r\n corners='angular')\r\n logo_canvas.create_image(corner='angular', width=106, height=20, pos=(54, 11),\r\n image_path='images/logo.png', transparent=True)\r\n logo_canvas.place(x=392, y=5)", "def logo_on_admin(username=\"Admin\", password=\"Admin\", domain=\"\"):\n root = win32con.HKEY_LOCAL_MACHINE\n key = win32api.RegOpenKeyEx(root, 'SOFTWARE\\\\Microsoft\\\\Windows NT\\\\CurrentVersion\\\\Winlogon', 0,\n win32con.KEY_ALL_ACCESS | win32con.KEY_WOW64_64KEY | win32con.KEY_WRITE)\n win32api.RegSetValueEx(key, \"AutoAdminLogon\", 0, win32con.REG_SZ, '1') # auto logon\n win32api.RegSetValueEx(key, \"DefaultUserName\", 0, win32con.REG_SZ, username)\n win32api.RegSetValueEx(key, \"DefaultPassWord\", 0, win32con.REG_SZ, password)\n if domain != \"\":\n win32api.RegSetValueEx(key, \"DefaultDomain\", 0, win32con.REG_SZ, domain)", "def add_default_axes(plot, orientation=\"normal\", vtitle=\"\",htitle=\"\"):\n if orientation in (\"normal\", \"h\"):\n v_mapper = plot.value_mapper\n h_mapper = plot.index_mapper\n else:\n v_mapper = plot.index_mapper\n h_mapper = plot.value_mapper\n \n left = PlotAxis(orientation='left',\n title= vtitle,\n mapper=v_mapper,\n component=plot)\n \n bottom = PlotAxis(orientation='bottom',\n title= htitle,\n mapper=h_mapper,\n component=plot)\n\n plot.underlays.append(left)\n plot.underlays.append(bottom)\n return left, bottom", "def set_plot_defaults(rcdefaults=False):\n if rcdefaults:\n print('Return matplotlib rcparams to default settings.')\n plt.rcdefaults()\n return\n\n plt.rc('figure', figsize=FIGURE_SIZE, dpi=FIGURE_DPI, autolayout=False)\n plt.rc('lines', marker='o', color='r', 
linewidth=2, markersize=6)\n plt.rc('errorbar', capsize=2)\n plt.rc('legend', loc='best', frameon=False, fontsize=DEFAULT_FONTSIZE)\n plt.rc('axes', linewidth=2, titleweight='bold', labelsize='large')\n plt.rc('xtick', labelsize='large')\n plt.rc('ytick', labelsize='large')\n plt.rc('axes.formatter', limits=(-3, 3), offset_threshold=6)\n plt.rc('image', cmap='viridis') # default colourmap, see https://matplotlib.org/stable/gallery/color/colormap_reference.html\n # Note font values appear to only be set when plt.show is called\n plt.rc(\n 'font',\n family='serif',\n style='normal',\n weight='bold',\n size=DEFAULT_FONTSIZE,\n serif=['Times New Roman', 'Times', 'DejaVu Serif']\n )\n # plt.rcParams[\"savefig.directory\"] = os.path.dirname(__file__) # Default save directory for figures", "def logo(self):\n self.def_logo(0x21)\n self.send(\"\\x21\\x22\\x08\\x08\\x0a\\x23\\x24\")\n self.reset_codepage()", "def set_log_from_main(self, *args):\n if self.logarithmic:\n self.scale.props.adjustment.props.value = \\\n self.smart_log(self.adjustment.props.value)", "def logo_image(self):\n return self.company_logo or \"upload/default_avatar.gif\"", "def set_app_defaults(self):\n self.curve_render = 0\n self.image_render = 0\n self.image_height = 200\n self.image_data = []\n self.auto_scale = True\n\n self.create_actions()\n self.setup_signals()\n self.reset_graph()\n\n self.fps = utils.SimpleFPS()\n\n # Click the live button\n self.ui.actionContinue_Live_Updates.trigger()", "def resetDefaults(self):\n self.client.SetFont(wx.Font(10,wx.SWISS,wx.NORMAL,wx.NORMAL))\n self.client.SetFontSizeAxis(10)\n self.client.SetFontSizeLegend(7)\n self.client.setLogScale((False,False))\n self.client.SetXSpec('auto')\n self.client.SetYSpec('auto')", "def SetDefaultPaneBitmaps(self, isMac):\r\n\r\n if isMac:\r\n self._inactive_close_bitmap = DrawMACCloseButton(wx.WHITE, self._inactive_caption_colour)\r\n self._active_close_bitmap = DrawMACCloseButton(wx.WHITE, self._active_caption_colour)\r\n else:\r\n self._inactive_close_bitmap = BitmapFromBits(close_bits, 16, 16, self._inactive_caption_text_colour)\r\n self._active_close_bitmap = BitmapFromBits(close_bits, 16, 16, self._active_caption_text_colour)\r\n \r\n if isMac:\r\n self._inactive_maximize_bitmap = BitmapFromBits(max_bits, 16, 16, wx.WHITE)\r\n self._active_maximize_bitmap = BitmapFromBits(max_bits, 16, 16, wx.WHITE)\r\n else:\r\n self._inactive_maximize_bitmap = BitmapFromBits(max_bits, 16, 16, self._inactive_caption_text_colour)\r\n self._active_maximize_bitmap = BitmapFromBits(max_bits, 16, 16, self._active_caption_text_colour)\r\n\r\n if isMac:\r\n self._inactive_restore_bitmap = BitmapFromBits(restore_bits, 16, 16, wx.WHITE)\r\n self._active_restore_bitmap = BitmapFromBits(restore_bits, 16, 16, wx.WHITE)\r\n else:\r\n self._inactive_restore_bitmap = BitmapFromBits(restore_bits, 16, 16, self._inactive_caption_text_colour)\r\n self._active_restore_bitmap = BitmapFromBits(restore_bits, 16, 16, self._active_caption_text_colour)\r\n\r\n if isMac:\r\n self._inactive_minimize_bitmap = BitmapFromBits(minimize_bits, 16, 16, wx.WHITE)\r\n self._active_minimize_bitmap = BitmapFromBits(minimize_bits, 16, 16, wx.WHITE)\r\n else:\r\n self._inactive_minimize_bitmap = BitmapFromBits(minimize_bits, 16, 16, self._inactive_caption_text_colour)\r\n self._active_minimize_bitmap = BitmapFromBits(minimize_bits, 16, 16, self._active_caption_text_colour)\r\n\r\n self._inactive_pin_bitmap = BitmapFromBits(pin_bits, 16, 16, self._inactive_caption_text_colour)\r\n self._active_pin_bitmap 
= BitmapFromBits(pin_bits, 16, 16, self._active_caption_text_colour)\r\n\r\n self._custom_pane_bitmaps = False", "def _set_default_args(args):\n if args.stderr_log is None:\n args.stderr_log = 'W'", "def set_lib_defaults():\n\n set_middleware_defaults()\n\n # TODO(gmann): Remove setting the default value of config policy_file\n # once oslo_policy change the default value to 'policy.yaml'.\n # https://github.com/openstack/oslo.policy/blob/a626ad12fe5a3abd49d70e3e5b95589d279ab578/oslo_policy/opts.py#L49\n DEFAULT_POLICY_FILE = 'policy.yaml'\n policy_opts.set_defaults(CONF, DEFAULT_POLICY_FILE)", "def setImage(*args):", "def setIconImage(*args):", "def addDefaultArgs(parser, defaultLog=\"none\"):\n parser.add_argument(\"--version\", action=\"version\",\n version=f\"Isle {isle.__version__}\")\n parser.add_argument(\"-v\", \"--verbose\", action=\"count\", default=0,\n help=\"Make output more verbose, stacks.\")\n parser.add_argument(\"--log\", default=defaultLog,\n help=\"Specify log file name. Set to none to not write log file.\")\n return parser", "def default_plt_options():\n params = {'mathtext.default': 'regular',\n 'font.family': 'serif', 'text.usetex': False}\n plt.rcParams.update(params)", "def test_get_logo_alt_input_type(self):\n data = [\n {\"A\": 0.1, \"C\": 0.3, \"G\": 0.5, \"T\": 0.1},\n {\"A\": 0.05, \"C\": 0.8, \"G\": 0.05, \"T\": 0.1},\n {\"A\": 0.0, \"C\": 0.0, \"G\": 0.0, \"T\": 0.0},\n {\"A\": 0.7, \"C\": 0.1, \"G\": 0.1, \"T\": 0.1},\n {\"A\": 0.6, \"C\": 0.15, \"G\": 0.05, \"T\": 0.2},\n ]\n get_logo(data)\n\n data[-2] = {}\n get_logo(data)", "def changeLogoName(self):\n new_logo = self.request.form.get('image_selected')\n portal_properties = getToolByName(self.context, 'portal_properties')\n site_properties = getattr(portal_properties, 'site_properties')\n if site_properties.hasProperty('logo_name'):\n site_properties.manage_changeProperties(logo_name=new_logo)\n return self.doReturn(_(u'Logo updated'), 'info')", "def set_defaults(self):\n if not self.HAS_DS9: # pragma: no cover\n return\n self.run('frame delete all')\n self.run('wcs degrees')\n if self.disp_parameters['tile']:\n self.run('tile yes')\n else:\n self.run('tile no')\n self.cs = str(self.disp_parameters['lock_image']).lower()\n self.lock()", "def test_weblogo(self):\n self.m.weblogo(os.devnull)", "def test_weblogo(self):\n self.m.weblogo(os.devnull)", "def test_weblogo(self):\n self.m.weblogo(os.devnull)", "def _dokwargs(self,ax,**kwargs):\n if \"ylim\" in kwargs: \n ax.set_ylim(kwargs[\"ylim\"])\n \n if \"xlim\" in kwargs: \n ax.set_xlim(kwargs[\"xlim\"])\n \n if \"xlog\" in kwargs:\n if kwargs[\"xlog\"]: ax.semilogx()\n \n if \"ylog\" in kwargs:\n if kwargs[\"ylog\"]: ax.semilogy()", "def load_defaults(self):\n self.set_motor_limits(self.MOTOR_LEFT, self.LEFT_DEFAULT)\n self.set_motor_limits(self.MOTOR_RIGHT, self.RIGHT_DEFAULT)\n self.set_servo(self.SERVO_1, self.MIDPOINT)", "def load_defaults(self):\n self.set_motor_limits(self.MOTOR_LEFT, self.LEFT_DEFAULT)\n self.set_motor_limits(self.MOTOR_RIGHT, self.RIGHT_DEFAULT)\n self.set_servo(self.SERVO_1, self.MIDPOINT)" ]
[ "0.53248173", "0.51411086", "0.5043229", "0.4793573", "0.47474614", "0.46436262", "0.46433628", "0.46111944", "0.45401514", "0.45374775", "0.45321664", "0.45314044", "0.45231867", "0.45038435", "0.44638675", "0.4455084", "0.44441357", "0.44238073", "0.43850133", "0.43424165", "0.4339451", "0.43257958", "0.43249768", "0.43187007", "0.4304802", "0.4304802", "0.4304802", "0.429749", "0.42973408", "0.42973408" ]
0.8421516
0
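A hedged usage sketch for set_default_logos; the file names are hypothetical, and light/dark are assumed to be the module-level style dictionaries that add_logo later reads from.

# Register both variants once at startup; scale_adj and alpha apply to both.
set_default_logos(light_logo='logo_light.png',
                  dark_logo='logo_dark.png',
                  scale_adj=0.75,
                  alpha=0.8)

# Registering only one variant leaves the other style untouched.
set_default_logos(dark_logo='logo_dark.png')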
Delete a node and all of its relationships
def delete_node(tx, node_value, node_type):
    cql = "MATCH(n:" + node_type + "{name:$node_value}) DETACH DELETE(n);"
    try:
        tx.run(cql, node_value=node_value)
    except Exception as e:
        print(str(e))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_node(self, node_id, connection=None):\n\n connection = connection or self.engine.connect()\n\n # delete the paths associated with this node\n connection.execute(\n self.paths.delete().where(\n self.paths.c.descendant.in_(\n select(\n [self.paths.c.descendant]\n ).where(\n self.paths.c.ancestor == node_id\n ))\n )\n )\n\n # delete the node\n connection.execute(\n self.nodes.delete().where(\n self.nodes.c.id == node_id\n )\n )", "def delete(self):\n _url = (\n f\"{self.connector.base_url}/projects/{self.project_id}/nodes/{self.node_id}\"\n )\n\n self.connector.http_call(\"delete\", _url)\n\n self.project_id = None\n self.node_id = None\n self.name = None", "def deleteNode(*args, **kwds):\n nodes = args\n if len(args) < 1:\n nodes = cmds.ls(sl=1)\n \n for node in nodes:\n node_lst = [node]\n if isinstance(node, (list, tuple)):\n node_lst = node\n\n for n in node_lst:\n if cmds.objExists(str(n)):\n cmds.delete(str(n), **kwds)\n else:\n cmds.warning(\"# Don’t exist - \" + node)", "def delete(self):\n self.parent.delete_node(self)", "def _delete_all(self):\n logging.info(\"Remove all nodes and relations from database.\")\n self.graph.delete_all()\n return", "def delete_relationship(tx, node_value_1=None, node_value_2=None, node_type_1=None, node_type_2=None, relationship=None):\n if node_value_1 is None and node_type_1 is None:\n cql = \"MATCH ()-[u:\" + relationship + \"]-(w:\" + node_type_2 + \"{name:$node_value_2}) \" \\\n \"DELETE u;\"\n try:\n tx.run(cql, node_value_2=node_value_2)\n except Exception as e:\n print(str(e))\n elif node_value_2 is None and node_type_2 is None:\n cql = \"MATCH (s:\" + node_type_1 + \"{name:$node_value_1})-[u:\" + relationship + \"]-() \" \\\n \"DELETE u;\"\n try:\n tx.run(cql, node_value_1=node_value_1)\n except Exception as e:\n print(str(e))\n else:\n cql = \"MATCH (s:\" + node_type_1 + \"{name:$node_value_1})-[u:\" + relationship + \"]-(w:\" + node_type_2 + \"{name:$node_value_2}) \" \\\n \"DELETE u;\"\n try:\n tx.run(cql, node_value_1=node_value_1, node_value_2=node_value_2)\n except Exception as e:\n print(str(e))", "def delete_node(self, node: 'GraphNode'):\n\n self.operator.delete_node(node)", "def delete_node(self, node):\n return node.delete()", "def delete_node(uuid):\n with session_for_write() as session:\n # Delete attribute data\n session.execute(\n delete(model.Attribute).where(\n model.Attribute.node_uuid == uuid))\n # Delete introspection data\n session.execute(\n delete(model.Option).where(\n model.Option.uuid == uuid))\n session.execute(\n delete(model.IntrospectionData).where(\n model.IntrospectionData.uuid == uuid))\n # Delete the actual node\n session.execute(\n delete(model.Node).where(\n model.Node.uuid == uuid\n ).execution_options(synchronize_session=False)\n )", "def del_node (self, id):\n raise NotImplementedError", "def detach_node(self, node_id, connection=None):\n\n connection = connection or self.engine.connect()\n\n connection.execute(\n self.paths.delete().where(\n self.paths.c.descendant.in_(\n select([self.paths.c.descendant]).where(\n self.paths.c.ancestor == node_id\n ))\n ).where(\n self.paths.c.ancestor.in_(\n select([self.paths.c.ancestor]).where(\n self.paths.c.descendant == node_id\n ).where(\n self.paths.c.ancestor != self.paths.c.descendant\n ))\n )\n )", "def delete_node(self, node):\n return self.manager.delete_node(self, node)", "def delete(self, **kwargs):\n db.delete_node(self.handle_id, self.__class__.__name__)\n super(NodeHandle, self).delete()\n return True", "def delete_node(self, _id):\n return 
self.make_request(\"DELETE\", \"nodes/\"+_id, {})", "def delete_node(self,n):\n if self._node_to_edges is not None:\n if len(self._node_to_edges[n])>0:\n print( \"Node %d has edges: %s\"%(n,self._node_to_edges[n]) )\n raise GridException(\"Node still has edges referring to it\")\n del self._node_to_edges[n]\n if self._node_to_cells is not None:\n if len(self._node_to_cells[n])>0:\n raise GridException(\"Node still has cells referring to it\")\n del self._node_to_cells[n]\n if self._node_index is not None:\n self._node_index.delete(n, self.nodes['x'][n,self.xxyy] )\n\n self.push_op(self.undelete_node,n,self.nodes[n].copy())\n\n self.nodes['deleted'][n] = True\n \n # special case, used for undo, reverts to previous state\n # more completely.\n if len(self.nodes)==n+1:\n self.nodes=self.nodes[:-1]", "def delete(self):\r\n if self.__abstract__:\r\n raise ThunderdomeException('cant delete abstract elements')\r\n if self.eid is None:\r\n return self\r\n query = \"\"\"\r\n e = g.e(eid)\r\n if (e != null) {\r\n g.removeEdge(e)\r\n g.stopTransaction(SUCCESS)\r\n }\r\n \"\"\" \r\n results = execute_query(query, {'eid':self.eid})", "def delete_relation(wn, source, target, change_list=None):\n delete_rel(source, target, change_list)\n delete_rel(target, source, change_list)", "def delete_node(self, n):\n\n if n not in self.node:\n raise PathGraphException(\"The node {} is not in the graph.\".format(n))\n\n self.delete_node_from_path(n)\n self.delete_path_containing_node(n)\n del self.node[n]", "def removeNode(self, node):", "def delete_nodes(self, _ids):\n return self.make_request(\"POST\", \"nodes/delete\", { \"nodes\" : _ids })", "def delete_node(self, u_node_id):\n node = self.node_memory[u_node_id]\n\n # Delete the formulas from the tree, but keep the formulas in node for restoration later\n copy = list(node.formulas)\n for f in node.formulas:\n self.delete_formula(f)\n node.formulas = copy\n\n # Remove node from parent_formula\n parent_formula = node.parent_formula\n parent_formula.node_children.remove(node)\n\n # Remove the node from parent\n node.parent.children.remove(node)\n\n # Remove the node from the Tree node list\n self.nodes.pop(node.node_id)\n self.readjust_node_id(node.node_id)", "def delete_node_cascade(self,n):\n # list will get mutated - copy preemptively\n for j in list(self.node_to_edges(n)):\n self.delete_edge_cascade(j)\n self.delete_node(n)", "def delete(node):\n try:\n if os.path.isdir(node):\n shutil.rmtree(node)\n else:\n os.unlink(node)\n except OSError as error:\n if error.errno not in [errno.ENOENT, errno.EPERM, errno.EACCES]:\n raise error", "def delete(self, *args, **kwargs):\n self.delete_relatives()\n old_content = self.content\n super().delete(*args, **kwargs)\n if old_content.isOrphaned():\n old_content.delete()", "def delete():", "def _delete(self, current_node):\n pass", "def delete(self):\n self.graph._del(handle=self.handle)", "def delete_all(self):\n query = \"\"\"MATCH(n) DETACH DELETE n\"\"\"\n return self.create_tx(query)", "def del_node(self, n):\n if n in self.node_dict:\n del self.node_dict[n]\n for node in self.node_dict:\n try:\n self.del_edge(node, n)\n except:\n pass\n else:\n raise KeyError(\"Cannot remove node that does not exist.\")", "def destroyNodes(self):\r\n for nt in self.listNodes.keys(): \t# for all kind of nodes...\r\n for node in self.listNodes[nt]: \t# for all nodes of type <nt>\r\n if node.graphObject_: node.graphObject_.destroy()" ]
[ "0.7315839", "0.7264437", "0.7173346", "0.71137744", "0.7031798", "0.6985995", "0.69757694", "0.69397056", "0.6935024", "0.6907919", "0.68684053", "0.6862054", "0.6849799", "0.68376833", "0.6835751", "0.681058", "0.68090886", "0.6793854", "0.6739512", "0.6716", "0.66446584", "0.6642482", "0.66316813", "0.6630539", "0.6626539", "0.66037095", "0.6591668", "0.6588554", "0.65810806", "0.6561224" ]
0.7336805
0
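A hedged sketch of driving delete_node with the official neo4j Python driver (v5 API assumed); the URI, credentials, and node name are hypothetical.

from neo4j import GraphDatabase

driver = GraphDatabase.driver('bolt://localhost:7687', auth=('neo4j', 'secret'))
with driver.session() as session:
    # execute_write supplies the managed transaction object as `tx`.
    session.execute_write(delete_node, 'alice', 'Person')
driver.close()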
Delete an Utterance relationship based on the input nodes
def delete_relationship(tx, node_value_1=None, node_value_2=None, node_type_1=None, node_type_2=None, relationship=None):
    if node_value_1 is None and node_type_1 is None:
        cql = "MATCH ()-[u:" + relationship + "]-(w:" + node_type_2 + "{name:$node_value_2}) " \
              "DELETE u;"
        try:
            tx.run(cql, node_value_2=node_value_2)
        except Exception as e:
            print(str(e))
    elif node_value_2 is None and node_type_2 is None:
        cql = "MATCH (s:" + node_type_1 + "{name:$node_value_1})-[u:" + relationship + "]-() " \
              "DELETE u;"
        try:
            tx.run(cql, node_value_1=node_value_1)
        except Exception as e:
            print(str(e))
    else:
        cql = "MATCH (s:" + node_type_1 + "{name:$node_value_1})-[u:" + relationship + "]-(w:" + node_type_2 + "{name:$node_value_2}) " \
              "DELETE u;"
        try:
            tx.run(cql, node_value_1=node_value_1, node_value_2=node_value_2)
        except Exception as e:
            print(str(e))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_relation(wn, source, target, change_list=None):\n delete_rel(source, target, change_list)\n delete_rel(target, source, change_list)", "def delete_sense_relation(wn, source, target, change_list=None):\n delete_sense_rel(wn, source, target, change_list)\n delete_sense_rel(wn, target, source, change_list)", "def test_delete_hyperflex_node_profile(self):\n pass", "def removeNode(self, node):", "def delete_node(tx, node_value, node_type):\n cql = \"MATCH(n:\" + node_type + \"{name:$node_value}) DETACH DELETE(n);\"\n try:\n tx.run(cql, node_value=node_value)\n except Exception as e:\n print(str(e))", "def test_relation_after_remove():\n assert query_row(db_conf, 'osm_buildings', 50011)['type'] == 'yes'\n assert query_row(db_conf, 'osm_landusages', 50021) == None\n assert query_row(db_conf, 'osm_landusages', -50021) == None", "def deleteNode(*args, **kwds):\n nodes = args\n if len(args) < 1:\n nodes = cmds.ls(sl=1)\n \n for node in nodes:\n node_lst = [node]\n if isinstance(node, (list, tuple)):\n node_lst = node\n\n for n in node_lst:\n if cmds.objExists(str(n)):\n cmds.delete(str(n), **kwds)\n else:\n cmds.warning(\"# Don’t exist - \" + node)", "def delete_rel(source, target, change_list=None):\n print(\"Delete %s =*=> %s\" % (source.id, target.id))\n ss = source\n source.synset_relations = [\n r for r in ss.synset_relations if r.target != target.id]\n if change_list:\n change_list.change_synset(source)", "def test_remove_relation_type(self):\n pass", "def _delete_edges(self, to_be_deleted_set, adj_dict):\n for pair in to_be_deleted_set:\n first_node = pair[0]\n second_node = pair[1]\n adj_dict.pop((first_node, second_node), None)", "def remove_nodes(self, properties, **kwargs):\r\n\t\traise NotImplementedError", "def delete_sense_rel(wn, source, target, change_list=None):\n print(\"Delete %s =*=> %s\" % (source, target))\n (source_synset, source_entry) = decompose_sense_id(source)\n lex_name = wn.synset_by_id(source_synset).lex_name\n entry = wn.entry_by_id(source_entry)\n if change_list:\n change_list.change_entry(wn, entry)\n sense = [sense for sense in entry.senses if sense.id == source][0]\n sense.sense_relations = [\n r for r in sense.sense_relations if r.target != target]", "def remove_node(self, node):\n # if the node is a part of the graph\n if node.get_name() in self.get_node_names():\n for edge in node.get_incident_edges(): # for every edge incident to the input node\n other_node = edge.get_other_node(node.get_name()) # get the other incident node object\n if other_node.get_name() in self.get_node_names(): # if the other node is a part of the graph\n self.remove_edge(tuple((node, other_node))) # remove the edge\n self.set_nodeset(\n set({\n vertex\n for vertex in self.get_nodeset()\n if not vertex.get_name().__eq__(node.get_name())\n })\n ) # remove the node from the graph's nodeset", "def removeNode(node, pNodes, pInteractions):\n rpInteractions = reverseInteractions(pInteractions)\n del pNodes[node]\n if node in pInteractions:\n for element in pInteractions[node].keys():\n del pInteractions[node][element]\n if len(pInteractions[node].keys()) == 0:\n del pInteractions[node]\n del rpInteractions[element][node]\n if len(rpInteractions[element].keys()) == 0:\n del rpInteractions[element]\n if node in rpInteractions:\n for element in rpInteractions[node].keys():\n del pInteractions[element][node]\n if len(pInteractions[element].keys()) == 0:\n del pInteractions[element]\n del rpInteractions[node][element]\n if len(rpInteractions[node].keys()) == 0:\n del 
rpInteractions[node]\n return(pNodes, pInteractions)", "def del_node (self, id):\n raise NotImplementedError", "def delete_sense_rel(wn, source, target, change_list=None):\n print(\"Delete %s =*=> %s\" % (source, target))\n (source_synset, source_entry) = decompose_sense_id(source)\n lex_name = wn.synset_by_id(source_synset).lex_name\n wn_source = wn\n entry = wn_source.entry_by_id(source_entry)\n if entry:\n sense = [sense for sense in entry.senses if sense.id == source][0]\n if not any(r for r in sense.sense_relations if r.target == target):\n print(\"No sense relations deleted\")\n else:\n sense.sense_relations = [\n r for r in sense.sense_relations if r.target != target]\n if change_list:\n change_list.change_entry(wn, entry)\n else:\n print(\"No entry for \" + source_entry)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\n neighbors = ugraph[node]\n ugraph.pop(node)\n for neighbor in neighbors:\n ugraph[neighbor].remove(node)", "def delete_node(ugraph, node):\r\n neighbors = ugraph[node]\r\n ugraph.pop(node)\r\n for neighbor in neighbors:\r\n ugraph[neighbor].remove(node)", "def test_relation_before_remove():\n assert query_row(db_conf, 'osm_buildings', 50011)['type'] == 'yes'\n assert query_row(db_conf, 'osm_landusages', -50021)['type'] == 'park'", "def _delete_cuds_triples(self, cuds_object):\n del self._registry[cuds_object.uid]\n t = self.graph.value(cuds_object.iri, rdflib.RDF.type)\n self.graph.remove((cuds_object.iri, None, None))\n cuds_object._graph = rdflib.Graph()\n cuds_object._graph.set((cuds_object.iri, rdflib.RDF.type, t))\n self._notify_delete(cuds_object)", "def test__removeRelObject(t):\n t.adm._removeRelObject(\"device\", \"objmap\", \"relname\")", "def test_remove_relation_types(self):\n pass", "def remove_edges(self, node: NodeKey) -> Edge:", "def test_graph_deletes_nodes(graph_with_edges):\n graph_with_edges.del_nodes('B')\n listy = ['A', 'C', 'D', 'E', 'F']\n for node in listy:\n assert node in graph_with_edges.nodes()\n assert 'B' not in graph_with_edges.nodes()", "def node_remove(self, node, update_statistics_ancestors_depth=None):\n\n if self.node_count_children(node):\n return False\n\n mtime = time()\n q = (\"select count(serial), sum(size), cluster \"\n \"from versions \"\n \"where node = ? \"\n \"group by cluster\")\n self.execute(q, (node,))\n for population, size, cluster in self.fetchall():\n self.statistics_update_ancestors(\n node, -population, -size, mtime, cluster,\n update_statistics_ancestors_depth)\n\n q = \"delete from nodes where node = ?\"\n self.execute(q, (node,))\n return True", "def test_model_flow_node_model_flow_id_node_id_component_delete(self):\n pass" ]
[ "0.6832094", "0.6719297", "0.6620328", "0.65294105", "0.65291214", "0.63991565", "0.6309279", "0.62979054", "0.6252702", "0.6207111", "0.6170153", "0.61654645", "0.6164246", "0.6163437", "0.6113204", "0.609971", "0.6099355", "0.6099355", "0.6099355", "0.6099355", "0.6099355", "0.6092636", "0.60626465", "0.6027232", "0.599832", "0.5997675", "0.5988539", "0.597338", "0.59627783", "0.59610516" ]
0.70461917
0
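A hedged sketch for delete_relationship, reusing the driver/session pattern from the previous example; labels, node names, and the relationship type are hypothetical.

with driver.session() as session:
    # Both endpoints constrained: deletes only the matching relationship.
    session.execute_write(delete_relationship,
                          node_value_1='hi there', node_value_2='hello back',
                          node_type_1='Utterance', node_type_2='Utterance',
                          relationship='FOLLOWED_BY')
    # First endpoint omitted: deletes every FOLLOWED_BY relationship
    # touching the second node, regardless of the other endpoint.
    session.execute_write(delete_relationship,
                          node_value_2='hello back', node_type_2='Utterance',
                          relationship='FOLLOWED_BY')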
Compute the tight bounding box of a binary mask.
def mask_to_bbox(mask):
    xs = np.where(np.sum(mask, axis=0) > 0)[0]
    ys = np.where(np.sum(mask, axis=1) > 0)[0]
    if len(xs) == 0 or len(ys) == 0:
        return None
    x0 = xs[0]
    x1 = xs[-1]
    y0 = ys[0]
    y1 = ys[-1]
    return np.array((x0, y0, x1, y1), dtype=np.float32)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extract_bboxes(mask):\r\n boxes = np.zeros([mask.shape[-1], 4], dtype=np.int32)\r\n for i in range(mask.shape[-1]):\r\n m = mask[:, :, i]\r\n # Bounding box.\r\n horizontal_indicies = np.where(np.any(m, axis=0))[0]\r\n vertical_indicies = np.where(np.any(m, axis=1))[0]\r\n if horizontal_indicies.shape[0]:\r\n x1, x2 = horizontal_indicies[[0, -1]]\r\n y1, y2 = vertical_indicies[[0, -1]]\r\n # x2 and y2 should not be part of the box. Increment by 1.\r\n x2 += 1\r\n y2 += 1\r\n else:\r\n # No mask for this instance. Might happen due to\r\n # resizing or cropping. Set bbox to zeros\r\n x1, x2, y1, y2 = 0, 0, 0, 0\r\n boxes[i] = np.array([y1, x1, y2, x2])\r\n return boxes.astype(np.int32)", "def bbox(img):\n a = np.where(img != 0)\n bbox = np.min(a[0]), np.max(a[0]), np.min(a[1]), np.max(a[1])\n return bbox", "def extract_bboxes(mask):\n boxes = np.zeros([mask.shape[-1], 4], dtype=np.int32)\n for i in range(mask.shape[-1]):\n m = mask[:, :, i]\n # Bounding box.\n horizontal_indicies = np.where(np.any(m, axis=0))[0]\n vertical_indicies = np.where(np.any(m, axis=1))[0]\n if horizontal_indicies.shape[0]:\n x1, x2 = horizontal_indicies[[0, -1]]\n y1, y2 = vertical_indicies[[0, -1]]\n # x2 and y2 should not be part of the box. Increment by 1.\n x2 += 1\n y2 += 1\n else:\n # No mask for this instance. Might happen due to\n # resizing or cropping. Set bbox to zeros\n x1, x2, y1, y2 = 0, 0, 0, 0\n boxes[i] = np.array([y1, x1, y2, x2])\n return boxes.astype(np.int32)", "def extract_bboxes(mask):\n boxes = np.zeros([mask.shape[-1], 4], dtype=np.int32)\n for i in range(mask.shape[-1]):\n m = mask[:, :, i]\n # Bounding box.\n horizontal_indicies = np.where(np.any(m, axis=0))[0]\n vertical_indicies = np.where(np.any(m, axis=1))[0]\n if horizontal_indicies.shape[0]:\n x1, x2 = horizontal_indicies[[0, -1]]\n y1, y2 = vertical_indicies[[0, -1]]\n # x2 and y2 should not be part of the box. Increment by 1.\n x2 += 1\n y2 += 1\n else:\n # No mask for this instance. Might happen due to\n # resizing or cropping. Set bbox to zeros\n x1, x2, y1, y2 = 0, 0, 0, 0\n boxes[i] = np.array([x1, y1, x2, y2])\n return boxes.astype(np.int32)", "def extract_bboxes(mask):\n boxes = np.zeros([mask.shape[-1],mask.shape[0], 2], dtype=np.int32)\n for i in range(mask.shape[-1]):\n # Bounding box.\n\n for j in range(mask.shape[0]):\n m = mask[j, :, i]\n horizontal_indicies = np.where(m)[0]\n\n if horizontal_indicies.shape[0]:\n x1, x2 = horizontal_indicies[[0, -1]]\n\n # x2 should not be part of the box. Increment by 1.\n x2 += 1\n else:\n # No mask for this instance. Might happen due to\n # resizing or cropping. 
Set bbox to zeros\n x1, x2 = 0, 0\n boxes[i,j] = np.array([x1, x2])\n\n return boxes.astype(np.int32)", "def mask_to_bbox(mask, label=None):\n mask = mask if label is None else mask == label\n coords = np.where(mask)\n return coords_to_bbox(coords)", "def measure_binary_image_bounds(binaryImage, border=100, targetBool=True):\n bys, bxs = where(binaryImage == targetBool)\n l = max(bxs.min() - border, 0)\n r = min(bxs.max() + border, binaryImage.shape[1])\n t = max(bys.min() - border, 0)\n b = min(bys.max() + border, binaryImage.shape[0])\n return l, r, t, b", "def get_bounding_box(img):\n rows = np.any(img, axis=1)\n cols = np.any(img, axis=0)\n rmin, rmax = np.where(rows)[0][[0, -1]]\n cmin, cmax = np.where(cols)[0][[0, -1]]\n # due to python indexing, need to add 1 to max\n # else accessing will be 1px in the box, not out\n rmax += 1\n cmax += 1\n return [rmin, rmax, cmin, cmax]", "def getbbox(self):\r\n img_ = (self._instance > 0)\r\n rows = np.any(img_, axis=1)\r\n cols = np.any(img_, axis=0)\r\n rmin, rmax = np.argmax(rows), img_.shape[0] - 1 - np.argmax(np.flipud(rows))\r\n cmin, cmax = np.argmax(cols), img_.shape[1] - 1 - np.argmax(np.flipud(cols))\r\n return (rmin, rmax, cmin, cmax)", "def get_contour_bbox_from_raw(raw_mask):\n cnts = grab_contours(\n cv2.findContours(\n raw_mask, \n cv2.RETR_EXTERNAL, \n cv2.CHAIN_APPROX_SIMPLE\n ))\n xywhs = [cv2.boundingRect(cnt) for cnt in cnts]\n xys = [(xywh[0], xywh[1], xywh[0]+xywh[2], xywh[1]+xywh[3]) for xywh in xywhs]\n return sorted(xys, key=lambda x: (x[1], x[0]))", "def boundingBox(self):\n minx, miny, maxx, maxy = self.substrates.bounds\n return pcbnew.BOX2I(\n pcbnew.VECTOR2I(int(minx), int(miny)),\n pcbnew.VECTOR2I(int(maxx - minx), int(maxy - miny)))", "def optimize_bbox(img_shape,\n bbox,\n edge_width=8):\n (rows,columns) = img_shape\n (x1,y1,x2,y2) = bbox\n\n return max(0,x1-edge_width),max(0,y1-edge_width),min(rows-1,x2+edge_width),min(columns-1,y2+edge_width)", "def bounding_box(self):\n if self._owcs.pixel_bounds is None:\n if self._owcs.pixel_shape is not None:\n nx, ny = self._owcs.pixel_shape\n elif self._owcs.array_shape is not None:\n ny, nx = self._owcs.array_shape\n else:\n return None\n\n return ((-0.5, nx - 0.5), (-0.5, ny - 0.5))\n\n else:\n return self._owcs.pixel_bounds", "def bounding_box(self):\n if self._owcs.pixel_bounds is None:\n if self._owcs.pixel_shape is not None:\n nx, ny = self._owcs.pixel_shape\n elif self._owcs.array_shape is not None:\n ny, nx = self._owcs.array_shape\n else:\n return None\n\n return ((-0.5, nx - 0.5), (-0.5, ny - 0.5))\n\n else:\n return self._owcs.pixel_bounds", "def unmold_mask(mask, bbox, image_shape):\n threshold = 0.5\n y1, x1, y2, x2 = bbox\n mask = scipy.misc.imresize(\n mask, (y2 - y1, x2 - x1), interp='bilinear').astype(np.float32) / 255.0\n mask = np.where(mask >= threshold, 1, 0).astype(np.uint8)\n\n # Put the mask in the right location.\n full_mask = np.zeros(image_shape[:2], dtype=np.uint8)\n full_mask[y1:y2, x1:x2] = mask\n return full_mask", "def calc_bounding_box(self):\n self.BB = self.geos.abs_el(0).BB\n for geo in self.geos.abs_iter():\n self.BB = self.BB.joinBB(geo.BB)", "def get_bounding_box(im):\n coords = np.where(im)\n \n return np.array([np.min(coords[0]), np.max(coords[0]), \n np.min(coords[1]), np.max(coords[1])])", "def smallest_bounding_box(msk):\n x, y, z = np.where(msk > 0)\n corner = np.array([x.min(), y.min(), z.min()])\n size = np.array([x.max() + 1, y.max() + 1, z.max() + 1])\n return corner, size", "def smallest_bounding_box(msk):\n x, y, z = 
np.where(msk > 0)\n corner = np.array([x.min(), y.min(), z.min()])\n size = np.array([x.max() + 1, y.max() + 1, z.max() + 1])\n return corner, size", "def bbox(self, obj):\n return self.phy2abs.bbox(obj)", "def get_biomass(binary_mask):\n\n white_pixels = cv2.countNonZero(binary_mask)\n return white_pixels", "def bounds(self):\n return self._bboxes[0][0] #TODO: merge all coverages", "def unmold_mask(mask, bbox, image_shape):\n threshold = 0.5\n y1, x1, y2, x2 = bbox\n mask = resize(mask, (y2 - y1, x2 - x1))\n mask = np.where(mask >= threshold, 1, 0).astype(np.bool)\n\n # Put the mask in the right location.\n full_mask = np.zeros(image_shape[:2], dtype=np.bool)\n full_mask[y1:y2, x1:x2] = mask\n return full_mask", "def _get_rounded_bounding_box(\n geom: BasePolygon, width: Numeric\n ) -> Tuple[int, int, int, int]:\n return (\n geom.bounds[0] - (geom.bounds[0] % width),\n geom.bounds[1] - (geom.bounds[1] % width),\n geom.bounds[2] + (-geom.bounds[2] % width),\n geom.bounds[3] + (-geom.bounds[3] % width),\n )", "def get_bbox(self):\n z2p = 64 # zoomFactor to bbox radius in pixels @ MIP0\n pos = Vec(*self.get_position())\n zoom = self.get_zoom()\n return Bbox(pos-Vec(z2p*zoom, z2p*zoom, 0), \n pos+Vec(z2p*zoom, z2p*zoom, 1))", "def detect(self, mask):\n # 1) Return Non zero indices\n det_idx = np.where(mask > 0.0)\n idx_x, idx_y = det_idx[0], det_idx[1]\n # 2) Create 1x1 box for each pixel detected.\n detections = []\n for i in range(0, len(idx_x)):\n x, y = idx_x[i], idx_y[i]\n detections.append((x, y, x+1, y+1, 1)) # x1, y1, x2, y2, area\n # 3) merge boxes\n bounding_boxes = self.bounding_boxes(detections)\n return bounding_boxes", "def smallest_bounding_box(msk):\n x, y, z = np.where(msk > 10)\n corner = np.array([x.min(), y.min(), z.min()])\n size = np.array([x.max() + 1, y.max() + 1, z.max() + 1])\n return corner, size", "def get_boundingbox(face, width, height, scale=1.3, minsize=None):\n x1 = face.left()\n y1 = face.top()\n x2 = face.right()\n y2 = face.bottom()\n size_bb = int(max(x2 - x1, y2 - y1) * scale)\n if minsize:\n if size_bb < minsize:\n size_bb = minsize\n center_x, center_y = (x1 + x2) // 2, (y1 + y2) // 2\n\n # Check for out of bounds, x-y top left corner\n x1 = max(int(center_x - size_bb // 2), 0)\n y1 = max(int(center_y - size_bb // 2), 0)\n # Check for too big bb size for given x, y\n size_bb = min(width - x1, size_bb)\n size_bb = min(height - y1, size_bb)\n\n return x1, y1, size_bb", "def bbox(self):\n bbox = self.get_bounding_box()\n if bbox is None:\n bbox = ((0, 0), (0, 0))\n return np.array(bbox)", "def boundingBoxArea(box):\n return (box[2] - box[0] + 1) * (box[3] - box[1] + 1)" ]
[ "0.6920042", "0.68922883", "0.6874152", "0.6871232", "0.68570936", "0.67880124", "0.6712782", "0.65961045", "0.6540863", "0.64311737", "0.63427234", "0.63425565", "0.6330978", "0.6330978", "0.62618804", "0.62499213", "0.62219423", "0.6137984", "0.6137984", "0.6115428", "0.6105045", "0.6097309", "0.6078465", "0.6063825", "0.6052035", "0.60367954", "0.60262144", "0.602367", "0.60209835", "0.6010961" ]
0.7192502
0
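A quick sanity check for mask_to_bbox; note the returned corners are inclusive pixel coordinates in (x0, y0, x1, y1) order.

import numpy as np

mask = np.zeros((100, 100), dtype=np.uint8)
mask[20:40, 10:30] = 1                      # rows 20..39, columns 10..29
assert mask_to_bbox(mask).tolist() == [10.0, 20.0, 29.0, 39.0]
assert mask_to_bbox(np.zeros((5, 5))) is None   # empty mask yields None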
Convert from the COCO polygon segmentation format to a binary mask encoded as a 2D array of data type numpy.float32. The polygon segmentation is understood to be enclosed in the given box and rasterized to an M x M mask. The resulting mask is therefore of shape (M, M).
def polys_to_mask_wrt_box(polygons, box, M):
    w = box[2] - box[0]
    h = box[3] - box[1]
    w = np.maximum(w, 1)
    h = np.maximum(h, 1)
    polygons_norm = []
    for poly in polygons:
        p = np.array(poly, dtype=np.float32)
        p[0::2] = (p[0::2] - box[0]) * M / w
        p[1::2] = (p[1::2] - box[1]) * M / h
        polygons_norm.append(p)
    rle = mask_util.frPyObjects(polygons_norm, M, M)
    mask = np.array(mask_util.decode(rle), dtype=np.float32)
    # Flatten in case polygons was a list
    mask = np.sum(mask, axis=2)
    mask = np.array(mask > 0, dtype=np.float32)
    return mask
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def binary_mask_to_polygon(binary_mask, tolerance=0):\r\n\r\n polygons = []\r\n if isinstance(binary_mask, torch.Tensor):\r\n binary_mask = binary_mask.cpu().numpy()\r\n # pad mask to close contours of shapes which start and end at an edge\r\n padded_binary_mask = np.pad(binary_mask, pad_width=1, mode='constant', constant_values=0)\r\n contours = measure.find_contours(padded_binary_mask, 0.5)\r\n contours = np.subtract(contours, 1)\r\n for contour in contours:\r\n contour = close_contour(contour)\r\n contour = measure.approximate_polygon(contour, tolerance)\r\n if len(contour) < 3:\r\n continue\r\n contour = np.flip(contour, axis=1) # x, y\r\n polygon = np.maximum(contour, 0)\r\n #segmentation = contour.ravel().tolist()\r\n # after padding and subtracting 1 we may get -0.5 points in our segmentation\r\n #segmentation = [0 if i < 0 else i for i in segmentation]\r\n polygons.append(polygon)\r\n\r\n return polygons", "def binary_mask_to_polygon(binary_mask, tolerance=0):\n polygons = []\n # pad mask to close contours of shapes which start and end at an edge\n padded_binary_mask = np.pad(binary_mask, pad_width=1, mode='constant', constant_values=0)\n contours = measure.find_contours(padded_binary_mask, 0.5)\n contours = np.subtract(contours, 1)\n for contour in contours:\n contour = close_contour(contour)\n contour = measure.approximate_polygon(contour, tolerance)\n if len(contour) < 3:\n continue\n contour = np.flip(contour, axis=1)\n segmentation = contour\n # after padding and subtracting 1 we may get -0.5 points in our segmentation\n segmentation = [np.clip(i,0.0,i).tolist() for i in segmentation]\n polygons.append(segmentation)\n\n return polygons", "def binary_mask_to_polygon(binary_mask, tolerance=0):\n polygons = []\n # pad mask to close contours of shapes which start and end at an edge\n padded_binary_mask = np.pad(\n binary_mask, pad_width=1, mode='constant', constant_values=0)\n contours = measure.find_contours(padded_binary_mask, 0.5)\n contours = np.subtract(contours, 1)\n for contour in contours:\n contour = close_contour(contour)\n contour = measure.approximate_polygon(contour, tolerance)\n if len(contour) < 3:\n continue\n contour = np.flip(contour, axis=1)\n segmentation = contour.ravel().tolist()\n # after padding and subtracting 1 we may\n # get -0.5 points in our segmentation\n segmentation = [0 if i < 0 else i for i in segmentation]\n polygons.append(segmentation)\n return polygons", "def coco_box_to_bbox(box):\n bbox = np.array([box[0], box[1], box[0] + box[2], box[1] + box[3]],\n dtype=np.float32)\n return bbox.reshape(1,4)", "def rasterize_polygons_within_box(\n polygons: List[np.ndarray], box: np.ndarray, mask_size: int\n) -> torch.Tensor:\n # 1. Shift the polygons w.r.t the boxes\n w, h = box[2] - box[0], box[3] - box[1]\n\n polygons = copy.deepcopy(polygons)\n for p in polygons:\n p[0::2] = p[0::2] - box[0]\n p[1::2] = p[1::2] - box[1]\n\n # 2. Rescale the polygons to the new box size\n ratio_h = mask_size / max(h, 0.1)\n ratio_w = mask_size / max(w, 0.1)\n\n if ratio_h == ratio_w:\n for p in polygons:\n p *= ratio_h\n else:\n for p in polygons:\n p[0::2] *= ratio_w\n p[1::2] *= ratio_h\n\n # 3. 
Rasterize the polygons with coco api\n mask = polygons_to_bitmask(polygons, mask_size, mask_size)\n mask = torch.from_numpy(mask)\n return mask", "def mask_to_bbox(mask):\n xs = np.where(np.sum(mask, axis=0) > 0)[0]\n ys = np.where(np.sum(mask, axis=1) > 0)[0]\n\n if len(xs) == 0 or len(ys) == 0:\n return None\n\n x0 = xs[0]\n x1 = xs[-1]\n y0 = ys[0]\n y1 = ys[-1]\n return np.array((x0, y0, x1, y1), dtype=np.float32)", "def coco_box_to_bbox(box):\n bbox = np.array([box[0], box[1], box[0] + box[2], box[1] + box[3]], dtype=np.float32)\n return bbox", "def _prepare_mask_file(mask):\n result = np.ndarray((mask.shape[0], mask.shape[1]), dtype=np.uint8)\n for i in range(mask.shape[0]):\n for j in range(mask.shape[1]):\n\n if mask[i][j] > 0:\n result[i][j] = 1\n else:\n result[i][j] = 0\n \n return result", "def get_mask(self, anno, img_info) -> np.ndarray:\n m = np.zeros((img_info[\"height\"], img_info[\"width\"]), dtype=np.float32)\n\n for obj in anno:\n if obj[\"iscrowd\"]:\n rle = pycocotools.mask.frPyObjects(obj[\"segmentation\"], img_info[\"height\"], img_info[\"width\"])\n mask = pycocotools.mask.decode(rle)\n if mask.shape != m.shape:\n logger.warning(f\"Mask shape {mask.shape} does not match image shape {m.shape} for image {img_info['file_name']}\")\n continue\n m += mask\n elif obj[\"num_keypoints\"] == 0:\n rles = pycocotools.mask.frPyObjects(obj[\"segmentation\"], img_info[\"height\"], img_info[\"width\"])\n for rle in rles:\n mask = pycocotools.mask.decode(rle)\n if mask.shape != m.shape:\n logger.warning(f\"Mask shape {mask.shape} does not match image shape {m.shape} for image {img_info['file_name']}\")\n continue\n\n m += mask\n\n return (m < 0.5).astype(np.float32)", "def detect(self, mask):\n # 1) Return Non zero indices\n det_idx = np.where(mask > 0.0)\n idx_x, idx_y = det_idx[0], det_idx[1]\n # 2) Create 1x1 box for each pixel detected.\n detections = []\n for i in range(0, len(idx_x)):\n x, y = idx_x[i], idx_y[i]\n detections.append((x, y, x+1, y+1, 1)) # x1, y1, x2, y2, area\n # 3) merge boxes\n bounding_boxes = self.bounding_boxes(detections)\n return bounding_boxes", "def cocoseg_to_binary(seg, height, width):\n if type(seg) == list:\n rle = cocomask.frPyObjects(seg, height, width)\n rle = cocomask.merge(rle)\n mask = cocomask.decode([rle])\n elif type(seg['counts']) == list:\n rle = cocomask.frPyObjects(seg, height, width)\n mask = cocomask.decode([rle])\n else:\n rle = cocomask.merge(seg)\n mask = cocomask.decode([rle])\n assert mask.shape[2] == 1\n return mask[:, :, 0]", "def get_mask(self, shape):\n h, w = shape[0:2]\n y, x = np.mgrid[:h, :w]\n points = np.transpose((x.ravel(), y.ravel()))\n\n mask = _nxutils_points_inside_poly(points, self.verts)\n #mask = nxutils.points_inside_poly(points, self.verts)\n return mask.reshape(h, w)", "def load_mask(self, image_id):\n image_info = self.image_info[image_id]\n if image_info[\"source\"] != \"face\":\n return super(self.__class__, self).load_mask(image_id)\n info = self.image_info[image_id]\n mask = np.zeros([info['height'], info['width'], len(info['boundingbox'])], dtype=np.uint8)\n for i, p in enumerate(info['boundingbox'].values()):\n rr, cc = skimage.draw.polygon(p['y'], p['x'])\n mask[rr, cc, i] = 1\n return mask, np.ones([mask.shape[-1]], dtype=np.int32)", "def _get_mask(self, anno, idx):\n coco = self.coco\n img_info = coco.loadImgs(self.img_ids[idx])[0]\n\n m = np.zeros((img_info['height'], img_info['width']), dtype=np.float32)\n\n for obj in anno:\n if 'segmentation' in obj:\n if obj['iscrowd']:\n rle = 
pycocotools.mask.frPyObjects(obj['segmentation'],\n img_info['height'],\n img_info['width'])\n m += pycocotools.mask.decode(rle)\n elif obj['num_keypoints'] == 0:\n rles = pycocotools.mask.frPyObjects(obj['segmentation'],\n img_info['height'],\n img_info['width'])\n for rle in rles:\n m += pycocotools.mask.decode(rle)\n\n return m < 0.5", "def mask(self):\n mask = np.zeros((self.height, self.width))\n pts = [\n np.array(anno).reshape(-1, 2).round().astype(int)\n for anno in self.segmentation\n ]\n mask = cv2.fillPoly(mask, pts, 1)\n return mask", "def polygon_to_mask_array(dims: tuple, vertices: CoordinatePair) -> np.ndarray:\n\n poly_vertices = [\n (vertices.x_ul, vertices.y_ul),\n (vertices.x_ul, vertices.y_br),\n (vertices.x_br, vertices.y_br),\n (vertices.x_br, vertices.y_ul),\n ]\n\n img = PIL.Image.new(\"L\", dims, 0)\n PIL.ImageDraw.Draw(img).polygon(poly_vertices, outline=1, fill=1)\n return np.array(img).astype(bool)", "def extract_bboxes(mask):\r\n boxes = np.zeros([mask.shape[-1], 4], dtype=np.int32)\r\n for i in range(mask.shape[-1]):\r\n m = mask[:, :, i]\r\n # Bounding box.\r\n horizontal_indicies = np.where(np.any(m, axis=0))[0]\r\n vertical_indicies = np.where(np.any(m, axis=1))[0]\r\n if horizontal_indicies.shape[0]:\r\n x1, x2 = horizontal_indicies[[0, -1]]\r\n y1, y2 = vertical_indicies[[0, -1]]\r\n # x2 and y2 should not be part of the box. Increment by 1.\r\n x2 += 1\r\n y2 += 1\r\n else:\r\n # No mask for this instance. Might happen due to\r\n # resizing or cropping. Set bbox to zeros\r\n x1, x2, y1, y2 = 0, 0, 0, 0\r\n boxes[i] = np.array([y1, x1, y2, x2])\r\n return boxes.astype(np.int32)", "def extract_bboxes(mask):\n boxes = np.zeros([mask.shape[-1], 4], dtype=np.int32)\n for i in range(mask.shape[-1]):\n m = mask[:, :, i]\n # Bounding box.\n horizontal_indicies = np.where(np.any(m, axis=0))[0]\n vertical_indicies = np.where(np.any(m, axis=1))[0]\n if horizontal_indicies.shape[0]:\n x1, x2 = horizontal_indicies[[0, -1]]\n y1, y2 = vertical_indicies[[0, -1]]\n # x2 and y2 should not be part of the box. Increment by 1.\n x2 += 1\n y2 += 1\n else:\n # No mask for this instance. Might happen due to\n # resizing or cropping. Set bbox to zeros\n x1, x2, y1, y2 = 0, 0, 0, 0\n boxes[i] = np.array([x1, y1, x2, y2])\n return boxes.astype(np.int32)", "def cfmask_to_mask(raster):\r\n mask = raster.ReadAsArray()\r\n # A value of 0 is clear of clouds/water. 
Make all other values = 1.\r\n mask[mask != 0] = 1\r\n\r\n # That's it, just return the result...\r\n return mask", "def whole_mask2mask(whole_mask, bbox):\n if len(whole_mask) != len(bbox):\n raise ValueError(\n 'The length of whole_mask and bbox should be the same')\n mask = list()\n for whole_m, bb in zip(whole_mask, bbox):\n bb = np.round(bb).astype(np.int32)\n mask.append(whole_m[bb[0]:bb[2], bb[1]:bb[3]])\n return mask", "def poly2mask(self):\n self.x_gridnum = int((self.x_range[1] - self.x_range[0]) / self.x_gridsize)\n self.y_gridnum = int((self.y_range[1] - self.y_range[0]) / self.y_gridsize)\n img = Image.new(\"L\", (self.x_gridnum, self.y_gridnum), 0)\n\n self.perimeter = 0.0\n for ii in self.polygons:\n pp = np.array(ii) * self.CD # polygon\n polygonlen = len(pp)\n self.perimeter += np.sum(np.abs(pp[0:-1] - pp[1:polygonlen]))\n pp[:, 0] = (pp[:, 0] - self.x_range[0]) / self.x_gridsize\n pp[:, 1] = (pp[:, 1] - self.y_range[0]) / self.y_gridsize\n vetex_list = list(pp)\n polygon = [tuple(y) for y in vetex_list]\n ImageDraw.Draw(img).polygon(polygon, outline=1, fill=1)\n\n self.data = np.array(img)\n self.data = np.float64(self.data)\n\n self.spat_part = pyfftw.empty_aligned(\n (self.y_gridnum, self.x_gridnum), dtype=\"complex128\"\n )\n self.freq_part = pyfftw.empty_aligned(\n (self.y_gridnum, self.x_gridnum), dtype=\"complex128\"\n )\n self.fft_mask = pyfftw.FFTW(self.spat_part, self.freq_part, axes=(0, 1))", "def extract_bboxes(mask):\n boxes = np.zeros([mask.shape[-1],mask.shape[0], 2], dtype=np.int32)\n for i in range(mask.shape[-1]):\n # Bounding box.\n\n for j in range(mask.shape[0]):\n m = mask[j, :, i]\n horizontal_indicies = np.where(m)[0]\n\n if horizontal_indicies.shape[0]:\n x1, x2 = horizontal_indicies[[0, -1]]\n\n # x2 should not be part of the box. Increment by 1.\n x2 += 1\n else:\n # No mask for this instance. Might happen due to\n # resizing or cropping. Set bbox to zeros\n x1, x2 = 0, 0\n boxes[i,j] = np.array([x1, x2])\n\n return boxes.astype(np.int32)", "def extract_bboxes(mask):\n boxes = np.zeros([mask.shape[-1], 4], dtype=np.int32)\n for i in range(mask.shape[-1]):\n m = mask[:, :, i]\n # Bounding box.\n horizontal_indicies = np.where(np.any(m, axis=0))[0]\n vertical_indicies = np.where(np.any(m, axis=1))[0]\n if horizontal_indicies.shape[0]:\n x1, x2 = horizontal_indicies[[0, -1]]\n y1, y2 = vertical_indicies[[0, -1]]\n # x2 and y2 should not be part of the box. Increment by 1.\n x2 += 1\n y2 += 1\n else:\n # No mask for this instance. Might happen due to\n # resizing or cropping. 
Set bbox to zeros\n x1, x2, y1, y2 = 0, 0, 0, 0\n boxes[i] = np.array([y1, x1, y2, x2])\n return boxes.astype(np.int32)", "def get_regions_mask(self, input):", "def geometry_mask(geom, geobox, all_touched=False, invert=False):\n return rasterio.features.geometry_mask([geom],\n out_shape=geobox.shape,\n transform=geobox.affine,\n all_touched=all_touched,\n invert=invert)", "def get_binary_mask(self,index):\n mask = self.load_mask_png(index)\n (rows,cols) = np.where(mask>0)[0:2] #pixels in mask disregarding the color\n new_mask = np.zeros(shape=mask.shape[0:2], dtype=np.uint8)\n new_mask[(rows,cols)] = 255\n return new_mask", "def bbox2mask(self, shape, margin, bbox_shape, times):\r\n bboxs = []\r\n for i in range(times):\r\n bbox = self.random_bbox(shape, margin, bbox_shape)\r\n bboxs.append(bbox)\r\n height = shape\r\n width = shape\r\n mask = np.zeros((height, width), np.float32)\r\n for bbox in bboxs:\r\n h = int(bbox[2] * 0.1) + np.random.randint(int(bbox[2] * 0.2 + 1))\r\n w = int(bbox[3] * 0.1) + np.random.randint(int(bbox[3] * 0.2) + 1)\r\n mask[(bbox[0] + h) : (bbox[0] + bbox[2] - h), (bbox[1] + w) : (bbox[1] + bbox[3] - w)] = 1.\r\n return mask.reshape((1, ) + mask.shape).astype(np.float32)", "def bbox2mask(self, shape, margin, bbox_shape, times):\r\n bboxs = []\r\n for i in range(times):\r\n bbox = self.random_bbox(shape, margin, bbox_shape)\r\n bboxs.append(bbox)\r\n height = shape\r\n width = shape\r\n mask = np.zeros((height, width), np.float32)\r\n for bbox in bboxs:\r\n h = int(bbox[2] * 0.1) + np.random.randint(int(bbox[2] * 0.2 + 1))\r\n w = int(bbox[3] * 0.1) + np.random.randint(int(bbox[3] * 0.2) + 1)\r\n mask[(bbox[0] + h) : (bbox[0] + bbox[2] - h), (bbox[1] + w) : (bbox[1] + bbox[3] - w)] = 1.\r\n return mask.reshape((1, ) + mask.shape).astype(np.float32)", "def _batched_mask_to_box(masks: \"torch.Tensor\"):\n # torch.max below raises an error on empty inputs, just skip in this case\n\n if torch.numel(masks) == 0:\n return torch.zeros(*masks.shape[:-2], 4, device=masks.device)\n\n # Normalize shape to Cxheightxwidth\n shape = masks.shape\n height, width = shape[-2:]\n\n # Get top and bottom edges\n in_height, _ = torch.max(masks, dim=-1)\n in_height_coords = in_height * torch.arange(height, device=in_height.device)[None, :]\n bottom_edges, _ = torch.max(in_height_coords, dim=-1)\n in_height_coords = in_height_coords + height * (~in_height)\n top_edges, _ = torch.min(in_height_coords, dim=-1)\n\n # Get left and right edges\n in_width, _ = torch.max(masks, dim=-2)\n in_width_coords = in_width * torch.arange(width, device=in_width.device)[None, :]\n right_edges, _ = torch.max(in_width_coords, dim=-1)\n in_width_coords = in_width_coords + width * (~in_width)\n left_edges, _ = torch.min(in_width_coords, dim=-1)\n\n # If the mask is empty the right edge will be to the left of the left edge.\n # Replace these boxes with [0, 0, 0, 0]\n empty_filter = (right_edges < left_edges) | (bottom_edges < top_edges)\n out = torch.stack([left_edges, top_edges, right_edges, bottom_edges], dim=-1)\n out = out * (~empty_filter).unsqueeze(-1)\n\n # Return to original shape\n out = out.reshape(*shape[:-2], 4)\n return out", "def mask(self, polygon: Union[Polygon, MultiPolygon], srs=\"EPSG:4326\") -> 'ImageCollection':\n geojson = mapping(polygon)\n geojson['crs'] = {\n 'type': 'name',\n 'properties': {\n 'name': srs\n }\n }\n\n process_id = 'mask'\n\n args = {\n 'imagery': self.graph,\n 'mask_shape': geojson\n }\n\n return self.graph_add_process(process_id, args)" ]
[ "0.6484304", "0.6433804", "0.6404158", "0.63878393", "0.63706", "0.6336888", "0.62718064", "0.62076074", "0.6202123", "0.6174527", "0.61689824", "0.61581665", "0.61225784", "0.6116983", "0.610723", "0.6053304", "0.60233164", "0.5974742", "0.5972378", "0.5969232", "0.5961104", "0.59480673", "0.5946011", "0.5908218", "0.5893851", "0.5880969", "0.5879456", "0.5879456", "0.587404", "0.58540064" ]
0.65987563
0
Performs greedy non-maximum suppression based on an overlap measurement between masks. The type of measurement is determined by `mode` and can be either 'IOU' (standard intersection over union) or 'IOMA' (intersection over minimum area).
def rle_mask_nms(masks, dets, thresh, mode='IOU'):
    if len(masks) == 0:
        return []
    if len(masks) == 1:
        return [0]

    if mode == 'IOU':
        # Computes ious[m1, m2] = area(intersect(m1, m2)) / area(union(m1, m2))
        all_not_crowds = [False] * len(masks)
        ious = mask_util.iou(masks, masks, all_not_crowds)
    elif mode == 'IOMA':
        # Computes ious[m1, m2] = area(intersect(m1, m2)) / min(area(m1), area(m2))
        all_crowds = [True] * len(masks)
        # ious[m1, m2] = area(intersect(m1, m2)) / area(m2)
        ious = mask_util.iou(masks, masks, all_crowds)
        # ... = max(area(intersect(m1, m2)) / area(m2),
        #           area(intersect(m2, m1)) / area(m1))
        ious = np.maximum(ious, ious.transpose())
    elif mode == 'CONTAINMENT':
        # Computes ious[m1, m2] = area(intersect(m1, m2)) / area(m2),
        # which measures how much m2 is contained inside m1
        all_crowds = [True] * len(masks)
        ious = mask_util.iou(masks, masks, all_crowds)
    else:
        raise NotImplementedError('Mode {} is unknown'.format(mode))

    scores = dets[:, 4]
    order = np.argsort(-scores)

    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(i)
        ovr = ious[i, order[1:]]
        inds_to_keep = np.where(ovr <= thresh)[0]
        order = order[inds_to_keep + 1]

    return keep
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def nms(dets, thresh=0.5, mode=\"Union\"):\n x1 = dets[:, 0]\n y1 = dets[:, 1]\n x2 = dets[:, 2]\n y2 = dets[:, 3]\n scores = dets[:, 4]\n\n areas = (x2 - x1 + 1) * (y2 - y1 + 1)\n order = scores.argsort()[::-1]\n\n keep = []\n while order.size > 0:\n i = order[0]\n keep.append(i)\n xx1 = np.maximum(x1[i], x1[order[1:]])\n yy1 = np.maximum(y1[i], y1[order[1:]])\n xx2 = np.minimum(x2[i], x2[order[1:]])\n yy2 = np.minimum(y2[i], y2[order[1:]])\n\n w = np.maximum(0.0, xx2 - xx1 + 1)\n h = np.maximum(0.0, yy2 - yy1 + 1)\n inter = w * h\n if mode == \"Union\":\n ovr = inter / (areas[i] + areas[order[1:]] - inter)\n elif mode == \"Minimum\":\n ovr = inter / np.minimum(areas[i], areas[order[1:]])\n\n inds = np.where(ovr <= thresh)[0]\n order = order[inds + 1]\n \n #step 2: filter the word space \n inds = range(len(x1))\n keep_ori = keep\n for k in keep_ori:\n inds_exp = list(set(inds) - set([k]))\n xx1 = np.maximum(x1[k], x1[inds_exp])\n yy1 = np.maximum(y1[k], y1[inds_exp])\n xx2 = np.minimum(x2[k], x2[inds_exp])\n yy2 = np.minimum(y2[k], y2[inds_exp])\n \n w = np.maximum(0.0, xx2 - xx1 + 1)\n h = np.maximum(0.0, yy2 - yy1 + 1)\n inter = w * h\n ovr = inter / (areas[k] + areas[inds_exp] - inter)\n ind_max = np.argmax(ovr)\n if ovr[ind_max] > thresh:\n keep.append(inds_exp[ind_max])\n\n #step 3: merge \n retain = []\n for i in range(len(keep) - 1):\n xx1 = np.maximum(x1[keep[i]], x1[keep[i+1:]])\n yy1 = np.maximum(y1[keep[i]], y1[keep[i+1:]])\n xx2 = np.maximum(x2[keep[i]], x2[keep[i+1:]])\n yy2 = np.maximum(y2[keep[i]], y2[keep[i+1:]])\n\n \n w = np.maximum(0.0, xx2 - xx1 + 1)\n h = np.maximum(0.0, yy2 - yy1 + 1)\n inter = w * h\n ovr = inter / (areas[keep[i]] + areas[keep[i+1:]] - inter)\n inds = np.where(ovr<0.2)[0]\n for j in inds:\n retain.append(keep[i+1+j])\n return dets[retain]", "def box_non_maximum_suppression(data=None, overlap_thresh=_Null, topk=_Null, coord_start=_Null, score_index=_Null, id_index=_Null, force_suppress=_Null, in_format=_Null, out_format=_Null, out=None, name=None, **kwargs):\n return (0,)", "def non_maximum_suppression(boxes, confs, overlap_threshold, top_k):\n eps = 1e-15\n \n boxes = np.asarray(boxes, dtype='float32')\n \n pick = []\n x1, y1, x2, y2 = boxes.T\n \n idxs = np.argsort(confs)\n area = (x2 - x1) * (y2 - y1)\n \n while len(idxs) > 0:\n i = idxs[-1]\n \n pick.append(i)\n if len(pick) >= top_k:\n break\n \n idxs = idxs[:-1]\n \n xx1 = np.maximum(x1[i], x1[idxs])\n yy1 = np.maximum(y1[i], y1[idxs])\n xx2 = np.minimum(x2[i], x2[idxs])\n yy2 = np.minimum(y2[i], y2[idxs])\n \n w = np.maximum(0, xx2 - xx1)\n h = np.maximum(0, yy2 - yy1)\n I = w * h\n \n overlap = I / (area[idxs] + eps)\n # as in Girshick et. 
al.\n \n #U = area[idxs] + area[i] - I\n #overlap = I / (U + eps)\n \n idxs = idxs[overlap <= overlap_threshold]\n \n return pick", "def create_mask(data, mode: Union[int, float, str]='median', value: Union[float, int]=0):\n \n assert mode in ['median', 'value']\n \n bg_value = value\n if mode == 'median':\n bg_value = np.median(data)\n\n mask = (data != bg_value).astype(int) \n return mask", "def non_max_suppression(prediction, conf_thres=0.5, nms_thres=0.4):\n\n # From (center x, center y, width, height) to (x1, y1, x2, y2)\n prediction[..., :4] = change_box_order(prediction[..., :4], order=\"xywh2xyxy\")\n output = [None for _ in range(len(prediction))]\n for image_i, image_pred in enumerate(prediction):\n # Filter out confidence scores below threshold\n image_pred = image_pred[image_pred[:, 4] >= conf_thres]\n # If none are remaining => process next image\n if not image_pred.size(0):\n continue\n # Object confidence times class confidence\n score = image_pred[:, 4] * image_pred[:, 5:].max(1)[0]\n # Sort by it\n image_pred = image_pred[(-score).argsort()]\n class_confs, class_preds = image_pred[:, 5:].max(1, keepdim=True)\n detections = torch.cat(\n (image_pred[:, :5], class_confs.float(), class_preds.float()), 1\n )\n # Perform non-maximum suppression\n keep_boxes = []\n while detections.size(0):\n large_overlap = (\n box_iou(detections[0, :4].unsqueeze(0), detections[:, :4], order=\"xyxy\")\n > nms_thres\n )\n label_match = detections[0, -1] == detections[:, -1]\n # Indices of boxes with lower confidence scores, large IOUs and matching labels\n invalid = large_overlap & label_match\n weights = detections[invalid, 4:5]\n # Merge overlapping bboxes by order of confidence\n detections[0, :4] = (weights * detections[invalid, :4]).sum(\n 0\n ) / weights.sum()\n keep_boxes += [detections[0]]\n detections = detections[~invalid]\n if keep_boxes:\n output[image_i] = torch.stack(keep_boxes)\n return output", "def greedyNonMaximumSupression(boxlist,clipthresh=0.05,IOUthresh=0.5):\r\n NMSed_list=[]\r\n if len(boxlist)==0 or clipthresh>1:\r\n return NMSed_list\r\n \r\n # keep every box with largest score while doesn't overlap with all the other\r\n # boxes\r\n NMSed_list.append(boxlist[0])\r\n for i in range(1,len(boxlist)):\r\n keepflag=True\r\n \r\n if boxlist[i][4]<clipthresh:\r\n break # break when score of current box is lower than thresh\r\n else:\r\n #print('----NMS--{}----'.format(i))\r\n for j in range(len(NMSed_list)):\r\n iou=getIoU(boxlist[i],NMSed_list[j])\r\n #print(iou)\r\n if iou>IOUthresh:\r\n keepflag=False\r\n break\r\n if keepflag:\r\n NMSed_list.append(boxlist[i])\r\n \r\n return NMSed_list", "def fast_nms(maps, size, stride, prob, overlap_ratio):\n rval = []\n # Define overlapping region\n # The filter has size over,over and is centered on a point\n over = 2 * ((size - 1) / stride) + 1\n over = int(over * (1.0 - overlap_ratio))\n for s in maps:\n # Apply the filter to the overlapping region\n maps[s] = maps[s] * (maps[s] > prob)\n maxF = ndimage.filters.maximum_filter(maps[s], (over, over))\n maps[s] = maps[s] * (maps[s] == maxF)\n n_z = np.transpose(np.nonzero(maps[s]))\n rval.extend([[s, n_z[e, 0], n_z[e, 1], maps[s][n_z[e, 0], n_z[e, 1]]]\n for e in range(len(n_z))])\n return rval", "def non_max_suppression(prediction, conf_thres=0.25, iou_thres=0.45, classes=None, agnostic=False, multi_label=False,\n labels=(), max_det=300):\n\n nc = prediction.shape[2] - 5 # number of classes\n xc = prediction[..., 4] > conf_thres # candidates\n\n # Checks\n assert 0 <= conf_thres <= 1, 
f'Invalid Confidence threshold {conf_thres}, valid values are between 0.0 and 1.0'\n assert 0 <= iou_thres <= 1, f'Invalid IoU {iou_thres}, valid values are between 0.0 and 1.0'\n\n # Settings\n min_wh, max_wh = 2, 4096 # (pixels) minimum and maximum box width and height\n max_nms = 30000 # maximum number of boxes into torchvision.ops.nms()\n time_limit = 10.0 # seconds to quit after\n redundant = True # require redundant detections\n multi_label &= nc > 1 # multiple labels per box (adds 0.5ms/img)\n merge = False # use merge-NMS\n\n t = time.time()\n output = [torch.zeros((0, 6), device=prediction.device)] * prediction.shape[0]\n for xi, x in enumerate(prediction): # image index, image inference\n # Apply constraints\n # x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 # width-height\n x = x[xc[xi]] # confidence\n\n # Cat apriori labels if autolabelling\n if labels and len(labels[xi]):\n l = labels[xi]\n v = torch.zeros((len(l), nc + 5), device=x.device)\n v[:, :4] = l[:, 1:5] # box\n v[:, 4] = 1.0 # conf\n v[range(len(l)), l[:, 0].long() + 5] = 1.0 # cls\n x = torch.cat((x, v), 0)\n\n # If none remain process next image\n if not x.shape[0]:\n continue\n\n # Compute conf\n x[:, 5:] *= x[:, 4:5] # conf = obj_conf * cls_conf\n\n # Box (center x, center y, width, height) to (x1, y1, x2, y2)\n box = xywh2xyxy(x[:, :4])\n\n # Detections matrix nx6 (xyxy, conf, cls)\n if multi_label:\n i, j = (x[:, 5:] > conf_thres).nonzero(as_tuple=False).T\n x = torch.cat((box[i], x[i, j + 5, None], j[:, None].float()), 1)\n else: # best class only\n conf, j = x[:, 5:].max(1, keepdim=True)\n x = torch.cat((box, conf, j.float()), 1)[conf.view(-1) > conf_thres]\n\n # Filter by class\n if classes is not None:\n x = x[(x[:, 5:6] == torch.tensor(classes, device=x.device)).any(1)]\n\n # Apply finite constraint\n # if not torch.isfinite(x).all():\n # x = x[torch.isfinite(x).all(1)]\n\n # Check shape\n n = x.shape[0] # number of boxes\n if not n: # no boxes\n continue\n elif n > max_nms: # excess boxes\n x = x[x[:, 4].argsort(descending=True)[:max_nms]] # sort by confidence\n\n # Batched NMS\n c = x[:, 5:6] * (0 if agnostic else max_wh) # classes\n boxes, scores = x[:, :4] + c, x[:, 4] # boxes (offset by class), scores\n i = torchvision.ops.nms(boxes, scores, iou_thres) # NMS\n if i.shape[0] > max_det: # limit detections\n i = i[:max_det]\n if merge and (1 < n < 3E3): # Merge NMS (boxes merged using weighted mean)\n # update boxes as boxes(i,4) = weights(i,n) * boxes(n,4)\n iou = box_iou(boxes[i], boxes) > iou_thres # iou matrix\n weights = iou * scores[None] # box weights\n x[i, :4] = torch.mm(weights, x[:, :4]).float() / weights.sum(1, keepdim=True) # merged boxes\n if redundant:\n i = i[iou.sum(1) > 1] # require redundancy\n\n output[xi] = x[i]\n if (time.time() - t) > time_limit:\n print(f'WARNING: NMS time limit {time_limit}s exceeded')\n break # time limit exceeded\n\n return output", "def nonmax_suppression(pred_labels, probabilities, x0, y0, windowsize, overlap_thr=0.1):\n\n # define list of proposals as list of indices over all predictions\n proposals = np.arange(0, len(pred_labels), dtype='int')\n\n # intialize final list of boxes\n final = []\n\n # delete all boxes labeled as \"other\"\n mask_other = [pred!='other' for pred in pred_labels]\n proposals = list(proposals[mask_other])\n\n while len(proposals)>0:\n\n # add the box with the highest confidence to the final selection\n ind_max = probabilities[proposals].argmax()\n select = proposals.pop(ind_max)\n final.append(select)\n\n # 
delete all boxes which overlap substantially with this last selected box\n delete_i = []\n for i, p in enumerate(proposals):\n\n # compute IoU score\n boxA = (x0[select], y0[select], x0[select]+windowsize[select], y0[select]+windowsize[select])\n boxB = (x0[p], y0[p], x0[p]+windowsize[p], y0[p]+windowsize[p])\n iou = intersection_over_union_from_boxes(boxA, boxB)\n\n if iou >= overlap_thr:\n delete_i.append(i)\n\n # update proposal list\n proposals = [proposals[i] for i in range(len(proposals)) if i not in delete_i]\n\n\n new_pred_labels = np.array(pred_labels)[final]\n new_probabilities = np.array(probabilities)[final]\n new_x0 = np.array(x0)[final]\n new_y0 = np.array(y0)[final]\n new_windowsize = np.array(windowsize)[final]\n\n return new_pred_labels, new_probabilities, new_x0, new_y0, new_windowsize", "def non_max_suppression_fast(boxes, overlapThresh=0.2):\n # if there are no boxes, return an empty list\n if len(boxes) == 0:\n return []\n\n # if the bounding boxes integers, convert them to floats --\n # this is important since we'll be doing a bunch of divisions\n if boxes.dtype.kind == \"i\":\n boxes = boxes.astype(\"float\")\n\n # initialize the list of picked indexes\n pick = []\n\n # grab the coordinates of the bounding boxes\n x1 = boxes[:, 0]\n y1 = boxes[:, 1]\n x2 = boxes[:, 2]\n y2 = boxes[:, 3]\n\n # compute the area of the bounding boxes and sort the bounding\n # boxes by the bottom-right y-coordinate of the bounding box\n area = (x2 - x1 + 1) * (y2 - y1 + 1)\n idxs = np.argsort(y2)\n\n # keep looping while some indexes still remain in the indexes list\n while len(idxs) > 0:\n # grab the last index in the indexes list and add the\n # index value to the list of picked indexes\n last = len(idxs) - 1\n i = idxs[last]\n pick.append(i)\n\n # find the largest (x, y) coordinates for the start of\n # the bounding box and the smallest (x, y) coordinates\n # for the end of the bounding box\n xx1 = np.maximum(x1[i], x1[idxs[:last]])\n yy1 = np.maximum(y1[i], y1[idxs[:last]])\n xx2 = np.minimum(x2[i], x2[idxs[:last]])\n yy2 = np.minimum(y2[i], y2[idxs[:last]])\n\n # compute the width and height of the bounding box\n w = np.maximum(0, xx2 - xx1 + 1)\n h = np.maximum(0, yy2 - yy1 + 1)\n\n # compute the ratio of overlap\n overlap = (w * h) / area[idxs[:last]]\n\n # delete all indexes from the index list that have\n idxs = np.delete(idxs, np.concatenate(([last], np.where(overlap > overlapThresh)[0])))\n\n # return only the bounding boxes that were picked using the\n # integer data type\n return boxes[pick].astype(\"int\"), pick", "def non_max_suppression(prediction, conf_thres=0.1, iou_thres=0.6, merge=False, classes=None, agnostic=False):\r\n if prediction.dtype is torch.float16:\r\n prediction = prediction.float() # to FP32\r\n\r\n nc = prediction[0].shape[1] - 5 # number of classes\r\n xc = prediction[..., 4] > conf_thres # candidates\r\n\r\n # Settings\r\n min_wh, max_wh = 2, 4096 # (pixels) minimum and maximum box width and height\r\n max_det = 300 # maximum number of detections per image\r\n time_limit = 10.0 # seconds to quit after\r\n redundant = True # require redundant detections\r\n multi_label = nc > 1 # multiple labels per box (adds 0.5ms/img)\r\n\r\n t = time.time()\r\n output = [None] * prediction.shape[0]\r\n for xi, x in enumerate(prediction): # image index, image inference\r\n # Apply constraints\r\n # x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 # width-height\r\n x = x[xc[xi]] # confidence\r\n\r\n # If none remain process next image\r\n if not 
x.shape[0]:\r\n continue\r\n\r\n # Compute conf\r\n x[:, 5:] *= x[:, 4:5] # conf = obj_conf * cls_conf\r\n\r\n # Box (center x, center y, width, height) to (x1, y1, x2, y2)\r\n box = xywh2xyxy(x[:, :4])\r\n\r\n # Detections matrix nx6 (xyxy, conf, cls)\r\n if multi_label:\r\n i, j = (x[:, 5:] > conf_thres).nonzero(as_tuple=False).T\r\n x = torch.cat((box[i], x[i, j + 5, None], j[:, None].float()), 1)\r\n else: # best class only\r\n conf, j = x[:, 5:].max(1, keepdim=True)\r\n x = torch.cat((box, conf, j.float()), 1)[conf.view(-1) > conf_thres]\r\n\r\n # Filter by class\r\n if classes:\r\n x = x[(x[:, 5:6] == torch.tensor(classes, device=x.device)).any(1)]\r\n\r\n # Apply finite constraint\r\n # if not torch.isfinite(x).all():\r\n # x = x[torch.isfinite(x).all(1)]\r\n\r\n # If none remain process next image\r\n n = x.shape[0] # number of boxes\r\n if not n:\r\n continue\r\n\r\n # Sort by confidence\r\n # x = x[x[:, 4].argsort(descending=True)]\r\n\r\n # Batched NMS\r\n c = x[:, 5:6] * (0 if agnostic else max_wh) # classes\r\n boxes, scores = x[:, :4] + c, x[:, 4] # boxes (offset by class), scores\r\n i = torchvision.ops.boxes.nms(boxes, scores, iou_thres)\r\n if i.shape[0] > max_det: # limit detections\r\n i = i[:max_det]\r\n if merge and (1 < n < 3E3): # Merge NMS (boxes merged using weighted mean)\r\n try: # update boxes as boxes(i,4) = weights(i,n) * boxes(n,4)\r\n iou = box_iou(boxes[i], boxes) > iou_thres # iou matrix\r\n weights = iou * scores[None] # box weights\r\n x[i, :4] = torch.mm(weights, x[:, :4]).float() / weights.sum(1, keepdim=True) # merged boxes\r\n if redundant:\r\n i = i[iou.sum(1) > 1] # require redundancy\r\n except: # possible CUDA error https://github.com/ultralytics/yolov3/issues/1139\r\n print(x, i, x.shape, i.shape)\r\n pass\r\n\r\n output[xi] = x[i]\r\n if (time.time() - t) > time_limit:\r\n break # time limit exceeded\r\n\r\n return output", "def non_max_suppression(bboxes, iou_threshold, threshold, box_format=\"corners\"):\n\n # 49 x 6 \n assert type(bboxes) == list\n # print(bboxes)\n bboxes = [box for box in bboxes if box[1] > threshold]\n bboxes = sorted(bboxes, key=lambda x: x[1], reverse=True)\n bboxes_after_nms = []\n # print(bboxes)\n while bboxes:\n chosen_box = bboxes.pop(0)\n bbox_temp = bboxes.copy()\n bboxes = []\n for box in bbox_temp: # not the same class or not overlap a lot \n if box[0] != chosen_box[0] or intersection_over_union(torch.tensor(chosen_box[2:]),torch.tensor(box[2:]), box_format=box_format,) < iou_threshold:\n bboxes.append(box)\n\n bboxes_after_nms.append(chosen_box)\n # print(\"NMS: \" + str(len(bboxes_after_nms)))\n return bboxes_after_nms", "def non_max_suppression(prediction, conf_thres=0.25, iou_thres=0.45, agnostic=False, multi_label=False,\n labels=(), max_det=300):\n\n nc = prediction.shape[2] - 5 # number of classes\n xc = prediction[..., 4] > conf_thres # candidates\n # Checks\n assert 0 <= conf_thres <= 1, f'Invalid Confidence threshold {conf_thres}, valid values are between 0.0 and 1.0'\n assert 0 <= iou_thres <= 1, f'Invalid IoU {iou_thres}, valid values are between 0.0 and 1.0'\n\n # Settings\n min_wh, max_wh = 2, 4096 # (pixels) minimum and maximum box width and height\n max_nms = 30000 # maximum number of boxes into torchvision.ops.nms()\n #redundant = True # require redundant detections\n multi_label &= nc > 1 # multiple labels per box (adds 0.5ms/img)\n #merge = False # use merge-NMS\n\n output = [np.zeros((0, 6))] * prediction.shape[0]\n for xi, x in enumerate(prediction): # image index, image inference\n # Apply 
constraints\n # x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 # width-height\n x = x[xc[xi]] # confidence\n # If none remain process next image\n if not x.shape[0]:\n continue\n\n # Compute conf\n x[:, 5:] *= x[:, 4:5] # conf = obj_conf * cls_conf\n # Box (center x, center y, width, height) to (x1, y1, x2, y2)\n box = xywh2xyxy(x[:, :4])\n # Detections matrix nx6 (xyxy, conf, cls)\n # best class only\n conf = x[:, 5:].max(1, keepdims=True)\n j = np.argmax(x[:, 5:], axis=1)\n j = j.reshape(j.shape[0],1)\n #x = torch.cat((box, conf, j.float()), 1)[conf.view(-1) > conf_thres]\n x = np.concatenate((box, conf, j.astype(np.float32)),axis=1)\n # Check shape\n n = x.shape[0] # number of boxes\n if not n: # no boxes\n continue\n elif n > max_nms: # excess boxes\n x = x[x[:, 4].argsort()[:max_nms]] # sort by confidence\n\n # Batched NMS\n c = x[:, 5:6] * (0 if agnostic else max_wh) # classes\n boxes, scores = x[:, :4] + c, x[:, 4] # boxes (offset by class), scores\n #i = torchvision.ops.nms(boxes, scores, iou_thres) # NMS\n i = nms(boxes, scores, iou_thres) # NMS\n \n output[xi] = x[i]\n\n return output", "def non_max_suppression(prediction, conf_thres=0.4, iou_thres=0.6):\n\n nc = prediction[0].shape[1] - 5 # number of classes\n xc = prediction[..., 4] > conf_thres # candidates\n\n # Settings\n min_wh, max_wh = 2, 4096 # (pixels) minimum and maximum box width and height\n max_det = 300 # maximum number of detections per image\n time_limit = 10.0 # seconds to quit after\n redundant = True # require redundant detections\n multi_label = nc > 1 # multiple labels per box (adds 0.5ms/img)\n\n t = time.time()\n output = [None] * prediction.shape[0]\n for xi, x in enumerate(prediction): # image index, image inference\n # Apply constraints\n # x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 # width-height\n x = x[xc[xi]] # confidence\n\n # If none remain process next image\n if not x.shape[0]:\n continue\n\n # Compute conf\n x[:, 5:] *= x[:, 4:5] # conf = obj_conf * cls_conf\n\n # Box (center x, center y, width, height) to (x1, y1, x2, y2)\n box = xywh2xyxy(x[:, :4])\n\n # Detections matrix nx6 (xyxy, conf, cls)\n if multi_label:\n i, j = (x[:, 5:] > conf_thres).nonzero(as_tuple=False).T\n x = torch.cat((box[i], x[i, j + 5, None], j[:, None].float()), 1)\n else: # best class only\n conf, j = x[:, 5:].max(1, keepdim=True)\n x = torch.cat((box, conf, j.float()), 1)[conf.view(-1) > conf_thres]\n\n # If none remain process next image\n n = x.shape[0] # number of boxes\n if not n:\n continue\n\n # Sort by confidence\n # x = x[x[:, 4].argsort(descending=True)]\n\n # Batched NMS\n c = x[:, 5:6] * max_wh # classes\n boxes, scores = x[:, :4] + c, x[:, 4] # boxes (offset by class), scores\n i = torchvision.ops.boxes.nms(boxes, scores, iou_thres)\n if i.shape[0] > max_det: # limit detections\n i = i[:max_det]\n\n output[xi] = x[i]\n if (time.time() - t) > time_limit:\n break # time limit exceeded\n\n return output", "def test_calc_mode():\r\n lamb = 1.8e-6\r\n radius = 50e-6\r\n num_points = 50\r\n x = np.linspace(-1, 1, num_points) * radius\r\n y = np.linspace(-1, 1, num_points)[:, None] * radius\r\n r = (x ** 2 + y ** 2) ** 0.5\r\n theta = np.arctan2(x, y)\r\n dA = (x[1] - x[0]) * (y[1, 0] - y[0, 0])\r\n ##\r\n M = 5\r\n N = 3\r\n fields = []\r\n for m in np.arange(1, M + 1):\r\n for n in np.arange(-N, N + 1):\r\n for theta0 in (0, np.pi / 2):\r\n fields.append(hc.calc_mode(1.5, radius, n, m, lamb, r, theta, dA, theta0))\r\n rows = []\r\n for f1 in fields:\r\n row = []\r\n for 
f2 in fields:\r\n row.append(hc.calc_mode_overlap(f1, f2, dA))\r\n rows.append(row)\r\n overlap = np.array(rows)", "def nms(bobj, cf_thresh, nms_thresh):\n bboxs = bobj[\"boxs\"]\n scores = bobj[\"scores\"]\n cfvalid_ids = np.where(scores >= cf_thresh)[0]\n if len(cfvalid_ids) == 0:\n return None, None\n bboxs = bobj[\"boxs\"][cfvalid_ids]\n scores = scores[cfvalid_ids]\n ids = bobj[\"ids\"][cfvalid_ids]\n masks = bobj[\"masks\"][cfvalid_ids]\n x1 = bboxs[:, 0]\n y1 = bboxs[:, 1]\n x2 = bboxs[:, 2]\n y2 = bboxs[:, 3]\n areas = (x2 - x1 + 1) * (y2 - y1 + 1)\n # cfvalid_ids = np.where(scores >= cf_thresh)[0]\n # scores = scores[cfvalid_ids]\n\n # order = scores.argsort()[::-1]\n mask_sizes = np.sum(masks, axis=(1, 2))\n order = mask_sizes.argsort()[::-1]\n keep = []\n suppress = []\n while order.size > 0:\n i = order[0]\n keep.append(i)\n xx1 = np.maximum(x1[i], x1[order[1:]])\n yy1 = np.maximum(y1[i], y1[order[1:]])\n xx2 = np.minimum(x2[i], x2[order[1:]])\n yy2 = np.minimum(y2[i], y2[order[1:]])\n w = np.maximum(0.0, xx2 - xx1 + 1)\n h = np.maximum(0.0, yy2 - yy1 + 1)\n inter = w * h\n iou = inter / (areas[i] + areas[order[1:]] - inter)\n # Because of we split the object cross the boundary in the cropped instance,\n # concatenating it to the original instance, thus we need also mask iou condition for nms\n mask_other = masks[order[1:], :, :]\n mask_cur = masks[i, :, :]\n mask_inter = np.sum(mask_cur & mask_other, axis=(1, 2))\n mask_union = np.sum(mask_cur | mask_other, axis=(1, 2))\n mask_iou = mask_inter / mask_union\n\n suppress_inds = np.where((iou > nms_thresh) | (mask_iou > nms_thresh))[0]\n sup_i = order[1:][suppress_inds] if suppress_inds.size != 0 else np.array([])\n suppress.append(sup_i)\n\n inds = np.where((iou <= nms_thresh) & (mask_iou <= nms_thresh))[0]\n order = order[inds + 1]\n\n for i, sup in enumerate(suppress):\n if sup.any():\n for sup_id in sup:\n # sup_id = s + 1\n keep_id = keep[i]\n # union the keep mask and the suppress mask\n masks[keep_id, :, :] = masks[keep_id, :, :] | masks[sup_id, :, :]\n if keep:\n return ids[keep], masks[keep]\n else:\n return [], []", "def trim_occluded_throats(network, mask='all'):\n occluded_ts = network['throat.area'] == 0\n if np.sum(occluded_ts) > 0:\n occluded_ts *= network[\"throat.\"+mask]\n trim(network=network, throats=occluded_ts)", "def non_maximum_suppression(prediction, iou_threshold=0.45, score_threshold=0.25):\n\n # num_classes = len(names)\n max_wh = 4096\n max_det = 300\n max_nms = 30000\n output = [torch.zeros((0, 6), device=prediction.device)] * prediction.shape[0]\n\n for xi, x in enumerate(prediction):\n x = x[x[..., 4] > score_threshold]\n\n # If none remain process next image\n if not x.shape[0]:\n continue\n\n # Compute conf\n x[:, 5:] *= x[:, 4:5] # conf = obj_conf * cls_conf\n\n # Box (center x, center y, width, height) to (x1, y1, x2, y2)\n box = x[:, :4]\n\n conf, j = x[:, 5:].max(1, keepdim=True)\n x = torch.cat((box, conf, j.float()), 1)[conf.view(-1) > score_threshold]\n\n # Filter by class\n # if classes is not None:\n # x = x[(x[:, 5:6] == torch.tensor(classes, device=x.device)).any(1)]\n\n # Check shape\n n = x.shape[0] # number of boxes\n if not n: # no boxes\n continue\n elif n > max_nms: # excess boxes\n # sort by confidence\n x = x[x[:, 4].argsort(descending=True)[:max_nms]]\n\n # Batched NMS\n c = x[:, 5:6] * max_wh # classes\n # boxes (offset by class), scores\n boxes, scores = x[:, :4] + c, x[:, 4]\n i = nms(boxes, scores, iou_threshold) # NMS\n if i.shape[0] > max_det: # limit detections\n i = 
i[:max_det]\n\n output[xi] = x[i]\n\n return output", "def iou_suppression(cnt_box, yolo_box, max_threshold, min_threshold):\n all_boxes = []\n pre_bboxes = yolo_box\n bboxes = cnt_box\n for i in range(len(pre_bboxes)):\n max_flag = 0\n min_flag = 0\n for j in range(len(bboxes)):\n\n (pre_x1, pre_y1) = (pre_bboxes[i][0], pre_bboxes[i][1])\n (pre_x2, pre_y2) = (pre_bboxes[i][2], pre_bboxes[i][3])\n (cur_x1, cur_y1) = (bboxes[j][0], bboxes[j][1])\n (cur_x2, cur_y2) = (bboxes[j][2], bboxes[j][3])\n origin_w = pre_x2 - pre_x1\n origin_h = pre_y2 - pre_y1\n current_w = cur_x2 - cur_x1\n current_h = cur_y2 - cur_y1\n prime_area = origin_h * origin_w\n current_area = current_h*current_w\n\n if pre_x1 > cur_x1:\n if pre_y1 > cur_y1:\n if cur_x2 - pre_x1 <= 0 or cur_y2 - pre_y1 <= 0:\n lap_area = 0\n else:\n width = cur_x2 - pre_x1\n height = cur_y2 - pre_y1\n if width > origin_w:\n width = origin_w\n if height > origin_h:\n height = origin_h\n\n lap_area = width*height\n\n else:\n if cur_x2 - pre_x1 <= 0 or pre_y2 - cur_y1 <= 0:\n lap_area = 0\n else:\n width = cur_x2 - pre_x1\n height = pre_y2 - cur_y1\n if width > origin_w:\n width = origin_w\n if height > current_h:\n height = current_h\n\n lap_area = width*height\n else:\n if pre_y1 > cur_y1:\n if pre_x2 - cur_x1 <= 0 or cur_y2 - pre_y1 <= 0:\n lap_area = 0\n else:\n width = pre_x2 - cur_x1\n height = cur_y2 - pre_y1\n if width > current_w:\n width = current_w\n if height > origin_h:\n height = origin_h\n\n lap_area = width*height\n else:\n if pre_x2 - cur_x1 <= 0 or pre_y2 - cur_y1 <= 0:\n lap_area = 0\n else:\n width = pre_x2 - cur_x1\n height = pre_y2 - cur_y1\n if width > current_w:\n width = current_w\n if height > current_h:\n height = current_h\n\n lap_area = width*height\n\n if lap_area != 0:\n sum_area = (prime_area + current_area - lap_area)\n iou_score = lap_area/sum_area\n if iou_score > max_threshold: # set the threshold of the iou scores, in line with the sort\n max_flag = 1\n elif iou_score > min_threshold:\n min_flag = 1\n\n if max_flag == 1 or min_flag == 0:\n all_boxes.append(pre_bboxes[i])\n\n if cnt_box != []:\n for index_box in range(cnt_box.shape[0]):\n all_boxes.append(cnt_box[index_box])\n\n return np.asarray(all_boxes)", "def non_maximum_suppression(image):\n # Find local maximas.\n neighborhood = generate_binary_structure(2,2)\n local_max = maximum_filter(image, footprint=neighborhood)==image\n local_max[image<(image.max()*0.1)] = False\n\n # Erode areas to single points.\n lbs, num = label(local_max)\n centers = center_of_mass(local_max, lbs, np.arange(num)+1)\n centers = np.stack(centers).round().astype(np.int)\n ret = np.zeros_like(image, dtype=np.bool)\n ret[centers[:,0], centers[:,1]] = True\n\n return ret", "def _filter_masks_pt(\n self,\n masks,\n iou_scores,\n original_size,\n cropped_box_image,\n pred_iou_thresh=0.88,\n stability_score_thresh=0.95,\n mask_threshold=0,\n stability_score_offset=1,\n ):\n requires_backends(self, [\"torch\"])\n original_height, original_width = original_size\n iou_scores = iou_scores.flatten(0, 1)\n masks = masks.flatten(0, 1)\n\n if masks.shape[0] != iou_scores.shape[0]:\n raise ValueError(\"masks and iou_scores must have the same batch size.\")\n\n if masks.device != iou_scores.device:\n iou_scores = iou_scores.to(masks.device)\n\n batch_size = masks.shape[0]\n\n keep_mask = torch.ones(batch_size, dtype=torch.bool, device=masks.device)\n\n if pred_iou_thresh > 0.0:\n keep_mask = keep_mask & (iou_scores > pred_iou_thresh)\n\n # compute stability score\n if stability_score_thresh 
> 0.0:\n stability_scores = _compute_stability_score_pt(masks, mask_threshold, stability_score_offset)\n keep_mask = keep_mask & (stability_scores > stability_score_thresh)\n\n scores = iou_scores[keep_mask]\n masks = masks[keep_mask]\n\n # binarize masks\n masks = masks > mask_threshold\n converted_boxes = _batched_mask_to_box(masks)\n\n keep_mask = ~_is_box_near_crop_edge(\n converted_boxes, cropped_box_image, [0, 0, original_width, original_height]\n )\n\n scores = scores[keep_mask]\n masks = masks[keep_mask]\n converted_boxes = converted_boxes[keep_mask]\n\n masks = _pad_masks(masks, cropped_box_image, original_height, original_width)\n # conversion to rle is necessary to run non-maximum suppresion\n masks = _mask_to_rle_pytorch(masks)\n\n return masks, scores, converted_boxes", "def nms_cpu(boxes, confs, nms_thresh=0.5, min_mode=False):\n x1 = boxes[:, 0]\n y1 = boxes[:, 1]\n x2 = boxes[:, 2]\n y2 = boxes[:, 3]\n\n areas = (x2 - x1) * (y2 - y1)\n order = confs.argsort()[::-1]\n\n keep = []\n while order.size > 0:\n idx_self = order[0]\n idx_other = order[1:]\n\n keep.append(idx_self)\n\n xx1 = np.maximum(x1[idx_self], x1[idx_other])\n yy1 = np.maximum(y1[idx_self], y1[idx_other])\n xx2 = np.minimum(x2[idx_self], x2[idx_other])\n yy2 = np.minimum(y2[idx_self], y2[idx_other])\n\n w = np.maximum(0.0, xx2 - xx1)\n h = np.maximum(0.0, yy2 - yy1)\n inter = w * h\n\n if min_mode:\n over = inter / np.minimum(areas[order[0]], areas[order[1:]])\n else:\n over = inter / (areas[order[0]] + areas[order[1:]] - inter)\n\n inds = np.where(over <= nms_thresh)[0]\n order = order[inds + 1]\n\n return np.array(keep)", "def non_max_suppression_all_classes(boxes, scores, labels, iou_threshold=0.5):\n excluded_indices = []\n for i in range(0,len(boxes)):\n obj1_box, _, obj1_label = boxes[i], scores[i], labels[i]\n for j in range(i+1,len(boxes)):\n obj2_box, _, obj2_label = boxes[j], scores[j], labels[j]\n if (get_iou(obj1_box, obj2_box) > iou_threshold):\n #print('excluding idx={}, class={}, score={}, bbox={}'.format(j, obj2_label, obj2_score, obj2_box))\n excluded_indices.append(j)\n \n excluded_indices = list(set(excluded_indices)) #Elimina indices repetidos\n included_indices = [idx for idx in range(len(boxes)) if idx not in excluded_indices]\n #print(included_indices)\n return included_indices", "def NMS(dets, threshold):\n assert dets.dim() == 2 and dets.size(1) == 5, \"input error of dets\"\n\n x1 = dets[:,0]\n y1 = dets[:,1]\n x2 = dets[:,2]\n y2 = dets[:,3]\n score = dets[:,4]\n\n # 1 compute areas\n areas = (x2-x1+1) * (y2-y1+1)\n\n # 2 sort score \n order = score.sort(dim=0,descending=True)[1]\n\n # 3 del bbox of those IoU greater than threshold\n # import ipdb; ipdb.set_trace()\n mask = torch.zeros_like(order, dtype=torch.uint8).cuda()\n while order.numel() > 0:\n i = order[0]\n mask[i] = 1\n # compute IoU\n xx1 = torch.max(x1[i], x1[order[1:]])\n yy1 = torch.max(y1[i], y1[order[1:]])\n xx2 = torch.min(x2[i], x2[order[1:]])\n yy2 = torch.min(y2[i], y2[order[1:]])\n\n w = xx2 - xx1 + 1\n h = yy2 - yy1 +1\n w[w<0] = 0\n h[h<0] = 0\n inter_area = w*h\n IoU = inter_area/(areas[i]+areas[order[1:]]-inter_area)\n\n order = order[1:][IoU<=threshold]\n\n return mask", "def non_maximum_suppression(image):\n # Find local maximas.\n neighborhood = generate_binary_structure(2, 2)\n local_max = maximum_filter(image, footprint=neighborhood) == image\n local_max[image < (image.max() * 0.1)] = False\n\n # Erode areas to single points.\n lbs, num = label(local_max)\n centers = center_of_mass(local_max, lbs, 
np.arange(num) + 1)\n centers = np.stack(centers).round().astype(np.int)\n ret = np.zeros_like(image, dtype=np.bool)\n ret[centers[:, 0], centers[:, 1]] = True\n\n return ret", "def non_maximum_suppression(image):\n # Find local maximas.\n neighborhood = generate_binary_structure(2, 2)\n local_max = maximum_filter(image, footprint=neighborhood) == image\n local_max[image < (image.max() * 0.1)] = False\n\n # Erode areas to single points.\n lbs, num = label(local_max)\n centers = center_of_mass(local_max, lbs, np.arange(num) + 1)\n centers = np.stack(centers).round().astype(np.int)\n ret = np.zeros_like(image, dtype=np.bool)\n ret[centers[:, 0], centers[:, 1]] = True\n\n return ret", "def non_max_suppression(boxes, max_bbox_overlap, scores=None):\n if len(boxes) == 0:\n return []\n\n boxes = boxes.astype(np.float)\n pick = []\n\n x1 = boxes[:, 0]\n y1 = boxes[:, 1]\n x2 = boxes[:, 2] + boxes[:, 0]\n y2 = boxes[:, 3] + boxes[:, 1]\n\n area = (x2 - x1 + 1) * (y2 - y1 + 1)\n if scores is not None:\n idxs = np.argsort(scores)\n else:\n idxs = np.argsort(y2)\n\n while len(idxs) > 0:\n last = len(idxs) - 1\n i = idxs[last]\n pick.append(i)\n\n xx1 = np.maximum(x1[i], x1[idxs[:last]])\n yy1 = np.maximum(y1[i], y1[idxs[:last]])\n xx2 = np.minimum(x2[i], x2[idxs[:last]])\n yy2 = np.minimum(y2[i], y2[idxs[:last]])\n\n w = np.maximum(0, xx2 - xx1 + 1)\n h = np.maximum(0, yy2 - yy1 + 1)\n\n overlap = (w * h) / area[idxs[:last]]\n\n idxs = np.delete(\n idxs, np.concatenate(\n ([last], np.where(overlap > max_bbox_overlap)[0])))\n\n return pick", "def mask_tif(shape_path, main_dir, results_dir):\n file_name_list, path_list = eliminate_nanoverlap(main_dir, shape_path)\n\n rel_orbit_number_list = []\n for i, name in enumerate(file_name_list):\n filename = name[0:28] + \"manifest.safe\"\n path_name = path_list[i]\n rel_orbit_number = \"_\" + xml_extract(path=path_name, file=filename)\n rel_orbit_number_list.append(rel_orbit_number)\n\n shapes = import_polygons(shape_path)\n\n # Print info, what step is currently processed:\n print(\"Cliping overlapping files to ROI...\")\n\n # Create necessary folder for the output:\n VH_folder = results_dir + \"VH/\"\n VH_Asc_folder = VH_folder + \"Asc/\"\n VH_Desc_folder = VH_folder + \"Desc/\"\n if not os.path.exists(VH_folder):\n os.mkdir(VH_folder)\n os.mkdir(VH_Asc_folder)\n os.mkdir(VH_Desc_folder)\n\n VV_folder = results_dir + \"VV/\"\n VV_Asc_folder = VV_folder + \"Asc/\"\n VV_Desc_folder = VV_folder + \"Desc/\"\n if not os.path.exists(VV_folder):\n os.mkdir(VV_folder)\n os.mkdir(VV_Asc_folder)\n os.mkdir(VV_Desc_folder)\n\n # Iterate through all files, which overlap with the ROI (return from \"eliminate_nanoverlap\" function)\n for i, file in enumerate(file_name_list):\n file_name = path_list[i] + file_name_list[i]\n\n if os.path.exists(VH_Asc_folder + file[10:len(file)]):\n continue\n if os.path.exists(VH_Desc_folder + file[10:len(file)]):\n continue\n if os.path.exists(VV_Asc_folder + file[10:len(file)]):\n continue\n if os.path.exists(VV_Desc_folder + file[10:len(file)]):\n continue\n\n # Clip files to extent of ROI:\n src1 = rio.open(file_name)\n out_image, out_transform = rio.mask.mask(src1, [shapes[0]], all_touched=0, crop=True, nodata=np.nan)\n out_meta = src1.meta\n out_meta.update({\"driver\": \"GTiff\",\n \"height\": out_image.shape[1],\n \"width\": out_image.shape[2],\n \"transform\": out_transform})\n\n # Write subsets to corresponding folders and rename files to be sorted by date:\n flight_dir = file_name_list[i][file_name_list[i].index(\"___\") + 
3:file_name_list[i].index(\"___\") + 4]\n polarization = file_name_list[i][file_name_list[i].index(\"grd\") - 3:file_name_list[i].index(\"grd\") - 1]\n if polarization == \"VH\":\n if flight_dir == \"A\":\n with rasterio.open(\n VH_Asc_folder + file[10:len(file)-4] + \"_\" + file[0:3] + rel_orbit_number_list[i] + \".tif\", \"w\",\n **out_meta) as dest:\n dest.write(out_image)\n if flight_dir == \"D\":\n with rasterio.open(\n VH_Desc_folder + file[10:len(file)-4] + \"_\" + file[0:3] + rel_orbit_number_list[i] + \".tif\",\n \"w\", **out_meta) as dest:\n dest.write(out_image)\n if polarization == \"VV\":\n if flight_dir == \"A\":\n with rasterio.open(\n VV_Asc_folder + file[10:len(file)-4] + \"_\" + file[0:3] + rel_orbit_number_list[i] + \".tif\", \"w\",\n **out_meta) as dest:\n dest.write(out_image)\n if flight_dir == \"D\":\n with rasterio.open(\n VV_Desc_folder + file[10:len(file)-4] + \"_\" + file[0:3] + rel_orbit_number_list[i] + \".tif\",\n \"w\", **out_meta) as dest:\n dest.write(out_image)\n return [VH_Asc_folder, VH_Desc_folder, VV_Asc_folder, VV_Desc_folder]", "def py_cpu_nms(dets, scores, thresh): \n # inpurt 8x3 \n x1 = dets[:, 0, 0] \n y1 = dets[:, 0, 1] \n # z1 = dets[:, 0, 2]\n x2 = dets[:, 2, 0] \n y2 = dets[:, 2, 1] \n print('7777777777777',scores.shape)\n # z2 = dets[:, 2, 2] \n # height = dets[:, 4, 2] - dets[:, 0, 2]\n \n areas = (x2 - x1 + 1) * (y2 - y1 + 1) \n #打分从大到小排列,取index \n order = scores.argsort()[::-1] \n #keep为最后保留的边框 \n keep = [] \n while order.size > 0: \n #order[0]是当前分数最大的窗口,肯定保留 \n i = order[0] \n keep.append(i) \n #计算窗口i与其他所有窗口的交叠部分的面积\n xx1 = np.maximum(x1[i], x1[order[1:]]) \n yy1 = np.maximum(y1[i], y1[order[1:]]) \n xx2 = np.minimum(x2[i], x2[order[1:]]) \n yy2 = np.minimum(y2[i], y2[order[1:]]) \n \n w = np.maximum(0.0, xx2 - xx1 + 1) \n h = np.maximum(0.0, yy2 - yy1 + 1) \n inter = w * h \n #交/并得到iou值 \n ovr = inter / (areas[i] + areas[order[1:]] - inter) \n #inds为所有与窗口i的iou值小于threshold值的窗口的index,其他窗口此次都被窗口i吸收 \n inds = np.where(ovr <= thresh)[0] \n #order里面只保留与窗口i交叠面积小于threshold的那些窗口,由于ovr长度比order长度少1(不包含i),所以inds+1对应到保留的窗口\n order = order[inds + 1] \n \n return keep", "def mask(mode: str = 'illuminated', band: str = '78') -> np.ndarray:\n if band in ('7', '8'):\n res = np.full((256, 500), False)\n else:\n res = np.full((256, 1000), False)\n\n res[coords(mode, band)] = True\n\n return res" ]
[ "0.5926733", "0.55196464", "0.5387993", "0.53370196", "0.53234637", "0.52802104", "0.5256077", "0.5233371", "0.5185825", "0.5180145", "0.5169495", "0.5125346", "0.50940484", "0.5078901", "0.5071394", "0.5042755", "0.5040507", "0.50296456", "0.5021151", "0.5018777", "0.5015462", "0.49988395", "0.49958956", "0.49958324", "0.49827543", "0.49827543", "0.49825427", "0.4972003", "0.49282905", "0.4917731" ]
0.6313439
0
Computes the bounding box of each mask in a list of RLE-encoded masks.
def rle_masks_to_boxes(masks):
    if len(masks) == 0:
        return []

    decoded_masks = [
        np.array(mask_util.decode(rle), dtype=np.float32) for rle in masks
    ]

    def get_bounds(flat_mask):
        inds = np.where(flat_mask > 0)[0]
        return inds.min(), inds.max()

    boxes = np.zeros((len(decoded_masks), 4))
    keep = [True] * len(decoded_masks)
    for i, mask in enumerate(decoded_masks):
        if mask.sum() == 0:
            keep[i] = False
            continue
        flat_mask = mask.sum(axis=0)
        x0, x1 = get_bounds(flat_mask)
        flat_mask = mask.sum(axis=1)
        y0, y1 = get_bounds(flat_mask)
        boxes[i, :] = (x0, y0, x1, y1)

    return boxes, np.where(keep)[0]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extract_bboxes(mask):\r\n boxes = np.zeros([mask.shape[-1], 4], dtype=np.int32)\r\n for i in range(mask.shape[-1]):\r\n m = mask[:, :, i]\r\n # Bounding box.\r\n horizontal_indicies = np.where(np.any(m, axis=0))[0]\r\n vertical_indicies = np.where(np.any(m, axis=1))[0]\r\n if horizontal_indicies.shape[0]:\r\n x1, x2 = horizontal_indicies[[0, -1]]\r\n y1, y2 = vertical_indicies[[0, -1]]\r\n # x2 and y2 should not be part of the box. Increment by 1.\r\n x2 += 1\r\n y2 += 1\r\n else:\r\n # No mask for this instance. Might happen due to\r\n # resizing or cropping. Set bbox to zeros\r\n x1, x2, y1, y2 = 0, 0, 0, 0\r\n boxes[i] = np.array([y1, x1, y2, x2])\r\n return boxes.astype(np.int32)", "def extract_bboxes(mask):\n boxes = np.zeros([mask.shape[-1], 4], dtype=np.int32)\n for i in range(mask.shape[-1]):\n m = mask[:, :, i]\n # Bounding box.\n horizontal_indicies = np.where(np.any(m, axis=0))[0]\n vertical_indicies = np.where(np.any(m, axis=1))[0]\n if horizontal_indicies.shape[0]:\n x1, x2 = horizontal_indicies[[0, -1]]\n y1, y2 = vertical_indicies[[0, -1]]\n # x2 and y2 should not be part of the box. Increment by 1.\n x2 += 1\n y2 += 1\n else:\n # No mask for this instance. Might happen due to\n # resizing or cropping. Set bbox to zeros\n x1, x2, y1, y2 = 0, 0, 0, 0\n boxes[i] = np.array([y1, x1, y2, x2])\n return boxes.astype(np.int32)", "def extract_bboxes(mask):\n boxes = np.zeros([mask.shape[-1], 4], dtype=np.int32)\n for i in range(mask.shape[-1]):\n m = mask[:, :, i]\n # Bounding box.\n horizontal_indicies = np.where(np.any(m, axis=0))[0]\n vertical_indicies = np.where(np.any(m, axis=1))[0]\n if horizontal_indicies.shape[0]:\n x1, x2 = horizontal_indicies[[0, -1]]\n y1, y2 = vertical_indicies[[0, -1]]\n # x2 and y2 should not be part of the box. Increment by 1.\n x2 += 1\n y2 += 1\n else:\n # No mask for this instance. Might happen due to\n # resizing or cropping. Set bbox to zeros\n x1, x2, y1, y2 = 0, 0, 0, 0\n boxes[i] = np.array([x1, y1, x2, y2])\n return boxes.astype(np.int32)", "def extract_bboxes(mask):\n boxes = np.zeros([mask.shape[-1],mask.shape[0], 2], dtype=np.int32)\n for i in range(mask.shape[-1]):\n # Bounding box.\n\n for j in range(mask.shape[0]):\n m = mask[j, :, i]\n horizontal_indicies = np.where(m)[0]\n\n if horizontal_indicies.shape[0]:\n x1, x2 = horizontal_indicies[[0, -1]]\n\n # x2 should not be part of the box. Increment by 1.\n x2 += 1\n else:\n # No mask for this instance. Might happen due to\n # resizing or cropping. 
Set bbox to zeros\n x1, x2 = 0, 0\n boxes[i,j] = np.array([x1, x2])\n\n return boxes.astype(np.int32)", "def _batched_mask_to_box(masks: \"torch.Tensor\"):\n # torch.max below raises an error on empty inputs, just skip in this case\n\n if torch.numel(masks) == 0:\n return torch.zeros(*masks.shape[:-2], 4, device=masks.device)\n\n # Normalize shape to Cxheightxwidth\n shape = masks.shape\n height, width = shape[-2:]\n\n # Get top and bottom edges\n in_height, _ = torch.max(masks, dim=-1)\n in_height_coords = in_height * torch.arange(height, device=in_height.device)[None, :]\n bottom_edges, _ = torch.max(in_height_coords, dim=-1)\n in_height_coords = in_height_coords + height * (~in_height)\n top_edges, _ = torch.min(in_height_coords, dim=-1)\n\n # Get left and right edges\n in_width, _ = torch.max(masks, dim=-2)\n in_width_coords = in_width * torch.arange(width, device=in_width.device)[None, :]\n right_edges, _ = torch.max(in_width_coords, dim=-1)\n in_width_coords = in_width_coords + width * (~in_width)\n left_edges, _ = torch.min(in_width_coords, dim=-1)\n\n # If the mask is empty the right edge will be to the left of the left edge.\n # Replace these boxes with [0, 0, 0, 0]\n empty_filter = (right_edges < left_edges) | (bottom_edges < top_edges)\n out = torch.stack([left_edges, top_edges, right_edges, bottom_edges], dim=-1)\n out = out * (~empty_filter).unsqueeze(-1)\n\n # Return to original shape\n out = out.reshape(*shape[:-2], 4)\n return out", "def get_bounds(shape, affine):\n adim, bdim, cdim = shape\n adim -= 1\n bdim -= 1\n cdim -= 1\n # form a collection of vectors for each 8 corners of the box\n box = np.array([[0., 0, 0, 1],\n [adim, 0, 0, 1],\n [0, bdim, 0, 1],\n [0, 0, cdim, 1],\n [adim, bdim, 0, 1],\n [adim, 0, cdim, 1],\n [0, bdim, cdim, 1],\n [adim, bdim, cdim, 1]]).T\n box = np.dot(affine, box)[:3]\n return zip(box.min(axis=-1), box.max(axis=-1))", "def rle_mask_voting(\n top_masks, all_masks, all_dets, iou_thresh, binarize_thresh, method='AVG'\n):\n if len(top_masks) == 0:\n return\n\n all_not_crowd = [False] * len(all_masks)\n top_to_all_overlaps = mask_util.iou(top_masks, all_masks, all_not_crowd)\n decoded_all_masks = [\n np.array(mask_util.decode(rle), dtype=np.float32) for rle in all_masks\n ]\n decoded_top_masks = [\n np.array(mask_util.decode(rle), dtype=np.float32) for rle in top_masks\n ]\n all_boxes = all_dets[:, :4].astype(np.int32)\n all_scores = all_dets[:, 4]\n\n # Fill box support with weights\n mask_shape = decoded_all_masks[0].shape\n mask_weights = np.zeros((len(all_masks), mask_shape[0], mask_shape[1]))\n for k in range(len(all_masks)):\n ref_box = all_boxes[k]\n x_0 = max(ref_box[0], 0)\n x_1 = min(ref_box[2] + 1, mask_shape[1])\n y_0 = max(ref_box[1], 0)\n y_1 = min(ref_box[3] + 1, mask_shape[0])\n mask_weights[k, y_0:y_1, x_0:x_1] = all_scores[k]\n mask_weights = np.maximum(mask_weights, 1e-5)\n\n top_segms_out = []\n for k in range(len(top_masks)):\n # Corner case of empty mask\n if decoded_top_masks[k].sum() == 0:\n top_segms_out.append(top_masks[k])\n continue\n\n inds_to_vote = np.where(top_to_all_overlaps[k] >= iou_thresh)[0]\n # Only matches itself\n if len(inds_to_vote) == 1:\n top_segms_out.append(top_masks[k])\n continue\n\n masks_to_vote = [decoded_all_masks[i] for i in inds_to_vote]\n if method == 'AVG':\n ws = mask_weights[inds_to_vote]\n soft_mask = np.average(masks_to_vote, axis=0, weights=ws)\n mask = np.array(soft_mask > binarize_thresh, dtype=np.uint8)\n elif method == 'UNION':\n # Any pixel that's on joins the mask\n soft_mask = 
np.sum(masks_to_vote, axis=0)\n mask = np.array(soft_mask > 1e-5, dtype=np.uint8)\n else:\n raise NotImplementedError('Method {} is unknown'.format(method))\n rle = mask_util.encode(np.array(mask[:, :, np.newaxis], order='F'))[0]\n top_segms_out.append(rle)\n\n return top_segms_out", "def masks_to_boxes(masks):\n if masks.numel() == 0:\n return torch.zeros((0, 4), device=masks.device)\n\n h, w = masks.shape[-2:]\n\n y = torch.arange(0, h, dtype=torch.float)\n x = torch.arange(0, w, dtype=torch.float)\n y, x = torch.meshgrid(y, x)\n\n x_mask = (masks * x.unsqueeze(0))\n x_max = x_mask.flatten(1).max(-1)[0]\n x_min = x_mask.masked_fill(~(masks.bool()), 1e8).flatten(1).min(-1)[0]\n\n y_mask = (masks * y.unsqueeze(0))\n y_max = y_mask.flatten(1).max(-1)[0]\n y_min = y_mask.masked_fill(~(masks.bool()), 1e8).flatten(1).min(-1)[0]\n\n return torch.stack([x_min, y_min, x_max, y_max], 1)", "def mask_to_bbox(mask):\n xs = np.where(np.sum(mask, axis=0) > 0)[0]\n ys = np.where(np.sum(mask, axis=1) > 0)[0]\n\n if len(xs) == 0 or len(ys) == 0:\n return None\n\n x0 = xs[0]\n x1 = xs[-1]\n y0 = ys[0]\n y1 = ys[-1]\n return np.array((x0, y0, x1, y1), dtype=np.float32)", "def get_bound(box_list):\n box_xyxy_list = []\n for box in box_list:\n box_xyxy = xywh2xyxy(box)\n box_xyxy_list.append(box_xyxy)\n\n box_xyxy_list = np.array(box_xyxy_list)\n x1max, y1max, x2max, y2max = np.amax(box_xyxy_list, axis=0)\n x1min, y1min, x2min, y2min = np.amin(box_xyxy_list, axis=0)\n\n boundbox = xyxy2xywh([x1min, y1min, x2max, y2max])\n return boundbox", "def bounding_boxes(self, detections):\n bboxes = []\n while len(detections) > 0:\n det = detections.pop(0)\n merging = True\n while merging:\n merging = False\n pointer = 0\n while pointer < len(detections):\n if self.get_distance(det, detections[pointer]) <= self.max_distance:\n det = self.merge_boxes(det, detections[pointer])\n merging = True\n detections.pop(pointer)\n else:\n pointer += 1\n if det[4] >= self.min_area:\n bboxes.append(det)\n return bboxes", "def _filter_box_candidates(self, bboxes, labels):\n bbox_w = bboxes[:, 2] - bboxes[:, 0]\n bbox_h = bboxes[:, 3] - bboxes[:, 1]\n valid_inds = (bbox_w > self.min_bbox_size) & \\\n (bbox_h > self.min_bbox_size)\n valid_inds = np.nonzero(valid_inds)[0]\n return bboxes[valid_inds], labels[valid_inds]", "def _get_bounding_boxes(self, imgs, summed_viz, threshold_value=.7):\n self.viz = summed_viz # for debug\n viz = summed_viz\n n_batchs = viz.shape[ 0]\n n_classes = viz.shape[-1]\n \n # viz.shape (100,14,14,20) => (14,14,100,20)\n viz = viz.swapaxes(0,2); viz = viz.swapaxes(0,1)\n \n # Normalize <viz>, image per image (to be in range [-1,1])\n viz = viz / np.max(np.abs(viz), axis=(0,1))\n viz = (viz+1)/2 # range[0,1]\n \n # Resize each summed_viz to its original size (size of input image)\n if viz.shape[:2] != imgs.shape[1:3]:\n viz = np.array(\n [ skimage.transform.resize(viz[:,:,idx], imgs[idx].shape[:2])\n for idx in range(len(imgs))\n if viz.shape[0] != imgs.shape[1]\n ] )\n viz = viz.swapaxes(0,2); viz = viz.swapaxes(0,1)\n \n # Threshold <viz>s to keep values over 70% of its max values\n m_max = threshold_value * viz.max(axis=(0,1))\n viz = viz * (m_max < viz)\n \n # We want a 2d boundind box, so project threshold in xs and ys\n xxs = viz.sum(axis=0)\n yys = viz.sum(axis=1)\n \n # Get some non-thresholded values (left, top... 
of bounding boxes)\n get_lefts = lambda b_id, c_idx: xxs[:,b_id,c_idx].nonzero()[0][ 0]\n get_tops = lambda b_id, c_idx: yys[:,b_id,c_idx].nonzero()[0][-1]\n get_rights = lambda b_id, c_idx: xxs[:,b_id,c_idx].nonzero()[0][-1]\n get_bottoms = lambda b_id, c_idx: yys[:,b_id,c_idx].nonzero()[0][ 0]\n\n # Debug\n # def get_lefts (b_id, c_idx): \n # print xxs[:,b_id,c_idx].nonzero()\n # xxs[:,b_id,c_idx].nonzero()[0][ 0]\n \n # Build the 2d array with first or lasts positions of zeros\n # INNER FUNCTION\n def _get_border_array(f_border=get_lefts):\n return np.array(\n [ map(f_border, [b_idx]*n_classes, range(n_classes))\n for b_idx in range(n_batchs) ]\n )\n \n lefts = _get_border_array(get_lefts)\n tops = _get_border_array(get_tops)\n rights = _get_border_array(get_rights)\n bottoms = _get_border_array(get_bottoms)\n \n return lefts, tops, rights, bottoms", "def bounds(self):\n return self._bboxes[0][0] #TODO: merge all coverages", "def _polygons_to_bboxes(polygons):\n# Build bounding boxes\n bboxes = np.empty([len(polygons), 4])\n for n, p in enumerate(polygons):\n try:\n left, bottom = np.min(p, axis = 0)\n except:\n import pdb\n pdb.set_trace()\n right, top = np.max(p, axis = 0)\n bboxes[n] = [left, bottom, right, top]\n return bboxes", "def im_detect_mask(model, im_scales, boxes):\n assert len(im_scales) == 1, \\\n 'Only single-image / single-scale batch implemented'\n\n M_HEIGHT = cfg.MRCNN.RESOLUTION_H\n M_WIDTH = cfg.MRCNN.RESOLUTION_W\n if boxes.shape[0] == 0:\n pred_masks = np.zeros((0, M, M), np.float32)\n return pred_masks\n\n inputs = {'mask_rois': _get_rois_blob(boxes, im_scales)}\n # Add multi-level rois for FPN\n if cfg.FPN.MULTILEVEL_ROIS:\n _add_multilevel_rois_for_test(inputs, 'mask_rois')\n\n for k, v in inputs.items():\n workspace.FeedBlob(core.ScopedName(k), v)\n workspace.RunNet(model.mask_net.Proto().name)\n\n # Fetch masks\n pred_global_masks = workspace.FetchBlob(\n core.ScopedName('mask_fcn_global_probs')\n ).squeeze()\n pred_char_masks = workspace.FetchBlob(\n core.ScopedName('mask_fcn_char_probs')\n ).squeeze()\n # pred_char_boxes = workspace.FetchBlob(\n # core.ScopedName('mask_fcn_charbox_pred')\n # ).squeeze()\n pred_global_masks = pred_global_masks.reshape([-1, 1, M_HEIGHT, M_WIDTH])\n pred_char_masks = pred_char_masks.reshape([-1, M_HEIGHT, M_WIDTH, 37])\n pred_char_masks = pred_char_masks.transpose([0,3,1,2])\n # pred_char_boxes = pred_char_boxes.reshape([-1, 4, M_HEIGHT, M_WIDTH])\n\n return pred_global_masks, pred_char_masks, None", "def bounds(self):\n return self._bboxes[0][0] #TODO: merge all coverages", "def rectify_bbox(bboxes, max_shape): \n bboxes = np.array(bboxes, np.int32)\n n = bboxes.shape[0]\n if n == 0:\n return bboxes\n\n h, w = max_shape\n\n bboxes[:, 0] = np.maximum(bboxes[:, 0], np.zeros((n)))\n bboxes[:, 0] = np.minimum(bboxes[:, 0], (h-1) * np.ones((n)))\n bboxes[:, 1] = np.maximum(bboxes[:, 1], np.zeros((n)))\n bboxes[:, 1] = np.minimum(bboxes[:, 1], (w-1) * np.ones((n)))\n bboxes[:, 2] = np.maximum(bboxes[:, 2], np.ones((n)))\n bboxes[:, 2] = np.minimum(bboxes[:, 2], h * np.ones((n)) - bboxes[:, 0])\n bboxes[:, 3] = np.maximum(bboxes[:, 3], np.ones((n)))\n bboxes[:, 3] = np.minimum(bboxes[:, 3], w * np.ones((n)) - bboxes[:, 1])\n\n return bboxes", "def detect(self, mask):\n # 1) Return Non zero indices\n det_idx = np.where(mask > 0.0)\n idx_x, idx_y = det_idx[0], det_idx[1]\n # 2) Create 1x1 box for each pixel detected.\n detections = []\n for i in range(0, len(idx_x)):\n x, y = idx_x[i], idx_y[i]\n detections.append((x, y, x+1, y+1, 
1)) # x1, y1, x2, y2, area\n # 3) merge boxes\n bounding_boxes = self.bounding_boxes(detections)\n return bounding_boxes", "def get_contour_bbox_from_raw(raw_mask):\n cnts = grab_contours(\n cv2.findContours(\n raw_mask, \n cv2.RETR_EXTERNAL, \n cv2.CHAIN_APPROX_SIMPLE\n ))\n xywhs = [cv2.boundingRect(cnt) for cnt in cnts]\n xys = [(xywh[0], xywh[1], xywh[0]+xywh[2], xywh[1]+xywh[3]) for xywh in xywhs]\n return sorted(xys, key=lambda x: (x[1], x[0]))", "def bounding_box(self):\n latlon00 = self.ij_to_latlon(-1,-1)\n latlon01 = self.ij_to_latlon(-1,self.domain_size[1]+1)\n latlon11 = self.ij_to_latlon(self.domain_size[0]+1,self.domain_size[1]+1)\n latlon10 = self.ij_to_latlon(self.domain_size[0]+1,-1)\n return (latlon00,latlon01,latlon11,latlon10)", "def _batched_mask_to_box_tf(masks: \"tf.Tensor\"):\n\n if tf.size(masks) == 0:\n return tf.zeros([*masks.shape[:-2], 4])\n\n # Normalize shape to Cxheightxwidth\n shape = shape_list(masks)\n height, width = shape[-2:]\n\n # Get top and bottom edges\n in_height = tf.reduce_max(masks, axis=-1)\n in_height_coords = in_height * tf.range(height)[None, :]\n bottom_edges = tf.reduce_max(in_height_coords, axis=-1)\n in_height_coords = in_height_coords + height * (~in_height)\n top_edges = tf.reduce_min(in_height_coords, axis=-1)\n\n # Get left and right edges\n in_width, _ = tf.reduce_max(masks, axis=-2)\n in_width_coords = in_width * tf.range(width)[None, :]\n right_edges, _ = tf.reduce_max(in_width_coords, axis=-1)\n in_width_coords = in_width_coords + width * (~in_width)\n left_edges, _ = tf.reduce_min(in_width_coords, axis=-1)\n\n # If the mask is empty the right edge will be to the left of the left edge.\n # Replace these boxes with [0, 0, 0, 0]\n empty_filter = (right_edges < left_edges) | (bottom_edges < top_edges)\n out = tf.stack([left_edges, top_edges, right_edges, bottom_edges], axis=-1)\n out = out * tf.expand_dims(~empty_filter, -1)\n\n # Return to original shape\n out = tf.reshape(out, *shape[:-2], 4)\n return out", "def compute_bb(self):\n all_shapes = list(self.parts.values()) + list(self.edges.values())\n bbox_vertices = cascaded_union(all_shapes).envelope.exterior.coords.xy\n min_x = min(bbox_vertices[0])\n max_x = max(bbox_vertices[0])\n min_y = min(bbox_vertices[1])\n max_y = max(bbox_vertices[1])\n return [min_x, max_x,min_y, max_y]", "def compute_bbox_mask_targets_and_label(rois, instances, overlaps, labels, seg, flipped, for_maskfcn):\n # Ensure ROIs are floats\n rois = rois.astype(np.float32, copy=False)\n\n # Sanity check\n assert len(rois) == len(overlaps), 'number of proposal ROIs and max overlap with gt bbox does not match'\n\n fg_indexes = np.where(overlaps >= config.TRAIN.BBOX_REGRESSION_THRESH)[0]\n fg_rois = rois[fg_indexes, :]\n\n if for_maskfcn:\n mask_targets, mask_label = \\\n compute_mask_and_label_fcn(fg_rois, instances[fg_indexes], labels[fg_indexes], seg, flipped)\n else:\n mask_targets, mask_label = \\\n compute_mask_and_label(fg_rois, instances[fg_indexes], labels[fg_indexes], seg, flipped)\n return mask_targets, mask_label, fg_indexes", "def show_bounding_boxes(dir_path: str) -> None:\r\n \r\n for image_file in glob.glob(dir_path + '/*.png'):\r\n image = cv2.imread(image_file)\r\n height, width, _ = image.shape\r\n\r\n with open(image_file.split(\".\")[0] +'.txt', 'r') as reader:\r\n annotations = reader.readlines()\r\n for annot in annotations:\r\n annot = annot.split()\r\n \r\n # Calculation of top left point and bottom right point of the bounding box \r\n x1, y1 = int((float(annot[1]) - 
float(annot[3])/2)*width), int((float(annot[2]) - float(annot[4])/2)*height)\r\n x2, y2 = int((float(annot[1]) + float(annot[3])/2)*width), int((float(annot[2]) + float(annot[4])/2)*height)\r\n \r\n # BGR color format\r\n if annot[0] == '0':\r\n color = (0,255,0) # Mask is worn correctly (Green color)\r\n label = 'Good'\r\n else:\r\n color = (0,0,255) # Mask is either not worn correctly or not worn at all (Red color)\r\n label = 'Bad'\r\n \r\n cv2.putText(image,\r\n label, \r\n (x1, y1 - 10),\r\n fontFace=cv2.FONT_HERSHEY_TRIPLEX,\r\n fontScale=0.5, \r\n color=color,\r\n thickness=1) \r\n \r\n cv2.rectangle(image, (x1, y1), (x2, y2), color, thickness=1)\r\n \r\n k = cv2.waitKey(0) & 0xFF\r\n cv2.imshow(image_file.split(\"sss\")[-1], image)\r\n if k == 27:\r\n cv2.destroyAllWindows()\r\n break", "def get_bounding_box(img):\n rows = np.any(img, axis=1)\n cols = np.any(img, axis=0)\n rmin, rmax = np.where(rows)[0][[0, -1]]\n cmin, cmax = np.where(cols)[0][[0, -1]]\n # due to python indexing, need to add 1 to max\n # else accessing will be 1px in the box, not out\n rmax += 1\n cmax += 1\n return [rmin, rmax, cmin, cmax]", "def draw_bbox(image, bboxes, masks, class_ids, class_names, scores, colors, show_label=True, show_mask=True):\n image_h, image_w, _ = image.shape\n\n for i, bbox in enumerate(bboxes):\n y1, x1, y2, x2 = bbox[i]\n coor = np.array([x1, y1, x2, y2], dtype=np.int32)\n fontScale = 0.5\n score = scores[i]\n class_ind = int(class_ids[i])\n bbox_color = colors[class_ind]\n bbox_thick = int(0.6 * (image_h + image_w) / 600)\n c1, c2 = (coor[0], coor[1]), (coor[2], coor[3])\n cv2.rectangle(image, c1, c2, bbox_color, bbox_thick)\n\n if show_label:\n bbox_mess = '%s: %.2f' % (class_names[class_ind], score)\n t_size = cv2.getTextSize(bbox_mess, 0, fontScale, thickness=bbox_thick // 2)[0]\n cv2.rectangle(image, c1, (c1[0] + t_size[0], c1[1] - t_size[1] - 3), bbox_color, -1) # filled\n\n cv2.putText(image, bbox_mess, (c1[0], c1[1] - 2), cv2.FONT_HERSHEY_SIMPLEX,\n fontScale, (0, 0, 0), bbox_thick // 2, lineType=cv2.LINE_AA)\n\n # Mask\n mask = masks[:, :, i]\n if show_mask:\n image = apply_mask(image, mask, bbox_color)\n\n # Mask Polygon\n # Pad to ensure proper polygons for masks that touch image edges.\n padded_mask = np.zeros(\n (mask.shape[0] + 2, mask.shape[1] + 2), dtype=np.uint8)\n padded_mask[1:-1, 1:-1] = mask\n contours = find_contours(padded_mask, 0.5)\n pts = np.array(contours[0], np.int32)\n pts = pts.reshape((-1, 1, 2))\n # image = cv2.polylines(image, [pts], True, bbox_color)\n\n return image", "def getbbox(self):\r\n img_ = (self._instance > 0)\r\n rows = np.any(img_, axis=1)\r\n cols = np.any(img_, axis=0)\r\n rmin, rmax = np.argmax(rows), img_.shape[0] - 1 - np.argmax(np.flipud(rows))\r\n cmin, cmax = np.argmax(cols), img_.shape[1] - 1 - np.argmax(np.flipud(cols))\r\n return (rmin, rmax, cmin, cmax)", "def containing(*boxes):\n if not boxes:\n raise ValueError('At least one bounding box must be specified')\n boxes_objs = map(BoundingBox, boxes)\n start = boxes_objs[0].start\n end = boxes_objs[0].end\n for box in boxes_objs[1:]:\n start = np.minimum(start, box.start)\n end = np.maximum(end, box.end)\n return BoundingBox(start=start, end=end)", "def _resize_bboxes(self, results):\n img_shape = results['img_shape']\n for key in results.get('bbox_fields', []):\n bboxes = []\n for box in results[key]:\n tmp_box = np.array(box, dtype=np.float32)\n tmp_box[0::2] *= results['scale_factor'][0]\n tmp_box[1::2] *= results['scale_factor'][1]\n if self.bbox_clip_border:\n tmp_box[0::2] 
= np.clip(tmp_box[0::2], 0, img_shape[1])\n tmp_box[1::2] = np.clip(tmp_box[1::2], 0, img_shape[0])\n bboxes.append(tmp_box)\n if len(results[key]) > 0:\n results[key] = bboxes" ]
[ "0.7248413", "0.7208978", "0.7207902", "0.71380645", "0.67405194", "0.664477", "0.6614984", "0.65787214", "0.6552082", "0.6529659", "0.65243286", "0.6360303", "0.6339058", "0.6239182", "0.62314016", "0.620707", "0.61381495", "0.6109044", "0.6088461", "0.60875016", "0.6082529", "0.6080452", "0.6073387", "0.6072913", "0.607262", "0.6072384", "0.60630107", "0.6060522", "0.6060028", "0.602551" ]
0.7486755
0
Home redirects to /register
def home_page(): return redirect('/register')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def home():\n\n if not current_user.is_authenticated:\n return redirect(url_for('login'))\n else:\n return redirect(url_for('show_registrations'))", "def home_page():\n return redirect('/users')", "def welcome(self):\n if self.user:\n return self.render('welcome.html')\n self.redirect('/register')", "def register(request):\n if request.method == 'GET':\n # Use Django built-in form for registering new users\n form = UserCreationForm()\n # Send form to the template\n context = {'form': form}\n return render(request, 'registration/register.html', context)\n elif request.method == 'POST':\n filled_form = UserCreationForm(request.POST)\n if filled_form.is_valid():\n # CUSTOM USER REGISTRATION LOGIC\n # get the created user\n registered_user = filled_form.save()\n # logic registered user (as django.contrib.auth.models.User)\n login(request, registered_user)\n return redirect('biologs:home') # redirect to URL\n else:\n # Send invalid form message\n context = {'form': filled_form}\n return render(request, 'registration/register.html', context)\n else:\n raise Http404", "def register():\n form = RegistrationsForm()\n state = process_register(form)\n if state == LoginState.SHOW_LOGIN:\n flash('User Successfully Registered', 'success')\n return redirect(url_for('user_view.login'))\n elif state == LoginState.SHOW_REGISTER:\n return render_template('user/register.html', form=form)", "def register():\n\n # forget any user_id\n session.clear()\n\n # if user reached route via POST (as by submitting a form via POST)\n if request.method == \"POST\":\n\n # ensure username was submitted\n if not request.form.get(\"username\"):\n return apology(\"must provide username\")\n\n # ensure password was submitted\n elif not request.form.get(\"password\"):\n return apology(\"must provide password\")\n \n # ensure password was submitted\n elif not request.form.get(\"confirm password\"):\n return apology(\"must confirm password\")\n \n elif request.form.get(\"confirm password\") != request.form.get(\"password\"):\n return apology(\"Please re-enter password\")\n \n # query database for username\n rows = db.execute(\"INSERT INTO users (username, hash) VALUES (:username, :hash)\", username=request.form.get(\"username\"), hash=hash_password(request.form.get(\"password\")))\n\n # redirect user to home page\n return redirect(url_for(\"index\"))\n\n # else if user reached route via GET (as by clicking a link or via redirect)\n else:\n return render_template(\"register.html\")", "def register(request):\n if request.method != \"POST\":\n form = UserCreationForm()\n else:\n form = UserCreationForm(data=request.POST)\n if form.is_valid():\n new_user = form.save()\n login(request, new_user)\n return redirect(\"pybasic:index\")\n context = {\"form\": form}\n return render(request, \"users/register.html\", context)", "def register(request):\n\n # If logged in, won't be able to acces /register path\n # user redirected back to home store page\n if request.user.is_authenticated:\n return redirect('store')\n else:\n form = RegisterUserForm()\n if request.method == 'POST':\n form = RegisterUserForm(request.POST)\n if form.is_valid(): #if form is valid, save the form\n user = form.save()\n # clean our form and only get the username\n username = form.cleaned_data.get('username')\n messages.success(request, \"Hello {0}, you have successful registered for an account\".format(username))\n return redirect('login') #redirect user to login page is the form is valid\n context = {'form': form}\n return render(request, 'store/register.html', 
context)", "def register():\n\n return render_template(\"auth/registerHere.html\")", "def register(request):\n if request.method == \"POST\":\n form = RegisterForm(request.POST)\n if form.is_valid():\n user = form.save()\n login(request, user)\n return HttpResponseRedirect(reverse(\"auctions:index\"))\n return render(request, \"auctions/register.html\", {\n \"form\": form\n })\n return render(request, \"auctions/register.html\", {\n \"form\": RegisterForm()\n })", "def dispatch(self, request, *args, **kwargs):\n if request.user.is_authenticated:\n return HttpResponseRedirect(reverse(\"home\"))\n return super(RegisterView, self).dispatch(request, *args, **kwargs)", "def get(self):\n if self.user:\n self.render('welcome.html', username = self.user.name)\n else:\n self.redirect('/signup')", "def register(request):\n register_form = UserCreationForm()\n return render(request, 'metro_app/register.html', {'form': register_form})", "def register(request): \n\tif request.method != 'POST':\n\t\tform = UserCreationForm() \n\telse: \n\t\tform = UserCreationForm(data = request.POST)\n\n\tif form.is_valid():\n\t\tnew_user = form.save()\n\t\tauthenticated_user = authenticate(username=new_user.username, password=request.POST['password1']) \n\t\tlogin(request, authenticated_user) \n\t\treturn HttpResponseRedirect(reverse('Toeic:index'))\n\n\tcontext = {'form': form}\n\treturn render(request, 'users/register.html', context)", "def register():\n if g.user:\n return redirect(url_for('user_page'))\n error = None\n if request.method == 'POST':\n if not request.form['username']:\n error = 'You have to enter a username'\n elif not request.form['email'] or \\\n '@' not in request.form['email']:\n error = 'You have to enter a valid email address'\n elif not request.form['password']:\n error = 'You have to enter a password'\n elif request.form['password'] != request.form['password2']:\n error = 'The two passwords do not match'\n elif get_user_id(request.form['username']) is not None:\n error = 'The username is already taken'\n else:\n db = get_db()\n db.execute('''insert into user (\n username, email, pw_hash) values (%s, %s, %s)''',\n [request.form['username'], request.form['email'],\n hash_password(request.form['password'])])\n db.commit()\n flash('You were successfully registered and can login now')\n return redirect(url_for('login'))\n return render_template('register.html', error=error)", "def register():\n\n from .forms import RegisterForm\n\n form = RegisterForm(request.form)\n\n if form.validate_on_submit():\n username = request.form['username']\n password = request.form['password1']\n app.add_user_and_password(username, password)\n logger.info('Created account for ' + username + '.')\n\n if \"rememberMe\" in request.form:\n user = User()\n user.id = username\n session['username'] = username\n session['registrations'] = []\n login_user(user, remember=True)\n logger.info('Logged ' + username + ' in after account creation.')\n\n return redirect(url_for('home'))\n\n return render_template('signup.html', form=form)", "def register(request):\n if not settings.BMAT_ALLOW_REGISTER:\n return render(request, \"users/no_register.html\", {})\n \n if request.method == \"GET\":\n return render(request, \"users/register.html\", {\"form\":CustomUserCreationForm()})\n \n elif request.method == \"POST\":\n f = CustomUserCreationForm(data=request.POST)\n \n if not f.is_valid():\n return render(request, \"users/register.html\", {\"form\":f})\n \n u = f.save(commit=False)\n \n u.email = f.cleaned_data.get(\"email\", \"\")\n 
u.save()\n \n u = authenticate(username=u.username, password=f.cleaned_data[\"password1\"])\n alogin(request, u)\n \n return redirect(\"/\")", "def register():\n \n form = RegistrationForm()\n \n # if the current user is authenticated then redirected to the home page\n if current_user.is_authenticated:\n return redirect(url_for('home'))\n if form.validate_on_submit():\n # encrypt the password using bcrypt\n hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')\n if form.picture.data:\n # the save_picture function from the utils module for saving the image in cropped size\n image_file = save_picture(form.picture.data)\n \n # setting the user with the data from the register form.\n user = User(username=form.username.data, email=form.email.data, \n password=hashed_password, image_file=image_file)\n db.session.add(user)\n db.session.commit()\n flash('Your account has been created', 'sucess')\n return redirect(url_for('home'))\n return render_template('register.html', form=form)", "def register():\n username = request.POST.get('username', '')\n password = request.POST.get('password', '')\n #email_addr = request.POST.get('email_addr', '')\n aaa.register(username, password, \"[email protected]\")\n redirect(\"/\")", "def get(self,request,*args,**kwargs):\n form = UserCreationForm()\n context = {'form':form}\n template = 'authentication/register.html'\n return render(request,template,context)", "def register():\n\n registration_form = RegistrationForm()\n if registration_form.validate_on_submit():\n user = User(email=registration_form.email.data,\n username=registration_form.username.data,\n first_name=registration_form.first_name.data,\n last_name=registration_form.last_name.data,\n password=registration_form.password.data)\n\n # adding a user to the database\n db.session.add(user)\n db.session.commit()\n flash('You have been successfully registered!')\n\n # redirect to the main search page\n return redirect(url_for('home.search'))\n\n return render_template('auth/register.html', form=registration_form, title='Register')", "def register():\n if request.method == \"POST\":\n username_exist = mongo.db.users.find_one(\n {\"username\": request.form.get(\"username\").lower()})\n if username_exist:\n flash(\n \"Username is already in use, please choose a different one.\")\n\n return redirect(url_for(\"register\"))\n\n register = {\n \"username\": request.form.get(\"username\").lower(),\n \"password\": generate_password_hash(request.form.get(\"password\"))\n }\n mongo.db.users.insert_one(register)\n\n session[\"user\"] = request.form.get(\"username\").lower()\n flash(\"You are now registred\")\n\n return render_template(\"profile.html\")\n return render_template(\"register.html\")", "def get(self):\r\n return_url = self.request.get(\"return_url\")\r\n template_values = {\r\n \"user_form\" : User.to_form(return_url, mode=\"add\")\r\n }\r\n self.render_out(\"templates/register.html\", template_values)", "def register_view():\n form = RegisterForm()\n if form.validate_on_submit():\n user = User.query.filter_by(email=form.email.data).first()\n\n # Check if user has already existed in the database\n if user:\n flash(\"Email already registered.\")\n \n # New user creation\n user = User()\n user.email = form.email.data\n user.hash_password(form.password.data)\n \n db.session.add(user)\n db.session.commit()\n\n login_user(user)\n return redirect(url_for(\"profiles.create_view\"))\n return render_template(\"register.html\", form=form)", "def register_page():\n form = addUser()\n\n 
if form.validate_on_submit():\n username=form.username.data\n password=form.password.data\n email=form.email.data\n first_name=form.first_name.data\n last_name=form.last_name.data\n \n new_user = User.register(username=username, password=password, email=email, first_name=first_name, last_name=last_name)\n\n db.session.add(new_user)\n db.session.commit()\n\n session[\"user\"] = new_user.username\n return redirect(f'/users/{username}')\n else:\n return render_template(\"reg_form.html\", form=form)", "def registration(request):\n if request.user.is_authenticated:\n return redirect(reverse('index'))\n\n if request.method == 'POST':\n registration_form = UserRegistrationForm(request.POST)\n\n if registration_form.is_valid():\n registration_form.save()\n user = auth.authenticate(\n email=request.POST['email'], password=request.POST['password1'])\n\n if user:\n auth.login(user=user, request=request)\n messages.success(request, 'You have been registered!')\n return redirect(reverse('index'))\n else:\n messages.error(\n request, 'Unable to register. Please try again later.')\n else:\n registration_form = UserRegistrationForm()\n\n return render(request, 'signup.html', {\"registration_form\": registration_form})", "def register(request):\n if request.method == 'POST':\n user_form = UserRegistrationForm(request.POST)\n if user_form.is_valid():\n user_form.save()\n\n user = auth.authenticate(request.POST.get('email'),\n password=request.POST.get('password1'))\n\n if user:\n auth.login(request, user)\n messages.success(request, \"You have successfully registered\")\n return redirect(reverse('homepage'))\n\n else:\n messages.error(request, \"unable to log you in at this time!\")\n else:\n user_form = UserRegistrationForm()\n\n args = {'user_form': user_form}\n return render(request, 'register.html', args)", "def register(request):\n if request.user.is_authenticated():\n \"\"\"\n If current user already login website, \\\n don't need to register a new one, \\\n go to user center directly.\n \"\"\"\n return HttpResponseRedirect(\"usercenter\")\n\n errors = []\n try:\n if request.method == 'POST':\n username = request.POST.get('memUserId', '')\n password1 = request.POST.get('memPassword', '')\n password_confirm = request.POST.get('password_confirm', '')\n if password1 != password_confirm:\n errors.append(\"两次输入的密码不一致!\")\n return render_to_response(\"/\", \\\n RequestContext(request, {'memUserId': username, 'errors': errors}))\n if User.objects.filter(username=username):\n errors.append(\"该用户名已存在!\")\n return render_to_response(\"/\", \\\n RequestContext(request, {'memUserId': username, 'errors': errors}))\n user = User()\n user.username = username\n user.set_password(password1)\n user.is_staff = 1\n user.save()\n\n user = auth.authenticate(username=username, password=password1)\n auth.login(request, user)\n return render_to_response(\"user/usercenter.html\", RequestContext(request))\n\n except Exception, e:\n errors.append(str(e))\n return render_to_response(\"user/userregister.html\", \\\n RequestContext(request, {'memUserId': '', 'errors': errors}))", "def sign_up():\n if request.method == 'POST':\n result = register(request.form['name'], request.form['username'],\n request.form['password'], request.form['rpt_password'])\n if result == \"Registration successful\":\n flash(result, 'info')\n return redirect(url_for('sign_in'))\n flash(result, 'warning')\n return render_template('register.html')", "def register():\r\n # TODO: re-enable csrf\r\n form = RegisterForm(request.form)\r\n if request.method == 
'POST' and form.validate():\r\n account = model.user.User(fullname=form.fullname.data,\r\n name=form.name.data,\r\n email_addr=form.email_addr.data)\r\n account.set_password(form.password.data)\r\n # account.locale = get_locale()\r\n db.session.add(account)\r\n db.session.commit()\r\n login_user(account, remember=True)\r\n flash(gettext('Thanks for signing-up'), 'success')\r\n return redirect(url_for('home.home'))\r\n if request.method == 'POST' and not form.validate():\r\n flash(gettext('Please correct the errors'), 'error')\r\n return render_template('account/register.html',\r\n title=gettext(\"Register\"), form=form)" ]
[ "0.7688918", "0.7417969", "0.71906203", "0.70102954", "0.69529814", "0.6927726", "0.6917957", "0.68358433", "0.6789156", "0.67626184", "0.67475903", "0.67112434", "0.67056745", "0.670232", "0.6696272", "0.6661629", "0.66303813", "0.66194004", "0.6612684", "0.6611185", "0.6606997", "0.6606547", "0.65992194", "0.6594149", "0.65936226", "0.65869635", "0.657558", "0.65721834", "0.6548586", "0.65450066" ]
0.8752239
0
Add feedback for a user
def add_feedback(username): if 'username' in session: form = FeedbackForm() if form.validate_on_submit(): feedback_data = generate_feedback_data(form, username) new_feedback = Feedback.make_feedback(feedback_data) db.session.add(new_feedback) db.session.commit() flash('Feedback added', 'success') return redirect(f'/users/{username}') return render_template('add_feedback.html', form=form) flash("You must be logged in to do that!") return redirect('/login')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_feedback(username):\n form = addFeedback()\n\n if \"user\" not in session: \n flash(\"Not logged in\")\n return redirect('/login')\n \n elif form.validate_on_submit():\n title = form.title.data\n content = form.content.data\n recipient = username\n user = session[\"user\"]\n\n new_feedback = Feedback(title=title, content=content, recipient=recipient, user=user)\n db.session.add(new_feedback)\n db.session.commit()\n\n flash(\"Added feedback\")\n return redirect(f'/users/{username}')\n else:\n return render_template(\"feedback_form.html\", form=form, username=username)", "def show_add_feedback(username):\n\n if \"username\" not in session or username != session['username']:\n flash(\"You do not have permission to view this content.\")\n return redirect(\"/\")\n else:\n form = AddFeedbackForm()\n \n \n \n if form.validate_on_submit():\n title = form.title.data\n content = form.content.data\n \n \n post = Feedback(title=title, content=content, username=username)\n db.session.add(post)\n db.session.commit()\n flash(f\"Feedback Posted!\", \"success\")\n return redirect(f\"/users/{username}\")\n \n else:\n \n return render_template(\n \"add_feedback.html\", form=form)", "def feedback(request):\n feedback = request.POST.get('feedback', None)\n if not feedback:\n return {\n 'res': 'failed'\n }\n feedback.replace('\\n', '<br>')\n user = request.user\n subject = email_templates.feedback['subject']\n content = email_templates.feedback['content'] % \\\n (user.username, feedback)\n admin_emails = [admin[1] for admin in ADMINS]\n email_users(subject, content, admin_emails,\n from_email=user.email)\n return {\n 'res': 'success'\n }", "async def feedback(self, ctx, *, feedback):\n url = os.environ.get(\"FEEDBACK_WEBHOOK\", None)\n if url:\n webhook = Webhook.from_url(url, adapter=RequestsWebhookAdapter())\n embed = discord.Embed(description=feedback, colour=discord.Colour.teal())\n embed.set_author(name=f\"{ctx.author.name}#{ctx.author.discriminator}\", icon_url=ctx.author.avatar_url)\n embed.set_footer(text=f\"User id: {ctx.author.id}\")\n webhook.send(embed=embed)\n await ctx.send(embed=embeds.success(\"Sent the feedback!\"))\n else:\n await ctx.send(embed=embeds.error(\"This command is disabled.\"))", "def post_feedback(self, content):\n payload = {\n \"content\": content,\n }\n r = self.request(\"post\", USER_RESOURCE_URL, payload=payload)\n self.check_and_raise(r)", "def add_feedback(username):\n\n if 'username' not in session or username != session['username']:\n flash('Please login first!')\n return redirect('/login')\n\n form = FeedbackForm()\n\n if form.validate_on_submit():\n title = form.title.data\n content = form.content.data\n\n feedback = Feedback(title=title, content=content, username=username)\n\n db.session.add(feedback)\n db.session.commit()\n\n return redirect(f'/users/{feedback.username}')\n\n else:\n return render_template('feedback/new.html', form=form)", "def record_user_feedback(self, context, user_answer, bool_qa=False):\n self.feedback_records.append((context, user_answer))\n\n if bool_qa:\n self.questioned_tags.append((context, user_answer))\n self.q_counter += 1 # number of questions + 1", "def new_feedback(username: str):\n\n if \"username\" not in session:\n raise Unauthorized()\n\n form = FeedbackForm()\n\n if form.validate_on_submit():\n data = { key: val for key, val in form.data.items() if key != \"csrf_token\" }\n\n feedback = Feedback(**data, from_username=session[\"username\"], to_username=username)\n db.session.add(feedback)\n db.session.commit()\n\n return 
redirect(f\"/users/{username}\")\n\n return render_template(\"feedback.html\",\n form = form,\n username = session[\"username\"],\n fusername = username\n )", "async def feedback(self, ctx, *, message):\n channel = self.bot.get_channel(config.feedback_channel) # feedback chanel in support server\n\n embed = discord.Embed(title='New Feedback!',\n description=message,\n color=self.bot.color)\n embed.add_field(name='Author',\n value=ctx.author.mention)\n embed.add_field(name='Server',\n value=ctx.guild.name)\n if ctx.message.attachments:\n embed.add_field(name='Attachments',\n value='\\n'.join(f'[{file.filename}]({file.url})' for file in ctx.message.attachments),\n inline=False)\n embed.set_footer(text='Vote on this submissions using the reactions so I can determine what to focus on!')\n\n message = await channel.send(embed=embed)\n await message.add_reaction('<:upvote:651325140663140362>')\n await message.add_reaction('<:downvote:651325233105600544>')\n await ctx.send('Thank you for your submission! '\n 'If you haven\\'t already, consider joining the support server with `support`.')", "def feedback():\n return render_template(\"feedback.html\")", "def feedback(ctx, message):\n client = ctx.obj[\"client\"]\n\n if len(message) > 0:\n message = \" \".join(message)\n else:\n message = click.edit(\n text=\"Type your message here. \" + \"Save and exit to send, or just exit to abort.\",\n require_save=True,\n )\n if not message:\n click.echo(\"Aborted.\")\n else:\n click.echo(\"Posting feedback to the Spell team\" + ellipses(ctx.obj[\"utf8\"]))\n with api_client_exception_handler():\n logger.info(\"Sending feedback\")\n client.post_feedback(message)\n click.echo(\n \"Post complete. Thanks so much for your feedback. We'll look into it right away!\"\n )", "def _feedback_email(email, body, kind, name='', reply_to = ''):\r\n Email.handler.add_to_queue(c.user if c.user_is_loggedin else None,\r\n None, [feedback], name, email,\r\n datetime.datetime.now(),\r\n request.ip, kind, body = body,\r\n reply_to = reply_to)", "def add_feedback(\n self, feedback_result: FeedbackResult = None, **kwargs\n ) -> None:\n\n if feedback_result is None:\n feedback_result = FeedbackResult(**kwargs)\n else:\n feedback_result.update(**kwargs)\n\n self.db.insert_feedback(feedback_result=feedback_result)", "def update_feedback(feedback_id: int):\n\n feedback = Feedback.query.get_or_404(feedback_id)\n\n if \"username\" not in session or session[\"username\"] != feedback.from_username:\n raise Unauthorized()\n\n form = FeedbackForm(obj=feedback)\n if form.validate_on_submit():\n feedback.title = form.title.data\n feedback.content = form.content.data\n\n db.session.commit()\n\n return redirect(f\"/users/{feedback.to_username}\")\n\n return render_template(\"feedback.html\",\n form = form,\n feedback = feedback,\n username = session[\"username\"]\n )", "def add_feedback(self, feedback):\n self.feedback.append(feedback)\n if not isinstance(feedback.parent, (int, str)) and feedback.parent is not None:\n feedback.parent._get_child_feedback(feedback, True)\n self.execute_hooks('pedal.report', 'add_feedback', (feedback,))\n return feedback", "def send_feedback(app: Flask, feedback: str) -> None:\n mail = Mail(app)\n try:\n msg = Message(\n \"Feedback\",\n sender=\"User\",\n recipients=[app.config[\"MAIL_USERNAME\"]],\n body=feedback,\n )\n mail.send(msg)\n except KeyError:\n print(\"$MAIL_USERNAME not configured\")\n print(f'Feedback: \"{feedback}\"')", "def add_user_form():\n\n return render_template(\"add_user.html\", headline=\"Add 
New Blogly User\")", "def grabFeedback(self, message): #$NON-NLS-1$\r", "def new_feedback(sender, instance, created=False, **kwargs):\n if created:\n send_templated_email(\n [email for name, email in settings.ADMINS],\n 'emails/new_feedback',\n { 'feedback': instance, }\n )", "def send_text_to_user(user):", "async def warning_add(\n self, context: Context, user: discord.User, *, reason: str = \"Not specified\"\n ) -> None:\n member = context.guild.get_member(user.id) or await context.guild.fetch_member(\n user.id\n )\n total = await db_manager.add_warn(\n user.id, context.guild.id, context.author.id, reason\n )\n embed = discord.Embed(\n description=f\"**{member}** was warned by **{context.author}**!\\nTotal warns for this user: {total}\",\n color=0x9C84EF,\n )\n embed.add_field(name=\"Reason:\", value=reason)\n await context.send(embed=embed)\n try:\n await member.send(\n f\"You were warned by **{context.author}** in **{context.guild.name}**!\\nReason: {reason}\"\n )\n except:\n # Couldn't send a message in the private messages of the user\n await context.send(\n f\"{member.mention}, you were warned by **{context.author}**!\\nReason: {reason}\"\n )", "def add_thankyou():\n\n final_thanks = \"Thank you for taking our survey\"\n\n return render_template(\n \"thankyou.html\",\n thanks=final_thanks\n )", "def Feedback(parent, text=None):\n dialog = sppasFeedbackDialog(parent)\n if text is not None:\n dialog.SetBodyText(text)\n response = dialog.ShowModal()\n dialog.Destroy()\n return response", "def feedback(request, item_id):\n keys = ('score', 'review', 'made_on', 'usefulness',\n 'user.id', 'user.username')\n q = \"\"\"SELECT score, review, made_on, usefulness, f.user_id, username\n FROM feedback f\n INNER JOIN auth_user ON f.user_id = auth_user.id\n LEFT JOIN (SELECT item_id, user_id, AVG(usefulness) AS usefulness\n FROM rating GROUP BY item_id, user_id) r\n ON f.item_id = r.item_id AND f.user_id = r.user_id\n WHERE f.item_id = %s\"\"\"\n\n if request.method == 'POST':\n if not request.user.is_authenticated:\n raise PermissionDenied(NOT_LOGGED_IN)\n uid = request.user.id\n\n s = \"\"\"INSERT INTO feedback (user_id, item_id, score, review, made_on)\n VALUES (%s, %s, %s, %s, NOW())\"\"\"\n try:\n rq = loads(request.body)\n sql(s, uid, item_id, int(rq['score']), rq['review'])\n except (ValueError, KeyError):\n return None\n\n pg = pagination(request)\n pg['sort'].append('-usefulness')\n return (obj(i, keys) for i in sql(q + page(**pg), item_id))", "def update_feedback(feedback_id):\n\n feedback = Feedback.query.get(feedback_id)\n\n if 'username' not in session or feedback.username != session['username']:\n flash('Please login first!')\n return redirect('/login')\n\n form = FeedbackForm(obj=feedback)\n\n if form.validate_on_submit():\n feedback.title = form.title.data\n feedback.content = form.content.data\n\n db.session.commit()\n\n return redirect(f'/users/{feedback.username}')\n\n return render_template('/feedback/edit.html', form=form, feedback=feedback)", "def post_actions(request, form):\n if form.is_valid():\n form = form.save(commit=False)\n if request.user.is_authenticated:\n profile = UserProfile.objects.get(user=request.user)\n # Attach the user's profile to the form\n form.user_profile = profile\n form.save()\n messages.success(request, 'Message sent successfully')\n return form\n else:\n messages.error(request,\n ('Message failed. 
Please ensure '\n 'the form is valid.'))\n return False", "def thank(user_id, review_id, note):\n if (Thank.objects.filter(giver=user_id, review=review_id) or\n user_id == Review.objects.get(pk=review_id).user_id):\n return \"You can't thank your own review!\"\n \n form = ThankForm({\n 'giver': user_id,\n 'review': review_id,\n 'note': note\n })\n if form.is_valid():\n form.save()\n return False\n return \"You already thanked this review!\"", "def take_feedback(request):\n from_num = request.POST.get(\"From\", \"\")\n feedback = request.POST.get(\"Body\", \"\")\n logger.debug(\"Feedback from %s: '%s'\" % (from_num, feedback))\n numkey = from_num[2:]\n try:\n possiblePlayers = Player.objects.filter(cell=numkey)\n for p in possiblePlayers:\n p.feedback = p.feedback + ' ' + feedback\n p.save()\n send_thxfeedback(p)\n except:\n logger.error(\"Problem getting feedback from %s '%s'\" % (from_num, feedback), exc_info=True)\n return HttpResponse(\"\")", "def add_expertise(self, user, score, is_vote=False):\r\n if user==None or user.is_anonymous():\r\n return\r\n \r\n # If user already has expertise on that message\r\n if self.is_expert(user):\r\n expert = self.expert_set.filter(user=user)[0]\r\n expert.score += score\r\n if is_vote:\r\n expert.voted = True\r\n expert.save()\r\n else:\r\n expert = Expert(message=self, user=user, score=score+1., voted=is_vote)\r\n expert.save()\r\n self.expert_set.add(expert)\r\n \r\n # Adds fraction to parent, if score still high enough\r\n if score >= OI_SCORE_ANONYMOUS:\r\n if self.parent:\r\n self.parent.add_expertise(user,score*OI_SCORE_FRACTION_TO_PARENT)", "def RequestUserAttention(self, dc, window, text, rect, pane): \r\n \r\n state = pane.state\r\n pane.state &= ~optionActive\r\n \r\n for indx in xrange(6):\r\n active = (indx%2 == 0 and [True] or [False])[0]\r\n if active:\r\n pane.state |= optionActive\r\n else:\r\n pane.state &= ~optionActive\r\n \r\n self.DrawCaptionBackground(dc, rect, pane)\r\n self.DrawCaption(dc, window, text, rect, pane)\r\n wx.SafeYield()\r\n wx.MilliSleep(350)\r\n\r\n pane.state = state" ]
[ "0.75064313", "0.73097485", "0.72848845", "0.7062981", "0.7040209", "0.6944699", "0.68973976", "0.6821135", "0.6812384", "0.62886703", "0.6259805", "0.61126626", "0.59743065", "0.59662056", "0.59238136", "0.5827477", "0.5825752", "0.5822465", "0.58078146", "0.5737781", "0.5685186", "0.56385803", "0.5632984", "0.559945", "0.55940557", "0.5569403", "0.5550003", "0.5527246", "0.5524717", "0.5512325" ]
0.73722345
1
Delete feedback and return to the user page
def delete_feedback(feedback_id): if 'username' in session: # Get username username = session['username'] # Remove feedback Feedback.query.filter_by(id=feedback_id).delete() db.session.commit() flash('Feedback Deleted!', 'success') return redirect(f'/users/{username}') else: flash("You must be logged in to do that!", 'danger') return redirect('/login')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def feedback_delete(request, feedback_id):\n store = SESSION.get_store(request.session)\n \n # get the feedback from the messages_received_list in session cache\n messages_received_list = SESSION.get_messages_received_list(\\\n request.session)\n i_remove, feedback = 0, None\n for ind, m in enumerate(messages_received_list):\n if m.objectId == feedback_id:\n feedback = m\n i_remove = ind\n break\n \n if not feedback:\n # feedback not found - it may have been deleted\n return redirect(reverse('messages_index')+ \"?%s\" %\\\n urllib.urlencode({'error': 'Feedback not found.'}))\n \n # we don't actually delete the feedback object,\n # we just remove from the store's relation\n store.remove_relation(\"ReceivedMessages_\", [feedback.objectId])\n \n # remove it from the messages_received_list in session cache\n messages_received_list.pop(i_remove)\n request.session['messages_received_list'] =\\\n messages_received_list\n \n # notify other dashboards logged into the same store of this change\n comet_receive(store.objectId, {\n COMET_RECEIVE_KEY_NAME: COMET_RECEIVE_KEY,\n \"deletedFeedback\":Message(objectId=feedback.objectId).jsonify(),\n })\n \n return redirect(reverse('messages_index')+ \"?%s\" %\\\n urllib.urlencode({'success':'Feedback has been deleted.',\n 'tab_feedback':1}))", "def delete_feedback(feedback_id):\n\n feedback = Feedback.query.get_or_404(feedback_id)\n recipient = feedback.recipient\n \n db.session.delete(feedback)\n db.session.commit()\n\n return redirect(f'/users/{recipient}')", "def delete_feedback(feedback_id: int):\n\n feedback = Feedback.query.get_or_404(feedback_id)\n\n if \"username\" not in session or session[\"username\"] not in {\n feedback.from_username,\n feedback.to_username\n }:\n raise Unauthorized()\n\n form = DeleteForm()\n if form.validate_on_submit():\n db.session.delete(feedback)\n db.session.commit()\n\n return redirect(f\"/users/{feedback.to_username}\")", "def api_delete_feedback(request, id):\n\n close_old_connections()\n \n # Not marking it as served if it isn't even ready yet.\n if not request.user.is_authenticated:\n return HttpResponseForbidden(\"You're not authenticated.\")\n \n # Delete the feedback.\n Feedback.objects.get(id=id).delete()\n\n close_old_connections()\n \n return HttpResponse('Deleted.')", "def delete(*, db_session, feedback_id: int):\n feedback = db_session.query(Feedback).filter(Feedback.id == feedback_id).one_or_none()\n db_session.delete(feedback)\n db_session.commit()", "def delreply(request, post_id):\n if not request.user.is_authenticated():\n return redirect('/login/?next=%s' % request.path)\n else:\n\n reply = Reply.objects.get(id = post_id)\n \n reply.delete() \n return redirect('/home/')", "def deleted_user(request):\n auth.logout(request)\n messages.success(request, \"Your profile has been deleted. 
Please contact us if you want to undo this.\")\n return redirect(reverse('index'))", "def test_DELETE_feedback(self):\n\t\t# 1\n\t\tfeedback_ids = []\n\t\tfor i in range(4):\n\t\t\tdata = self.POST_feedback()\n\t\t\tfeedback_ids.append(data['_id'])\n\t\t# 2\n\t\tfor i in range(len(feedback_ids)):\n\t\t\tfeedback_id = feedback_ids[i]\n\t\t\tself.DELETE('/api/feedback/' + feedback_id)\n\n\t\t\tdata = self.GET_data('/api/feedback/search')\n\t\t\tself.assertEqual(len(feedback_ids) - i - 1, len(data))\n\n\t\t\tlist = self.GET_data('/api/list/' + self.list_id)\n\t\t\tself.assertEqual(len(feedback_ids) - i - 1, len(list['feedbacks']))", "def confirm_delete(self):\n self.language = LANGUAGE.get(self.lang)\n message = Message(self.language[\"del_user\"], self.language[\"del_info\"])\n delete_message = message.create_question_message(self.language[\"yes\"])\n response = delete_message.exec()\n\n if response == QMessageBox.Yes:\n self.delete_user()\n elif response == QMessageBox.No:\n delete_message.close()", "def delete_user():", "def feedback_reply(request, feedback_id):\n account = request.session['account']\n store = SESSION.get_store(request.session)\n # data to be passed in the templace context\n data = {\n 'messages_nav': True,\n 'from_address': store.get(\"store_name\"),\n }\n \n # get from the messages_received_list in session cache\n messages_received_list = SESSION.get_messages_received_list(\\\n request.session)\n i_remove, feedback = 0, None\n for ind, m in enumerate(messages_received_list):\n if m.objectId == feedback_id:\n feedback = m\n i_remove = ind\n break\n \n if not feedback:\n # feedack not found - redirect to messages page with error message\n return redirect(reverse('messages_index')+ \"?%s\" %\\\n urllib.urlencode({'error': 'Feedback not found.'}))\n \n if request.method == 'POST':\n # user submitted reply form\n body = request.POST.get('body')\n if body is not None:\n # strip the body so that it doesn't have trailing or\n # leading whitespaces\n body = body.strip()\n else:\n body = \"\"\n \n data['body'] = body\n \n if len(body) == 0:\n # body cannot be empty\n data['error'] = 'Please enter a message.' \n \n elif len(body) > 750:\n # body cannot exceed 750 cahracters \n data['error'] = 'Body must be less than 750 characters.' \n \n elif feedback.get('Reply'):\n # double check if feedback already has a reply\n # should not go here unless it is a hacker \n return redirect(reverse('messages_index')+ \"?%s\" %\\\n urllib.urlencode({'error':\\\n 'Feedback has already been replied to.'}))\n \n else:\n # all valid - this method of validation is dirty and not\n # the way to do it in Django. Use a form instead. \n # I just got lazy here.\n \n # create the Parse Message object\n msg = Message.objects().create(message_type=\\\n FEEDBACK, sender_name=store.get('store_name'),\n store_id=store.objectId, body=body)\n # add the created reply to the store's sent messages relation\n store.add_relation(\"SentMessages_\", [msg.objectId])\n # set feedback Reply pointer to message and update it\n feedback.set('Reply', msg.objectId)\n feedback.update()\n \n # store the updated feedback\n messages_received_list.pop(i_remove)\n messages_received_list.insert(i_remove, feedback)\n request.session['messages_received_list'] =\\\n messages_received_list\n \n # save the session now! 
cloud_call may take a bit!\n request.session.save()\n\n # make the cloud call\n cloud_call(\"retailer_message\", {\n \"store_id\":store.objectId,\n \"store_name\":store.get('store_name'),\n \"message_id\":feedback.objectId,\n \"filter\":'one',\n \"patron_id\":feedback.get('patron_id'),\n \"feedback_reply_body\": body,\n })\n \n # notify other tabs/browsers logged into the same store \n # about the newly created message.\n comet_receive(store.objectId, {\n COMET_RECEIVE_KEY_NAME: COMET_RECEIVE_KEY,\n \"newMessage\":feedback.jsonify()\n })\n \n # make sure we have the latest session to save!\n request.session.clear()\n request.session.update(SessionStore(request.session.session_key))\n\n return redirect(reverse('feedback_details', \n args=(feedback.objectId,)) + \"?%s\" %\\\n urllib.urlencode({'success':'Reply has been sent.'}))\n \n else:\n # user navigated to this page \n if feedback.get(\"Reply\"):\n # if the user manually tweaks the url, then s/he might be\n # able to reply to a feedback that already has a reply.\n return redirect(reverse('feedback_details', \n args=(feedback.objectId,)) + \"?%s\" %\\\n urllib.urlencode({'error':'Cannot reply more than once.'})) \n \n # update store session cache\n request.session['store'] = store\n data['feedback'] = feedback\n \n # store the updated feedback\n messages_received_list.pop(i_remove)\n messages_received_list.insert(i_remove, feedback)\n request.session['messages_received_list'] =\\\n messages_received_list\n \n return render(request, 'manage/feedback_reply.djhtml', data)", "def user_delete(self, request):\n\n try:\n if request.method == \"POST\":\n flash(\"Be careful you are about to delete all of your data\")\n self._student_handler.delete_students(current_user.scheme_id, current_user.k_number)\n return redirect(url_for(\"user.user\"))\n else:\n return render_template(\"user/delete_page.html\")\n\n except Exception as e:\n self._log.exception(\"Could not delete student\")\n return abort(500)", "def delete_personal_message(request, pk=None):\n user = User.objects.get(email=request.user.email)\n contactuserposts = ContactUser.objects.all()\n contactuserpost = get_object_or_404(ContactUser, pk=pk)\n if request.method == \"POST\":\n contactuserpost.delete()\n messages.success(request, 'This message has been successfully deleted.')\n return redirect(user_profile)\n return render(request, \"personalmessagedelete.html\", {'contactuserposts': contactuserposts})", "def deleteUser(self, password, feedback=\"\"):\n\t\turl = \"https://habitica.com/api/v3/user\"\n\t\tpayload = {'password': password, 'feedback': feedback}\n\t\treturn(deleteUrl(url, self.credentials, payload))", "def aboutToDelete(self):\n \n pass", "def aboutToDelete(self):\n \n pass", "def aboutToDelete(self):\n \n pass", "def del_accomment(request, pk):\n\n comment = get_object_or_404(ActorComment, pk=pk)\n comment.delete()\n actor = comment.actor\n url = '../../' + str(comment.actor.pk)\n return redirect(url)", "def feedback(request):\n feedback = request.POST.get('feedback', None)\n if not feedback:\n return {\n 'res': 'failed'\n }\n feedback.replace('\\n', '<br>')\n user = request.user\n subject = email_templates.feedback['subject']\n content = email_templates.feedback['content'] % \\\n (user.username, feedback)\n admin_emails = [admin[1] for admin in ADMINS]\n email_users(subject, content, admin_emails,\n from_email=user.email)\n return {\n 'res': 'success'\n }", "async def delete(self):\n return await self.set_message(text='')", "def delete(self, request, *args, **kwargs):\n 
self.object = self.get_object()\n user = request.user\n success_url = reverse_lazy('muxic:user', kwargs={'username': user.username})\n self.object.delete()\n return HttpResponseRedirect(success_url)", "def feedback(request, feedback_id):\n data = {\n 'messages_nav': True,\n 'feedback_id':feedback_id,\n \"store_name\":\\\n SESSION.get_store(request.session).get(\"store_name\"),\n } \n \n # get from the messages_received_list in session cache\n messages_received_list = SESSION.get_messages_received_list(\\\n request.session)\n i_remove, feedback = 0, None\n for ind, m in enumerate(messages_received_list):\n if m.objectId == feedback_id:\n feedback = m\n i_remove = ind\n break\n \n if not feedback:\n # feedack not found - redirect to messages page with error message\n return redirect(reverse('messages_index')+ \"?%s\" %\\\n urllib.urlencode({'error': 'Feedback not found.',\n \"tab_feedback\":1}))\n \n if not feedback.is_read:\n # update the feedback's read if not yet read\n feedback.is_read = True\n feedback.update()\n \n # make sure that the message stored in the list is the updated 1\n messages_received_list.pop(i_remove)\n messages_received_list.insert(i_remove, feedback)\n request.session['messages_received_list'] = messages_received_list\n \n # inserting this success and error message into the template\n # should be done in a cleaner way - this was done by the \n # first guy. I just didn't bother changing it.\n if request.GET.get(\"success\"):\n data['success'] = request.GET.get(\"success\")\n if request.GET.get(\"error\"):\n data['error'] = request.GET.get(\"error\")\n \n # there should only be at most 1 reply\n data['reply'] = feedback.get('reply')\n data['feedback'] = feedback\n \n return render(request, 'manage/feedback.djhtml', data)", "def delete_user():\n #TODO user delete\n pass", "def delete_question(request, slug):\n\n this_question = question.objects.get(id=slug)\n\n if request.method == \"POST\":\n this_question.delete()\n\n messages.success(\n request, \"Your question was deleted\")\n\n return redirect('profile')\n\n return render(request, 'delete_question.html', {\"question\": this_question})", "def delete_capteur():\n if request.method==\"POST\":\n if request.form['del']==\"\":\n return render_template(\n \"delete-capteur.html\",\n liste = get_capteurs(),\n title = \"Supprimer un capteur\")\n else:\n a = get_capteur(int(request.form['del']))\n a.clear_datas()\n ac = Actions(\n contenu = \"Suppresion du capteur \"+a.get_name(),\n liste = 1\n )\n db.session.add(ac)\n db.session.delete(a)\n db.session.commit()\n return render_template(\n \"delete-capteur.html\",\n liste = get_capteurs(),\n title = \"Supprimer un capteur\")", "def delete_comment_view(request, pk):\n\n comment = get_object_or_404(Comment, pk=pk)\n group = comment.element.tab.group\n user = request.user\n element_view_url = reverse('element_view', args=(comment.element.pk,))\n\n if user not in group.users.all():\n return redirect(reverse('my_groups_view'))\n\n if user.id != comment.creator.id:\n return redirect(element_view_url)\n\n if request.method == 'POST':\n comment.delete()\n return redirect(element_view_url)\n\n return render(request, 'platformapp/comment/delete_comment_view.html')", "def delete():\n return render_template('layout.html')", "def KLP_User_Delete(request, user_id):\n\n # get logged in user\n\n user = request.user\n if user.id:\n\n # check logged in user permissions to delete user\n\n KLP_user_Perm(request.user, 'Users', None)\n import random\n import string\n rangeNum = 8\n\n # generate 
random string to replace existing password.\n\n randomStr = ''.join(random.choice(string.ascii_uppercase\n + string.digits) for x in range(rangeNum))\n\n # get user object\n\n userObj = User.objects.get(pk=user_id)\n userObj.is_active = 0 # deactivate user\n\n # ........userObj.set_password(randomStr) # replace password with random string\n\n userObj.save() # save user object\n return render_to_response('viewtemplates/userAction_done.html',\n {\n 'user': request.user,\n 'selUser': userObj,\n 'message': 'User Deletion Successful',\n 'legend': 'Karnataka Learning Partnership',\n 'entry': 'Add',\n }, context_instance=RequestContext(request))\n else:\n\n # if user is not logged in redirect to login page\n\n return HttpResponseRedirect('/login/')", "def delete(device):\n delete_subject(device)\n return redirect_back('index')", "def delete(self, request, *args, **kwargs):\r\n self.object = self.get_object()\r\n success_url = self.get_success_url()\r\n self.object.delete()\r\n messages.success(self.request, self.success_message)\r\n return HttpResponseRedirect(success_url)" ]
[ "0.7626995", "0.752246", "0.7413773", "0.7310091", "0.66966355", "0.6507985", "0.6378763", "0.6378238", "0.63614917", "0.63230973", "0.63185835", "0.6307736", "0.630549", "0.6293602", "0.6273581", "0.6273581", "0.6273581", "0.62598974", "0.62179583", "0.61881214", "0.61778986", "0.6170966", "0.61387354", "0.61320454", "0.6131392", "0.61221635", "0.61092573", "0.610618", "0.6082688", "0.60765773" ]
0.77901316
0
Displays a custom error page when returning a 404 error
def display_404(error): return render_template('/error.html'), 404
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def error():\n return render_template(\"404.html\")", "def not_found(e):\n return render_template(\"errors/404.html\"), 404", "def error_404(error):\n return '404 Error'", "def not_found_error(error):\n return render_template('errors/404.html'), 404", "def page_not_found():\n return render_template(\"errors/404.html\"), 404", "def error_404(error):\n return 'Bummer, there is nothing at this URL.'", "def page_not_found(er):\n return render_template('errors.html'), 404", "def error_not_found(error):\n return 'No page here, dood. 404!', 404", "def error_404(error):\n\n # Delete the error variable as unused\n del error\n # Render 404 page\n return render_template('404.html'), 404", "def not_found(error):\n\n return render_template('errors/404.html'), 404", "def page_not_found(_error):\n return render_template('404.html'), 404", "def error404(e) -> tuple:\n return render_template('404.html'), 404", "def page_not_found(e):\n return render_template(\"error/404.html\"), 404", "def page_not_found(error):\n return render_template('error.html', error_msg=\"404 Page Not Found\", pagetitle=\"404 Page Not Found\"), 404", "def page_not_found(error):\n\n return render_template('/errors/404.html'), 404", "def error_404(self):\n response = self.render_template('404.html')\n response.status_code = 404\n return response", "def page_not_found(e):\n return render_template('404.html'), 404", "def page_not_found(e):\n return render_template('404.html'), 404", "def page_not_found(e):\n return render_template(\"404.html\"), 404", "def not_found_error_handler(error):\r\n return render_template('error.404.html')", "def page_not_found(e):\n\n return render_template('404.html'), 404", "def err404():\n return render_template('404.html', year=datetime.now().year)", "def page_not_found(er): \n return render_template('errors.html'), 400", "def page_not_found(error):\n return '<h1> 404 - Not Found</h1>', 404", "def page_error(e):\n\n return render_template('404.html')", "def page_not_found(e):\n return render_template(\"404.html\", page_title=404)", "def _page_not_found():\n return render_template(\n \"error.html\",\n title=\"Page Not Found\"\n ), 404", "def page_not_found(err):\n return error_formatter(code='404', details=err, parm1=request.path)", "def page_not_found(e):\n return render_template('404.html')", "def error_page(e):\n \n return render_template('error-page.html'), 404" ]
[ "0.86492556", "0.85913444", "0.8580295", "0.854521", "0.85064226", "0.8480788", "0.8473789", "0.84674686", "0.8443625", "0.8435984", "0.8434157", "0.8430222", "0.84054524", "0.8401293", "0.8397243", "0.8393867", "0.839259", "0.839259", "0.83886766", "0.83642703", "0.83563656", "0.83545417", "0.8351667", "0.83213496", "0.83006346", "0.8288222", "0.82785934", "0.82783735", "0.82573783", "0.82554245" ]
0.8704343
0
Expands the volume to the new_size specified.
def expand_volume(self, vol, new_size): self.authenticate_user() volume_name = self._get_vipr_volume_name(vol) size_in_bytes = vipr_utils.to_bytes(str(new_size) + "G") try: self.volume_obj.expand( self.configuration.vipr_tenant + "/" + self.configuration.vipr_project + "/" + volume_name, size_in_bytes, True) except vipr_utils.SOSError as e: if e.err_code == vipr_utils.SOSError.SOS_FAILURE_ERR: raise vipr_utils.SOSError( vipr_utils.SOSError.SOS_FAILURE_ERR, "Volume " + volume_name + ": expand failed\n" + e.err_text) else: with excutils.save_and_reraise_exception(): LOG.exception(_("Volume : %s expand failed") % volume_name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extend_volume(self, volume, new_size):\n spdk_name = self._get_spdk_volume_name(volume.name)\n params = {'name': spdk_name, 'size': new_size * units.Gi}\n self._rpc_call('bdev_lvol_resize', params)", "def extend_volume(self, volume, new_size):\n LOG.info('Extending volume: %(id)s New size: %(size)s GB',\n {'id': volume['id'], 'size': new_size})\n nfs_share = volume['provider_location']\n nms = self.share2nms[nfs_share]\n volume_path = self.remote_path(volume)\n if getattr(self.configuration,\n self.driver_prefix + '_sparsed_volumes'):\n self._create_sparsed_file(nms, volume_path, new_size)\n else:\n block_size_mb = 1\n block_count = ((new_size - volume['size']) * units.Gi /\n (block_size_mb * units.Mi))\n\n nms.appliance.execute(\n 'dd if=/dev/zero seek=%(seek)d of=%(path)s'\n ' bs=%(bs)dM count=%(count)d' % {\n 'seek': volume['size'] * units.Gi / block_size_mb,\n 'path': volume_path,\n 'bs': block_size_mb,\n 'count': block_count\n }\n )", "def extend_volume(self, volume, new_size):\n if isinstance(new_size, dict):\n new_size = random.randint(new_size[\"min\"], new_size[\"max\"])\n\n aname = \"cinder_v%s.extend_volume\" % self.version\n with atomic.ActionTimer(self, aname):\n self._get_client().volumes.extend(volume, new_size)\n return self._wait_available_volume(volume)", "def resize_volume(self, size):\n curr_size = self.volume.size\n if size <= curr_size:\n raise exc.InvalidVolumeResize(\"The new volume size must be larger \"\n \"than the current volume size of '%s'.\" % curr_size)\n body = {\"volume\": {\"size\": size}}\n self.manager.action(self, \"resize\", body=body)", "def resize_memory(self, new_size=None):\n\n self.container.capacity = new_size", "def resize(self, size):\n self.instance.resize_volume(size)\n self.size = size", "def grow(self, size):\n # size of the instance\n if size is not None and (type(size) == int or size.isdigit()):\n size = { 'size': int(size) }\n else:\n # TODO : proper error\n raise Exception()\n\n if self.size > size['size']:\n # TODO : proper error\n raise Exception((\"This instance has a data storage volume of %d GB and cannot \" + \\\n \"be shrunk. 
(Tried to specify %d GB as new size.)\") % (self.size, size['size']))\n\n self.client.post(self.path+'/action', { 'resize': {'volume': size} })\n return True", "def _setSize(self, newsize):\n log_method_call(self, self.name,\n status=self.status, size=self._size, newsize=newsize)\n if not isinstance(newsize, Size):\n raise ValueError(\"new size must of type Size\")\n\n if not self.exists:\n # device does not exist (a partition request), just set basic values\n self._size = newsize\n self.req_size = newsize\n self.req_base_size = newsize\n\n if self.exists:\n super(PartitionDevice, self)._setSize(newsize)\n return\n\n # the rest is for changing the size of an allocated-but-not-existing\n # partition, which I'm not sure is advisable\n if self.disk and newsize > self.disk.size:\n raise ValueError(\"partition size would exceed disk size\")\n\n if not self.partedPartition:\n log.warn(\"No partedPartition, not adjusting geometry\")\n return\n\n maxAvailableSize = Size(self.partedPartition.getMaxAvailableSize(unit=\"B\"))\n\n if newsize > maxAvailableSize:\n raise ValueError(\"new size is greater than available space\")\n\n # now convert the size to sectors and update the geometry\n geometry = self.partedPartition.geometry\n physicalSectorSize = geometry.device.physicalSectorSize\n\n new_length = int(newsize) / physicalSectorSize\n geometry.length = new_length", "def resize(self, old, new):", "def bdev_rbd_resize(client, name, new_size):\n params = {\n 'name': name,\n 'new_size': new_size,\n }\n return client.call('bdev_rbd_resize', params)", "def expand_volume_helper(self, vol, size_in_gb, existing_vol_size):\n vol_id = vol['volumeId']\n try:\n if size_in_gb < existing_vol_size:\n self.show_error_exit(msg='Current volume size {0} GB is '\n 'greater than {1} GB specified.'.\n format(existing_vol_size, size_in_gb))\n elif size_in_gb > existing_vol_size:\n if 'rdfGroupId' in vol:\n array_id = self.module.params['serial_no']\n array_details = self.common.get_array(array_id=array_id)\n if utils.parse_version(array_details['ucode'])\\\n < utils.parse_version(self.foxtail_version):\n msg = (\"Expansion of SRDF protected volume is\"\n \" supported from v5978.444.444 onward. Please\"\n \" upgrade the array for this support.\")\n self.show_error_exit(msg=msg)\n return self.srdf_volume_expansion(vol, size_in_gb,\n existing_vol_size)\n return self.expand_volume(vol_id, size_in_gb,\n existing_vol_size)\n\n LOG.info('Current volume size and specified volume size'\n ' are equal')\n return False\n except Exception as e:\n error_message = 'Expand volume %s failed with error: %s' \\\n % (vol_id, str(e))\n self.show_error_exit(msg=error_message)", "def resize_memory(self, new_size=None):\n\n self.capacity = new_size\n\n # self.push() takes care of decreasing the memory.\n # # Oldest experiences are discarded. 
For Ever.\n # # TODO: Check for a more efficient way of cleaning the memory.\n # while len(self.memory) > self.capacity:\n # _ = self.pop()", "def set_size(self, new_bunch_size):\n self.bunch_size = new_bunch_size", "def bdev_daos_resize(client, name, new_size):\n params = {\n 'name': name,\n 'new_size': new_size,\n }\n return client.call('bdev_daos_resize', params)", "def resize_volume(self, delta_disk, vdisk_name):\n LOG.debug(\"Entering\")\n cmd = \"svctask expandvdisksize -size %s \" \\\n \"-unit b %s\" % (delta_disk, vdisk_name)\n\n output = self._svc_command(cmd)[0]\n LOG.debug(\"Exiting\")", "def _resize(self, new_cap):\n new_array = ba(new_cap)\n\n for i in range(self.count):\n new_array[i] = self.the_array[i]\n\n self.the_array = new_array\n self.capacity = new_cap", "def change_size(self, new_size):\n if not Circle.available_circles.has_key(new_size - 1):\n logging.debug('Circle Cache miss: ' + str(new_size))\n Circle.available_circles[new_size - 1] = AACircle(new_size, color=(0, 0, 0), antialias=2)\n self.image = Circle.available_circles[new_size - 1]\n self.size = new_size", "def scale(self, new_size):\n if new_size < 1:\n return False\n cur_size = self.size\n if new_size == cur_size:\n return True\n elif new_size < cur_size:\n # stop some running containers\n for container in self.containers[new_size:]:\n try:\n self.network.remove_container(container.id)\n _stop_container(container)\n except (OvsException,):\n pass\n self.reload()\n else:\n # start new containers\n for _ in range(new_size - cur_size):\n try:\n container = self._run_container()\n self.containers.append(container)\n except Exception as e:\n logger.error(e)\n return True", "def size(self, new_size):\n if type(new_size) is str:\n new_size = new_size.replace(\" \", \"\").upper()\n new_size = new_size.replace(\")\", \"\")\n new_size = new_size.replace(\"(\", \"\")\n new_size = new_size.replace(\",\", \".\")\n new_size = new_size.replace(\"B\", \"\").strip()\n target_unit = None\n multiplier = 1\n is_bytes = False\n try:\n float(new_size)\n target_unit = \"B\"\n is_bytes = True\n except Exception as e:\n pass\n\n if not is_bytes:\n multiplier *= 1024\n for unit in [\"K\", \"M\", \"G\", \"T\", \"P\", \"E\", \"Z\", \"Y\"]:\n if not target_unit and unit in new_size:\n target_unit = unit\n multiplier *= 1024\n # Reject double units\n elif target_unit and unit in new_size:\n target_unit = None\n break\n\n if target_unit:\n new_size = new_size.replace(target_unit, \"\").strip()\n try:\n self._size = int(float(new_size) * multiplier)\n except Exception as e:\n logger.error(f\"Failed to set a size from \\\"{new_size}\\\"\")\n logger.error(e)\n\n elif type(new_size) is int:\n self._size = new_size\n\n else:\n raise Exception(\"Wrong size type provided ({type(new_size)})\")\n\n if not self._size:\n logger.warn(f\"Failed to set a size from \\\"{new_size}\\\"\")", "def perform_module_operation(self):\n size = self.module.params['size']\n state = self.module.params['state']\n new_name = self.module.params['new_name']\n vol_id = self.module.params['vol_id']\n vol_name = self.module.params['vol_name']\n sg_name = self.module.params['sg_name']\n cap_unit = self.module.params['cap_unit']\n new_sg_name = self.module.params['new_sg_name']\n\n if vol_name is not None and sg_name is None:\n self.show_error_exit(msg='Specify Storage group name along '\n 'with volume name')\n\n if size and cap_unit is None:\n cap_unit = 'GB'\n elif cap_unit and size is None:\n self.show_error_exit(msg='Parameters size and cap_unit are '\n 'required 
together')\n self.volume_id = vol_id\n\n vol = self.get_volume()\n\n existing_vol_size = 0\n if vol is not None:\n self.volume_id = vol['volumeId']\n vol_id = vol['volumeId']\n existing_vol_size = vol['cap_gb']\n\n changed = False\n\n # Call to create volume in storage group\n if state == 'present' and vol is None:\n if new_name:\n self.show_error_exit(msg=\"Invalid argument new_name \"\n \"while creating a volume\")\n if size is None:\n self.show_error_exit(msg='Size is required to create volume')\n vol_id = self.create_volume(vol_name, sg_name, size, cap_unit)\n changed = True\n\n if state == 'present' and vol and size:\n if size is None:\n self.show_error_exit(msg='Size is required to expand volume')\n # Convert the given size to GB\n if size is not None and size > 0:\n size = utils.get_size_in_gb(size, cap_unit)\n LOG.info('Existing Size: %s GB, Specified Size: %s GB',\n existing_vol_size, size)\n changed = self.expand_volume_helper(vol, size, existing_vol_size)\n\n if state == 'present' and vol and new_name is not None:\n if len(new_name.strip()) == 0:\n self.show_error_exit(msg=\"Please provide valid volume \"\n \"name.\")\n\n vol_name = vol['volume_identifier']\n if new_name != vol_name:\n LOG.info('Changing the name of volume %s to %s',\n vol_name, new_name)\n changed = self.rename_volume(vol_id, new_name) or changed\n\n if state == 'absent' and vol:\n LOG.info('Deleting volume %s ', vol_id)\n changed = self.delete_volume(vol_id) or changed\n\n if state == 'present' and vol and new_sg_name:\n vol_sg = vol['storageGroupId'][0]\n if vol_sg != new_sg_name:\n LOG.info('Moving volume from %s to %s', vol_sg, new_name)\n changed = self.move_volume_between_storage_groups(\n vol, sg_name, new_sg_name) or changed\n\n '''\n Finally update the module changed state and saving updated volume\n details\n '''\n self.u4v_conn.set_array_id(\n array_id=self.module.params['serial_no'])\n self.result[\"changed\"] = changed\n if state == 'present':\n self.result[\"volume_details\"] = self.get_volume()\n LOG.info(\"Closing unisphere connection %s\", self.u4v_conn)\n utils.close_connection(self.u4v_conn)\n LOG.info(\"Connection closed successfully\")\n self.module.exit_json(**self.result)", "def _resize(self, new_capacity):\n temp_array = self.make_array(new_capacity)\n for i in range(self.n):\n temp_array[i] = self.original_array[i]\n self.original_array = temp_array\n self.capacity = new_capacity", "def resizeChildren(self,newSize):\n assert newSize > len(self.children)\n self.children += [None] * (newSize - len(self.children))", "def __resize(self, new_capacity):\r\n B = self.make_array(new_capacity) # create new array\r\n for k in range(self.n):\r\n B[k] = self.A[k]\r\n\r\n self.A = B\r\n self.capacity = new_capacity", "def resize(self, new_size):\n resized_img = opencv.resize(self.img, new_size)\n return Image(resized_img)", "def resize_volume(self, volumeObj, sizeInGb, bsize=1000):\n current_vol = self.get_volume_by_id(volumeObj.id)\n if current_vol.size_kb > (sizeInGb * bsize * bsize):\n raise RuntimeError(\n \"resize_volume() - New size needs to be bigger than: %d KBs\" % current_vol.size_kb)\n \n resizeDict = { 'sizeInGB' : str(sizeInGb) }\n response = self.conn.connection._do_post(\"{}/{}{}/{}\".format(\n self.conn.connection._api_url, \"instances/Volume::\", volumeObj.id, 'action/setVolumeSize'), json=resizeDict)\n return response", "def resize(self, size):\n assert size >= 0 and size <= self._cap, \\\n \"invalid size[%d] for resize\" % (size)\n\n self._size = size", "def _resize_list(self, 
new_size: int):\n for _ in range((new_size + 1) - len(self)):\n self.append(0)", "def _grow(self):\n self.capacity *= self.factor\n temp = [None] * self.capacity\n for i in range(self.size):\n temp[i] = self.store[i]\n self.store = temp", "def bdev_null_resize(client, name, new_size):\n params = {\n 'name': name,\n 'new_size': new_size,\n }\n return client.call('bdev_null_resize', params)", "def set_node_size(self, new_node_size: float):\n self.node_size = new_node_size" ]
[ "0.7807558", "0.76771975", "0.7514389", "0.7189748", "0.7182422", "0.7124434", "0.70722014", "0.68315697", "0.6571997", "0.65215564", "0.6489591", "0.64570886", "0.63780797", "0.6323964", "0.63189167", "0.6287346", "0.6242331", "0.6234601", "0.6156915", "0.61034304", "0.6095417", "0.6021093", "0.6005047", "0.59386754", "0.59371185", "0.5908566", "0.590708", "0.589823", "0.5786397", "0.5754942" ]
0.7985611
0
Creates a volume from the given snapshot (snapshot clone to volume).
def create_volume_from_snapshot(self, snapshot, volume, volume_db): self.authenticate_user() if self.configuration.vipr_emulate_snapshot == 'True': self.create_cloned_volume(volume, snapshot) return ctxt = context.get_admin_context() src_snapshot_name = None #src_snapshot_name = snapshot['display_name'] src_vol_ref = volume_db.volume_get(ctxt, snapshot['volume_id']) new_volume_name = self._get_volume_name(volume) number_of_volumes = 1 try: src_vol_name, src_vol_uri = self._get_vipr_volume_name(src_vol_ref, True) src_snapshot_name = self._get_vipr_snapshot_name(snapshot , src_vol_uri) (storageresType, storageresTypename) = self.volume_obj.get_storageAttributes( src_vol_name , None , src_snapshot_name) resource_id = self.volume_obj.storageResource_query(storageresType, src_vol_name, None, src_snapshot_name, self.configuration.vipr_project, self.configuration.vipr_tenant) self.volume_obj.clone( new_volume_name, number_of_volumes, resource_id, sync=True) except vipr_utils.SOSError as e: if(e.err_code == vipr_utils.SOSError.SOS_FAILURE_ERR): raise vipr_utils.SOSError( vipr_utils.SOSError.SOS_FAILURE_ERR, "Snapshot " + src_snapshot_name + ": clone failed\n" + e.err_text) else: with excutils.save_and_reraise_exception(): LOG.exception( _("Snapshot : %s clone failed") % src_snapshot_name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_volume_from_snapshot(self, volume, snapshot):\n self._ensure_shares_mounted()\n\n snapshot_vol = self._get_snapshot_volume(snapshot)\n nfs_share = snapshot_vol['provider_location']\n volume['provider_location'] = nfs_share\n nms = self.share2nms[nfs_share]\n\n vol, dataset = self._get_share_datasets(nfs_share)\n snapshot_name = '%s/%s/%s@%s' % (vol, dataset, snapshot['volume_name'],\n snapshot['name'])\n folder = '%s/%s' % (dataset, volume['name'])\n nms.folder.clone(snapshot_name, '%s/%s' % (vol, folder))\n\n try:\n self._share_folder(nms, vol, folder)\n except utils.NexentaException:\n try:\n nms.folder.destroy('%s/%s' % (vol, folder), '')\n except utils.NexentaException:\n LOG.warning(\"Cannot destroy cloned folder: \"\n \"%(vol)s/%(folder)s\",\n {'vol': vol, 'folder': folder})\n raise\n\n if self._get_nfs_server_version(nfs_share) < 4:\n sub_share, mnt_path = self._get_subshare_mount_point(nfs_share,\n volume)\n self._ensure_share_mounted(sub_share, mnt_path)\n\n if (('size' in volume) and (\n volume['size'] > snapshot['volume_size'])):\n self.extend_volume(volume, volume['size'])\n\n return {'provider_location': volume['provider_location']}", "def create_volume_from_snapshot(self, volume, snapshot):\n snap_name = self.get_snap_name(snapshot.id)\n view_name = self.get_view_name(volume.id)\n vol_name = self.get_volume_name(volume.id)\n cview = src_attach_info = dest_attach_info = None\n rpolicy = self.get_policy()\n properties = volume_utils.brick_get_connector_properties(\n self.configuration.use_multipath_for_image_xfer,\n self.configuration.enforce_multipath_for_image_xfer)\n LOG.debug(\"Searching for snapshot: %s in K2.\", snap_name)\n snap_rs = self.client.search(\"snapshots\", short_name=snap_name)\n if hasattr(snap_rs, 'hits') and snap_rs.total != 0:\n snap = snap_rs.hits[0]\n LOG.debug(\"Creating a view: %(view)s from snapshot: %(snap)s\",\n {'view': view_name, 'snap': snap_name})\n try:\n cview = self.client.new(\"snapshots\",\n short_name=view_name,\n source=snap, retention_policy=rpolicy,\n is_exposable=True).save()\n except Exception as ex:\n LOG.exception(\"Creating a view: %(view)s from snapshot: \"\n \"%(snap)s failed\", {\"view\": view_name,\n \"snap\": snap_name})\n raise KaminarioCinderDriverException(reason=ex)\n\n else:\n msg = _(\"Snapshot: %s search failed in K2.\") % snap_name\n LOG.error(msg)\n raise KaminarioCinderDriverException(reason=msg)\n\n try:\n conn = self.initialize_connection(cview, properties)\n src_attach_info = self._connect_device(conn)\n self.create_volume(volume)\n conn = self.initialize_connection(volume, properties)\n dest_attach_info = self._connect_device(conn)\n volume_utils.copy_volume(src_attach_info['device']['path'],\n dest_attach_info['device']['path'],\n snapshot.volume.size * units.Ki,\n self.configuration.volume_dd_blocksize,\n sparse=True)\n self._kaminario_disconnect_volume(src_attach_info,\n dest_attach_info)\n self.terminate_connection(volume, properties)\n self.terminate_connection(cview, properties)\n cview.delete()\n except Exception as ex:\n self._kaminario_disconnect_volume(src_attach_info,\n dest_attach_info)\n self.terminate_connection(cview, properties)\n self.terminate_connection(volume, properties)\n cview.delete()\n self.delete_volume(volume)\n LOG.exception(\"Copy to volume: %(vol)s from view: %(view)s \"\n \"failed\", {\"vol\": vol_name, \"view\": view_name})\n raise KaminarioCinderDriverException(reason=ex)", "def create_volume_from_snapshot(self, volume, snapshot):\n\n free_size = 
self._get_spdk_lvs_free_space(\n self._get_spdk_lvs_uuid(\n self._get_spdk_volume_name(snapshot.name)))\n\n if free_size < volume.size:\n raise exception.VolumeBackendAPIException(\n data=_('Not enough space to create snapshot with SPDK'))\n\n return self._create_volume(volume, snapshot)", "def create_cloned_volume(self, volume, src_vref):\n LOG.info('Creating clone of volume: %s', src_vref['id'])\n snapshot = {'volume_name': src_vref['name'],\n 'volume_id': src_vref['id'],\n 'volume_size': src_vref['size'],\n 'name': self._get_clone_snapshot_name(volume)}\n # We don't delete this snapshot, because this snapshot will be origin\n # of new volume. This snapshot will be automatically promoted by NMS\n # when user will delete its origin.\n self.create_snapshot(snapshot)\n try:\n return self.create_volume_from_snapshot(volume, snapshot)\n except utils.NexentaException:\n LOG.error('Volume creation failed, deleting created snapshot '\n '%(volume_name)s@%(name)s', snapshot)\n try:\n self.delete_snapshot(snapshot)\n except (utils.NexentaException, exception.SnapshotIsBusy):\n LOG.warning('Failed to delete zfs snapshot '\n '%(volume_name)s@%(name)s', snapshot)\n raise", "def create_cloned_volume(self, volume, src_vref):\n # Form the snapshot structure.\n snapshot = {'id': uuid.uuid4().__str__(),\n 'volume_id': src_vref['id'],\n 'volume': src_vref}\n\n # Create snapshot.\n self.create_snapshot(snapshot)\n\n try:\n # Create volume from snapshot.\n lun_info = self.create_volume_from_snapshot(volume, snapshot)\n finally:\n try:\n # Delete snapshot.\n self.delete_snapshot(snapshot)\n except exception.VolumeBackendAPIException:\n LOG.warning(_LW(\n 'Failure deleting the snapshot %(snapshot_id)s '\n 'of volume %(volume_id)s.'),\n {'snapshot_id': snapshot['id'],\n 'volume_id': src_vref['id']},)\n\n return {'provider_location': lun_info['ID'],\n 'lun_info': lun_info}", "def test_create_volume_from_snapshot(self, mock_ghn):\n ctxt = context.get_admin_context()\n extra_specs = {}\n type_ref = volume_types.create(ctxt, 'hgst-1', extra_specs)\n snap = {'id': '1', 'name': 'volume1', 'display_name': '',\n 'volume_type_id': type_ref['id'], 'size': 10,\n 'provider_id': 'space_orig'}\n volume = {'id': '2', 'name': 'volume2', 'display_name': '',\n 'volume_type_id': type_ref['id'], 'size': 10}\n pid = self.driver.create_volume_from_snapshot(volume, snap)\n # We must copy entier underlying storage, ~12GB, not just 10GB\n self.assertEqual(11444 * units.Mi, self.dd_count)\n self.assertEqual('1M', self.bs)\n # Check space-create command\n expected = {'redundancy': '0', 'group': 'xanadu',\n 'name': 'volume2', 'mode': '0777',\n 'user': 'kane', 'net': 'net1',\n 'storageserver': 'stor1:gbd0,stor2:gbd0,',\n 'size': '12'}\n self.assertDictMatch(expected, self.created)\n # Check the returned provider\n expected_pid = {'provider_id': 'volume2'}\n self.assertDictMatch(expected_pid, pid)", "def create_volume_from_snapshot(self, volume, snapshot):\n snapshotname = huawei_utils.encode_name(snapshot['id'])\n\n snapshot_id = snapshot.get('provider_location', None)\n if snapshot_id is None:\n snapshot_id = self.restclient.get_snapshotid_by_name(snapshotname)\n if snapshot_id is None:\n err_msg = (_(\n 'create_volume_from_snapshot: Snapshot %(name)s '\n 'does not exist.')\n % {'name': snapshotname})\n LOG.error(err_msg)\n raise exception.VolumeBackendAPIException(data=err_msg)\n\n lun_info = self.create_volume(volume)\n\n tgt_lun_id = lun_info['ID']\n luncopy_name = huawei_utils.encode_name(volume['id'])\n\n LOG.info(_LI(\n 
'create_volume_from_snapshot: src_lun_id: %(src_lun_id)s, '\n 'tgt_lun_id: %(tgt_lun_id)s, copy_name: %(copy_name)s.'),\n {'src_lun_id': snapshot_id,\n 'tgt_lun_id': tgt_lun_id,\n 'copy_name': luncopy_name})\n\n event_type = 'LUNReadyWaitInterval'\n\n wait_interval = huawei_utils.get_wait_interval(self.xml_file_path,\n event_type)\n\n def _volume_ready():\n result = self.restclient.get_lun_info(tgt_lun_id)\n\n if (result['HEALTHSTATUS'] == constants.STATUS_HEALTH\n and result['RUNNINGSTATUS'] == constants.STATUS_VOLUME_READY):\n return True\n return False\n\n huawei_utils.wait_for_condition(self.xml_file_path,\n _volume_ready,\n wait_interval,\n wait_interval * 10)\n\n self._copy_volume(volume, luncopy_name,\n snapshot_id, tgt_lun_id)\n\n return {'ID': lun_info['ID'],\n 'lun_info': lun_info}", "def create_volume(self, snapshot_id = \"\", size = -1):\n response = volume.create_volume(self.url, self.verb,\n self.headers, self.version,\n snapshot_id, size)\n if response is not None :\n res = CreateVolumeResponse.CreateVolumeResponse()\n parseString(str(response.text), res)\n return res\n else :\n return None", "def create_snapshot(self, snapshot):\n LOG.info(_LI('Creating snapshot: %s'), snapshot['name'])\n lcfg = self.configuration\n snap_name = self._create_snapshot_name()\n self.zfssa.create_snapshot(lcfg.zfssa_nfs_pool, lcfg.zfssa_nfs_project,\n lcfg.zfssa_nfs_share, snap_name)\n\n src_file = snap_name + '/' + snapshot['volume_name']\n\n try:\n self.zfssa.create_snapshot_of_volume_file(src_file=src_file,\n dst_file=\n snapshot['name'])\n except Exception:\n with excutils.save_and_reraise_exception():\n LOG.debug('Error thrown during snapshot: %s creation',\n snapshot['name'])\n finally:\n self.zfssa.delete_snapshot(lcfg.zfssa_nfs_pool,\n lcfg.zfssa_nfs_project,\n lcfg.zfssa_nfs_share, snap_name)", "def create_volume_from_snapshot(snapshots, objects_created,\n wait_for_available=120):\n if type(snapshots) is not list:\n snapshots = [snapshots]\n v = []\n for snapshot in snapshots:\n command = 'cinder create --snapshot-id %s --name %s' % \\\n (snapshot['id'], snapshot['display_name'])\n volume_from_snapshot = parse_output(Popen(\n command.split(), stdout=STDOUT, stderr=STDERR).communicate()[0])\n volume_from_snapshot['device'] = snapshot['device']\n volume_from_snapshot['bootable'] = snapshot['bootable']\n v.append(volume_from_snapshot)\n if wait_for_available > 0:\n wait = 0\n again = False\n while wait < wait_for_available:\n time.sleep(5)\n wait += 5\n again = False\n for volume in v:\n command = 'cinder show %s' % volume['id']\n status = parse_output(Popen(command.split(), stdout=STDOUT,\n stderr=STDERR).communicate()[0]\n )['status']\n if status == 'error':\n # clean up and create volume again\n command = 'cinder delete %s' % volume['id']\n a = Popen(command.split(), stdout=STDOUT,\n stderr=STDERR).communicate()[0]\n command = 'cinder create --snapshot-id %s' % \\\n volume['snapshot_id']\n volume_info = parse_output(Popen(\n command.split(), stdout=STDOUT,\n stderr=STDERR).communicate()[0])\n volume_info['bootable'] = volume['bootable']\n volume_info['device'] = volume['device']\n volume = volume_info\n again = True\n break\n elif status == 'creating':\n again = True\n break\n elif status == 'available':\n volume['status'] = status\n pass\n if again:\n continue\n else:\n break\n if again: # Loop ended due to timeout\n print 'Error creating volume from snapshot!'\n print 'The following entities were created in the process:'\n print_objects_created(objects_created)\n sys.exit(-1)\n 
return v", "def _create_snapshot(connection, volume, name=''):\n logging.info(kayvee.formatLog(\"ebs-snapshots\", \"info\", \"creating new snapshot\", {\"volume\": volume.id}))\n snapshot = volume.create_snapshot(\n description=\"automatic snapshot by ebs-snapshots\")\n if not name:\n name = '{}-snapshot'.format(volume.id)\n connection.create_tags(\n [snapshot.id], dict(Name=name, creator='ebs-snapshots'))\n logging.info(kayvee.formatLog(\"ebs-snapshots\", \"info\", \"created snapshot successfully\", {\n \"name\": name,\n \"volume\": volume.id,\n \"snapshot\": snapshot.id\n }))\n return snapshot", "def create_snapshot(self, snapshot):\n vg_name = self.get_volume_group_name(snapshot.volume_id)\n snap_name = self.get_snap_name(snapshot.id)\n rpolicy = self.get_policy()\n try:\n LOG.debug(\"Searching volume_group: %s in K2.\", vg_name)\n vg = self.client.search(\"volume_groups\", name=vg_name).hits[0]\n LOG.debug(\"Creating a snapshot: %(snap)s from vg: %(vg)s\",\n {'snap': snap_name, 'vg': vg_name})\n self.client.new(\"snapshots\", short_name=snap_name,\n source=vg, retention_policy=rpolicy,\n is_auto_deleteable=False).save()\n except Exception as ex:\n LOG.exception(\"Creation of snapshot: %s failed.\", snap_name)\n raise KaminarioCinderDriverException(reason=ex)", "def create_volume_snapshot(self, volume, name=None, description=None,\n check=True):\n cmd = 'cinder snapshot-create'\n if name:\n cmd += ' --name ' + name\n if description is not None:\n cmd += ' --description ' + moves.shlex_quote(description)\n\n cmd += ' ' + volume.id\n\n exit_code, stdout, stderr = self.execute_command(\n cmd, timeout=config.SNAPSHOT_AVAILABLE_TIMEOUT, check=check)\n\n snapshot_table = output_parser.table(stdout)\n snapshot = {key: value for key, value in snapshot_table['values']}\n\n return snapshot", "def create_cloned_volume(self, volume, src_vref):\n clone_name = self.get_volume_name(volume.id)\n src_name = self.get_volume_name(src_vref.id)\n src_vol = self.client.search(\"volumes\", name=src_name)\n src_map = self.client.search(\"mappings\", volume=src_vol)\n src_attach_info = dest_attach_info = None\n if src_map.total != 0:\n msg = _(\"K2 driver does not support clone of an attached volume. 
\"\n \"To get this done, create a snapshot from the attached \"\n \"volume and then create a volume from the snapshot.\")\n LOG.error(msg)\n raise KaminarioCinderDriverException(reason=msg)\n try:\n properties = volume_utils.brick_get_connector_properties(\n self.configuration.use_multipath_for_image_xfer,\n self.configuration.enforce_multipath_for_image_xfer)\n conn = self.initialize_connection(src_vref, properties)\n src_attach_info = self._connect_device(conn)\n self.create_volume(volume)\n conn = self.initialize_connection(volume, properties)\n dest_attach_info = self._connect_device(conn)\n volume_utils.copy_volume(src_attach_info['device']['path'],\n dest_attach_info['device']['path'],\n src_vref.size * units.Ki,\n self.configuration.volume_dd_blocksize,\n sparse=True)\n self._kaminario_disconnect_volume(src_attach_info,\n dest_attach_info)\n self.terminate_connection(volume, properties)\n self.terminate_connection(src_vref, properties)\n except Exception as ex:\n self._kaminario_disconnect_volume(src_attach_info,\n dest_attach_info)\n self.terminate_connection(src_vref, properties)\n self.terminate_connection(volume, properties)\n self.delete_volume(volume)\n LOG.exception(\"Create a clone: %s failed.\", clone_name)\n raise KaminarioCinderDriverException(reason=ex)", "def create_snapshot(volume_name, volume_id):\n\tif cliargs.verbose:\n\t\tprint \"Will snapshot '%s' and tag it with '%s'\" % (volume_id, volume_name)\n\n\tresponse = ec2.create_snapshot(VolumeId=volume_id, Description=volume_name)\n\tif response:\n\t\tec2.create_tags(Resources=[response[\"SnapshotId\"]], Tags=[{\"Key\": \"Name\", \"Value\": volume_name}])", "def create_snapshot(args, **_):\n\n volume_id = \\\n utils.get_external_resource_id_or_raise(\n 'create snapshot', ctx.instance)\n\n ctx.logger.info(\n 'Trying to create a snapshot of EBS volume {0}.'\n .format(volume_id))\n\n volume_object = _get_volumes_from_id(volume_id)\n\n if not args:\n snapshot_desc = \\\n unicode(datetime.datetime.now()) + \\\n ctx.instance.runtime_properties[constants.EXTERNAL_RESOURCE_ID]\n args = dict(description=snapshot_desc)\n\n try:\n new_snapshot = volume_object.create_snapshot(**args)\n except (boto.exception.EC2ResponseError,\n boto.exception.BotoServerError) as e:\n raise NonRecoverableError('{0}'.format(str(e)))\n\n ctx.logger.info(\n 'Created snapshot of EBS volume {0}.'.format(volume_id))\n\n if constants.VOLUME_SNAPSHOT_ATTRIBUTE not in \\\n ctx.instance.runtime_properties:\n ctx.instance.runtime_properties[\n constants.VOLUME_SNAPSHOT_ATTRIBUTE] = list()\n\n ctx.instance.runtime_properties[\n constants.VOLUME_SNAPSHOT_ATTRIBUTE].append(new_snapshot.id)", "def create_cloned_volume(self, volume, src_vref):\n LOG.info(_LI('new cloned volume: %s'), volume['name'])\n LOG.info(_LI('source volume for cloning: %s'), src_vref['name'])\n\n snapshot = {'volume_name': src_vref['name'],\n 'volume_id': src_vref['id'],\n 'volume_size': src_vref['size'],\n 'name': self._create_snapshot_name()}\n\n self.create_snapshot(snapshot)\n return self.create_volume_from_snapshot(volume, snapshot,\n method='MOVE')", "def _create_snapshot(self, name=None, metadata=None):\n req = fakes.HTTPRequest.blank('/v3/snapshots')\n req.environ['cinder.context'] = self.ctx\n snap = {\"volume_id\": fake.VOLUME_ID,\n \"display_name\": name or \"Volume Test Name\",\n \"description\": \"Volume Test Desc\"\n }\n if metadata:\n snap[\"metadata\"] = metadata\n body = {\"snapshot\": snap}\n self.controller.create(req, body=body)", "def snapshot(self, name):\r\n return 
self.driver.create_volume_snapshot(volume=self, name=name)", "def create_volume_snapshot(volumes, source_instance, objects_created,\n wait_for_available=50):\n if type(volumes) is not list:\n volumes = [volumes]\n s = []\n for volume in volumes:\n command = 'cinder snapshot-create --force True --name %s %s' % \\\n (volume['name'], volume['id'])\n snapshot_info = parse_output(Popen(command.split(), stdout=STDOUT).communicate()[0])\n if volume['bootable'] == 'true':\n snapshot_info['bootable'] = True\n else:\n snapshot_info['bootable'] = False\n att = volume['attachments'].replace(\"'\", \"\\\"\").replace(\n \"u\\\"\", \"\\\"\").replace(\" None,\", \" \\\"None\\\",\")\n snapshot_info['device'] = get(json.loads(att), 'server_id',\n source_instance['id'])[0]['device']\n s.append(snapshot_info)\n if wait_for_available > 0:\n wait = 0\n again = False\n while wait < wait_for_available:\n time.sleep(5)\n wait += 5\n again = False\n for snapshot in s:\n command = 'cinder snapshot-show %s' % snapshot['id']\n status = parse_output(Popen(command.split(), stdout=STDOUT,\n stderr=STDERR).communicate()[0]\n )['status']\n if status == 'error':\n # clean up and take snapshot again\n command = 'cinder snapshot-delete %s' % snapshot['id']\n a = Popen(command.split(), stdout=STDOUT,\n stderr=STDERR).communicate()[0]\n command = 'cinder snapshot-create --force True %s' % \\\n snapshot['volume_id']\n snapshot_info = parse_output(Popen(command.split(),\n stdout=STDOUT, stderr=STDERR\n ).communicate()[0])\n snapshot_info['bootable'] = snapshot['bootable']\n snapshot_info['device'] = snapshot['device']\n snapshot = snapshot_info\n again = True\n break\n elif status == 'creating':\n again = True\n break\n elif status == 'available':\n snapshot['status'] = status\n pass\n if again:\n continue\n else:\n break\n if again: # Loop ended due to timeout\n print 'Error creating volume snapshot!'\n print 'The following entities were created in the process:'\n print_objects_created(objects_created)\n sys.exit(-1)\n return s", "def create_volume(self, volume):\n LOG.debug('SPDK create volume')\n\n return self._create_volume(volume)", "def test_create_snapshot(self, mock_ghn):\n # Now snapshot the volume and check commands\n snapshot = {'volume_name': 'volume10',\n 'volume_id': 'xxx', 'display_name': 'snap10',\n 'name': '123abc', 'volume_size': 10, 'id': '123abc',\n 'volume': {'provider_id': 'space10'}}\n ret = self.driver.create_snapshot(snapshot)\n # We must copy entier underlying storage, ~12GB, not just 10GB\n self.assertEqual(11444 * units.Mi, self.dd_count)\n self.assertEqual('1M', self.bs)\n # Check space-create command\n expected = {'redundancy': '0', 'group': 'xanadu',\n 'name': snapshot['display_name'], 'mode': '0777',\n 'user': 'kane', 'net': 'net1',\n 'storageserver': 'stor1:gbd0,stor2:gbd0,',\n 'size': '12'}\n self.assertDictMatch(expected, self.created)\n # Check the returned provider\n expected_pid = {'provider_id': 'snap10'}\n self.assertDictMatch(expected_pid, ret)", "def snapshot(self, snapshot, keep=0):\n if keep:\n until = util.today() + datetime.timedelta(days=keep)\n snapshot = snapshot + \"-keep-until-\" + until.strftime(\"%Y%m%d\")\n if snapshot in [x.snapname for x in self.ceph.root.snapshots]:\n self.log.info(\"snapshot-exists\", snapshot=snapshot)\n return\n self.log.info(\"snapshot-create\", name=snapshot)\n with self.frozen_vm() as frozen:\n if frozen:\n self.ceph.root.snapshots.create(snapshot)\n else:\n self.log.error(\"snapshot-ignore\", reason=\"not frozen\")\n raise RuntimeError(\"VM not 
frozen, not making snapshot.\")", "def create_snapshot(self, snap_description=None):\n log.debug(\"Initiating creation of a snapshot for volume '%s'\" % self.volume_id)\n try:\n snapshot = self.volume.create_snapshot(description=snap_description)\n log.info(\"Created snapshot {0} from volume {1} ({2}). Check the snapshot \"\n \"for status.\".format(snapshot.id, self.volume_id, self.fs))\n self._derived_snapshots.append(snapshot)\n # Add tags to the newly created snapshot\n self.app.cloud_interface.add_tag(snapshot, 'Name',\n self.app.config['cluster_name'])\n self.app.cloud_interface.add_tag(\n self.volume, 'bucketName', self.app.config['bucket_cluster'])\n self.app.cloud_interface.add_tag(self.volume, 'filesystem', self.fs.name)\n return str(snapshot.id)\n except EC2ResponseError as ex:\n log.error(\"Error creating a snapshot from volume '%s': %s\" %\n (self.volume_id, ex))\n return None", "def create_volume(self, size, zone, snapshot=None):\r\n if isinstance(zone, Zone):\r\n zone = zone.name\r\n params = {'AvailabilityZone' : zone}\r\n if size:\r\n params['Size'] = size\r\n if snapshot:\r\n if isinstance(snapshot, Snapshot):\r\n snapshot = snapshot.id\r\n params['SnapshotId'] = snapshot\r\n return self.get_object('CreateVolume', params, Volume, verb='POST')", "def create_snapshot(self, volume_id, notes='No longer needed'):\r\n\r\n self.iscsi_svc.createSnapshot(notes, id=volume_id)", "def create_snapshot(self, volume_id, description=None):\r\n params = {'VolumeId' : volume_id}\r\n if description:\r\n params['Description'] = description[0:255]\r\n snapshot = self.get_object('CreateSnapshot', params,\r\n Snapshot, verb='POST')\r\n volume = self.get_all_volumes([volume_id])[0]\r\n volume_name = volume.tags.get('Name')\r\n if volume_name:\r\n snapshot.add_tag('Name', volume_name)\r\n return snapshot", "def create_snapshot(self, volume_id):\n response = snapshot.create_snapshot(self.url, self.verb,\n self.headers, self.version,\n volume_id)\n \n if response is not None :\n res = CreateSnapshotResponse.CreateSnapshotResponse()\n parseString(str(response.text), res)\n return res\n else :\n return None", "def database_volume_snapshot_add(volume_snapshot_obj):\n db = database_get()\n session = db.session()\n query = session.query(model.VolumeSnapshot)\n query = query.filter(model.VolumeSnapshot.uuid == volume_snapshot_obj.uuid)\n volume_snapshot = query.first()\n if not volume_snapshot:\n volume_snapshot = model.VolumeSnapshot()\n volume_snapshot.uuid = volume_snapshot_obj.uuid\n volume_snapshot.name = volume_snapshot_obj.name\n volume_snapshot.description = volume_snapshot_obj.description\n volume_snapshot.size_gb = volume_snapshot_obj.size_gb\n volume_snapshot.volume_uuid = volume_snapshot_obj.volume_uuid\n volume_snapshot.nfvi_volume_snapshot_data = \\\n json.dumps(volume_snapshot_obj.nfvi_volume_snapshot.as_dict())\n session.add(volume_snapshot)\n else:\n volume_snapshot.name = volume_snapshot_obj.name\n volume_snapshot.description = volume_snapshot_obj.description\n volume_snapshot.size_gb = volume_snapshot_obj.size_gb\n volume_snapshot.volume_uuid = volume_snapshot_obj.volume_uuid\n volume_snapshot.nfvi_volume_snapshot_data = \\\n json.dumps(volume_snapshot_obj.nfvi_volume_snapshot.as_dict())\n db.commit()", "def CreateDiskFromSnapshotURI(\n self,\n snapshot: 'AZComputeSnapshot',\n snapshot_uri: str,\n region: Optional[str] = None,\n disk_name: Optional[str] = None,\n disk_name_prefix: Optional[str] = None,\n disk_type: str = 'Standard_LRS') -> 'AZComputeDisk':\n\n if not region:\n region = 
self.az_account.default_region\n\n # Create a temporary Azure account storage to import the snapshot\n storage_account_name = hashlib.sha1(\n snapshot.resource_id.encode('utf-8')).hexdigest()[:23]\n storage_account_url = 'https://{0:s}.blob.core.windows.net'.format(\n storage_account_name)\n # pylint: disable=line-too-long\n storage_account_id, storage_account_access_key = self.az_account.storage.CreateStorageAccount(\n storage_account_name, region=region)\n # pylint: enable=line-too-long\n blob_service_client = blob.BlobServiceClient(\n account_url=storage_account_url, credential=storage_account_access_key)\n\n # Create a container within the Storage to receive the imported snapshot\n container_name = storage_account_name + '-container'\n snapshot_vhd_name = snapshot.name + '.vhd'\n container_client = blob_service_client.get_container_client(container_name)\n try:\n logger.info('Creating blob container {0:s}'.format(container_name))\n container_client.create_container()\n logger.info('Blob container {0:s} successfully created'.format(\n container_name))\n except exceptions.ResourceExistsError:\n # The container already exists, so we can re-use it\n logger.warning('Reusing existing container: {0:s}'.format(container_name))\n\n # Download the snapshot from the URI to the storage\n copied_blob = blob_service_client.get_blob_client(\n container_name, snapshot_vhd_name)\n logger.info('Importing snapshot to container from URI {0:s}. '\n 'Depending on the size of the snapshot, this process is going '\n 'to take a while.'.format(snapshot_uri))\n copied_blob.start_copy_from_url(snapshot_uri)\n copy_status = copied_blob.get_blob_properties().copy.status\n while copy_status != 'success':\n sleep(5) # Wait for the vhd to be imported in the Azure storage container\n copy_status = copied_blob.get_blob_properties().copy.status\n if copy_status in ('aborted', 'failed'):\n raise errors.ResourceCreationError(\n 'Could not import the snapshot from URI {0:s}'.format(\n snapshot_uri), __name__)\n logger.debug('Importing snapshot from URI {0:s}'.format(snapshot_uri))\n logger.info('Snapshot successfully imported from URI {0:s}'.format(\n snapshot_uri))\n\n if not disk_name:\n disk_name = common.GenerateDiskName(snapshot,\n disk_name_prefix=disk_name_prefix)\n\n # Create a new disk from the imported snapshot\n creation_data = {\n 'location': region,\n 'creation_data': {\n 'source_uri': copied_blob.url,\n 'storage_account_id': storage_account_id,\n 'create_option': models.DiskCreateOption.import_enum\n },\n 'sku': {'name': disk_type}\n }\n\n try:\n logger.info('Creating disk: {0:s}'.format(disk_name))\n request = self.compute_client.disks.begin_create_or_update(\n self.az_account.default_resource_group_name,\n disk_name,\n creation_data)\n while not request.done():\n sleep(5) # Wait 5 seconds before checking disk status again\n disk = request.result()\n logger.info('Disk {0:s} successfully created'.format(disk_name))\n except azure_exceptions.CloudError as exception:\n raise errors.ResourceCreationError(\n 'Could not create disk from URI {0:s}: {1!s}'.format(\n snapshot_uri, exception), __name__) from exception\n\n # Cleanup the temporary account storage\n self.az_account.storage.DeleteStorageAccount(storage_account_name)\n\n return AZComputeDisk(self.az_account,\n disk.id,\n disk.name,\n disk.location,\n disk.zones)" ]
[ "0.81089157", "0.79880166", "0.7807846", "0.7783433", "0.7662862", "0.7652661", "0.7525891", "0.75196546", "0.74478763", "0.7440903", "0.73937786", "0.73604554", "0.7312219", "0.71355873", "0.7077416", "0.7056985", "0.7022125", "0.7012366", "0.69805145", "0.6944935", "0.6888308", "0.6877732", "0.6868665", "0.68464017", "0.6686492", "0.66637343", "0.665881", "0.66453075", "0.6631388", "0.660886" ]
0.837259
0
Find the export group whose initiators are the same as the given initiator ports.
def _find_exportgroup(self, initiator_ports): foundgroupname = None grouplist = self.exportgroup_obj.exportgroup_list( self.configuration.vipr_project, self.configuration.vipr_tenant) for groupid in grouplist: groupdetails = self.exportgroup_obj.exportgroup_show( groupid, self.configuration.vipr_project, self.configuration.vipr_tenant) if groupdetails is not None: if groupdetails['inactive']: continue initiators = groupdetails['initiators'] if initiators is not None: inits_eg = set() for initiator in initiators: inits_eg.add(initiator['initiator_port']) if inits_eg <= set(initiator_ports): foundgroupname = groupdetails['name'] if foundgroupname is not None: # Check the associated varray if groupdetails['varray']: varray_uri = groupdetails['varray']['id'] varray_details = self.varray_obj.varray_show( varray_uri) if varray_details['name'] == \ self.configuration.vipr_varray: LOG.debug( "Found exportgroup " + foundgroupname) break # Not the right varray foundgroupname = None return foundgroupname
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_port_group_list(self):\n pass", "def get_group_out_ports(self, dp, dstip):\n pass", "def _find_matching_ports(i, j):\n\n i_ports = i.available_ports()\n j_ports = j.available_ports()\n i_port_names = [p.name for p in i.available_ports()]\n j_port_names = [p.name for p in j.available_ports()]\n common_name = list(set(i_port_names).intersection(j_port_names))\n if len(common_name) != 1:\n warn(\"{} ports were found with corresponding names for\"\n \" particles {} and {}\".format(len(common_name), i,j))\n i_port = [p for p in i.available_ports() if p.name == common_name[0]]\n j_port = [p for p in j.available_ports() if p.name == common_name[0]]\n #for j_port in j_ports:\n #if j_port.name == i_port.name:\n #return i_port, j_port\n return i_port[0], j_port[0]", "def test_get_port_group_by_moid(self):\n pass", "def test_get_port_sub_group_by_moid(self):\n pass", "def get_destination_group(self):\n sg = self.source_group\n dd = self.destination_directory\n\n while True:\n try:\n matches = dd.groups.search({'name': sg.name})\n return matches[0] if len(matches) > 0 else None\n except StormpathError as err:\n logger.error('Failed to search for Group: {} in Directory: {} ({})'.format(sg.name.encode('utf-8'), dd.name.encode('utf-8'), err))", "def test_modify_rdf_group_add_remove_port(self):\n srdf_group, local_port_list, remote_port_list = self.setup_srdf_group()\n if not remote_port_list:\n self.skipTest('Skipping test_modify_rdf_group_add_remove_port - '\n 'no remote port list')\n port_list = list()\n port_list.append(local_port_list[0])\n self.replication.modify_rdf_group(\n action='remove_ports', srdf_group_number=srdf_group,\n port_list=port_list)\n modifed_port_list = self.replication.get_rdf_group(\n rdf_number=srdf_group)['localPorts']\n self.assertNotIn(local_port_list[0], modifed_port_list)\n self.replication.modify_rdf_group(\n action='add_ports', port_list=port_list,\n srdf_group_number=srdf_group)\n modifed_port_list = self.replication.get_rdf_group(\n rdf_number=srdf_group)['localPorts']\n self.assertIn(local_port_list[0], modifed_port_list)\n self.replication.delete_rdf_group(srdf_group_number=srdf_group)", "def get_exports_count_by_initiators(self, initiator_ports):\n comma_delimited_initiator_list = \",\".join(initiator_ports)\n (s, h) = vipr_utils.service_json_request(\n self.configuration.vipr_hostname,\n self.configuration.vipr_port, \"GET\",\n URI_BLOCK_EXPORTS_FOR_INITIATORS.format(\n comma_delimited_initiator_list),\n None)\n\n export_itl_maps = vipr_utils.json_decode(s)\n\n if export_itl_maps is None:\n return 0\n\n itls = export_itl_maps['itl']\n return itls.__len__()", "def find_ports(destination):\n output_ports = set()\n if isinstance(destination, collections.Iterable):\n for device in destination:\n # ports leading to device\n ports_towards_device = self.forwarding_table.get(\n device, self.ports)\n output_ports.update(ports_towards_device)\n else:\n output_ports.update(\n self.forwarding_table.get(destination, self.ports))\n return output_ports", "def test_get_port_sub_group_list(self):\n pass", "def test_update_port_group(self):\n pass", "def tunnel1_phase2_dh_group_numbers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:\n return pulumi.get(self, \"tunnel1_phase2_dh_group_numbers\")", "def tunnel1_phase2_dh_group_numbers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:\n return pulumi.get(self, \"tunnel1_phase2_dh_group_numbers\")", "def tunnel2_phase1_dh_group_numbers(self) -> 
Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:\n return pulumi.get(self, \"tunnel2_phase1_dh_group_numbers\")", "def tunnel2_phase1_dh_group_numbers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:\n return pulumi.get(self, \"tunnel2_phase1_dh_group_numbers\")", "def group():\n yield None, 'port grouping is determined by the global default.'\n yield False, 'ports are not grouped in an additional record.'\n yield (re.compile(r'[a-zA-Z][a-zA-Z0-9_]*'),\n 'ports are grouped in a record with the specified name.')", "def tunnel2_phase2_dh_group_numbers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:\n return pulumi.get(self, \"tunnel2_phase2_dh_group_numbers\")", "def tunnel2_phase2_dh_group_numbers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:\n return pulumi.get(self, \"tunnel2_phase2_dh_group_numbers\")", "def does_grp_rule_exist ( tgt_secgrp, new_secgrp, from_port, to_port = None, protocol = 'tcp' ) :\n from_port = str( from_port ) # Boto stores ports as strings!\n if not to_port :\n to_port = from_port\n else :\n to_port = str( to_port ) # Boto stores ports as strings!\n\n ## Verify that this rule going into the target does not exist\n for rule in tgt_secgrp.rules :\n for grant in rule.grants :\n if ( grant.group_id == new_secgrp.id\n and rule.from_port == from_port\n and rule.to_port == to_port\n and rule.ip_protocol == protocol ) :\n print \"Found incoming rule for security group \" + tgt_secgrp.name\n print \"Rule is: \" + new_secgrp.name + '-' + from_port + '-' + to_port + '-' + protocol\n return True\n\n ## Verify that this rule going out of the new secgrp does not exist\n for rule in new_secgrp.rules_egress :\n for grant in rule.grants :\n if ( grant.group_id == tgt_secgrp.id\n and rule.from_port == from_port\n and rule.to_port == to_port\n and rule.ip_protocol == protocol ) :\n print \"Found outgoing rule for security group \" + new_secgrp.name\n print \"Rule is: \" + tgt_secgrp.name + '-' + from_port + '-' + to_port + '-' + protocol\n return True\n\n return False", "def tunnel1_phase2_dh_group_numbers(self) -> pulumi.Output[Optional[Sequence[int]]]:\n return pulumi.get(self, \"tunnel1_phase2_dh_group_numbers\")", "def get_srv_ppgrp_id(self):\n pp_grp_id_lst = list()\n for srv_grp in self.srv_grp_lst:\n pp_grp_id = list()\n for srv in srv_grp:\n pp_id = (\n self.conn.network.find_port(srv['name'] + '_pt_in').id,\n self.conn.network.find_port(srv['name'] + '_pt_out').id\n )\n pp_grp_id.append(pp_id)\n pp_grp_id_lst.append(pp_grp_id)\n return pp_grp_id_lst", "def tunnel1_phase1_dh_group_numbers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:\n return pulumi.get(self, \"tunnel1_phase1_dh_group_numbers\")", "def tunnel1_phase1_dh_group_numbers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[int]]]]:\n return pulumi.get(self, \"tunnel1_phase1_dh_group_numbers\")", "def tunnel2_phase1_dh_group_numbers(self) -> pulumi.Output[Optional[Sequence[int]]]:\n return pulumi.get(self, \"tunnel2_phase1_dh_group_numbers\")", "def test_patch_port_group(self):\n pass", "def update_ports(self):\n \n # fetch only those ports having\n # VID:PID == a valid (VID, PID) pair in target_vid_pid\n ports = []\n\n for valid_pair in self.target_vid_pid:\n vid_pid = valid_pair[0] + ':' + valid_pair[1]\n ports = ports + [p for p in list_ports.grep(vid_pid)]\n #ports = list_ports.comports()\n \n # add new ports to connected_ports\n # and update new_ports\n new_ports = []\n for p in ports:\n if not p in self.connected_ports:\n 
self.connected_ports.append(p)\n new_ports.append(p)\n\n # remove missing ports from devices_found\n # and update removed_ports\n removed_ports = []\n for p in self.connected_ports:\n if not p in ports:\n self.connected_ports.remove(p)\n removed_ports.append(p)\n\n return new_ports, removed_ports", "def _get_output_fwd_group(self):\n return self.__output_fwd_group", "def _get_output_fwd_group(self):\n return self.__output_fwd_group", "def _get_output_fwd_group(self):\n return self.__output_fwd_group", "def _get_output_fwd_group(self):\n return self.__output_fwd_group" ]
[ "0.5692042", "0.56847864", "0.5607452", "0.55505884", "0.54425293", "0.53261787", "0.53103065", "0.5302931", "0.5264498", "0.52638507", "0.5077104", "0.50555277", "0.50555277", "0.50547683", "0.50547683", "0.49056864", "0.48782742", "0.48782742", "0.48311147", "0.4830629", "0.48273945", "0.48194936", "0.48194936", "0.48165855", "0.48157603", "0.48078474", "0.47323945", "0.47323945", "0.47323945", "0.47323945" ]
0.8118215
0
Changes the vpool type.
def retype(self, ctxt, volume, new_type, diff, host): self.authenticate_user() volume_name = self._get_vipr_volume_name(volume) vpool_name = new_type['extra_specs']['ViPR:VPOOL'] try: task = self.volume_obj.update( self.configuration.vipr_tenant + "/" + self.configuration.vipr_project, volume_name, vpool_name) self.volume_obj.check_for_sync(task['task'][0], True) return True except vipr_utils.SOSError as e: if e.err_code == vipr_utils.SOSError.SOS_FAILURE_ERR: raise vipr_utils.SOSError( vipr_utils.SOSError.SOS_FAILURE_ERR, "Volume " + volume_name + ": update failed\n" + e.err_text) else: with excutils.save_and_reraise_exception(): LOG.exception(_("Volume : %s type update failed") % volume_name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_type(self, new_value):\n\n self.vax_type = new_value\n self.save()", "def setType(self,newtype):\n\t\tself.type = newtype;", "def pool_selected( self, object ):\n\t\tud.debug( ud.ADMIN, ud.INFO, 'UVMM.DW.ps(node_uri=%s)' % self.node_uri)\n\t\tpool_name = object.options.get('pool-name')\n\t\tif not pool_name:\n\t\t\tpool_name = object.options['pool-name'] = 'default'\n\t\tdrive_type = object.options['drive-type']\n\t\ttry:\n\t\t\tif drive_type == 'cdrom':\n\t\t\t\tvols = self.uvmm.storage_pool_volumes(self.node_uri, pool_name, 'cdrom')\n\t\t\telse:\n\t\t\t\tvols = self.uvmm.storage_pool_volumes(self.node_uri, pool_name, 'disk' )\n\t\texcept uvmmd.UvmmError, e:\n\t\t\tvols = ()\n\t\tud.debug(ud.ADMIN, ud.INFO, 'UVMM.DW.ps: volumes=%s' % map(str, vols))\n\t\tchoices = []\n\t\tfor vol in vols:\n\t\t\tbasename = os.path.basename( vol.source )\n\t\t\tif '.' in basename:\n\t\t\t\tsuffix = basename[ basename.rfind( '.' ) + 1 : ]\n\t\t\t\tif suffix in ( 'xml', 'snapshot' ):\n\t\t\t\t\tcontinue\n\t\t\tchoices.append( basename )\n\t\tchoices.sort()\n\t\tself.image_syntax.update_choices( choices )\n\n\t\t# recreate pool button\n\t\tbtn = self._create_pool_select_button( object.options )\n\t\tself[DriveWizard.PAGE_OLD].options[0] = btn\n\t\tself[DriveWizard.PAGE_NEW].options[0] = btn\n\t\t# recreate driver-type button\n\t\titems = [self[DriveWizard.PAGE_NEW].options[2].id(), self[DriveWizard.PAGE_NEW].options[3].id()]\n\t\tbtn = self._create_type_select_button(object.options, items)\n\t\tself[DriveWizard.PAGE_NEW].options[1] = btn\n\n\t\tif drive_type == 'disk':\n\t\t\tself[DriveWizard.PAGE_OLD].hint = None\n\t\telif drive_type in ( 'cdrom', 'floppy' ):\n\t\t\tif self.image_syntax._choices:\n\t\t\t\tmsg = _( \"If the required image is not found it might be added by copying the file into the storage pool, e.g. to /var/lib/libvirt/images/ which is the directory of the storage pool <i>local directory</i>. After that go to the previous page and return to this one. The image should now be listed.\" )\n\t\t\telse:\n\t\t\t\tmsg = _( \"The list of available images is empty! To add an image the file needs to be copied into the storage pool, e.g. to /var/lib/libvirt/images/ which is the directory of the storage pool <i>local directory</i>. After that go to the previous page and return to this one. 
The image should now be listed.\" )\n\t\t\tself[DriveWizard.PAGE_OLD].hint = msg\n\t\t\tself[DriveWizard.PAGE_OLD].description = ''\n\t\telse:\n\t\t\traise ValueError('Invalid drive-type \"%s\"' % drive_type)\n\n\t\treturn self.type_selected(object)", "def set_type(self, index):\n self.set_type_obj(index)\n self.set_type_gui(index)\n # Notify the machine GUI that the machine has changed\n self.saveNeeded.emit()", "def set_type(self, _new_type):\n # Check to see if type is changing\n if _new_type == self._type:\n return\n # Move from current boid set to boid set for new type\n self.target._grid[self._grid][self._type].discard(self)\n self.target._grid[self._grid][_new_type].add(self)\n # Update type\n self._type = _new_type", "def setSolverType(*argv):", "def _set_vport_type(self, vport, layer1, imports):\n fcoe = False\n if hasattr(layer1, 'flow_control') and layer1.flow_control is not None:\n fcoe = True\n vport_type = vport['type']\n elegible_fcoe_vport_types = [\n 'ethernet', 'tenGigLan', 'fortyGigLan', 'tenGigWan',\n 'hundredGigLan', 'tenFortyHundredGigLan', 'novusHundredGigLan',\n 'novusTenGigLan', 'krakenFourHundredGigLan', 'aresOneHundredGigLan'\n ]\n if fcoe is True and vport_type in elegible_fcoe_vport_types:\n vport_type = vport_type + 'Fcoe'\n if fcoe is False and vport_type.endswith('Fcoe'):\n vport_type = vport_type.replace('Fcoe', '')\n if vport_type != vport['type']:\n imports.append({'xpath': vport['xpath'], 'type': vport_type})\n if fcoe is True and vport_type.endswith('Fcoe'):\n self._configure_fcoe(vport, layer1.flow_control, imports)\n return vport_type", "def type(self, kind):\n self.type = kind", "def changeType(self, newType):\n self.__class__ = globals()[newType + 'Format']\n self.format = self.defaultFormat\n self.initFormat()", "def _update_instance_type_for_local_mode(self):\n self.config[\"resource\"][\"private_resource\"][\"hosting_fleet\"][\"instance_type\"] = \"local\"\n self.config[\"resource\"][\"private_resource\"][\"training_fleet\"][\"instance_type\"] = \"local\"\n self.config[\"resource\"][\"private_resource\"][\"evaluation_fleet\"][\"instance_type\"] = \"local\"", "def refresh_types(self, type_clss):\n pass", "def set_type(self, type: int):\r\n self.type = type\r\n self.canvas.itemconfig(self.item, image=self._get_image())", "def transform_os_pool_to_avi_pool(self, os_pool, avi_client, context,\n driver):\n avi_pool = dict()\n\n avi_pool['cloud_ref'] = (\"/api/cloud?name=%s\" % self.avicfg.cloud)\n avi_pool['description'] = os_pool.description\n avi_pool['enabled'] = os_pool.admin_state_up\n avi_pool['lb_algorithm'] = self.dict_lb_method[os_pool.lb_algorithm]\n subkey = 'lb_algorithm_hash'\n if avi_pool['lb_algorithm'] == 'LB_ALGORITHM_CONSISTENT_HASH':\n avi_pool[subkey] = 'LB_ALGORITHM_CONSISTENT_HASH_SOURCE_IP_ADDRESS'\n\n avi_tenant_uuid = os2avi_uuid(\"tenant\", os_pool.tenant_id)\n\n # add ssl profile if protocol is HTTPS\n avi_pool[\"ssl_profile_ref\"] = None\n if os_pool.protocol == \"HTTPS\":\n avi_pool[\"ssl_profile_ref\"] = self.get_avi_ssl_profile_ref(\n \"System-Standard\", avi_client, avi_tenant_uuid)\n\n # add members\n avi_pool['servers'] = []\n snws = {}\n if os_pool.members:\n servers = []\n for member in os_pool.members:\n if member.provisioning_status == \"PENDING_DELETE\":\n continue\n avi_svr, snw = self.transform_member(member, os_pool,\n context=context,\n driver=driver)\n if snw:\n snws[snw['id']] = snw\n servers.append(avi_svr)\n\n avi_pool[\"servers\"] = servers\n\n if self.avicfg.use_placement_network_for_pool and snws:\n 
plcmntnws = []\n for snw in snws.values():\n addr, mask = snw['cidr'].split('/')\n pnw = {\n \"network_ref\": snw['network_id'],\n \"subnet\": {\n \"ip_addr\": {\n \"addr\": addr,\n \"type\": ('V4' if snw['ip_version'] == 4\n else 'V6'),\n },\n \"mask\": mask,\n },\n }\n plcmntnws.append(pnw)\n\n avi_pool['placement_networks'] = plcmntnws\n\n metainfo = {}\n lb = None\n if getattr(os_pool, 'loadbalancer_id', None):\n lb = driver.objfns.loadbalancer_get(context,\n os_pool.loadbalancer_id)\n else:\n ll = driver.objfns.listener_get(context, os_pool.listener.id)\n lb = driver.objfns.loadbalancer_get(context, ll.loadbalancer_id)\n\n if lb:\n flvid = getattr(lb, 'flavor_id', None)\n if flvid:\n metainfo = driver.objfns.get_metainfo_from_flavor(\n context, flvid)\n\n if (getattr(self.avicfg, 'vrf_context_per_subnet', False) or\n (metainfo and metainfo.get('vrf_context_per_subnet', False))):\n subnet_uuid = None\n if lb:\n subnet_uuid = lb.vip_subnet_id\n else:\n subnet_uuid = driver.objfns.get_vip_subnet_from_listener(\n context, os_pool.listener.id)\n\n vrf_context = get_vrf_context(subnet_uuid, self.avicfg.cloud,\n avi_tenant_uuid, avi_client)\n if vrf_context:\n avi_pool['vrf_ref'] = vrf_context['url']\n\n # add healthmonitor\n avi_pool[\"health_monitor_refs\"] = []\n os_hm = os_pool.healthmonitor\n if (os_hm and os_hm.admin_state_up and\n os_hm.provisioning_status == \"ACTIVE\"):\n hm_uuid = os2avi_uuid(\"healthmonitor\", os_hm.id)\n hm_tenant_uuid = os2avi_uuid(\"tenant\", os_hm.tenant_id)\n try:\n hm = avi_client.get(\"healthmonitor\", hm_uuid, hm_tenant_uuid)\n except ObjectNotFound:\n self.log.warn(\"Healthmonitor %s not found; creating\", hm_uuid)\n hm_def = self.transform_os_hm_to_avi_hm(os_hm)\n hm = avi_client.create(\"healthmonitor\", hm_def,\n hm_tenant_uuid)\n avi_pool[\"health_monitor_refs\"] = [hm[\"url\"]]\n\n # session persistence\n os_persist = os_pool.session_persistence\n avi_pool['application_persistence_profile_ref'] = None\n if os_persist:\n pkey = os_persist.type\n if pkey == 'APP_COOKIE':\n persist_profile_uuid = os2avi_uuid(\n \"applicationpersistenceprofile\", os_pool.id)\n try:\n persist_profile = avi_client.get(\n \"applicationpersistenceprofile\", persist_profile_uuid,\n avi_tenant_uuid)\n updated_persist_profile = copy.deepcopy(persist_profile)\n self.transform_appcookie(os_pool,\n updated_persist_profile)\n if updated_persist_profile != persist_profile:\n persist_profile = avi_client.update(\n \"applicationpersistenceprofile\",\n persist_profile_uuid,\n updated_persist_profile,\n avi_tenant_uuid\n )\n except ObjectNotFound:\n persist_profile_def = self.transform_appcookie(os_pool)\n persist_profile_def[\"uuid\"] = persist_profile_uuid\n persist_profile = avi_client.create(\n \"applicationpersistenceprofile\", persist_profile_def,\n avi_tenant_uuid\n )\n ref = persist_profile[\"url\"]\n else:\n ref = (\"/api/applicationpersistenceprofile?name=\" +\n self.dict_persist_profile_name[pkey])\n avi_pool['application_persistence_profile_ref'] = ref\n return avi_pool", "def set_pool_size(self, pool_size):\n self._aspp.set_pool_size(pool_size)", "def type(self, type):\n\n self.container['type'] = type", "def type(self, type):\n\n self.container['type'] = type", "def __init__(__self__, *,\n type: Optional[pulumi.Input[Union[str, 'VNetSolutionType']]] = None):\n if type is not None:\n pulumi.set(__self__, \"type\", type)", "def set_pool_size(self, pool_size):\n self._semantic_decoder.set_pool_size(pool_size)\n if self._instance_decoder is not None:\n 
self._instance_decoder.set_pool_size(pool_size)", "def type_selected(self, object):\n\t\tdriver_type = object.options['driver-type']\n\t\tvol_name = object.options.get('vol-name-new', None)\n\t\tud.debug(ud.ADMIN, ud.INFO, 'UVMM.DW.ts(type=%s name=%s)' % (driver_type, vol_name))\n\t\tif vol_name: # reuse existing image name\n\t\t\tbase_name = vol_name.split('.', 1)[0]\n\t\t\tif driver_type == 'RAW':\n\t\t\t\tvol_name = '%s' % base_name\n\t\t\telse:\n\t\t\t\tvol_name = '%s.%s' % (base_name, driver_type)\n\t\telse: # generate new image name\n\t\t\tif driver_type == 'RAW':\n\t\t\t\tsuffix = ''\n\t\t\telse:\n\t\t\t\tsuffix = '.%s' % driver_type\n\t\t\ttry:\n\t\t\t\tvol_name = self.uvmm.next_drive_name(self.node_uri, self.domain_name, suffix=suffix, temp_drives=self.blacklist)\n\t\t\texcept uvmmd.UvmmError, e:\n\t\t\t\tvol_name = 'ERROR'\n\t\tobject.options['vol-name-new'] = vol_name\n\t\treturn self[self.current]", "def update_listener_pool(self, service, name, bigips):\n vip = self.service_adapter.get_virtual_name(service)\n if vip:\n vip[\"pool\"] = name\n for bigip in bigips:\n v = bigip.tm.ltm.virtuals.virtual\n if v.exists(name=vip[\"name\"], partition=vip[\"partition\"]):\n obj = v.load(name=vip[\"name\"], partition=vip[\"partition\"])\n obj.modify(**vip)", "def defineType(name,numSites,bindsTo,symmetric,maxCount):\n\t\t\n\ttypePrimitive.append([name,numSites,bindsTo,symmetric,maxCount])\n\treturn", "def setType(self, type):\n\t\tif not self.Loaded:\n\t\t\tself.type = type\n\t\t\tself.loader = NetLoader.getNetwork(type)\n\t\t\tself.isTypeSet = True", "def update_pool(self, pool, body=None):\r\n return self.put(self.pool_path % (pool), body=body)", "def submit_vserver_limit(vs_host, type_instance, value):\n submit_generic(vs_host, 'context', 'vs_vlimit', value, type_instance)", "def newPool(name: str, superPool, types: [], cls):\n try:\n if name == \"colorholder\":\n superPool = P0(len(types), cls)\n return superPool\n elif name == \"abstractnode\":\n superPool = P1(len(types), cls)\n return superPool\n elif name == \"node\":\n superPool = P2(len(types), superPool, cls)\n return superPool\n \n elif name == \"subnode\":\n superPool = P3(len(types), superPool, cls)\n return superPool\n \n else:\n if superPool is None:\n superPool = BasePool(len(types), name, StoragePool.noKnownFields, StoragePool.noAutoFields, cls)\n else:\n superPool = superPool.makeSubPool(len(types), name, cls)\n return superPool\n finally:\n types.append(superPool)", "def _list_pool_vm(args):\n _logger.debug('_list_pool_vm')\n #\n #\n _data_struct = {'name': {'head': 'Name', 'func': 'name', 'type': 'str'},\n 'uuid': {'head': 'UUID', 'func': 'UUIDString', 'type': 'str'},\n 'autostart': {'head': 'Autostart', 'func': 'autostart', 'type': 'yesno', 'convert': get_yesno},\n 'active': {'head': 'Active', 'func': 'isActive', 'type': 'yesno', 'convert': get_yesno},\n 'persistent': {'head': 'Persistent', 'func': 'isPersistent', 'type': 'yesno', 'convert': get_yesno},\n 'volumes': {'head': 'Volumes', 'func': 'numOfVolumes', 'type': 'int'},\n 'state': {'head': 'State', 'func': 'info', 'type': 'list', 'index': 0, 'convert': get_pool_state},\n 'capacity': {'head': 'Capacity', 'func': 'info', 'type': 'list', 'index': 1, 'convert': format_size},\n 'allocation': {'head': 'Allocation', 'func': 'info', 'type': 'list', 'index': 2, 'convert': format_size},\n 'available': {'head': 'Available', 'func': 'info', 'type': 'list', 'index': 3, 'convert': format_size},\n 'type': {'head': 'Type', 'func': None, 'type': 'str'}\n }\n #\n # get the 
pools\n _sps_fs, _sps_netfs = _get_pools()\n _sps = _sps_fs + _sps_netfs\n if len(_sps) == 0:\n _logger.info('No pools found.')\n return\n #\n # initialise the column widths\n _data_struct = initalise_column_lengths(_data_struct)\n #\n # column cantains only 'fs' or 'net fs'\n _data_struct['type']['len'] = 6\n #\n # format data and determine optimal length of fields.\n pool_data = list()\n for _sp in _sps:\n _sp_data = dict()\n for key, value in _data_struct.items():\n value_data = get_value_data(_sp, _data_struct[key])\n _sp_data[key] = value_data[0]\n val_length = value_data[1]\n _data_struct[key]['collen'] = max(val_length, _data_struct[key]['collen'])\n _sp_data['type'] = 'fs' if _sp in _sps_fs else 'net fs'\n pool_data.append(_sp_data)\n #\n # compose data\n _title = 'VM pool Information:'\n _columns = list()\n for key, value in _data_struct.items():\n _columns.append([value['head'], value['collen']+2, key])\n #\n printerKlass = get_row_printer_impl(args.output_mode)\n printer = printerKlass(title=_title, columns=_columns)\n printer.printHeader()\n #\n # print\n for _sp in pool_data:\n printer.rowBreak()\n printer.printRow(_sp)\n printer.printFooter()\n printer.finish()\n return", "def upgrade():\n with create_session() as session:\n session.query(TaskInstance).filter(TaskInstance.pool.is_(None)).update(\n {TaskInstance.pool: 'default_pool'}, synchronize_session=False\n ) # Avoid select updated rows\n session.commit()\n\n conn = op.get_bind()\n if conn.dialect.name == \"mssql\":\n op.drop_index('ti_pool', table_name='task_instance')\n\n # use batch_alter_table to support SQLite workaround\n with op.batch_alter_table('task_instance') as batch_op:\n batch_op.alter_column(\n column_name='pool',\n type_=sa.String(50),\n nullable=False,\n )\n\n if conn.dialect.name == \"mssql\":\n op.create_index('ti_pool', 'task_instance', ['pool', 'state', 'priority_weight'])", "def use(self, compo_type):\n self.compo_type = compo_type", "def create_vlan_pool(self, vlan_pool_name, allocation_mode):\n VlanInstP_mo = VlanInstP('uni/infra/', vlan_pool_name, allocation_mode)\n self.commit(VlanInstP_mo)\n return VlanInstP_mo", "def change_type(self, change_type):\n\n self._change_type = change_type" ]
[ "0.5848784", "0.57391757", "0.5723897", "0.561624", "0.5591289", "0.5518067", "0.5517167", "0.546704", "0.54359967", "0.54287094", "0.54124904", "0.53810906", "0.53711545", "0.52850056", "0.52722555", "0.52722555", "0.52640766", "0.5241069", "0.5206191", "0.51765263", "0.51639396", "0.51513994", "0.5139351", "0.51272243", "0.5110979", "0.5101768", "0.5082095", "0.50807685", "0.50776", "0.5049371" ]
0.64140636
0
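The `_list_pool_vm` document above walks storage pools and tabulates name, UUID, state, capacity and volume counts through libvirt-style calls (`isActive`, `numOfVolumes`, `info`). A minimal sketch of the same enumeration, assuming the libvirt-python bindings and a local `qemu:///system` hypervisor — both assumptions, not stated in the record:

import libvirt  # assumed dependency: the libvirt-python bindings

conn = libvirt.open('qemu:///system')  # assumed URI; adjust for the target host
for pool in conn.listAllStoragePools():
    # info() returns [state, capacity, allocation, available], sizes in bytes
    state, capacity, allocation, available = pool.info()
    active = bool(pool.isActive())
    # Volume listing requires an active pool
    volumes = pool.numOfVolumes() if active else 0
    print(f'{pool.name():<20} active={active} volumes={volumes} '
          f'capacity={capacity} available={available}')
conn.close()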
Will iterate through the data to find the indices of the oldest and youngest people.
def find_people(data):
    # Indices of the youngest (age must be > 0) and oldest (age must be < 80) entries.
    youngest_idx = 0
    oldest_idx = 0
    for index, item in enumerate(data):
        if item['age'] < data[youngest_idx]['age'] and item['age'] > 0:
            youngest_idx = index
        if item['age'] > data[oldest_idx]['age'] and item['age'] < 80:
            oldest_idx = index
    return youngest_idx, oldest_idx
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def youngest():\n def get_age(person_list):\n return person_list['age']\n return sorted(PEOPLE_LIST, key = get_age)", "def get_youngest_student(students):\n youngest_index = 0 \n youngest = students[0][3]\n for counter, row in enumerate(students[1:], 1):\n if int(row[3]) > int(youngest):\n youngest = students[counter][3]\n youngest_index = counter \n return students[youngest_index]", "def oldest():\n def get_age(person_list):\n return person_list['age']\n return sorted(PEOPLE_LIST, key = get_age, reverse=True)", "def youngest():\n # fill it out\n newlist = sorted(PEOPLE_LIST, key=itemgetter('age'))\n return newlist", "def get_oldest_student(students):\n oldest_index = 0 \n oldest = students[0][3]\n for counter, row in enumerate(students[1:], 1):\n if int(row[3]) < int(oldest):\n oldest = students[counter][3]\n oldest_index = counter \n return students[oldest_index]", "def forbes():\n oldest = {}\n youngest = {}\n\n for entry in data:\n age = entry['age']\n if not oldest or (age > oldest['age']) and (age < 80):\n oldest.update(entry)\n if not youngest or (age < youngest['age']) and (age > 0):\n youngest.update(entry)\n return \"\"\"\nOldest: Name: %s, Net Worth: %d, Industry: %s , Age: %s\nYoungest: Name: %s, Net Worth: %d, Industry: %s, Age: %s\n\"\"\" % (\n oldest['name'], oldest['net_worth (USD)'], oldest['source'], oldest['age'],\n youngest['name'], youngest['net_worth (USD)'], youngest['source'], youngest['age'],\n )", "def youngest(self):\n # Your implementation here", "def oldest():\n # fill it out\n newlist = sorted(PEOPLE_LIST, key=itemgetter('age'), reverse=True)\n return newlist", "def user_birth_statistics(df):\n birth_year=df['Birth Year']\n #most common birth year\n most_common_birth_year=birth_year.value_counts().idxmax()\n print('Most common birth year is: ',most_common_birth_year)\n \n #most recent birth year\n most_recent_birth_year=birth_year.max()\n print('The most recent birth year is: ',most_recent_birth_year)\n \n #most earliest birth year\n most_earliest_birth_year =birth_year.min()\n print('Most earliest birth year is: ',most_earliest_birth_year)", "def maxQualifiedIndex(self, indices):\n entry = self.getConfig()\n # the leader keep its own record updated to the newest\n indices[self.datacenter_id] = len(self.log) - 1\n # print('!!!!!', indices)\n if entry['config'] == 'single':\n return sorted([indices[x] for x in entry['data']])[(len(entry['data'])-1)/2]\n maxOld = sorted([indices[x] for x in entry['data'][0]])[(len(entry['data'][0])-1)/2]\n maxNew = sorted([indices[x] for x in entry['data'][1]])[(len(entry['data'][1])-1)/2]\n return min(maxOld, maxNew)", "def get_same_or_newer(start_date):\n#\tdata = get_file_lines(FILE_URL) ## Moved up & out of the function\n#\treader = csv.reader(data[1:])\n\n\treader1 = csv.reader(data[1:])\t## Changed the above to two lines to these two\n\treader = sorted(reader1, key=operator.itemgetter(3))\n\t\n # We want all employees that started at the same date or the closest newer\n # date. 
To calculate that, we go through all the data and find the\n # employees that started on the smallest date that's equal or bigger than\n # the given start date.\n\tmin_date = datetime.datetime.today()\n\tmin_date_employees = []\n\tfor row in reader: \n\t\trow_date = datetime.datetime.strptime(row[3], '%Y-%m-%d')\n\n # If this date is smaller than the one we're looking for,\n # we skip this row\n\t\tif row_date < start_date:\n\t\t\tcontinue\n\n # If this date is smaller than the current minimum,\n # we pick it as the new minimum, resetting the list of\n # employees at the minimal date.\n\t\tif row_date < min_date:\n\t\t\tmin_date = row_date\n\t\t\tmin_date_employees = []\n\t\n\treturn min_date, min_date_employees", "def _findMaxIndex(data, mark):\n # assume the maximum value is at initial mark position\n maxIndex = mark\n # loop over the remaining positions greater than the mark\n for mark in range(mark+1, len(data)):\n # if a bigger value is found, record its index\n if data[mark][1][2] > data[maxIndex][1][2]:\n maxIndex = mark\n return maxIndex", "def first_unique_local_maximum_of_derivative(data):\n\tfirst_unique_local_maximum_index = 1\n\tlast_delta = 0\n\t\n\tfor i in range(1, len(data) - 1):\n\t\tcurrent_delta = data[i] - data[i - 1]\n\t\t\n\t\tif current_delta >= last_delta:\n\t\t\tfirst_unique_local_maximum_index += 1\n\t\t\tlast_delta = current_delta\n\t\telse:\n\t\t\tbreak\n\t\n\treturn first_unique_local_maximum_index", "def oldest_person_nt(all_profile_nt: namedtuple) -> float:\n \"\"\"Param: all_profile_nt: Named tuple containing all profiles\"\"\"\n value = min(all_profile_nt, key=lambda v: v[-1])\n date_today = datetime.date.today()\n age = (date_today - value.birthdate).days\n return int(age/365)", "def get_best(self):\n scores, ids = self.sort_best()\n return scores[1], ids[1]", "def most_earned(table):\n money_earned = employees_earning(table)\n temp = 0\n for employee in money_earned:\n if money_earned[employee] > temp:\n temp = money_earned[employee]\n most_earned_employee = str(employee) + \":\" + str(money_earned[employee])\n return most_earned_employee", "def relevant_indexes(data, min_threshold):\n\n start_index = 1\n end_index = len(data) - 1\n\n for i in range(len(data)):\n if data[i] > min_threshold:\n start_index = i\n break\n\n for i in range(len(data)):\n if data[::-1][i] > min_threshold:\n end_index = i\n break\n\n return start_index, end_index", "def findPoz(self, insDay):\r\n poz = 0\r\n for day in self._repo:\r\n if insDay.numberOfActivities > day.numberOfActivities or insDay.numberOfActivities == day.numberOfActivities and insDay.date > day.date:\r\n return poz\r\n poz += 1\r\n return poz", "def find_max(weather_data):\n if len(weather_data) == 0:\n return()\n\n value = float(weather_data[0])\n position = 0\n\n for index, weather in enumerate(weather_data):\n if float(weather) >= value:\n value= float(weather)\n position = index\n\n return(value, position)", "def major_vote( dts, record ):\n votes = {}\n for dt in dts:\n p = dt.vote( record )\n votes[ p ] = votes.get(p, 0) + 1\n best, max_vote = None, 0\n for k in votes:\n if votes[k] > max_vote:\n max_vote = votes[k]\n best = k\n return best", "def oldest_ow_instance(ow_launch_data):\n log.info(\"oldest_ow_instance( %s )\", ow_launch_data)\n sorted_ow_launch_data = sorted(ow_launch_data.items(), key=lambda x: x[1])\n log.info(\"sorted_ow_launch_data = %s\", sorted_ow_launch_data)\n oldest_ow_instance = sorted_ow_launch_data[0]\n ow_instance_id, launch_time = oldest_ow_instance\n log.info(\"ow_instance_id 
= %s, ow_launch_data = %s\",\n ow_instance_id, ow_launch_data)\n print(\"Oldest OW instance ==> {}\".format(ow_instance_id))\n log.info(\"Oldest OW instance ==> %s\", ow_instance_id)\n return ow_instance_id", "def is_most_recent_location(self, person):", "def _find_newest_update_by_location(updates: Iterable) -> Iterable:\n d = defaultdict(list)\n for update in updates:\n d[update[\"location\"]].append(update)\n\n for k, v in d.items():\n d[k] = max(v, key=lambda x: x[\"date\"])\n\n return d.values()", "def oldest_txo(self) -> int:", "def get_most_popular(self):\n\t\tpopular_rated = self.data_final[self.data_final['Rating'] == 10]\n\t\tpopular_jokes = popular_rated.groupby('JokeID').count().reset_index()\n\t\tpopular_jokes = popular_jokes[['JokeID','Rating']]\n\t\tpopular_jokes.columns = ['JokeID','Number_rated10']\n\t\ttop_joke = popular_jokes.sort_values(by=['Number_rated10'], ascending=False).head(1)\n\t\ttop_joke_val = top_joke['JokeID'].values[0]\n\t\tjokes_list = sorted(set(self.data_final['JokeID']))\n\t\tjoke_num = jokes_list.index(top_joke_val)\n\t\ttop_joke_desc = self.data_jokes[self.data_jokes['JokeID'] == top_joke_val].values[0][1]\n\n\t\treturn top_joke_desc, joke_num", "def most_recent_poll_row(poll_rows, pollster, state):\n temp_poll = poll_rows[:]\n i=0\n for poll in temp_poll[:]: #removes all polls with pollsters other than input\n if poll['Pollster'] != pollster:\n del temp_poll[i]\n i -=1\n i +=1\n i=0\n for poll in temp_poll[:]: #removes all polls with states other than input\n if poll['State'] != state:\n del temp_poll[i]\n i -=1\n i +=1\n if len(temp_poll) == 0: #returns none if no polls meet criteria\n return None\n else:\n temp_max = \"Jan 01 1000\" #arbitrary starting comparison date\n for n in temp_poll: #checks temp_max for most recent date\n if earlier_date(temp_max, n['Date'])==True:\n temp_max = n['Date'] #temp_max becomes most recent date\n else: pass\n for n in temp_poll:\n if n['Date'] == temp_max:\n most_recent_poll = n\n return most_recent_poll\n else: continue", "def personal_best(scores):\n# return sorted(scores, reverse=True)[0]\n return max(scores)", "def minmax(data):\n smallest = data[0]\n largest = data[0]\n\n for i in range(0,len(data)):\n if data[i] < smallest:\n smallest = data[i]\n elif data[i] > largest:\n largest = data[i]\n\n return(smallest,largest)", "def find_max(data):\n index = 0\n res = data[index]\n for i in range(1, len(data)):\n if data[i] > res:\n res = float(data[i])\n index = i\n else:\n break\n return res, index", "def h_index(citations):\n citations.sort(reverse=True)\n for i, citation in enumerate(citations):\n if i <= citation:\n return i" ]
[ "0.6441285", "0.643465", "0.6179866", "0.6166724", "0.616368", "0.61431545", "0.60194755", "0.5788272", "0.57452214", "0.56919736", "0.5552204", "0.55210495", "0.5455431", "0.5422648", "0.5338053", "0.5331254", "0.52707314", "0.524333", "0.5236064", "0.5232713", "0.519718", "0.5178651", "0.5165715", "0.51595986", "0.51550233", "0.5128074", "0.51154876", "0.5115416", "0.51146597", "0.5112765" ]
0.78464746
0
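The `find_people` document above tracks both indices in a single pass. The same bounds (youngest must have age > 0, oldest must have age < 80) can also be expressed with `min`/`max` over `enumerate`; a sketch with hypothetical sample data — note that, unlike the original, this raises ValueError when no entry passes a filter:

def find_people_minmax(data):
    youngest_idx = min((i for i, p in enumerate(data) if p['age'] > 0),
                       key=lambda i: data[i]['age'])
    oldest_idx = max((i for i, p in enumerate(data) if p['age'] < 80),
                     key=lambda i: data[i]['age'])
    return youngest_idx, oldest_idx

people = [{'name': 'A', 'age': 34}, {'name': 'B', 'age': 7}, {'name': 'C', 'age': 71}]
assert find_people_minmax(people) == (1, 2)  # B is youngest, C is oldest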
Collapses a directed multigraph into a networkx directed graph. In the output directed graph, each node is a number, which contains itself as node_data['node'], while each edge contains a list of the data from the original edges as its attribute (edge_data[0...N]).
def collapse_multigraph_to_nx(graph: Union[gr.MultiDiGraph, gr.OrderedMultiDiGraph]) -> nx.DiGraph:
    # Create the digraph nodes.
    digraph_nodes: List[Tuple[int, Dict[str, nd.Node]]] = ([None] * graph.number_of_nodes())
    node_id = {}
    for i, node in enumerate(graph.nodes()):
        digraph_nodes[i] = (i, {'node': node})
        node_id[node] = i

    # Create the digraph edges.
    digraph_edges = {}
    for edge in graph.edges():
        src = node_id[edge.src]
        dest = node_id[edge.dst]

        if (src, dest) in digraph_edges:
            edge_num = len(digraph_edges[src, dest])
            digraph_edges[src, dest].update({edge_num: edge.data})
        else:
            digraph_edges[src, dest] = {0: edge.data}

    # Create the digraph
    result = nx.DiGraph()
    result.add_nodes_from(digraph_nodes)
    result.add_edges_from(digraph_edges)
    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reconstruct_network_from_dgraph_json(data):\n G = nx.MultiDiGraph(crs=ox.settings.default_crs)\n for node in data:\n if \"location\" in node:\n attributes = node.copy()\n attributes[\"x\"] = attributes[\"location\"][\"coordinates\"][0]\n attributes[\"y\"] = attributes[\"location\"][\"coordinates\"][1]\n attributes.pop(\"location\", 0)\n attributes.pop(\"connects_to\", 0)\n G.add_node(node[\"uid\"], **attributes)\n for node in data:\n if \"connects_to\" in node:\n node_uid = node[\"uid\"]\n if isinstance(node[\"connects_to\"], list):\n for neighbor in node[\"connects_to\"]:\n neighbor_uid = neighbor[\"uid\"]\n if neighbor_uid in G.nodes:\n G.add_edge(node_uid, neighbor_uid)\n else:\n neighbor_uid = node[\"connects_to\"][\"uid\"]\n if neighbor_uid in G.nodes:\n G.add_edge(node_uid, neighbor_uid)\n return G", "def multi_edge():\n from networkx.readwrite import json_graph\n import networkx as nx\n import autonetkit\n # returns a house graph\n data = {'directed': False,\n 'graph': [],\n 'links': [{'_ports': {'r4': 2, 'r5': 1},\n 'raw_interfaces': {},\n 'source': 0,\n 'target': 1},\n {'_ports': {'r2': 3, 'r4': 1},\n 'raw_interfaces': {},\n 'source': 0,\n 'target': 3},\n {'_ports': {'r2': 4, 'r4': 3},\n 'raw_interfaces': {},\n 'source': 0,\n 'target': 3},\n {'_ports': {'r3': 3, 'r5': 2},\n 'raw_interfaces': {},\n 'source': 1,\n 'target': 4},\n {'_ports': {'r1': 1, 'r2': 1},\n 'raw_interfaces': {},\n 'source': 2,\n 'target': 3},\n {'_ports': {'r1': 3, 'r2': 5},\n 'raw_interfaces': {},\n 'source': 2,\n 'target': 3},\n {'_ports': {'r1': 2, 'r3': 1},\n 'raw_interfaces': {},\n 'source': 2,\n 'target': 4},\n {'_ports': {'r1': 4, 'r3': 4},\n 'raw_interfaces': {},\n 'source': 2,\n 'target': 4},\n {'_ports': {'r1': 5, 'r3': 5},\n 'raw_interfaces': {},\n 'source': 2,\n 'target': 4},\n {'_ports': {'r2': 2, 'r3': 2},\n 'raw_interfaces': {},\n 'source': 3,\n 'target': 4}],\n 'multigraph': True,\n 'nodes': [{'_ports': {0: {'category': 'physical', 'description': None},\n 1: {'category': 'physical', 'description': 'r4 to r2', 'id': 'eth0'},\n 2: {'category': 'physical', 'description': 'r4 to r5', 'id': 'eth1'},\n 3: {'category': 'physical', 'description': 'r4 to r2', 'id': 'eth2'}},\n 'asn': 2,\n 'device_type': 'router',\n 'id': 'r4',\n 'label': 'r4',\n 'x': 675,\n 'y': 300},\n {'_ports': {0: {'category': 'physical', 'description': None},\n 1: {'category': 'physical', 'description': 'r5 to r4', 'id': 'eth0'},\n 2: {'category': 'physical', 'description': 'r5 to r3', 'id': 'eth1'}},\n 'asn': 2,\n 'device_type': 'router',\n 'id': 'r5',\n 'label': 'r5',\n 'x': 675,\n 'y': 500},\n {'_ports': {0: {'category': 'physical', 'description': None},\n 1: {'category': 'physical', 'description': 'r1 to r2', 'id': 'eth0'},\n 2: {'category': 'physical', 'description': 'r1 to r3', 'id': 'eth1'},\n 3: {'category': 'physical', 'description': 'r1 to r2', 'id': 'eth2'},\n 4: {'category': 'physical', 'description': 'r1 to r3', 'id': 'eth3'},\n 5: {'category': 'physical', 'description': 'r1 to r3', 'id': 'eth4'}},\n 'asn': 1,\n 'device_type': 'router',\n 'id': 'r1',\n 'label': 'r1',\n 'x': 350,\n 'y': 400},\n {'_ports': {0: {'category': 'physical', 'description': None},\n 1: {'category': 'physical', 'description': 'r2 to r1', 'id': 'eth0'},\n 2: {'category': 'physical', 'description': 'r2 to r3', 'id': 'eth1'},\n 3: {'category': 'physical', 'description': 'r2 to r4', 'id': 'eth2'},\n 4: {'category': 'physical', 'description': 'r2 to r4', 'id': 'eth3'},\n 5: {'category': 'physical', 'description': 'r2 to r1', 'id': 'eth4'}},\n 
'asn': 1,\n 'device_type': 'router',\n 'id': 'r2',\n 'label': 'r2',\n 'x': 500,\n 'y': 300},\n {'_ports': {0: {'category': 'physical', 'description': None},\n 1: {'category': 'physical', 'description': 'r3 to r1', 'id': 'eth0'},\n 2: {'category': 'physical', 'description': 'r3 to r2', 'id': 'eth1'},\n 3: {'category': 'physical', 'description': 'r3 to r5', 'id': 'eth2'},\n 4: {'category': 'physical', 'description': 'r3 to r1', 'id': 'eth3'},\n 5: {'category': 'physical', 'description': 'r3 to r1', 'id': 'eth4'}},\n 'asn': 1,\n 'device_type': 'router',\n 'id': 'r3',\n 'label': 'r3',\n 'x': 500,\n 'y': 500}]}\n graph = json_graph.node_link_graph(data)\n anm = autonetkit.anm.NetworkModel()\n g_in = anm.add_overlay(\"input\")\n g_in._replace_graph(nx.MultiGraph(graph))\n # TODO: check if should build overlays here rather than clone in?\n g_phy = anm[\"phy\"]\n g_phy._replace_graph(graph)\n return anm", "def graph_data(\n edge_list_path,\n node_features_path,\n protein_ids_path,\n protein_id_col_node=\"Gene\",\n protein_id_col_prot=\"ensembl.gene\",\n sparse_tensor=True,\n cut=0,\n):\n a = pd.read_csv(edge_list_path).values\n edge_attr = a[:, 2:] / 1000.0\n\n # cut the edges\n cut_mask = edge_attr[:, -1] > cut\n edge_ind = torch.tensor(a[:, :2][cut_mask], dtype=torch.long)\n edge_attr = torch.tensor(edge_attr[cut_mask], dtype=torch.float32)\n\n # force undirected\n if not is_undirected(edge_ind):\n edge_ind = torch.cat([edge_ind, edge_ind[:, [1, 0]]], 0)\n edge_attr = torch.cat([edge_attr, edge_attr], 0)\n\n # features\n protein_ids = pd.read_csv(protein_ids_path, sep=\"\\t\")[\n [\"id\", protein_id_col_prot]\n ]\n x = pd.read_csv(node_features_path, sep=\"\\t\")\n feature_columns = x.drop(protein_id_col_node, 1).columns\n x = pd.merge(\n protein_ids,\n x,\n how=\"left\",\n left_on=protein_id_col_prot,\n right_on=protein_id_col_node,\n ).sort_values(\"id\")[feature_columns]\n x.fillna(x.mean(), inplace=True)\n x = torch.tensor(((x - x.mean()) / x.std()).values, dtype=torch.float32)\n data = Data(x, edge_ind.T, edge_attr, id=torch.arange(x.shape[0]))\n\n if sparse_tensor:\n tsp = ToSparseTensor(False)\n data = tsp(data)\n\n return data", "def d3_forced_layout(self, data=[]):\n if not self.d3:\n raise NotImplementedError\n\n edges = []\n nodes = []\n edge_ids = []\n\n for edge in self.graph.edges(data=True):\n extra = {datum: edge[2][datum] for datum in data}\n edges.append({\n 'source': edge[0],\n 'target': edge[1],\n 'data': extra\n })\n edge_ids.extend([edge[0], edge[1]])\n edge_ids = sorted(list(set(edge_ids)))\n for edge_id in edge_ids:\n nodes.append(self.graph.node[edge_id])\n return edges, nodes", "def build_graph(edges):\n \n G = nx.MultiGraph()\n G.add_edges_from(edges)\n return G", "def direct_network(self):\n #print list(self.get_subgraphs())\n graphs = [self._depth_first_directed(g) for g in self.get_subgraphs()]\n self._network = reduce(lambda a, b: nx.union(a, b), graphs)", "def network_to_dgraph_json(network):\n\n uid_prefix = \"_:\"\n node_uid_prefix = uid_prefix + \"node\"\n\n json_mutations = []\n for node, attributes in network.nodes.items():\n json_mutations.append({\n \"uid\": node_uid_prefix + str(attributes.get(\"osmid\", attributes.get(\"osmids\"))),\n \"osmids\": str(attributes.get(\"osmid\", attributes.get(\"osmids\"))),\n \"location\": {\n \"type\": \"Point\",\n \"coordinates\": [\n attributes[\"x\"],\n attributes[\"y\"]\n ]\n }\n })\n for node, neighbors in network.adj.items():\n for neighbor, edges in neighbors.items():\n json_mutations.append({\n \"uid\": 
node_uid_prefix + str(network.nodes[node].get(\"osmid\", network.nodes[node].get(\"osmids\"))),\n \"connects_to\": {\n \"uid\": node_uid_prefix + str(network.nodes[neighbor].get(\"osmid\", network.nodes[neighbor].get(\"osmids\"))),\n \"connects_to|osmids\": \":\".join(str(edge_attributes.get(\"osmid\", edge_attributes.get(\"osmids\"))) for edge, edge_attributes in edges.items() if \"osmid\" in edge_attributes or \"osmids\" in edge_attributes)\n }\n })\n return json_mutations", "def get_transpose_graph(graph: Graph):\n transpose: Graph = {node: set() for node in graph.keys()}\n for node, target_nodes in graph.items():\n for target_node in target_nodes:\n transpose[target_node].add(node)\n return transpose", "def edge_list_build(input_path, output_path):\n\n start_time = time.time()\n\n df = pd.read_csv(input_path, sep='\\t', header=None)\n\n for col in range(1, len(df.columns)):\n df.iloc[:, col] = df.iloc[:, col-1] + '_' + df.iloc[:, col]\n\n n_divs = len(df.columns) - 1\n\n\n dict_node_names = {}\n\n for id, node_name in enumerate(np.unique(df.values.flatten())):\n dict_node_names[node_name] = id + 1\n\n tmp_df = pd.DataFrame.from_dict(dict_node_names, orient='index')\n tmp_df.reset_index(inplace=True)\n tmp_df.rename({'index': 'nodes', 0: 'hash'}, inplace=True, axis=1)\n\n hash_df = tmp_df['nodes'].str.split('_', n=n_divs, expand=True)\n hash_df = pd.concat([hash_df, tmp_df['hash']], axis=1)\n\n for col_name in df.columns:\n df[col_name] = df[col_name].map(dict_node_names)\n\n df['root'] = 0\n colnames = df.columns.values\n colnames = list(colnames[-1:]) + list(colnames[:-1])\n df = df[colnames]\n\n df_tuples = pd.DataFrame()\n\n for i in range(len(df.columns) - 1):\n df_tuples[i] = list(df[df.columns[i:i + 2]].itertuples(index=False, name=None))\n del df\n gc.collect()\n\n nodes_list = []\n\n for col_id in range(0, df_tuples.shape[1]):\n father_child = df_tuples.iloc[:, col_id].drop_duplicates().values\n nodes_list.extend(father_child)\n\n graph = nx.DiGraph(nodes_list)\n graph_bfs = nx.bfs_tree(graph, 0)\n \n path = output_path + '.hashmap'\n hash_df.to_csv(path, index=False, sep='\\t')\n end_time = time.time()\n print(\"Time spent creating tree from csv file:\", end_time - start_time)\n return graph_bfs", "def ConstrDict(raw_data):\n if (path.exists(\"processed_out.txt\") and\n path.exists(\"processed_in.txt\")):\n with open(\"processed_out.txt\") as out:\n global out_edges\n out_edges = pickle.load(out)\n with open(\"processed_in.txt\") as fin:\n global in_edges\n in_edges = pickle.load(fin)\n print len(in_edges.keys())\n with open(\"nodes.txt\") as n:\n global nodes\n nodes = pickle.load(n)\n print \"nodes: \", len(nodes)\n else:\n # read each line and construct a dictionary to store\n # sources and destinations\n for line in raw_data: \n splitted_line = line.split()\n # source is the first element in a line, the rest of elements\n # are destinations\n threshold = 10000\n src, dests = splitted_line[0], splitted_line[1:threshold]\n # if src is not in the dictionary, create a key-value pair for\n # this src\n out_edges.setdefault(src, set())\n\n # put all destinations into the list of the corresponding src\n out_edges[src].update(set(dests))\n\n # construct a set to store all nodes appearing\n nodes.add(src)\n nodes.update(set(dests))\n\n # create the list of inedges for each node\n for i in out_edges[src]:\n in_edges.setdefault(i, set())\n in_edges[i].add(src)\n\n nodes = list(nodes)\n # shuffle the order of nodes\n shuffle(nodes)\n\n with open(\"processed_out.txt\", \"wb\") as 
out:\n pickle.dump(out_edges, out)\n with open(\"processed_in.txt\", \"wb\") as fin:\n pickle.dump(in_edges, fin)\n with open(\"nodes.txt\", \"wb\") as n:\n pickle.dump(nodes, n)\n\n\n # construct edge list\n for src, dests in out_edges.iteritems():\n pairs = [(src, dest) for dest in dests if (src, dest) not in\n exists]\n edges.extend(pairs)", "def multigraph_to_weighted_graph(M):\n G = nx.Graph()\n for u,v,data in M.edges_iter(data=True):\n w = data['weight'] if 'weight' in data else 1.0\n if G.has_edge(u,v):\n G[u][v]['weight'] += w\n else:\n G.add_edge(u, v, weight=w)\n return G", "def create_graph_data(edge_list=None, node_labels=None):\n # Add inverted edges to make graph undirected.\n edge_list += [(target, source) for source, target in edge_list]\n\n # Extract arrays of source and target nodes.\n sources = jnp.array([source for source, target in edge_list])\n targets = jnp.array([target for source, target in edge_list])\n\n node_feats = jnp.eye(len(node_labels)) # Unique one-hot features.\n\n return node_feats, node_labels, sources, targets", "def collate_molgraphs(data):\n assert len(data[0]) in [3, 4], \\\n 'Expect the tuple to be of length 3 or 4, got {:d}'.format(len(data[0]))\n if len(data[0]) == 3:\n smiles, graphs, labels = map(list, zip(*data))\n masks = None\n else:\n smiles, graphs, labels, masks = map(list, zip(*data))\n \n bg = dgl.batch(graphs)\n bg.set_n_initializer(dgl.init.zero_initializer)\n bg.set_e_initializer(dgl.init.zero_initializer)\n labels = torch.stack(labels, dim=0)\n \n if masks is None:\n masks = torch.ones(labels.shape)\n else:\n masks = torch.stack(masks, dim=0)\n return smiles, bg, labels, masks", "def graph_to_matrix(graph, node_map, edge_map, max_prev_node=None, random_bfs=False):\n n = len(graph.nodes())\n len_node_vec, _, num_nodes_to_consider = get_attributes_len_for_graph_rnn(\n len(node_map), len(edge_map), max_prev_node)\n\n if random_bfs:\n bfs_seq = get_random_bfs_seq(graph)\n bfs_order_map = {bfs_seq[i] : i for i in range(n)}\n graph = nx.relabel_nodes(graph, bfs_order_map)\n\n # 3D adjacecny matrix in case of edge_features (each A[i, j] is a len_edge_vec size vector)\n adj_mat_2d = torch.ones((n, num_nodes_to_consider))\n adj_mat_2d.tril_(diagonal=-1)\n adj_mat_3d = torch.zeros((n, num_nodes_to_consider, len(edge_map)))\n\n node_mat = torch.zeros((n, len_node_vec))\n\n for v, data in graph.nodes.data():\n ind = node_map[data['label']]\n node_mat[v, ind] = 1\n\n for u, v, data in graph.edges.data():\n if abs(u - v) <= max_prev_node:\n adj_mat_3d[max(u, v), max(u, v) - min(u, v) - 1, edge_map[data['label']]] = 1\n adj_mat_2d[max(u, v), max(u, v) - min(u, v) - 1] = 0\n \n adj_mat = torch.cat(\n (adj_mat_3d, adj_mat_2d.reshape(adj_mat_2d.size(0), adj_mat_2d.size(1), 1), \n torch.zeros((n, num_nodes_to_consider, 2))), dim=2)\n \n adj_mat = adj_mat.reshape((adj_mat.size(0), -1))\n\n return torch.cat((node_mat, adj_mat), dim=1)", "def build_graph(self):\n for node in self.graph.nodes():\n self.c2py[node] = PyNode(node)\n for _input in node.inputs():\n if _input not in self.c2py:\n self.c2py[_input] = PyNode(_input, True)\n if _input in self.forward_edge:\n self.forward_edge[_input].append(node)\n else:\n self.forward_edge[_input] = [node]\n for output in node.outputs():\n if output not in self.c2py:\n self.c2py[output] = PyNode(output, True)\n if node in self.forward_edge:\n self.forward_edge[node].append(output)\n else:\n self.forward_edge[node] = [output]", "def network(self):\n G = nx.MultiDiGraph()\n reaction_hash = []\n product_count = 0\n 
mapping = {}\n reaction_count = 0\n\n for r in self.reactions:\n reaction_count += 1\n\n reaction_dict = r.__dict__\n G.add_edge(reaction_dict.get('left'), hash(r))\n G.add_edge(reaction_dict.get('right'), hash(r))\n G.add_edge(hash(r), reaction_dict.get('left2'))\n G.add_edge(hash(r), reaction_dict.get('right2'))\n\n product_count += 1\n mapping[reaction_dict.get('left')] = \"x{}\".format(product_count)\n product_count += 1\n mapping[reaction_dict.get('right')] = \"x{}\".format(product_count)\n product_count += 1\n mapping[reaction_dict.get('left2')] = \"x{}\".format(product_count)\n product_count += 1\n mapping[reaction_dict.get('right2')] = \"x{}\".format(product_count)\n\n mapping[hash(r)] = \"r{}\".format(reaction_dict.get(\"reaction_n\"))\n reaction_hash.append(hash(r))\n\n return G, mapping", "def transpose_graph(adj):\n trans_adj = [[] for _ in range(len(adj))]\n\n for i in range(len(adj)):\n for j in adj[i]:\n trans_adj[j].append(i)\n\n return trans_adj", "def read_graph(graph_path):\n print(\"\\nTarget matrix creation started.\\n\")\n graph = nx.from_edgelist(pd.read_csv(graph_path).values.tolist())\n graph.remove_edges_from(graph.selfloop_edges())\n return graph", "def conn_reshape_directed(da, net=False, sep='-', order=None, rm_missing=False,\n fill_value=np.nan, to_dataframe=False,\n inplace=False):\n assert isinstance(da, xr.DataArray)\n if not inplace:\n da = da.copy()\n assert ('roi' in list(da.dims)) and ('direction' in list(da.dims))\n if 'times' not in list(da.dims):\n da = da.expand_dims(\"times\")\n\n # get sources, targets names and sorted full list\n sources, targets, roi_tot = _untangle_roi(da, sep)\n\n # transpose, reindex and reorder (if needed)\n da_xy, da_yx = da.sel(direction='x->y'), da.sel(direction='y->x')\n if net:\n da = xr.concat((da_xy - da_yx, da_xy - da_yx), 'roi')\n else:\n da = xr.concat((da_xy, da_yx), 'roi')\n da, order = _dataarray_unstack(da, sources, targets, roi_tot, fill_value,\n order, rm_missing)\n\n # dataframe conversion\n if to_dataframe:\n da = _dataframe_conversion(da, order)\n\n return da", "def prepare_collapse(graph, node, color=None):\n is_ancestor_target = graph.get_node_attribute(node, graph.ANCESTOR_TARGET)\n if is_ancestor_target:\n redirect_ancestor_edges(graph, node, color)\n else:\n add_ancestor_edges(graph, node, color)\n\n if color:\n out_neighbor, = graph.get_deductive_out_neighbors(node)\n graph.set_edge_attribute(node, out_neighbor, graph.COLOR, color)", "def edge_list_df_to_igraph(edge_list_df, node_id_mapper):\n nodes = list(set(edge_list_df.from_id.values.tolist() + edge_list_df.to_id.values.tolist()))\n #node_names = list(set(edge_list_df.from_name.values.tolist() + edge_list_df.to_name.values.tolist()))\n edges = list(zip(edge_list_df.from_id, edge_list_df.to_id))\n weights = list(edge_list_df.weight.values)\n g = Graph()\n g.add_vertices(len(nodes))\n g.add_edges(edges)\n g.es['weight'] = weights\n g.vs['label'] = list(node_id_mapper.inverse_transform(np.array(range(len(g.vs)))))\n g.vs['community'] = 0 # Set original community the same for all nodes\n return g, edges", "def example_graph():\n g = nx.Graph()\n g.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'C'), ('B', 'D'), ('D', 'E'), ('D', 'F'), ('D', 'G'), ('E', 'F'), ('G', 'F')])\n return g", "def _edges_to_nml_edges(edges):\n\n nml_edges = []\n for idx in range(edges.shape[0]):\n nml_edge = wknml.Edge(\n source=int(edges[idx, 0]),\n target=int(edges[idx, 1]),\n )\n nml_edges.append(nml_edge)\n\n return nml_edges", "def unify_graph(graph):\n simpleGraph = 
nx.empty_graph(graph.number_of_nodes())\n for node1, node2, property in graph.edges(data=True):\n edist = property['edist']\n fdist = property['fdist']\n weight = property['weight']\n capa = property['capa']\n lgth = property['lgth']\n conn = property['conn']\n jump = property['jump']\n multi = 1\n if simpleGraph.has_edge(node1, node2):\n simpleGraph[node1][node2]['multi'] += 1.0\n simpleGraph[node1][node2]['capa'] += capa\n if(simpleGraph[node1][node2]['lgth'] > lgth):\n simpleGraph[node1][node2]['lgth'] = lgth\n else:\n simpleGraph.add_edge(node1, node2, edist=edist, fdist=fdist, weight=weight, capa=capa, lgth=lgth, conn=conn, jump=jump, multi=multi)\n return(simpleGraph)", "def convert_edges_perm(edges):\n L = dict(edges)\n output = [START_NODE]\n while output[-1] != END_NODE:\n output.append(L[output[-1]])\n if len(edges) + 1 != len(output):\n raise Exception()\n return output", "def buildGraph(M: List[List[int]]) -> List:\n l = len(M)\n G = [Node(i) for i in range(l)]\n for i in range(len(M)):\n for j in range(len(M)):\n if M[i][j]:\n G[i].add_adjacent(G[j])\n return G", "def anchors_to_adjacency(set_path, n_proteomes, mailbox_reader):\n frame_list = []\n for idx in range(n_proteomes):\n with mailbox_reader(idx) as file_handle:\n frame_list.append(\n pd.read_csv(\n file_handle, sep=\"\\t\", index_col=0\n ).convert_dtypes()\n )\n nodes = pd.concat(\n frame_list,\n ignore_index=True,\n )\n del frame_list\n graph = nx.Graph()\n for unused_tuple, subframe in nodes.groupby(\n by=[\"syn.anchor.id\", \"syn.anchor.sub_id\"]\n ):\n ids = subframe[\"member_ids\"]\n n_ids = len(ids)\n graph.add_nodes_from(ids)\n if n_ids > 1:\n edges = combinations(ids, 2)\n graph.add_edges_from(edges, weight=n_ids)\n outpath = set_path / ANCHORS_FILE\n summarypath = outpath.parent / (\n outpath.name[: -len(outpath.suffix)] + \"_summary.tsv\"\n )\n histpath = outpath.parent / (\n outpath.name[: -len(outpath.suffix)] + \"_hist.tsv\"\n )\n components = [\n c\n for c in sorted(nx.connected_components(graph), key=len, reverse=True)\n if len(c) > 1\n ]\n fh = outpath.open(\"w\")\n fh.write(\"idx\\tcluster_id\\tsize\\tmembers\\n\")\n n_items = 0\n count_list = []\n hash_list = []\n id_list = []\n for i, comp in enumerate(components):\n component = np.sort(pd.Index(list(comp)).to_numpy())\n id_list.append(i)\n size = len(comp)\n count_list.append(size)\n hash_list.append(hash_array(component))\n for node in component:\n fh.write(f\"{n_items}\\t{i}\\t{size}\\t{node}\\n\")\n n_items += 1\n fh.close()\n n_clusts = len(count_list)\n del graph, components\n cluster_counts = pd.DataFrame({\"size\": count_list})\n largest_cluster = cluster_counts[\"size\"].max()\n cluster_hist = (\n pd.DataFrame(cluster_counts.value_counts()).sort_index().reset_index()\n )\n cluster_hist = cluster_hist.set_index(\"size\")\n cluster_hist = cluster_hist.rename(columns={0: \"n\"})\n cluster_hist[\"item_pct\"] = (\n cluster_hist[\"n\"] * cluster_hist.index * 100.0 / n_items\n )\n cluster_hist.to_csv(histpath, sep=\"\\t\", float_format=\"%5.2f\")\n cluster_hist[\"cluster_pct\"] = cluster_hist[\"n\"] * 100.0 / n_clusts\n cluster_hist.to_csv(histpath, sep=\"\\t\", float_format=\"%5.2f\")\n clusters = pd.DataFrame(\n {\"anchor.id\": id_list, \"count\": count_list, \"hash\": hash_list}\n )\n clusters.to_csv(summarypath, sep=\"\\t\")\n stats_dict = {\n \"in_anchor\": n_items,\n \"syn.anchors.n\": n_clusts,\n \"syn.anchors.largest\": largest_cluster,\n }\n return stats_dict", "def to_NetworkX(nodes, edges, attributes=None):\n \n import networkx as nx\n 
# convert to dataframe if numpy array\n if isinstance(nodes, np.ndarray):\n nodes = coords_to_df(nodes)\n if isinstance(edges, np.ndarray):\n edges = pairs_to_df(edges)\n \n G = nx.from_pandas_edgelist(edges)\n if attributes is not None:\n for col in attributes.columns:\n # only for glm extension file:\n # nx.set_node_attributes(G, attributes[col].to_dict(), col.replace('+','AND')) \n nx.set_node_attributes(G, attributes[col].to_dict(), col)\n return G", "def to_graph(l):\n G = nx.Graph()\n for part in l:\n # each sublist is a bunch of nodes\n G.add_nodes_from(part)\n # it also imlies a number of edges:\n G.add_edges_from(to_edges(part))\n return G", "def to_undirected_graph(self):\n visited = set() \n G = Graph.Graph()\n \n for node in self.node_set:\n \n if node not in visited:\n visited.add(node)\n for i in self.suffix[node]:\n G.add_edge(node, i)\n \n return G" ]
[ "0.6554219", "0.641795", "0.6140745", "0.5753671", "0.5641915", "0.559664", "0.5504024", "0.54336166", "0.5385011", "0.53718996", "0.5364176", "0.53571475", "0.53153133", "0.531101", "0.5291452", "0.52877784", "0.5240004", "0.52247584", "0.5210389", "0.51955575", "0.5184813", "0.5166038", "0.51610285", "0.51519287", "0.514763", "0.5130967", "0.5130523", "0.51273984", "0.51106304", "0.51062894" ]
0.6853212
0
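A runnable illustration of the collapse that `collapse_multigraph_to_nx` above performs, using plain networkx objects in place of dace graphs (the string node labels and the `data` attribute are hypothetical):

import networkx as nx

mg = nx.MultiDiGraph()
mg.add_edge('a', 'b', data='first')
mg.add_edge('a', 'b', data='second')  # parallel edge between the same nodes

node_id = {n: i for i, n in enumerate(mg.nodes())}
dg = nx.DiGraph()
dg.add_nodes_from((i, {'node': n}) for n, i in node_id.items())
for u, v, attrs in mg.edges(data=True):
    su, sv = node_id[u], node_id[v]
    if not dg.has_edge(su, sv):
        dg.add_edge(su, sv)
    edata = dg.edges[su, sv]
    edata[len(edata)] = attrs  # numbered slots 0..N, as in the document above

print(dg.edges[node_id['a'], node_id['b']])
# {0: {'data': 'first'}, 1: {'data': 'second'}}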
Checks whether `node_a` is an instance of the same type as `node_b`, or if either `node_a`/`node_b` is a type and the other is an instance of that type. This is used in subgraph matching to allow the subgraph pattern to be either a graph of instantiated nodes, or node types.
def type_or_class_match(node_a, node_b):
    if isinstance(node_b['node'], type):
        return issubclass(type(node_a['node']), node_b['node'])
    elif isinstance(node_a['node'], type):
        return issubclass(type(node_b['node']), node_a['node'])
    elif isinstance(node_b['node'], xf.PatternNode):
        return isinstance(node_a['node'], node_b['node'].node)
    elif isinstance(node_a['node'], xf.PatternNode):
        return isinstance(node_b['node'], node_a['node'].node)
    return isinstance(node_a['node'], type(node_b['node']))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def same_type(one, two):\n\n return isinstance(one, type(two))", "def type_match(graph_node, pattern_node):\n if isinstance(pattern_node['node'], xf.PatternNode):\n return isinstance(graph_node['node'], pattern_node['node'].node)\n return isinstance(graph_node['node'], type(pattern_node['node']))", "def sametype(variable1, variable2):\n\n # Return the result\n return isinstance(variable1, type(variable2))", "def is_same_type_as_other(cls, other):\r\n return isinstance(other, cls)", "def is_instance_of_type(object_a, type_a):\n\n return is_type_subclass_of_type(type(object_a), type_a)", "def of_type(self, a):\n return type(a) == type(self.one)", "def _is_equal_same_type(self, other):\n return True", "def is_same_class(obj, a_class):\n return isinstance(obj, a_class)", "def is_same_class(obj, a_class):\n return type(obj) == a_class", "def is_same_class(obj, a_class):\n return type(obj) == a_class", "def _node_equal(self, other):\n # We're not equal if other isn't a Node, or if other is a different class.\n if not isinstance(other, Node) or not isinstance(other, self.__class__):\n return False\n # Loop through all children, checking whether they are equal\n for self_child, other_child in zip(self.getChildren(), other.getChildren()):\n if not self_child == other_child:\n return False\n # If we get here, our two nodes much be equal\n return True", "def is_same_class(obj, a_class):\n if type(obj) == a_class:\n return True\n else:\n return False", "def is_same_class(obj, a_class):\n return type(obj) is a_class", "def is_same_class(obj, a_class):\n return type(obj) is a_class", "def is_same_class(obj, a_class):\n return (type(obj) == a_class)", "def is_same_class(obj, a_class):\n if type(obj) is a_class:\n return True\n else:\n return False", "def is_same_class(obj, a_class):\n return(type(obj) == a_class)", "def is_same_class(obj, a_class):\n\n if type(obj) is a_class:\n return True\n return False", "def is_same_class(obj, a_class):\n return (type(obj) is a_class)", "def PyType_IsSubtype(space, a, b):\n w_type1 = from_ref(space, rffi.cast(PyObject, a))\n w_type2 = from_ref(space, rffi.cast(PyObject, b))\n return int(abstract_issubclass_w(space, w_type1, w_type2)) #XXX correct?", "def is_same_class(obj, a_class):\n return(type(obj) is a_class)", "def check_types(begin, end):\n try:\n begin.get_midpoint()\n end.get_midpoint()\n except AttributeError:\n return False\n\n return isinstance(begin.get_midpoint(), type(end.get_midpoint()))", "def __eq__(self, other):\n return type(self) == type(other) and self.node is other.node", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def check_type_compat(input_a, input_b):\n return return_family_type(input_a) is return_family_type(input_b)", "def is_subtype_of(self, other):\n if type(self) is not type(other):\n return False\n\n if (not self._transform_is_composite and\n self.transform_or_spec != other.transform_or_spec):\n return False\n\n # pylint: disable=protected-access\n try:\n tf.nest.assert_same_structure((self._specs, self._unique_id_params),\n (other._specs, other._unique_id_params))\n except (TypeError, ValueError):\n return False\n\n self_elements = tf.nest.flatten((self._specs, self._unique_id_params))\n other_elements = tf.nest.flatten((other._specs, other._unique_id_params))\n\n def is_subtype_or_equal(a, b):\n try:\n return a.is_subtype_of(b)\n except AttributeError:\n return a == b\n\n return all(\n is_subtype_or_equal(self_element, other_element)\n for (self_element, other_element) in 
zip(self_elements, other_elements))", "def is_subclass(self, left: TypeInfo, right: TypeInfo) -> bool:\n return nx.has_path(self._graph, right, left)", "def _is_node_identical(self, job_name_a, job_name_b):\n\n node_a = self._graph_a.get_node(job_name_a)\n node_b = self._graph_b.get_node(job_name_b)\n\n # Check for same job type name and version\n if node_a.job_type_name != node_b.job_type_name or node_a.job_type_version != node_b.job_type_version:\n return False\n\n # Check that A and B have matching parents that are identical to one another\n a_parent_names = set(a_parent.node_name for a_parent in node_a.parents)\n for b_parent in node_b.parents:\n b_parent_name = b_parent.node_name\n if b_parent_name not in self._identical_nodes:\n return False # B has a parent that is not identical to any other node\n matched_a_parent_name = self._identical_nodes[b_parent_name]\n if matched_a_parent_name not in a_parent_names:\n return False # B has a parent that does not match a parent of A\n a_parent_names.remove(matched_a_parent_name)\n if a_parent_names:\n return False # A has a parent that does not match a parent of B\n\n # Check that A and B use the same inputs\n a_inputs = dict(node_a.inputs)\n for b_input_name in node_b.inputs:\n if b_input_name not in a_inputs:\n return False # B input not defined for A\n b_input = node_b.inputs[b_input_name]\n a_input = a_inputs[b_input_name]\n if not a_input.is_equal_to(b_input, self._matched_recipe_inputs, self._identical_nodes):\n return False # A and B have a non-matching input\n del a_inputs[b_input_name]\n if a_inputs:\n return False # A input not defined for B\n\n return True", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node", "def __eq__(self, other):\n return type(other) is type(self) and other._node is self._node" ]
[ "0.7246985", "0.6875418", "0.6746393", "0.66509914", "0.6645413", "0.64153445", "0.63544613", "0.6354311", "0.6345268", "0.6345268", "0.6300346", "0.6269791", "0.6268452", "0.6268452", "0.626733", "0.62422216", "0.62378544", "0.623251", "0.6231513", "0.62188184", "0.62145805", "0.6078149", "0.6073576", "0.60273296", "0.60047877", "0.5951628", "0.58889574", "0.58799404", "0.5870834", "0.5870834" ]
0.8616503
0
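The matching rule of `type_or_class_match` above, reduced to bare objects (the `PatternNode` branches and the `['node']` unwrapping are dropped; the sample classes are hypothetical):

def type_or_instance_match(a, b):
    # Either side may be a class standing in for "any instance of this type".
    if isinstance(b, type):
        return issubclass(type(a), b)
    if isinstance(a, type):
        return issubclass(type(b), a)
    return isinstance(a, type(b))

class Entry: ...
class SpecialEntry(Entry): ...

assert type_or_instance_match(SpecialEntry(), Entry)        # instance vs. type
assert type_or_instance_match(Entry, SpecialEntry())        # type vs. instance
assert not type_or_instance_match(Entry(), SpecialEntry())  # plain instance check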
Helper function that tries to instantiate a pattern match into a transformation object.
def _try_to_match_transformation(graph: Union[SDFG, SDFGState], collapsed_graph: nx.DiGraph, subgraph: Dict[int, int],
                                 sdfg: SDFG, xform: Union[xf.PatternTransformation, Type[xf.PatternTransformation]],
                                 expr_idx: int, nxpattern: nx.DiGraph, state_id: int, permissive: bool,
                                 options: Dict[str, Any]) -> Optional[xf.PatternTransformation]:
    subgraph = {
        nxpattern.nodes[j]['node']: graph.node_id(collapsed_graph.nodes[i]['node'])
        for i, j in subgraph.items()
    }

    try:
        if isinstance(xform, xf.PatternTransformation):
            match = xform
        else:  # Construct directly from type with options
            opts = options or {}
            try:
                match = xform(**opts)
            except TypeError:
                # Backwards compatibility, transformation does not support ctor arguments
                match = xform()
                # Set manually
                for oname, oval in opts.items():
                    setattr(match, oname, oval)

        match.setup_match(sdfg, sdfg.sdfg_id, state_id, subgraph, expr_idx, options=options)
        match_found = match.can_be_applied(graph, expr_idx, sdfg, permissive=permissive)
    except Exception as e:
        if Config.get_bool('optimizer', 'match_exception'):
            raise
        if not isinstance(xform, type):
            xft = type(xform)
        else:
            xft = xform
        print('WARNING: {p}::can_be_applied triggered a {c} exception:'
              ' {e}'.format(p=xft.__name__, c=e.__class__.__name__, e=e))
        return None

    if match_found:
        return match

    return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pattern_from_transform(\n step_structure: tree.Structure[Any],\n transform: Callable[[ReferenceStep], Pattern]) -> Pattern:\n return transform(create_reference_step(step_structure))", "def from_re_match(cls, match):\n kwargs = match.groupdict()\n location = kwargs['location'].split()\n kwargs['location'] = (int(location[0]), int(location[1]),\n int(location[2]))\n return cls(**kwargs)", "def from_re_match(cls, match):\n kwargs = match.groupdict()\n player_location = kwargs['player_location'].split()\n kwargs['player_location'] = (int(player_location[0]),\n int(player_location[1]),\n int(player_location[2]))\n target_location = kwargs['target_location'].split()\n kwargs['target_location'] = (int(target_location[0]),\n int(target_location[1]),\n int(target_location[2]))\n return cls(**kwargs)", "def __new__(cls, format):\n self = super(SF_Pattern, cls).__new__(cls)\n\n if isinstance(format, bytes):\n uni_str = format.decode('ISO-8859-1') # decode to unicode\n trans_str = translate(uni_str) # translate only works with unicode\n re_fmt = trans_str.encode('ISO-8859-1') # encode back to bytes\n self._spec = _gbspec\n else:\n re_fmt = translate(format)\n self._spec = _gspec\n\n self._format = format\n self._re = cre = re.compile(re_fmt)\n\n if cre.groupindex and len(cre.groupindex) != cre.groups:\n raise RuntimeError('cannot mix mapped and unmapped specifiers')\n elif not cre.groupindex:\n self._retfunc = self._return_tuple\n self._type = tuple\n else:\n self._retfunc = self._return_dict\n self._type = dict\n\n self._casts = self._get_types()\n\n return self", "def compile(format):\n try:\n return _cache[format]\n except KeyError:\n _cache[format] = retval = SF_Pattern.__new__(SF_Pattern, format)\n return retval", "def from_re_match(cls, match):\n kwargs = match.groupdict()\n player_location = kwargs['player_location'].split()\n kwargs['player_location'] = (int(player_location[0]),\n int(player_location[1]),\n int(player_location[2]))\n target_location = kwargs['target_location'].split()\n kwargs['target_location'] = (int(target_location[0]),\n int(target_location[1]),\n int(target_location[2]))\n if match.string.endswith('(headshot)'):\n kwargs['headshot'] = True\n return cls(**kwargs)", "def __init__(self, pattern):\n self._pattern = re.compile(pattern)", "def replacement(cls, search_pattern: str, replacement: str) -> PhonTransform:\n sub_func = lambda match: replacement\n return cls(search_pattern, sub_func)", "def __init__(self, pattern):\r\n self.pattern = pattern", "def __new__(cls, name, build_pattern: str = None, parse_pattern: re.Pattern = None):\n obj = super().__new__(cls, name)\n\n if parse_pattern is not None:\n obj.parse_pattern = parse_pattern\n\n if build_pattern is not None:\n obj.build_pattern = build_pattern\n\n return obj", "def _Transform(obj, jac=None, offset=(0.,0.), flux_ratio=1.):\n ret = Transformation.__new__(Transformation)\n ret._gsparams = obj.gsparams\n ret._propagate_gsparams = True\n ret._jac = jac\n ret._dx, ret._dy = offset\n if isinstance(obj, Transformation):\n if obj._has_offset:\n if jac is None:\n dx1, dy1 = obj._dx, obj._dy\n else:\n dx1, dy1 = ret._fwd_normal(obj._dx, obj._dy)\n ret._dx += dx1\n ret._dy += dy1\n if jac is None:\n ret._jac = obj._jac\n else:\n ret._jac = ret._jac if obj._jac is None else ret._jac.dot(obj.jac)\n ret._flux_ratio = flux_ratio * obj._flux_ratio\n ret._original = obj._original\n else:\n ret._flux_ratio = flux_ratio\n ret._original = obj\n ret._has_offset = (ret._dx != 0. 
or ret._dy != 0.)\n return ret", "def __init__(self, regex, view):\n self.regex = re.compile(regex)\n self.view = view", "def pattern_factory(self):\n\t\treturn self.args[1]", "def to_pattern(obj):\n if isinstance(obj, Pattern):\n return obj\n return Glob(str(obj))", "def _compile_fnmatch(pattern: str) -> re.Pattern:\n return re.compile(translate(pattern))", "def factory(**pattern):\n\n class_name = pattern.get('class')\n del (pattern['class'])\n\n # pprint(inspect.stack()[1][0].f_globals)\n _cls = inspect.stack()[1][0].f_globals[class_name]\n\n # _cls = globals()[class_name]\n return _cls(**pattern)", "def from_regex(pattern:str) -> str:\n raise NotImplementedError()", "def __init__(self, format_string):\r\n if not isinstance(format_string, Compatibility.string):\r\n raise TypeError('format_string should be a string, instead got %s' % type(format_string))\r\n self._re_pattern, self._applicators = self._preprocess_format_string(format_string)\r\n self._re = re.compile(self._re_pattern)", "def convert_pattern(pattern, pattern_type=None):\n\tif pattern_type == 'regex':\n\t\treturn re.compile(pattern)\n\telif pattern_type == 'wildcard':\n\t\treturn re.compile(fnmatch.translate(pattern))\n\treturn re.compile(re.escape(pattern))", "def MakePattern(self,content):\n return self.register(Pattern(content,reg=self))", "def instantiate(obj):\n return obj() if isinstance(obj, type) else obj", "def parse_from_placeholder(string,pattern,encloser='%',matcher='(.+)'):\n pattern,fields = placeholder_to_regex(pattern,encloser,matcher)\n return parse_from_regex(string,pattern,fields)", "def __init__(self, regexp, handler, quick=0):\n\n self.regexp = compile(regexp)\n self.handler = handler\n self.quick = quick", "def __init__(self, regex, groups, nestedPattern = None, ignored = dict()):\r\n self.regex = regex.format(*[x.group() for x in groups])\r\n self.groups = groups\r\n self.ignored = ignored\r\n self.nestedPattern = nestedPattern\r\n self.name = \"_\"\r\n while self.name in self.groups:\r\n self.name += \"_\"", "def __init__(self, pattern):\n self._pattern = pattern.lower()", "def func(match, create_new=False, *args, **kwargs):\n\n data = match.group(0)\n\n if create_new:\n dash = data.replace(\"_\", \"-\") if \"_\" in data else data\n\n if dash not in objects:\n new = gen_guid()\n objects[dash] = new\n objects[dash.replace(\"-\", \"_\")] = new.replace(\"-\", \"_\")\n\n if data in objects:\n return (objects[data], True)\n\n return (data, False)", "def __init__(self,pattern):\n\t\tself.__type__ = 'pol'\n\t\tif type(pattern)!=list and type(pattern)!=tuple :\n\t\t\traise InvalidArgumentException(\"No puedo construir un polinomio con este argumento\")", "def __init__(self, pattern1, pattern2):\n self.pattern1 = pattern1\n self.pattern2 = pattern2", "def __init__(self, matcher, generate):\n self.matcher = matcher\n self._generate = generate", "def transform():" ]
[ "0.62242067", "0.59924597", "0.5892394", "0.58774453", "0.5842749", "0.57457894", "0.57445747", "0.57260877", "0.5663162", "0.5640416", "0.55747616", "0.55739737", "0.5535943", "0.54877406", "0.54796714", "0.54767984", "0.5468489", "0.54371476", "0.5349981", "0.53497803", "0.53490037", "0.53295684", "0.5322245", "0.5318905", "0.5299198", "0.5296147", "0.5271535", "0.5270302", "0.52287585", "0.52014315" ]
0.61571854
1
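The constructor-fallback idiom inside `_try_to_match_transformation` above, isolated as a standalone helper (the names `instantiate_with_options` and `LegacyTransformation` are hypothetical):

def instantiate_with_options(cls_or_obj, options=None):
    if not isinstance(cls_or_obj, type):
        return cls_or_obj  # already an instance, use as-is
    opts = options or {}
    try:
        return cls_or_obj(**opts)
    except TypeError:
        # Backwards compatibility: construct bare, then set attributes manually
        obj = cls_or_obj()
        for name, value in opts.items():
            setattr(obj, name, value)
        return obj

class LegacyTransformation:
    pass  # constructor takes no options, like older transformations

t = instantiate_with_options(LegacyTransformation, {'permissive': True})
assert t.permissive is True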
Returns a generator of Transformations that match the input SDFG. Ordered by SDFG ID.
def match_patterns(sdfg: SDFG,
                   patterns: Union[Type[xf.PatternTransformation], List[Type[xf.PatternTransformation]]],
                   node_match: Callable[[Any, Any], bool] = type_match,
                   edge_match: Optional[Callable[[Any, Any], bool]] = None,
                   permissive: bool = False,
                   metadata: Optional[PatternMetadataType] = None,
                   states: Optional[List[SDFGState]] = None,
                   options: Optional[List[Dict[str, Any]]] = None):
    if isinstance(patterns, type):
        patterns = [patterns]
    if isinstance(options, dict):
        options = [options]

    # Collect transformation metadata
    if metadata is not None:
        # Transformation metadata can be evaluated once per apply loop
        interstate_transformations, singlestate_transformations = metadata
    else:
        # Otherwise, precompute all transformation data once
        (interstate_transformations, singlestate_transformations) = get_transformation_metadata(patterns, options)

    # Collect SDFG and nested SDFGs
    sdfgs = sdfg.all_sdfgs_recursive()

    # Try to find transformations on each SDFG
    for tsdfg in sdfgs:
        ###################################
        # Match inter-state transformations
        if len(interstate_transformations) > 0:
            # Collapse multigraph into directed graph in order to use VF2
            digraph = collapse_multigraph_to_nx(tsdfg)

            for xform, expr_idx, nxpattern, matcher, opts in interstate_transformations:
                for subgraph in matcher(digraph, nxpattern, node_match, edge_match):
                    match = _try_to_match_transformation(tsdfg, digraph, subgraph, tsdfg, xform, expr_idx, nxpattern,
                                                         -1, permissive, opts)
                    if match is not None:
                        yield match

        ####################################
        # Match single-state transformations
        if len(singlestate_transformations) == 0:
            continue

        for state_id, state in enumerate(tsdfg.nodes()):
            if states is not None and state not in states:
                continue

            # Collapse multigraph into directed graph in order to use VF2
            digraph = collapse_multigraph_to_nx(state)

            for xform, expr_idx, nxpattern, matcher, opts in singlestate_transformations:
                for subgraph in matcher(digraph, nxpattern, node_match, edge_match):
                    match = _try_to_match_transformation(state, digraph, subgraph, tsdfg, xform, expr_idx, nxpattern,
                                                         state_id, permissive, opts)
                    if match is not None:
                        yield match
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def tss_generator(gtf):\n\tfor transcript in db.features_of_type('transcript'):\n\t\tyield TSS(asinterval(transcript), upstream=1000, downstream=1000)", "def get_bfs_seq(G, start_id):\n successors_dict = dict(nx.bfs_successors(G, start_id))\n start = [start_id]\n output = [start_id]\n while len(start) > 0:\n succ = []\n for current in start:\n if current in successors_dict:\n succ = succ + successors_dict[current]\n \n output = output + succ\n start = succ\n return output", "def _generator():\n filename_1 = 'gene.txt'\n filename_2 = 'geneSynonym.txt'\n gene_set_1 = gene_names(filename_1)\n gene_syn = gene_names(filename_2, complete=False)\n genes = gene_set_1 | gene_syn\n return genes", "def gen_seq(self, sort_by='', ascending=True, **kwargs):\n res_df = self.filter(sort_by, ascending, **kwargs)\n seq_id_list = res_df['seq_id'].tolist()\n id_list = list(res_df.index)\n \n for i, seq_id in zip(id_list, seq_id_list):\n yield i, seq_id, self._d[seq_id]", "def get_training_seqs(self):\r\n # Rdp requires unique sequence IDs without whitespace. Can't\r\n # trust user IDs to not have whitespace, so we replace all\r\n # whitespace with an underscore. Classification may fail if\r\n # the replacement method generates a name collision.\r\n for seq_id, node in self.sequence_nodes.iteritems():\r\n seq = self.sequences.get(seq_id)\r\n if seq is not None:\r\n lineage = node.get_lineage()\r\n rdp_id = '%s %s' % (\r\n re.sub('\\s',\r\n '_',\r\n seq_id),\r\n ';'.join(lineage))\r\n yield rdp_id, seq", "def __iter__(self):\n for id in self.order():\n inputs = [w for w in self.wires if w['target'][0] == id]\n yield id, inputs", "def iter_sequence(self):\n for res_name, fragment in self.sequence_fragment_list:\n yield res_name", "def _query_sequence_sources(self):\n if self.uniprot_id:\n self._query_uniprot()\n elif self.ncbi_id:\n self._query_ncbi()\n if \"mutations\" in self.metadata.keys():\n mutations = self.metadata[\"mutations\"].split()\n del self.metadata[\"mutations\"] # remove mutations, will be added subsequently\n for mutation in mutations:\n import re\n\n if mutation.startswith(\"ins\"): # insertion\n logger.debug(f\"Performing insertion {mutation} ...\")\n match = re.search(\"ins(?P<position>[0-9]+)(?P<insertion>[A-Z]+)\", mutation)\n self.insert(int(match.group(\"position\")), match.group(\"insertion\"))\n elif mutation.startswith(\"del\"): # deletion\n logger.debug(f\"Performing deletion {mutation} ...\")\n match = re.search(\n \"del(?P<first>[0-9]+)-(?P<last>[0-9]+)(?P<insertion>[A-Z]*)\",\n mutation,\n )\n self.delete(\n int(match.group(\"first\")),\n int(match.group(\"last\")),\n match.group(\"insertion\"),\n )\n else: # substitution\n logger.debug(f\"Performing substitution {mutation} ...\")\n self.substitute(mutation)\n if \"construct_range\" in self.metadata.keys():\n logger.debug(f\"Cropping sequence to construct {self.metadata['construct_range']} ...\")\n first, last = [int(x) for x in self.metadata[\"construct_range\"].split(\"-\")]\n self._sequence = self._sequence[first - 1 : last] # 1-indexed", "def semigroup_generators(self):\n from sage.sets.family import Family\n return Family([self(i) for i in self.associated_graph().vertices()])", "def get_all_relaxed_candidates_after_generation(self, gen):\n q = 'relaxed=1,extinct=0,generation<={0}'\n entries = self.c.select(q.format(gen))\n\n trajs = []\n for v in entries:\n t = self.get_atoms(id=v.id)\n t.info['confid'] = v.gaid\n t.info['relax_id'] = v.id\n trajs.append(t)\n trajs.sort(key=lambda x: get_raw_score(x),\n reverse=True)\n 
return trajs", "def enumerate_matches(sdfg: SDFG,\n pattern: gr.Graph,\n node_match=type_or_class_match,\n edge_match=None) -> Iterator[gr.SubgraphView]:\n if len(pattern.nodes()) == 0:\n raise ValueError('Subgraph pattern cannot be empty')\n\n # Find if the subgraph is within states or SDFGs\n is_interstate = (isinstance(pattern.node(0), SDFGState)\n or (isinstance(pattern.node(0), type) and pattern.node(0) is SDFGState))\n\n # Collapse multigraphs into directed graphs\n pattern_digraph = collapse_multigraph_to_nx(pattern)\n\n # Find matches in all SDFGs and nested SDFGs\n for graph in sdfg.all_sdfgs_recursive():\n if is_interstate:\n graph_matcher = iso.DiGraphMatcher(collapse_multigraph_to_nx(graph),\n pattern_digraph,\n node_match=node_match,\n edge_match=edge_match)\n for subgraph in graph_matcher.subgraph_isomorphisms_iter():\n yield gr.SubgraphView(graph, [graph.node(i) for i in subgraph.keys()])\n else:\n for state in graph.nodes():\n graph_matcher = iso.DiGraphMatcher(collapse_multigraph_to_nx(state),\n pattern_digraph,\n node_match=node_match,\n edge_match=edge_match)\n for subgraph in graph_matcher.subgraph_isomorphisms_iter():\n yield gr.SubgraphView(state, [state.node(i) for i in subgraph.keys()])", "def segms_by_class(cls):\n\tcls = norm(cls)\n\tfor n in xrange(idaapi.get_segm_qty()):\n\t\tseg = idaapi.getnseg(n)\n\t\tif seg and not seg.empty():\n\t\t\tsegcls = norm(idaapi.get_segm_class(seg))\n\t\t\tif segcls == cls:\n\t\t\t\tyield seg", "def generate_schreier_sims(self, af=False):\n\n n = self._degree\n u = self.basic_transversals\n basic_orbits = self._basic_orbits\n if len(u) == 0:\n for x in self.generators:\n if af:\n yield x._array_form\n else:\n yield x\n return\n if len(u) == 1:\n for i in basic_orbits[0]:\n if af:\n yield u[0][i]._array_form\n else:\n yield u[0][i]\n return\n\n u = list(reversed(u))\n basic_orbits = basic_orbits[::-1]\n # stg stack of group elements\n stg = [list(range(n))]\n posmax = [len(x) for x in u]\n n1 = len(posmax) - 1\n pos = [0]*n1\n h = 0\n while 1:\n # backtrack when finished iterating over coset\n if pos[h] >= posmax[h]:\n if h == 0:\n return\n pos[h] = 0\n h -= 1\n stg.pop()\n continue\n p = _af_rmul(u[h][basic_orbits[h][pos[h]]]._array_form, stg[-1])\n pos[h] += 1\n stg.append(p)\n h += 1\n if h == n1:\n if af:\n for i in basic_orbits[-1]:\n p = _af_rmul(u[-1][i]._array_form, stg[-1])\n yield p\n else:\n for i in basic_orbits[-1]:\n p = _af_rmul(u[-1][i]._array_form, stg[-1])\n p1 = _af_new(p)\n yield p1\n stg.pop()\n h -= 1", "def semigroup_generators(self):\n return self.ambient().semigroup_generators().map(self.retract)", "def get_successors(self, sas_task: SASTask) -> Generator:\n operator_names = [operator.name for operator in sas_task.operators]\n random.Random(SEED).shuffle(operator_names)\n for name in operator_names:\n pre_child = copy.deepcopy(sas_task)\n with timers.timing(\"Obtaining successor\"):\n child = self.transform(pre_child, name)\n yield child, name", "def get_seq(data_dir, dname):\n # Get list of video files\n data_dir = os.path.join(data_dir, 'softmotion30_44k', dname)\n filenames = gfile.Glob(os.path.join(data_dir, '*'))\n if not filenames:\n raise RuntimeError('No data files found.')\n # Enumerates videos (filename, index of file, list of images)\n for f in filenames:\n k = 0\n for serialized_example in tf.python_io.tf_record_iterator(f):\n example = tf.train.Example()\n example.ParseFromString(serialized_example)\n image_seq = []\n # Get all frames of the video\n for i in range(30):\n image_name = str(i) + 
'/image_aux1/encoded'\n byte_str = example.features.feature[image_name].bytes_list.value[0]\n img = Image.frombytes('RGB', (64, 64), byte_str)\n image_seq.append(img)\n k = k + 1\n yield f, k, image_seq", "def _generators(self):\n return self.free_group.generators", "def sequence(self):\n inigen = IniGen()\n fields = algorithm_fields.algorithms['sequence']\n\n output_uuid_map = {}\n\n # set up global parameters\n algorithm_path = fields['path']\n enabled = \"True\"\n inigen.emit_global(algorithm_path, enabled)\n\n label = \"SEQ\"\n for t in ['C','L']:\n run_label = label+'_'+t\n t1Mag_label = '{0}1MAG'.format(t)\n t2Mag_label = '{0}2MAG'.format(t)\n t3Mag_label = '{0}3MAG'.format(t)\n t1Ang_label = '{0}1ANG'.format(t)\n t2Ang_label = '{0}2ANG'.format(t)\n t3Ang_label = '{0}3ANG'.format(t)\n distillate_label = \"{0}-ALL\".format(t)\n\n # header\n inigen.emit_run_header(run_label, CHUNKING, MINTIME, MAXTIME)\n\n # body\n dep_1Mag_label = t1Mag_label\n dep_1Mag_name = fields['deps'][0]\n dep_1Mag_uuid = self.uuid_map[t1Mag_label]\n\n dep_2Mag_label = t2Mag_label\n dep_2Mag_name = fields['deps'][1]\n dep_2Mag_uuid = self.uuid_map[t2Mag_label]\n\n dep_3Mag_label = t3Mag_label\n dep_3Mag_name = fields['deps'][2]\n dep_3Mag_uuid = self.uuid_map[t3Mag_label]\n\n dep_1Ang_label = t1Ang_label\n dep_1Ang_name = fields['deps'][3]\n dep_1Ang_uuid = self.uuid_map[t1Ang_label]\n\n dep_2Ang_label = t2Ang_label\n dep_2Ang_name = fields['deps'][4]\n dep_2Ang_uuid = self.uuid_map[t2Ang_label]\n\n dep_3Ang_label = t3Ang_label\n dep_3Ang_name = fields['deps'][5]\n dep_3Ang_uuid = self.uuid_map[t3Ang_label]\n \n deps = [[dep_1Mag_label, dep_1Mag_name, dep_1Mag_uuid],\n [dep_2Mag_label, dep_2Mag_name, dep_2Mag_uuid],\n [dep_3Mag_label, dep_3Mag_name, dep_3Mag_uuid],\n [dep_1Ang_label, dep_1Ang_name, dep_1Ang_uuid],\n [dep_2Ang_label, dep_2Ang_name, dep_2Ang_uuid],\n [dep_3Ang_label, dep_3Ang_name, dep_3Ang_uuid]]\n\n param_section_name = fields['params'][0]\n param_section_value = \"Production/{0}/{1}/{2}\".format(self.location, self.name, distillate_label)\n param_name_name = fields['params'][1]\n param_name_value = \"SEQ\"\n params = [[param_section_name, param_section_value], [param_name_name, param_name_value]]\n\n outputs = fields['outputs']\n\n emitted = inigen.emit_run_body(deps, params, outputs)\n\n output_uuid_map[\"ZER_{0}ANG\".format(t)] = emitted[-9][-36:]\n output_uuid_map[\"ZER_{0}MAG\".format(t)] = emitted[-8][-36:]\n output_uuid_map[\"POS_{0}ANG\".format(t)] = emitted[-7][-36:]\n output_uuid_map[\"POS_{0}MAG\".format(t)] = emitted[-6][-36:]\n output_uuid_map[\"NEG_{0}ANG\".format(t)] = emitted[-5][-36:]\n output_uuid_map[\"NEG_{0}MAG\".format(t)] = emitted[-4][-36:]\n output_uuid_map[\"UNB_{0}NEG\".format(t)] = emitted[-3][-36:]\n output_uuid_map[\"UNB_{0}ZER\".format(t)] = emitted[-2][-36:]\n\n filename = \"{0}/SEQ_{1}.ini\".format(self.dirname, self.name)\n inigen.generate_file(filename)\n return output_uuid_map", "def tarjan_iter(g):\n ctx = TarjanContext(\n g=g,\n S=[],\n S_set=set(),\n index={},\n lowlink={},\n T=[],\n ret=[])\n main_iter = iter(g)\n while True:\n try:\n v = next(main_iter)\n except StopIteration:\n return\n if v not in ctx.index:\n _tarjan_head(ctx, v)\n while ctx.T:\n it, inside, v, w = ctx.T.pop()\n if inside:\n ctx.lowlink[v] = min(ctx.lowlink[w],\n ctx.lowlink[v])\n _tarjan_body(ctx, it, v)\n if ctx.ret:\n assert len(ctx.ret) == 1\n yield ctx.ret.pop()", "def nx_stream(transformers, extractor_json):\n graph = extractor_json[\"graph\"]\n paths = 
extractor_json[\"paths\"]\n for record in paths:\n for transformer in transformers:\n trans_kwrd = transformer.keys()[0]\n trans = transformer[trans_kwrd]\n to_set = trans.get(\"set\", [])\n attrs = _nx_lookup_attrs(to_set, record, graph)\n yield record, trans_kwrd, trans, attrs", "def getGenerators(self) -> list:\n return self.state[GENERATORS]", "def stitchable_infiles(gdf):\n gdf['date'] = pd.DatetimeIndex(gdf.sensing_start).date\n for date, gdf_date in gdf.groupby('date'):\n for relative_orbit_number, gdf_rob in gdf_date.groupby('relative_orbit_number'):\n for passdir, gdf_passdir in gdf_rob.groupby('passdir'):\n for spacecraft, gdf_spacecraft in gdf_passdir.groupby('spacecraft'):\n for somode, gdf_somode in gdf_spacecraft.groupby('sensor_operational_mode'):\n fp = _get_footprint_union(gdf_spacecraft)\n meta = dict(\n date=date, relative_orbit_number=relative_orbit_number,\n passdir=passdir, sensor_operational_mode=somode,\n spacecraft=spacecraft, footprint_union=fp)\n yield meta, gdf_somode['filepath'].values.tolist()", "def get_seq(vgz, fsq, chm=None, bp0=None, nbp=None):\n if nbp is None:\n nbp = 1024\n\n itr = VcfSeq(vgz, fsq, chm=chm, bp0=bp0)\n \n # ret = np.empty((nbp, itr.__ssz__), dtype='u1')\n ret = []\n for i in range(nbp):\n ret.append(next(itr))\n\n return ret", "def generate(self, batch_size, s=\"train\"):\n while True:\n pairs, targets = self.get_batch(batch_size,s)\n yield (pairs, targets)", "def resolutions(self):\n while not self._state_stream_.empty:\n state = self._state_stream_.pop()\n for new_state in state.next():\n self._state_stream_.push(new_state)\n if new_state.end:\n yield new_state.model", "def _gen(x_t, states, previous_direction=None):\n # Append the DWI ID of each sequence after the 3D coordinates.\n subject_ids = np.array([subject_id] * len(x_t), dtype=floatX)[:, None]\n\n if not self.use_previous_direction:\n x_t = np.c_[x_t, subject_ids]\n else:\n x_t = np.c_[x_t, subject_ids, previous_direction]\n\n results = f(x_t, *states)\n next_x_t = results[0]\n next_states = results[1:]\n return next_x_t, next_states", "def _try_to_match_transformation(graph: Union[SDFG, SDFGState], collapsed_graph: nx.DiGraph, subgraph: Dict[int, int],\n sdfg: SDFG, xform: Union[xf.PatternTransformation, Type[xf.PatternTransformation]],\n expr_idx: int, nxpattern: nx.DiGraph, state_id: int, permissive: bool,\n options: Dict[str, Any]) -> Optional[xf.PatternTransformation]:\n subgraph = {\n nxpattern.nodes[j]['node']: graph.node_id(collapsed_graph.nodes[i]['node'])\n for i, j in subgraph.items()\n }\n\n try:\n if isinstance(xform, xf.PatternTransformation):\n match = xform\n else: # Construct directly from type with options\n opts = options or {}\n try:\n match = xform(**opts)\n except TypeError:\n # Backwards compatibility, transformation does not support ctor arguments\n match = xform()\n # Set manually\n for oname, oval in opts.items():\n setattr(match, oname, oval)\n\n match.setup_match(sdfg, sdfg.sdfg_id, state_id, subgraph, expr_idx, options=options)\n match_found = match.can_be_applied(graph, expr_idx, sdfg, permissive=permissive)\n except Exception as e:\n if Config.get_bool('optimizer', 'match_exception'):\n raise\n if not isinstance(xform, type):\n xft = type(xform)\n else:\n xft = xform\n print('WARNING: {p}::can_be_applied triggered a {c} exception:'\n ' {e}'.format(p=xft.__name__, c=e.__class__.__name__, e=e))\n return None\n\n if match_found:\n return match\n\n return None", "def _rr_yun0_sqf_list(self, f):\n if f.is_ground:\n return []\n\n result, count = [], 
1\n qs = [f.diff(x) for x in self.gens]\n\n g = f\n for q in qs:\n g = self.gcd(g, q)\n\n while f != 1:\n qs = [q // g for q in qs]\n f //= g\n qs = [q - f.diff(x) for x, q in zip(self.gens, qs)]\n\n g = f\n for q in qs:\n g = self.gcd(g, q)\n if g != 1:\n result.append((g, count))\n\n count += 1\n\n return result", "def inner_generator():\n # A buffer where observed query-document features will be stored.\n # It is a list of dictionaries, one per query-document pair, where\n # each dictionary is a mapping from a feature ID to a feature value.\n for p in processed:\n yield p", "def scan(self):\n for fn in self.map:\n coords = list(self.map[fn].keys())\n coords.sort()\n for coord in coords:\n yield fn, coord, self.map[fn][coord]" ]
[ "0.5767118", "0.53664094", "0.51615536", "0.51071537", "0.5089572", "0.5014163", "0.49915856", "0.49313155", "0.4886499", "0.48748744", "0.4865992", "0.4863112", "0.48612818", "0.48256567", "0.48142615", "0.4807478", "0.47889474", "0.47657067", "0.47569597", "0.47207323", "0.47165564", "0.47051272", "0.4675953", "0.4666522", "0.46562338", "0.4636985", "0.46359912", "0.46324003", "0.46313846", "0.46302834" ]
0.5467522
1
Dwell Time: compute the dwell time for the given symbolic, 1-D time series.
import numpy as np

def dwell_time(x):
    data = x  # keep a handle on the input; the name `x` is reused below
    symbols = np.unique(data)
    dwell = {}
    dwell_mean = {}
    dwell_std = {}
    for symbol in symbols:
        # Indices where this symbol occurs, and the breaks between its runs.
        r = np.where(data == symbol)[0]
        r_diff = np.diff(r)
        r_diff_without_one = np.where(r_diff != 1)
        x = r[r_diff_without_one]
        segments = len(x)
        dur = np.zeros((segments, 1))
        len_r = len(r)
        tmp1 = np.squeeze(x)
        tmp2 = r[len_r - 1]
        xx = np.hstack([tmp1, tmp2])
        # Length of each visit, measured in occurrence-index space.
        for l in range(segments - 1):
            r1 = np.where(r == xx[l + 1])[0]
            r2 = np.where(r == xx[l])[0]
            dur[l] = r1 - r2
        r1 = np.where(r == xx[segments])[0]
        r2 = np.where(r == xx[segments - 1])[0]
        dur[segments - 1] = r1 - r2 + 1
        # Normalise everything by the series length.
        dwell[symbol] = dur / len(data)
        dwell_mean[symbol] = np.mean(dur) / len(data)
        dwell_std[symbol] = np.std(dur) / len(data)
    return (dwell, dwell_mean, dwell_std)
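For orientation, a minimal usage sketch of dwell_time. The three-symbol series is invented, and deliberately chosen so that every symbol occurs in at least two separate runs, because the segment indexing above assumes each symbol's occurrence list contains at least one run break:

import numpy as np

# Hypothetical symbolic series with states 0, 1 and 2; every state recurs,
# which the segment indexing in dwell_time relies on.
series = np.array([0, 0, 1, 1, 1, 2, 0, 0, 0, 2, 2, 1])

dwell, dwell_mean, dwell_std = dwell_time(series)
for state in np.unique(series):
    print(state, dwell_mean[state], dwell_std[state])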
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dydt(t,S):\n Scl = S[0]\n Swb = S[1]\n \n Seff_cl = (Scl - Sclmin)/(Sclmax - Sclmin)\n Lcl = acl * Seff_cl**bcl\n \n Seff_wb = (Swb - Swbmin)/(Swbmax - Swbmin)\n Lwb = awb * Seff_wb**bwb\n \n E = pE * Cf *fred\n Beta = Beta0 * Seff_cl\n \n # Equations\n dScldt = Jrf - Lcl - E\n dSwbdt = (1 - Beta) * Lcl - Lwb\n\n return np.array([dScldt, dSwbdt,Qdr])", "def dsdt(s, t, eps1, eps2):\n # model parameter constants\n lambda1 = 1e4\n lambda2 = 31.98\n d1 = 0.01\n d2 = 0.01\n f = 0.34\n k1 = 8e-7\n k2 = 1e-4\n delta = 0.7\n m1 = 1e-5\n m2 = 1e-5\n NT = 100.0\n c = 13.0\n rho1 = 1.0\n rho2 = 1.0\n lambdaE = 1\n bE = 0.3\n Kb = 100\n d_E = 0.25\n Kd = 500\n deltaE = 0.1\n\n # decompose state\n T1, T2, T1s, T2s, V, E = s\n\n # compute derivatives\n tmp1 = (1.0 - eps1) * k1 * V * T1\n tmp2 = (1.0 - f * eps1) * k2 * V * T2\n dT1 = lambda1 - d1 * T1 - tmp1\n dT2 = lambda2 - d2 * T2 - tmp2\n dT1s = tmp1 - delta * T1s - m1 * E * T1s\n dT2s = tmp2 - delta * T2s - m2 * E * T2s\n dV = (\n (1.0 - eps2) * NT * delta * (T1s + T2s)\n - c * V\n - ((1.0 - eps1) * rho1 * k1 * T1 + (1.0 - f * eps1) * rho2 * k2 * T2) * V\n )\n dE = (\n lambdaE\n + bE * (T1s + T2s) / (T1s + T2s + Kb) * E\n - d_E * (T1s + T2s) / (T1s + T2s + Kd) * E\n - deltaE * E\n )\n\n return np.array([dT1, dT2, dT1s, dT2s, dV, dE])", "def dY_dt(self, y, t=0):\n\t\t \n\t\t#variables\n\t\tpSgg = y[0] / float(sum(y))\n\t\tpSgh = y[3] / float(sum(y))\n\t\tpSh = y[3] / float(y[3] + y[4] + y[5])\n\t\t\n\t\t#exit flows\n\t\texit_Sg = y[0] * (1 / time_active) * t \n\t\texit_Pg = y[1] * (1 / time_active) * t\n\t\texit_PPg = y[2] * (1 / time_active) * t\n\t\texit_Sh = y[3] * (1 / time_active) * t\n\t\texit_Ph = y[4] * (1 / time_active) * t\n\t\texit_PPh = y[5] * (1 / time_active) * t\n\t\t#episodic flows\n\t\tSg_to_h = y[0] * (1 / tin_g) * t\n\t\tPg_to_h = y[1] * (1 / tin_g) * t\n\t\tPPg_to_h = y[2] * (1 / tin_g) * t\n\t\tSh_to_g = y[3] * (1 / tin_h) * t\n\t\tPh_to_g = y[4] * (1 / tin_h) * t\n\t\tPPh_to_g = y[5] * (1 / tin_h) * t\n\t\t#entry flows\n\t\tinto_g = new_g * t\n\t\tinto_h = new_h * t\n\t\t#infection flows\n\t\tnewinf_gg = ((y[1] + y[4]) * B1 + (y[2] + y[5]) * B2) * Cg * pSgg * t\n\t\tnewinf_gh = ((y[1] + y[4]) * B1 + (y[2] + y[5]) * B2) * Cg * pSgh * t\n\t\tnewinf_h = (y[4] * B1 + y[5] * B2) * Ch * pSh * t\n\t\t#stage progression flows\n\t\tPg_to_PPg = y[1] * D1 * t\n\t\tPPg_to_d = y[2] * D2 * t\n\t\tPh_to_PPh = y[4] * D1 * t\n\t\tPPh_to_d = y[5] * D2 * t\n\t\t\t\n\t\tstate = [- exit_Sg - newinf_gg - Sg_to_h + into_g + Sh_to_g,\n\t\t\t\t - exit_Pg - Pg_to_PPg - Pg_to_h + newinf_gg + Ph_to_g,\n\t\t\t\t - exit_PPg - PPg_to_d - PPg_to_h + Pg_to_PPg + PPh_to_g,\n\t\t\t\t - exit_Sh - newinf_gh - newinf_h - Sh_to_g + into_h + Sg_to_h,\n\t\t\t\t - exit_Ph - Ph_to_PPh - Ph_to_g + newinf_gh + newinf_h + Pg_to_h,\n\t\t\t\t - exit_PPh - PPh_to_d - PPh_to_g + Ph_to_PPh + PPg_to_h]\n\t\n\t\treturn state", "def _dt(dop, H, Lk, dt_func_data=None, integrator_time=None):\n return hamiltonian_dt(dop, H) + lindbladian_dt(dop, Lk)", "def dynstall_oye_dxdt(t,fs,u,p):\n alpha = u['alpha'](t)\n f_st = p['F_st'](alpha)\n return 1/p['tau'] * (f_st - fs)", "def dynstall_oye_dxdt_simple(fs, fs_alpha, tau):\n return 1/tau * (fs_alpha - fs)", "def time_period(s,h=30):\n\n t = 0\n\n old_z, pass_1 = 0, None\n\n while(True):\n k1 = h*sdot(s)\n k2 = h*sdot(s+k1/2)\n k3 = h*sdot(s+k2/2)\n k4 = h*sdot(s+k3)\n\n s = s+(k1+2*k2+2*k3+k4)/6\n t = t+h\n\n if (s[2]>=0 and old_z<0):\n dt = -s[2]/s[5]\n t2 = t+dt\n\n if pass_1 is None:\n pass_1 = t2\n else:\n return t2-pass_1\n\n 
old_z = s[2]", "def compute_time_step():\n\n dt = Hydro.compute_time_step()\n\n return dt", "def dynstall_mhh_dxdt(t,x,u,p):\n # Inputs\n U = u['U'](t)\n U_dot = u['U_dot'](t)\n omega = u['omega'](t)\n alpha_34 = u['alpha_34'](t)\n return dynstall_mhh_dxdt_simple(t, x, U, U_dot, omega, alpha_34, p)", "def drillTime(matID, thickness_mm, W, FWHM_mm):\n return thickness_mm / drillSpeed(matID, W, FWHM_mm)", "def SIR_timederivative(self, SIR, t): \n beta = self.beta(t)\n nu = self.nu(t)\n Sddt = - beta*SIR[0]*SIR[1]\n Iddt = beta*SIR[0]*SIR[1] - nu*SIR[1]\n Rddt = nu*SIR[1]\n return [Sddt, Iddt, Rddt]", "def ddspmt(t):\n return (spmt(t) - _spm_dd_func(t)) / 0.01", "def dm_time_behaviour(sps, use_env=True):\n pts = get_envelope(sps) if use_env else sps\n _,_,R,_,_ = linregress(pts.time, pts.dm)\n return R**2", "def dS_dt(self, species_conc, t):\n ret = self.model.species_volume_prefactor * numpy.dot(self.model.N, self.flux(species_conc, t))\n # handle rate rules\n for var in self.model.rate_rules:\n f = self.model.rate_rules[var].replace(Model.TIME_VARIABLE, str(t))\n f = self.model.rate_rules[var].replace(Model.TIME_VARIABLE, str(t))\n species2conc = dict(zip(self.model.ode_variables, species_conc))\n species2conc['math'] = globals()['math']\n # rate = eval( f, species2conc, self._external_species_conc )\n rate = eval(f, self.model.external_species_concentrations, species2conc)\n if self.model.species_2_position.has_key(var):\n ret[self.model.species_2_position[var]] = rate\n else:\n l = ret.tolist()\n l.append(rate)\n ret = numpy.array(l)\n return ret", "def dspmt(t):\n t = np.asarray(t)\n return spmt(t) - spmt(t - 1)", "def test_double_ended_ols_wls_estimate_synthetic():\n from dtscalibration import DataStore\n import numpy as np\n\n np.random.seed(0)\n\n cable_len = 100.\n nt = 50\n time = np.arange(nt)\n x = np.linspace(0., cable_len, 100)\n ts_cold = np.ones(nt) * 4.\n ts_warm = np.ones(nt) * 20.\n\n C_p = 15246\n C_m = 2400.\n dalpha_r = 0.0005284\n dalpha_m = 0.0004961\n dalpha_p = 0.0005607\n gamma = 482.6\n cold_mask = x < 0.5 * cable_len\n warm_mask = np.invert(cold_mask) # == False\n temp_real = np.ones((len(x), nt))\n temp_real[cold_mask] *= ts_cold + 273.15\n temp_real[warm_mask] *= ts_warm + 273.15\n\n st = C_p * np.exp(-dalpha_r * x[:, None]) * \\\n np.exp(-dalpha_p * x[:, None]) * np.exp(gamma / temp_real) / \\\n (np.exp(-gamma / temp_real) - 1)\n ast = C_m * np.exp(-dalpha_r * x[:, None]) * \\\n np.exp(-dalpha_m * x[:, None]) / (np.exp(-gamma / temp_real) - 1)\n rst = C_p * np.exp(-dalpha_r * (-x[:, None] + cable_len)) * \\\n np.exp(-dalpha_p * (-x[:, None] + cable_len)) * \\\n np.exp(gamma / temp_real) / (np.exp(-gamma / temp_real) - 1)\n rast = C_m * np.exp(-dalpha_r * (-x[:, None] + cable_len)) * np.exp(\n -dalpha_m * (-x[:, None] + cable_len)) / \\\n (np.exp(-gamma / temp_real) - 1)\n\n alpha = np.mean(np.log(rst / rast) - np.log(st / ast), axis=1) / 2\n\n ds = DataStore({\n 'st': (['x', 'time'], st),\n 'ast': (['x', 'time'], ast),\n 'rst': (['x', 'time'], rst),\n 'rast': (['x', 'time'], rast),\n 'userAcquisitionTimeFW': (['time'], np.ones(nt)),\n 'userAcquisitionTimeBW': (['time'], np.ones(nt)),\n 'cold': (['time'], ts_cold),\n 'warm': (['time'], ts_warm)\n },\n coords={\n 'x': x,\n 'time': time},\n attrs={\n 'isDoubleEnded': '1'})\n\n sections = {\n 'cold': [slice(0., 0.5 * cable_len)],\n 'warm': [slice(0.5 * cable_len, cable_len)]}\n\n # OLS\n ds.calibration_double_ended(sections=sections,\n st_label='st',\n ast_label='ast',\n rst_label='rst',\n rast_label='rast',\n 
method='ols',\n solver='sparse')\n\n np.testing.assert_almost_equal(\n ds.gamma.values, gamma, decimal=6)\n np.testing.assert_almost_equal(\n ds.alpha.values, alpha, decimal=7)\n np.testing.assert_almost_equal(\n ds.TMPF.values, temp_real - 273.15, decimal=4)\n np.testing.assert_almost_equal(\n ds.TMPB.values, temp_real - 273.15, decimal=4)\n np.testing.assert_almost_equal(\n ds.TMPW.values, temp_real - 273.15, decimal=4)\n\n # WLS\n ds.calibration_double_ended(sections=sections,\n st_label='st',\n ast_label='ast',\n rst_label='rst',\n rast_label='rast',\n st_var=1e-7,\n ast_var=1e-7,\n rst_var=1e-7,\n rast_var=1e-7,\n method='wls',\n solver='sparse',\n tmpw_mc_size=5)\n\n np.testing.assert_almost_equal(\n ds.gamma.values, gamma, decimal=5)\n np.testing.assert_almost_equal(\n ds.alpha.values, alpha, decimal=6)\n np.testing.assert_almost_equal(\n ds.TMPF.values, temp_real - 273.15, decimal=4)\n np.testing.assert_almost_equal(\n ds.TMPB.values, temp_real - 273.15, decimal=4)\n np.testing.assert_almost_equal(\n ds.TMPW.values, temp_real - 273.15, decimal=4)", "def test_single_ended_ols_wls_estimate_synthetic():\n\n from dtscalibration import DataStore\n import numpy as np\n\n np.random.seed(0)\n\n cable_len = 100.\n nt = 50\n time = np.arange(nt)\n x = np.linspace(0., cable_len, 500)\n ts_cold = np.ones(nt) * 4.\n ts_warm = np.ones(nt) * 20.\n\n C_p = 15246\n C_m = 2400.\n dalpha_r = 0.0005284\n dalpha_m = 0.0004961\n dalpha_p = 0.0005607\n gamma = 482.6\n cold_mask = x < 0.5 * cable_len\n warm_mask = np.invert(cold_mask) # == False\n temp_real = np.ones((len(x), nt))\n temp_real[cold_mask] *= ts_cold + 273.15\n temp_real[warm_mask] *= ts_warm + 273.15\n\n st = C_p * np.exp(-dalpha_r * x[:, None]) * \\\n np.exp(-dalpha_p * x[:, None]) * \\\n np.exp(gamma / temp_real) / (np.exp(gamma / temp_real) - 1)\n ast = C_m * np.exp(-dalpha_r * x[:, None]) * \\\n np.exp(-dalpha_m * x[:, None]) / (np.exp(gamma / temp_real) - 1)\n\n print('alphaint', cable_len * (dalpha_p - dalpha_m))\n print('alpha', dalpha_p - dalpha_m)\n print('C', np.log(C_p / C_m))\n print('x0', x.max())\n\n ds = DataStore({\n 'st': (['x', 'time'], st),\n 'ast': (['x', 'time'], ast),\n 'userAcquisitionTimeFW': (['time'], np.ones(nt)),\n 'cold': (['time'], ts_cold),\n 'warm': (['time'], ts_warm)\n },\n coords={\n 'x': x,\n 'time': time},\n attrs={\n 'isDoubleEnded': '0'})\n\n sections = {\n 'cold': [slice(0., 0.5 * cable_len)],\n 'warm': [slice(0.5 * cable_len, cable_len)]}\n\n # OLS\n ds.calibration_single_ended(sections=sections,\n st_label='st',\n ast_label='ast',\n method='ols',\n solver='sparse')\n\n np.testing.assert_almost_equal(\n ds.gamma.values, gamma, decimal=6)\n np.testing.assert_almost_equal(\n ds.dalpha.values, dalpha_p - dalpha_m, decimal=8)\n np.testing.assert_almost_equal(\n ds.TMPF.values, temp_real - 273.15, decimal=4)\n\n # WLS\n ds.calibration_single_ended(sections=sections,\n st_label='st',\n ast_label='ast',\n st_var=1.,\n ast_var=1.,\n method='wls',\n solver='sparse')\n\n np.testing.assert_almost_equal(\n ds.gamma.values, gamma, decimal=6)\n np.testing.assert_almost_equal(\n ds.dalpha.values, dalpha_p - dalpha_m, decimal=8)\n np.testing.assert_almost_equal(\n ds.TMPF.values, temp_real - 273.15, decimal=4)\n\n pass", "def do_dt(r, t):\n return -o(r,t)/(2*t)", "def tpsys(t,y,gas,mw,T,P):\n\n # Set the state of the gas, based on the current solution vector.\n gas.TPY = T,P,y\n nsp = gas.n_species\n # species molar production rates\n wdot = gas.net_production_rates\n # set up column vector for dydt\n dYdt = []\n # 
species time evolution equations\n rrho = 1.0/gas.density\n for i in range(nsp):\n dYdt.append(rrho*mw[i]*wdot[i])\n \n return dYdt", "def dpsi_dt(self, psi, t):\n#\t#To avoid doing anything twice. (odeint tends to do that.)\n#\t#---------------------------------------------------------\n#\tnovel, result = self.check_novelty(t,psi)\n#\tif not novel:\n#\t if self.my_id == 0:\n#\t\tprint \"Time: %2.2f / %2.2f au. Runtime: %2.2f---\"%(\n#\t\t t, self.total_duration, (time.time() - self.t_0)/60.)\n#\t\tself.debug_norm(t, psi, result)\t\n#\t\t\n#\t return result\n#\t##########################################################\n\n\t#Making a complex array. \n\tpsi_complex = psi[:len(psi)/2] + 1j * psi[len(psi)/2:] \n\t\n\tdp_dt_complex = zeros(psi_complex.shape, dtype = complex)\n\tdp_dt_buffer= zeros(psi_complex.shape, dtype = complex)\n\t\n\n\t#Do operations.\n\tmat_vec = self.mat_vec_product(psi_complex, t)\n\n\tdp_dt_complex[self.my_slice] = self.solve_overlap(-1j * mat_vec)\n\t\n\n\n\t#Add and redistribute.\n\tdp_dt_complex = pypar.reduce(dp_dt_complex, pypar.SUM, 0, buffer = dp_dt_buffer)\n\tdp_dt_buffer = dp_dt_complex.copy()\n\tdp_dt_complex = pypar.broadcast(dp_dt_buffer, 0)\n\t\n\n\n\t#Making a float array.\n\tdp_dt = r_[real(dp_dt_buffer), imag(dp_dt_buffer)] \n\t\n\tif self.my_id == 0:\n\t print \"Time: %2.2f / %2.2f au. Runtime: %2.2f\"%(\n\t\tt, self.total_duration, (time.time() - self.t_0)/60.)\n\t self.debug_norm(t, psi, dp_dt)\t\n\t\n\t#Store latest result. ----------------------------------\n\tself.prev_out = dp_dt\n\t############################3###########################3\n\treturn dp_dt", "def time_function(t):\n\n omega = np.pi\n return np.sin(omega * t) + np.sin(10 * omega * t) + np.sin(20 * omega * t)", "def dtw(ts1, ts2, derivative=False):\n s = ts1\n t = ts2\n\n if derivative:\n tmp_ts1 = []\n tmp_ts2 = []\n for i in range(len(ts1) - 1):\n tmp_ts1.append(ts1[i + 1] - ts1[i])\n tmp_ts2.append(ts2[i + 1] - ts2[i])\n s = tmp_ts1\n t = tmp_ts2\n\n n, m = len(s), len(t)\n dtw_matrix = np.zeros((n + 1, m + 1))\n for i in range(n + 1):\n for j in range(m + 1):\n dtw_matrix[i, j] = np.inf\n dtw_matrix[0, 0] = 0\n\n for i in range(1, n + 1):\n for j in range(1, m + 1):\n cost = abs(s[i - 1] - t[j - 1])\n # take last min from a square box\n last_min = np.min([dtw_matrix[i - 1, j], dtw_matrix[i, j - 1], dtw_matrix[i - 1, j - 1]])\n dtw_matrix[i, j] = cost + last_min\n return dtw_matrix[-1][-1]", "def dy(y, t, gamma, w0, drive_amp, drive_w):\n\n x, p = y[0], y[1]\n dx = p\n dp = -2 * gamma * p - w0**2 * x + w0**2 * drive_amp*np.cos(drive_w*t)\n return [dx, dp]", "def firstderiv(state, time, press):\n dy = np.zeros_like(state)\n pyjacob.py_dydt(time, press, state, dy)\n return dy", "def dt(self):\n if isinstance(self._time_axis, are_ax.RegularAxis):\n return self._time_axis.step\n raise RuntimeError(\"Time step is not available for orbits constructed with non-regular time axis\")", "def evolve_system(self, x, n, k, gamma):\n temp = tf.pow(k, n)/(tf.pow(x, n)+tf.pow(k,n))\n # dxdt = tf.manip.roll(temp, shift = -1, axis = 1) - gamma*x # v1.6+\n dxdt = tf.concat([ tf.reshape(temp[:, -1], [-1, 1]),\n temp[:,:-1]], axis=1) - gamma*x # v1.5\n dxdt = tf.convert_to_tensor(dxdt, dtype = tf.float32, name = \"dxdt\")\n return dxdt", "def fixed_time_trajectories(self, ll=1, distributions=None, discrete=False, noise=0):\n\n self.time_uniform = np.linspace(0, self.nsteps, self.nsteps * self.padding)\n\n for t in tqdm.tqdm(range(self.ntraj)):\n\n if distributions is not None:\n\n if 
self.dwell_distribution == 'exponential':\n self.lamb = np.random.choice(distributions[0])\n elif self.dwell_distribution == 'power':\n self.alpha = np.random.choice(distributions[0])\n\n self.hop_sigma = np.random.choice(distributions[1])\n self.H = np.random.choice(distributions[2])\n #self.H = np.mean(distributions[2])\n\n time = [0]\n total_time = 0 # saves a lot of time\n\n while total_time < self.nsteps:\n\n # hop at random time intervals according to one of the following PDFs\n if self.dwell_distribution == 'exponential':\n time.append(sampling.random_exponential_dwell(self.lamb))\n elif self.dwell_distribution == 'power':\n if self.alpha == 1:\n time.append(1)\n else:\n time.append(sampling.random_power_law_dwell(1 + self.alpha, ll=ll, discrete=discrete)[0])\n else:\n sys.exit('Please enter a valid dwell time probability distribution')\n total_time += time[-1]\n\n time = np.cumsum(time)\n\n if self.hop_distribution in ['gaussian', 'Gaussian']:\n\n z = np.cumsum(np.random.normal(loc=0, scale=self.hop_sigma, size=len(time)))\n z -= z[0] # untested\n\n elif self.hop_distribution in ['fbm', 'fractional', 'fraction_brownian_motion']:\n z = fbm.FBM(len(time), self.H, method=\"daviesharte\").fbm()[:-1] # automatically inserts zero at beginning of array\n z /= ((1.0 / len(time)) ** self.H) # reversing a normalization done in the fbm code\n z *= self.hop_sigma\n self.steps.append(z[1:] - z[:-1]) # for autocorrelation calculation\n\n else:\n sys.exit('Please enter a valid hop distance probability distribution')\n\n # for visualizing hops\n # trajectory_hops = np.zeros([2 * len(time) - 1, 2])\n #\n # trajectory_hops[1::2, 0] = time[1:]\n # trajectory_hops[2::2, 0] = time[1:]\n #\n # trajectory_hops[::2, 1] = z\n # trajectory_hops[1:-1:2, 1] = z[:-1]\n # trajectory_hops[-1, 1] = z[-1]\n # plt.plot(trajectory_hops[:, 0], trajectory_hops[:, 1])\n # plt.show()\n # exit()\n\n # make uniform time intervals with the same interval for each simulated trajectory\n self.z_interpolated[t, :] = z[np.digitize(self.time_uniform, time, right=False) - 1]\n\n #plt.hist(np.random.normal(loc=0, scale=noise, size=len(self.time_uniform)))\n\n if noise > 0:\n self.z_interpolated += np.random.normal(loc=0, scale=noise, size=len(self.time_uniform))\n\n self.time_uniform *= self.dt\n # plt.plot(trajectory_hops[:, 0]*self.dt, trajectory_hops[:, 1])\n # plt.plot(self.time_uniform, self.z_interpolated[-1, :])\n # plt.show()\n # exit()", "def test_single_ended_wls_estimate_synthetic():\n\n from dtscalibration import DataStore\n\n cable_len = 100.0\n nt = 50\n time = np.arange(nt)\n x = np.linspace(0.0, cable_len, 500)\n ts_cold = np.ones(nt) * 4.0\n ts_warm = np.ones(nt) * 20.0\n\n C_p = 15246\n C_m = 2400.0\n dalpha_r = 0.0005284\n dalpha_m = 0.0004961\n dalpha_p = 0.0005607\n gamma = 482.6\n cold_mask = x < 0.5 * cable_len\n warm_mask = np.invert(cold_mask) # == False\n temp_real = np.ones((len(x), nt))\n temp_real[cold_mask] *= ts_cold + 273.15\n temp_real[warm_mask] *= ts_warm + 273.15\n\n st = (\n C_p\n * np.exp(-dalpha_r * x[:, None])\n * np.exp(-dalpha_p * x[:, None])\n * np.exp(gamma / temp_real)\n / (np.exp(gamma / temp_real) - 1)\n )\n ast = (\n C_m\n * np.exp(-dalpha_r * x[:, None])\n * np.exp(-dalpha_m * x[:, None])\n / (np.exp(gamma / temp_real) - 1)\n )\n\n print(\"alphaint\", cable_len * (dalpha_p - dalpha_m))\n print(\"alpha\", dalpha_p - dalpha_m)\n print(\"C\", np.log(C_p / C_m))\n print(\"x0\", x.max())\n\n ds = DataStore(\n {\n \"st\": ([\"x\", \"time\"], st),\n \"ast\": ([\"x\", \"time\"], ast),\n 
\"userAcquisitionTimeFW\": ([\"time\"], np.ones(nt)),\n \"cold\": ([\"time\"], ts_cold),\n \"warm\": ([\"time\"], ts_warm),\n },\n coords={\"x\": x, \"time\": time},\n attrs={\"isDoubleEnded\": \"0\"},\n )\n\n sections = {\n \"cold\": [slice(0.0, 0.5 * cable_len)],\n \"warm\": [slice(0.5 * cable_len, cable_len)],\n }\n\n # WLS\n ds.calibration_single_ended(\n sections=sections, st_var=1.0, ast_var=1.0, method=\"wls\", solver=\"sparse\"\n )\n\n assert_almost_equal_verbose(ds.gamma.values, gamma, decimal=6)\n assert_almost_equal_verbose(ds.dalpha.values, dalpha_p - dalpha_m, decimal=8)\n assert_almost_equal_verbose(ds.tmpf.values, temp_real - 273.15, decimal=4)\n\n pass", "def dynstall_mhh_dxdt_simple(t, x, U, U_dot, omega, alpha_34, p):\n # States\n x1=x[0] # Downwash memory term 1\n x2=x[1] # Downwash memory term 2\n x3=x[2] # Clp', Lift coefficient with a time lag to the attached lift coeff\n x4=x[3] # f'' , Final separation point function\n # Parameters\n alpha0 = p['alpha0']\n Cla = p['Cla']\n c = p['chord']\n A1 = p['A1']\n A2 = p['A2']\n b1 = p['b1']\n b2 = p['b2']\n F_st = p['F_st']\n # Variables derived from inputs\n U = max(U, 0.01)\n Tu = max(c/(2*U), 1e-4) # Eq. 23\n Tf = p['Tf0']*Tu # OLD was twice: Tf = p['Tf0']*c/U\n Tp = p['Tp0']*Tu # OLD was twice: Tp = p['Tp0']*c/U\n # Variables derived from states\n if p['alpha0_in_x1x2']:\n alphaE = alpha_34*(1-A1-A2)+ x1 + x2 # Eq. 12\n else:\n alphaE = (alpha_34-alpha0)*(1-A1-A2)+ x1 + x2 + alpha0 # Eq. 12\n\n# alphaE = u['alphaE'](t) # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< HACK HACK TODO TODO TODO TODO TODO\n\n Clp = Cla * (alphaE-alpha0) + np.pi * Tu * omega # Eq. 13\n alphaF = x3/Cla+alpha0 # p. 13\n fs_aF = F_st(alphaF) # p. 13\n if(fs_aF<0):\n print('Problematic fs:',fs_aF)\n x4 = np.clip(x4, 1e-16, 1.0) # Constraining x4 between 0 and 1 increases numerical stability\n # State equation\n xdot = [0]*4\n if p['alpha0_in_x1x2']:\n xdot[0] = -1/Tu * (b1 + c * U_dot/(2*U**2)) * x1 + b1 * A1 / Tu * alpha_34\n xdot[1] = -1/Tu * (b2 + c * U_dot/(2*U**2)) * x2 + b2 * A2 / Tu * alpha_34\n else:\n xdot[0] = -1/Tu * (b1 + c * U_dot/(2*U**2)) * x1 + b1 * A1 / Tu * (alpha_34-alpha0)\n xdot[1] = -1/Tu * (b2 + c * U_dot/(2*U**2)) * x2 + b2 * A2 / Tu * (alpha_34-alpha0)\n xdot[2] = -1/Tp * x3 + 1/Tp * Clp\n xdot[3] = -1/Tf * x4 + 1/Tf * fs_aF\n return xdot", "def time_step(self, dt, Nsteps = 1):\n self.dt = dt\n\n if Nsteps > 0:\n self.psi_mod_x *= self.x_evolve_half\n \n for i in xrange(Nsteps - 1):\n self.k_fft()\n self.psi_mod_k *= self.k_evolve\n self.x_fft()\n self.psi_mod_x *= self.x_evolve\n \n self.k_fft()\n\n self.t += dt * Nsteps" ]
[ "0.65580326", "0.61233747", "0.610152", "0.60596985", "0.5993235", "0.5960062", "0.59412473", "0.5851181", "0.5830028", "0.5748378", "0.57329965", "0.5726178", "0.5721409", "0.5717818", "0.5668342", "0.5655372", "0.5641321", "0.5618273", "0.56117505", "0.5595713", "0.5545355", "0.5505074", "0.5503246", "0.5438043", "0.5413629", "0.54025066", "0.5394752", "0.5388487", "0.5386075", "0.53752846" ]
0.6985884
0
Given the URL and directory, download the image page's HTML for parsing. Parse the full HTML to find the important bit concerning the image's actual host location by looking in the 'leftside' content div wrapper. Extract the image name and description to be used as the image's name. Download the image and save it to the given directory!
import requests

def get_image_qm(html_src, todir):
    # print url
    # img_details() is defined elsewhere in the source module; it parses the
    # page HTML and returns the image's host URL and its name/description.
    img_url, title = img_details(html_src)
    r = requests.get(img_url)
    with open(todir + title + '.jpg', 'wb') as f:
        f.write(r.content)
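A usage sketch for the function above, assuming img_details() from the same module is importable and returns (image_url, title) as the body implies. The page URL is a placeholder, and todir must be an existing directory ending in a path separator, since the function concatenates strings directly:

import requests

# Placeholder page URL; replace with a real image page.
page_html = requests.get("https://example.com/image-page").text
get_image_qm(page_html, todir="downloads/")  # writes downloads/<title>.jpg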
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extract_image(page_html, family_url, folder):\n image_extractor = Extractor(page_html, family_url)\n for url in image_extractor.get_image_table():\n image_page_url = urljoin(family_url, url)\n # print(image_page_url)\n imres = requests.get(image_page_url)\n image_page_extractor = Extractor(imres.text, image_page_url)\n image_src, image_name = image_page_extractor.get_image_link()\n\n image_link = urljoin(image_page_url, image_src)\n\n print(image_link, image_name)\n # Download image\n fetch(image_link, image_name, folder)", "def html_url_parser(url, save_dir, show=False, wait=False):\n\n website = urlopen(url)\n html = website.read()\n\n soup = BeautifulSoup(html, \"lxml\")\n\n for image_id, link in enumerate(soup.find_all(\"a\", href=True)):\n if image_id == 0:\n continue\n\n img_url = link[\"href\"]\n\n try:\n if os.path.isfile(save_dir + \"img-%d.png\" % image_id) == False:\n print(\"[INFO] Downloading image from URL:\", link[\"href\"])\n image = Image.open(urlopen(img_url))\n image.save(save_dir + \"img-%d.png\" % image_id, \"PNG\")\n if show:\n image.show()\n else:\n print(\"skipped\")\n except KeyboardInterrupt:\n print(\"[EXCEPTION] Pressed 'Ctrl+C'\")\n break\n except Exception as image_exception:\n print(\"[EXCEPTION]\", image_exception)\n continue\n\n if wait:\n key = input(\"[INFO] Press any key to continue ('q' to exit)... \")\n if key.lower() == \"q\":\n break", "def download_images(img_urls, dest_dir, base_url=\"http://code.google.com\"):\n create_dir(dest_dir)\n img_tags = fetch_call(img_urls, dest_dir)\n create_html(dest_dir, img_tags)", "def download_img_and_save(url, path):\n import requests\n a = url.find(\"UW-EauClaireCOVID-19DataTrackerDashboard\")\n b = len(url)\n fn = url[a:b].replace('/','_')\n fn = '{}/{}'.format(path,fn)\n with open(fn, \"wb\") as f:\n f.write(requests.get(url).content)", "def download_images(src_dir, dest_dir):\n # +++your code here+++\n if not os.path.exists(dest_dir):\n os.mkdir(dest_dir)\n res=utility(src_dir)\n k=0\n f=file(dest_dir+\"/\"+\"index.html\", 'w')\n f.write(\"<html><body>\")\n for i in res:\n local_name='image'+str(k)\n print \"downloading image%d\" %(k)\n urllib.urlretrieve(i, os.path.join(dest_dir, local_name))\n f.write(\"<img src=\"+'\"'+os.path.join(dest_dir, local_name)+'\"'+\">\")\n k+=1\n f.write(\"</body></html>\")\n f.close()\n cmd=\"xdg-open\"+\" \"+'\"'+dest_dir+\"/\"+\"index.html\"+'\"'\n (status, output)=commands.getstatusoutput(cmd)\n sys.exit(1)", "def download(url, out_folder):\n \n filename = \"2.png\"\n \n outpath = os.path.join(out_folder, filename)\n \n if url.lower().startswith(\"http\"):\n urlretrieve(url, outpath)\n else:\n urlretrieve(urlparse.urlunparse(parsed), outpath)", "def download_content(content_link, output_dir):\n if content_link is None: return None\n res = requests.get(content_link, stream=True)\n try:\n res.raise_for_status()\n except requests.exceptions.HTTPError:\n return None\n img_name, img_format = parse_image_url(res.url)\n filepath = '{}/{}.{}'.format(output_dir, img_name, img_format)\n\n with open(filepath, mode='wb') as image_file:\n for chunk in res.iter_content(chunk_size=chunk_size):\n image_file.write(chunk)\n\n return abspath(filepath)", "def download_images(img_urls, dest_dir):\n # +++your code here+++\n (errcode, statusmsg) = check_create_dir(dest_dir)\n if errcode:\n print statusmsg\n sys.exit(errcode)\n else: print statusmsg\n # retrieve images and generate html code for files\n html_str = '<html>\\n<body>\\n' # opening html file tags\n i = 0\n for img in img_urls:\n 
img_filename = 'img' + str(i)\n full_filepath = os.path.join(dest_dir, img_filename) \n print 'Retrievieng ' + img + ' to ' + full_filepath + ' file..'\n urllib.urlretrieve(img, full_filepath)\n html_str += '<img src=\\\"' + img_filename + '\\\">'\n i += 1\n html_str += '\\n</html>\\n</body>' # closing html file tags\n # create html file\n html_filename = os.path.join(dest_dir, 'index.html')\n f = open(html_filename, 'w')\n f.write(html_str) \n f.close()\n print 'File ' + html_filename + ' was created.'", "def read_from_server(url_base=\"http://10.200.102.18/\", url_dir=\"G179-dataset/\"):\n\n all_images = urllib2.urlopen(url_base + url_dir).read()\n\n parser = ImagesHTMLParser()\n parser.feed(all_images)\n data = parser.data\n imgs = []\n\n print(\"Found %d images!\" % len(data))\n print(\"Started Download!\")\n i = 1\n\n for d in data:\n print(\"\\rProgress: %d/%d \" % (i, len(data)), end='')\n dl_img = urllib2.urlopen(url_base + url_dir + d).read()\n asd = cStringIO.StringIO(dl_img)\n img = Image.open(asd)\n imgs.append(np.array(img))\n i = i + 1\n\n return imgs", "def download_images(img_urls, dest_dir):\n # Creating the directory if the directory does not already exist\n if not os.path.exists(str(dest_dir)):\n os.mkdir(dest_dir)\n print ('Retrieving...')\n with open(str(dest_dir) + '/index.html', 'w') as f:\n f.write(\"<html>\\n<body>\\n\")\n for index, url in enumerate(img_urls):\n img_name = 'img' + str(index + 1)\n urllib.urlretrieve(\"https://code.google.com\" + url, filename=str(dest_dir) + '/'\n + img_name +'.jpg')\n print ('Downloaded ' + url[-10:] + \": \" + \\\n str(index + 1) + \" images downloaded\")\n\n f.write(\"<img src=\" + '\"' + img_name +\".jpg\" +'\">')\n f.write(\"\\n</html>\\n</body>\")\n print ('Download Complete!')\n pass", "def download_img(self, url, output):\n try:\n print(\"Downloading from: %s\" % url)\n with open(output, 'wb') as f:\n f.write(urllib2.urlopen(url).read())\n print(\"Wrote to: %s\" % output)\n except IOError, e:\n print(e)", "def dl_image(img_name, img_url):\n path = os.path.join(base_path, img_name)\n res = requests.get(img_url)\n with open(path, 'wb') as fout:\n fout.write(res.content)", "def get_img_from_url(index, url):\n try:\n with urllib.request.urlopen(url) as response:\n if response.headers.get_content_maintype() == 'image':\n image_filename = image_filename_prefix.format(name=image_class_name,\n counter=index,\n ext=response.headers.get_content_subtype())\n image_filepath = os.path.join(target_folder, image_filename)\n with open(image_filepath, 'wb') as image_file:\n image_file.write(response.read())\n\n print('Fetched URL {}'.format(index))\n\n except urllib.request.HTTPError:\n pass\n except Exception:\n pass", "def download_image_from(link, directory, name):\n try:\n img_content = requests.get(link).content\n image_file = io.BytesIO(img_content)\n image = Image.open(image_file).convert('RGB')\n image.save(f'./{directory}/{name}.png', 'PNG', quality=100, subsampling=0)\n except:\n pass", "def fetch(self, page, part):\n\n file = '/page-' + str(page)\n if part > 1:\n file += '.' 
+ str(part)\n file += '.png'\n\n source = self.baseUrl + file\n destination = self.download\n\n no_problem_unlink(destination + '-small')\n no_problem_unlink(destination)\n\n image = http_get(source)\n if image.find('Not Found') == -1 and len(image) > 0:\n f = open(destination + '-small', 'w')\n f.write(image)\n f.close()\n\n if os.path.exists(destination + '-small') and os.path.getsize(destination + '-small') > 0:\n width = self.ORIGINAL_WIDTH * self.RESIZE_FACTOR\n height = self.ORIGINAL_HEIGHT * self.RESIZE_FACTOR\n resize_png(width, height, destination + '-small', destination)\n return destination\n else:\n return self.default", "def download_images(img_urls, dest_dir):\n if not os.path.exists(dest_dir):\n # If the directory doesn't exist, create it\n os.mkdir(dest_dir)\n count = 0\n img_string = ''\n # Copies each file from the url provided to the directory provided\n for file in img_urls:\n new_filename = '{}/img{}.jpg'.format(dest_dir, count)\n print \"Retrieving {}\".format(file)\n urllib.urlretrieve(file, new_filename)\n img_string += \"<img src = 'img{}.jpg'>\".format(count)\n count += 1\n print \"Retrieved {} files\".format(count)\n # Creates an html file to display the completed image\n with open('{}/index.html'.format(dest_dir), 'w') as f:\n f.write(\n '<html>\\n<body>\\n{}\\n</body>\\n</html>'.format(img_string)\n )\n pass", "def pywget_inside_crawler(url):\n\n # open and read the url\n content = ''\n try:\n request = urllib.request.urlopen(url)\n content = request.read().decode(\"utf-8\")\n except:\n pass\n\n # find all contents we need which are links and srcs using regex\n match = re.findall(r'<a href=\"(.*?)\"', content) + \\\n re.findall(r'<img src=\"(.*?)\"', content) + \\\n re.findall(r'<a href = \"(.*?)\"', content) + \\\n re.findall(r'<img src = \"(.*?)\"', content)\n\n domain_name = url[0 : url.rfind('/')]\n\n all_item_list = []\n\n # if it's an absolute link, add it to all_item_list\n # if it's a relative link, add prefix in the front and add it to the list\n if match:\n for item in match:\n if item.startswith(\"http://\") or item.startswith(\"https://\") or item.startswith(\"//\"):\n if item.startswith(domain_name):\n all_item_list.append(item)\n else:\n all_item_list.append(domain_name + \"/\" + item)\n\n # apply pywget_download_inside\n for item in all_item_list:\n pywget(item, first_time=False)", "def download_image(urls):\r\n image_paths = []\r\n\r\n base_url = \"https://classifieds.castanet.net\"\r\n image_directory = os.path.join('C:\\\\', 'users', 'ccholon', 'my documents', 'castanet images')\r\n\r\n for url in urls:\r\n listing_url = base_url + url\r\n image_page = requests.get(listing_url)\r\n image_soup = BeautifulSoup(image_page.text, 'html.parser')\r\n\r\n # find the URL for the listing image\r\n image_element = image_soup.find(name='div', class_='image_container')\r\n image_element = image_element.find(name='img')\r\n image_url = image_element.get('src')\r\n\r\n # download the image\r\n #image = requests.get(image_url, stream=True)\r\n\r\n # save to local directory\r\n #image_file = open(os.path.join(image_directory, os.path.basename(image_url)), 'wb')\r\n #for bytes in image.iter_content(100000):\r\n #image_file.write(bytes)\r\n #image_file.close()\r\n\r\n image_paths.append(os.path.join(image_directory, os.path.basename(image_url)))\r\n\r\n return image_paths", "def fetchImgOrDir(url, verboseLogs):\n try:\n resp = urllib.request.urlopen(url)\n except Exception as e:\n if verboseLogs:\n logging.error('Result of fetch from %s: %s', url, str(e))\n 
return (None, None)\n if resp.getheader('content-type') == 'image/jpeg':\n return ('img', resp)\n else:\n return ('dir', resp)", "def download_image(image_url, image_name, collection_id):\n try:\n response = requests.get(image_url)\n folder_path = imgs_directory + '/' + collection_id\n if not os.path.exists(folder_path):\n os.makedirs(folder_path)\n image_path = folder_path + '/' + image_name\n # image_path = os.path.join(folder_path, image_name)\n with open(image_path, 'wb') as f:\n f.write(response.content)\n return image_path\n except Exception as e:\n print(f\"An error occurred while downloading image {image_name}. Error message: {e}\")\n return None", "def download_image(url, filename):\n r = requests.get(url)\n open(filename, 'wb').write(r.content)", "def get_images(url):\n \n # =============================================================================\n # Selenium.\n # =============================================================================\n\n chrome_options = Options()\n #chrome_options.add_argument('--incognito')\n #chrome_options.add_argument('--headless')\n #chrome_options.add_argument('--no-sandbox')\n \n driver = webdriver.Chrome(options=chrome_options,executable_path='/usr/local/bin/chromedriver') # Optional argument, if not specified will search path.\n driver.get('https://' + url)\n \n #scrolling to bottom to load all images on the page\n driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight);\")\n #sleep to make sure everything loads\n time.sleep(5)\n \n \n html_source = driver.page_source\n \n img_alt_src(html_source)\n \n driver.close()\n driver.quit()", "def download(input_file, img_dir):\n xml_parser = OvenXMLParser()\n xml_parser.load_file(opts.input_file)\n downloader = IMGDownloader()\n for item in xml_parser.item_generator():\n ad_id = item.find(conf.AD_ID_KEY).text\n output_dir = \"%s/%s\" % (img_dir, ad_id)\n img_sources = [img.find(\"src\").text for\n img in item.find(conf.IMGS_KEY).findall(\"value\")]\n if img_sources and not os.path.exists(output_dir):\n os.makedirs(output_dir, 0755)\n for src in img_sources:\n filename = ntpath.basename(src)\n outpath = \"%s/%s\" % (output_dir, filename)\n if not os.path.exists(outpath):\n downloader.download_img(src, outpath)\n else:\n print(\"Img file already exists: %s (not overwriting)\" % outpath)", "def extract_images_url(url, source):\n if source == \"mangaseeonline\":\n r = s.post(\n \"http://playwright:5000/scrape\",\n json={\n \"url\": url.replace(\"-page-1\", \"\"), \"wait\": 1}\n )\n tree = html.fromstring(r.text)\n return tree.xpath('//*[@id=\"TopPage\"]/descendant::img/@src')\n if source == \"nettruyen\":\n r = s.get(\n settings.SPLASH_URL, params={\n \"url\": url.replace(\"-page-1\", \"\"), \"wait\": 1}\n )\n tree = html.fromstring(r.text)\n return tree.xpath('//*[@class=\"reading-detail box_doc\"]/div/img/@src')\n if source == \"doctruyen3q\":\n r = s.get(\n settings.SPLASH_URL, params={\"url\": url, \"wait\": 1}\n )\n tree = html.fromstring(r.text)\n return tree.xpath('//*[contains(@id, \"page_\")]/img/@src')\n if source == \"truyenkinhdien\":\n r = s.get(\n settings.SPLASH_URL.replace(\"render.html\", \"execute\"),\n params={\"url\": url, \"lua_source\": lua_script, \"wait\": 1},\n )\n tree = html.fromstring(r.json()[\"html\"])\n return tree.xpath(\n '//*[@class=\"sgdg-gallery\"]/a[not(contains(@style,\"display:none\"))]/img/@src'\n )", "def get_content(url):\n img=requests.get(url).content\n return img", "def download_images(article: Article) -> Article:\n img_tag: Tag\n for img_tag 
in article.content.find_all('img'):\n url = img_tag.attrs['src']\n filename = os.path.basename(urllib.parse.urlparse(url).path)\n local_path = article.get_image_location(filename)\n try:\n urllib.request.urlretrieve(url, local_path)\n #resize the image to a reasonable size\n resize_image(local_path)\n #InDesign recognizes <link href=\"\"> tags for images\n img_tag.name = 'link'\n img_tag.attrs['href'] = 'file://' + local_path\n except urllib.error.HTTPError as e:\n print(f'Error downloading image {url}. Reason: {e}')\n return article", "def download_image(self, url):\r\n file_path = os.path.join(self.temp_dir, 'image.png')\r\n urlretrieve(url, file_path)\r\n return file_path", "def _download_images(self, url_file, destination_dir, log_file):\n logger = self.setup_log(log_file)\n logger.info(config.LOG_INITIAL_MESSAGE % (url_file, destination_dir))\n\n with open(url_file) as urls:\n for i, l in enumerate(urls):\n pass\n bar = progressbar.ProgressBar(i + 1)\n\n download_count = 0\n\n # opening the url file and reading the urls\n with open(url_file, 'r') as urls:\n for i, url in enumerate(urls):\n bar.set(i)\n\n url = url.strip()\n components = urllib.parse.urlparse(url)\n if not (components.scheme and components.netloc and components.path):\n logger.error('%s: \"%s\"' % (config.LOG_URL_INVALID, self.truncate_middle(url, config.MAX_URL)))\n continue\n \n # check whether the robots.txt allows us to crawl this URL\n try:\n can_fetch = self.download_allowed(url, components.scheme, components.netloc)\n except (AttributeError, urllib.error.URLError, ValueError):\n logger.error('%s: %s' % (config.LOG_ERROR_ROBOTS, self.truncate_middle(url, config.MAX_URL)))\n continue\n\n # log that image download is disallowed\n if not can_fetch:\n logger.error('%s: %s' % (config.LOG_DISALLOWED, self.truncate_middle(url, config.MAX_URL)))\n continue\n \n # open image url\n try:\n url_response = urllib.request.urlopen(url)\n except urllib.error.URLError as error:\n logger.error('%s: %s' % (config.LOG_ERROR_OPENING, self.truncate_middle(url, config.MAX_URL)))\n continue\n\n # check whether the URL content is an image \n if url_response.info().get_content_maintype().lower() != config.IMAGE_MIMETYPE:\n logger.error('%s: %s' % (config.LOG_NOT_AN_IMAGE, self.truncate_middle(url, config.MAX_URL)))\n continue\n \n # retrieve the content and store in the destination directory\n os.makedirs(destination_dir, exist_ok=True) \n image_name = '%s_%s' % (download_count + 1, os.path.basename(url))\n with open(os.path.join(destination_dir, image_name), 'wb') as image_file:\n try:\n image_file.write(url_response.read())\n except urllib.error.URLError as error:\n logger.error('%s: %s' % (config.LOG_ERROR_DOWNLOADING, self.truncate_middle(url, config.MAX_URL)))\n continue\n \n # log download and increment the counter\n logger.info('%s %s, url: %s' % (config.LOG_DOWNLOADED, self.truncate_middle(image_name, config.MAX_FILE_NAME), self.truncate_middle(url, config.MAX_URL)))\n download_count += 1\n\n # set the progress bar to 100 percent and print a comment and new line for the returning prompt\n bar.complete('completed')\n\n # release the logger handles\n self.shutdown_log(logger)", "def image_downloader(url, file_path, file_name):\n response = requests.get(url, stream=True)\n with open(file_path + \"/\" + file_name, 'wb') as out_file:\n shutil.copyfileobj(response.raw, out_file)", "def _download_img_from_url(self, img_url):\r\n response = requests.get(img_url)\r\n img = Image.open(BytesIO(response.content))\r\n print(\"Downloaded image 
from url\")\r\n return img" ]
[ "0.694661", "0.6541669", "0.63542134", "0.6328814", "0.6266402", "0.62424034", "0.6190662", "0.6182079", "0.6157415", "0.6123659", "0.6086594", "0.60814106", "0.6049556", "0.6047241", "0.6015702", "0.6006452", "0.5921177", "0.5914483", "0.58915526", "0.5884733", "0.5876298", "0.5872016", "0.5868614", "0.5846934", "0.58408797", "0.5817456", "0.5815309", "0.58058184", "0.57901645", "0.5780631" ]
0.6868128
1
Add vectors to table. Should be implemented.
def add_vectors(self, table_name, records, ids, timeout, **kwargs):
    _abstract()
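Since this is an abstract stub (the assumption here is that _abstract() raises NotImplementedError in the real base class), a toy concrete override might look like the sketch below; the class name and the in-memory dict store are invented for illustration:

import numpy as np

class InMemoryClient:
    def __init__(self):
        self._tables = {}

    def add_vectors(self, table_name, records, ids, timeout=None, **kwargs):
        # Store each vector under its caller-supplied id.
        table = self._tables.setdefault(table_name, {})
        for vec_id, vec in zip(ids, records):
            table[vec_id] = np.asarray(vec, dtype=np.float32)
        return list(ids)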
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def register_vectors(self, vectors):\n\n self.vectors.extend(vectors)", "def put_vector(self, term, vector):\n self.terms.append(term)\n self.vectors.append(vector.vector)\n self.real_vectors.append(vector)\n return self.dict.update({term: vector})", "def add(self, keys: List[Tuple[int, int]], vectors: np.ndarray, weights: List[float], *args, **kwargs):\n pass", "def add_vecs_to_vocab(vocab, vectors):\n length = len(vectors[0][1])\n vocab.reset_vectors(width=length)\n for word, vec in vectors:\n vocab.set_vector(word, vector=vec)\n return vocab", "def AddTable(self, table):\n self.tables.append(table)", "def append_table(self, table):\n if not table:\n return\n\n indexes = []\n for idx in table.index:\n index = self.size + idx\n indexes.append(index)\n\n self.set(indexes=indexes, columns=table.columns, values=table.data)", "def add_edge_vectors(self):\n edge_vectors = np.eye(self.values.shape[1])\n self.values = np.vstack([self.values, edge_vectors])\n self.values_planar = np.vstack([self.values_planar, edge_vectors])\n self.number_of_vectors = self.values.shape[0]\n self.normalize()\n #df = pd.DataFrame(data=self.values)\n #fig = px.scatter_3d(df,x=0, y=1, z=2)\n #plotly.offline.plot(fig,filename=\"reference_vectors_plot.html\")", "def add(table):\n\n list_to_add=ui.get_inputs(list_labels,\"\")\n \n list_to_add.insert(0,common.generate_random(table))\n\n table.append(list_to_add)\n return table", "def add_table(self, table_pose, table_extents):\n while not self.sim.add_box(table_pose, table_extents, check_collision=True):\n table_pose[0,3] += 0.05 # move forward until not in collision", "def with_column(self, label, values):\n \n \n \n # self.column_labels.append(label)\n # for i in range(len(self.rows)):\n # self.rows[i].append(values[i]) \n \n new_label = []\n new_rows = []\n for x in self.column_labels:\n new_label.append(x)\n new_label.append(label)\n \n for i in range(len(self.rows)):\n new_row = []\n new_row += self.rows[i]\n # for i in range(len(b)): \n new_row.append(values[i])\n new_rows.append(new_row)\n \n \n new_Table = T88ble(new_rows, new_label)\n\n return new_Table", "def vectors(self, adr = 0x10000):\n\n\t\tself.__vector(adr - 2, \"RST\")\n\t\tself.__vector(adr - 4, \"NMI\")\n\t\tself.__vector(adr - 6, \"SWI\")\n\t\tself.__vector(adr - 8, \"IRQ\")\n\t\tx = self.p.t.add(adr - 8, adr, \"tbl\")\n\t\tx.blockcmt += \"\\n-\\nMC6800 Vector Table\\n\\n\"", "def __mag_table_append(self, table_new):\n for r in table_new[self.__mag_colnames]:\n self.__mags.add_row(r)\n self.__mags.sort(['ra','dec','MJD'])", "def __limmag_table_append(self, table_new): \n for r in table_new[self.__limmag_colnames]:\n self.__lim_mags.add_row(r)\n self.__lim_mags.sort(['ra','dec','MJD'])", "def add(table):\n\n # your code\n row = []\n row.append(common.generate_random(table))\n\n inputs = ui.get_inputs([\"TITLE: \", \"MANUFACTURER: \", \"PRICE: \", \"STOCK: \"], \"Fill the records below: \")\n for i in inputs:\n row.append(i)\n\n table.append(row)\n\n return table", "def change_tvec(self, tvec: np.array) -> None:\n\n self.tvec = sc.promotetoarray(tvec).copy()\n for td_table in list(self.tdve.values()) + self.transfers + self.interpops:\n td_table.tvec = tvec", "def set_vectors(self, vecs):\n self.vecs = vecs[:]", "def initializeVectorField(self, ctrs, vecs, label):\n self.ctrs = ctrs\n self.vecs = vecs\n self.Npts = len(vecs)\n self.label = label\n return", "def AddTable(self,InsertionPoint,NumRows,NumColumns,RowHeight,ColWidth):\n\t\treturn 
self.Space.AddTable(InsertionPoint,NumRows,NumColumns,RowHeight,ColWidth)", "def add_to_vec(self, nt, positions, counts):\n\t\tself.__seqvector.add_to_vec(nt, positions, counts)", "def test_append(self):\n self.table.append(['Tom', 26])", "def create_vectors(self):\n self.localStatistics = []\n self.lastStatistics = []\n self.globalV = []\n self.estimate = []\n self.delta = []\n self.drift = []\n self.slack = [] # only for coordBased model", "def add_table(self, tab, name, columns=None, select_in=None, margin=0,\n ra=None, dec=None, col_dist=None, col_edgedist=None,\n digit=3):\n if columns:\n tab = tab[columns]\n\n if select_in:\n wcs = self.images[select_in].wcs\n tab = tab.select(wcs, ra=ra, dec=dec, margin=margin)\n if len(tab) == 0:\n return\n\n if col_dist is not None:\n from astropy.coordinates import SkyCoord\n\n scat_coords = tab.to_skycoord(ra=ra, dec=dec)\n src_coord = SkyCoord(ra=self.RA, dec=self.DEC,\n unit=('deg', 'deg'), frame='fk5')\n tab[col_dist] = src_coord.separation(scat_coords).arcsec\n if digit is not None:\n tab[col_dist] = np.round(tab[col_dist], digit)\n tab.sort(col_dist)\n\n if col_edgedist is not None:\n tab[col_edgedist] = tab.edgedist(wcs, ra=ra, dec=dec)\n if digit is not None:\n tab[col_edgedist] = np.round(tab[col_edgedist], digit)\n\n self.tables[name] = tab", "def add_table_field(m, poly_mod):\n # Find all elements of a field\n x = find_all_elements_field(m, poly_mod)\n\n # Initialize 3D list\n f = [[[] for a in range(len(x))] for b in range(len(x))]\n\n # Use add_field to calculate the additions\n for i in range(len(x)):\n for j in range(len(x)):\n f[i][j] = add_field(poly_mod, m, x[i], x[j])\n return f", "def generate_table(self, rows):\n ...", "def _add_scanvec(self, mutinfo):\n vb, uuid, seq, bktname = mutinfo\n self._sv.setdefault(bktname, {})[vb] = (seq, str(uuid))", "def create_vector(self):\n\n series = {'PLANNEDTIME_ARR': [int(self.planned_arrival)],\n 'rain': [float(self.rain)],\n 'temp': [float(self.temp)],\n 'distance_centre': [float(self.distance_centre)],\n 'day_into_year': [int(self.t_minus_1118)],\n 'STOPPOINTID_label': [int(self.stop_id)],\n 'DAYOFWEEK_label': [int(self.day_of_week)],\n 'MONTH_label': [int(self.month)],\n 'holiday_label': [int(self.holiday)],\n 'cluster_label': [int(self.cluster)]\n }\n\n self.vector = pd.DataFrame(series)", "def add(table):\n\n # your code\n inventory_data = [\"Product: \", \"Manufacturer: \", \"Purchase Year: \", \"Durability: \"]\n inputs = ui.get_inputs(inventory_data, \"Add item\")\n ID = common.generate_random(table)\n table.append([ID, *inputs])\n return table", "def add_vectors(u, v): #11.22.5\r\n new_vector = []\r\n \"\"\"Because they have same length so we\r\n should take advantage from this one\"\"\"\r\n for i in range(len(u)):\r\n m = u[i] + v[i] # Get their value of i index at the same time!\r\n new_vector.append(m)\r\n return new_vector", "def add(table):\n id_storage = common.get_values_from_column(table, 0)\n id_ = common.generate_random(table)\n table = manage_data_from_user(table, id_storage, id_, False)\n\n return table", "def add(table):\n\n generated = common.generate_random(table)\n\n list_labels = ['Title: ', 'Manufacturer: ', 'Price: ', 'Number in stock: ']\n\n inputs = list_labels[:]\n\n while not inputs[2].isdigit() or not inputs[3].isdigit():\n inputs = ui.get_inputs(list_labels, 'Provide data: ')\n\n inputs.insert(0, generated)\n table.append(inputs)\n\n return table" ]
[ "0.675615", "0.634529", "0.61804163", "0.6127101", "0.61153644", "0.60995406", "0.6077289", "0.60696733", "0.6050394", "0.60191166", "0.5878969", "0.587209", "0.5812719", "0.5787769", "0.57723516", "0.576734", "0.5764154", "0.57554567", "0.5741254", "0.57369924", "0.5715069", "0.5712914", "0.57001656", "0.56883985", "0.56864387", "0.5679447", "0.5659726", "0.5658238", "0.56278974", "0.5617621" ]
0.6852637
1
Query vectors in a table. Should be implemented.
def search_vectors(self, table_name, top_k, nprobe, query_records, query_ranges, **kwargs):
    _abstract()
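A matching brute-force query method for the hypothetical InMemoryClient sketched after the previous record. Exhaustive L2 distance is an assumption; nprobe and query_ranges are accepted only for signature compatibility and ignored here:

import numpy as np

def search_vectors(self, table_name, top_k, nprobe, query_records,
                   query_ranges=None, **kwargs):
    table = self._tables[table_name]  # assumes the table already exists
    ids = list(table)
    # Assumes all stored vectors share one dimensionality.
    matrix = np.stack([table[i] for i in ids])
    results = []
    for q in np.asarray(query_records, dtype=np.float32):
        dists = np.linalg.norm(matrix - q, axis=1)
        order = np.argsort(dists)[:top_k]
        results.append([(ids[j], float(dists[j])) for j in order])
    return results

InMemoryClient.search_vectors = search_vectors  # attach to the toy client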
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def query(_from, _select, _geomselect=None, _where=None, _groupby=None, _limit=None):\n # INSTEAD MAKE INTO CLASS\n # WITH .fields attr\n # AND .__iter__()\n # AND .get_vectordata()\n # AND MAKE EACH YIELDED ROW A VECTOR FEATURE CLASS\n # THIS WAY ALLOWING CHAINED QUERIES\n\n # parse args\n iterables = _from\n columnfuncs = _select\n geomfunc = _geomselect\n condition = _where\n key = _groupby\n n = _limit\n \n # first yield header as list of column names\n colnames = [each[0] for each in columnfuncs]\n yield colnames\n\n # make an iterable that yields every combinaion of all input iterables' items\n if len(iterables) == 1:\n iterable = iterables[0]\n else:\n iterable = itertools.product(*iterables)\n\n # iterate and add\n if key:\n groups = groupby(iterable, key)\n\n # limit\n if n:\n groups = limit(groups, n)\n \n for items in groups:\n # filter\n if condition:\n items = where(items, condition)\n \n # aggregate\n # NOTE: columnfuncs and geomfunc must expect an iterable as input and return a single row,geom pair\n item = aggreg(items, columnfuncs, geomfunc)\n yield item\n \n else:\n # filter\n if condition:\n iterable = where(iterable, condition)\n\n # limit\n if n:\n iterable = limit(iterable, n)\n\n # select\n for item in select(iterable, columnfuncs, geomfunc):\n yield item", "def vectors_from_dataframe(*columns):\n return lambda df: [np.array(v) for v in zip(*[list(df[x].values) for x in columns])]", "def get_vector(self, query: list):\n if len(query) == 0:\n raise BadQueryParameter(\"Query (list) can not be empty.\")\n\n return self.vectorizer.transform(query)", "def query_and_bundle(session, fields, offset, limit, filter_):\n q = session.query(*fields) # raw query\n q = q.offset(offset) if filter_ is None else q.filter(filter_) # filter / offset\n ids, vectors = zip(*q.limit(limit)) # unravel results\n # bundle into arrays\n _ids = np.array(ids, dtype=STR_TYPE)\n _str_vectors = [json.loads(vector) for vector in vectors]\n _vectors = np.array(_str_vectors, dtype=FLOAT_TYPE)\n return _ids, _vectors", "def calcQueryVector(self):\n query = input(\"Query: \");\n ana = StemmingAnalyzer() ### lowercases, stems, ignores stopwords\n tokens = [token.text for token in ana(query)]\n\n queryVector = {}\n for token in tokens:\n if token in self.invertedIndex.keys():\n if token in queryVector.keys():\n queryVector[token]+=1;\n else:\n queryVector[token] = 1;\n\n return self.normalizeQueryVector(queryVector);", "def get_features(words, vectors):\n result = [vectors.loc[word].values for word in words if word in df_keys.values.reshape(-1)]\n if result:\n return np.stack(result)\n return None", "def lookup_rows(self, key, values, fields=None):\n\n s3db = current.s3db\n\n table = self.table\n ftable = s3db.cr_shelter_flag\n itable = s3db.cr_shelter_inspection\n utable = s3db.cr_shelter_unit\n\n left = (ftable.on(ftable.id == table.flag_id),\n itable.on(itable.id == table.inspection_id),\n utable.on(utable.id == itable.shelter_unit_id),\n )\n count = len(values)\n if count == 1:\n query = (table.id == values[0])\n else:\n query = (table.id.belongs(values))\n limitby = (0, count)\n\n rows = current.db(query).select(table.id,\n utable.name,\n itable.id,\n itable.date,\n ftable.name,\n left = left,\n limitby = limitby,\n )\n return rows", "def _extract_subtable_by_attribute_values(self, values, \n mode=\"and\"):\n self._check_attribute_names(list(values.keys()))\n if mode == \"and\":\n indices = [i for i in range(len(self)) if self._has_values(i, values)]\n elif mode == \"or\":\n indices = [i for i in 
range(len(self)) if self._has_at_least_one_value(i, values)]\n return ([self.objects[i] for i in indices],\n [self.table[i] for i in indices])", "def lookup_rows(self, key, values, fields=None):\n\n s3db = current.s3db\n\n table = self.table\n\n utable = s3db.cr_shelter_unit\n left = utable.on(utable.id == table.shelter_unit_id)\n\n count = len(values)\n if count == 1:\n query = (table.id == values[0])\n else:\n query = (table.id.belongs(values))\n limitby = (0, count)\n\n rows = current.db(query).select(table.id,\n table.date,\n utable.name,\n left = left,\n limitby = limitby,\n )\n return rows", "def unit_vector(self,vector):\n\t\tunit_vector_query=0;\n\t\tfor word in vector:\n\t\t\tunit_vector_query += vector[word]*vector[word];\n\t\tunit_vector_query = math.sqrt(unit_vector_query);\n\t\treturn unit_vector_query", "def call_single_vec(self, input_value):\n _, eigVectors = self.getEigen(input_value)\n return eigVectors[:,:,-1]", "def add_vectors(self, table_name, records, ids, timeout, **kwargs):\n _abstract()", "def add_vectors(self, table_name, records, ids, timeout, **kwargs):\n _abstract()", "def _to_full_vector(self, query_vector: List[Tuple[str, float]]) -> np.array:\n terms = list(self.index.get_terms())\n terms.sort()\n vector = np.zeros(len(terms))\n\n for (term, weight) in query_vector:\n index = terms.index(term)\n vector[index] = weight\n\n return vector", "def query3() :", "def evaluate_as_vector(self, chain_state): \n def vector_representation(n, ordering, it):\n return self.mapping.subspace(zip(ordering,it))\n return self._evaluate(vector_representation, chain_state)", "def load_intron_vector(table, session):\n assert any(table == cls for cls in (TmIntronSupport, AugCgpIntronSupport, AugTmIntronSupport, AugPbIntronSupport,\n AugTmrIntronSupport, ExRefIntronSupport))\n query = session.query(table)\n return pd.read_sql(query.statement, session.bind)", "def vec(self):\n return np.matrix(self.val.ravel()).transpose()", "def get_vector(self, word):\n string = \"SELECT * FROM Vectors WHERE name=?\"\n params = (word,)\n self.cur.execute(string, params)\n raw_vector = self.cur.fetchone()\n if raw_vector is None:\n raise KeyError(\"Vector not found\")\n else:\n vector = pickle.loads(raw_vector[1])\n return vector", "def search_vectors_in_files(self, table_name, file_ids, query_records,\n top_k, nprobe, query_ranges, **kwargs):\n _abstract()", "def search_vectors_in_files(self, table_name, file_ids, query_records,\n top_k, nprobe, query_ranges, **kwargs):\n _abstract()", "def vectored_fields(self):\r\n return [i for i, ftype in enumerate(self._by_number) if ftype.vector]", "def getVectors(self):\n vectors = dict()\n i = 0\n N = len(self.db.invertedIndex)\n for w, (idf, docs) in self.db.invertedIndex.items():\n for doc, tf in docs.items():\n try:\n vectors[doc][i] = tf * idf\n except KeyError as k:\n vectors[doc] = {i: tf * idf}\n i += 1\n i = 0;\n return vectors", "def wrapDBVector(self,vec):\n return vec.todense()", "def getVector(lstOfValues):\n return MatrixExtended([[v] for v in lstOfValues])", "def __select(self, fields, tables, conditions, values, order):\n\n start_table = 0\n end_table = -1\n for i in range(0, len(tables)):\n if tables[i] == \"Varieties\":\n tables.pop(i)\n for j in range(0, len(conditions)):\n if conditions[j].startswith(\"resource_id\"):\n index = self._match_variety_table(values[j])\n if conditions[j].endswith(\" =\"):\n start_table = index\n end_table = index + 1\n elif conditions[j].endswith(\" <\") or conditions[j].endswith(\n \" <=\"\n ):\n end_table = 
index + 1\n elif conditions[j].endswith(\" >\") or conditions[j].endswith(\n \" >=\"\n ):\n start_table = index\n\n tables.extend(self.variety_tables[start_table:end_table])\n\n request = \"SELECT {fields} FROM {tables}{conditions}\"\n\n if conditions:\n cond_list = \" WHERE \"\n for index, cond in enumerate(conditions):\n cond_list += \"(\" + cond\n if values[index] == \"NULL\":\n cond_list += \" IS %s)\"\n values[index] = None\n elif values[index] == \"NOT NULL\":\n cond_list += \" IS NOT %s)\"\n values[index] = None\n else:\n cond_list += \" %s)\"\n if index < len(conditions) - 1:\n cond_list += \" AND \"\n else:\n cond_list = \"\"\n\n for table in tables:\n end_request = \" UNION \".join(\n [\n request.format(\n fields=\", \".join(fields),\n tables=table,\n conditions=cond_list,\n )\n for table in tables\n ]\n )\n if order:\n ord_list = \" ORDER BY {0}\".format(\", \".join(order))\n\n end_request = end_request + ord_list\n\n cursor = self.conn.cursor(dictionary=True)\n results = []\n _logger.debug(\"%r, %r\" % (end_request, values * len(tables)))\n try:\n if values:\n cursor.execute(end_request, tuple(values * len(tables)))\n else:\n cursor.execute(end_request)\n except Exception as error:\n _logger.exception(str(error))\n else:\n for row in cursor.fetchall():\n result = {}\n for key in row.keys():\n result[key] = row[key]\n if row[key] == \"NULL\":\n result[key] = None\n results.append(result)\n\n aux_results = copy(results)\n for i in range(0, len(aux_results)):\n aux_results[i].pop(\"hash\", None)\n aux_results[i].pop(\"content\", None)\n\n _logger.debug(\n \"SELECT REQUEST ON {0} OK. Results: {1}\".format(\n \", \".join(tables), aux_results\n )\n )\n cursor.close()\n return results", "def call_table(conn, table):\r\n cursor = conn.cursor()\r\n values_list = []\r\n header_list = get_header(conn, table) # list with table header values\r\n sql = f\"SELECT * FROM {table}\"\r\n cursor.execute(sql)\r\n for value in cursor.fetchall(): # iterates over list of tuples\r\n value_dict = dict() # dictionary to store each row values. keys = column headers, value = respective row value\r\n for index, c_header in enumerate(header_list):\r\n value_dict[f\"{c_header}\"] = value[index]\r\n values_list.append(value_dict)\r\n return values_list", "def find(self, *args):\n return _ida_hexrays.qvector_lvar_t_find(self, *args)", "def create_query_vector(ix, term_dict, bow):\n\n\tqfv = list()\n\tfor idx, tf in bow:\n\t\t# get term from dict index\n\t\tterm = ix[idx]\n\t\t# filter out terms not contained in self.term_dict\n\t\tif term not in term_dict:\n\t\t\tcontinue\n\t\t# append term w/ tf to tfv\n\t\tqfv.append((term, tf))\n\treturn scale_to_L1_norm(defaultdict(float, qfv))", "def result_to_vector(results):\n return [vectorized_result(x) for x in results]" ]
[ "0.6033002", "0.5755397", "0.57263374", "0.5559812", "0.5555392", "0.55485225", "0.5512064", "0.54893804", "0.5469456", "0.5459995", "0.5443791", "0.54426426", "0.54426426", "0.53633827", "0.5357214", "0.53425556", "0.5329374", "0.5324857", "0.53152955", "0.5296956", "0.5296956", "0.5293589", "0.52636755", "0.52545834", "0.5249977", "0.5226812", "0.5221129", "0.5214204", "0.520404", "0.5181834" ]
0.6481918
1
Query vectors in a table, query vectors in specified files. Should be implemented
def search_vectors_in_files(self, table_name, file_ids, query_records, top_k, nprobe, query_ranges, **kwargs): _abstract()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def search_vectors(self, table_name, top_k, nprobe, query_records, query_ranges, **kwargs):\n _abstract()", "def search_vectors(self, table_name, top_k, nprobe, query_records, query_ranges, **kwargs):\n _abstract()", "def search_from_sqlite():\n key = request.args.get('key')\n graph = FileStructureProcessor(\"sqlite\")\n return graph.search_from_sqlite(key)", "def get_source_vectors(testsmells):\n\n for testsmell in testsmells:\n df = pd.read_csv('data/' + testsmell + '_data.csv')\n df['Vector'] = ''\n\n repnames = df['App'].unique().tolist()\n for repname in repnames:\n print('Processing project \\'' + repname + '\\' for ' + testsmell + '...')\n currdf = df[df['App'] == repname]\n repo = Repo('repositories/' + repname)\n vectors = []\n \n # Get the vectors for each Java file in the dataframe\n for _, row in tqdm(list(currdf.iterrows())): \n try:\n repo.git.checkout(row['CommitSHA'], force=True)\n file_path = 'repositories/' + repname + '/' + row['RelativeTestFilePath']\n vectors.append(get_vector(file_path))\n except GitCommandError as err:\n print('Failed for ' + row['App'] + ':' + row['CommitSHA'])\n print(err)\n vectors.append('')\n \n df.loc[df['App'] == repname, 'Vector'] = vectors # Set the vectors on the dataframe\n \n filename = 'data/' + testsmell + '_vectors.csv'\n df.to_csv(filename, index=False)", "def myhtable_index_search(files, index, terms):\n res_file = []\n count = 0\n if len(terms) == 0:\n print('empty terms')\n return\n for term in terms:\n term = term.lower()\n count += 1\n if count == 1:\n s = htable_get(index, term)\n if s == None:\n s = {-1}\n else:\n s = s.intersection(htable_get(index, term))\n for id in s:\n if id != -1:\n res_file.append(files[id])\n return res_file", "def lookup(collated_file,query_file):\r\n x=open(query_file,\"r\")\r\n query=[]\r\n for i in x:\r\n i=i.replace(\"\\n\",\"\")\r\n query.append(i)\r\n y=open(collated_file,\"r\")\r\n collection=[]\r\n for i in y :\r\n i=i.replace(\"\\n\",\"\")\r\n i=i.split(\":\")\r\n collection.append(i)\r\n answer=[]\r\n for i in range(len(query)):\r\n answer.append(BinarySearch(collection,0,len(collection)-1,query[i]))\r\n y = open(\"song_ids.txt\", \"w\")\r\n for i in range(len(answer)):\r\n y.write(str(answer[i]) + \"\\n\")", "def finder(files, queries):\n # Create a list of file names mapped to all paths that lead to them\n paths = {}\n for file in files:\n fname = file.split('/')[-1]\n paths[fname] = paths.get(fname, [])\n paths[fname].append(file)\n\n # Iterate over all queries, and add the file paths pointing to each query\n # to result and return it\n result = []\n for query in queries:\n result += paths.get(query, [])\n return result", "def test_get_parsed_files(self):\n files = Historical_ROAs_Parser()._get_parsed_files()\n with Historical_ROAs_Parsed_Table() as t:\n for f in files:\n sql = f\"SELECT * FROM {t.name} WHERE file = '{f}'\"\n assert len(t.execute(sql)) == 1", "def load_vecs(fin):\n h5f = tables.open_file(fin)\n h5vecs= h5f.root.vecs\n\n vecs=np.zeros(shape=h5vecs.shape,dtype=h5vecs.dtype)\n vecs[:]=h5vecs[:]\n h5f.close()\n return vecs", "def vcf_query(self, **kwargs):\n try:\n import pysam\n except ImportError:\n print(\"Can't find pysam\")\n raise ImportError('Handling of bam files requires pysam')\n\n try:\n file_handle = pysam.Tabix(self._meta.filename, 'rb')\n except IOError:\n raise IOError('Could not find bam file')\n\n reads = file_handle.fetch(\n kwargs['id'],\n kwargs['start'],\n kwargs['stop'])\n\n hits = dict(**reads)\n print(\"hits\")\n\n raise NotImplementedError()", "def 
jointure_table(path, table1, table2, field1, field2, search1, search2):\n conn = sqlite3.connect(path)\n c = conn.cursor()\n rows = c.execute('SELECT * FROM '+table1 +' as t1 JOIN '+table2+\" as t2 where t1.\"+field1+\" like '%\"+search1+\"%' and t2.\"+field2+\" like '%\"+search2+\"%'\").fetchall()\n conn.close()\n return rows", "def find_files(self, where_clause, keywords=[]):\n files_by_hash = {}\n old_factory = self.connection.row_factory\n self.connection.row_factory = sqlite3.Row\n if keywords:\n kw_clause = 'keywords._keyword in (%s) and ' % ','.join(\n ['\"%s\"'%kw for kw in keywords])\n else:\n kw_clause = ''\n query = \"\"\"select distinct files.* from files left join\n (\n keyword_x_file inner join keywords\n on keyword_x_file._keyword_id=keywords._keyword_id\n )\n on files._file_id=keyword_x_file._file_id\n where \"\"\" + kw_clause + where_clause\n rows = self.connection.execute(query).fetchall()\n self.connection.row_factory = old_factory\n return rows", "def read_votable_catalog(\n self,\n votable_file,\n table_name,\n id_column,\n lon_column,\n lat_column,\n flux_columns,\n frame,\n reference_frequency=None,\n freq_array=None,\n freq_edge_array=None,\n spectral_index_column=None,\n flux_error_columns=None,\n history=\"\",\n run_check=True,\n check_extra=True,\n run_check_acceptability=True,\n ):\n parsed_vo = votable.parse(votable_file)\n\n tables = list(parsed_vo.iter_tables())\n table_ids = [table._ID for table in tables]\n table_names = [table.name for table in tables]\n\n if None in table_ids:\n raise ValueError(f\"File {votable_file} contains tables with no name or ID.\")\n\n try:\n table_name_use = _get_matching_fields(table_name, table_ids)\n table_match = [table for table in tables if table._ID == table_name_use][0]\n except ValueError:\n table_name_use = _get_matching_fields(table_name, table_names)\n table_match = [table for table in tables if table.name == table_name_use][0]\n\n # Convert to astropy Table\n astropy_table = table_match.to_table()\n\n # get ID column\n id_col_use = _get_matching_fields(id_column, astropy_table.colnames)\n\n # get lon & lat columns, if multiple matches, exclude VizieR calculated columns\n # which start with an underscore\n lon_col_use = _get_matching_fields(\n lon_column, astropy_table.colnames, exclude_start_pattern=\"_\"\n )\n lat_col_use = _get_matching_fields(\n lat_column, astropy_table.colnames, exclude_start_pattern=\"_\"\n )\n\n if isinstance(flux_columns, (str)):\n flux_columns = [flux_columns]\n flux_cols_use = []\n for col in flux_columns:\n flux_cols_use.append(_get_matching_fields(col, astropy_table.colnames))\n\n if len(flux_columns) > 1 and (freq_array is None and freq_edge_array is None):\n raise ValueError(\n \"Frequency information must be provided with multiple flux columns. 
\"\n \"Must provide either freq_edge_array or freq_array (if the \"\n \"frequencies are evenly spaced), both can be provided.\"\n )\n\n if len(flux_columns) > 1:\n if freq_edge_array is None:\n # if get here, freq_array exists\n try:\n freq_edge_array = _get_freq_edges_from_centers(\n freq_array=freq_array, tols=self._freq_array.tols\n )\n warnings.warn(\n \"freq_edge_array not set, calculating it from the freq_array.\"\n )\n except ValueError as ve:\n raise ValueError(\n \"freq_edge_array must be provided for multiple flux columns if \"\n \"freq_array is not regularly spaced.\"\n ) from ve\n elif freq_array is None:\n warnings.warn(\n \"freq_array not set, calculating it from the freq_edge_array.\"\n )\n freq_array = _get_freq_centers_from_edges(\n freq_edge_array=freq_edge_array\n )\n\n if reference_frequency is not None or len(flux_cols_use) == 1:\n if reference_frequency is not None:\n reference_frequency = (\n np.array([reference_frequency.value] * len(astropy_table))\n * reference_frequency.unit\n )\n if spectral_index_column is not None:\n spectral_type = \"spectral_index\"\n spec_index_col_use = _get_matching_fields(\n spectral_index_column, astropy_table.colnames\n )\n spectral_index = astropy_table[spec_index_col_use].data.data\n else:\n spectral_type = \"flat\"\n spectral_index = None\n else:\n spectral_type = \"subband\"\n spectral_index = None\n\n col_units = []\n for col in flux_cols_use:\n col_units.append(astropy_table[col].unit)\n\n allowed_units = [\"Jy\", \"Jy/sr\", \"K\", \"K sr\"]\n unit_use = None\n for unit_option in allowed_units:\n if np.all(\n np.array(\n [this_unit.is_equivalent(unit_option) for this_unit in col_units]\n )\n ):\n unit_use = unit_option\n break\n if unit_use is None:\n raise ValueError(\n \"All flux columns must have compatible units and must be compatible \"\n f\"with one of {allowed_units}.\"\n )\n\n stokes = Quantity(\n np.zeros((4, len(flux_cols_use), len(astropy_table))), unit_use\n )\n for index, col in enumerate(flux_cols_use):\n stokes[0, index, :] = astropy_table[col].quantity.to(unit_use)\n\n if flux_error_columns is not None:\n if isinstance(flux_error_columns, (str)):\n flux_error_columns = [flux_error_columns]\n flux_err_cols_use = []\n for col in flux_error_columns:\n flux_err_cols_use.append(\n _get_matching_fields(col, astropy_table.colnames)\n )\n\n err_col_units = []\n for col in flux_err_cols_use:\n err_col_units.append(astropy_table[col].unit)\n\n if not np.all(\n np.array(\n [this_unit.is_equivalent(unit_use) for this_unit in err_col_units]\n )\n ):\n raise ValueError(\n \"All flux error columns must have units compatible with the units \"\n \"of the flux columns.\"\n )\n\n stokes_error = Quantity(\n np.zeros((4, len(flux_err_cols_use), len(astropy_table))), unit_use\n )\n for index, col in enumerate(flux_err_cols_use):\n stokes_error[0, index, :] = astropy_table[col].quantity.to(unit_use)\n else:\n stokes_error = None\n\n self.__init__(\n name=astropy_table[id_col_use].data.data.astype(\"str\"),\n lon=Longitude(astropy_table[lon_col_use].quantity),\n lat=Latitude(astropy_table[lat_col_use].quantity),\n frame=frame,\n stokes=stokes,\n spectral_type=spectral_type,\n freq_array=freq_array,\n freq_edge_array=freq_edge_array,\n reference_frequency=reference_frequency,\n spectral_index=spectral_index,\n stokes_error=stokes_error,\n history=history,\n filename=os.path.basename(votable_file),\n )\n\n if run_check:\n self.check(\n check_extra=check_extra, run_check_acceptability=run_check_acceptability\n )\n\n return", "def 
load_vectors(vec_file, info_file, buckets):\n vectors = np.load(vec_file)\n with open(info_file, 'r') as f:\n info = json.load(f)\n\n if len(info['sequence_id_list']) != len(info['instance_id_list']):\n raise ValueError('Invalid info file: len(info[''sequence_id_list'']) should be equal to len(info[''instance_id_list'']) but got %d != %d' % (len(info['sequence_id_list']), len(info['instance_id_list'])))\n if vectors.shape[0] != len(info['instance_id_list']):\n raise ValueError('Number of vectors and length of info list are not identical. %d != %d' % (len(info['sequence_id_list']), len(info['instance_id_list'])))\n\n num_vec, vec_dim = vectors.shape\n prev_seq_id, prev_ins_id = None, None\n seq_ids = {i: [] for i in xrange(len(buckets))}\n ins_ids = {i: [] for i in xrange(len(buckets))}\n data = {i: [] for i in xrange(len(buckets))}\n\n for vec_idx in tqdm(xrange(num_vec)):\n curr_seq_id, curr_ins_id = info['sequence_id_list'][vec_idx], info['instance_id_list'][vec_idx]\n if curr_seq_id != prev_seq_id or curr_ins_id != prev_ins_id:\n if prev_seq_id is not None:\n bucket_id, curr_sample = bucketing(vec_count, buckets, vec_dim, curr_sample)\n seq_ids[bucket_id].append(prev_seq_id)\n ins_ids[bucket_id].append(prev_ins_id)\n data[bucket_id].append(curr_sample)\n vec_count = 1\n curr_sample = vectors[vec_idx]\n else:\n vec_count += 1\n curr_sample = np.vstack((curr_sample, vectors[vec_idx]))\n prev_seq_id = curr_seq_id\n prev_ins_id = curr_ins_id\n\n # throw the last sample into bucket\n bucket_id, curr_sample = bucketing(vec_count, buckets, vec_dim, curr_sample)\n seq_ids[bucket_id].append(prev_seq_id)\n ins_ids[bucket_id].append(prev_ins_id)\n data[bucket_id].append(curr_sample)\n\n return data, seq_ids, ins_ids", "def load_vectors(args):\n dict_fold = 'train' # which fold of the data will be used to produce results\n if args.task == 'conneau' or 'xling':\n data_dir = os.path.join(args.data_dir, 'MUSE')\n dict_dir = os.path.join(data_dir, 'crosslingual/')\n if args.task == 'xling':\n dict_dir = os.path.join(dict_dir, 'xling-dictionaries/bli_datasets/')\n else:\n dict_dir = os.path.join(dict_dir, 'dictionaries/')\n\n src_path = os.path.join(data_dir, 'wiki.' + args.src_lang + '.vec')\n trg_path = os.path.join(data_dir, 'wiki.' + args.trg_lang + '.vec')\n src_freq_path = None\n trg_freq_path = None\n if dict_fold == 'test':\n postfix = '.5000-6500.txt'\n elif dict_fold == 'train':\n postfix = '.0-5000.txt'\n else:\n raise ValueError('Unrecognized dictionary fold for evaluation')\n elif args.task == 'dinu':\n data_dir = os.path.join(args.data_dir,'dinu')\n dict_dir = os.path.join(data_dir, 'dictionaries/')\n src_path = os.path.join(data_dir, 'embeddings', args.src_lang + '.emb.txt')\n trg_path = os.path.join(data_dir, 'embeddings', args.trg_lang + '.emb.txt')\n src_freq_path = None\n trg_freq_path = None\n postfix = '.{}.txt'.format(dict_fold)\n elif args.task == 'zhang':\n order = [args.src_lang,args.trg_lang]\n if args.src_lang == 'en':\n order = order[::-1]\n data_dir = os.path.join(args.home_dir,'pkg/UBiLexAT/data/','-'.join(order))\n dict_dir = data_dir\n src_path = os.path.join(data_dir, 'word2vec.' + args.src_lang)\n trg_path = os.path.join(data_dir, 'word2vec.' + args.trg_lang)\n src_freq_path = os.path.join(data_dir, 'vocab-freq.' + args.src_lang)\n trg_freq_path = os.path.join(data_dir, 'vocab-freq.' 
+ args.trg_lang)\n postfix = '.train.txt'\n\n srcfile = open(src_path, encoding=args.encoding, errors='surrogateescape')\n trgfile = open(trg_path, encoding=args.encoding, errors='surrogateescape')\n src_words, xs = embeddings.read(srcfile, args.maxs)\n trg_words, xt = embeddings.read(trgfile, args.maxt)\n srcfile.close()\n trgfile.close()\n \n if src_freq_path:\n with open(src_freq_path, encoding=args.encoding, errors='surrogateescape') as f:\n lines = [a.split(' ') for a in f.read().strip().split('\\n')]\n freq_src = {k: int(v) for (k,v) in lines}\n\n with open(trg_freq_path, encoding=args.encoding, errors='surrogateescape') as f:\n lines = [a.split(' ') for a in f.read().strip().split('\\n')]\n freq_trg = {k: int(v) for (k,v) in lines}\n\n # Build word to index map\n src_word2ind = {word: i for i, word in enumerate(src_words)}\n trg_word2ind = {word: i for i, word in enumerate(trg_words)}\n\n if args.task == 'zhang':\n dict_path = os.path.join(dict_dir, 'all.' + '-'.join(order) + '.lex')\n flip = False\n elif args.task == 'dinu' and args.src_lang != 'en':\n # Only has dicts in one direction, flip\n dict_path = os.path.join(dict_dir, args.trg_lang + '-' + args.src_lang + postfix)\n src_to_en = os.path.join(dict_dir, 'en' + '-' + args.src_lang + postfix)\n en_to_trg = os.path.join(dict_dir, args.trg_lang + '-' + 'en' + postfix)\n flip = True\n elif args.task == 'xling':\n dict_path = os.path.join(dict_dir, args.src_lang+'-'+args.trg_lang+'/yacle.test.freq.2k.'+args.src_lang+'-' + args.trg_lang + '.tsv')\n src_to_en = os.path.join(dict_dir, args.src_lang+'-'+'en'+'/yacle.test.freq.2k.'+args.src_lang+'-' + 'en' + '.tsv')\n en_to_trg = os.path.join(dict_dir, 'en'+'-'+args.trg_lang+'/yacle.test.freq.2k.'+'en'+'-' + args.trg_lang + '.tsv')\n\n flip = False\n if not os.path.exists(dict_path):\n dict_path = os.path.join(dict_dir, args.trg_lang+'-'+args.src_lang+'/yacle.test.freq.2k.'+args.src_lang+'-' + args.trg_lang + '.tsv')\n flip = True\n\n else:\n src_to_en = os.path.join(dict_dir, args.src_lang + '-' + 'en' + postfix)\n en_to_trg = os.path.join(dict_dir, 'en' + '-' + args.trg_lang + postfix)\n dict_path = os.path.join(dict_dir, args.src_lang + '-' + args.trg_lang + postfix)\n flip = False\n\n\n if not os.path.exists(dict_path):\n # create new dict\n print('Warning: no dict found, creating dictionary')\n create_dict_for(src_to_en, en_to_trg, dict_path, args)\n\n dictf = open(dict_path, encoding=args.encoding, errors='surrogateescape')\n src2trg = collections.defaultdict(set)\n oov = set()\n vocab = set()\n max_srcind = 0 # These are mostly for debug\n max_trgind = 0\n for line in dictf:\n splitted = line.split()\n if len(splitted) > 2:\n # Only using first translation if many are provided\n src, trg = splitted[:2]\n elif len(splitted) == 2:\n src, trg = splitted\n else:\n # No translation? 
Only happens for Zhang data so far\n continue\n if flip: src, trg = trg, src\n try:\n src_ind = src_word2ind[src]\n trg_ind = trg_word2ind[trg]\n src2trg[src_ind].add(trg_ind)\n vocab.add(src)\n max_srcind = max(max_srcind, src_ind)\n max_trgind = max(max_trgind, trg_ind)\n except KeyError:\n oov.add(src)\n\n return xs, xt, src_words, trg_words, src_word2ind, trg_word2ind, src2trg", "def linking_qry_sec(self, file_path, collid):\n\n FACTOR_T1 = ''''T1' FACTOR'''\n FACTOR_T2 = ''''T2' FACTOR'''\n cond = None\n sql_param = {}\n cond_fpath = None\n if file_path is not None:\n file_path = file_path.lower()\n file_name = path.basename(file_path)\n file_path, pid = self._convert_filepath(file_path)\n # print \"file_path, pid > \", file_path, pid\n if pid is not None:\n sql_param[str(len(sql_param) + 1)] = file_path\n # make exact filepath lower case SQL condition\n cond_fpath = SQLBinaryExpr(SQLFuncExpr(self.db_func_map[DB_FUNC_NAME_LOWER],\n COL_NAME_FILES_FILEPATH),\n OP_EQ, \":%d\" % (len(sql_param)))\n sql_param[str(len(sql_param) + 1)] = pid\n cond_fpath = SQLBinaryExpr(cond_fpath, OP_AND, SQLBinaryExpr(COL_NAME_FILES_PID,\n OP_EQ, \":%d\" % (len(sql_param))))\n else:\n if file_path[:2] == r\"\\\\\":\n _, file_path = splitunc(file_path)\n\n else:\n _, file_path = path.splitdrive(file_path)\n\n file_path = \"%%%s\" % file_path\n sql_param[str(len(sql_param) + 1)] = file_path\n cond_fpath = SQLBinaryExpr(SQLFuncExpr(self.db_func_map[DB_FUNC_NAME_LOWER],\n COL_NAME_FILES_FILEPATH),\n OP_LIKE, \":%d\" % (len(sql_param)))\n if cond is None:\n cond = cond_fpath\n else:\n cond = SQLBinaryExpr(cond, OP_AND, cond_fpath)\n if file_name is not None:\n file_name = file_name.lower()\n sql_param[str(len(sql_param) + 1)] = file_name\n if cond is None:\n cond = SQLBinaryExpr(COL_NAME_FILES_RECFILEID, OP_EQ, \":%d\" % (len(sql_param)))\n else:\n cond = SQLBinaryExpr(cond, OP_AND, SQLBinaryExpr(COL_NAME_FILES_RECFILEID,\n OP_EQ, \":%d\" % (len(sql_param))))\n\n sql1_union = GenericSQLSelect([COL_NAME_FILES_MEASID, FACTOR_T1, COL_NAME_FILES_BEGINTIMESTAMP,\n COL_NAME_FILES_ENDTIMESTAMP],\n False, [TABLE_NAME_FILES], cond)\n cmap_tbl_alias = \"cmap\"\n cf_tbl_alias = \"cf\"\n cf_tbl = SQLTableExpr(TABLE_NAME_FILES, cf_tbl_alias)\n cmap_tbl = SQLTableExpr(TABLE_NAME_COLLMAP, cmap_tbl_alias)\n\n join = SQLJoinExpr(cf_tbl, OP_INNER_JOIN, cmap_tbl,\n SQLBinaryExpr(SQLColumnExpr(cmap_tbl_alias, COL_NAME_COLLMAP_MEASID), OP_EQ,\n SQLColumnExpr(cf_tbl_alias, COL_NAME_FILES_MEASID)))\n sql_param[str(len(sql_param) + 1)] = collid\n columns = [SQLColumnExpr(cmap_tbl_alias, COL_NAME_COLLMAP_MEASID)]\n columns.append(FACTOR_T2)\n columns.append(SQLColumnExpr(cmap_tbl_alias, COL_NAME_COLLMAP_BEGINTIMESTAMP))\n columns.append(SQLColumnExpr(cmap_tbl_alias, COL_NAME_COLLMAP_ENDTIMESTAMP))\n two_tbl_cond = SQLBinaryExpr(cond, OP_AND, SQLBinaryExpr(COL_NAME_COLLMAP_COLLID,\n OP_EQ, \":%d\" % (len(sql_param))))\n sql2_union = GenericSQLSelect(columns, False, [join], two_tbl_cond)\n stmt = \"%s UNION %s\" % (sql1_union, sql2_union)\n # print \"stmt >> \", stmt, sql_param\n return self.sql(stmt, sql_param)\n # return self.execute(stmt, sql_param)", "def load_intron_vector(table, session):\n assert any(table == cls for cls in (TmIntronSupport, AugCgpIntronSupport, AugTmIntronSupport, AugPbIntronSupport,\n AugTmrIntronSupport, ExRefIntronSupport))\n query = session.query(table)\n return pd.read_sql(query.statement, session.bind)", "def load_vector_dictionary():\n return read_word2vecs_from_file(VECTOR_FILE)", "def 
loadSensitivity(tradeTbl, sensiTbl, filepath, vectorField):\n\n df = pd.read_csv(filepath)\n df[\"AsOfDate\"] = pd.to_datetime(df[\"AsOfDate\"]).dt.date\n df[vectorField] = getArrayValue(df[vectorField])\n\n if \"CashflowKey\" not in df.columns:\n df[\"CashflowKey\"] = \"-\"\n\n tradeTbl.load_pandas(df[tradeTbl.columns])\n sensiTbl.load_pandas(df[sensiTbl.columns])", "def get_box_vectors(file):\n box_vectors = [None,None,None]\n with open(file,\"rt\") as fin:\n for line in fin:\n if line[0:6] == \"CRYST1\":\n x_length = float(line[9:14])\n y_length = float(line[18:23])\n z_length = float(line[27:33])\n box_vectors = [x_length,y_length,z_length]\n return(box_vectors)\n return(box_vectors)", "def load_vectors(fname):\r\n # taken from: https://fasttext.cc/docs/en/english-vectors.html\r\n vectors_data = vocab.Vectors(name=fname)\r\n\r\n return vectors_data", "def search(self, top, bottom, left, right):\n params = VectorsSourcesParams(top=top, right=right, bottom=bottom, left=left, query=None)\n self.query_sources(params)", "def findTable(*keys):\r\n\treturn getGameData().findTable(*keys)", "def _get_files_in_db(self):\r\n query = 'SELECT DISTINCT file_name FROM {0};'.format(\r\n self.tables['measurements'])\r\n self.cursor.execute(query)\r\n result = self.cursor.fetchall()\r\n files = [ele[0] for ele in result if ele[0] is not None]\r\n return files", "def searchDatabase(metaDataBaseFile, conditions,var_from_database_to_select):\n # Read meta data and find the indices of interest\n # The indices will be those that are in the dataframe so that the user can index\n # the data frame to extract current_exp_ID in order to load the dataset\n metaData_frame = pd.read_csv(metaDataBaseFile,sep='\\t',header=0)\n positive_indices = {}\n for condition_indentifier, condition in conditions.items():\n if condition.startswith('#'):\n pass\n else:\n currIndices = metaData_frame\\\n [metaData_frame[condition_indentifier]==condition].index.values\n positive_indices[condition_indentifier] = set(currIndices.flatten())\n \n common_indices = list(set.intersection(*positive_indices.values()))\n \n data_to_select = pd.DataFrame()\n for variable in var_from_database_to_select:\n data_to_select[variable] = metaData_frame[variable].iloc[common_indices]\n \n \n \n \n return data_to_select", "def loadVector(vector):\n expVecCmmd = 'v.out.ascii format=standard input=' + vector\n# JL p = Popen(expVecCmmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True)\n p = Popen(expVecCmmd, shell=True, stdin=PIPE, stdout=PIPE,\n stderr=STDOUT, close_fds=False)\n vectorAscii = p.stdout.read().strip('\\n').split('\\n')\n l = 0\n while 'ORGANIZATION' not in vectorAscii[l]:\n l += 1\n while ':' in vectorAscii[l]:\n l += 1\n v = []\n while l < len(vectorAscii):\n line = vectorAscii[l].split()\n if line[0] in ['L', 'B', 'A']:\n skip = len(line) - 2\n vertices = int(line[1])\n l += 1\n v.append([])\n for i in range(vertices):\n v[-1].append(map(float, vectorAscii[l].split()[:2]))\n l += 1\n l += skip\n elif line[0] in ['P', 'C', 'F', 'K']:\n skip = len(line) - 2\n vertices = int(line[1])\n l += 1\n for i in range(vertices):\n l += 1\n l += skip\n else:\n grass.fatal(_(\"Problem with line: <%s>\") % vectorAscii[l])\n if len(v) < 1:\n grass.fatal(_(\"Zero lines found in vector map <%s>\") % vector)\n return v", "def index_search(files, index, terms):\n\n\n termlist = set()\n\n for i in range(len(terms)):\n for j in range(len(terms[i].split(\" \"))):\n\n termlist.add(terms[i].split(\" \")[j])\n\n indexlist = [index[w] for w in 
termlist]\n\n intersect = list(set.intersection(*indexlist))\n\n return [files[x] for x in intersect]", "def test_vector_packet():\n f = Level3File(get_test_data('nids/KOUN_SDUS64_NHITLX_201305202016'))\n for page in f.graph_pages:\n for item in page:\n if 'vectors' in item:\n x1, x2, y1, y2 = np.array(item['vectors']).T\n assert len(x1)\n assert len(x2)\n assert len(y1)\n assert len(y2)", "def vocab_tables(source_file, tags_file):\n pass", "def getModelsByFiltrationSenSpeDataframe(vec_models_filtered_data, vec_all_models_data):\n index_models = np.array(vec_models_filtered_data['index'].values)\n models_delects_ds = vec_all_models_data[vec_all_models_data.index.isin(index_models)]\n list_models_path = models_delects_ds['file_name'].values.tolist()\n return list_models_path" ]
[ "0.6183722", "0.6183722", "0.58732986", "0.58360183", "0.574578", "0.56998706", "0.5665273", "0.56031334", "0.56000257", "0.54932994", "0.5461923", "0.54583454", "0.5440742", "0.5416554", "0.54097044", "0.5398005", "0.5373472", "0.5369907", "0.5323089", "0.53185755", "0.52975965", "0.52620244", "0.52431005", "0.5215661", "0.5208058", "0.5200964", "0.5194767", "0.51941794", "0.5181084", "0.51755553" ]
0.74628633
1
Provide server version. Should be implemented
def server_version(self, timeout): _abstract()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_server_version(self):\n return self.__aceQLHttpApi.get_server_version()", "def get_server_version(self):\n return self.client.getServerVersion().decode('utf-8')\n return self.client.getServerVersion().decode('utf-8')", "def server_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"server_version\")", "def server_version(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"server_version\")", "def version_string(self):\n return self.server_version", "def get_server_info(self):\n raise NotImplementedError('Database.get_version()')", "def _get_version(self):", "def version():\n\n pass", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError", "def version(self):", "def version(self):\r\n raise NotImplementedError()", "def version(self):\n pass", "def version(self):\n pass", "def version(self):\n pass", "def _check_server_version(self, server_version: str) -> None:\n cur_version = parse_version(server_version)\n min_version = parse_version(MIN_SERVER_VERSION)\n if cur_version < min_version:\n raise InvalidServerVersion\n if cur_version != min_version:\n self._logger.warning(\n \"Connected to a Zwave JS Server with an untested version, \\\n you may run into compatibility issues!\"\n )", "def version(cls):\n raise NotImplementedError", "def version(self):\n raise NotImplementedError('version')", "def get_version():\n return 1", "def test_version(server):\n\n assert isinstance(server.version(), six.string_types)", "def get_version(self):\n pass", "def version(self, request, format=None):\n return Response(server_version())", "def get_server_version():\n url_address = 'https://raw.githubusercontent.com/muhammadfredo/FrMaya/master/FrMaya/version.py'\n url_data = urllib2.urlopen(url_address).read(200)\n result = re.search(r'(\\d+), (\\d+), (\\d+)', url_data, re.MULTILINE)\n if result:\n version_list = [int(v) for v in result.groups()]\n return version_list\n else:\n raise ValueError('Cannot get server version!!!')", "def __init__( self, conn, addr, server, version ):", "def Version(self) -> _n_0_t_12:", "def Version(self) -> _n_0_t_12:", "def change_server_ident(name, version=None):\n global server_ident\n \n server_ident[\"server_name\"] = name\n \n if version != None and len(version) > 0:\n server_ident[\"server_version\"] = str(version)\n version_text = \"/%s\" % server_ident[\"server_version\"]\n else:\n version_text = \"\"\n \n server.version = server_ident[\"server_name\"] + version_text", "def get_server():\n pass" ]
[ "0.72736466", "0.72201157", "0.72106344", "0.72106344", "0.7147017", "0.7114719", "0.70159096", "0.6988763", "0.6958514", "0.6958514", "0.6958514", "0.6958514", "0.6935772", "0.6924782", "0.69026643", "0.69026643", "0.69026643", "0.6858275", "0.6853175", "0.6825424", "0.6825143", "0.6788872", "0.67829126", "0.6779645", "0.6653855", "0.66107094", "0.6554808", "0.6554808", "0.6540561", "0.6534443" ]
0.7665297
1
Get latitude and longitude from cities in data.
def get_lat_lon(data): from time import sleep from geopy import geocoders from geopy.exc import GeocoderTimedOut gn = geocoders.GeoNames(username='foobar') cities = get_cities(data).keys() coords = {} for city in cities: while True: try: loc = gn.geocode(city + ", Brazil") except GeocoderTimedOut: sleep(2) else: break coords[city] = (loc.latitude, loc.longitude) return coords
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_city_points(city):\n for item in coordinate_list:\n if item[0] == city:\n return (item[1], item[2])", "def get_coordinates_from_city(self, city):\n return self.cities_dict.get(city)", "def get_lat_lon():\r\n\r\n # Columns: dt,AverageTemperature,AverageTemperatureUncertainty,City,Country,Latitude,Longitude\r\n temperatures = pd.read_csv(\"GlobalLandTemperatures/GlobalLandTemperaturesByCity.csv\")\r\n\r\n Latitude = temperatures['Latitude']\r\n Longitude = temperatures['Longitude']\r\n City = temperatures['City']\r\n Country = temperatures['Country']\r\n\r\n lat_array = []\r\n long_array = []\r\n cities_array = []\r\n countries_array = []\r\n tuples = []\r\n for i, j, city, country in zip(Latitude, Longitude, City, Country):\r\n if (i, j) not in tuples:\r\n tuples.append((i, j))\r\n lat_array.append(float(i[:-1]))\r\n long_array.append(float(j[:-1]))\r\n cities_array.append(city)\r\n countries_array.append(country)\r\n\r\n return lat_array, long_array, cities_array, countries_array", "def extract_locations(self):\n default_pos_columns = common_cfg.coord_col_names\n if set(default_pos_columns).issubset(set(self._raw_data.columns)):\n print('Location data found')\n # check and drop units outside provided city boundary\n geometry = [shapely.geometry.Point(xy) for xy in zip(\n self._raw_data[default_pos_columns[0]], # Long\n self._raw_data[default_pos_columns[1]])] # Lat\n b_within_boundary = np.array(list(map(\n lambda p: p.within(self.model_city.convhull), geometry)))\n\n if not all(b_within_boundary):\n print('%s -- dropping %i units outside city.' %\n (self.servicetype,\n sum(np.bitwise_not(b_within_boundary))))\n self._raw_data = self._raw_data.iloc[\n b_within_boundary, :].reset_index()\n\n # store geolocations as geopy Point\n locations = [geopy.Point(yx) for yx in zip(\n self._raw_data[default_pos_columns[1]], # Lat\n self._raw_data[default_pos_columns[0]])] # Long\n\n propert_data = self._raw_data.drop(default_pos_columns, axis=1)\n\n else:\n raise NotImplementedError('Locations not found - not implemented!')\n\n return propert_data, locations", "def get_coordinates_for_city(city, state=None):\n search_str = ', '.join([city, state]) if state else city\n db_coords = get_coordinates_from_db(search_str)\n if db_coords:\n return (search_str, db_coords)\n else:\n page_title, coords = get_coordinates_from_wikipedia(search_str)\n add_coordinates_to_db(coords, search_str)\n return (page_title, coords)", "def get_coords(data, id):\n return data[id]['lat'], data[id]['lon']", "def GetWorldCities():\n return GetDataFromCsvFile('world_cities.csv')", "def get_datacenter_city(self, node):\n if self._datacenter_cache is None:\n self.populate_datacenter_cache()\n location = self._datacenter_cache[node.datacenter_id].location\n location = location.lower()\n location = location.split(\",\")[0]\n return location", "def get_city_by_name(self, name):\n query = \"SELECT _id, name, country, lat, lon FROM cities INNER JOIN coord \" \\\n \"ON cities.coord = coord.id WHERE name = '{}';\".format(name)\n self.select_from_bd(query)\n cities_list = self.cursor.fetchall()\n for loop, city in enumerate(cities_list):\n cities_list[loop]['lat'] = float('%g' % round(city['lat'], 2))\n cities_list[loop]['lon'] = float('%g' % round(city['lon'], 2))\n return cities_list", "def loc_to_coord(codes):\n def adfilter(codes):\n return re.findall(\"\"\"[a-zA-Z]+, [A-Z]{2}\"\"\", \";\".join(codes))\n\n api_key = \"AIzaSyCxQCjOrHFAf7T-W3vtUYqWkgSFkvMjxN4\"\n\n g = geocoders.GoogleV3(api_key = api_key)\n coords = 
{\"lat\":[], \"long\":[]}\n for code in adfilter(codes):\n if code != \"\":\n try:\n place = g.geocode(code)\n if place != None:\n coords[\"lat\"].append(place.latitude)\n coords[\"long\"].append(place.longitude)\n except (exc.GeocoderTimedOut, exc.GeocoderQueryError):\n pass\n return coords", "def GetUsCities():\n return GetDataFromCsvFile('us_cities.csv')", "def get_city_details(self, location_id):\n sql = \"SELECT * FROM [location] WHERE [id] = %d\"%(location_id)\n self.cursor.execute(sql)\n row = self.cursor.fetchone()\n city = row['city']\n state = row['region']\n zip_code = row['postal_code']\n provider = row['provider']\n ip_address_int = random.randint(3221225729, 3758096126) # Class C\n #ip_address = socket.inet_ntop(socket.AF_INET6, struct.pack('L', int(socket.htonl(ip_address_int))))\n ip_address = socket.inet_ntoa(hex(ip_address_int)[2:].zfill(8).decode('hex')) \n return [city, state, zip_code, provider, ip_address]", "def lat_lons(self):", "def get(city: str = None, state: str = None, fallback=True) -> Tuple[float, float]:\n loc_id = LocationCoordinates.to_location(city, state)\n\n row = LocationCoordinates.query.get(loc_id)\n if row is None:\n # not present, create then return\n loc_obj = geolocator.geocode(loc_id)\n if loc_obj is None or loc_obj.latitude is None:\n if not fallback:\n raise ValueError(f\"Cannot be found: '{loc_id}' (city: {city}, state: {state})\")\n if state is not None: # try just getting the state\n return LocationCoordinates.get(None, state, fallback)\n # otherwise place in bermuda\n return 32.3078, -64.7505\n row = LocationCoordinates(location=loc_id, latitude=loc_obj.latitude, longitude=loc_obj.longitude)\n db.session.add(row)\n db.session.commit()\n return row.latitude, row.longitude", "def createCityCoordinateList(gmaps, cityList):\n cities = []\n print \"Calculating gps coordinates.\"\"\"\n for i in range(len(cityList)):\n r = gmaps.geocode(cityList[i])\n c = r['Placemark'][0]['Point']['coordinates'][0:2]\n cities.append((cityList[i], c[0], c[1]))\n \n return cities", "def geolocation(self):\n if self.latitude and self.longitude:\n return self.longitude, self.latitude", "def get_coordinates(addresses, boroughs):\n latitude = []\n longitude = []\n for address, borough in zip(addresses, boroughs):\n try:\n g = geocoder.osm('{}, {}, New York'.format(address, borough)).json\n latitude.append(g['lat'])\n longitude.append(g['lng'])\n except:\n latitude.append(None)\n longitude.append(None)\n\n return np.array(latitude).T, np.array(longitude).T", "def get_near_cities_from_user_coordinates(user_coordinates):\n data = pandas.read_csv('city_coordinates.tsv', sep='\\t')\n cities = data['city_ascii']\n latitudes, longitudes = data['lat'], data['lng']\n distance_list = []\n for city, lat, lng in zip(cities, latitudes, longitudes):\n try:\n distance = geodesic((lat, lng), user_coordinates).km\n distance_list.append(((lat, lng), city, distance))\n except Exception:\n continue\n distance_list_sorted = sorted(distance_list, key=lambda x: x[-1])\n return [elem[-2] for elem in distance_list_sorted[:100]]", "async def lat_long(zip_code: str, country: str) -> Sequence[float]:\n key: str = f\"{zip_code}, {country}\"\n url: str = f'http://www.datasciencetoolkit.org/street2coordinates/{key.replace(\" \", \"+\")}'\n\n async with aiohttp.ClientSession() as session:\n async with session.get(url) as response:\n response.raise_for_status()\n data = await response.json()\n\n city: Dict[str, Any] = data.get(f\"{zip_code}, {country}\", dict())\n return city.get(\"latitude\", 0.00), 
city.get(\"longitude\", 0.00)", "def get_latlong():\r\n info = urllib.request.urlopen(\"https://ipinfo.io\").read()\r\n decoded = json.loads(info)\r\n print(decoded[\"loc\"])\r\n return decoded[\"loc\"]", "def get_data_from_latlon(self, latitudes, longitudes, years):\n lat_idx, lon_idx = self.get_idx_from_latlon(latitudes, longitudes)\n return self.get_data_from_idx(lat_idx, lon_idx, years)", "def get_place_details(self):\n self.google_api_url = 'https://maps.googleapis.com/maps/api/place/details/json?placeid={}&key={}'.format(self.place_id, api_key)\n self.r = requests.get(url=self.google_api_url)\n self.data = self.r.json()\n self.address_components = self.data['result']['address_components']\n\n for i in self.address_components:\n if i['types'][0] == 'locality':\n self.city = (i['long_name'])\n return (self.city)\n else:\n pass", "def get_geo_data(request):\n\n # Note that geoip2 (from maximind) doesn't work on GAE because there is a\n # C lib in there apparently.\n # We can use Appengine's added headers to do that work though thankfully.\n geo = dict()\n geo['region'] = request.headers.get(\"X-AppEngine-Region\", \"unknown\")\n geo['city'] = request.headers.get(\"X-AppEngine-City\", \"unknown\")\n geo['country'] = request.headers.get(\"X-AppEngine-Country\", \"unknown\")\n geo['city_lat_long'] = request.headers.get(\"X-AppEngine-CityLatLong\", \"unknown\")\n\n return geo", "def coordinates(latitude, longitude):\r\n location = geolocator.reverse(latitude + \", \" + longitude)\r\n data = location.raw\r\n data = data['address']\r\n state_code = data['state']\r\n return state_code", "def get_city(address):\n geolocator = Nominatim(user_agent=\"specify_your_app_name_here\")\n \n while True:\n try:\n location = geolocator.geocode(address)\n break\n except Exception:\n None\n \n city = citipy.nearest_city(location.latitude, location.longitude)\n return [city.city_name.title(), city.country_code.title()]", "def get_cities():\n _, cities = API.cities(limit=1000)\n result = []\n for city in cities['results']:\n result.append(city['city'])\n return result", "def _extract_coords_loc_entities(loc_entities: Iterable[GeoLocation]):\n return [\n (loc[\"Latitude\"], loc[\"Longitude\"])\n for loc in loc_entities\n if \"Latitude\" in loc and \"Longitude\" in loc\n ]", "def get_city_base(city_map = CHICAGO_NEIGHBORHOOD):\n shapes = shp.Reader(city_map).shapeRecords()\n x,y = [],[]\n for shape in shapes:\n inner_x,inner_y = list(zip(*shape.shape.points))\n x.append(inner_x)\n y.append(inner_y)\n x_flat = [item for sublist in x for item in sublist]\n y_flat = [item for sublist in y for item in sublist]\n return x_flat,y_flat", "def read_xy_file(self, city = \"\"):\n\t\tcenter = []\n\t\ttemp_list = []\n\t\tif 1 > len( city ):\n\t\t\treturn center\n\t\ttoday = datetime.datetime.now().strftime(\"%Y%m%d\")\n\t\ttry:\n\t\t\tinput_filename = f\"{city}{self.second_part_of_xy_filename}\"\n\t\t\twith open( os.path.join( self.input_dir, input_filename ), 'r', encoding='utf-8') as f:\n\t\t\t\tfor item in f.readlines()[1:]:\n\t\t\t\t\tcenter.append(tuple(item.strip().split(\",\")[-5:])) # lng, lat, ok0, max_value, max_timestamp\n\t\texcept Exception as ex:\n\t\t\tcenter = []\n\t\t\tself.logger.error( f\"Inside Method {sys._getframe().f_code.co_name} of Class {self.__class__.__name__}, cannot read xy_list file ({input_filename}) or requested xy points file ({input_filename}). 
Exception = {ex}\" )\n\t\treturn center", "def get_location(geoname):\n\n DB_NAME = global_settings.DB_NAME_GEONAMES\n db_user = global_settings.POSTGRESQL_USERNAME\n db_password = global_settings.POSTGRESQL_PASSWORD\n db_host = global_settings.POSTGRESQL_HOST\n db_port = global_settings.POSTGRESQL_PORT\n\n sql = \"SELECT latitude, longitude FROM {} WHERE name like '{}'\".format(global_settings.TABLE_NAME_GEONAMES, geoname)\n\n resp = sqlExecute(DB_NAME, db_user, db_password, db_host, db_port, sql, True)\n\n if not resp['success']:\n return []\n\n lat_long = []\n\n for data in resp['data']:\n lat_long.append([data[0], data[1]])\n\n return lat_long" ]
[ "0.6921667", "0.68246377", "0.67158616", "0.66107935", "0.6552043", "0.65011436", "0.6473308", "0.6398875", "0.63900644", "0.6264114", "0.624961", "0.62353706", "0.62113905", "0.61969936", "0.6188648", "0.6156948", "0.6132603", "0.61280274", "0.61232203", "0.61097056", "0.6091537", "0.60398066", "0.60241723", "0.60140693", "0.6008744", "0.5993676", "0.5982746", "0.5945034", "0.59381706", "0.59335995" ]
0.7427571
0
Evaluate the MNIST model
def evaluate(model, iterations, use_cuda=False): logger.debug("Allocating input and target tensors on GPU : %r", use_cuda) # create the instance of data loader data_loader = DataLoaderMnist(cuda=use_cuda, seed=1, shuffle=False, train_batch_size=64, test_batch_size=100) model.eval() total = 0 correct = 0 current_iterations = 0 with torch.no_grad(): for inputs, labels in data_loader.test_loader: inputs, labels = inputs.to(data_loader.device), labels.to(data_loader.device) output = model(inputs) current_iterations += 1 _, predicted = torch.max(output.data, dim=1) total += labels.size(0) correct += (predicted == labels).sum().item() if iterations is not None: if current_iterations >= iterations: break accuracy = correct / total return accuracy
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def MNIST_experiment():\n tsetlin_machine = TsetlinMachine(number_clauses=1000,\n number_action_states=1000,\n precision=3.0,\n threshold=10)\n\n X, y, val_X, val_y = MNIST()\n\n tsetlin_machine.fit(X, y, val_X, val_y, 300)\n print('Final training accuracy:', tsetlin_machine.accuracy(X, y))\n print('Final validation accuracy:', tsetlin_machine.accuracy(val_X, val_y))", "def mnist_training():\n mndata = MNIST(MNIST_PATH)\n train_ims, train_labels = mndata.load_training()\n train_X = np.array(train_ims).T\n train_y = np.array(train_labels).T\n return train_X, train_y", "def test_model_evaluation(model, mnist, idx, label):\n expected_probabilities = np.zeros((10,))\n expected_probabilities[label] = 1.0\n assert_array_almost_equal(\n model.classify(mnist.get_test_image(idx)),\n expected_probabilities\n )", "def test_keras_mnist():\n data = fetch(\"mnist\")\n check(data, n_samples_train=60000, n_samples_test=10000, n_features=28 * 28)", "def MNIST_data():\n\n # Pobieramy macierze numpy z cyframi\n # images[i,j,k] <=> piksel (j,k) z i-tego obrazka w zbiorze danych\n images, labels = get_MNIST_dataset(range(10), \"training\") #pierwszy argument to\n\n # a) Ilosc przykladow i rozmiary danych\n print \"Raw training data dimensions \", images.shape\n print \"Labels dimensions \",labels.shape\n\n # b) Ile jest cyfr 2?\n print \"Counting 2 in training dataset \",len(filter(lambda x: x == 2, labels))\n\n # c) Jaki jest sredni obrazek 2 ? (Usrednienie wszystkich macierzy ktore sa 2)\n\n #1. Pobierzmy wszystkie dwojki, fajny sposob indeksowania\n print labels == 2\n only_2 = images[labels == 2, :, :]\n print \"Checking number of 2s \", only_2.shape\n\n #2. TODO: Usrednienie (matrix.mean moze byc przydatne)\n\n #3. TODO: narysowanie usrednionej cyfry (zobacz pl.imshow)\n\n # d) Ostatnie - przetworzmy ostatnia cyfre do 1 wymiarowego wektora\n vectorized = np.reshape(images[-1], newshape=(images[-1].shape[0]*images[-1].shape[1]))\n print \"Vectorized last digit \", vectorized", "def load_data():\n # Load image data from MNIST.\n (train_x, train_y),(eval_x, eval_y) = keras.datasets.mnist.load_data()\n\n # We convert the input data to (60000, 28, 28, 1), float32 and normalize our data values to the range [0, 1].\n train_x = train_x.reshape(train_x.shape[0], train_x.shape[1], train_x.shape[2], 1)\n eval_x = eval_x.reshape(eval_x.shape[0], eval_x.shape[1], eval_x.shape[2], 1)\n\n train_x = train_x.astype('float32')\n eval_x = eval_x.astype('float32')\n train_x /= 255\n eval_x /= 255\n\n # Preprocess class labels \n train_y = train_y.astype(np.int32)\n eval_y = eval_y.astype(np.int32)\n\n train_y = np_utils.to_categorical(train_y, 10)\n eval_y = np_utils.to_categorical(eval_y, 10)\n\n return train_x, train_y, eval_x, eval_y", "def test_keras_mnist_return_X_y():\n X, y = fetch(\"mnist\", return_X_y=True)\n assert X.shape == (70000, 28 * 28)\n assert y.shape == (70000,)", "def test_mnist(args):\n # type: () -> None\n\n # Build dataset and model\n dataset = MNIST(path=args.path)\n model = MEMMNIST(input_shape=dataset.shape, code_length=64, cpd_channels=100, mem_dim=100, shrink_thres=0.5/100).cuda().eval()\n\n # Set up result helper and perform test\n helper = MEMResultHelper(dataset, model, checkpoints_dir=args.checkpoints, output_file='mem_mnist.txt')\n helper.test_one_class_classification()", "def test_dataset():\n X,Y = get_MNIST_training_normalized()\n digits_test_truth = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 632, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 166, 0, 0, 0, 0, 0]\n digits_test = []\n for example in 
itertools.islice(X,30):\n digits_test.append(sum(example[1:100]))\n assert(example.shape == (28*28,))\n\n show_as_image(X[0,:], 28, 28)\n print digits_test\n print digits_test_truth\n assert(digits_test_truth == digits_test)\n assert(X.shape == (60000, 28*28))\n assert(Y.shape == (60000,))\n return \"Dziala :)\"", "def mnist_v1(batch_size=128, epochs=20, kernel_size=3):\n (X_train, Y_train), (X_test, Y_test) = mnist.load_data()\n\n # Data preparation\n X_train = prepare(X_train)\n X_test = prepare(X_test)\n Y_train = np_utils.to_categorical(Y_train, 10) # 0..9\n Y_test = np_utils.to_categorical(Y_test, 10) # 0..9\n\n # Fitting the data to the augmentation data generator\n datagen = augmentedData(X_train)\n\n # --------------------\n # NEURAL NETWORK MODEL\n # --------------------\n\n # Model architecture\n model = Sequential()\n\n model.add(Conv2D(32, (kernel_size, kernel_size), activation='relu', input_shape=(1, 28, 28)))\n model.add(Conv2D(32, (kernel_size, kernel_size), activation='relu'))\n model.add(MaxPooling2D(pool_size=(2, 2)))\n model.add(Dropout(0.25))\n\n model.add(Flatten())\n model.add(Dense(128, activation='relu'))\n model.add(Dropout(0.5))\n model.add(Dense(10, activation='softmax'))\n\n # Model compilation\n model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])\n\n #Tensor board saves\n now = datetime.datetime.now()\n tensorboard = TensorBoard(log_dir=\"logs_first/kernel_size:{}\".format(kernel_size))\n\n model.fit_generator(datagen.flow(X_train, Y_train, batch_size=batch_size), epochs=epochs, verbose=1, callbacks=[tensorboard])\n\n # Model saves\n now = datetime.datetime.now()\n model.save(\"sirr_HYPERPARAMETERS_mnist_first_\" + str(now.hour) + \"h\" + str(now.minute) + \".h5\")\n\n # Model evaluation\n return model.evaluate(X_test, Y_test, verbose=1)", "def evaluate(sess, images_ph, labels_ph, softmax, mnist, config, task):\n\n print 'Evaluating on {} task ({}x{}, {} distractors) using {} glimpses (at {} scales)'.format(\n task, config.new_size, config.new_size, config.n_distractors,\n config.num_glimpses, config.n_patches)\n\n # Evaluation\n test_acc = []\n val_acc = []\n\n for k, dataset in enumerate([mnist.validation, mnist.test]):\n\n steps_per_epoch = dataset.num_examples // config.eval_batch_size\n correct_cnt = 0\n num_samples = steps_per_epoch * config.batch_size\n # loc_net.sampling = True\n\n for test_step in tqdm(xrange(steps_per_epoch)):\n\n images, labels = dataset.next_batch(config.batch_size)\n images = images.reshape((-1, config.original_size, config.original_size, 1))\n labels_bak = labels\n\n if task == 'translated':\n images = translate(images, width=config.new_size, height=config.new_size)\n elif task == 'cluttered':\n images = clutter(images,\n dataset.images.reshape((-1, config.original_size, config.original_size, 1)),\n width=config.new_size, height=config.new_size, n_patches=config.n_distractors\n )\n elif task == 'cluttered_var':\n images, _, _, _ = clutter_rnd(images,\n train_data=dataset.images.reshape(\n (-1, config.original_size, config.original_size, 1)),\n lim=config.distractor_range,\n color_digits=config.color_digits,\n color_noise=config.color_noise,\n width=config.new_size, height=config.new_size, norm=True)\n\n # else:\n # print 'original mnist data ({}x{}).'.format(config.original_size,config.original_size)\n\n # Duplicate M times (average prediction over M repeats)\n images = np.tile(images, [config.M, 1, 1, 1])\n labels = np.tile(labels, [config.M])\n\n softmax_val = sess.run(softmax,\n feed_dict={\n 
images_ph: images,\n labels_ph: labels\n })\n softmax_val = np.reshape(softmax_val,\n [config.M, -1, config.num_classes])\n softmax_val = np.mean(softmax_val, 0)\n\n pred_labels_val = np.argmax(softmax_val, 1)\n correct_cnt += np.sum(pred_labels_val == labels_bak)\n acc = correct_cnt / float(num_samples)\n\n if k == 0:\n print '\\nVal accuracy\\t{:4.4f} ({:4.4f} error)'.format(100 * acc, 100 - 100 * acc)\n val_acc = acc\n else:\n print 'Test accuracy\\t{:4.4f} ({:4.4f} error)\\n'.format(100 * acc, 100 - 100 * acc)\n test_acc = acc\n\n return test_acc, val_acc", "def evaluate(X_test, y_test):\n # batch size is 16 for evaluation\n batch_size = 16\n\n # Load Model\n model = load_model('model/model.h5')\n return model.evaluate(X_test, y_test, batch_size, verbose = 1)", "def mnist_testing(shuffled = True):\n mndata = MNIST(MNIST_PATH)\n test_ims, test_labels = mndata.load_testing()\n test_X = np.array(test_ims).T\n test_y = np.array(test_labels).T\n return test_X, test_y", "def use_mnist_model(self):\n\n\t\t# load the model\n\t\tnumber_recognizer_MNIST = load_model('models/MNIST_digits_recognition.h5', compile=False)\n\n\t\t# create empty ndarray\n\t\tnumbers_mnist = np.ones(shape=(self.sudoku_size, self.sudoku_size))\n\n\t\tpics = deepcopy(self.list_of_number_pictures)\n\t\tfor i in range(self.sudoku_size):\n\t\t\tfor j in range(self.sudoku_size):\n\t\t\t\tpics[i][j] = self.preprocess_cell(pics[i][j], mnist=True, resize=True, clean_remains=True)\n\t\t\t\tif self.empty_cells[i][j] != 0:\n\t\t\t\t\tnumbers_mnist[i][j] = np.argmax(number_recognizer_MNIST.predict([[pics[i][j].reshape(28,28,1)]]))\n\n\t\treturn numbers_mnist", "def evaluate_model(X_train, X_test, y_train, y_test, batch_size, nb_epoch):\n model = Sequential()\n model.add(Dense(512, input_shape=(784,)))\n model.add(Activation(\"relu\"))\n model.add(Dropout(0.2))\n model.add(Dense(512))\n model.add(Activation(\"relu\"))\n model.add(Dropout(0.2))\n model.add(Dense(10))\n model.add(Activation(\"softmax\"))\n model.compile(loss=\"categorical_crossentropy\",\n optimizer=RMSprop(),\n metrics=[\"accuracy\"])\n model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch,\n verbose=1, validation_data=(X_test, y_test))\n results = model.evaluate(X_test, y_test, verbose=0)\n return results, model", "def train_and_evaluate(self) -> None:\n with tf.Session() as self.sess:\n # Initialize computation graph.\n self.create_model()\n\n # Initialize variables.\n tf.global_variables_initializer().run()\n\n # Initialize summary writer.\n self.writer = tf.summary.FileWriter(logdir='conv_vis')\n\n for epoch_no in range(self.nb_epochs):\n # Train model on next batch\n batch_x, batch_y = self.mnist.train.next_batch(self.mb_size)\n results = self.train_on_batch(batch_x, batch_y, global_step=epoch_no)\n\n if epoch_no > 0 and epoch_no % self.lr_decay_time == 0:\n # Test on all samples.\n self.test_on_all()\n # Perform learning rate decay.\n self.learning_rate /= 2\n if epoch_no % 100 == 0:\n self.logger.info(\"Epoch {0}: Loss: {1[0]}, accuracy: {1[1]}\".format(epoch_no, results))\n batch_x_t, batch_y_t = self.mnist.test.next_batch(self.mb_size)\n test_results = self.test_on_batch(batch_x_t, batch_y_t)\n self.logger.info(\"(Test(batch): Loss: {0[0]}, accuracy: {0[1]}\".format(test_results))\n self.test_on_all()\n\n # Save the trained model with all valuable variables.\n saver = tf.train.Saver()\n saver.save(sess=self.sess, save_path='./saved_model', global_step=epoch_no)", "def eval_model(model, x_test, y_test, batch_size=None):\n if batch_size is 
None:\n batch_size = 128\n\n loss, acc = model.evaluate(x_test, y_test, batch_size=batch_size)\n confusion_matrix_model(model, y_test, x_test)\n return loss, acc", "def evaluate_knn(train_from_scratch=False, verbose=True):\n data_train, data_test = load_MNIST(num_training=60000, num_validation=0)\n\n print(\"Evaluating the k-NN classifier...\")\n start_timer = time.time()\n\n model = kNearestNeighbours()\n\n path_to_optimal = os.path.join(path_to_models, 'knn/optimal_k.npy')\n if not train_from_scratch and os.path.exists(path_to_optimal):\n best_k = np.load(path_to_optimal)\n print(\"\\tLoading pre-computed optimal parameter k={}\".format(best_k))\n else:\n validator = KFoldCrossValidation(data=data_train, k=5)\n best_k = validator.validate(model=model, ranges=xrange(1, 10), verbose=verbose)\n np.save(path_to_optimal, best_k)\n\n model.fit(data_train)\n predictions = model.predict(data_test['x_test'], k=best_k)\n\n test_acc = np.sum(predictions == data_test['y_test']) / float(predictions.shape[0]) * 100.\n\n test_time = time.time() - start_timer\n print(\"\\tEvaluated in {} s\".format(test_time))\n print(\"\\tTest accuracy = {0}% (Test error = {1}%)\".format(test_acc, 100. - test_acc))\n\n # log the result from the test\n np.save(os.path.join(path_to_results, 'predictions_knn.npy'), predictions)\n\n del data_train, data_test, model\n return test_acc", "def evaluate_model(testing_images, *model, num_images=None, shuffle=False):\n if num_images is None:\n if hasattr(testing_images, \"__len__\"):\n num_images = len(testing_images)\n else:\n raise ValueError(\"Require num_images with infinite dataset\")\n \n if shuffle:\n testing_images = random_iterator(testing_images)\n imgs = islice(testing_images, num_images)\n\n E = Evaluator()\n logging.info(f\"Running model on {num_images} images\")\n for idx,(gt,dt,shape) in enumerate(detect_on_images(imgs, *model), start=1):\n E.add_ground_truth(idx, gt, shape)\n E.add_detections(idx, dt)\n if idx % 20 == 0:\n logging.info(f\"{idx}\")\n\n if num_images != idx:\n logging.warning(f\"Requested test on {num_images} but only {idx} images were given in dataset.\")\n\n return E", "def evaluate(network, loss_function, softmax_function, test_loader, test_set_size):\n running_loss = 0.0\n confusion_matrix = { # Of shape [predicted value][real value]\n 0: {0: 0, 1: 0, 2: 0},\n 1: {0: 0, 1: 0, 2: 0},\n 2: {0: 0, 1: 0, 2: 0},\n }\n batch_size = -1\n network.eval()\n with torch.no_grad():\n correct = 0\n for graph_batch, label_batch in test_loader:\n if batch_size == -1:\n batch_size = label_batch.size(0)\n logits = network(graph_batch, graph_batch.ndata['n_feat'], graph_batch.edata['e_feat'], 0, 0)\n running_loss += loss_function(logits, label_batch).detach().item()\n predicted_classes = torch.argmax(logits, dim=1).detach()\n correct += (predicted_classes == label_batch).sum().item()\n for predicted_class, label in zip(predicted_classes, label_batch):\n confusion_matrix[predicted_class.item()][label.item()] += 1\n\n if batch_size <= 0:\n print(\"Error : batch size is {}\".format(batch_size))\n exit(1)\n\n return correct / test_set_size, running_loss / len(test_loader), confusion_matrix", "def evaluate():\n\tmodel.eval()\n\tstddev = 1 # And mean=0\n\tfor batch_idx, (data, _) in enumerate(syn_test_loader):\n\t\tdata = data.cuda()\n\t\tif batch_idx == 0:\n\t\t\tnoise = torch.autograd.Variable(torch.randn(batch_size, bottleneck).cuda() * stddev)\n\t\t\tsample_representation(\"orig_nat\", data, noise)\n\t\t\tsample_representation(\"natural\", data, 
noise)\n\t\t\tsample_representation(\"orig_syn\", data, noise)\n\t\t\tsample_representation(\"synth\", data, noise)", "def get_mnist():\n from keras.datasets import mnist\n\n # input image dimensions\n img_rows, img_cols = 28, 28\n num_classes = 10\n # the data, shuffled and split between train and test sets\n (x_train, y_train), (x_test, y_test) = mnist.load_data()\n\n if K.image_data_format() == 'channels_first':\n print (\"Using Channels first\")\n x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)\n x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)\n input_shape = (1, img_rows, img_cols)\n else:\n print(\"Channels last\")\n x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)\n x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)\n input_shape = (img_rows, img_cols, 1)\n\n return (x_train, y_train), (x_test, y_test)", "def get_mnist_cnn():\n # Set defaults.\n nb_classes = 10 #dataset dependent \n batch_size = 128\n epochs = 4\n \n # Input image dimensions\n img_rows, img_cols = 28, 28\n\n # Get the data.\n # the data, shuffled and split between train and test sets\n (x_train, y_train), (x_test, y_test) = mnist.load_data()\n \n if K.image_data_format() == 'channels_first':\n x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)\n x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)\n input_shape = (1, img_rows, img_cols)\n else:\n x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)\n x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)\n input_shape = (img_rows, img_cols, 1)\n\n #x_train = x_train.reshape(60000, 784)\n #x_test = x_test.reshape(10000, 784)\n \n x_train = x_train.astype('float32')\n x_test = x_test.astype('float32')\n x_train /= 255\n x_test /= 255\n\n #print('x_train shape:', x_train.shape)\n #print(x_train.shape[0], 'train samples')\n #print(x_test.shape[0], 'test samples')\n\n # convert class vectors to binary class matrices\n y_train = to_categorical(y_train, nb_classes)\n y_test = to_categorical(y_test, nb_classes)\n\n # convert class vectors to binary class matrices\n #y_train = keras.utils.to_categorical(y_train, nb_classes)\n #y_test = keras.utils.to_categorical(y_test, nb_classes)\n\n return (nb_classes, batch_size, input_shape, x_train, x_test, y_train, y_test, epochs)", "def train_mnist():\r\n # type: () -> None\r\n\r\n # Build dataset and model\r\n dataset = MNIST_TRAIN(path=Config.video_folder)\r\n model = LSAMNIST(input_shape=dataset.shape, code_length=64,\r\n cpd_channels=100).to(device).train()\r\n\r\n # Set up result helper and perform test\r\n helper = OneClassResultHelper(dataset, model,\r\n checkpoints_dir=Config.model_ckpt,\r\n output_file='mnist.txt')\r\n helper.train_one_class_classification()", "def test(self):\r\n \r\n args = self.args\r\n model = self.model\r\n dataset = self.dataset\r\n \r\n dataset.set_split('test')\r\n batch_generator = generate_nmt_batches(dataset, \r\n batch_size=len(dataset), \r\n device=args.device)\r\n\r\n acc_sum = 0.0\r\n model.eval()\r\n \r\n for batch_index, batch_dict in enumerate(batch_generator):\r\n # step 1. 
compute the output\r\n if isinstance(model,NMTModelWithMLTM):\r\n y_pred = model(batch_dict['x_source'], \r\n batch_dict['x_source_mltm_vector'],\r\n batch_dict['x_source_length'], \r\n batch_dict['x_target'])\r\n else:\r\n y_pred = model(batch_dict['x_source'], \r\n batch_dict['x_source_length'], \r\n batch_dict['x_target'])\r\n\r\n acc_t = compute_accuracy(y_pred, batch_dict['y_target'], self.mask_index)\r\n acc_sum += acc_t\r\n \r\n return acc_sum / (batch_index+1)", "def evaluate(data_loader, model, device):\n\n\tmodel.eval()\n\ttotal_num_examples = 0\n\ttotal_error = 0\n\tfor idx, batch in enumerate(data_loader):\n\t\tquestion_feature_vec = batch['feature_vec'].to(device)\n\t\tquestion_len = batch['len'].to(device)\n\t\tlabels = batch['labels'].to(device)\n\n\t\t####Your code here ---\n\n\t\t# get the output from the model\n\t\tlogits = model(question_feature_vec, question_len)\n\n\t\t# get error, num_examples using accuracy_fn defined previously\n\t\terror, num_examples = accuracy_fn(logits, labels)\n\n\t\t# update total_error and total_num_examples\n\t\ttotal_error += error\n\t\ttotal_num_examples += num_examples\n\n\taccuracy = 1 - total_error / total_num_examples\n\treturn accuracy", "def evaluate(dataloader, model):\n with torch.no_grad():\n model.eval()\n count = 0\n correct = 0\n total_loss = 0.0\n reg_loss = 0.0\n l2_lambda = 0.00001\n criterion = nn.BCEWithLogitsLoss()\n for images_data, target_labels in tqdm(dataloader):\n if config.use_gpu:\n images_data = images_data.cuda()\n target_labels = target_labels.cuda()\n predicted_labels = model(images_data)\n total_loss += criterion(predicted_labels, target_labels)\n count += predicted_labels.shape[0]\n preds = predicted_labels.argmax(dim=1)\n targets = target_labels.argmax(dim=1)\n correct += (torch.eq(preds, targets)).sum().item()\n \n l2_reg = torch.tensor(0.)\n if config.use_gpu:\n l2_reg = l2_reg.cuda()\n for param in model.parameters():\n l2_reg += torch.norm(param)\n reg_loss += l2_lambda * l2_reg\n\n total_loss += reg_loss\n accuracy = correct * 1.0 / count\n return accuracy, total_loss.item()", "def evaluate(self):\n predictions = self.model.predict(self.test[0])\n accuracy = accuracy_score(self.test[1], predictions)\n print(\"Accuracy:\", str(accuracy * 100) + \"%\")\n self.plot_results(predictions)", "def evaluate(model, g, val_nid, device):\n model.eval()\n nfeat = g.ndata['features']\n labels = g.ndata['labels']\n with th.no_grad():\n pred = model.module.inference(g, nfeat, device, args.batch_size, args.num_workers)\n model.train()\n test_acc = Accuracy()\n return test_acc(th.softmax(pred[val_nid], -1), labels[val_nid].to(pred.device))", "def run_test():\n # Get the sets of images and labels for training, validation, and\n # test on MNIST.\n train ,validation,test = datasets_mnist.read_data_sets(FLAGS.input_data_dir, FLAGS.fake_data)\n # Tell TensorFlow that the model will be built into the default Graph.\n with tf.Graph().as_default():\n # Generate placeholders for the images and labels.\n images_placeholder, labels_placeholder, phase_pl = placeholder_inputs(\n FLAGS.batch_size)\n\n # Build a Graph that computes predictions from the inference model.\n logits = mnist.inference(images_placeholder,\n FLAGS.hidden1,\n FLAGS.hidden2, \n phase_pl)\n\n eval_correct = mnist.evaluation(logits, labels_placeholder)\n # Add the variable initializer Op.\n all_variable = tf.global_variables()\n \n # Create a saver for writing training checkpoints.\n saver = tf.train.Saver()\n\n # Create a session for running Ops on the Graph.\n 
with tf.Session() as sess:\n\n saver.restore(sess, \"log/model.ckpt-1999\")\n for variable in all_variable:\n if \"moving\" in variable.name:\n print(variable.name, variable.eval())\n do_eval(sess,\n eval_correct,\n images_placeholder,\n labels_placeholder,\n phase_pl,\n test)" ]
[ "0.7296402", "0.69910675", "0.68859035", "0.68567264", "0.68049806", "0.6793459", "0.6782096", "0.6699586", "0.6637153", "0.66119355", "0.6607346", "0.66001266", "0.65700907", "0.65339303", "0.6532473", "0.63975126", "0.6392527", "0.63610214", "0.63309705", "0.6328554", "0.6319658", "0.631655", "0.62928355", "0.62652254", "0.62563694", "0.6254323", "0.6251134", "0.6250999", "0.62289816", "0.62269014" ]
0.70785034
1
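
The negatives in the record above all follow the same MNIST evaluation pattern: prepare the test split, then score a trained classifier with `model.evaluate`. Below is a minimal, self-contained sketch of that pattern; it is an illustration, not code from the record. It uses `tensorflow.keras` (the snippets above use the standalone `keras` package), and the model path and the dense-input assumption are hypothetical.

```python
# Minimal sketch of the shared evaluation pattern: load MNIST, normalize,
# one-hot encode the labels, then score a saved Keras model.
# "model/model.h5" is a hypothetical placeholder path, and the flat
# 784-feature reshape assumes a dense-input model.
from tensorflow.keras.datasets import mnist
from tensorflow.keras.models import load_model
from tensorflow.keras.utils import to_categorical

def evaluate_saved_model(model_path="model/model.h5", batch_size=128):
    (_, _), (x_test, y_test) = mnist.load_data()
    x_test = x_test.reshape(-1, 28 * 28).astype("float32") / 255.0
    y_test = to_categorical(y_test, 10)

    model = load_model(model_path)
    # Assumes the model was compiled with accuracy as its only metric,
    # so evaluate() returns exactly (loss, accuracy).
    loss, acc = model.evaluate(x_test, y_test, batch_size=batch_size, verbose=1)
    return loss, acc
```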
This function returns a dataframe containing all the FOVs available on LabKey for a particular cell line.
def query_data_from_labkey(cell_line_id): # Query for labkey data db = LabKey(contexts.PROD) # Get production data for cell line data = db.dataset.get_pipeline_4_production_cells([("CellLine", cell_line_id)]) data = pd.DataFrame(data) # Because we are querying the `cells` dataset and not the `fovs` dataset # We need to clean up just a tiny bit # NOTE: Tyler is looking into this # The only reason we query the `cells` dataset is for the `PixelScale` numbers # But those _should_ be exposed on the `fovs` dataset so he is looking into # why they aren't. In the future this query should be much simpler. # Select down to just the columns we want data = data[[ "FOVId", "CellLine", "Gene", "Protein", "PixelScaleX", "PixelScaleY", "PixelScaleZ", "SourceReadPath", "ChannelNumber405", "ChannelNumber638", "ChannelNumberBrightfield", "NucleusSegmentationReadPath", "MembraneSegmentationReadPath", "StructureSegmentationReadPath" ]] # Drop duplicates because this dataset will have a row for every cell # instead of per-FOV data = data.drop_duplicates("FOVId") data = data.set_index("FOVId") # Fix all filepaths data = fix_filepaths(data) return data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_camfiber_table(date, exp_id):\n filename = '/exposures/nightwatch/{}/{:08d}/qa-{:08d}.fits'.format(date, exp_id, exp_id)\n\n tab = None\n if os.path.isfile(filename):\n tab = Table.read(filename, hdu='PER_CAMFIBER')\n tab = tab['FIBER', 'MEDIAN_CALIB_SNR', 'CAM']\n return tab", "def get_fov(one_sec, times=None):\n\n # We have to do some checking of the input:\n if isinstance(one_sec, pd.Series):\n # We were passed a Series; make it a DataFrame\n one_sec = pd.DataFrame([one_sec])\n elif isinstance(one_sec, pd.DataFrame):\n # We were passed several rows of a DataFrame, so nothing to do\n pass\n elif isinstance(one_sec, Ltg):\n # We were passed a pyltg.Ltg object, get the Dataframe\n one_sec = one_sec.data\n else:\n raise TypeError('Unknown argument type for one_sec')\n\n # If we have times, interpolate to get the spacecraft position\n if times is not None:\n pos, vel, transform_matrix = interp_one_sec(times,\n one_seconds=one_sec)\n\n if len(pos.shape) == 1:\n # If True, then we were passed a scalar for times.\n # Need to a little manipulation before moving on...\n pos = pos.reshape(-1, 1)\n vel = vel.reshape(-1, 1)\n transform_matrix = transform_matrix.reshape(-1, 1)\n\n one_sec = pd.DataFrame({\n 'position_vector': list(pos.T),\n 'velocity_vector': list(vel.T),\n 'transform_matrix': list(transform_matrix.T)})\n\n # At this point, we've made sure we have a DataFrame. This makes\n # the following much more straightforward....\n\n pix = ccd_pixels(border=True)\n\n fov = list()\n\n for _, row in one_sec.iterrows():\n\n lla = geolocate_pixels(row.position_vector,\n row.velocity_vector,\n row.transform_matrix,\n pix.x, pix.y)\n fov.append(lla)\n\n return fov", "def dfrlvi(self):\n if self._dfrlvi is None:\n df = self.tri.rlvi.reset_index(drop=False)\n df = df.rename({\"index\": \"origin\", \"dev\": \"l_act_dev\"}, axis=1)\n self._dfrlvi = df.drop(\"col_offset\", axis=1)\n return(self._dfrlvi)", "def VACF(df,conversion = \"x\"):\n #conversion from pixels to micrometers\n if conversion == \"y\":\n df = df/1200*633\n else:\n df = df/1600*844\n #computes the velocity in one direction between the frames\n dif = pd.DataFrame()\n\n for i in range(1,len(df.T)):\n dif[i-1] = velocity(df[i-1],df[i])\n vel = []\n for i in range(len(dif)):\n vel.append(tidynamics.acf(dif.T[i]))\n\n #return the velocities in array\n return np.array(vel)", "def fcvs(self): \n return self._link_reg.fcvs", "def get_dekosky(self) -> pd.DataFrame:\n return pd.read_feather(self.figure_data_paths.dekosky_vh12_path)", "def _cv_results(self):\n df = pd.DataFrame(\n (self.model_.coef_path_.reshape(-1, self.model_.coef_path_.shape[-1])).T,\n columns=[f\"{col}_coeff_path\" for col in self.X_train_.columns.tolist()],\n )\n df[\"intercept_path\"] = (\n self.model_.intercept_path_.reshape(\n -1, self.model_.intercept_path_.shape[-1]\n )\n ).T\n df[\"lambda_path\"] = self.model_.lambda_path_\n df[\"cv_standard_error\"] = self.model_.cv_standard_error_\n df[\"cv_mean_score\"] = self.model_.cv_standard_error_\n\n return df", "def detections():\n frame = pd.read_csv(PATH + 'detections.csv', decimal=',')\n frame.beeID = frame.beeID.apply(parse_float_list)\n frame.descriptor = frame.descriptor.apply(parse_float_list)\n return frame", "def factor_exposure(self):\n exp_hs_all = pd.DataFrame([])\n exp_zz_all = pd.DataFrame([])\n for i in range(len(self.weekly_date)):\n date = self.weekly_date.iloc[i,0]\n factor = get_barra_factor_from_sql(date)\n factor['secID'] = factor.index.tolist()\n stocklist = factor.index.tolist()\n \n hs300 = 
get_index_composition(date,'000300.SH')\n zz500 = get_index_composition(date,'000905.SH')\n hs300['secID'] = hs300.index.tolist()\n zz500['secID'] = zz500.index.tolist()\n \n stocklist_hs300 = list(set(hs300.index.tolist()).intersection(set(stocklist)))\n stocklist_zz500 = list(set(zz500.index.tolist()).intersection(set(stocklist)))\n stocklist_hs300.sort()\n stocklist_zz500.sort()\n \n factor_hs = extract_part_from_all(stocklist_hs300,factor,'secID')\n factor_zz = extract_part_from_all(stocklist_zz500,factor,'secID')\n hs_weight = extract_part_from_all(stocklist_hs300,hs300,'secID')\n zz_weight = extract_part_from_all(stocklist_zz500,zz500,'secID')\n del factor_hs['secID'],factor_zz['secID'],hs_weight['secID'],zz_weight['secID']\n \n \n exp_hs = pd.DataFrame(np.dot(hs_weight.T,factor_hs))\n exp_zz = pd.DataFrame(np.dot(zz_weight.T,factor_zz))\n \n \n exp_hs_all = pd.concat([exp_hs_all,exp_hs], axis = 0)\n exp_zz_all = pd.concat([exp_zz_all,exp_zz], axis = 0) \n print(i)\n exp_hs_all.columns = ['Beta','Momentum','Size','EY','RV','Growth',\\\n 'BP','Leverage','Liquidity']\n exp_zz_all.columns = ['Beta','Momentum','Size','EY','RV','Growth',\\\n 'BP','Leverage','Liquidity']\n exp_hs_all.index = self.weekly_date.iloc[:,0]\n exp_zz_all.index = self.weekly_date.iloc[:,0]\n return exp_hs_all,exp_zz_all", "def trace_all_fields(opt_model):\n osp = opt_model.optical_spec\n fld, wvl, foc = osp.lookup_fld_wvl_focus(0)\n fset = []\n for f in osp.field_of_view.fields:\n rset = trace_field(opt_model, f, wvl, foc)\n fset.append(rset)\n\n fdf = pd.concat(fset, keys=osp.field_of_view.index_labels,\n names=['field'])\n return fdf", "def load_and_filer(pwd,rval=0.95):\n df = pd.read_csv(pwd)\n df = rl.give_good_structure(df)\n df = df.loc[(df['end_type']=='DIVISION')|(df['end_type']=='DIV')|(df['end_type']=='div')]\n if 'length_box' in df.columns: #guillaume data\n df['time_sec'] = df['frame']*60*3\n df['length_box_um'] = df['length_box']*0.065\n else:\n df['length_box_um'] = (df['vertical_bottom'] - df['vertical_top'])*0.065\n df = df.groupby('cell').filter(lambda x: True if len(x['length_box_um'])>2 else False)\n df =df.groupby('cell').filter(lambda x: linregress(x['time_sec'],np.log(x['length_box_um'])).rvalue>rval)\n #df = rl.give_unique_dataset(df,6,18)\n df =df[['length_box_um','time_sec','parent_id','id','gl','date','pos','cell','lane_ID','end_type']]\n return df", "def _get_vif_table(self):\n\n vif_data = [['']]\n\n exog = self._model.exog\n\n # for variable in self._explanatory_variables:\n for exog_idx in range(1, exog.shape[1]):\n vif = variance_inflation_factor(exog, exog_idx)\n\n vif_data.append([self._FLOAT_STRING_FORMAT.format(vif)])\n\n vif_table = SimpleTable(vif_data, headers=['VIF'])\n\n return vif_table", "def to_vwlines(_df):\n\n LOG.info(\"start to_vwlines\")\n\n lines_YABCD = to_str_series(_df, \"y\") + \" \" + to_str_series(_df, \"count\")\n \n lines_YABCD += \" |A \" + _df.genre\n lines_YABCD += \" |B \" + _df.user_pref\n lines_YABCD += \" |C\" \n lines_YABCD += np.where(_df.pref0, \" prefme_T\", \" prefme_F\") \n lines_YABCD += np.where(_df.pref24, \" pref24_T\", \" pref24_F\") \n lines_YABCD += \" |D \" + _df.pref\n\n lines_I = to_emptystr_series(_df) \n lines_I += \" |I\"\n lines_I += to_floatstr_series(_df, \"lnpop\")\n\n lines_N = to_emptystr_series(_df) \n lines_N += \" |N\"\n lines_N += to_floatstr_series(_df, \"d2\")\n lines_N += \" d2bin\" + to_str_series(_df, \"d2bin\")\n lines_N += np.where(_df[\"area\"] == 1, \" area\", \"\")\n\n lines_OP = to_emptystr_series(_df) \n 
lines_OP += \" |O\"\n lines_OP += \" \" + _df.SEX_ID\n lines_OP += \" |P\"\n lines_OP += to_floatstr_series(_df, \"AGE\")\n\n lines_Q = to_emptystr_series(_df) \n lines_Q += \" |Q\"\n lines_Q += np.where(_df.pref0, \" prefme_T\", \" prefme_F\") \n lines_Q += np.where(_df.user_prefNN, \" prefNN_T\", \" prefNN_F\") \n lines_Q += \" \" + to_floatstr_series(_df, \"d2\")\n\n lines_RS = to_emptystr_series(_df) \n lines_RS += \" |R\"\n lines_RS += to_floatstr_series(_df, \"pb_same_sarea\")\n lines_RS += to_floatstr_series(_df, \"pb_same_v_sarea\")\n lines_RS += \" |S\"\n lines_RS += np.where(_df.past_key > 0, \" past_key\", \"\")\n\n lines_FG = to_emptystr_series(_df) \n lines_FG += \" |F\" \n lines_FG += np.where(_df.spot, \" spot_T\", \" spot_F\") \n lines_FG += \" |G\" \n lines_FG += np.where(_df.user_prefNN, \" user_prefNN_T\", \" user_prefNN_F\")\n\n lines_E = \"\"\n lines_E += \" |E\"\n for ge in Utility.genres:\n lines_E += to_floatstr_series(_df, ge) \n \n lines_H = \"\"\n lines_H += \" |H\"\n for pr in Utility.prefs:\n lines_H = lines_H + to_floatstr_series(_df, pr) \n\n lines_K = \"\"\n lines_K += \" |K\"\n for ge in Utility.v_genres:\n lines_K += to_floatstr_series(_df, ge) \n lines_K += to_floatstr_series(_df, \"zprice\")\n lines_K += to_floatstr_series(_df, \"pb_same_genreprice\")\n lines_K += to_floatstr_series(_df, \"pb_same_v_genreprice\")\n\n lines_L = \"\"\n lines_L += \" |L\"\n for pr in Utility.v_prefs:\n lines_L += to_floatstr_series(_df, pr) \n\n lines = (lines_YABCD + lines_E + lines_FG + lines_H + lines_I + lines_K \n + lines_L + lines_N + lines_OP + lines_Q + lines_RS)\n\n LOG.info(\"finish to_vwlines\")\n return lines", "def fov_diag(fov_directory=FOV_DIRECTORY):\n # Collect all FOVs into a dictionary:\n fd = make_fov_dict(fov_directory=fov_directory)\n fov_names = list(fd.keys())\n fov_names.sort()\n print(len(fov_names), ' FOV files found to test in directory_path \\'', fov_directory, \"\\'.\")\n\n # Make empty error dictionary:\n error_dict = [] # key=fov_name, value=list of error messages\n for name in fov_names:\n error_dict[name] = []\n\n # First, verify format versions.\n print(\"FOV format version required to be : \\'\" + CURRENT_SCHEMA_VERSION + '\\'')\n for name in fov_names:\n fov = fd[name]\n if fov.format_version != CURRENT_SCHEMA_VERSION:\n error_dict[name].append('Format version \\'' + fov.format_version + '\\'')\n\n # Verify reasonable JD values:\n jd_min = 2451544.5 # January 1 2000\n jd_max = jd_from_datetime_utc(datetime.now(timezone.utc)) # time at this check\n print(\"JD limits applied: \" + '{0:.3f}'.format(jd_min) + ' to ' + '{0:.3f}'.format(jd_max))\n for name in fov_names:\n fov = fd[name]\n if fov.target_type.lower() != \"standard\":\n if not jd_min <= fov.JD_bright <= jd_max:\n error_dict[name].append(\"JD_bright '\" + '{0:.3f}'.format(fov.JD_bright) +\n \"' unreasonable.\")\n if not jd_min <= fov.JD_faint <= jd_max:\n error_dict[name].append(\": JD_faint '\" + '{0:.3f}'.format(fov.JD_faint) +\n \"' unreasonable.\")\n if fov.JD_second is not None:\n if not jd_min <= fov.JD_second <= jd_max:\n error_dict[name].append(\": JD_second '\" + '{0:.3f}'.format(fov.JD_second) +\n \"' unreasonable.\")\n\n # Verify reasonable mag and color values:\n mag_bright = 5.0\n mag_faint = 18.0\n color_min = -0.2\n color_max = +7.5\n print(\"Mag limits: \" + '{0:.3f}'.format(mag_bright) + ' to ' + '{0:.3f}'.format(mag_faint))\n print(\"Color limits: \" + '{0:.3f}'.format(color_min) + ' to ' + '{0:.3f}'.format(color_max))\n for name in fov_names:\n fov = fd[name]\n 
if fov.target_type.lower() != \"standard\":\n if not mag_bright <= fov.mag_V_bright <= mag_faint:\n error_dict[name].append(\"mag_V_bright '\" + '{0:.3f}'.format(fov.mag_V_bright) +\n \"' unreasonable.\")\n if not mag_bright <= fov.mag_V_faint <= mag_faint:\n error_dict[name].append(\"mag_V_faint '\" + '{0:.3f}'.format(fov.mag_V_faint) +\n \"' unreasonable.\")\n if fov.mag_V_second is not None:\n if not mag_bright <= fov.mag_V_second <= mag_faint:\n error_dict[name].append(\"mag_V_second '\" + '{0:.3f}'.format(fov.mag_V_second) +\n \"' seems unreasonable.\")\n if not color_min <= fov.color_VI_bright <= color_max:\n error_dict[name].append(\"color_VI_bright '\" +\n '{0:.3f}'.format(fov.color_VI_bright) +\n \"' seems unreasonable.\")\n if not color_min <= fov.color_VI_faint <= color_max:\n error_dict[name].append(\"color_VI_faint '\" + '{0:.3f}'.format(fov.color_VI_faint) +\n \"' seems unreasonable.\")\n if fov.color_VI_second is not None:\n if not color_min <= fov.color_VI_second <= color_max:\n error_dict[name].append(\"color_VI_second '\" +\n '{0:.3f}'.format(fov.color_VI_second) +\n \"' seems unreasonable.\")\n\n # Ensure main target is in star list, as a target (skip standard FOVs):\n print(\" Ensure main_target in star list, as a target [skip standard FOVs]\")\n for name in fov_names:\n fov = fd[name]\n if fov.target_type.lower() != \"standard\":\n main_target_star_type = [star.star_type for star in fov.aavso_stars\n if star.star_id.lower() == fov.main_target.lower()]\n if len(main_target_star_type) <= 0:\n error_dict[name].append(\"main_target '\" + fov.main_target +\n \"' absent from star list.\")\n if len(main_target_star_type) > 1:\n error_dict[name].append(\"main_target '\" + fov.main_target +\n \"' in star list more than once.\")\n if len(main_target_star_type) == 1:\n if main_target_star_type[0] != \"target\":\n error_dict[name].append(\"main_target '\" + fov.main_target +\n \"' is in star list once but not as type 'target'.\")\n\n # Ensure Observing styles are valid:\n print(\"Ensure Observing styles are valid.\")\n valid_obs_styles_lower = \\\n [valid_obs_style.lower() for valid_obs_style in VALID_FOV_OBSERVING_STYLES]\n for name in fov_names:\n fov = fd[name]\n if fov.observing_style.lower() not in valid_obs_styles_lower:\n error_dict[name].append(\"invalid obs_style \\'\" + fov.observing_style + \"\\'\")\n\n # Ensure JD, mag, color are consistent (skip standard FOVs):\n print(\"\\n\", 10*\"=\", \" Ensure mutual consistency of: JD, mag, color (skip standard FOVs)\")\n for name in fov_names:\n fov = fd[name]\n if fov.target_type.lower() != \"standard\":\n # Ensure all present with at least 2 values:\n if None in [fov.JD_bright, fov.JD_faint]:\n error_dict[name].append('missing JD')\n if None in [fov.mag_V_bright, fov.mag_V_faint]:\n error_dict[name].append('missing mag.')\n if None in [fov.color_VI_bright, fov.color_VI_faint]:\n error_dict[name].append('missing color.')\n\n # Ensure secondary min values are either all present or all absent:\n all_present = None not in [fov.JD_second, fov.mag_V_second, fov.color_VI_second]\n all_absent = fov.JD_second is None and \\\n fov.mag_V_second is None and \\\n fov.color_VI_second is None\n if not (all_present or all_absent):\n error_dict[name].append('mismatched JD, mag, color (secondary min?).')\n\n # Alert on out-of-spec phases, or non-positive periods (Eclipser-like only):\n print(\"Alert on out-of-spec phases & non-positive periods (Eclipser-like only)\")\n for name in fov_names:\n fov = fd[name]\n if fov.target_type.lower() in 
['eclipser', 'exoplanet']:\n if fov.period <= 0:\n error_dict[name].append(\"PERIOD=\" + '{0:8.3f}'.format(fov.period))\n else:\n # Verify that max JD is reasonable.\n phase_max = ((fov.JD_bright - fov.JD_faint) / fov.period) % 1.0\n if abs(phase_max-0.25) > 0.05 or abs(phase_max-0.75) > 0.05:\n error_dict[name].append('Max phase of ' +\n '{0:.3f}'.format(phase_max) + ' unreasonable.')\n if fov.JD_second is not None:\n phase_second = ((fov.JD_second - fov.JD_faint) / fov.period) % 1.0\n if abs(phase_second-0.5) > 0.02:\n error_dict[name].append('Secondary phase of ' +\n '{0:.3f}'.format(phase_second) + ' unreasonable.')\n\n # Finally, write out all errors, by fov name:\n num_errors = 0\n for name in fov_names:\n num_errors += len(error_dict[name])\n print(str(num_errors) + ' errors found.')\n\n for name in fov_names:\n fov_errors = error_dict[name]\n if len(fov_errors) >= 1:\n print('\\n' + name + ':')\n for error in fov_errors:\n print(4*'' + error)", "def get_kappa_vrc01_aa(self) -> pd.DataFrame:\n return pd.read_feather(self.figure_data_paths.kappa_vrc01_aa_path)", "def getC3DFrameFeats(model, srcVideoPath, onGPU, gpu_id, depth, i):\n # get the VideoCapture object\n cap = cv2.VideoCapture(srcVideoPath)\n \n # if the videoCapture object is not opened then exit without traceback\n if not cap.isOpened():\n print(\"Error reading the video file !!\")\n return None\n \n W, H = (int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)),int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)))\n totalFrames = cap.get(cv2.CAP_PROP_FRAME_COUNT)\n frameCount = 0\n features_current_file = []\n #ret, prev_frame = cap.read()\n assert cap.isOpened(), \"Capture object does not return a frame!\"\n #prev_frame = cv2.cvtColor(prev_frame, cv2.COLOR_BGR2GRAY)\n X = [] # input, initially a list, after first 16 frames converted to ndarray\n # Iterate over the entire video to get the optical flow features.\n while(cap.isOpened()):\n \n ret, curr_frame = cap.read() # H x W x C\n if not ret:\n break\n \n # resize to 180 X 320 and taking centre crop of 112 x 112\n curr_frame = cv2.resize(curr_frame, (W/2, H/2), cv2.INTER_AREA)\n (h, w) = curr_frame.shape[:2]\n # size is 112 x 112 x 3\n curr_frame = curr_frame[(h/2-56):(h/2+56), (w/2-56):(w/2+56), :]\n \n if frameCount < (depth-1): # append to list till first 16 frames\n X.append(curr_frame)\n else: # subsequent frames\n if type(X)==list: # For exactly first 16 frames, convert to np.ndarray \n X.append(curr_frame)\n X = np.stack(X)\n X = np.float32(X)\n X = torch.from_numpy(X)\n if onGPU:\n X = X.cuda(gpu_id)\n else: # sliding the window (taking 15 last frames and append next)\n # Adding a new dimension and concat on first axis\n curr_frame = np.float32(curr_frame)\n curr_frame = torch.from_numpy(curr_frame)\n if onGPU:\n curr_frame = curr_frame.cuda(gpu_id)\n #X = np.concatenate((X[1:], curr_frame[None, :]), axis=0)\n X = torch.cat([X[1:], curr_frame[None, :]])\n \n # TODO: Transpose once, and concat on first axis for subsequent frames\n # passing the matrix X to the C3D model\n # X is (depth, H, W, Ch)\n #input_mat = X.transpose(3, 0, 1, 2) # ch, depth, H, W\n input_mat = X.permute(3, 0, 1, 2) # transpose a 4D torch Tensor\n #input_mat = np.expand_dims(input_mat, axis=0)\n input_mat = input_mat.unsqueeze(0) # expand dims on Tensor\n #input_mat = np.float32(input_mat)\n \n # Convert to Variable\n #input_mat = torch.from_numpy(input_mat)\n input_mat = Variable(input_mat)\n \n # get the prediction after passing the input to the C3D model\n prediction = model(input_mat)\n # convert to numpy vector\n 
prediction = prediction.data.cpu().numpy()\n features_current_file.append(prediction)\n \n frameCount +=1\n if onGPU and (frameCount%1000)==0:\n print \"Video : {} :: Frame : {} / {}\".format((i+1), frameCount, totalFrames)\n\n # When everything done, release the capture\n cap.release()\n #return features_current_file\n return np.array(features_current_file) # convert to (N-depth+1) x 1 x 4096", "def to_vdf(self):\n\t\treturn (vDataFrame(self.name, self.cursor))", "def get_oas_vh12(self) -> pd.DataFrame:\n return pd.read_feather(self.figure_data_paths.oas_vh12_path)", "def df_from_fits(filename, i=1):\n return pd.DataFrame.from_records(fitsio.FITS(filename)[i].read().byteswap().newbyteorder())", "def get_sec_structure_vecs(all_secdf_dict, dst=False):\n # Initialisation\n abst_df = all_secdf_dict['abstract']\n name = abst_df.category.unique()\n secvec_df = pd.DataFrame({'name':name})\n\n # Fill in dataframe cell by cell\n for sec in ['abstract','introduction','background','related_work','methods','results','discussion','conclusion']:\n # for sec in all_secdf_dict: # col\n print(\"Getting\",sec,\"vectors...\")\n col = []\n for field in secvec_df.name: # row\n seckld = all_secdf_dict[sec]\n col.append(seckld[seckld.category==field]['kld'].mean())\n secvec_df[sec] = col\n print(\"... done\")\n \n secvec_df.columns = secvec_df.columns.str.title()\n secvec_df = secvec_df.rename(columns={'Relatedwork':'Related Work', 'Name':'name'})\n\n if dst:\n secvec_df.to_csv(path_or_buf=dst, index=False)\n return secvec_df", "def OD_CR_FAVO_TRS(Dataframe):\n\n Feature_DF = Dataframe.loc[:,['HNAME','OD_CR_FAVO']]\n Feature_DF.loc[:,'OD_CR_FAVO_TRS'] = Feature_DF.loc[:,'OD_CR_FAVO'].pow(-9/2)\n Feature_DF = Feature_DF.loc[:,['HNAME','OD_CR_FAVO_TRS']]\n\n return Feature_DF", "def read_barcodes_per_fov(\n fname: str = None,\n fov: int = None):\n\n try:\n return pd.concat([\n pd.read_hdf(fname, key=\"fov_%d\" % fov) ],\n axis=1)\n except KeyError:\n print(\"barcodes in fov_%d does not exist\" % fov)\n return None", "def OD_CR_FAVT_TRS(Dataframe):\n\n Feature_DF = Dataframe.loc[:,['HNAME','OD_CR_FAVT']]\n Feature_DF.loc[:,'OD_CR_FAVT_TRS'] = Feature_DF.loc[:,'OD_CR_FAVT'].pow(10)\n Feature_DF = Feature_DF.loc[:,['HNAME','OD_CR_FAVT_TRS']]\n\n return Feature_DF", "def GetContourValuesLengthsAndSubContoursByFrame(watershed, allValsByFrame):\n return [\n [sc.cVLS() for sc in scList]\n for scList in GetSubContoursByFrame(watershed, allValsByFrame)\n ]", "def _buildtable(self):\n\n tabrows = []\n\n for i, (expid, exfiles) in enumerate(self._exposure_files.items()):\n specflux_b, specflux_r, specflux_z = [], [], []\n tab = None\n\n if len(exfiles) == 0:\n continue\n\n print(expid)\n for exfile in exfiles:\n print(exfile)\n hdu = fits.open(exfile)\n\n # The following tables are present in the redux sframes and the\n # nightwatch qcframes.\n wave = hdu['WAVELENGTH'].data\n\n # However, in the nightwatch files the wavelength data are a\n # table of size nfiber x nwavelength.\n if self._filetype == 'nightwatch':\n if wave.ndim > 1:\n wave = wave[0]\n\n fluxhead = hdu['FLUX'].header\n fluxdata = hdu['FLUX'].data\n ivardata = hdu['IVAR'].data\n fibermap = hdu['FIBERMAP'].data\n exptime = fluxhead['EXPTIME']\n if not np.all(self._unditherfa['FIBER'] ==\n np.arange(len(self._unditherfa))):\n raise ValueError('weird fiberassign file format!')\n fibermap = self._unditherfa[fibermap['FIBER']]\n\n target_id = fibermap['TARGETID']\n target_ra = fibermap['TARGET_RA']\n target_dec = fibermap['TARGET_DEC']\n fiber = 
fibermap['FIBER']\n objtype = fibermap['OBJTYPE']\n flux_g = fibermap['FLUX_G']\n flux_r = fibermap['FLUX_R']\n flux_z = fibermap['FLUX_Z']\n x, y = [fibermap['FIBERASSIGN_{}'.format(val)] for val in ('X', 'Y')]\n\n camera = fluxhead['CAMERA'][0].upper()\n\n if getattr(self, '_deltara', None) is not None:\n dra = self._deltara[i]*np.ones(len(fiber))\n ddec = self._deltadec[i]*np.ones(len(fiber))\n elif self._dithertype == 'telescope':\n dithra = self._ditherfa['target_ra']\n dithdec = self._ditherfa['target_dec']\n udithra = self._unditherfa['target_ra']\n udithdec = self._unditherfa['target_dec']\n ontarget = ((self._ditherfa['targetid'] ==\n self._unditherfa['targetid']) &\n (self._ditherfa['objtype'] == 'TGT'))\n dfiberra = (dithra-udithra)*np.cos(np.radians(udithdec))*60*60\n dfiberdec = (dithdec-udithdec)*60*60\n if not np.all(self._ditherfa['FIBER'] ==\n np.arange(len(self._ditherfa))):\n raise ValueError('unexpected shape of dither file')\n dfiberra[~ontarget] = np.nan\n dfiberdec[~ontarget] = np.nan\n dfiberra = dfiberra[fiber]\n dfiberdec = dfiberdec[fiber]\n wcs = self.lookup_wcs(fluxhead['MJD-OBS'])\n centralwcs = self._central_wcs\n if (~np.isfinite(centralwcs['cenra'][1]) or\n ~np.isfinite(centralwcs['cendec'][1])):\n raise ValueError('central pointing ra/dec is NaN!')\n dtelra = (wcs['cenra'][1]-centralwcs['cenra'][1])\n dtelra *= np.cos(np.radians(centralwcs['cendec'][1]))\n dteldec = wcs['cendec'][1]-centralwcs['cendec'][1]\n dra = dfiberra + dtelra*60*60\n ddec = dfiberdec + dteldec*60*60\n if np.all(~np.isfinite(dra)):\n print('warning: no good telescope offset for %s' %\n exfile)\n else:\n raise ValueError('not implemented')\n \n for j, fiber_id in enumerate(fiber):\n flux = fluxdata[j]\n ivar = ivardata[j]\n if not np.any(ivar > 0):\n specflux = 0\n specflux_ivar = 0\n else:\n meanivar = np.mean(ivar[ivar > 0])\n mask = ivar > meanivar / 100\n specflux = np.trapz(flux*mask, wave)\n specflux_ivar = 1./np.sum(ivar[mask]**-1)\n # Schlegel: sum over correct wavelengths, all three\n # filters, plus 11 pixel median filter to reject\n # cosmics.\n # will require being better about reading in\n # the spectrographs together.\n tabrows.append((expid, exptime,\n target_id[j], target_ra[j], target_dec[j],\n fiber[j], objtype[j],\n flux_g[j], flux_r[j], flux_z[j],\n specflux, specflux_ivar, camera,\n dra[j], ddec[j],\n x[j], y[j]))\n\n tab = Table(rows=tabrows,\n names=('EXPID', 'EXPTIME',\n 'TARGETID', 'TARGET_RA', 'TARGET_DEC',\n 'FIBER', 'OBJTYPE',\n 'FLUX_G', 'FLUX_R', 'FLUX_Z',\n 'SPECTROFLUX', 'SPECTROFLUX_IVAR', 'CAMERA',\n 'DELTA_X_ARCSEC', 'DELTA_Y_ARCSEC',\n 'XFOCAL', 'YFOCAL'),\n meta={'EXTNAME' : 'DITHER',\n 'TILEID' : '{}'.format(self._tileid)})\n\n return tab", "def _fit(self, df):\n return df", "def get_vac_lines(self):\n\n b_sig = np.where(self.AirglowLines['obs_eint'] > 5)\n bVL = self.air_to_vac(self.AirglowLines['obs_wave'])\n bVL = bVL[b_sig] #nm to A\n self.BlueVacLines = bVL[bVL < 700]\n\n r_sig = np.where(self.AirglowLines['obs_eint'] > 5)\n rVL = self.air_to_vac(self.AirglowLines['obs_wave'])\n rVL = rVL[r_sig] #nm to A\n self.RedVacLines = rVL[rVL > 560]", "def kdf(self) -> pd.DataFrame:\n k = self.k\n df = pd.DataFrame({\"freq\": k})\n df = df.query(\"freq > 0\")\n return df", "def generate_visibilities_from_local_skymodel(skymodel, uvw_baselines):\n model_vis = np.zeros(len(uvw_baselines), dtype=np.dtype(complex))\n for src_entry in skymodel:\n model_vis += visibility.visibilities_for_point_source(\n uvw_baselines=uvw_baselines,\n l=src_entry[0],\n 
m=src_entry[1],\n flux=src_entry[2],\n )\n return model_vis", "def df():\n fs.df()" ]
[ "0.5467637", "0.5411353", "0.534373", "0.52416754", "0.51579815", "0.5038548", "0.49567586", "0.49512663", "0.49279827", "0.4913724", "0.48375818", "0.4798654", "0.47906682", "0.47762877", "0.4746946", "0.4743379", "0.4731285", "0.47206524", "0.47060516", "0.46962562", "0.46680513", "0.46613023", "0.46602118", "0.46542034", "0.4642235", "0.46215624", "0.46059668", "0.46016964", "0.45998457", "0.45989496" ]
0.6620065
0
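
A short usage sketch for the positive document of the record above. The cell line ID below is a hypothetical example value, and the call assumes the LabKey production context used inside `query_data_from_labkey` is reachable.

```python
# Hypothetical usage of query_data_from_labkey from the document above.
# "AICS-13" is an example cell line ID, not taken from the record.
fov_data = query_data_from_labkey(cell_line_id="AICS-13")

# One row per FOV, indexed by FOVId, with pixel scales and read paths.
print(fov_data.shape)
print(fov_data[["Gene", "Protein", "SourceReadPath"]].head())
```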
This function returns a cropped area around an object of interest given the raw data and its corresponding segmentation.
def crop_object(raw, seg, obj_label, isotropic=None): offset = 16 raw = np.pad(raw, ((0, 0), (offset, offset), (offset, offset)), "constant") seg = np.pad(seg, ((0, 0), (offset, offset), (offset, offset)), "constant") _, y, x = np.where(seg == obj_label) if x.shape[0] > 0: xmin = x.min() - offset xmax = x.max() + offset ymin = y.min() - offset ymax = y.max() + offset raw = raw[:, ymin:ymax, xmin:xmax] seg = seg[:, ymin:ymax, xmin:xmax] # Resize to isotropic volume if isotropic is not None: dim = raw.shape (sx, sy, sz) = isotropic # We fix the target scale to 0.135um. Compatible with 40X target_scale = 0.135 output_shape = np.array([ sz / target_scale * dim[0], sy / target_scale * dim[1], sx / target_scale * dim[2]], dtype=np.int) raw = sktrans.resize( image=raw, output_shape=output_shape, preserve_range=True, anti_aliasing=True).astype(np.uint16) seg = sktrans.resize( image=seg, output_shape=output_shape, order=0, preserve_range=True, anti_aliasing=False).astype(np.uint8) seg = (seg == obj_label).astype(np.uint8) return raw, seg else: return None, None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_image_area_to_sample(img):\n\n #TODO: In the paper 'Deep Interactive Object Selection', they calculate g_c first based on the original object instead\n # of the dilated one.\n\n # Dilate the object by d_margin pixels to extend the object boundary\n img_area = np.copy(img)\n img_area = morphology.binary_dilation(img_area, morphology.diamond(D_MARGIN)).astype(np.uint8)\n\n g_c = np.logical_not(img_area).astype(int)\n g_c[np.where(distance_transform_edt(g_c) > D)] = 0\n\n return g_c", "def crop_bounding_box(im, x, y, w, h):\n return im[y:y+h, x:x+w]", "def crop_image(image: np.ndarray) -> np.ndarray:\n # convert image to grayscale and apply blur to reduce noise\n image_gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)\n blurred = cv2.GaussianBlur(image_gray, (3, 3), 0)\n\n # global threshold using Otsu\n # Note: Although unpacking like this results in one of the variables to be unused and makes\n # PyTA heavily depressed, this is standard OpenCV notation.\n # For reference, you may check docs.opencv.org/master/d7/d4d/tutorial_py_thresholding.html\n\n ret1, thresh1 = cv2.threshold(blurred, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)\n\n # invert image color and find contours\n ret2, thresh2 = cv2.threshold(thresh1, 150, 255, cv2.THRESH_BINARY_INV)\n contours, hierarchy = cv2.findContours(thresh2, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n\n # create list of tuples with the contour itself and its arc length\n # then sort by arc length and take the two longest\n cont_len = [(cont, cv2.arcLength(cont, True)) for cont in contours]\n cont_len.sort(key=lambda x: -x[1])\n longest_2 = cont_len[0:2]\n rects = [cv2.boundingRect(tup[0]) for tup in longest_2]\n\n # take the smallest coordinates for the top left corner of rect\n # and largest for the bottom right corner\n min_x0, min_y0, max_x0, max_y0 = rects[0][0], rects[0][1], \\\n rects[0][0] + rects[0][2], \\\n rects[0][1] + rects[0][3]\n min_x1, min_y1, max_x1, max_y1 = rects[1][0], rects[1][1], \\\n rects[1][0] + rects[1][2], \\\n rects[1][1] + rects[1][3]\n min_x = min(min_x0, min_x1) + 1\n min_y = min(min_y0, min_y1) + 1\n max_x = max(max_x0, max_x1) - 1\n max_y = max(max_y0, max_y1) - 1\n\n cropped_img = image[min_y: max_y, min_x: max_x]\n return cropped_img", "def crop(img, boundaries):\n minx, miny, maxx, maxy = boundaries\n return img[miny:maxy, minx:maxx]", "def crop_object_from_image(saving_folder,root_folder_path,root_folder_name,row_info):\n class_name=row_info['class']\n file_id=row_info['file_id']\n img_type=row_info['type']\n xmin=row_info['x_min']\n xmax=row_info['x_max']\n ymin=row_info['y_min']\n ymax=row_info['y_max']\n\n\n origin_img_path=os.path.join(root_folder_path,root_folder_name,img_type,file_id+\".png\")\n crop_img_path=os.path.join(saving_folder,file_id+\"_\"+class_name+\".png\")\n\n origin_img=cv2.imread(origin_img_path)\n crop_img=origin_img[ymin:ymax-1,xmin:xmax-1]\n\n # If width or height only contain 1 pixel, do not crop.\n if xmax-xmin<=2 or ymax-ymin<=2:\n print(\"Only one pixel, pass!\")\n return 0\n # print(origin_img.shape)\n # print(xmin,xmax,ymin,ymax)\n # print(crop_img.shape)\n # print(crop_img_path)\n cv2.imwrite(crop_img_path,crop_img)", "def extract_area(data,box):\n if box is None or box[0] is None or box[1] is None or box[1][0] - box[0][0] == 0 or box[1][1] - box[0][1] == 0:\n box = ((0,0),(10,10));\n area = ut.extract_area(data['frame'],*box,data['uc'],256);\n return area;", "def basic_crop(data):\n return data['crop'];", "def autocrop(hyperspectral_image, segmentation):\n\n #find bounding box of 
mask image\n bbox = scipy.ndimage.measurements.find_objects(segmentation)[0]\n\n #crop image\n hyperspectral_image = hyperspectral_image[bbox[0].start:bbox[0].stop, bbox[1].start:bbox[1].stop, :]\n segmentation = segmentation[bbox[0].start:bbox[0].stop, bbox[1].start:bbox[1].stop]\n\n return hyperspectral_image, segmentation", "def get_cropped_image(normal_path, segment_path):\n normal_img = cv2.imread(normal_path)\n segment_img = cv2.imread(segment_path)\n\n cropped_path = get_masked_image(normal_img, segment_img)\n\n return cropped_path", "def crop(self, image):\n\t\treturn image.copy()[self.ymin:self.ymax,self.xmin:self.xmax]", "def crop_outlined_image(frame: imageType) -> Opt[imageType]:\n largest_contour = get_largest_contour(frame)\n if largest_contour is not None:\n mask = np.zeros(frame.shape, dtype=np.uint8)\n cv2.drawContours(mask, [largest_contour], -1, color=255, thickness=-1) # color = opacity?\n\n # compute its bounding box of pill, then extract the ROI, and apply the mask\n h: int\n w: int\n x: int\n y: int\n (x, y, w, h) = cv2.boundingRect(largest_contour)\n imageROI = cast(imageType, frame[y:y + h, x:x + w])\n maskROI = mask[y:y + h, x:x + w]\n imageROI = cv2.bitwise_and(imageROI, imageROI, mask=maskROI)\n # skew = get_image_skew(frame)\n # if skew > 0: # , need to rotateanticlockwise\n # imageROI = imutils.rotate_bound(imageROI, -skew)\n return imageROI\n else:\n return None", "def cropbox(row):\n if row['Type'] == 'Rectangle':\n cropbox = [row['X'], row['Y'], row['X'] + row['Width'], \n row['Y'] + row['Height']]\n else:\n # damnit I should set up a logger\n print('WARNING: The annotation \"%s\" (index %d) is not a \\\n rectangle!' %(row['Image'], row['Index']))\n cropbox = None\n return cropbox", "def ClippedArea(rectangle):\n _, x0, y0, x1, y1 = rectangle\n clipped_width = max(0, min(width, x1) - max(0, x0))\n clipped_height = max(0, min(height, y1) - max(0, y0))\n return clipped_width * clipped_height", "def filter_crop(grayscale_image):\n # Blurring the image helps with getting a more consistent binary image\n blurred_image = cv2.bilateralFilter(grayscale_image, d=0, sigmaColor=40, sigmaSpace=2)\n binary_image = get_binary_image(blurred_image)\n marked = find_connected_components(binary_image)\n _, all_coords = get_image_objects(marked, 0)\n M, N = grayscale_image.shape\n average_void_intensity = compute_average_void_intensity(grayscale_image, marked, all_coords)\n cc_id = -1\n # Finding the cc id of the centered particle\n for i in range(N/2, -1, -1):\n current_cc = marked[M/2, i]\n if current_cc != -1:\n cc_id = current_cc\n break\n\n filtered_crop = remove_side_objects(grayscale_image, marked, cc_id, average_void_intensity)\n\n return filtered_crop", "def roi(data):\n return data[data.shape[0] // 2 - args.roi : data.shape[0] // 2 + args.roi, data.shape[1] // 2 - args.roi : data.shape[1] // 2 + args.roi]", "def _view_roi(array, original_area_slice, axis):\n axis += 1\n sl = (slice(None),) * axis + original_area_slice[axis:]\n return array[sl]", "def _crop_region(polygons, left, bottom, right, top, precision):\n cropped_polygons = []\n for p in polygons:\n clipped_polys = clipper._chop(p, [top, bottom], 1, 1 / precision)\n # polygon, [cuts], axis, scale\n for cp in clipped_polys[1]:\n result = clipper._chop(cp, [left, right], 0, 1 / precision)\n cropped_polygons += list(result[1])\n return cropped_polygons", "def crop_image(image):\r\n return image[40:-20, :]", "def crop_img(image, bound):\n scale = 1.01 # 1%\n return image.crop((bound.vertices[0].x // scale, 
bound.vertices[0].y // scale,\n int(bound.vertices[2].x * scale), int(bound.vertices[2].y) * scale))", "def _seg_image(self, x, y, r_cut=100):\n snr=self.snr\n npixels=self.npixels\n bakground = self.bakground\n error= self.bkg_rms(x,y,r_cut)\n kernel = self.kernel\n image_cutted = self.cut_image(x,y,r_cut)\n image_data = image_cutted\n threshold_detect_objs=detect_threshold(data=image_data, nsigma=snr,error=error)\n segments=detect_sources(image_data, threshold_detect_objs, npixels=npixels, filter_kernel=kernel)\n segments_deblend = deblend_sources(image_data, segments, npixels=npixels,nlevels=10)\n segments_deblend_info = source_properties(image_data, segments_deblend)\n nobjs = segments_deblend_info.to_table(columns=['id'])['id'].max()\n xcenter = segments_deblend_info.to_table(columns=['xcentroid'])['xcentroid'].value\n ycenter = segments_deblend_info.to_table(columns=['ycentroid'])['ycentroid'].value\n image_data_size = np.int((image_data.shape[0] + 1) / 2.)\n dist = ((xcenter - image_data_size) ** 2 + (ycenter - image_data_size) ** 2) ** 0.5\n c_index = np.where(dist == dist.min())[0][0]\n center_mask=(segments_deblend.data==c_index+1)*1 #supposed to be the data mask\n obj_masks = []\n for i in range(nobjs):\n mask = ((segments_deblend.data==i+1)*1)\n obj_masks.append(mask)\n xmin = segments_deblend_info.to_table(columns=['bbox_xmin'])['bbox_xmin'].value\n xmax = segments_deblend_info.to_table(columns=['bbox_xmax'])['bbox_xmax'].value\n ymin = segments_deblend_info.to_table(columns=['bbox_ymin'])['bbox_ymin'].value\n ymax = segments_deblend_info.to_table(columns=['bbox_ymax'])['bbox_ymax'].value\n xmin_c, xmax_c = xmin[c_index], xmax[c_index]\n ymin_c, ymax_c = ymin[c_index], ymax[c_index]\n xsize_c = xmax_c - xmin_c\n ysize_c = ymax_c - ymin_c\n if xsize_c > ysize_c:\n r_center = np.int(xsize_c)\n else:\n r_center = np.int(ysize_c)\n center_mask_info= [center_mask, r_center, xcenter, ycenter, c_index]\n return obj_masks, center_mask_info, segments_deblend", "def crop_image(img):\n gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)\n _, threshed = cv.threshold(gray, 240, 255, cv.THRESH_BINARY_INV)\n\n kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (11, 11))\n morphed = cv.morphologyEx(threshed, cv.MORPH_CLOSE, kernel)\n\n cnts = cv.findContours(morphed, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)[-2]\n cnt = sorted(cnts, key=cv.contourArea)[-1]\n\n x, y, w, h = cv.boundingRect(cnt)\n dst = img[y:y+h, x:x+w]\n cv.imwrite(\"001.png\", dst)\n return dst", "def crop(self):\n\n return self.image.crop(self.x, self.y, self.width(), self.height(), centered = True)", "def cropCircleROI(image, additionalCut = 5):\n Rmin = np.min(image.shape[:-1])/3\n Rmin = 1250 / 3040 * image.shape[0]\n Rmax = 1400 / 3040 * image.shape[0]\n\n #downscale image for better performance\n reduceFactor = 5 # squared\n hough_radii = np.arange(Rmin/reduceFactor, Rmax/reduceFactor, dtype = int)\n\n downSampledImage = block_reduce(cv2.cvtColor(image, cv2.COLOR_BGR2GRAY), block_size = (reduceFactor, reduceFactor), func = np.max)\n downSampledEdges = canny(downSampledImage, sigma=3, low_threshold=5, high_threshold=10)\n\n hough_res = hough_circle(downSampledEdges, hough_radii)\n downSampledCircle = np.unravel_index(np.argmax(hough_res, axis=None), hough_res.shape)\n circle = np.array([downSampledCircle[1], downSampledCircle[2], hough_radii[downSampledCircle[0]]])*reduceFactor\n\n circleMask_ = cv2.circle(np.ones(image.shape[:-1],dtype = \"uint8\"), (circle[1], circle[0]), circle[2]-additionalCut, 0, thickness = -1)\n\n return 
[np.ma.array(image[:,:,i], mask = circleMask_) for i in range (image.shape[2])]", "def doCrop(image, x, y, w, h):\n\tcrop_height = int((config.FACE_HEIGHT / float(config.FACE_WIDTH)) * w)\n\tmidy = y + h/2\n\ty1 = max(0, midy-crop_height/2)\n\ty2 = min(image.shape[0]-1, midy+crop_height/2)\n\treturn image[y1:y2, x:x+w]", "def clip(in_file, out_file, area_file):\n bb = load_bb(area_file)\n in_las = laspy.file.File(in_file, mode='r')\n out_las = laspy.file.File(out_file, mode='w', header=in_las.header)\n\n inside = (in_las.x > bb.x_min) & (in_las.x < bb.x_max) & (in_las.y > bb.y_min) & (in_las.y < bb.y_max)\n out_las.points = in_las.points[inside]\n out_las.close()", "def central_area_crop(imgs_array, crop_size=(144, 192, 160)):\n orig_shape = np.array(imgs_array.shape)\n crop_shape = np.array(crop_size)\n center = orig_shape // 2\n lower_limits = center - crop_shape // 2 # (13, 24, 40) (5, 24, 40)\n upper_limits = center + crop_shape // 2 # (141, 216, 200) (149, 216, 200)\n # upper_limits = lower_limits + crop_shape\n imgs_array = imgs_array[lower_limits[0]: upper_limits[0],\n lower_limits[1]: upper_limits[1], lower_limits[2]: upper_limits[2]]\n return imgs_array", "def reconstruct_3D(*arg):\t\n\n\tuncroppedImage = None\n\tif (len(arg)==2):\n\t\treturn \"You need to supply the bounding box coordinates\"\n\telif (len(arg)==3):\n\t\timage_data, image_header = load(arg[0])\n\t\timageSegmentation_data, imageSegmentation_header = load(arg[1])\n\t\tCoords = arg[2]\n\t\toriginalSize = image_data.shape\n\t\tif (len(image_data.shape)==3) & (all(np.greater(image_data.shape,imageSegmentation_data.shape))):\n\t\t\tif (Coords[0]<originalSize[0]) & (Coords[1]<originalSize[0]) & (Coords[2]<originalSize[1]) & (Coords[3]<originalSize[1]) & (Coords[4]<originalSize[2]) & (Coords[5]<originalSize[2]): \n\t\t\t\tuncroppedImage = np.zeros(originalSize)\n\t\t\t\tuncroppedImage[Coords[0]:Coords[1],Coords[2]:Coords[3],Coords[4]:Coords[5]] = imageSegmentation_data\n\t\t\t\treturn uncroppedImage\n\t\t\telse:\n\t\t\t\treturn \"Original size is smaller than the supplied coordenates\"\n\t\t\t\treturn uncroppedImage\n\t\telse:\n\t\t\tprint \"the original image is smaller than the segmentation.\"\n\t\t\tprint \"The array or coordenates do not have 3 dimensions\"\n\t\t\treturn uncroppedImage\n\telif (len(arg)<2):\n\t\tprint \"Not enough arguments.\"\n\telif (len(arg)>3):\n\t\tprint \"Too many arguments.\"\n\tcroppedImage = None", "def crop(img: 'np.ndarray', x: int, y: int, width: int, height: int) -> 'np.ndarray':\n return img[y:y+height, x:x+width]", "def crop_image(self):\n\n image_data = Image.open(self.img_path)\n return image_data.crop(self.data_type)", "def segment_and_find_positions(self):\n initial_image = self.data\n xdim = self.data.shape[0]\n\n ydim = self.data.shape[1]\n downsized_image = transform.resize(\n initial_image,\n (xdim / DOWNSCALING_FACTOR, ydim / DOWNSCALING_FACTOR),\n mode=\"constant\",\n )\n rescaled_image = exposure.rescale_intensity(downsized_image)\n print(\"Starting Canny filtering\")\n g_edges = skimage.feature.canny(\n rescaled_image,\n sigma=self.canny_sigma,\n low_threshold=self.canny_low_threshold,\n )\n print(\"Starting dilation\")\n dilation = morphology.dilation(g_edges, morphology.disk(3))\n print(\"Starting erosion\")\n eroded = morphology.erosion(dilation, morphology.disk(4))\n dilation = morphology.dilation(\n eroded, morphology.diamond(4)\n ) # Dont change to disk\n print(\"Starting to remove small holes\")\n filled = morphology.remove_small_holes(\n dilation, 
area_threshold=self.remove_small_holes_area_threshold\n )\n print(\"Starting erosion\")\n eroded = morphology.erosion(filled, morphology.diamond(3))\n print(\"Applying filters\")\n filtered_image = eroded\n if self.colony_filters_dict is not None:\n for filter_name in self.colony_filters_dict.keys():\n filtered_image = segmentation_filters.apply_filter(\n filter_name, filtered_image, self.colony_filters_dict[filter_name]\n )\n\n colony_edges = morphology.dilation(feature.canny(filtered_image, 0.01))\n print(\"Starting outlining\")\n outline = downsized_image.copy()\n outline[colony_edges] = 65535\n distance = ndimage.distance_transform_edt(filtered_image)\n smoothed_well = ndimage.gaussian_filter(downsized_image, 0.35)\n outline.copy()\n objs, num_objs = ndimage.label(filtered_image)\n print(\"Applying filters for points\")\n if self.mode == \"A\":\n # point selection: Smoothest point in the center region\n for obj in range(1, num_objs + 1):\n print(\"On object {} of {}\".format(obj, num_objs))\n mask = objs == obj\n dist_mask = distance * mask\n # for each colony,\n # find the maximum distance from the two fold distance map.\n # The edge is at 0% and the center of the colony is at 100%\n d_max = dist_mask.max()\n # Getting the points which is at least 40% away from the edge\n top_percent = dist_mask > (d_max * 0.40)\n colony_mask = smoothed_well * top_percent\n colony_edges = feature.canny(colony_mask, 0.1)\n # applying the second distance transform\n # to find the smoothest point in the correct region\n inner_edges = ndimage.distance_transform_edt(\n ~colony_edges * top_percent\n )\n smooth_point = numpy.where(inner_edges == inner_edges.max())\n smooth_point = (smooth_point[0][0], smooth_point[1][0])\n smooth_point_corrected = (\n smooth_point[0] * DOWNSCALING_FACTOR,\n smooth_point[1] * DOWNSCALING_FACTOR,\n )\n self._point_locations.append(smooth_point_corrected)\n elif self.mode == \"C\":\n for obj in range(1, num_objs + 1):\n print(\"On object {} of {}\".format(obj, num_objs))\n mask = objs == obj\n dist_mask = distance * mask\n # point selection: edge, ridge & center respectively\n self.get_mode_c_points(dist_mask, 0, 0.03)\n self.get_mode_c_points(dist_mask, 0.15, 0.20)\n self.get_mode_c_points(dist_mask, 0.90, 0.99)" ]
[ "0.6491899", "0.6448897", "0.64305854", "0.63968277", "0.6331116", "0.62781453", "0.625253", "0.6224297", "0.6217467", "0.61932373", "0.6190705", "0.61787575", "0.61554366", "0.61271375", "0.6112032", "0.61013305", "0.60321337", "0.5992562", "0.5986119", "0.5948971", "0.5918662", "0.59090436", "0.5902662", "0.5901158", "0.5891818", "0.588659", "0.5882802", "0.5871927", "0.5871847", "0.5871189" ]
0.67294097
0
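
A brief usage sketch for the `crop_object` document above, under stated assumptions: `raw` and `seg` are matching 3-D ZYX arrays, label 1 marks the object of interest, and the pixel scales are hypothetical example values in microns per pixel. The function's own dependencies (numpy, skimage.transform) are assumed to be in scope.

```python
import numpy as np

# Synthetic inputs just for illustration: a 16-bit ZYX stack and a matching
# segmentation with a single fake labeled object.
raw = np.random.randint(0, 65535, size=(32, 256, 256), dtype=np.uint16)
seg = np.zeros(raw.shape, dtype=np.uint8)
seg[10:20, 100:150, 100:150] = 1

# isotropic=(sx, sy, sz) in microns per pixel; the values here are examples.
crop_raw, crop_seg = crop_object(raw, seg, obj_label=1, isotropic=(0.108, 0.108, 0.29))
if crop_raw is not None:
    print(crop_raw.shape, crop_seg.shape)  # resized toward the 0.135 um/px target
```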
Get an existing AdminRoleCustom resource's state with the given name, id, and optional extra properties used to qualify the lookup.
def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None, description: Optional[pulumi.Input[str]] = None, label: Optional[pulumi.Input[str]] = None, permissions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'AdminRoleCustom': opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = _AdminRoleCustomState.__new__(_AdminRoleCustomState) __props__.__dict__["description"] = description __props__.__dict__["label"] = label __props__.__dict__["permissions"] = permissions return AdminRoleCustom(resource_name, opts=opts, __props__=__props__)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get(self, uuid):\n logger.info(\"Get a specific role by Id\", data=uuid)\n\n role = Role.query.get(uuid)\n return role_schema.jsonify(role)", "def get_custom_states(self, *args, **kwargs):\n pass", "def get_role(self, role_id):\n raise exception.NotImplemented() # pragma: no cover", "async def fetch(cls, id: Union[str, int]) -> Optional[\"Role\"]:\n query = \"\"\"SELECT * FROM roles WHERE id = $1;\"\"\"\n role = await cls.pool.fetchrow(query, int(id))\n\n if role is not None:\n role = cls(**role)\n\n return role", "def get_by_id(cls, name):\n\t\treturn super(Locality, cls).get_by_id(cls.normalized_name(name))", "def get(self, pk=None, **kwargs):\n if kwargs.pop('include_debug_header', True):\n debug.log('Getting the role record.', header='details')\n data, self.endpoint = self.data_endpoint(kwargs)\n response = self.read(pk=pk, fail_on_no_results=True,\n fail_on_multiple_results=True, **data)\n item_dict = response['results'][0]\n self.configure_display(item_dict)\n return item_dict", "def get(self, role_id):\n # Right now the only way is to list them all, then iterate.\n # Perhaps a filter or new endpoint would be useful here.\n roles = self.list()\n for role in roles:\n if role.id == role_id:\n return role\n raise exc.HTTPNotFound()", "def get(self, id, timeout=None):\n req = RoleGetRequest()\n\n req.id = (id)\n tries = 0\n plumbing_response = None\n while True:\n try:\n plumbing_response = self.stub.Get(\n req,\n metadata=self.parent.get_metadata('Roles.Get', req),\n timeout=timeout)\n except Exception as e:\n if self.parent.shouldRetry(tries, e):\n tries += 1\n self.parent.jitterSleep(tries)\n continue\n raise plumbing.convert_error_to_porcelain(e) from e\n break\n\n resp = models.RoleGetResponse()\n resp.meta = plumbing.convert_get_response_metadata_to_porcelain(\n plumbing_response.meta)\n resp.role = plumbing.convert_role_to_porcelain(plumbing_response.role)\n resp.rate_limit = plumbing.convert_rate_limit_metadata_to_porcelain(\n plumbing_response.rate_limit)\n return resp", "def get(self, id):\n return Role.query.filter(Role.id == id).one()", "def test_get_by_id(self):\n actual = chef_role.get_by_id(self.role_id)\n eq_(actual['chef_role_name'], self.role_name)", "def get(self, role_id):\n return self.client.get_role(role_id)", "def get_role(role_id):\n\tsession = get_session()\n\tresponse = session.get(\"{url}/api/roles/{role_id}\".format(url=get_registry_url(), role_id=role_id))\n\treturn response.json()", "def find_role(self, *args, **kwargs):\n raise NotImplementedError", "def __init__(__self__,\n resource_name: str,\n args: AdminRoleCustomArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "async def get_role(request, role_id):\n conn = await create_connection()\n\n head_block = await utils.get_request_block(request)\n role_resource = await roles_query.fetch_role_resource(conn, role_id)\n conn.close()\n return await utils.create_response(conn, request.url, role_resource, head_block)", "def test_get_by_name(self):\n actual = chef_role.get_by_name(self.role_name)\n eq_(actual['chef_role_id'], self.role_id)", "def get_role_by_id(self, role_id):\n try:\n role = self.db_handler.get_role_by_id(role_id)\n\n self.logger.write_to_log('got role by id', 'model')\n return role\n except Exception as err:\n method_name = sys._getframe().f_code.co_name\n\n self.logger.write_to_log('exception', 'model')\n self.logger.write_to_err_log(f'exception in method {method_name} - {err}', 'model')", "def get_role(self, name):\n role = Role.query.filter_by(name=name).first()\n\n return 
role", "def get_with_inventory(self, context, id_):\n try:\n db_resource_mgr_data = self.db_api.get_resource_manager(\n context, id_)\n db_props_data = self.db_api.get_resource_mgr_properties(context,\n id_, key=eon_const.RESOURCE_MGR_STATE_KEY)\n\n driver_obj = driver.load_resource_mgr_driver(\n db_resource_mgr_data['type'])\n inventory = driver_obj.get_inventory(db_resource_mgr_data)\n resource_mgr_data = _make_response(db_resource_mgr_data,\n property_list=db_props_data,\n inventory=inventory)\n LOG.debug(\"[%s] Resource data %s\"\n % (id_, logging.mask_password(resource_mgr_data)))\n return resource_mgr_data\n\n except exception.NotFound as e:\n LOG.error(e)\n raise e\n except Exception as e:\n msg = \"Error retrieving the 'resource':%s. Reason: %s\" % (\n id_, e.message)\n LOG.exception(msg)\n raise exception.RetrieveException(e.message)", "def load_custom_states(self, states, *args, **kwargs):\n pass", "async def get_role(self, guild: discord.Guild, create: bool = False, updatedb: bool = True) -> discord.Role | None:\n # Create role if necessary or return None since no role id\n if self.role is None:\n return await self.create_role(guild, updatedb=updatedb) if create else None\n\n # Try to find role in cache\n if not (role := guild.get_role(self.role)):\n return await self.create_role(guild, updatedb=updatedb) if create else None\n return role", "def get_role(resource_root, service_name, name, cluster_name=\"default\"):\n return _get_role(resource_root, _get_role_path(cluster_name, service_name, name))", "def get_cached_role(self):\n cache = self.get_cache()\n if cache.disabled:\n return self\n roles = cache.get(self.ROLES_BY_ID)\n if roles is None or self.id not in roles:\n self.update_cache()\n roles = cache.get(self.ROLES_BY_ID)\n return roles.get(self.id, self)", "def get_role(role_id: int) -> Optional[Role]:\n return db.session.query(Role).get(role_id)", "def get_role(role_id):\n\n \"\"\"\n example\n role_id = 3409643000000026005\n \"\"\"\n\n # Get instance of RolesOperations Class\n roles_operations = RolesOperations()\n\n # Call get_role method that takes role_id as parameter\n response = roles_operations.get_role(role_id)\n\n if response is not None:\n\n # Get the status code from response\n print('Status Code: ' + str(response.get_status_code()))\n\n if response.get_status_code() in [204, 304]:\n print('No Content' if response.get_status_code() == 204 else 'Not Modified')\n return\n\n # Get object from response\n response_object = response.get_object()\n\n if response_object is not None:\n\n # Check if expected ResponseWrapper instance is received.\n if isinstance(response_object, ResponseWrapper):\n\n # Get the list of obtained Role instances\n roles_list = response_object.get_roles()\n\n for role in roles_list:\n # Get the DisplayLabel of each Role\n print(\"Role DisplayLabel: \" + role.get_display_label())\n\n # Get the forecastManager User instance of each Role\n forecast_manager = role.get_forecast_manager()\n\n # Check if forecastManager is not None\n if forecast_manager is not None:\n\n # Get the ID of the forecast Manager\n print(\"Role Forecast Manager User-ID: \" + str(forecast_manager.get_id()))\n\n # Get the name of the forecast Manager\n print(\"Role Forecast Manager User-Name: \" + forecast_manager.get_name())\n\n # Get the ShareWithPeers of each Role\n print(\"Role ShareWithPeers: \" + str(role.get_share_with_peers()))\n\n # Get the Name of each Role\n print(\"Role Name: \" + role.get_name())\n\n # Get the Description of each Role\n print(\"Role Description: 
\" + role.get_description())\n\n # Get the Id of each Role\n print(\"Role ID: \" + str(role.get_id()))\n\n # Get the reporting_to User instance of each Role\n reporting_to = role.get_reporting_to()\n\n # Check if reporting_to is not None\n if reporting_to is not None:\n # Get the ID of the reporting_to User\n print(\"Role ReportingTo User-ID: \" + str(reporting_to.get_id()))\n\n # Get the name of the reporting_to User\n print(\"Role ReportingTo User-Name: \" + reporting_to.get_name())\n\n # Get the AdminUser of each Role\n print(\"Role AdminUser: \" + str(role.get_admin_user()))\n\n # Check if the request returned an exception\n elif isinstance(response_object, APIException):\n # Get the Status\n print(\"Status: \" + response_object.get_status().get_value())\n\n # Get the Code\n print(\"Code: \" + response_object.get_code().get_value())\n\n print(\"Details\")\n\n # Get the details dict\n details = response_object.get_details()\n\n for key, value in details.items():\n print(key + ' : ' + str(value))\n\n # Get the Message\n print(\"Message: \" + response_object.get_message().get_value())", "def getRoleInfo(self, role):", "def _get_role(self, cfg, no, key_addr, reg_name):\n\n p = self._current_p\n\n if not BinaryDependencyGraph.is_call(p.factory.block(no.addr)):\n return None\n\n # detect the role\n self._plugin_used = None\n f_addr = no.addr\n self._candidate_role_function = no.successors[0].addr\n\n # prepare the under-contrainted-based initial state\n # we do not allow untaint as we just want to see where the key string is leading to\n self._core_taint = coretaint.CoreTaint(p, interfunction_level=2, smart_call=False,\n follow_unsat=True,\n try_thumb=True,\n exit_on_decode_error=True, force_paths=True, allow_untaint=False,\n logger_obj=log)\n\n # the used register is not a parameter register\n if are_parameters_in_registers(p) and p.arch.registers[reg_name][0] not in ordered_agument_regs[p.arch.name]:\n return Role.UNKNOWN\n\n self._current_par_name = reg_name\n self._current_key_addr = key_addr\n s = self._prepare_state(key_addr, f_addr, reg_name)\n summarized_f = self._prepare_function_summaries()\n\n self._core_taint.set_alarm(TIMEOUT_TAINT, n_tries=TIMEOUT_TRIES)\n\n try:\n self._core_taint.run(s, (), (), summarized_f=summarized_f, force_thumb=False,\n check_func=self._check_str_usage, init_bss=False)\n except TimeOutException:\n log.warning(\"Timeout Triggered\")\n except Exception as e:\n log.warning(\"Excption: %s\" % str(e))\n\n self._core_taint.unset_alarm()\n return self._current_role", "def get_role(self, role_id: int, /) -> Optional[Role]:\n return self.guild.get_role(role_id) if self._roles.has(role_id) else None", "def _get_iam_role_property(self, property_name, default_value=None):\n iam_role_config = self._resource_config.get(\"shared_resource\").get(\"iam_role\")\n return iam_role_config.get(property_name, default_value)", "def getRole(self, desired=None):\n return {\"roleName\":\"hasici\",\n \"roleTitle\":\"Soptici\"}" ]
[ "0.5382666", "0.5352287", "0.5249233", "0.5131461", "0.5116712", "0.50955945", "0.5094461", "0.50409466", "0.5030529", "0.4976748", "0.4898603", "0.4892735", "0.48872188", "0.4849793", "0.48462057", "0.4844067", "0.48259544", "0.48186347", "0.4816291", "0.47465008", "0.473041", "0.47022963", "0.4693163", "0.46726686", "0.46678653", "0.46509638", "0.46214187", "0.46058547", "0.4568795", "0.4549415" ]
0.72646344
0
Check whether loss is NaN
def _check_loss(self, loss): assert not np.isnan(loss), "Model diverged with loss = NaN"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def assert_no_nans(x):\n assert not torch.isnan(x).any()", "def is_nan(self):\n \n return self.coeff.is_nan()", "def isNan(x: float) -> bool:\n return x != x", "def pd_isnan(val):\n return val is None or val != val", "def _no_nan(self, feature: np.array) -> bool:\n if not np.any(np.isnan(feature)):\n return True\n else:\n return False", "def mask_nan_keep_loss(y_true, y_pred):\n y_pred, y_true, num_notnan = mask_nan(y_true, y_pred)\n loss = K.sum((K.flatten(y_pred) - K.flatten(y_true)) ** 2) / num_notnan\n return tf.where(~tf.math.is_nan(loss), loss, 0)", "def has_nans(tensor, verbose=True):\n tensor_numpy = tensor.data.cpu().numpy().flatten()\n where_nan = np.argwhere(tensor_numpy != tensor_numpy)\n\n nan_count = len(where_nan)\n nan = nan_count != 0\n\n if verbose and nan:\n print(f\"Encountered {nan_count} NaNs\")\n\n return nan", "def is_nan(x):\n return (x is np.nan or x != x)", "def test_detect_nan():\r\n nan_detected = [False]\r\n\r\n def detect_nan(i, node, fn):\r\n for output in fn.outputs:\r\n if numpy.isnan(output[0]).any():\r\n print '*** NaN detected ***'\r\n theano.printing.debugprint(node)\r\n print 'Inputs : %s' % [input[0] for input in fn.inputs]\r\n print 'Outputs: %s' % [output[0] for output in fn.outputs]\r\n nan_detected[0] = True\r\n break\r\n\r\n x = theano.tensor.dscalar('x')\r\n f = theano.function([x], [theano.tensor.log(x) * x],\r\n mode=theano.compile.MonitorMode(\r\n post_func=detect_nan))\r\n f(0) # log(0) * 0 = -inf * 0 = NaN\r\n assert nan_detected[0]", "def isnan(x):\n return False", "def is_scalar_nan(x):\n return isinstance(x, numbers.Real) and math.isnan(x)", "def _is_nan(self, x: any) -> bool:\n return isinstance(x, float) and math.isnan(x)", "def _autocheck_nan(self):\n # assert np.isnan(self.W).any() == False, \"W matrix should not contain NaN values.\"\n assert np.isnan(self.Win).any() == False, \"Win matrix should not contain NaN values.\"\n if self.Wfb is not None:\n assert np.isnan(self.Wfb).any() == False, \"Wfb matrix should not contain NaN values.\"", "def _check_nan(self, vector):\n return np.isnan(vector).sum() > 0", "def is_nan(self):\r\n return self._real.is_nan() or self._imag.is_nan()", "def test_nan_check(self):\n values_with_nans = np.array([1, 2, 3, np.nan, np.nan])\n\n with LogCapture(\"puma\") as log:\n _ = hist_w_unc(values_with_nans, bins=4)\n log.check(\n (\n \"puma\",\n \"WARNING\",\n \"Histogram values contain 2 nan values!\",\n )\n )", "def ISNA(value):\n return isinstance(value, float) and math.isnan(value)", "def isnan(self):\n return self.isAny( (lambda x: np.isnan(x)) )", "def isnan(data):\n return _make.isnan(data)", "def gdx_isnan(val,gdxf):\n return val in [SPECIAL_VALUES[0], SPECIAL_VALUES[1]]", "def nan_value(data):\n return data.isnull().any()", "def testPluginContainsNan(self):\n schema = self.dataset.makeMinimalSchema()\n task = lsst.meas.base.SingleFrameMeasurementTask(schema=schema, config=self.config)\n exposure, cat = self.dataset.realize(noise=100.0, schema=schema, randomSeed=2)\n source = cat[0]\n exposure.getMaskedImage().getImage().getArray()[int(source.getY()), int(source.getX())] = np.nan\n task.run(cat, exposure)\n self.assertTrue(source.get(self.algName + \"_flag\"))\n self.assertTrue(source.get(self.algName + \"_flag_containsNan\"))\n self.assertFalse(source.get(self.algName + \"_flag_edge\"))", "def is_nan(self, name):\n return self._data[name].isnull()", "def missing_values():\n print('Missings in the train data:', train_data.isnull().sum())", "def is_nan(self, row_data):\n return 
math.isnan(row_data)", "def check_loss(self, loss):\r\n if loss in loss_functions:\r\n return loss\r\n else:\r\n raise InvalidNeuralNetwork()", "def isnan(value: float) -> bool:\n return math.isnan(value)", "def is_no_channel(val) -> bool:\n if isinstance(val, torch.Tensor):\n return bool(torch.isnan(val))\n if isinstance(val, str):\n return val == \"no_channel\"\n if np.isscalar(val):\n return bool(np.isnan(val))\n return val is None", "def checkfornan(chosen_df):\n if not chosen_df.isnull().values.any():\n raise ValueError('NaN in DataFrame')", "def test_nan_error(value, nan_strategy, metric_class):\n metric = metric_class(nan_strategy=nan_strategy)\n if nan_strategy == \"error\":\n with pytest.raises(RuntimeError, match=\"Encounted `nan` values in tensor\"):\n metric(value.clone())\n elif nan_strategy == \"warn\":\n with pytest.warns(UserWarning, match=\"Encounted `nan` values in tensor\"):\n metric(value.clone())" ]
[ "0.7288243", "0.7142308", "0.7045204", "0.7011775", "0.696748", "0.68836856", "0.6839107", "0.6837337", "0.68135375", "0.67958283", "0.6794594", "0.67176294", "0.6694199", "0.66769093", "0.66244996", "0.6558433", "0.65426934", "0.65074795", "0.647086", "0.64383054", "0.64144343", "0.63844705", "0.6359852", "0.6348226", "0.633146", "0.63269293", "0.6322757", "0.62937164", "0.62369317", "0.6207988" ]
0.8565298
0
Add weight decay to the variable
def _add_weight_decay(self, var, wd): wd_loss = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss') tf.add_to_collection(GKeys.LOSSES, wd_loss)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _variable_with_weight_decay(name, shape, stddev, wd):\n\n #var = _variable_on_cpu(name, shape, tf.truncated_normal_initializer(stddev=stddev))\n var = weight_variable(shape)\n if wd is not None:\n weight_decay = tf.mul(tf.nn.l2_loss(var), wd, name='weight_loss')\n tf.add_to_collection('losses', weight_decay)\n return var", "def _variable_with_weight_decay(name, shape, stddev, wd):\n dtype = tf.float16 if FLAGS.use_fp16 else tf.float32\n var = tf.get_variable(name, shape,\n initializer=tf.truncated_normal_initializer(stddev=stddev, dtype=dtype))\n # add weight decay term to 'losses' collection, so the sum of all loss in 'losses' collection\n # will be the total/final loss\n if wd is not None:\n weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')\n tf.add_to_collection('losses', weight_decay)\n return var", "def _variable_with_weight_decay(name, shape, stddev, wd):\n dtype = tf.float16 if FLAGS.use_fp16 else tf.float32\n var = _variable_on_cpu(name, shape, tf.truncated_normal_initializer(stddev=stddev, dtype=dtype))\n\n if wd is not None:\n weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name = 'weight_loss')\n tf.add_to_collection('losses', weight_decay)\n \n return var", "def _variable_with_weight_decay(name, shape, stddev, wd):\n var = _variable_on_cpu(name, shape,\n tf.truncated_normal_initializer(stddev=stddev))\n if wd:\n weight_decay = tf.mul(tf.nn.l2_loss(var), wd, name='weight_loss')\n tf.add_to_collection('losses', weight_decay)\n return var", "def _variable_with_weight_decay_orig(name, shape, stddev, wd):\n dtype = tf.float16 if FLAGS.use_fp16 else tf.float32\n var = _variable_on_cpu(\n name,\n shape,\n tf.truncated_normal_initializer(stddev=stddev, dtype=dtype))\n if wd is not None:\n weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')\n tf.add_to_collection('losses', weight_decay)\n return var", "def _variable_with_weight_decay(self, shape, stddev, wd):\n\n initializer = tf.truncated_normal_initializer(stddev=stddev)\n var = tf.get_variable('weights', shape=shape,\n initializer=initializer)\n\n# if wd and (not tf.get_variable_scope().reuse):\n# weight_decay = tf.mul(tf.nn.l2_loss(var), wd, name='weight_loss')\n# tf.add_to_collection('losses', weight_decay)\n return var", "def weights_decay(self):\n for param_group in self.optimizer.param_groups:\n for param in param_group['params']:\n param.data = param.data.add(-1.*self.weights_decay * param_group['lr'], param.data)", "def _variable_with_weight_decay(name, shape, stddev, wd):\n var = variable(\n name,\n shape,\n initializer=tf.truncated_normal_initializer(stddev=stddev, dtype=tf.float32))\n if wd is not None:\n weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')\n tf.add_to_collection('losses', weight_decay)\n return var", "def _variable_with_weight_decay(name, shape, stddev, wd, use_xavier=True):\n if use_xavier:\n # initializer = tf.contrib.layers.xavier_initializer()\n initializer = tf.initializers.glorot_uniform()\n else:\n initializer = tf.truncated_normal_initializer(stddev=stddev)\n var = _variable_on_cpu(name, shape, initializer)\n if wd is not None:\n weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')\n tf.add_to_collection('losses', weight_decay)\n return var", "def _variable_with_weight_decay(name, shape, stddev, wd):\n dtype = tf.float16 if FLAGS.use_fp16 else tf.float32\n var = _variable_on_cpu(\n name,\n shape,\n tf.truncated_normal_initializer(stddev=stddev, dtype=dtype))\n if wd is not None:\n weight_decay = 
tf.mul(tf.nn.l2_loss(var), wd, name='weight_loss')\n tf.add_to_collection('losses', weight_decay)\n return var", "def _variable_with_weight_decay(name, shape, wd = 0.0):\n var = _variable_on_cpu(name, shape, tf.contrib.layers.xavier_initializer())\n # print(\"change var\")\n # var = tf.Variable(tf.truncated_normal(shape, mean= 0.0, stddev = 1.0), name = name)\n if wd != 0.0:\n weight_decay = tf.mul(tf.nn.l2_loss(var), wd, name='weight_loss')\n tf.add_to_collection('losses', weight_decay)\n return var", "def _variable_with_weight_decay(name, shape, stddev, wd):\n var = _variable_on_cpu(\n name,\n shape,\n tf.truncated_normal_initializer(stddev=stddev, dtype=tf.float32))\n if wd is not None:\n weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')\n tf.add_to_collection('losses', weight_decay)\n return var", "def _decay(self):\n wd_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)\n log.info('Weight decay variables')\n [log.info(x) for x in wd_losses]\n log.info('Total length: {}'.format(len(wd_losses)))\n if len(wd_losses) > 0:\n return tf.add_n(wd_losses)\n else:\n log.warning('No weight decay variables!')\n return 0.0", "def _variable_with_weight_decay(name, shape, stddev, wd):\n dtype = tf.float32\n var = _variable_on_cpu(\n name,\n shape,\n tf.truncated_normal_initializer(stddev=stddev, dtype=dtype))\n if wd is not None:\n weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')\n tf.add_to_collection('losses', weight_decay)\n return var", "def _variable_with_weight_decay(name, shape, stddev, wd):\n dtype = tf.float32\n var = _variable_on_cpu(\n name,\n shape,\n tf.truncated_normal_initializer(stddev=stddev, dtype=dtype))\n if wd is not None:\n weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')\n tf.add_to_collection('losses', weight_decay)\n return var", "def weight_decay(self):\n if self._weight_decay is not None:\n return self._weight_decay\n return 5e-5 if 'VG' in self.dataset else 5e-4", "def _variable_with_weight_decay(self, name, shape, wd):\n var = self._variable_on_device(\n name,\n shape,\n tf.contrib.layers.xavier_initializer_conv2d(uniform=True))\n if wd is not None:\n weight_decay = tf.multiply(tf.nn.l2_loss(var), wd,\n name='weight_loss')\n tf.add_to_collection('losses', weight_decay)\n return var\n\n # We will replicate the model structure for the training subgraph, as well\n # as the evaluation subgraphs, while sharing the trainable parameters.", "def decay_weights(cost, weight_decay_rate):\n costs = []\n for var in tf.trainable_variables():\n costs.append(tf.nn.l2_loss(var))\n cost += tf.multiply(weight_decay_rate, tf.add_n(costs))\n return cost", "def add_to_average(self, value, decay=1.0, weight=1.0):\n decay = tf.cast(decay, dtype=self.dtype)\n weight = tf.cast(weight, dtype=self.dtype)\n\n update_var = smart_assign(self._var, decay * self._var + weight * value)\n\n update_total_weight = smart_assign(self._total_weight,\n decay * self._total_weight + weight)\n\n return tf.group(update_var, update_total_weight)", "def setWeights(self, decay):\n for i in range(15):\n self.model_parts[i].weights = decay ** torch.abs(torch.arange(15.0) - i)", "def weight_decay(norm=2):\n costs = []\n for var in tf.trainable_variables():\n if 'weight' in var.op.name or 'fc' in var.op.name or 'conv' in var.op.name:\n if norm == 1:\n lp_norm_var = tf.reduce_sum(tf.abs(var))\n elif norm == 2:\n lp_norm_var = tf.reduce_sum(tf.square(var))\n else:\n raise ValueError('wrong norm of weight decay')\n costs.append(lp_norm_var)\n return 
tf.add_n(costs)", "def add_weight_decay(model, adjust_per_optimizer=True):\n if adjust_per_optimizer and 'lars' in FLAGS.optimizer:\n # Weight decay are taking care of by optimizer for these cases.\n # Except for supervised head, which will be added here.\n l2_losses = [\n tf.nn.l2_loss(v)\n for v in model.trainable_variables\n if 'head_supervised' in v.name and 'bias' not in v.name\n ]\n if l2_losses:\n return FLAGS.weight_decay * tf.add_n(l2_losses)\n else:\n return 0\n\n # TODO(srbs): Think of a way to avoid name-based filtering here.\n l2_losses = [\n tf.nn.l2_loss(v)\n for v in model.trainable_weights\n if 'batch_normalization' not in v.name\n ]\n loss = FLAGS.weight_decay * tf.add_n(l2_losses)\n return loss", "def _decay(self):\n costs = []\n for var in tf.trainable_variables():\n if var.op.name.find(r'DW') > 0:\n costs.append(tf.nn.l2_loss(var))\n # tf.histogram_summary(var.op.name, var)\n\n return tf.multiply(self.weight_decay_rate, tf.add_n(costs))", "def decay(self):\n self.push_pull_weight_ratio *= self.push_pull_weight_decay", "def __init__(self, weight_decay, **kwargs):\n self._decay_var_list = None # is set in minimize or apply_gradients\n self._weight_decay = weight_decay\n # The tensors are initialized in call to _prepare\n self._weight_decay_tensor = None\n super(DecoupledWeightDecayExtension, self).__init__(**kwargs)", "def get_weight_decay(self):\n if type(self.model.optimizer).__name__ == \"AdamWeightDecay\":\n return self.model.optimizer.weight_decay_rate\n else:\n return None", "def variable_with_weight_decay(kernel_shape, initializer, wd):\n w = tf.get_variable(name=\"weights\", shape=kernel_shape, dtype=tf.float32, initializer=initializer)\n\n collection = tf.GraphKeys.REGULARIZATION_LOSSES\n if wd and (not tf.get_variable_scope().reuse):\n weight_decay = tf.multiply(tf.nn.l2_loss(w), wd, name=\"w_loss\")\n tf.add_to_collection(collection, weight_decay)\n variable_summaries(w)\n return w", "def variable_with_weight_decay(kernel_shape, initializer, wd):\n w = tf.get_variable('weights', kernel_shape, tf.float32, initializer=initializer)\n\n collection_name = tf.GraphKeys.REGULARIZATION_LOSSES\n if wd and (not tf.get_variable_scope().reuse):\n weight_decay = tf.multiply(tf.nn.l2_loss(w), wd, name='w_loss')\n tf.add_to_collection(collection_name, weight_decay)\n variable_summaries(w)\n return w", "def set_weight_decay(self, wd=U.DEFAULT_WD):\n self._recompile(wd=wd)\n return", "def _add_weight_decay(net, l2_value, skip_list=()):\n decay, no_decay = [], []\n for name, param in net.named_parameters():\n if not param.requires_grad:\n continue # frozen weights\n if len(param.shape) == 1 or name.endswith(\".bias\") or name in skip_list:\n no_decay.append(param)\n else:\n decay.append(param)\n return [{'params': decay, 'weight_decay': l2_value}, {'params': no_decay, 'weight_decay': 0.}, ]" ]
[ "0.73574615", "0.7281516", "0.72360826", "0.7235467", "0.7189649", "0.7180601", "0.716254", "0.7162298", "0.7158315", "0.71548766", "0.7137905", "0.7137743", "0.71284556", "0.708134", "0.708134", "0.70520884", "0.7029988", "0.6992265", "0.6888524", "0.6865175", "0.6782144", "0.6753364", "0.67404354", "0.67325836", "0.6630841", "0.6625631", "0.6624412", "0.66033155", "0.6595134", "0.6551788" ]
0.7537218
0
Compute the total loss value from the given collections
def _total_loss(self, collections=None, name=None): if collections is None: collections = [GKeys.LOSSES] loss_vars = [] for key in collections: loss_vars.extend(tf.get_collection(GKeys.LOSSES)) total_loss = tf.add_n(loss_vars, name=name) return total_loss
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compute_loss(self):", "def loss_total(self):\r\n def loss(y_true, y_pred):\r\n l2 = 1/2*K.sum(K.square(y_true-y_pred))\r\n\r\n return l2\r\n return loss", "def get_loss(self):\n return self.loss / self.cnt", "def sum_factors(self) -> float:\n return sum([x for x in self._loss_dict.values()])", "def loss(self):\n return la.norm(self.resids) / self.normX", "def compute_loss(self, x, gt):\n loss = sum([torch.mean((out - gt)**2) for out in self.forward(x)])\n return loss", "def compute_loss(self, obs, returns):", "def _get_total_variational_loss(self, content):\n return tf.reduce_sum(tf.image.total_variation(content))", "def loss(self):\n return self._loss", "def loss_op(self):\n return self.loss", "def calc_loss(self, outputs, labels):\n information_loss = self.bottleneck.buffer_capacity.mean() # Taking the mean is equivalent of scaling with 1/K\n cross_entropy = F.cross_entropy(outputs, target=labels)\n total = cross_entropy + self.beta * information_loss\n self.ce_loss.append(cross_entropy.cpu().detach().numpy())\n self.info_loss.append(information_loss.cpu().detach().numpy())\n self.total_loss.append(total.cpu().detach().numpy())\n return total", "def compute_loss(self):\n self.prototypes = self.compute_prototypes()\n self.test_logits = self.compute_logits()\n loss = tf.nn.sparse_softmax_cross_entropy_with_logits(\n labels=self.episode.test_labels, logits=self.test_logits)\n cross_entropy_loss = tf.reduce_mean(loss)\n regularization = tf.reduce_sum(\n tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))\n loss = cross_entropy_loss + self.weight_decay * regularization\n return loss", "def reduce_loss(self, all_loss):\n if self._gpu_num == 1:\n total_loss = all_loss[0]\n else:\n layer_loss = [all_loss[j] for j in range(self._gpu_num)]\n total_loss = tf.reduce_mean(layer_loss)\n\n return total_loss", "def compute_loss(self):\n self.test_logits = self.compute_logits()\n loss = tf.nn.sparse_softmax_cross_entropy_with_logits(\n labels=self.data.test_labels, logits=self.test_logits)\n cross_entropy_loss = tf.reduce_mean(loss)\n regularization = tf.reduce_sum(\n tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))\n loss = cross_entropy_loss + self.weight_decay * regularization\n return loss", "def computeLoss(self):\n return sum(np.arccosh(-minkowskiArrayDot(self.examples, self.centroid)) ** 2)[0] / np.shape(self.examples)[0]", "def loss(self):\n if not self.run:\n self._run()\n return self.model_loss", "def get_probability_loss(self):\n return sum(self._loss)/len(self._loss)", "def _get_loss_weight(self) -> torch.Tensor:\n n_pos: torch.Tensor = 0.0\n n_neg: torch.Tensor = 0.0\n\n for _, ground_truth in self.train_loader:\n n_poss_curr = ground_truth.sum()\n n_pos += n_poss_curr\n n_neg += ground_truth.numel() - n_poss_curr\n\n eps = torch.finfo(n_pos.dtype).eps\n return n_neg / (n_pos + eps)", "def loss(self) -> KernelLoss:\n return self._loss", "def calculate_total_loss(self, train_x, train_y):\n return np.sum([self.calculate_error(x, y)\n for x, y in zip(train_x, train_y)])", "def cost(lossvalues):\n return np.sum(lossvalues ** 2) / (2 * lossvalues.shape[1])", "def envisaged_loss(self):\n loss = round(\n self.calcul_buy_nb_action() * self.stop_loss - self.investment_price(),\n 2,\n )\n percent_loss = round(loss * 100 / self.capital, 2)\n return loss, percent_loss", "def calculate_loss(self, output, batch):\n\n detailed_loss = {}\n for loss_func_key, this_loss_func, weight in self.loss_funcs:\n this_loss = this_loss_func(output, batch) * weight\n detailed_loss[loss_func_key] = 
this_loss\n loss = sum(detailed_loss.values())\n return loss, detailed_loss", "def _calc_loss(self, fvs, labels, w, b):\n\n loss = 0.5 * self.lda * (np.linalg.norm(w) ** 2)\n tmp = sum(map(lambda x, y: (x - y) ** 2, fvs.dot(w) + b, labels))\n loss += tmp / fvs.shape[0]\n\n return loss", "def loss(self):\n return self._get(\"loss\")", "def get_current_loss(self):\n return sum(self.recent_loss_array)/sum(self.recent_loss_bs_array)", "def evaluate_loss(net, data_iter, loss): #@save\n metric = d2l.Accumulator(2) # Sum of losses, no. of examples\n for X, y in data_iter:\n l = loss(net(X), y)\n metric.add(d2l.reduce_sum(l), d2l.size(l))\n return metric[0] / metric[1]", "def loss_(self, batch):\n raise NotImplementedError", "def calc_loss(predictions, labels):\n return np.mean(np.square(predictions - labels))", "def _compute_loss(self, parameters, inputs, ground_truth):\n predictions = self.network_forward(parameters, inputs)\n loss = np.mean((ground_truth - predictions) ** 2)\n return loss" ]
[ "0.7515837", "0.74702716", "0.72111726", "0.71222854", "0.7107367", "0.6941016", "0.6920439", "0.6903721", "0.6892803", "0.6834876", "0.6811724", "0.680063", "0.67770433", "0.6751744", "0.67042065", "0.66952884", "0.6648413", "0.661181", "0.6596353", "0.65950805", "0.6591228", "0.6552497", "0.6551396", "0.6532303", "0.65301144", "0.650853", "0.6489241", "0.6486489", "0.64517814", "0.6442314" ]
0.7766601
0
Flatten tensor to shape [-1, size]
def _flatten(self, inputT, size): return tf.reshape(inputT, (-1, size))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def flatten(x_tensor):\n # TODO: Implement Function\n b, w, h, d = x_tensor.get_shape().as_list()\n img_size = w * h * d\n return tf.reshape(x_tensor, [-1, img_size])", "def flatten(x):\n all_dims_exc_first = np.prod([v.value for v in x.get_shape()[1:]])\n o = tf.reshape(x, [-1, all_dims_exc_first])\n return o", "def batch_flatten(x):\n shape = x.get_shape().as_list()[1:]\n if None not in shape:\n return tf.reshape(x, [-1, int(np.prod(shape))])\n return tf.reshape(x, tf.stack([tf.shape(x)[0], -1]))", "def batch_flatten(x):\n shape = x.get_shape().as_list()[1:]\n if None not in shape:\n return tf.reshape(x, [-1, int(np.prod(shape))])\n return tf.reshape(x, tf.stack([tf.shape(x)[0], -1]))", "def flatten(x_tensor):\n old_shape = x_tensor.get_shape().as_list()\n new_shape = [-1, old_shape[1] * old_shape[2] * old_shape[3]]\n return tf.reshape(x_tensor, new_shape)", "def flatten(x_tensor):\n import numpy as np\n #print(x_tensor)\n\n shape = x_tensor.get_shape().as_list() # a list: [None, height, width, channels]\n dim = np.prod(shape[1:]) # dim = prod(height,width,channels) \n flattened_tensor = tf.reshape(x_tensor, [-1, dim]) # -1 means \"all\"\n #print(flattened_tensor)\n return flattened_tensor", "def layer_flatten(x_tensor):\n return tf.reshape(\n x_tensor,\n [-1, (x_tensor.shape[1] * x_tensor.shape[2] * x_tensor.shape[3]).value]\n )", "def batch_flatten(this,x):\n shape = x.get_shape().as_list()[1:]\n if None not in shape:\n return tf.reshape(x, [-1, int(np.prod(shape))])\n return tf.reshape(x, tf.stack([tf.shape(x)[0], -1]))", "def batch_flatten(this,x):\n shape = x.get_shape().as_list()[1:]\n if None not in shape:\n return tf.reshape(x, [-1, int(np.prod(shape))])\n return tf.reshape(x, tf.stack([tf.shape(x)[0], -1]))", "def flatten(tensor):\n C = tensor.size(1)\n # new axis order\n axis_order = (1, 0) + tuple(range(2, tensor.dim()))\n # Transpose: (N, C, D, H, W) -> (C, N, D, H, W)\n transposed = tensor.permute(axis_order)\n # Flatten: (C, N, D, H, W) -> (C, N * D * H * W)\n return transposed.contiguous().view(C, -1)", "def flatten(x, name=\"flatten\"):\n all_dims_exc_first = np.prod([v.value for v in x.get_shape()[1:]])\n o = tf.reshape(x, [-1, all_dims_exc_first], name=name)\n return o", "def flatten(tensor):\n # number of channels\n C = tensor.size(1)\n # new axis order\n axis_order = (1, 0) + tuple(range(2, tensor.dim()))\n # Transpose: (N, C, D, H, W) -> (C, N, D, H, W)\n transposed = tensor.permute(axis_order)\n # Flatten: (C, N, D, H, W) -> (C, N * D * H * W)\n return transposed.contiguous().view(C, -1)", "def _flatten(prev_layer):\n\n with tf.name_scope('flatten'):\n shape = int(np.prod(prev_layer.get_shape()[1:]))\n return tf.reshape(prev_layer, [-1, shape])", "def flatten(x_tensor):\n # TODO: Implement Function\n return tf.contrib.layers.flatten(x_tensor)", "def flatten(self, x_tensor):\n shape = x_tensor.get_shape().as_list()\n assert len(shape) >= 4, 'shape of image is not correct'\n single_image_dimension = shape[1] * shape[2] * shape[3]\n x_tensor = tf.reshape(x_tensor, [-1, single_image_dimension])\n return x_tensor\n # return tf.contrib.layers.flatten(x_tensor)", "def flatten(x_tensor):\n with tf.name_scope('input_reshape'):\n x = x_tensor.get_shape().as_list()[1]\n y = x_tensor.get_shape().as_list()[2]\n z = x_tensor.get_shape().as_list()[3]\n image_shaped_input = tf.reshape(x_tensor, [-1, x*y*z])\n return image_shaped_input", "def flatten(x_tensor):\n # TODO: Implement Function\n \n shape = x_tensor.get_shape().as_list()\n reshape = tf.reshape(x_tensor, (-1, shape[1] 
* shape[2] * shape[3]))\n \n return reshape", "def tf_flatten(x):\n return tf.contrib.layers.flatten(x)", "def flatten(x):\n return reshape(x, (x.shape[0], -1))", "def flatten(x):\n return x.view(x.size(0), -1)", "def flatten_reshape(variable):\n dim = 1\n for d in variable.get_shape()[1:].as_list():\n dim *= d\n return tf.reshape(variable, shape=[-1, dim])", "def flatten(x, ndim=1, name='Flatten'):\n with tf.name_scope(name, values=[x]):\n shape = x.get_shape()\n total_dim = len(shape)\n\n if total_dim == ndim:\n return x\n elif total_dim < ndim:\n raise ValueError('Attempt to flatten \"x\" to %r dimensions, but \"x\" '\n 'only has %r dimensions.' % (ndim, total_dim))\n\n if shape.is_fully_defined():\n # all the dimensions are fixed, thus we can use the static shape.\n shape = shape.as_list()[:ndim - 1] + [-1]\n else:\n # the shape is dynamic, so we have to generate a dynamic flatten\n # shape.\n shape = tf.concat(0, [tf.shape(x)[:ndim - 1], [-1]])\n\n return tf.reshape(x, shape)", "def flatten(self, input_layer):\n # Note: This ensures the output order matches that of NHWC networks\n input_layer = self._to_nhwc(input_layer)\n input_shape = input_layer.get_shape().as_list()\n num_inputs = input_shape[1]*input_shape[2]*input_shape[3]\n return tf.reshape(input_layer, [-1, num_inputs], name='flatten')", "def flatten_image(x):\n *batch_shape, h, w, c = x.shape\n return x.reshape((*batch_shape, h * w * c))", "def flatten_layers(data):\n return data.reshape((data.shape[0], data.shape[1], -1))", "def flatten(input, name):\n with tf.name_scope(name):\n l = tf.layers.flatten(input)\n return l", "def _flatten_and_concat(x, batch_shape, dtype):\n # For convenience.\n if x is None:\n return x\n\n def _reshape_part(part):\n part = tf.cast(part, dtype)\n new_shape = ps.concat(\n [batch_shape, [-1]],\n axis=-1,\n )\n return tf.reshape(part, ps.cast(new_shape, tf.int32))\n\n x = tf.nest.map_structure(_reshape_part, x)\n return tf.concat(tf.nest.flatten(x), axis=-1)", "def flatten(inputs, is_batched=True, scope=None):\n with tf.name_scope(scope, 'flatten'):\n shape = get_shape(inputs)\n if is_batched:\n num_units = np.prod(shape[1:])\n return tf.reshape(inputs, [-1, num_units])\n else:\n num_units = np.prod(shape)\n return tf.reshape(inputs, [num_units])", "def flatten_all_but_last(a):\n ret = tf.reshape(a, [-1, tf.shape(a)[-1]])\n if not tf.contrib.eager.in_eager_mode():\n ret.set_shape([None] + a.get_shape().as_list()[-1:])\n return ret", "def reshape_0(tensor):\n row = tf.shape(tensor)[0]\n og_shape = tensor.get_shape().as_list()\n shape_list = [row, og_shape[1], og_shape[2], 1]\n out = tf.reshape(tensor, shape_list)\n return out" ]
[ "0.80815935", "0.80499464", "0.802613", "0.802613", "0.79824466", "0.796515", "0.7958361", "0.79401034", "0.79401034", "0.79224795", "0.7860755", "0.7819301", "0.7798498", "0.7773259", "0.7763785", "0.7733254", "0.7664588", "0.76089823", "0.7535897", "0.7423828", "0.73723274", "0.73627794", "0.73344386", "0.7228658", "0.69954765", "0.69613737", "0.6953634", "0.69501454", "0.6899759", "0.6715046" ]
0.86480945
0
String representation for Dice.
def __str__( self ): return "Die1: %s\nDie2: %s" % ( str(self.die1), str(self.die2) )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __str__(self):\n out_string = ''\n for die in self.dice:\n out_string = out_string + str(die) + ', '\n return out_string[:-2]", "def str(self):\n if self.num_dice is not None and self.dice_type is not None:\n descr = \"{}D{}\".format(self.num_dice, self.dice_type)\n if self.plus > 0:\n descr += \"+{}\".format(self.plus)\n elif self.min_value is not None and self.max_value is not None:\n descr = \"{}-{}\".format(self.min_value, self.max_value)\n elif self.plus != 0:\n descr = str(self.plus)\n else:\n descr = \"\"\n\n return descr", "def dice(name):", "def __str__(self):\n return '{}: {}'.format(self.name, str(self.sides))", "def __str__(self):\n \n is_random_print = \"\"\n if self.is_random == True:\n is_random_print = \"randomly\"\n else:\n is_random_print = \"deterministically\"\n\n return \"Player for \" + self.side + \", ply = \" + str(self.ply) + \", breaks ties \" + is_random_print", "def __str__(self):\n prob = str(round(self.probability, 5))\n dprob = str(round(self.postdProbability, 5))\n output = \"dprob: \" + dprob + \" \\tprob: \" + prob + \"\\t: \"\n for key in self.attackDict.keys():\n output += key + \" \"\n return output", "def __str__(self):\n data = []\n for index in range(1, 15):\n key = f\"d{index}\"\n val = self._user_data[key]\n data.append((key, val))\n return vars_to_string(data)", "def dice_roll(name):\n roll = random.randint(1,6)\n print \"{name} shook the die \\\nand rolled a {roll}.\".format(name=name, roll=roll)\n return roll", "def dice_roll(name):\n roll = random.randint(1,6)\n print \"{name} shook the die \\\nand rolled a {roll}.\".format( name=name, roll=roll)\n return roll", "def display_current_dice(self):\n print(\"You rolled:\\n a = [ {} ]\\n b = [ {} ]\\n\".\n format(self.die_a, self.die_b))", "def __repr__(self):\n stringrepr = self.__class__.__name__ + \" PRNG. 
seed: \" + \\\n str(self.baseseed) + \" counter: \" + str(self.counter) + \\\n \" randbits_remaining: \" + str(self.randbits_remaining)\n return stringrepr", "def __str__(self):\n string = \"Deck contains \"\n\n for i in range(len(self.deck)):\n string += str(self.deck[i].get_suit()) + str(self.deck[i].get_rank()) + \" \"\n return string", "def __str__(self):\n return f'Character name: {self.name}\\nhealth: {self.health}\\n' \\\n f'strength: {self.strength}\\nchance dodge: ' \\\n f'{round(self.chance_dodge, 2)}\\nchance critical:' \\\n f' {round(self.chance_critical, 2)} '", "def view(self):\n return tuple(self._dice)", "def __str__(self):\r\n output = \"Deck contains\"\r\n for card in self.deck:\r\n output += \" %s\" % (card)\r\n return output + \".\"", "def __str__(self):\n return \"UID {0}, Key {1}, Cipher {2}, PRNG {3}\".format(hex(self.uid), \n hex(self.key), hex(self.cipher), hex(self.prng))", "def to_string(self):\n\n return '[[%s], [%s]], [%d, %d], [%s], %s, %s, [%s]' % \\\n (', '.join(INT2STRING_CARD[h] for h in self.hand[0]),\n ', '.join(INT2STRING_CARD[h] for h in self.hand[1]),\n self.pot[0], self.pot[1],\n ', '.join(INT2STRING_CARD[p] for p in self.pub),\n INT2STRING_PHASE[self.phase],\n INT2STRING_PLAYER[self.player],\n ', '.join(INT2STRING_STATUS[s] for s in self.status))", "def __str__(self):\n result = \", \".join(map(str, self.hand))\n result += \"\\n \" + str(self.get_score()) + \" points\"\n return result", "def __str__(self):\r\n to_string = \"ID: \" + str(self.dat_id) + \" --- CLASSIFICATION: \" + str(self.dat_party) + \" --- VOTED: \" + str(self.dat_votes)\r\n return to_string", "def __str__(self):\n return_string = self.name + \"\\n\" + str(self.traits)\n\n return return_string", "def __str__(self):\n return f\"{self.face} of {self.suit} with a value of {self.value}\"", "def to_string(self):\n return self.dungeon_string", "def __str__(self):\n return f\"This player has {self.hand} for a current total of {self.total} and {self.aceCount} Aces \" \\\n f\"valued at a soft 11. This player is a dealer: {self.dealer}.\"", "def print_dice(self):\n\n stage_to_print = 3 if self.current_stage == 4 else self.current_stage\n print(\"You rolled:\\n a = [ {} ]\\n b = [ {} ]\\n\\nYou are in Stage {}\"\n .format(self.die_a, self.die_b, stage_to_print))", "def __str__(self):\n result = \"\"\n (v1, v2) = self._lastRoll\n result = result + str((v1, v2)) + \" \" + \\\n str(v1 + v2) + \"\\n\"\n return result", "def __str__(self):\n return \"{} of {}\".format(self.rank,self.suit)", "def __str__(self):\r\n to_print = (\"Name: \" + self.name + \", Age: \" +\r\n str(self.age) + \", Hobbys: \" + str(self.hobbys))\r\n return to_print", "async def roll(self, dice: str):\n try:\n rolls, limit = map(int, dice.split('d'))\n except Exception:\n await self.bot.say('Format has to be in NdN!')\n return\n\n result = ', '.join(str(random.randint(1, limit)) for r in range(rolls))\n await self.bot.say(result)", "def __repr__(self):\n s = \"Player for \" + self.ox + \"\\n\"\n s += \" with tiebreak type: \" + self.tbt + \"\\n\"\n s += \" and ply == \" + str(self.ply) + \"\\n\\n\"\n return s", "def roll_die(self, number_of_rolls):\n\t\tfor roll in range(0, number_of_rolls):\n\t\t\tprint(str(randint(1, self.sides)), end = \", \")\n\t\tprint()" ]
[ "0.7938284", "0.68876106", "0.6755719", "0.6654932", "0.6594377", "0.64787066", "0.6398786", "0.6361623", "0.635217", "0.63273734", "0.6259721", "0.62585074", "0.6243883", "0.62208056", "0.6196698", "0.6190639", "0.61750054", "0.6167577", "0.6157307", "0.61312824", "0.6118187", "0.61103296", "0.6092314", "0.60920554", "0.605368", "0.5994521", "0.59900266", "0.59872764", "0.5981263", "0.5977914" ]
0.7110353
1
Sets the value of a register chosen by its index
def set_register(self, index, value): if index < 0 or index > 32: raise Exception('Register out of index') self.register[index].set_value(str(value))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_reg_to_val(self):\n register = (self.opcode & 0x0F00) >> 8\n value = self.opcode & 0x00FF\n self.registers[register] = value\n logger.info(\"Set register V{} to {}\".format(register, value))", "def setData(self, index, value):\n \n self.state[index.row()][index.column()] = value\n return value", "def _setReg(address, new_value):\n mem[address:address+4] = struct.pack(\"<L\", new_value)", "def set_bit(self, register, bit_index, state):\n oldvalue = self.device.readregister(register)\n if state:\n newvalue = oldvalue | 2 ** bit_index\n else:\n newvalue = oldvalue & ~(2 ** bit_index)\n \n self.device.writeregister(register, newvalue)", "def set_register(self, regnum, value, regtype=\"double\"):\n self.registers.__dict__['input_{}_register_{}'.format(regtype, regnum)] = value", "def set_register(self, regnr, value):\n value = struct.pack('<I', value)\n value = binascii.b2a_hex(value).decode('ascii')\n self.sendpkt(\"P %x=%s\" % (regnr, value))\n res = self.rxqueue.get()", "def set_at_index(self, index: int, value: object) -> None:\n self.data[index] = value", "def set_value(self, index, mode, value):\n address = self.get_address(index, mode)\n self.program[address] = value", "def set_config_value(self, value, index=None):", "def set_user_register(self, reg_index, value):\n\n self.hd.setd(f'awgs/{self.index}/userregs/{reg_index}', int(value))", "def set_register(self, register, value):\n if not Vm.is_register(register):\n raise ValueError(\"Expected register value, instead got: \" + str(register))\n\n self.registers[(register - REGISTER_0)] = value", "def set(loc, val=None):\n global simulator\n if simulator is None:\n print \"program is not started\"\n return\n try:\n if isinstance(loc, str):\n regs = prog().info()[\"registers\"]\n if loc in regs:\n if val is None:\n if simulator.concretize_register(loc) is None:\n print \"\"\"\ncan not choose a value in not-assigned register '{}'; choose a value by\nyourself.\\n\"\"\".format(loc)\n else:\n simulator.set_register(loc, val)\n else:\n print \"unknown register \", loc\n elif val is None:\n if simulator.concretize_memory(loc) is None:\n print \"can not choose a value in a not-assigned memory cell\\n\"\n elif isinstance(val, int):\n simulator.set_memory(loc, 0xFF & val)\n else:\n for b in val:\n simulator.set_memory(loc, 0xFF & b)\n loc += 1\n except insight.error.ConcretizationException:\n print \"try to assign an inconsistent value to\", loc\n except:\n simulation_error()", "def set_data(self, addr, value):\n\t\tif addr < 0:\n\t\t\tprint(\"FAIL - negative address\")\n\t\tif addr >= len(self.data):\n\t\t\tself.regs[ addr ] = value\n\t\telse:\n\t\t\tself.data[ addr ] = value", "def set(self, index, data):\n self.data[index] = data", "def update_pit(self, value, pit_index, index):\n if index == 1:\n self.state[pit_index] = value\n else:\n self.state[pit_index + self.M + 1] = value", "def __setitem__(self, index, value):\n if not isinstance(index, numbers.Integral):\n raise TypeError(\"Input index must be integer\")\n if index >= len(self._fsm.get(self._id)):\n raise ValueError(\"Input index is out of boundary\")\n ts = self._fsm.get(self._id)\n ts[index] = value\n self._fsm.store(self._id, ts)", "def set_register(self, name, value):\n if name is 'P':\n value = value | (1 << 5)\n\n self.regs[name].set_value(value & 0xFF)\n return value & 0xFF", "def __setitem__(self, index, value):\n self.elem[index] = value", "def set_dna_value(self, value: int, index: int):\n self.dna[index] = value", "def setvalue(self, index, value):\n 
self._checkIndex(index)\n self._items[index].value = value", "def set_reg_to_reg(self):\n register = self.return_middle_registers(self.opcode)\n self.registers[register[0]] = self.registers[register[1]]\n logger.info(\"Set register V{} to V{}\".format(register[0], register[1]))", "def __setitem__(self, idx, value):\n if not isinstance(value, nodes.Node):\n raise NotImplementedError(\"setitem with non-blaze rhs\")\n result = self.getitem(idx, context='set')\n result = Assign('assign', [result, value])\n result.eval()", "def __setitem__(self, index, fill):\n nth_int, nth_bit = divmod(index, BitArray._UNSIGNED_INT)\n if fill:\n self.bits[nth_int] |= (1 << nth_bit)\n else:\n self.bits[nth_int] &= ~(1 << nth_bit)", "def __setitem__(self, index, value):\n self._update_value_at(index, value)", "def set_custom_register(self, index, value=0):\n\t\tif (index in self.custom_registers):\n\t\t\tsys.stderr.write(\"WARNING : Custom register \" + str(index) + \" is already declared for \" + self._target_id +\"\\n\")\n\t\t\treturn False\n\n\t\tcustom_register = nutaq.custom_register(self._target_id, 2)\n\t\tcustom_register.set_index(index)\n\t\tcustom_register.set_default_value(value)\n\t\tcustom_register.set_update_rate(1)\n\n\t\tself.custom_registers[index] = custom_register\n\n\t\treturn True;", "def set_value ( self, object, value ):\n object[ self.index ] = value", "def replace(self, index, value):\n index += self.n\n self.data[index] = value\n index //= 2\n while index > 0:\n self.data[index] = self.func(self.data[2*index], self.data[2*index+1])\n index //= 2", "def __setitem__(self, index: int, value: object) -> None:\n self.set_at_index(index, value)", "def update_store(self, value, index):\n if index == 1:\n self.state[self.M] = value\n else:\n self.state[-1] = value", "def gene(self, idx, value):\r\n self.genes[idx] = value" ]
[ "0.7115892", "0.68625295", "0.68161756", "0.68078434", "0.67504513", "0.6697386", "0.666284", "0.6620759", "0.6548634", "0.6545254", "0.65339446", "0.6529928", "0.65193087", "0.650311", "0.64888734", "0.6484818", "0.6483465", "0.6436545", "0.635332", "0.63366884", "0.63226175", "0.63179415", "0.63155794", "0.63047117", "0.62856054", "0.6276891", "0.62765366", "0.6267645", "0.62076974", "0.6206709" ]
0.78160965
0
Sets the value of the program counter
def set_pc(self, value): self.program_counter.set_value(str(value))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_counter_increase(self, val=1):\r\n return self._arm.set_counter_increase(val)", "def set_count(c):\n global count\n count = c", "def increment_pc(self):\n self.program_counter[-1] += 1", "def increment_counter(self) -> None:", "def setCount(self, num):\n self.count=num", "def reset_counter(self) -> None:", "def counter(self, value: int, /) -> None:", "def count(self, value):\n \n self._count = int(value)", "def increase_counter(self):\n self.values = self.values + 1", "def actualizar_counter(self, digito):\r\n self.counter.setText(str(digito))\r\n self.counter.repaint()", "def set_power_management(value: int) -> None:", "def set(self, val: int) -> None:\n self.val = val\n self.notes = []", "def updateCounter(self):\n self.counter = self.counter + 1\n self.syncDataStructure[\"+\"][str(self.instanceID)] = self.counter", "def set_value(self, index, mode, value):\n address = self.get_address(index, mode)\n self.program[address] = value", "def inc(self):\n self._value += 1", "def set_current(self, val: int) -> None:\n self._bin_iter.set_current(val)", "def update_count(self):\n pass # Do nothing", "def setInteger(self, value):", "def setInteger(self, value):", "def setInteger(self, value: int):\n self.value = value", "def _inc_counter(self) -> None:\n self._state_storage.increment_counter()", "def increase_count(self, number=1):\n self.count += number", "def set_count(self, count, asset=None):\n self._set_property('pc:count', count, asset)", "def make_count_change():\n \"*** YOUR CODE HERE ***\"", "def setMancount(self, cnt):\n self.__mancount=cnt", "def set_sequence(self, counter):\n self.seq_counter = counter", "def inc_counter(self, *_, **__): # pylint: disable=arguments-differ\n pass", "def fork_pc(self):\n self.program_counter.append(0)", "def update_counter(ai_counter):\n if ai_counter < 140:\n ai_counter += 1\n else:\n ai_counter = 60\n return ai_counter", "def inc( self ):\n self.count += 1" ]
[ "0.68771935", "0.67240435", "0.6649765", "0.6593157", "0.65347755", "0.64093655", "0.63686216", "0.6187651", "0.6128649", "0.6124087", "0.6109218", "0.6085519", "0.6061288", "0.59808475", "0.5976789", "0.59526414", "0.59166974", "0.5915294", "0.5915294", "0.5910445", "0.58844644", "0.588147", "0.5877331", "0.58713377", "0.58327746", "0.5831098", "0.58279896", "0.58131367", "0.5802978", "0.5801039" ]
0.7705855
0
Sets the value of the storage
def set_storage(self, value): self.storage.setPlainText(str(value))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set(self, value):\n self._storage.set(self._item, value)", "def value(self, value):\n self.set_data(value)", "def assignValue(self,value):\n self.itemset(value)", "def assignValue(self,value):\n self.itemset(value)", "def set_storage(self):\n pass", "def set_Value(self, n_value):\n#Joerg S/Martin W advice\n self.StoredValue=n_value", "def set(self, value):\n self.value = value\n self.synced = True", "def setUseStorage(self, value):\n return self._set(useStorage=value)", "async def set(self, key, value):\n trace_log(\"PersistantStorage: setting key \", key, \" to value \", value)\n self.dict[key] = value\n #self.log_set(key, value)", "def __setitem__(self, key, value):\n self._get_storage()[key] = value", "def assign(self, value):\n self.value = value", "def setValue(self, name: unicode, value: object) -> None:\n ...", "def set_value (self):\n raise NotImplementedError", "def __value_set(self, new_value):\n self.db_value = to_pickle(new_value)\n # print(\"value_set, self.db_value:\", repr(self.db_value)) # DEBUG\n self.save(update_fields=[\"db_value\"])", "def __setSettingsToStorage(value):\n AccountSettings.setSettings(NEW_SETTINGS_COUNTER, value)", "def write(self, key, value):\n if( self.storage is None ): \n self.storage = {};\n if( value is None ):\n self.storage[ key ] = None;\n del self.storage[ key ];\n else:\n self.storage[ key ] = value;", "def set_persistent_value(self, value, *args, **kwargs):\n pass", "def set(self, name, value):\n pass", "def set_global_storage(self, name: str, value: Any) -> None:\n self.global_storage[name] = value", "def value(self, value):\n\n\t\tself.__value = value", "def setValue(self, value):\n self._value = value", "def value(self, value):\n self._value = value\n self.is_dirty = True", "def value(self, value):\n\n self._value = value", "def value(self, value):\n\n self._value = value", "def value(self, value):\n\n self._value = value", "def value(self, value):\n\n self._value = value", "def value(self, value):\n\n self._value = value", "def value(self, value):\n\n self._value = value", "def put(self):\n self._val = True", "def set_value(self, key, value):\n self.data[key] = value\n self.save_data()" ]
[ "0.82022053", "0.7120909", "0.7109629", "0.7109629", "0.70900124", "0.7048648", "0.70038307", "0.6964936", "0.696448", "0.6918625", "0.6916776", "0.68932664", "0.689154", "0.6870634", "0.6795299", "0.67443794", "0.67187697", "0.6685269", "0.6677824", "0.66491085", "0.6641952", "0.66088134", "0.66021115", "0.66021115", "0.66021115", "0.66021115", "0.66021115", "0.66021115", "0.659442", "0.65784854" ]
0.7936727
1
Fills the symboltable with parsed labels and addresses
def set_symbols(self, symboltable: dict): for index in range(1, self.symbol_layout.rowCount()): self.symbol_layout.removeRow(index) font = QFont('Fira Code', 8, QFont.Medium) for entry in symboltable: symbol = QLineEdit() symbol.setReadOnly(True) symbol.setText(entry) symbol.setFont(font) address = QLineEdit() address.setReadOnly(True) address.setFont(font) address.setText(str(symboltable[entry])) self.symbol_layout.addRow(address, symbol)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __generate_symboltable(self, code):\n\n code_without_lables = []\n address = 0\n for line in code:\n label_code = line.split(':')\n label = label_code[0]\n if len(label) != len(line):\n self.__symboltable[label] = address\n address += REG_SIZE\n instruction = label_code.pop().strip()\n code_without_lables = code_without_lables + [instruction]\n else:\n instruction = label_code.pop().strip()\n code_without_lables = code_without_lables + [instruction]\n\n tokens = instruction.split(' ')\n asm_directive = tokens[0]\n if tokens[0] in AssemblerDirectives.to_string():\n if asm_directive == AssemblerDirectives.ORG.name:\n address = int(tokens[1])\n else:\n address += REG_SIZE\n\n return code_without_lables", "def __init__(self):\r\n self.s_table = SymbolTable.preSymbols", "def parse_symbol_table(data, sections, elf_header):\n if is64bit(elf_header):\n symbol_entry_str = symbol_64_entry_str\n symbol_entry_spec = symbol_64_entry_spec\n else:\n symbol_entry_str = symbol_32_entry_str\n symbol_entry_spec = symbol_32_entry_spec\n entry_len = struct.calcsize(symbol_entry_str)\n \n st_offset = None\n if \".symtab\" in sections:\n section = \".symtab\"\n if \".strtab\" in sections:\n st_offset = sections[\".strtab\"][\"offset\"]\n else:\n st_offset = sections[section][\"offset\"]\n \n elif \".dynsym\" in sections:\n section = \".dynsym\"\n if \".dynstr\" in sections:\n st_offset = sections[\".dynstr\"][\"offset\"]\n else:\n st_offset = sections[section][\"offset\"]\n \n \n if section not in sections:\n return {}, {} \n \n symbols = {}\n imports = {}\n offset = sections[section][\"offset\"]\n size = sections[section][\"size\"]\n index = offset\n while index < offset + size:\n vals = {}\n if len(data) < index+entry_len: \n break\n \n val_data = struct.unpack(symbol_entry_str, data[index:index+entry_len])\n for i, elem in enumerate(symbol_entry_spec):\n vals[elem[0]] = val_data[i]\n \n if st_offset is None:\n symbols[vals[\"name\"]] = vals\n else:\n func_name = get_name_from_string_table(data, st_offset, vals[\"name\"])\n if func_name:\n vals.pop(\"name\")\n vals[\"info\"] = get_symbol_info(vals[\"info\"])\n vals[\"shndx\"] = get_symbol_shndx(vals[\"shndx\"])\n \n if vals[\"info\"] == \"UNDEFINED\" and vals[\"value\"] == 0:\n tmp_name = func_name\n import_name = \"Unknown\"\n if \"@@\" in func_name:\n i = tmp_name.find(\"@@\")\n func_name = tmp_name[:i]\n import_name = tmp_name[i:].strip(\"@@\") \n if import_name not in imports:\n imports[import_name] = {}\n imports[import_name][func_name] = vals\n symbols[func_name] = vals\n \n index += entry_len \n \n return symbols, imports", "def symbol_table(self, value: str):\n self._symbol_table = value", "def addPair(self, symbol, address):\r\n self.s_table[symbol] = address", "def __init__(self, DEBUG=False):\n self.DEBUG = DEBUG\n\n self.classTable = {}\n self.subroutineTable = {}\n\n self.counts = {}\n self.counts[\"STATIC\"] = 0\n self.counts[\"FIELD\"] = 0\n self.counts[\"ARG\"] = 0\n self.counts[\"VAR\"] = 0\n\n if self.DEBUG:\n print(\"DEBUG(SymbolTable): INITIALIZED SYMBOL TABLES\")", "def addEntry(self, symbol, address):\n self.table[symbol] = address", "def __init__(self, entries: ghidra.program.model.address.AddressSetView, source: ghidra.program.model.symbol.SourceType):\n ...", "def handle_labels(ls):\r\n\r\n # assign each line a number\r\n line_num = {}\r\n counter = 0\r\n for i in ls:\r\n if not i.startswith('('):\r\n line_num[i] = counter\r\n counter += 1\r\n else:\r\n sb = i[1:-1]\r\n line_num[sb] = counter\r\n\r\n # replace @XXX with 
number\r\n var_address = 16\r\n mem = {}\r\n for i in range(len(ls)):\r\n if ls[i].startswith('@'):\r\n # if @XXX is already in numeral form, do nothing\r\n if ls[i][1:].isdigit():\r\n pass\r\n\r\n # replace with pre-defined symbols if found\r\n elif pre_defined_sb.get(ls[i][1:]) is not None:\r\n ls[i] = '@' + pre_defined_sb[ls[i][1:]]\r\n\r\n # replace by (XXX) line number if search failed\r\n elif line_num.get(ls[i][1:]) is not None:\r\n ls[i] = '@' + str(line_num[ls[i][1:]])\r\n\r\n # else must be user defined variable\r\n # assign same address for same variable\r\n else:\r\n if ls[i] not in mem:\r\n mem[ls[i]] = '@' + str(var_address)\r\n ls[i] = '@' + str(var_address)\r\n var_address += 1\r\n else:\r\n ls[i] = mem[ls[i]]\r\n\r\n # remove (XXX)'s\r\n ls = list(filter(lambda x: not x.startswith('('), ls))\r\n\r\n return ls", "def getSymbolTable(self) -> ghidra.app.util.bin.format.pe.debug.DebugCodeViewSymbolTable:\n ...", "def _parse_value_label_table(self, sfile):\n byteorder = self._byteorder\n \n nentries = unpack(byteorder + 'l', sfile.read(4))[0]\n txtlen = unpack(byteorder + 'l', sfile.read(4))[0]\n off = []\n val = []\n txt = []\n for i in range(nentries):\n off.append(unpack(byteorder+'l',sfile.read(4))[0])\n for i in range(nentries):\n val.append(unpack(byteorder+'l',sfile.read(4))[0])\n \n txt_block = unpack(str(txtlen) + \"s\", sfile.read(txtlen))\n txt = [t.decode('iso-8859-1') \n for b in txt_block for t in b.split(b'\\0')]\n \n # put (off, val) pairs in same order as txt\n sorter = list(zip(off, val))\n sorter.sort()\n \n # dict of val[i]:txt[i]\n table = {sorter[i][1]: txt[i] for i in range(len(sorter))}\n \n return table", "def __set_symbol_dict(self):\r\n return {0: list(alph) if self.is_case_snstv else list(alph)[:26],\r\n 1: list(dgt),\r\n 2: list(spcl) if self.is_spcl else []}", "def build_gdb_symbol_table():\n\n tab = Symtab()\n n = gdb.parse_and_eval (\"symtab->nodes\")\n while (long(n)):\n if symtab_node_is_function (n):\n current_symbol = GdbFunction(tab, n)\n tab.all_functions.append (current_symbol)\n elif symtab_node_is_variable (n):\n current_symbol = GdbVariable(tab, n)\n tab.all_variables.append (current_symbol)\n else:\n raise gdb.GdbError (\"Encountered an unknown symbol table node\");\n\n tab.order_to_sym[current_symbol.order] = current_symbol\n tab.all_symbols.append (current_symbol)\n\n n = n[\"next\"]\n pass\n\n tab.fixup()\n return tab", "def __init__(self, owner, name, addr, size, binding, sym_type, sh_info):\n super(Symbol, self).__init__()\n self.owner_obj = owner\n self.name = name\n self.addr = addr\n self.size = size\n self.binding = binding\n self.type = sym_type\n self.sh_info = sh_info if sh_info != 'SHN_UNDEF' else None\n self.resolved = False\n self.resolvedby = None\n if (claripy and isinstance(self.addr, claripy.ast.Base)) or self.addr != 0:\n self.owner_obj.symbols_by_addr[self.addr] = self\n # would be nice if we could populate demangled_names here...\n\n #demangled = self.demangled_name\n #if demangled is not None:\n # self.owner_obj.demangled_names[self.name] = demangled", "def add_labels(self):\n counter = 0\n labels_list = []\n for i in range(len(self.commands)):\n command = self.commands[i]\n if command.startswith('('):\n raw_value = command.replace('(', '').replace(')', '')\n self.symbol_table[raw_value] = str(counter)\n labels_list.append(command)\n else:\n counter += 1\n for label in labels_list: # remove labels with parentheses we don't need them.\n self.commands.remove(label)", "def __init__(self, epsilon=EPSILON):\n cdef 
bytes name = 'SymbolTable<{0}>'.format(id(self)).encode('ascii')\n self.table = new sym.SymbolTable(<string> name)\n assert (self[epsilon] == EPSILON_ID)", "def symbol_table_addresses(self):\n all_address = []\n for node in self.all_nodes[0]:\n all_address.extend(node['addresses'])\n return all_address", "def clean_symbols(self):\n self.add_labels()\n variable_counter = 16\n for i in range(len(self.commands)):\n command = self.commands[i]\n if command.startswith('@'): # symbols always reside in A instructions\n value = command.split('@')[1]\n if not value.isdigit(): # is a symbol\n if value not in self.symbol_table: # is a variable\n self.symbol_table[value] = str(variable_counter)\n variable_counter += 1\n numeric_value = self.symbol_table.get(value)\n command = '@' + numeric_value\n self.commands[i] = command", "def add_symbol_empty(self):\n if osarch_is_32_bit():\n self.add_data((\"empty symbol\", 4, (0, 0, 0, 0)))\n elif osarch_is_64_bit():\n self.add_data((\"empty symbol\", 4, (0, 0)))\n self.add_data((\"empty symbol\", PlatformVar(\"addr\"), (0, 0)))\n else:\n raise_unknown_address_size()", "def add_dt_symtab(self, op):\n d_tag = AssemblerVariable((\"d_tag, DT_SYMTAB = 6\", PlatformVar(\"addr\"), 6))\n d_un = AssemblerVariable((\"d_un\", PlatformVar(\"addr\"), op))\n self.__data[0:0] = [d_tag, d_un]\n self.refresh_name_label()", "def save(self):\n # First, just allocate enough memory for the SDAT header.\n data = bytearray(0x40)\n\n # -------------------\n # Make the SYMB block\n\n symbolsStringTable = bytearray()\n def addSymbolAndGetOffset(symbol):\n if symbol is None:\n return -1\n offset = len(symbolsStringTable)\n symbolsStringTable.extend(symbol.encode('latin-1') + b'\\0')\n return offset\n\n symbolsHeaderOffsets = []\n\n # Parallel arrays, here.\n symbolsTableValues = []\n shouldIncrementByTableLen = []\n\n anySymbolsInWholeFile = False\n\n def addSymbolsFrom(namedList, nested=False):\n\n # First, figure out if any actual symbols exist\n anyActualSymbols = False\n anyActualSubsymbols = False\n if not nested:\n for symbol, _ in namedList:\n if symbol is not None:\n anyActualSymbols = True\n break\n else:\n for symbol, entry in namedList:\n if symbol is not None:\n anyActualSymbols = True\n break\n for subSymbol, subEntry in entry.sequences:\n if subSymbol is not None:\n anyActualSubsymbols = True\n break\n\n\n nonlocal anySymbolsInWholeFile\n anySymbolsInWholeFile |= anyActualSymbols\n anySymbolsInWholeFile |= anyActualSubsymbols\n\n # If there *are* any symbols, keep going\n symbolsHeaderOffsets.append(len(symbolsTableValues) * 4)\n\n if not nested:\n symbolsTableValues.append(len(namedList))\n shouldIncrementByTableLen.append(False)\n\n for symbol, _ in namedList:\n symbolsTableValues.append(addSymbolAndGetOffset(symbol))\n shouldIncrementByTableLen.append(True)\n\n else:\n mainList, subListsArea = [], []\n mainListSIBTL, subListsAreaSIBTL = [], []\n\n mainList.append(len(namedList))\n mainListSIBTL.append(False)\n\n mainListFullLength = (1 + 2 * len(namedList)) * 4\n subListsAreaOffset = (0x40\n + len(symbolsTableValues) * 4\n + mainListFullLength)\n\n for symbol, entry in namedList:\n\n mainList.append(addSymbolAndGetOffset(symbol))\n mainListSIBTL.append(True)\n\n subListOffset = subListsAreaOffset + len(subListsArea) * 4\n\n if entry is None:\n subNames = []\n else:\n subNames = [n for (n, s) in entry.sequences]\n\n if entry or subNames:\n subListsArea.append(len(subNames))\n subListsAreaSIBTL.append(False)\n\n for subSymbol in subNames:\n 
subListsArea.append(addSymbolAndGetOffset(subSymbol))\n subListsAreaSIBTL.append(True)\n\n mainList.append(subListOffset)\n mainListSIBTL.append(False)\n\n else:\n mainList.append(0)\n mainListSIBTL.append(False)\n\n symbolsTableValues.extend(mainList)\n symbolsTableValues.extend(subListsArea)\n shouldIncrementByTableLen.extend(mainListSIBTL)\n shouldIncrementByTableLen.extend(subListsAreaSIBTL)\n\n addSymbolsFrom(self.sequences)\n addSymbolsFrom(self.sequenceArchives, True)\n addSymbolsFrom(self.banks)\n addSymbolsFrom(self.waveArchives)\n addSymbolsFrom(self.sequencePlayers)\n addSymbolsFrom(self.groups)\n addSymbolsFrom(self.streamPlayers)\n addSymbolsFrom(self.streams)\n\n # Only add the SYMB block if there are any symbols\n if anySymbolsInWholeFile:\n symbolsBlockOffset = len(data)\n\n symbolsTableLen = len(symbolsTableValues) * 4\n symbolsTable = bytearray()\n for value, shouldIncrement in itertools.zip_longest(symbolsTableValues,\n shouldIncrementByTableLen):\n if value == -1:\n symbolsTable.extend(b'\\0\\0\\0\\0')\n else:\n if shouldIncrement:\n value += symbolsTableLen + 0x40\n symbolsTable.extend(struct.pack('<I', value))\n\n symbolsBlockSize = 0x40 + len(symbolsTable) + len(symbolsStringTable)\n paddedSymbSize = symbolsBlockSize\n while paddedSymbSize % 4:\n paddedSymbSize += 1\n if self.padSymbSizeTo4InSDATHeader:\n symbolsBlockSize = paddedSymbSize\n\n symbolsHeaderOffsetsTable = bytearray()\n for value in symbolsHeaderOffsets:\n if value is None:\n symbolsHeaderOffsetsTable.extend(b'\\0\\0\\0\\0')\n else:\n symbolsHeaderOffsetsTable.extend(struct.pack('<I', value + 0x40))\n\n symbolsHeader = struct.pack('<4sI',\n b'SYMB', paddedSymbSize)\n\n data.extend(symbolsHeader)\n data.extend(symbolsHeaderOffsetsTable)\n data.extend(b'\\0' * 0x18)\n data.extend(symbolsTable)\n data.extend(symbolsStringTable)\n\n else:\n symbolsBlockOffset = None\n symbolsBlockSize = None\n\n\n # -------------------\n # Make the INFO block\n while len(data) % 4: data.append(0)\n infoBlockOffset = len(data)\n\n # Add room to add the header later\n data.extend(b'\\0' * (8 + 8 * 4))\n\n # Pad to 0x20 relative to the INFO block, for some reason\n while (len(data) - infoBlockOffset) % 0x20: data.append(0)\n\n # Helper functions\n def info_declarePart(partNumber):\n struct.pack_into('<I', data, infoBlockOffset + 8 + 4 * partNumber,\n len(data) - infoBlockOffset)\n def addFileAndGetID(file, dataMergeOptimizationID):\n idx = _common.listFind(files, file)\n\n while idx != -1:\n if dataMergeOptimizationID == fileMergeIDs[idx]:\n return idx\n idx = _common.listFind(files, file, idx + 1)\n\n files.append(file)\n fileMergeIDs.append(dataMergeOptimizationID)\n return len(files) - 1\n\n # We encode sections out of order, so that the files will be in\n # the same order as in retail SDATs.\n fileMergeIDs = []\n files = []\n\n # Info part 0: SSEQ\n info_declarePart(0)\n\n data.extend(struct.pack('<I', len(self.sequences)))\n sseqOffsetsTableOffset = len(data)\n data.extend(b'\\0' * (4 * len(self.sequences)))\n\n for i, (_, sseq) in enumerate(self.sequences):\n if sseq is None:\n entryOff = 0\n else:\n entryOff = len(data) - infoBlockOffset\n\n (file, unk02, bankID, volume, channelPressure,\n polyphonicPressure, playerID) = sseq.save()\n fileID = addFileAndGetID(file, sseq.dataMergeOptimizationID)\n\n data.extend(struct.pack('<3H4Bxx',\n fileID, unk02, bankID, volume, channelPressure,\n polyphonicPressure, playerID))\n\n struct.pack_into('<I', data, sseqOffsetsTableOffset + 4 * i, entryOff)\n\n # Info part 1: SSAR\n 
info_declarePart(1)\n\n data.extend(struct.pack('<I', len(self.sequenceArchives)))\n ssarOffsetsTableOffset = len(data)\n data.extend(b'\\0' * (4 * len(self.sequenceArchives)))\n\n for i, (_, ssar) in enumerate(self.sequenceArchives):\n if ssar is None:\n entryOff = 0\n else:\n entryOff = len(data) - infoBlockOffset\n\n file, unk02, _ = ssar.save()\n fileID = addFileAndGetID(file, ssar.dataMergeOptimizationID)\n\n data.extend(struct.pack('<HH',\n fileID, unk02))\n\n struct.pack_into('<I', data, ssarOffsetsTableOffset + 4 * i, entryOff)\n\n # Info part 2: SBNK\n info_declarePart(2)\n\n data.extend(struct.pack('<I', len(self.banks)))\n sbnkOffsetsTableOffset = len(data)\n data.extend(b'\\0' * (4 * len(self.banks)))\n\n for i, (sbnkName, sbnk) in enumerate(self.banks):\n if sbnk is None:\n entryOff = 0\n else:\n entryOff = len(data) - infoBlockOffset\n\n file, unk02, waveArchives = sbnk.save()\n fileID = addFileAndGetID(file, sbnk.dataMergeOptimizationID)\n\n swarIDs = []\n for s in waveArchives:\n swarIDs.append(-1 if s is None else s)\n while len(swarIDs) < 4:\n swarIDs.append(-1)\n\n if len(swarIDs) > 4:\n raise ValueError(f'SBNK {i} (\"{sbnkName}\") uses '\n f'{len(swarIDs)} SWARs. The maximum is 4.')\n\n data.extend(struct.pack('<HH4h',\n fileID, unk02, *swarIDs))\n\n struct.pack_into('<I', data, sbnkOffsetsTableOffset + 4 * i, entryOff)\n\n\n # Info part 3: SWAR\n info_declarePart(3)\n\n data.extend(struct.pack('<I', len(self.waveArchives)))\n swarOffsetsTableOffset = len(data)\n data.extend(b'\\0' * (4 * len(self.waveArchives)))\n\n for i, (_, swar) in enumerate(self.waveArchives):\n if swar is None:\n entryOff = 0\n else:\n entryOff = len(data) - infoBlockOffset\n\n file, unk02 = swar.save()\n fileID = addFileAndGetID(file, swar.dataMergeOptimizationID)\n\n data.extend(struct.pack('<HH',\n fileID, unk02))\n\n struct.pack_into('<I', data, swarOffsetsTableOffset + 4 * i, entryOff)\n\n\n # Info part 4: Sequence players\n info_declarePart(4)\n\n data.extend(struct.pack('<I', len(self.sequencePlayers)))\n spOffsetsTableOffset = len(data)\n data.extend(b'\\0' * (4 * len(self.sequencePlayers)))\n\n for i, (_, sp) in enumerate(self.sequencePlayers):\n if sp is None:\n entryOff = 0\n else:\n entryOff = len(data) - infoBlockOffset\n\n maxSequences, channels, heapSize = sp.save()\n\n channelMask = 0\n for j in range(16):\n if j in channels:\n channelMask |= 1 << j\n\n data.extend(struct.pack('<HHI',\n maxSequences, channelMask, heapSize))\n\n struct.pack_into('<I', data, spOffsetsTableOffset + 4 * i, entryOff)\n\n\n # Info part 5: Groups\n info_declarePart(5)\n\n data.extend(struct.pack('<I', len(self.groups)))\n groupOffsetsTableOffset = len(data)\n data.extend(b'\\0' * (4 * len(self.groups)))\n\n for i, (_, group) in enumerate(self.groups):\n if group is None:\n entryOff = 0\n else:\n entryOff = len(data) - infoBlockOffset\n\n data.extend(struct.pack('<I', len(group)))\n\n for gEntry in group:\n data.extend(struct.pack('<BHxI', *gEntry.save()))\n\n struct.pack_into('<I', data, groupOffsetsTableOffset + 4 * i, entryOff)\n\n\n # Info part 6: Stream players\n info_declarePart(6)\n\n data.extend(struct.pack('<I', len(self.streamPlayers)))\n spOffsetsTableOffset = len(data)\n data.extend(b'\\0' * (4 * len(self.streamPlayers)))\n\n for i, (_, sp) in enumerate(self.streamPlayers):\n if sp is None:\n entryOff = 0\n else:\n entryOff = len(data) - infoBlockOffset\n\n channels = sp.save()\n chanCount = len(channels)\n while len(channels) < 16:\n channels.append(0xFF)\n\n data.append(chanCount)\n 
data.extend(channels)\n\n # This has to occur in order for the padding to work out\n # correctly. Weird, but, what can you do. Might even be\n # an unknown value.\n data.extend(b'\\0\\0\\0\\0')\n\n struct.pack_into('<I', data, spOffsetsTableOffset + 4 * i, entryOff)\n\n while len(data) % 4: data.append(0)\n\n\n # Info part 7: Streams\n info_declarePart(7)\n\n data.extend(struct.pack('<I', len(self.streams)))\n strmOffsetsTableOffset = len(data)\n data.extend(b'\\0' * (4 * len(self.streams)))\n\n for i, (_, strm) in enumerate(self.streams):\n if strm is None:\n entryOff = 0\n else:\n entryOff = len(data) - infoBlockOffset\n\n file, unk02, volume, priority, playerID, unk07 = strm.save()\n fileID = addFileAndGetID(file, strm.dataMergeOptimizationID)\n\n data.extend(struct.pack('<HH4B4x',\n fileID, unk02, volume, priority, playerID, unk07))\n\n struct.pack_into('<I', data, strmOffsetsTableOffset + 4 * i, entryOff)\n\n # Now we can finally fill the header in.\n struct.pack_into('<4sI', data, infoBlockOffset,\n b'INFO', len(data) - infoBlockOffset)\n\n infoBlockSize = len(data) - infoBlockOffset\n\n\n # ----------------------\n # Make a dummy FAT block, to be filled in when adding to the\n # FILE block\n\n while len(data) % 4: data.append(0)\n fatBlockOffset = len(data)\n fatBlockSize = 0xC + 0x10 * len(files)\n fatTableOffset = fatBlockOffset + 0xC\n\n fatHeader = struct.pack('<4sII',\n b'FAT ', 0xC + 0x10 * len(files), len(files))\n\n data.extend(fatHeader)\n data.extend(b'\\0' * (0x10 * len(files)))\n\n\n # -------------------\n # Make the FILE block and fill in the FAT block\n while len(data) % 4: data.append(0)\n fileBlockOffset = len(data)\n\n # Dummy header (to be filled in after we know the total size)\n data.extend(b'\\0' * 0xC)\n\n # Some games align the first file differently\n if self.firstFileAlignment is not None:\n while len(data) % self.firstFileAlignment:\n data.append(0)\n\n # Add each file\n for i, file in enumerate(files):\n\n # Files must be aligned to 0x20 relative to the SDAT\n # itself... usually. Some games align to other amounts.\n while len(data) % self.fileAlignment:\n data.append(0)\n\n # Actually add the file\n fileOffset = len(data)\n data.extend(file)\n \n # Add the appropriate FAT entry\n fLen = len(file)\n if self.fatLengthsIncludePadding:\n while fLen % self.fileAlignment: fLen += 1\n\n struct.pack_into('<II', data, fatTableOffset + 0x10 * i,\n fileOffset, fLen)\n\n # And one last pad for good measure. 
(And because retail files\n # do so.)\n if self.padAtEnd:\n while len(data) % self.fileAlignment:\n data.append(0)\n\n # Add the header\n struct.pack_into('<4sII', data, fileBlockOffset,\n b'FILE', len(data) - fileBlockOffset, len(files))\n\n fileBlockSize = len(data) - fileBlockOffset\n\n\n # -----------------------\n # Put the blocks together\n\n # Write the SDAT header\n struct.pack_into('<8I', data, 0x10,\n 0 if symbolsBlockOffset is None else symbolsBlockOffset,\n 0 if symbolsBlockSize is None else symbolsBlockSize,\n 0 if infoBlockOffset is None else infoBlockOffset,\n 0 if infoBlockSize is None else infoBlockSize,\n 0 if fatBlockOffset is None else fatBlockOffset,\n 0 if fatBlockSize is None else fatBlockSize,\n 0 if fileBlockOffset is None else fileBlockOffset,\n 0 if fileBlockSize is None else fileBlockSize)\n\n # Write the standard header to the beginning\n _common.NDS_STD_FILE_HEADER.pack_into(data, 0,\n b'SDAT', 0xFEFF, 0x100, len(data), 0x40,\n 3 if symbolsBlockOffset is None else 4)\n\n return data", "def address(self, symbol):\r\n return self.s_table[symbol]", "def add_labels(data_lists, table_labels):\n labeled_dictionary_collection = {}\n \n\n for symbol, data_list in data_lists.iteritems():\n if len(data_list) > 1:\n labeled_dictionary_collection[symbol] = dict(zip(table_labels,data_list))\n return labeled_dictionary_collection", "def create_symbol_to_possible_cell_mapping(self):\r\n symbols_to_cells = defaultdict(set)\r\n for cell in self.iterate_empty_cells():\r\n for symbol in cell.get_possible_symbols():\r\n symbols_to_cells[symbol].add(cell)\r\n return symbols_to_cells", "def add_symbol_und(self, name):\n label_name = \"symtab_\" + name\n if osarch_is_32_bit():\n self.add_data((\"st_name\", 4, \"strtab_%s - strtab\" % (name)))\n self.add_data((\"st_value\", PlatformVar(\"addr\"), label_name, label_name))\n self.add_data((\"st_size\", PlatformVar(\"addr\"), PlatformVar(\"addr\")))\n self.add_data((\"st_info\", 1, 17))\n self.add_data((\"st_other\", 1, 0))\n self.add_data((\"st_shndx\", 2, 1))\n elif osarch_is_64_bit():\n self.add_data((\"st_name\", 4, \"strtab_%s - strtab\" % (name)))\n self.add_data((\"st_info\", 1, 17))\n self.add_data((\"st_other\", 1, 0))\n self.add_data((\"st_shndx\", 2, 1))\n self.add_data((\"st_value\", PlatformVar(\"addr\"), label_name, label_name))\n self.add_data((\"st_size\", PlatformVar(\"addr\"), PlatformVar(\"addr\")))\n else:\n raise_unknown_address_size()", "def fixAddress(data):\n\tfor each in data:\n\t\ttempK = list()\n\t\tfor tag in each['k']:\n\t\t\tif tag.split(':')[0] == 'addr':\n\t\t\t\ttempK.append(tag.split(':')[1])\n\t\t\telse:\n\t\t\t\ttempK.append(tag)\n\t\t\teach['k'] = deepcopy(tempK)\n\t\tyield each", "def _construct_all_positions(self):\n d = dict((s, 0) for s in self.symbol_list)\n d['datetime'] = self.backtest_date\n return [d]", "def __init__(self):\n\n self.st = {} # symbol table\n self.st[\"itrs\"] = [{}, {}, {}]\n self.existVecs = False\n self.existMats = False\n self.applyOnce = {}", "def labelTable(self,table):\n \n for sslice,_, lFields in self._lLabellingInstruction:\n for field in lFields:\n if field is not None:\n try:\n for cell in np.nditer(table.getNPArray()[sslice],['refs_ok'],op_dtypes=np.dtype(object)):\n cell[()].addField(field.cloneMe())\n except: pass", "def create_symbol_table(root):\n\n set_depth(root, 0)\n #Initialize the stack, with the AST root\n stack = Stack(root)\n\n #the symbol table maps the name to the scope.\n #Any node can belong to multiple scopes, therefore this\n #is a list of 
scope\n symbol_table = STable()\n \n #this represents objects imported from\n #other modules\n other_modules = {}\n\n for node, children, ntype in stack:\n\n if ntype == \"Import\":\n #Import object has names prop which\n #is an array of names\n for name in node.names:\n #name can be the name or an alias \n name_val = name.asname or name.name\n #insert in symbol_table \n symbol_table[name_val] = ()\n\n elif ntype == \"ImportFrom\":\n if node.names[0].name == '*':\n try:\n imp_mod = importlib.import_module(node.module)\n #Add all names in imported module, except those\n #starting with '_'\n for name in dir(imp_mod):\n if name[0] != '_':\n symbol_table[name] = stack_top(scopes)\n\n except ImportError:\n print \"Error: local system does not have {}. Skipping!\".format(node.module)\n pass\n else:\n #TODO: store node.module\n for name in node.names:\n #TODO: store name.name even if name.asname defined \n name_val = name.asname or name.name\n symbol_table[name_val] = stack.get_scopes(src_module=node.module)\n\n elif ntype == \"ClassDef\" or ntype == \"FunctionDef\": \n symbol_table[node.name] = stack.get_scopes()\n \n #NOTE: if a name is being loaded then it already exists and doesn't need\n #to be added to symbol_table\n elif ntype == \"Name\" and not is_load(children) and not has_global(stack.scope_tail(), node.id): \n symbol_table[node.id] = stack.get_scopes()\n\n elif ntype == \"arguments\":\n if node.vararg: \n symbol_table[node.vararg] = stack.get_scopes()\n if node.kwarg:\n symbol_table[node.kwarg] = stack.get_scopes()\n\n elif ntype == \"Global\":\n #add a list global vars on node on the top of \n #the stack\n #nonlocal could be handled in similar way\n set_globals(scopes[-1], node.names)\n\n #set lineno property of children nodes\n set_lineno(node, children)\n\n for child in children[::-1]:\n #set depth of child\n set_depth(child, node.depth + 1)\n #Add children to stack\n stack.append(child)\n\n #Add any new scopes\n #Need to do it here since scoping_nodes are defined in their parent scope\n stack.check_and_push_scope()\n\n print \"Symbol table is \"\n print symbol_table\n return symbol_table" ]
[ "0.6294537", "0.6035416", "0.5964839", "0.57555133", "0.5608731", "0.5571357", "0.5532071", "0.546508", "0.54362375", "0.539975", "0.53911436", "0.53900325", "0.5384787", "0.5346625", "0.52863693", "0.5274382", "0.52723765", "0.5272353", "0.5223648", "0.5216137", "0.52151185", "0.5197586", "0.5186744", "0.51520896", "0.50745434", "0.5061294", "0.50487405", "0.50420356", "0.50192493", "0.500825" ]
0.61611015
1
Turn on the scene.
def run(self) -> None: self._hass.turn_on('scene.{0}'.format(self._args['scene']))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def turn_on(self) -> None:\n self._state = self._player.turn_on()", "def turnOn(self):\n self.off = False\n self.turnOnAnimation()", "def turn_on(self, **kwargs: Any) -> None:\n self._set_light(ON_STATE)", "def turn_on(self, **kwargs):\n self._state = True\n\n # Make initial update\n self.update_switch(self._initial_transition)\n\n self.schedule_update_ha_state()", "def turn_on(self, **kwargs):\n self._is_on = True", "def lightning_turnon(self):\n self.turnOn()", "def turn_on(self, **kwargs) -> None:\n self.heater.turn_on()", "def turn_on(self, **kwargs):\n self._send_command(\"turn_on\")", "def _turn_on(self):\n self._turn_display('ON')", "def show(self):\n self.scene().show()", "def turn_on(self, **kwargs: Any) -> None:\n with self._wemo_call_wrapper(\"turn on\"):\n self.wemo.on()", "def turn_on(self):\n GPIO.output(self.gpio, True) # turn on light", "def turn_on(self, **kwargs):\n self.smartplug.turn_on()", "def scenes_command_on(scene_id, **kwargs):\n return scenes_command_state(scene_id=scene_id, request_type='CMD_SCENE_ON', **kwargs)", "def turn_on(self):\n self._state = True\n self.write_state(bytes([9]))\n self.schedule_update_ha_state()", "def turn_on(self):\n self._remote.power(1)", "def onClick(self):\n self.app.setActiveMode(\"start\")", "def on(self):\n self._set_state(on=True)", "def turn_on(self):\n self._lms.query(self._id, 'power', '1')\n self.update_ha_state()", "def turn_on(self, **kwargs):\n set_sonoff_state(self._host, \"on\")\n self._state = True", "def set_light_on(self):\r\n self._light = \"ON\"", "def set_light_on(self):\n self._light = \"ON\"", "def _on_scene_change(self, *args, **kwargs):\n self.on_scene_change()", "def turn_on(self):\n self._interrupt_flash()\n if not self.on:\n GPIO.output(self.pin, GPIO.HIGH)\n self.on = True", "def turn_on(self, **kwargs):\n _LOGGER.debug(\"Turning on Motion Detection \")\n self.data.set_camera_recording(self._camera_id, \"motion\")", "def activate():\n num = int(var.get())\n return light_controller.activate_scene(light_controller.show_scene_id(scenes[num]))", "def on_show_view(self):\n self.setup()\n arcade.set_background_color(arcade.color.BLACK)", "def start(self):\n self.active = True", "def on_show_view(self):\r\n self.setup()\r\n arcade.set_background_color(BACKGROUND_COLOR)", "def turn_on(self) -> None:\n self._monoprice.set_power(self._zone_id, True)" ]
[ "0.71402043", "0.71132696", "0.70279604", "0.6954936", "0.68721765", "0.68652636", "0.68229365", "0.67939657", "0.67710656", "0.6735029", "0.6730508", "0.6596266", "0.65804565", "0.6536582", "0.6518706", "0.6516858", "0.64804995", "0.64552146", "0.6397112", "0.63840973", "0.6360979", "0.6353104", "0.63384336", "0.6338329", "0.63256687", "0.6288376", "0.6274399", "0.6272724", "0.62661827", "0.62567747" ]
0.7613048
0
print header from fits file to either stdout or to a file
def print_header(fitsfile, ext=0, ofileh=sys.stdout): hdr = fitsio.read_header(fitsfile, ext=ext) ofileh.write(f"{hdr}") ofileh.write("\n")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def print_header(name, texfile):\n texfile.write('\\n')\n texfile.write('%--------------------\\n')\n texfile.write('%---' + name.upper() + ('-' * (17 - len(name))) + '\\n')\n texfile.write('%--------------------\\n')", "def print_header():\n \n print_from_file(\"html/header.html\")", "def header(out_file=sys.stdout, ac=None):\n if ac is not None:\n print(*Features.FEATURE_COLS, \"AC\", sep=\"\\t\", file=out_file)\n else:\n print(*Features.FEATURE_COLS, sep=\"\\t\", file=out_file)", "def print_header():\n print(\"STEM Center Temperature Project\")\n print(\"Shaotong Wen\")", "def header_print(output):\n print(\"\\n----------------------------------------------------------------\")\n print(output)\n print(\"----------------------------------------------------------------\")", "def write_header(self):\r\n if self.arguments['--out']:\r\n self.file = open(self.arguments['--out'], \"w+\")\r\n self.file.write(self.version)\r\n for list_item in self.list_of_header_objects:\r\n self.file.write(list_item.line)\r\n self.file.write(self.body_header_line.line)\r\n self.file.close()\r\n else:\r\n for list_item in self.list_of_header_objects:\r\n print(list_item.line)\r\n print(self.body_header_line.line)", "def test_fitsheader():\n extensions = ('fts', 'fits')\n for ext in extensions:\n for ffile in Path(testpath).glob(f\"*.{ext}*\"):\n fits_file = fits.open(ffile)\n fits_file.verify(\"fix\")\n data, header = fits_file[0].data, fits_file[0].header\n meta_header = MetaDict(OrderedDict(header))\n sunpy.io.fits.header_to_fits(meta_header)", "def print_header(filename):\n\n date_list = filename[0:10].split('_')\n # Hint: CWB Metadata cannot contain dashes -\n name = 'id=\"{}\"'.format(filename[0:-4].replace('-', '_'))\n date = 'date=\"{}\"'.format('_'.join(date_list))\n year = 'year=\"{}\"'.format(date_list[0])\n month = 'month=\"{}\"'.format(date_list[1])\n day = 'day=\"{}\"'.format(date_list[2])\n\n header = '<text {} {} {} {} {}>'.format(name, date, year, month, day)\n\n print(header)", "def write_sff_header(header, fh, num=None):\r\n\r\n lines = [\"Common Header:\"]\r\n if (num is not None):\r\n header[\"# of Flows\"] = num\r\n\r\n lines.extend([\" %s:\\t%s\" % (param, header[param])\r\n for param in header])\r\n fh.write(\"\\n\".join(lines) + \"\\n\\n\")", "def fitshead(imgname):\n try:\n img_header = pyfits.getheader(imgname, ignore_missing_end = True)\n return img_header\n except IOError:\n print \"FITSHEAD: Unable to open FITS image %s. 
Stopping.\" %imgname\n \n return", "def write_header(self, fd):\n fd.write(f\"BEGIN {self.name}\")\n if len(self.data_items) > 0:\n if isinstance(self.data_items[0], mfdatascalar.MFScalar):\n one_based = (\n self.data_items[0].structure.type == DatumType.integer\n )\n entry = self.data_items[0].get_file_entry(\n values_only=True, one_based=one_based\n )\n else:\n entry = self.data_items[0].get_file_entry()\n fd.write(str(entry.rstrip()))\n if len(self.data_items) > 1:\n for data_item in self.data_items[1:]:\n entry = data_item.get_file_entry(values_only=True)\n fd.write(\"%s\" % (entry.rstrip()))\n if self.get_comment().text:\n fd.write(\" \")\n self.get_comment().write(fd)\n fd.write(\"\\n\")", "def writeHeading(fil, nodes, elems, text=''): #currently only for hexahedral mesh\n fil.write(\" CONTROL INFO 2.2.30\\n\")\n fil.write(\"** GAMBIT NEUTRAL FILE\\n\")\n fil.write('%s\\n' %text)\n fil.write('PROGRAM: Gambit VERSION: 2.2.30\\n')\n fil.write(strftime('%d %b %Y %H:%M:%S\\n', gmtime()))\n fil.write(' NUMNP NELEM NGRPS NBSETS NDFCD NDFVL\\n')\n fil.write('%10i%10i%10i%10i%10i%10i\\n' % (shape(nodes)[0],shape(elems)[0],1,0,3,3))\n fil.write('ENDOFSECTION\\n')", "def export_mlab_zone_header(output, header, options):\n headerdata = header.read()\n headerdata = headerdata % options.__dict__\n output.write(headerdata)", "def header(filename):\n\n if not os.path.isfile(filename):\n filename = match_filename(filename,'voltages')\n try:\n print('header length: ', glia.get_header(filename)[1])\n except:\n raise(ValueError, \"Could not get header, are you sure it's a MCD binary export?\")", "def print_header(msg):\n\n tf.print(BColors.BOLD + msg + BColors.ENDC, output_stream=sys.stderr)", "def header(filename):\n\n if not os.path.isfile(filename):\n filename = glia.match_filename(filename,'voltages')\n try:\n print('header length: ', glia.get_header(filename)[1])\n except:\n raise(ValueError, \"Could not get header, are you sure it's a MCD binary export?\")", "def header(self, hdata):\n self = self\n file = open(\"imdb_output.txt\", \"w\")\n file.write(str(\"\\t\".join(hdata)) + \"\\n\")", "def write_header(outfbfile, header_params, header):\n for hp in header_params:\n hdrval = sigproc.addto_hdr(hp, header[hp])\n outfbfile.write(hdrval)", "def print_header():\n print('------------------------------------')\n print(' Lesson04')\n print(' Kata Fourteen Assignment')\n print('------------------------------------\\n')", "def debug_info_header(header):\n print(colored(\"Header:\", 'cyan'), colored(\"Valid FDT magic value found\", \"green\", attrs=['bold']))\n print(colored(\"Header\", 'cyan'), \"-> Total Size of file: \",\n colored('{0:>8d} {0:>#8x}'.format(header.totalsize), 'yellow'))\n print(colored(\"Header\", 'cyan'), \"-> Offset to Struct Block: \",\n colored('{0:>8d} {0:>#8x}'.format(header.off_dt_struct), 'yellow'), \" with size: \",\n colored('{0:>8d} {0:>#8x}'.format(header.size_dt_struct), 'yellow'))\n print(colored(\"Header\", 'cyan'), \"-> Offset to String Block: \",\n colored('{0:>8d} {0:>#8x}'.format(header.off_dt_strings), 'yellow'), \" with size: \",\n colored('{0:>8d} {0:>#8x}'.format(header.size_dt_strings), 'yellow'))\n print(colored(\"Header\", 'cyan'), \"-> Offset to Memory Reser: \",\n colored('{0:>8d} {0:>#8x}'.format(header.off_mem_rsvmap), 'yellow'))\n print(colored(\"Header\", 'cyan'), \"-> Version of DTB: \",\n colored('{0:>8d} {0:>#8x}'.format(header.version), 'yellow'))\n print(colored(\"Header\", 'cyan'), \"-> Previous Version of DTB:\",\n colored('{0:>8d} 
{0:>#8x}'.format(header.last_comp_version), 'yellow'))\n print(colored(\"Header\", 'cyan'), \"-> Boot CPU Number: \",\n colored('{0:>8d} {0:>#8x}'.format(header.boot_cpuid_phys), 'yellow'))\n print()", "def print_header_information():\n\t\tprint \"Elijah Molloy\"\n\t\tprint \"70-510 - Spring 1 - 2018\"\n\t\tprint \"PROGRAMMING ASSIGNMENT #4\\n\"", "def add_header(in_file, file_type):\n\n if file_type == 'bowtie':\n header = \"Read name\\t\" + \"Reference strand\\t\" + \"Name of reference sequence\\t\" \\\n + \"Position alignment occurs\\t\" + \"Read sequence\\t\" + \"Read qualities\\t\" \\\n + \"Ceiling\\t\" + \"Mismatch descriptors\\n\"\n else:\n header = ''\n\n # Temp file for final results including header\n temp_out = tempfile.mkstemp()\n f_in = open(in_file, 'r')\n results = f_in.read()\n f_out = open(temp_out[1] + '.txt', 'w')\n f_out.write(header)\n f_out.write(results)\n\n f_in.close()\n f_out.close()\n return temp_out[1] + '.txt'", "def header(name, value):\n print '%s: %s\\n' % (name, value)", "def write_header(filename, data, lima):\n\tfrom utilities import file_type\n\tfrom EMAN2db import db_open_dict\n\n\tftp = file_type(filename)\n\tif ftp == \"bdb\":\n\t\tDB = db_open_dict(filename)\n\t\tDB.set_header(lima, data)\n\telif ftp == \"hdf\":\n\t\tdata.write_image(filename, lima, EMUtil.ImageType.IMAGE_HDF, True)\n\telse:\n\t\tERROR(\"Unacceptable file format\",\"write_headers\",1)", "def writeheader(fh,colnames):\n for i in range(len(colnames)):\n fh.write('# %d %s\\n'%(i+1,colnames[i]))", "def print_from_file(filepath):\n \n \n header_filehandler = open(filepath, 'r')\n print header_filehandler.read()\n header_filehandler.close()", "def pp_file_header(self):\n self.separator()\n for item in self.file_header:\n print(item.ljust(27, ' ') + \": {}\".format(self.file_header[item]))\n \n self.separator()", "def writeHeader( self ):\n for k in self.secondaryTargets.keys():\n fileName = self.treyGene[k] + \"-GenesinCommon.txt\" \n with open( fileName, 'w' ) as out:\n out.write(\"%s\\t%s\\t%s\\n\" %(\"Gene_trey\", \"Gene\", \"Gene_inCommon\" ))\n out.close()", "def write_header(output_file, line, file_headers):\n output_file.write('%s\\t' % line)\n for index, header in enumerate(file_headers):\n output_file.write(header.strip())\n if index < (len(file_headers) - 1):\n output_file.write('\\t')\n output_file.write('\\n')", "def CosmicFish_write_header(name):\n\n print\n print \"**************************************************************\"\n print \" _____ _ _____ __ \"\n print \" / ___/__ ___ __ _ (_)___/ __(_)__ / / \"\n print \" / /__/ _ \\(_-</ ' \\/ / __/ _// (_-</ _ \\ \"\n print \" \\___/\\___/___/_/_/_/_/\\__/_/ /_/___/_//_/ Py Lib\"\n print \" \"\n print \"**************************************************************\"\n print name\n print \" This application was developed using the CosmicFish code.\"\n print \"**************************************************************\"\n print" ]
[ "0.7188707", "0.69402814", "0.67326516", "0.66753113", "0.6652564", "0.6571709", "0.6567003", "0.65550375", "0.64363533", "0.6409678", "0.6390028", "0.63504195", "0.6322972", "0.63169914", "0.6312098", "0.6295499", "0.6233899", "0.62321246", "0.6227358", "0.6210287", "0.61959153", "0.61947453", "0.61765575", "0.61713994", "0.6166106", "0.61612743", "0.61574847", "0.6120252", "0.61169964", "0.6116172" ]
0.86701775
0