column           type    range
query            string  lengths 9 – 9.05k
document         string  lengths 10 – 222k
metadata         dict
negatives        list    length 30
negative_scores  list    length 30
document_score   string  lengths 4 – 10
document_rank    string  2 classes
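For orientation, a minimal sketch of loading and iterating records of this shape with the Hugging Face `datasets` library, assuming a JSON-lines export; the filename `code_retrieval_triplets.jsonl` is a placeholder, not the dataset's actual name.

```python
# Minimal sketch, assuming a JSON-lines export of this dataset;
# the file path below is a placeholder.
from datasets import load_dataset

ds = load_dataset("json", data_files="code_retrieval_triplets.jsonl", split="train")
for record in ds:
    query = record["query"]              # natural-language docstring
    document = record["document"]        # positive code snippet
    negatives = record["negatives"]      # 30 hard-negative snippets
    scores = record["negative_scores"]   # scores aligned with `negatives`
    assert len(negatives) == len(scores) == 30
```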
Method to view virtual panels; if a panel ID is given, only virtual panels from that panel are returned.
def view_virtual_panels(id=None):
    if not id:
        id = request.args.get('id')
    if id:
        panels = get_virtual_panels_by_panel_id(s, id)
    else:
        panels = get_virtual_panels_simple(s)
    result = []
    panel_name = "Virtual"
    for i in panels:
        row = dict(zip(i.keys(), i))
        row["current_version"] = round(row["current_version"], 1)
        row["status"] = check_virtualpanel_status(s, row["id"])
        row["permission"] = check_user_has_permission(s, current_user.id, row["projectid"])
        row["locked"] = check_if_locked_by_user_vpanel(s, current_user.id, row["panelid"])
        if id:
            panel_name = row['panelname'] + ' Virtual'
        # if check_user_has_permission(s, current_user.id, row["projectid"]):
        #     result.append(row)
        result.append(row)
    table = ItemTableVPanels(result, classes=['table', 'table-striped'])
    return render_template('panels.html', panels=table, project_name=panel_name,
                           message='Virtual Panels are locked if their parent panel is being edited')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def view_vpanel():\n id = request.args.get('id')\n try:\n version = request.form[\"versions\"]\n except KeyError:\n version = None\n if id:\n status = check_virtualpanel_status(s, id)\n if not status:\n message = \"This panel has changes which cannot be viewed here as they have not been made live yet, if you have permission you can view these by editing the panel\"\n else:\n message = None\n panel_details = get_vpanel_details_by_id(s, id)\n for i in panel_details:\n if not version:\n version = panel_details.current_version\n panel_name = panel_details.name\n project_id = panel_details.project_id\n panel = get_regions_by_vpanelid(s, id, version)\n result = []\n rows = list(panel)\n if len(rows) != 0:\n bed = ''\n for i in rows:\n row = dict(zip(i.keys(), i))\n result.append(row)\n panel_name = i.panel_name\n current_version = i.current_version\n else:\n message = \"This Panel has no regions yet & may also have changes that have not been made live yet\"\n bed = 'disabled'\n current_version = version\n print(type(version))\n current_version = round(current_version, 1)\n version = round(float(version), 1)\n\n if check_user_has_permission(s, current_user.id, project_id):\n edit = ''\n else:\n edit = 'disabled'\n\n form = ViewPanel()\n v_list = get_prev_versions_vp(s, id)\n choices = []\n for i in v_list:\n choices.append((i, i))\n\n if (current_version, current_version) not in choices:\n choices.append((current_version, current_version))\n\n form.versions.choices = choices\n form.versions.default = current_version\n form.process()\n\n table = []\n\n for i in result:\n line = []\n line.append(i['chrom'])\n line.append(str(i['region_start']))\n line.append(str(i['region_end']))\n line.append(i['gene_name'])\n line.append(i['name'].replace(',', ' '))\n table.append(line)\n\n return render_template('panel_view.html', table=json.dumps(table), panel=table, panel_name=panel_name,\n edit=edit, bed=bed,\n version=version, panel_id=id, message=message, url=url_for('panels.view_vpanel'),\n scope='Virtual', form=form)\n\n else:\n return redirect(url_for('panels.view_virtual_panels'))", "def view_panel():\n id = request.args.get('id')\n try:\n version = request.form[\"versions\"]\n except KeyError:\n version = None\n if id:\n status = check_panel_status(s, id)\n if not status:\n message = \"This panel has changes which cannot be viewed here as they have not been made live yet, if you have permission you can view these by editing the panel\"\n else:\n message = None\n panel_details = get_panel_details_by_id(s, id)\n if not version:\n version = panel_details.current_version\n panel_name = panel_details.name\n panel = get_regions_by_panelid(s, id, version)\n project_id = get_project_id_by_panel_id(s, id)\n result = []\n rows = list(panel)\n if len(rows) != 0:\n bed = ''\n for i in rows:\n row = dict(zip(i.keys(), i))\n result.append(row)\n # panel_name = i.panel_name\n current_version = i.current_version\n else:\n message = \"This Panel has no regions yet & may also have changes that have not been made live\"\n bed = 'disabled'\n current_version = version\n\n if check_user_has_permission(s, current_user.id, project_id):\n edit = ''\n else:\n edit = 'disabled'\n\n form = ViewPanel()\n v_list = range(1, current_version + 1)\n choices = []\n for i in v_list:\n choices.append((i, i))\n form.versions.choices = choices\n form.versions.default = version\n form.process()\n\n table = []\n\n for i in result:\n line = []\n line.append(i['chrom'])\n line.append(str(i['region_start']))\n 
line.append(str(i['region_end']))\n line.append(i['gene_name'])\n line.append(i['name'].replace(',', ' '))\n table.append(line)\n return render_template('panel_view.html', scope='Panel', table=json.dumps(table), panel=table,\n panel_name=panel_name, edit=edit, bed=bed,\n version=version, panel_id=id, project_id=project_id, message=message,\n url=url_for('panels.view_panel'),\n form=form)\n\n else:\n return redirect(url_for('panels.view_panels'))", "def view_panels(id=None):\n if not id:\n id = request.args.get('id')\n\n if id:\n panels = get_panels_by_project_id(s, id)\n else:\n panels = get_panels(s)\n result = []\n project_name = \"All\"\n for i in panels:\n row = dict(zip(i.keys(), i))\n status = check_panel_status(s, row[\"panelid\"])\n row[\"status\"] = status\n permission = check_user_has_permission(s, current_user.id, row[\"projectid\"])\n locked = check_if_locked(s, row[\"panelid\"])\n row['permission'] = permission\n row['locked'] = locked\n\n if id:\n project_name = row['projectname']\n # if check_user_has_permission(s, current_user.id, row[\"projectid\"]):\n # result.append(row)\n result.append(row)\n table = ItemTablePanels(result, classes=['table', 'table-striped'])\n return render_template('panels.html', panels=table, project_name=project_name)", "def make_virtualpanel_live():\n vpanelid = request.args.get('id')\n panelid = get_panel_by_vp_id(s, vpanelid)\n locked = check_if_locked(s, panelid)\n if locked:\n if current_user.id == get_locked_user(s, panelid):\n make_vp_panel_live(s, vpanelid)\n add_to_starlims(vpanelid)\n return redirect(url_for('panels.view_virtual_panels'))\n else:\n make_vp_panel_live(s, vpanelid)\n add_to_starlims(vpanelid)\n return redirect(url_for('panels.view_virtual_panels'))", "def get_panel(self, panel_id):\n return self.panels.get(panel_id, None)", "def load_panelapp_panel(adapter, panel_id=None, institute=\"cust000\", confidence=\"green\"):\n panel_ids = [panel_id]\n\n if not panel_id:\n LOG.info(\"Fetching all panel app panels\")\n panel_ids = _panelapp_panel_ids()\n\n for _ in panel_ids:\n parsed_panel = _parse_panelapp_panel(adapter, _, institute, confidence)\n\n if len(parsed_panel[\"genes\"]) == 0:\n LOG.warning(\"Panel %s is missing genes. 
Skipping.\", parsed_panel[\"display_name\"])\n continue\n\n try:\n adapter.load_panel(parsed_panel=parsed_panel, replace=True)\n except Exception as err:\n raise err", "def get_vpanel(self):\n if not self.vpanel:\n self.vpanel = VerticalPanel()\n RootPanel().add(self.vpanel)\n return self.vpanel", "def edit_virtual_panel_process():\n form = EditVirtualPanelProcess()\n\n vp_id = request.args.get('id')\n panel_id = get_panel_by_vp_id(s, vp_id)\n if request.method == \"POST\":\n if request.form['make_live'] == \"on\":\n make_vp_panel_live(s, vp_id)\n add_to_starlims(vp_id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_vpanel') + \"?id=\" + vp_id)\n elif request.method == \"GET\":\n lock_panel(s, current_user.id, panel_id)\n panel_info = get_panel_details_by_id(s, panel_id)\n panel_name = panel_info.name\n form.panel.choices = [(panel_id, panel_name), ]\n\n panel_version = get_current_version(s, panel_id)\n panel_genes = get_genes_by_panelid(s, panel_id, panel_version)\n vp_info = get_vpanel_details_by_id(s, vp_id)\n vp_version = vp_info.current_version\n vp_name = vp_info.name\n form.vpanelname.data = vp_name\n vp_genes = get_genes_by_vpanelid_edit(s, vp_id, vp_version)\n genelist = \"\"\n vp_list = []\n for i in vp_genes:\n vp_list.append(i.id)\n\n genes = []\n print('new method')\n for i in panel_genes:\n if i.id in vp_list:\n genes.append({\"name\": i.name, \"id\": i.id, \"vp_list\": True})\n button = render_template(\"gene_button.html\", gene_name=i.name, gene_id=i.id, added=True)\n genelist += button\n\n else:\n genes.append({\"name\": i.name, \"id\": i.id, \"vp_list\": False})\n\n gene_html = render_template(\"panel_genes.html\", panel_genes=genes)\n\n url = url_for('panels.edit_virtual_panel_process') + '?id=' + str(vp_id)\n return render_template('virtualpanels_createprocess.html', form=form, genes=gene_html, genelist=genelist,\n vp_id=vp_id, panel_name=vp_name, current_version=vp_version, url=url)", "def create_virtual_panel_process():\n form = CreateVirtualPanelProcess()\n\n if request.method == \"POST\":\n make_live = request.form['make_live']\n vp_id = request.args.get('id')\n if make_live == \"on\":\n make_vp_panel_live(s, vp_id)\n add_to_starlims(vp_id)\n panel_id = get_panel_by_vp_id(s, vp_id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_vpanel') + \"?id=\" + vp_id)\n elif request.method == \"GET\":\n form.panel.choices = get_panel_choices(s, current_user.id)\n url = url_for('panels.create_virtual_panel_process')\n return render_template('virtualpanels_createprocess.html', form=form, url=url, vp_id=\"main\")", "def getPanels(self, LibraryID):\n response = self.request(\"getPanels\", LibraryID=LibraryID)\n if not response:\n return None\n return response[\"Result\"][\"Panels\"]", "def _panelapp_panel_ids():\n json_lines = fetch_resource(PANELAPP_BASE_URL.format(\"list_panels\"), json=True)\n return [panel_info[\"Panel_Id\"] for panel_info in json_lines.get(\"result\", [])]", "def check_virtualpanel_status(s, id):\n panels = check_virtualpanel_status_query(s, id)\n status = True\n for i in panels:\n if i.intro > i.current_version:\n status = False\n break\n if i.last is not None:\n if i.last == i.current_version:\n status = False\n break\n\n return status", "def _viewer_by_id(self, vid):\n return self._viewer_store.get(vid)", "def generate_panel(self):\r\n \r\n self.PanelData = self.RawData.filter(['ID', 'X', 'Z', 'W', 'R', 'β', 'LFP', 'H'], axis=1)", "def _handler_voxel_view(self,event):\n 
self._mgr.LoadPerspective(\n\t\t\tself._perspectives['voxel_view'])", "def _independent_panel(width, height):\n\n from maya import cmds\n\n # center panel on screen\n screen_width, screen_height = _get_screen_size()\n topLeft = [int((screen_height-height)/2.0),\n int((screen_width-width)/2.0)]\n\n window = cmds.window(width=width,\n height=height,\n topLeftCorner=topLeft,\n menuBarVisible=False,\n titleBar=False)\n cmds.paneLayout()\n panel = cmds.modelPanel(menuBarVisible=False,\n label='CapturePanel')\n\n # Hide icons under panel menus\n bar_layout = cmds.modelPanel(panel, q=True, barLayout=True)\n cmds.frameLayout(bar_layout, e=True, collapse=True)\n\n cmds.showWindow(window)\n\n # Set the modelEditor of the modelPanel as the active view so it takes\n # the playback focus. Does seem redundant with the `refresh` added in.\n editor = cmds.modelPanel(panel, query=True, modelEditor=True)\n cmds.modelEditor(editor, e=1, activeView=True)\n\n # Force a draw refresh of Maya so it keeps focus on the new panel\n # This focus is required to force preview playback in the independent panel\n cmds.refresh(force=True)\n\n try:\n yield panel\n finally:\n # Delete the panel to fix memory leak (about 5 mb per capture)\n cmds.deleteUI(panel, panel=True)\n cmds.deleteUI(window)", "def _parse_panelapp_panel(adapter, panel_id, institute, confidence):\n hgnc_map = adapter.ensembl_to_hgnc_mapping()\n json_lines = fetch_resource(PANELAPP_BASE_URL.format(\"get_panel\") + panel_id, json=True)\n parsed_panel = parse_panel_app_panel(\n panel_info=json_lines[\"result\"],\n hgnc_map=hgnc_map,\n institute=institute,\n confidence=confidence,\n )\n if confidence != \"green\":\n parsed_panel[\"panel_id\"] = \"_\".join([panel_id, confidence])\n else: # This way the old green panels will be overwritten, instead of creating 2 sets of green panels, old and new\n parsed_panel[\"panel_id\"] = panel_id\n\n return parsed_panel", "def panels(self, request, panel_list, group):\n return panel_list", "def detail(request, target_id):\n temp_values = {\n \"subscroll\":True,\n }\n return render(request, 'server/detail.html', temp_values)", "def getview(viewid) :\n\treturn Jikji.getinstance().getview(viewid)", "def get_active_panel(cls):\n active_panel = None\n panel_list = pm.getPanel(type='modelPanel')\n for panel in panel_list:\n if pm.modelEditor(panel, q=1, av=1):\n active_panel = panel\n break\n\n return active_panel", "def show_sddc_ipsec_vpn(**kwargs):\n proxy = kwargs['proxy']\n session_token = kwargs['sessiontoken']\n json_response = get_sddc_vpn_info_json(proxy, session_token)\n sddc_vpn = json_response['results']\n table = PrettyTable(['Name', 'ID', 'Local Address', 'Remote Address'])\n for i in sddc_vpn:\n table.add_row([i['display_name'], i['id'], i['local_endpoint_path'].strip(\"/infra/tier-0s/vmc/ipsec-vpn-services/default/local-endpoints/\"), i['peer_address']])\n sys.exit(table)", "def panel_show(keyword):\n\n nodes = sorted(\n (\n n\n for n in nuke.allNodes()\n if keyword in n.name()\n and not nuke.numvalue(\n cast_str(\"%s.disable\" % n.name()),\n 0,\n )\n ),\n key=lambda n: cast_text(n.name()),\n reverse=True,\n )\n for n in nodes:\n n.showControlPanel()", "def _viewer_item_by_id(self, vid):\n def find_viewer_item(stack_items):\n for stack_item in stack_items:\n for viewer_item in stack_item.get('viewers'):\n if viewer_item['id'] == vid:\n return viewer_item\n\n if len(stack_item.get('children')) > 0:\n return find_viewer_item(stack_item.get('children'))\n\n viewer_item = find_viewer_item(self.state.stack_items)\n\n 
return viewer_item", "def get_score_panel(self, idattr):\n return self.get_node('//ScorePanels/ScorePanel[@id=\"%s\"]' % idattr)", "def get_product_panels(self, product_bp, panel_list):\n for child in product_bp.children:\n props = props_closet.get_object_props(child)\n if props.is_panel_bp:\n assembly = fd_types.Assembly(child)\n if common_closet_utils.part_is_not_hidden(assembly):\n panel_list.append(child)\n self.get_product_panels(child, panel_list)\n panel_list.sort(key=lambda obj: obj.location.x, reverse=False)\n return panel_list", "def add_vp():\n vp_name = request.json['vp_name']\n panel_id = request.json['panel_id']\n vp_id = create_virtualpanel_query(s, vp_name, panel_id)\n if vp_id != -1:\n lock_panel(s, current_user.id, panel_id)\n return jsonify(vp_id)", "def _sidepanel(self):\n global curdir\n global sidepanel_layout\n global root\n\n #Create the sidepanel layout.\n sidepanel_layout = BoxLayout(orientation=\"vertical\", pos_hint={\"x\": 0.0, \"top\": 0.92}, size_hint=(0.1, 0.92))\n\n #If \"curdir\" contains folders, a button is created for each, and bind the button to update the\n # showphotos layout.\n if curdir == \" \":\n return sidepanel_layout\n else:\n root.remove_widget(sidepanel_layout)\n for folders in sorted(glob(join(curdir, \"thumb\", \"*\"))):\n name = basename(folders)\n btn = Button(text=name, on_press=lambda n=name: self._update_scroll(n.text))\n sidepanel_layout.add_widget(btn)\n root.add_widget(sidepanel_layout)\n sidepanel_layout.do_layout()", "def getPanel(self, LibraryID, PanelID, EmbeddedData=None, LastRecipientID=None, NumberOfRecords=None,\n ExportLanguage=None, Unsubscribed=None, Subscribed=None, **kwargs):\n return self.request(\n \"getPanel\",\n LibraryID=LibraryID,\n PanelID=PanelID,\n EmbeddedData=EmbeddedData,\n LastRecipientID=LastRecipientID,\n NumberOfRecords=NumberOfRecords,\n ExportLanguage=ExportLanguage,\n Unsubscribed=Unsubscribed,\n Subscribed=Subscribed,\n **kwargs\n )", "def show(self):\n self.window.run_command(\"show_panel\", {\"panel\": self.full_name})" ]
[ "0.6817255", "0.6743722", "0.6515746", "0.64239585", "0.61308205", "0.59219176", "0.57346356", "0.55168694", "0.54971176", "0.54806674", "0.5439954", "0.5401917", "0.52581835", "0.52198446", "0.51601046", "0.5151167", "0.51509696", "0.5147451", "0.5058827", "0.504147", "0.49991512", "0.4990909", "0.49214628", "0.49213442", "0.49045074", "0.49036855", "0.48972303", "0.4895045", "0.4880346", "0.48554894" ]
0.80881995
0
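The last three fields of each record fit together: `negative_scores` is aligned index-for-index with `negatives`, `document_score` is the positive document's own retrieval score, and `document_rank` appears to count how many negatives outscore the positive. A small sketch under that assumption; for this record, 0.80881995 beats the best negative score (0.6817255), giving rank 0.

```python
# Hedged sketch: recomputing document_rank from the stored scores,
# assuming rank = number of negatives scoring above the positive.
def recompute_rank(document_score, negative_scores):
    doc = float(document_score)
    return sum(float(s) > doc for s in negative_scores)

# First record: recompute_rank("0.80881995", [...]) == 0
```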
Method to view the regions in a virtual panel. The method checks permissions to determine whether editing and related actions are available.
def view_vpanel():
    id = request.args.get('id')
    try:
        version = request.form["versions"]
    except KeyError:
        version = None
    if id:
        status = check_virtualpanel_status(s, id)
        if not status:
            message = "This panel has changes which cannot be viewed here as they have not been made live yet, if you have permission you can view these by editing the panel"
        else:
            message = None
        panel_details = get_vpanel_details_by_id(s, id)
        for i in panel_details:
            if not version:
                version = panel_details.current_version
            panel_name = panel_details.name
            project_id = panel_details.project_id
        panel = get_regions_by_vpanelid(s, id, version)
        result = []
        rows = list(panel)
        if len(rows) != 0:
            bed = ''
            for i in rows:
                row = dict(zip(i.keys(), i))
                result.append(row)
                panel_name = i.panel_name
                current_version = i.current_version
        else:
            message = "This Panel has no regions yet & may also have changes that have not been made live yet"
            bed = 'disabled'
            current_version = version
        current_version = round(current_version, 1)
        version = round(float(version), 1)

        if check_user_has_permission(s, current_user.id, project_id):
            edit = ''
        else:
            edit = 'disabled'

        form = ViewPanel()
        v_list = get_prev_versions_vp(s, id)
        choices = []
        for i in v_list:
            choices.append((i, i))

        if (current_version, current_version) not in choices:
            choices.append((current_version, current_version))

        form.versions.choices = choices
        form.versions.default = current_version
        form.process()

        table = []
        for i in result:
            line = []
            line.append(i['chrom'])
            line.append(str(i['region_start']))
            line.append(str(i['region_end']))
            line.append(i['gene_name'])
            line.append(i['name'].replace(',', ' '))
            table.append(line)

        return render_template('panel_view.html', table=json.dumps(table), panel=table, panel_name=panel_name,
                               edit=edit, bed=bed,
                               version=version, panel_id=id, message=message, url=url_for('panels.view_vpanel'),
                               scope='Virtual', form=form)
    else:
        return redirect(url_for('panels.view_virtual_panels'))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def view_panel():\n id = request.args.get('id')\n try:\n version = request.form[\"versions\"]\n except KeyError:\n version = None\n if id:\n status = check_panel_status(s, id)\n if not status:\n message = \"This panel has changes which cannot be viewed here as they have not been made live yet, if you have permission you can view these by editing the panel\"\n else:\n message = None\n panel_details = get_panel_details_by_id(s, id)\n if not version:\n version = panel_details.current_version\n panel_name = panel_details.name\n panel = get_regions_by_panelid(s, id, version)\n project_id = get_project_id_by_panel_id(s, id)\n result = []\n rows = list(panel)\n if len(rows) != 0:\n bed = ''\n for i in rows:\n row = dict(zip(i.keys(), i))\n result.append(row)\n # panel_name = i.panel_name\n current_version = i.current_version\n else:\n message = \"This Panel has no regions yet & may also have changes that have not been made live\"\n bed = 'disabled'\n current_version = version\n\n if check_user_has_permission(s, current_user.id, project_id):\n edit = ''\n else:\n edit = 'disabled'\n\n form = ViewPanel()\n v_list = range(1, current_version + 1)\n choices = []\n for i in v_list:\n choices.append((i, i))\n form.versions.choices = choices\n form.versions.default = version\n form.process()\n\n table = []\n\n for i in result:\n line = []\n line.append(i['chrom'])\n line.append(str(i['region_start']))\n line.append(str(i['region_end']))\n line.append(i['gene_name'])\n line.append(i['name'].replace(',', ' '))\n table.append(line)\n return render_template('panel_view.html', scope='Panel', table=json.dumps(table), panel=table,\n panel_name=panel_name, edit=edit, bed=bed,\n version=version, panel_id=id, project_id=project_id, message=message,\n url=url_for('panels.view_panel'),\n form=form)\n\n else:\n return redirect(url_for('panels.view_panels'))", "def RegionList(self):\n command = \"\"\"\n IPython.notebook.kernel.execute(\"RegionList=\" + JSON.stringify(JS9.GetShapes(\"regions\", {{display: '{wid}JS9'}})));\n \"\"\".format(wid=self.wid)\n get_ipython().run_cell_magic('javascript', '', command)", "def render_regions(view=None):\r\n # Get current active view\r\n if view is None:\r\n view = sublime.active_window().active_view()\r\n # Unable to set regions when no view available\r\n if view is None:\r\n return\r\n\r\n # Do no set regions if view is empty or still loading\r\n if view.size() == 0 or view.is_loading():\r\n return\r\n\r\n # Remove all markers to avoid marker conflict\r\n view.erase_regions(S.REGION_KEY_BREAKPOINT)\r\n view.erase_regions(S.REGION_KEY_CURRENT)\r\n view.erase_regions(S.REGION_KEY_DISABLED)\r\n\r\n # Get filename of current view and check if is a valid filename\r\n filename = view.file_name()\r\n if not filename:\r\n return\r\n\r\n # Determine icon for regions\r\n icon_current = get_region_icon(S.KEY_CURRENT_LINE)\r\n icon_disabled = get_region_icon(S.KEY_BREAKPOINT_DISABLED)\r\n icon_enabled = get_region_icon(S.KEY_BREAKPOINT_ENABLED)\r\n\r\n # Get all (disabled) breakpoint rows (line numbers) for file\r\n breakpoint_rows = []\r\n disabled_rows = []\r\n if filename in S.BREAKPOINT and isinstance(S.BREAKPOINT[filename], dict):\r\n for lineno, bp in S.BREAKPOINT[filename].items():\r\n # Do not show temporary breakpoint\r\n if S.BREAKPOINT_RUN is not None and S.BREAKPOINT_RUN['filename'] == filename and S.BREAKPOINT_RUN['lineno'] == lineno:\r\n continue\r\n # Determine if breakpoint is enabled or disabled\r\n if bp['enabled']:\r\n breakpoint_rows.append(lineno)\r\n else:\r\n 
disabled_rows.append(lineno)\r\n\r\n # Get current line from breakpoint hit\r\n if S.BREAKPOINT_ROW is not None:\r\n # Make sure current breakpoint is in this file\r\n if filename == S.BREAKPOINT_ROW['filename']:\r\n # Remove current line number from breakpoint rows to avoid marker conflict\r\n if S.BREAKPOINT_ROW['lineno'] in breakpoint_rows:\r\n breakpoint_rows.remove(S.BREAKPOINT_ROW['lineno'])\r\n # Set icon for current breakpoint\r\n icon_breakpoint_current = get_region_icon(S.KEY_BREAKPOINT_CURRENT)\r\n if icon_breakpoint_current:\r\n icon_current = icon_breakpoint_current\r\n if S.BREAKPOINT_ROW['lineno'] in disabled_rows:\r\n disabled_rows.remove(S.BREAKPOINT_ROW['lineno'])\r\n # Set current line marker\r\n if icon_current:\r\n view.add_regions(S.REGION_KEY_CURRENT, rows_to_region(S.BREAKPOINT_ROW['lineno']), S.REGION_SCOPE_CURRENT, icon_current, sublime.HIDDEN)\r\n\r\n # Set breakpoint marker(s)\r\n if breakpoint_rows and icon_enabled:\r\n view.add_regions(S.REGION_KEY_BREAKPOINT, rows_to_region(breakpoint_rows), S.REGION_SCOPE_BREAKPOINT, icon_enabled, sublime.HIDDEN)\r\n if disabled_rows and icon_disabled:\r\n view.add_regions(S.REGION_KEY_DISABLED, rows_to_region(disabled_rows), S.REGION_SCOPE_BREAKPOINT, icon_disabled, sublime.HIDDEN)", "def view_virtual_panels(id=None):\n if not id:\n id = request.args.get('id')\n if id:\n panels = get_virtual_panels_by_panel_id(s, id)\n else:\n panels = get_virtual_panels_simple(s)\n result = []\n panel_name = \"Virtual\"\n for i in panels:\n row = dict(zip(i.keys(), i))\n\n row[\"current_version\"] = round(row[\"current_version\"], 1)\n\n status = check_virtualpanel_status(s, row[\"id\"])\n row[\"status\"] = status\n permission = check_user_has_permission(s, current_user.id, row[\"projectid\"])\n locked = check_if_locked_by_user_vpanel(s, current_user.id, row[\"panelid\"])\n\n row['permission'] = permission\n row['locked'] = locked\n\n status = check_virtualpanel_status(s, row[\"id\"])\n row[\"status\"] = status\n\n if id:\n panel_name = row['panelname'] + ' Virtual'\n # if check_user_has_permission(s, current_user.id, row[\"projectid\"]):\n # result.append(row)\n result.append(row)\n table = ItemTableVPanels(result, classes=['table', 'table-striped'])\n return render_template('panels.html', panels=table, project_name=panel_name,\n message='Virtual Panels are locked if their parent panel is being edited')", "def create_panel_custom_regions():\n panel_id = request.json[\"panel_id\"]\n chrom = request.json[\"chrom\"]\n start = request.json[\"start\"]\n end = request.json[\"end\"]\n name = request.json[\"name\"]\n regions = select_region_by_location(s, chrom, start, end) # if region already exists, return current entry\n if regions:\n for i in regions:\n add_region_to_panel(s, i.id, panel_id)\n s.commit()\n continue\n else:\n create_custom_region(s, panel_id, chrom, start, end, name)\n\n return jsonify(\"complete\")", "def draw_visible_area(self):\n\n if not self.given_center:\n return\n\n # Figure out what regions we should be showing\n horiz_width = self.hbar.pageStep()\n gui_start_x = self.hbar.value()\n vert_height = self.vbar.pageStep()\n gui_start_y = self.vbar.value()\n (game_min_x, game_max_y) = self.scene_to_ingame(gui_start_x, gui_start_y)\n (game_max_x, game_min_y) = self.scene_to_ingame(\n gui_start_x + horiz_width,\n gui_start_y + vert_height,\n )\n min_rx = game_min_x//32 - 1\n max_rx = game_max_x//32 + 1\n min_ry = game_min_y//32 - 1\n max_ry = game_max_y//32 + 1\n\n # First find out how many regions we're going to have to load 
(so that\n # we can initialize a progressbar)\n valid_regions = set()\n regions_to_load = []\n for rx in range(min_rx, max_rx+1):\n for ry in range(min_ry, max_ry+1):\n region = (rx, ry)\n valid_regions.add(region)\n if region in self.regions:\n if not self.regions[region].loaded:\n regions_to_load.append(region)\n\n # Initialize progressbar\n region_loading = self.mainwindow.region_loading\n region_loading.start(len(regions_to_load))\n\n # Now actually do the loading\n for idx, region in enumerate(regions_to_load):\n #print('Loading region {}'.format(region))\n self.regions[region].load()\n self.loaded_regions.add(region)\n region_loading.update(idx)\n\n # Unload regions which are too far out\n regions_to_unload = []\n for region in list(self.loaded_regions):\n if region not in valid_regions:\n regions_to_unload.append(region)\n\n region_loading.start(len(regions_to_unload), label='Unloading Regions')\n for idx, region in enumerate(regions_to_unload):\n #print('Unloading region {}'.format(region))\n self.regions[region].unload()\n self.loaded_regions.remove(region)\n region_loading.update(idx)\n\n # Finish our progress bar\n region_loading.finish()", "def add_panel_regions():\n version_ids = request.json['id_ext']\n panel_id = request.json['panel_id']\n project_id = request.json['project_id']\n gene_name = request.json['gene_name']\n\n try:\n tx_id = request.json['pref_tx_id']\n add_preftxs_to_panel(s, project_id, [{\"gene\": gene_name, \"tx_id\": tx_id}, ])\n except KeyError:\n pass\n\n for i in version_ids:\n if i[\"ext_5\"] == 0:\n ext_5 = None\n else:\n ext_5 = i[\"ext_5\"]\n\n if i[\"ext_3\"] == 0:\n ext_3 = None\n else:\n ext_3 = i[\"ext_3\"]\n add_region_to_panel(s, i[\"id\"], panel_id, ext_3=ext_3, ext_5=ext_5)\n s.commit()\n return jsonify(\"complete\")", "def run_view(self, expanded, unexpanded) :\n\t\treturn self.manage_view_properties(expanded, unexpanded, \"\", perms = \"View\")", "def add_all_regions_vp():\n gene_id = request.json['gene_id']\n vpanel_id = request.json['vpanel_id']\n panel_id = request.json['panel_id']\n add_all_regions_to_vp(s, panel_id, gene_id, vpanel_id)\n return jsonify({\"genes\": [gene_id, ]})", "def view_system():\n\n pass", "def rendered_regions(self, obj):\n return obj.render_json(self.context.get('request'))", "def _handler_voxel_view(self,event):\n self._mgr.LoadPerspective(\n\t\t\tself._perspectives['voxel_view'])", "def list_regions():\n regions_areas = (\n db.session.query(\n models.Region.code.label(\"region_code\"),\n models.Region.name.label(\"region_name\"),\n db.case([(models.District.code.is_(None),\n db.literal_column(\"'admin_area'\"))],\n else_=db.literal_column(\"'district'\")).label(\"area_type\"),\n db.case([(models.District.code.is_(None), models.AdminArea.code)],\n else_=models.District.code).label(\"area_code\"),\n db.case([(models.District.code.is_(None), models.AdminArea.name)],\n else_=models.District.name).label(\"area_name\")\n ).select_from(models.Region)\n .join(models.Region.areas)\n .outerjoin(models.AdminArea.districts)\n .filter(models.Region.code != \"GB\")\n .order_by(\"region_name\", \"area_name\")\n .all()\n )\n regions = {}\n areas = {}\n for row in regions_areas:\n regions[row.region_code] = row.region_name\n areas.setdefault(row.region_code, []).append(row)\n\n return render_template(\"regions.html\", regions=regions, areas=areas)", "def vtk_viewer(request):\n try:\n data = _refresh(request)\n except Exception:\n data = {}\n data['main'] = 'main'\n data['error'] = 'error'\n data['search'] = {\n 'help': ''\n }\n 
options = {\n 'resizable': True\n }\n data['options'] = mark_safe(json.dumps(options))\n return render(\n request,\n 'vtk_view/cdat_viewer.html',\n data\n )", "def layout_graphics(self):\n # Graphics layout object to place viewboxes in\n self.g_layout = pg.GraphicsLayoutWidget(border=(80, 80, 80))\n self.g_layout.setCursor(QtCore.Qt.CrossCursor)\n\n # Viewboxes for images\n # aspect locked so that pixels are square\n # y inverted so that (0,0) is top left as in Thorlabs software\n options = {\"lockAspect\":True, \"invertY\":True}\n self.vb_image = self.g_layout.addViewBox(row=0, col=0, rowspan=2, **options)\n self.vb_zoom = self.g_layout.addViewBox(row=0, col=2, **options)\n self.vb_residuals = self.g_layout.addViewBox(row=1, col=2, **options)\n\n # Link zoom and residual views\n self.vb_zoom.setXLink(self.vb_residuals)\n self.vb_zoom.setYLink(self.vb_residuals)\n\n # Viewboxes for slice data\n # Both boxes have mouse disabled - range is fixed so we don't want to\n # scale them accidentally\n # Y box has y inverted to match the main image\n # Y box has x inverted so that zero pixel value is far from the image\n options = {\"enableMouse\":False, \"enableMenu\": False}\n self.vb_x = self.g_layout.addViewBox(row=2, col=0, **options)\n self.vb_y = self.g_layout.addViewBox(row=0, col=1, rowspan=2,\n invertX=True, invertY=True, **options)\n\n # Link the slice axes to the main image so that when we zoom/pan the\n # main image, our slices zoom/pan also\n self.vb_x.setXLink(self.vb_image)\n self.vb_y.setYLink(self.vb_image)\n\n # Disable autoscaling and fix range to maximum pixel intensity\n self.vb_x.setRange(yRange=(0,255))\n self.vb_y.setRange(xRange=(0,255))\n self.vb_x.disableAutoRange(axis=self.vb_x.YAxis)\n self.vb_y.disableAutoRange(axis=self.vb_y.XAxis)\n\n # Background color must not be black so that we can see where images\n # start/end\n color = pg.mkColor(40,40,40)\n self.vb_image.setBackgroundColor(color)\n self.vb_zoom.setBackgroundColor(color)\n self.vb_residuals.setBackgroundColor(color)\n self.vb_x.setBackgroundColor(color)\n self.vb_y.setBackgroundColor(color)\n self.g_layout.setBackground(color)\n\n self.vb_image.addItem(self.image)\n self.vb_image.addItem(self.fit_v_line)\n self.vb_image.addItem(self.fit_h_line)\n self.vb_image.addItem(self.mark_v_line)\n self.vb_image.addItem(self.mark_h_line)\n # self.vb_image.addItem(self.cursor_text)\n self.vb_image.addItem(self.cursor_delta)\n self.vb_image.addItem(self.beam_delta)\n self.vb_image.addItem(self.history_plot)\n # Figure out how to overlay properly?\n # self.vb_image.addItem(self.x_slice)\n # self.vb_image.addItem(self.x_fit)\n # self.vb_image.addItem(self.y_slice)\n # self.vb_image.addItem(self.y_fit)\n self.vb_zoom.addItem(self.zoom)\n self.vb_zoom.addItem(self.fit_maj_line)\n self.vb_zoom.addItem(self.fit_min_line)\n self.vb_zoom.addItem(self.zoom_text)\n self.vb_residuals.addItem(self.residuals)\n self.vb_residuals.addItem(self.residuals_text)\n self.vb_x.addItem(self.x_slice)\n self.vb_x.addItem(self.x_fit)\n self.vb_x.addItem(self.cursor_v)\n self.vb_y.addItem(self.y_slice)\n self.vb_y.addItem(self.y_fit)\n self.vb_y.addItem(self.cursor_h)\n\n self.res_legend.setParentItem(self.vb_residuals)\n self.cursor_text.setParentItem(self.vb_image)\n\n self.vb_image.setRange(QtCore.QRectF(0, 0, 1280, 1024))\n self.vb_zoom.setRange(QtCore.QRectF(0, 0, 50, 50))\n self.vb_residuals.setRange(QtCore.QRectF(0, 0, 50, 50))\n\n #\n # Size hints below here\n #\n self.g_layout.ci.layout.setColumnStretchFactor(0, 4)\n 
self.g_layout.ci.layout.setColumnStretchFactor(1, 1)\n self.g_layout.ci.layout.setColumnStretchFactor(2, 2)\n self.g_layout.ci.layout.setRowStretchFactor(0, 2)\n self.g_layout.ci.layout.setRowStretchFactor(1, 2)\n self.g_layout.ci.layout.setRowStretchFactor(2, 1)\n\n self.vb_x.setMinimumHeight(50)\n self.vb_y.setMinimumWidth(50)\n self.vb_x.setMaximumHeight(100)\n self.vb_y.setMaximumWidth(100)\n self.vb_image.setMinimumSize(640, 512)\n self.vb_zoom.setMinimumSize(320, 320)\n self.vb_residuals.setMinimumSize(320, 320)\n\n self.g_layout.setMinimumSize(1100,562)", "def intf_VIEWSHOW(E):\n out= \"View Properties\\n\"\n out+= \"---------------\\n\"\n out+= \"svgoutfile=%s\\n\" % OUT.outfile\n out+= \"camera=%s {camset}\\n\" % (','.join([str(x) for x in OUT.camera]))\n out+= \"target=%s {tarset}\\n\" % (','.join([str(x) for x in OUT.target]))\n out+= \"opacity=%s {hlr,hide}\\n\" % str(OUT.opacity)\n out+= \"facelines=%s {facelines}\\n\" % str(OUT.facelines)\n out+= \"vlinewidth=%0.2f {vlw,viewlinewidth}\\n\" % OUT.vlinewidth\n out+= \"vrefreshms=%d {refreshms,viewrefreshms}\\n\" % OUT.vrefreshms\n out+= \"vbox=(%d,%d) {viewbox[xy]}\\n\" % (OUT.vboxX,OUT.vboxY)\n out+= \"vtran=(%d,%d) {vtran[xy],viewtran[xy]}\\n\" % (OUT.vtranX,OUT.vtranY)\n out+= \"vscale=(%d,%d) {vscale[xy],viewscale[xy]}\\n\" % (OUT.vscaleX,OUT.vscaleY)\n print(out)", "def run(self):\n\n self.window.run_command(\"show_panel\", {\"panel\": \"output.reg_replace\"})", "def _handler_control_view(self,event):\n self._mgr.LoadPerspective(\n\t\t\tself._perspectives['control_view'])", "def checkRegionControl(self, iPlayer, regionID, bVassal = False):\n\t\t\n\t\tbFound = False\n\t\tplotList = self.getRegionPlotList([regionID])\n\t\tfor tPlot in plotList:\n\t\t\t\tpCurrent = gc.getMap().plot(tPlot[0], tPlot[1])\n\t\t\t\tif pCurrent.isCity():\n\t\t\t\t\tiOwner = pCurrent.getPlotCity().getOwner()\n\t\t\t\t\tif iOwner != iPlayer:\n\t\t\t\t\t\tif bVassal:\n\t\t\t\t\t\t\tif gc.getTeam(gc.getPlayer(iOwner).getTeam()).isVassal(iPlayer):\n\t\t\t\t\t\t\t\tbFound = True\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\treturn False\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\treturn False\n\t\t\t\t\telse:\n\t\t\t\t\t\tbFound = True\n\t\tif bFound:\n\t\t\treturn True\n\t\telse:\n\t\t\tfor tPlot in plotList:\n\t\t\t\tpCurrent = gc.getMap().plot(tPlot[0], tPlot[1])\n\t\t\t\tiOwner = pCurrent.getOwner()\n\t\t\t\tif iOwner != iPlayer:\n\t\t\t\t\tbFound = False\n\t\t\t\t\tbreak\n\t\t\t\telse:\n\t\t\t\t\tbFound = True\n\t\t\tif bFound:\n\t\t\t\treturn True\n\t\t\telse:\n\t\t\t\treturn False", "def show(self):\n self.window.run_command(\"show_panel\", {\"panel\": self.full_name})", "def ReadRegions(self, fname=\"temp\"):\n self.fname = fname\n command = \"\"\"IPython.notebook.kernel.execute('file = open(\"temp\", \"w\"); [file.write(x[\"wcsstr\"]) for x in '+ JSON.stringify(JS9.GetShapes(\"regions\", {{display: '{wid}JS9'}})) +']; file.close()');\"\"\".format(wid=self.wid)\n get_ipython().run_cell_magic('javascript', '', command)", "def _choose_regions(self, display_regions=False):\n dstl = Load_DSTL()\n if self.class_type == 1:\n # Select regions where there are buildings (with red roofs)\n test_image, test_mask = dstl.extract_region_pos(2300, 3000, cutout_size=[400, 400], object_class=self.class_type)\n train_image, train_mask = dstl.extract_region_pos(1900, 3100, cutout_size=[400, 400], object_class=self.class_type)\n cv_image, cv_mask = dstl.extract_region_pos(950, 1450, cutout_size=[200, 200], object_class=self.class_type)\n elif self.class_type == 5:\n train_image, 
train_mask = dstl.extract_region_pos(1150, 2150, cutout_size=[400, 400], object_class=self.class_type)\n test_image, test_mask = dstl.extract_region_pos(2300, 3000, cutout_size=[400, 400], object_class=self.class_type)\n cv_image, cv_mask = dstl.extract_region_pos(1900, 1950, cutout_size=[400, 400], object_class=self.class_type)\n else:\n pass\n self.images = {'train': train_image, 'cv': cv_image, 'test': test_image}\n self.masks = {'train': train_mask, 'cv': cv_mask, 'test': test_mask}\n if display_regions:\n for key in self.images.keys():\n display_three_band(self.images[key], self.masks[key], colors='green', title='{:} region'.format(key))", "def _getBrailleRegionsForPanel(self, obj):\n\n self._debugGenerator(\"_getBrailleRegionsForPanel\", obj)\n\n regions = []\n\n text = \"\"\n text = self._script.appendString(\n text, self._script.getDisplayedLabel(obj))\n\n # If there was no label for the panel, but it has a name, we'll\n # use the name.\n #\n if len(text) == 0:\n text = self._script.appendString(\n text, self._script.getDisplayedText(obj))\n\n text = self._script.appendString(text, self._getTextForRole(obj))\n\n regions = []\n componentRegion = braille.Component(obj, text)\n regions.append(componentRegion)\n\n return [regions, componentRegion]", "def _showView(self, win, fn=None):\n raise RuntimeError('Not implemented')", "def visualise():\n\n column = request.form.getlist('columnName')\n regions = request.form.getlist('raw_regions')\n #take the single string and return a list\n regions = query_proc.prep_regions(regions)\n #get that tables of interst\n table = query_proc.column_to_table(column)\n\n var_data = query_proc.get_region_data(table, column, regions)\n minval = query_proc.get_region_data_min(table, column, regions)\n maxval = query_proc.get_region_data_max(table, column, regions)\n\n #column diction to get human fiendly designation\n column_dict = name_column.get_name_column_dict()\n real_column = column_dict[column[0]]\n\n\n ##packing for the template\n region = regions[0]\n min_max = [minval, maxval]\n step = query_proc.calc_steps(min_max)\n min_max.append(step)\n\n min_max = json.dumps(min_max)\n json_vardata = json.dumps(var_data)\n\n return render_template('visualise.html',\n title='Data on a Map!',\n column=column,\n real_column=real_column,\n region=region,\n min_max=min_max,\n json_vardata=json_vardata)", "def enableEditorDrop():\n\n perspPanel = cmds.getPanel( withLabel='Persp View')\n panelControl = cmds.panel( perspPanel, query=True, control=True)\n cmds.control(panelControl, edit=True, dropCallback=panelDropLoad)\n global viewportHeight\n viewportHeight = cmds.control(panelControl, query=True, h=True)", "def getViews(read):\n ...", "def GetRegionVertices(self, *float, **kwargs):\n ...", "def edit_virtual_panel_process():\n form = EditVirtualPanelProcess()\n\n vp_id = request.args.get('id')\n panel_id = get_panel_by_vp_id(s, vp_id)\n if request.method == \"POST\":\n if request.form['make_live'] == \"on\":\n make_vp_panel_live(s, vp_id)\n add_to_starlims(vp_id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_vpanel') + \"?id=\" + vp_id)\n elif request.method == \"GET\":\n lock_panel(s, current_user.id, panel_id)\n panel_info = get_panel_details_by_id(s, panel_id)\n panel_name = panel_info.name\n form.panel.choices = [(panel_id, panel_name), ]\n\n panel_version = get_current_version(s, panel_id)\n panel_genes = get_genes_by_panelid(s, panel_id, panel_version)\n vp_info = get_vpanel_details_by_id(s, vp_id)\n vp_version = 
vp_info.current_version\n vp_name = vp_info.name\n form.vpanelname.data = vp_name\n vp_genes = get_genes_by_vpanelid_edit(s, vp_id, vp_version)\n genelist = \"\"\n vp_list = []\n for i in vp_genes:\n vp_list.append(i.id)\n\n genes = []\n print('new method')\n for i in panel_genes:\n if i.id in vp_list:\n genes.append({\"name\": i.name, \"id\": i.id, \"vp_list\": True})\n button = render_template(\"gene_button.html\", gene_name=i.name, gene_id=i.id, added=True)\n genelist += button\n\n else:\n genes.append({\"name\": i.name, \"id\": i.id, \"vp_list\": False})\n\n gene_html = render_template(\"panel_genes.html\", panel_genes=genes)\n\n url = url_for('panels.edit_virtual_panel_process') + '?id=' + str(vp_id)\n return render_template('virtualpanels_createprocess.html', form=form, genes=gene_html, genelist=genelist,\n vp_id=vp_id, panel_name=vp_name, current_version=vp_version, url=url)", "def viewAll(self):\n self._sceneviewer.viewAll()" ]
[ "0.6146995", "0.56700337", "0.5658867", "0.5657484", "0.5576248", "0.5533098", "0.55307925", "0.54637206", "0.5391918", "0.52538276", "0.52468014", "0.5194485", "0.51648325", "0.5118682", "0.51112306", "0.51092696", "0.50995123", "0.5076863", "0.50713927", "0.50579756", "0.50460374", "0.503407", "0.50216883", "0.50118434", "0.49771124", "0.49763036", "0.49623057", "0.49477184", "0.49418625", "0.4932576" ]
0.62893504
0
Method for the create virtual panel wizard. If the request is "GET", the method renders the virtual panel process HTML. If the request is "POST", the method checks whether the virtual panel is to be made live and unlocks the panel for future editing.
def create_virtual_panel_process():
    form = CreateVirtualPanelProcess()

    if request.method == "POST":
        make_live = request.form['make_live']
        vp_id = request.args.get('id')
        if make_live == "on":
            make_vp_panel_live(s, vp_id)
            add_to_starlims(vp_id)
        panel_id = get_panel_by_vp_id(s, vp_id)
        unlock_panel_query(s, panel_id)
        return redirect(url_for('panels.view_vpanel') + "?id=" + vp_id)
    elif request.method == "GET":
        form.panel.choices = get_panel_choices(s, current_user.id)
        url = url_for('panels.create_virtual_panel_process')
        return render_template('virtualpanels_createprocess.html', form=form, url=url, vp_id="main")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_panel_process():\n form = CreatePanelProcess()\n if request.method == \"POST\":\n make_live = request.form['make_live']\n panel_id = request.args.get('id')\n project_id = get_project_id_by_panel_id(s, panel_id)\n preftx_id = get_preftx_id_by_project_id(s, project_id)\n version = get_current_preftx_version(s, preftx_id)\n if not version:\n version = 0\n if make_live == \"on\":\n make_preftx_live(s, preftx_id, version + 1, current_user.id)\n make_panel_live(s, panel_id, 1, current_user.id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_panel') + \"?id=\" + panel_id)\n elif request.method == \"GET\":\n form.project.choices = get_project_choices(s, current_user.id)\n return render_template('panel_createprocess.html', form=form, panel_id=\"main\",\n url=url_for('panels.create_panel_process'))", "def edit_virtual_panel_process():\n form = EditVirtualPanelProcess()\n\n vp_id = request.args.get('id')\n panel_id = get_panel_by_vp_id(s, vp_id)\n if request.method == \"POST\":\n if request.form['make_live'] == \"on\":\n make_vp_panel_live(s, vp_id)\n add_to_starlims(vp_id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_vpanel') + \"?id=\" + vp_id)\n elif request.method == \"GET\":\n lock_panel(s, current_user.id, panel_id)\n panel_info = get_panel_details_by_id(s, panel_id)\n panel_name = panel_info.name\n form.panel.choices = [(panel_id, panel_name), ]\n\n panel_version = get_current_version(s, panel_id)\n panel_genes = get_genes_by_panelid(s, panel_id, panel_version)\n vp_info = get_vpanel_details_by_id(s, vp_id)\n vp_version = vp_info.current_version\n vp_name = vp_info.name\n form.vpanelname.data = vp_name\n vp_genes = get_genes_by_vpanelid_edit(s, vp_id, vp_version)\n genelist = \"\"\n vp_list = []\n for i in vp_genes:\n vp_list.append(i.id)\n\n genes = []\n print('new method')\n for i in panel_genes:\n if i.id in vp_list:\n genes.append({\"name\": i.name, \"id\": i.id, \"vp_list\": True})\n button = render_template(\"gene_button.html\", gene_name=i.name, gene_id=i.id, added=True)\n genelist += button\n\n else:\n genes.append({\"name\": i.name, \"id\": i.id, \"vp_list\": False})\n\n gene_html = render_template(\"panel_genes.html\", panel_genes=genes)\n\n url = url_for('panels.edit_virtual_panel_process') + '?id=' + str(vp_id)\n return render_template('virtualpanels_createprocess.html', form=form, genes=gene_html, genelist=genelist,\n vp_id=vp_id, panel_name=vp_name, current_version=vp_version, url=url)", "def make_virtualpanel_live():\n vpanelid = request.args.get('id')\n panelid = get_panel_by_vp_id(s, vpanelid)\n locked = check_if_locked(s, panelid)\n if locked:\n if current_user.id == get_locked_user(s, panelid):\n make_vp_panel_live(s, vpanelid)\n add_to_starlims(vpanelid)\n return redirect(url_for('panels.view_virtual_panels'))\n else:\n make_vp_panel_live(s, vpanelid)\n add_to_starlims(vpanelid)\n return redirect(url_for('panels.view_virtual_panels'))", "def edit_panel_process():\n if request.method == \"POST\":\n make_live = request.form['make_live']\n panel_id = request.args.get('id')\n project_id = get_project_id_by_panel_id(s, panel_id)\n preftx_id = get_preftx_id_by_project_id(s, project_id)\n tx_version = get_current_preftx_version(s, preftx_id)\n panel_version = get_current_version(s, panel_id)\n if not tx_version:\n tx_version = 0\n if make_live == \"on\":\n print('make_live')\n make_preftx_live(s, preftx_id, tx_version + 1, current_user.id)\n make_panel_live(s, panel_id, panel_version + 1, current_user.id)\n 
unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_panel') + \"?id=\" + panel_id)\n elif request.method == \"GET\":\n panel_id = request.args.get('id')\n form = EditPanelProcess()\n panel_info = get_panel_info(s, panel_id)\n project_id = panel_info.project_id\n form.project.choices = [(project_id, panel_info.project_name), ]\n form.panelname.data = panel_info.name\n\n lock_panel(s, current_user.id, panel_id)\n\n genes = get_genes_by_panelid_edit(s, panel_id, panel_info.current_version)\n html = \"\"\n buttonlist = \"\"\n print('hello')\n for gene in genes:\n gene_id = gene.id\n gene_name = gene.name\n preftx_id = get_preftx_by_gene_id\n upcoming_preftx = get_upcoming_preftx_by_gene_id(s, project_id, gene_id)\n all_tx = get_tx_by_gene_id(s, gene_id)\n\n buttonlist += render_template(\"gene_button.html\", gene_name=gene_name, gene_id=gene_id, added=True)\n tx_html = render_template(\"tx_list.html\", gene_name=gene_name, all_tx=all_tx, preftx=preftx_id,\n upcoming=upcoming_preftx, disabled=True)\n html += tx_html\n\n return render_template('panel_createprocess.html', form=form, genes=html, genelist=buttonlist,\n panel_id=panel_id,\n url=url_for('panels.edit_panel_process') + \"?id=\" + panel_id)", "def add_vp():\n vp_name = request.json['vp_name']\n panel_id = request.json['panel_id']\n vp_id = create_virtualpanel_query(s, vp_name, panel_id)\n if vp_id != -1:\n lock_panel(s, current_user.id, panel_id)\n return jsonify(vp_id)", "def view_vpanel():\n id = request.args.get('id')\n try:\n version = request.form[\"versions\"]\n except KeyError:\n version = None\n if id:\n status = check_virtualpanel_status(s, id)\n if not status:\n message = \"This panel has changes which cannot be viewed here as they have not been made live yet, if you have permission you can view these by editing the panel\"\n else:\n message = None\n panel_details = get_vpanel_details_by_id(s, id)\n for i in panel_details:\n if not version:\n version = panel_details.current_version\n panel_name = panel_details.name\n project_id = panel_details.project_id\n panel = get_regions_by_vpanelid(s, id, version)\n result = []\n rows = list(panel)\n if len(rows) != 0:\n bed = ''\n for i in rows:\n row = dict(zip(i.keys(), i))\n result.append(row)\n panel_name = i.panel_name\n current_version = i.current_version\n else:\n message = \"This Panel has no regions yet & may also have changes that have not been made live yet\"\n bed = 'disabled'\n current_version = version\n print(type(version))\n current_version = round(current_version, 1)\n version = round(float(version), 1)\n\n if check_user_has_permission(s, current_user.id, project_id):\n edit = ''\n else:\n edit = 'disabled'\n\n form = ViewPanel()\n v_list = get_prev_versions_vp(s, id)\n choices = []\n for i in v_list:\n choices.append((i, i))\n\n if (current_version, current_version) not in choices:\n choices.append((current_version, current_version))\n\n form.versions.choices = choices\n form.versions.default = current_version\n form.process()\n\n table = []\n\n for i in result:\n line = []\n line.append(i['chrom'])\n line.append(str(i['region_start']))\n line.append(str(i['region_end']))\n line.append(i['gene_name'])\n line.append(i['name'].replace(',', ' '))\n table.append(line)\n\n return render_template('panel_view.html', table=json.dumps(table), panel=table, panel_name=panel_name,\n edit=edit, bed=bed,\n version=version, panel_id=id, message=message, url=url_for('panels.view_vpanel'),\n scope='Virtual', form=form)\n\n else:\n return 
redirect(url_for('panels.view_virtual_panels'))", "def view_panel():\n id = request.args.get('id')\n try:\n version = request.form[\"versions\"]\n except KeyError:\n version = None\n if id:\n status = check_panel_status(s, id)\n if not status:\n message = \"This panel has changes which cannot be viewed here as they have not been made live yet, if you have permission you can view these by editing the panel\"\n else:\n message = None\n panel_details = get_panel_details_by_id(s, id)\n if not version:\n version = panel_details.current_version\n panel_name = panel_details.name\n panel = get_regions_by_panelid(s, id, version)\n project_id = get_project_id_by_panel_id(s, id)\n result = []\n rows = list(panel)\n if len(rows) != 0:\n bed = ''\n for i in rows:\n row = dict(zip(i.keys(), i))\n result.append(row)\n # panel_name = i.panel_name\n current_version = i.current_version\n else:\n message = \"This Panel has no regions yet & may also have changes that have not been made live\"\n bed = 'disabled'\n current_version = version\n\n if check_user_has_permission(s, current_user.id, project_id):\n edit = ''\n else:\n edit = 'disabled'\n\n form = ViewPanel()\n v_list = range(1, current_version + 1)\n choices = []\n for i in v_list:\n choices.append((i, i))\n form.versions.choices = choices\n form.versions.default = version\n form.process()\n\n table = []\n\n for i in result:\n line = []\n line.append(i['chrom'])\n line.append(str(i['region_start']))\n line.append(str(i['region_end']))\n line.append(i['gene_name'])\n line.append(i['name'].replace(',', ' '))\n table.append(line)\n return render_template('panel_view.html', scope='Panel', table=json.dumps(table), panel=table,\n panel_name=panel_name, edit=edit, bed=bed,\n version=version, panel_id=id, project_id=project_id, message=message,\n url=url_for('panels.view_panel'),\n form=form)\n\n else:\n return redirect(url_for('panels.view_panels'))", "def make_live():\n panelid = request.args.get('id')\n locked = check_if_locked(s, panelid)\n if locked:\n unlock_panel_query(s, panelid)\n current_version = get_current_version(s, panelid)\n if not current_version:\n current_version = 0\n new_version = current_version + 1\n make_panel_live(s, panelid, new_version, current_user.id)\n return redirect(url_for('panels.view_panels'))", "def do_POST(self):\n if self.is_viewvc():\n self.run_viewvc()\n else:\n self.send_error(501, \"Can only POST to %s\"\n % (options.script_alias))", "def post(self, request, *args, **kwargs):\n form = self.get_form()\n if form.is_valid():\n self.use_template(self.render_template())\n return self.form_valid(form)\n else:\n return self.form_invalid(form)", "def post(self):\n return CreateSavingPlan(request, current_user.id)", "def preview_handler(self, _, __):\r\n template = self.system.render_template('lti_form.html', self.get_context())\r\n return Response(template, content_type='text/html')", "def createPanel(self, LibraryID, Name, **kwargs):\n if self.request(\"createPanel\", LibraryID=LibraryID, Name=Name, **kwargs) is None:\n return None\n return self.json_response[\"Result\"][\"PanelID\"]", "def new_dynamic(request):\n\n if request.method == 'POST':\n dynamic_form = DynamicForm(request.POST)\n\n if dynamic_form.is_valid():\n new_dynamic = dynamic_form.save(commit=False)\n new_dynamic.user = request.user\n new_dynamic.save()\n request.session['dynamic_id'] = new_dynamic.id\n return HttpResponseRedirect('/dynamics/attach-molecules')\n else:\n context = {\n 'dynamic_form': dynamic_form,\n }\n return render(request, 
'dynamics/new_dynamic.html', context)\n # if dynamic_id is None:\n context = {\n 'dynamic_form': DynamicForm(),\n }\n # else:\n # dynamic = Dynamic.objects.get(pk=dynamic_id)\n # dynamic_form = DynamicForm(instance=dynamic)\n # dynamic_form.fields['box_size'].disabled = True\n # dynamic_form.fields['number_of_molecules'].label = 'Number of molecules to be added'\n # dynamic_form.fields['number_of_atoms_for_alignment'].disabled = True\n # context = {\n # 'dynamic_form': dynamic_form,\n # }\n return render(request, 'dynamics/new_dynamic.html', context)", "def serve(self, request, *args, **kwargs):\n\n template = self.get_template(request)\n\n if request.method == 'POST':\n\n form = self.get_form(request.POST, page=self, user=request.user)\n\n if form.is_valid():\n self.process_form_submission(form)\n return HttpResponseRedirect(self.url + '?thank=you')\n\n else:\n\n thanks = request.GET.get('thank', False)\n if thanks:\n form = None\n template = self.get_landing_page_template(request)\n if self.thanks_page_title:\n self.title = self.thanks_page_title\n else:\n form = self.get_form(page=self, user=request.user)\n\n context = self.get_context(request)\n context['form'] = form\n if form:\n context['conditional_rules'] = json.dumps(form.conditional_rules)\n\n return render(\n request,\n template,\n context\n )", "def render(self):\n _ = self.request.getText\n form = self.request.form\n \n if form.has_key('cancel'):\n # User canceled\n return self.page.send_page(self.request)\n\n try:\n if not self.allowed():\n raise ActionError(_('You are not allowed to edit this page.'))\n elif not self.page.exists():\n raise ActionError(_('This page is already deleted or was never created!'))\n \n self.package()\n except ActionError, e:\n return self.page.send_page(self.request, msg=e.args[0])", "def view_virtual_panels(id=None):\n if not id:\n id = request.args.get('id')\n if id:\n panels = get_virtual_panels_by_panel_id(s, id)\n else:\n panels = get_virtual_panels_simple(s)\n result = []\n panel_name = \"Virtual\"\n for i in panels:\n row = dict(zip(i.keys(), i))\n\n row[\"current_version\"] = round(row[\"current_version\"], 1)\n\n status = check_virtualpanel_status(s, row[\"id\"])\n row[\"status\"] = status\n permission = check_user_has_permission(s, current_user.id, row[\"projectid\"])\n locked = check_if_locked_by_user_vpanel(s, current_user.id, row[\"panelid\"])\n\n row['permission'] = permission\n row['locked'] = locked\n\n status = check_virtualpanel_status(s, row[\"id\"])\n row[\"status\"] = status\n\n if id:\n panel_name = row['panelname'] + ' Virtual'\n # if check_user_has_permission(s, current_user.id, row[\"projectid\"]):\n # result.append(row)\n result.append(row)\n table = ItemTableVPanels(result, classes=['table', 'table-striped'])\n return render_template('panels.html', panels=table, project_name=panel_name,\n message='Virtual Panels are locked if their parent panel is being edited')", "def post(self):\n if self.data.GET.get('cbox'):\n cbox = True\n else:\n cbox = False\n\n if self.validate():\n self.redirect.program()\n self.redirect.to('edit_gci_program', validated=True, cbox=cbox)\n else:\n self.get()", "def newPage(request):\n newForm = newWikiPageForm()\n newFormTitle = newForm[\"newFormTitle\"]\n newFormBody = newForm[\"newFormBody\"]\n if request.method == \"POST\":\n form = newWikiPageForm(request.POST)\n # check that all fields are filled\n if form.is_valid():\n title = form.cleaned_data[\"newFormTitle\"]\n content = form.cleaned_data[\"newFormBody\"]\n\n # add new wiki page if page 
doesn't already exist\n if util.get_entry(title) is None:\n\n util.save_entry(title, content)\n\n # take user to their newly created page\n return HttpResponseRedirect(reverse(\"entry\", kwargs={\n \"title\": title\n }))\n # render template again with the inputted data along with a error message \n else:\n return render(request, \"encyclopedia/newPage.html\", {\n \"formTitle\": title,\n \"formBody\": content,\n \"exists\": True\n }) \n # render template again with error message \n else: \n return render(request, \"encyclopedia/newPage.html\", {\n \"formTitle\": title,\n \"formBody\": content,\n \"exists\": False\n }) \n #when 'create new page' button is clicked user is taken to newPage.html\n else: \n return render(request, \"encyclopedia/newPage.html\", {\n \"formTitle\": newFormTitle,\n \"formBody\": newFormBody,\n \"exists\": False\n })", "def render_POST(self, request):", "def save(self, *args, **kwargs):\n step_numeral, step_name = kwargs.pop('step', (None, None))\n\n if step_numeral == 1:\n \"\"\"\n Basic Form: Application & File Uploader\n \"\"\"\n return self.cleaned_data\n if step_numeral == 2:\n \"\"\"\n Basic Form + Mapping Fields\n \"\"\"\n return self.cleaned_data\n\n if step_numeral == 3:\n pass # end-user is previewing", "def render_step(self,step):\n data = self.storage.get_step_data(step)\n files = self.storage.get_step_files(step)\n form = self.get_form(step,data,files)\n self.clean_for_commit()\n return self.render(form)", "def post(self, request, *args, **kwargs):\n return self.render_to_response(self.get_context_data())", "def get_template(self, request, step, form):\n return 'forms/wizard.html'", "def render_creation_form(request: Request):\n return templates.TemplateResponse(\"creation_form.html\",{'request': request})", "def process_show_form(self, request, step, form):\n pass", "def pipeline_new(request):\n t = loader.get_template('pipeline/pipeline.html')\n method_families = MethodFamily.filter_by_user(request.user).order_by('name')\n acf = metadata.forms.AccessControlForm()\n c = {\n 'method_families': method_families,\n 'compound_datatypes': _get_compound_datatypes(request),\n \"access_control_form\": acf\n }\n return HttpResponse(t.render(c, request))", "def management_create(request, slug):\n #verifies if the company exists if not returns a 404 page\n company =get_object_or_404(Company,slug=slug)\n\n #verifies the person has access to the company or is an incubator employee\n edit = validate_user_company_access_or_redirect(request,company)\n\n #if the request is GET presents empty form\n if request.method == 'GET':\n\n management_form = ManagementForm()\n return render_to_response('management_form.html', {'form': management_form, 'company':company},\n context_instance=RequestContext(request))\n \n else:\n management_form = ManagementForm(request.POST)\n #if is POST Validates the form is well filled and save it redirecting to the company page\n if management_form.is_valid():\n mf = management_form.save(commit=False)\n mf.company = company\n mf.save()\n return HttpResponseRedirect('/company/'+str(slug))\n\n #if not well filled redirect to the original create and display error\n else:\n return render_to_response('management_form.html', \n {'form': management_form, 'form_errors': management_form.errors, 'company':company},\n context_instance=RequestContext(request))", "def KLP_Boundary_Create(request):\n\n # Checking user Permissions\n\n KLP_user_Perm(request.user, 'Boundary', 'Add')\n buttonType = request.POST.get('form-buttonType')\n KLP_Create_Boundary = \\\n 
KLP_Boundary(queryset=Boundary.objects.filter(pk=0),\n permitted_methods=('GET', 'POST'),\n responder=TemplateResponder(template_dir='viewtemplates'\n , template_object_name='boundary',\n extra_context={'buttonType': buttonType}),\n receiver=XMLReceiver())\n\n response = KLP_Create_Boundary.responder.create_form(request,\n form_class=Boundary_Form)\n\n return HttpResponse(response)", "def create_form(self, r, **attr):\n\n # User must be permitted to create site activity reports\n authorised = self._permitted(method=\"create\")\n if not authorised:\n r.unauthorised()\n\n s3db = current.s3db\n\n T = current.T\n response = current.response\n settings = current.deployment_settings\n\n # Page title\n output = {\"title\": T(\"Create Residents Report\")}\n\n # Form fields\n table = s3db.dvr_site_activity\n table.date.default = r.utcnow.date()\n formfields = [table.site_id,\n table.date,\n ]\n\n # Form buttons\n from gluon import INPUT, SQLFORM\n submit_btn = INPUT(_class = \"tiny primary button\",\n _name = \"submit\",\n _type = \"submit\",\n _value = T(\"Create Report\"),\n )\n cancel_btn = A(T(\"Cancel\"),\n _href = r.url(id=None, method=\"\"),\n _class = \"action-lnk\",\n )\n buttons = [submit_btn, cancel_btn]\n\n # Generate the form and add it to the output\n resourcename = r.resource.name\n formstyle = settings.get_ui_formstyle()\n form = SQLFORM.factory(record = None,\n showid = False,\n formstyle = formstyle,\n table_name = resourcename,\n buttons = buttons,\n *formfields)\n output[\"form\"] = form\n\n # Process the form\n formname = \"%s/manage\" % resourcename\n if form.accepts(r.post_vars,\n current.session,\n formname = formname,\n onvalidation = self.validate,\n keepvalues = False,\n hideerror = False,\n ):\n\n from core import S3PermissionError, set_last_record_id\n from ..helpers import DRKSiteActivityReport\n\n formvars = form.vars\n report = DRKSiteActivityReport(site_id = formvars.site_id,\n date = formvars.date,\n )\n try:\n record_id = report.store()\n except S3PermissionError:\n # Redirect to list view rather than index page\n current.auth.permission.homepage = r.url(id=None, method=\"\")\n r.unauthorised()\n\n r.resource.lastid = str(record_id)\n set_last_record_id(\"dvr_site_activity\", record_id)\n\n current.response.confirmation = T(\"Report created\")\n self.next = r.url(id=record_id, method=\"read\")\n\n response.view = self._view(r, \"create.html\")\n\n return output" ]
[ "0.7490138", "0.7386656", "0.6583208", "0.6571529", "0.6242242", "0.5805392", "0.55935615", "0.5549157", "0.5532851", "0.54516643", "0.54052836", "0.5399087", "0.537762", "0.53703177", "0.53167623", "0.5260761", "0.52282465", "0.5182984", "0.51700354", "0.51629823", "0.5153818", "0.51396817", "0.5133905", "0.5106809", "0.50944555", "0.5078249", "0.50503933", "0.50433886", "0.5043121", "0.5041729" ]
0.84285897
0
Method for edit virtual panel wizard. If request is "GET" the method locks the panel so it cannot be edited by any other user. It then retrieves all information about the current panel and virtual panel. The query associated with this method gets the virtual panel information with respect to the future version number so changes that have already been added to the virtual panel can also be viewed. If request is "POST" the method checks if the panel should be made live and unlocks the panel for editing. The method then redirects to the view virtual panel page.
def edit_virtual_panel_process(): form = EditVirtualPanelProcess() vp_id = request.args.get('id') panel_id = get_panel_by_vp_id(s, vp_id) if request.method == "POST": if request.form['make_live'] == "on": make_vp_panel_live(s, vp_id) add_to_starlims(vp_id) unlock_panel_query(s, panel_id) return redirect(url_for('panels.view_vpanel') + "?id=" + vp_id) elif request.method == "GET": lock_panel(s, current_user.id, panel_id) panel_info = get_panel_details_by_id(s, panel_id) panel_name = panel_info.name form.panel.choices = [(panel_id, panel_name), ] panel_version = get_current_version(s, panel_id) panel_genes = get_genes_by_panelid(s, panel_id, panel_version) vp_info = get_vpanel_details_by_id(s, vp_id) vp_version = vp_info.current_version vp_name = vp_info.name form.vpanelname.data = vp_name vp_genes = get_genes_by_vpanelid_edit(s, vp_id, vp_version) genelist = "" vp_list = [] for i in vp_genes: vp_list.append(i.id) genes = [] print('new method') for i in panel_genes: if i.id in vp_list: genes.append({"name": i.name, "id": i.id, "vp_list": True}) button = render_template("gene_button.html", gene_name=i.name, gene_id=i.id, added=True) genelist += button else: genes.append({"name": i.name, "id": i.id, "vp_list": False}) gene_html = render_template("panel_genes.html", panel_genes=genes) url = url_for('panels.edit_virtual_panel_process') + '?id=' + str(vp_id) return render_template('virtualpanels_createprocess.html', form=form, genes=gene_html, genelist=genelist, vp_id=vp_id, panel_name=vp_name, current_version=vp_version, url=url)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def view_vpanel():\n id = request.args.get('id')\n try:\n version = request.form[\"versions\"]\n except KeyError:\n version = None\n if id:\n status = check_virtualpanel_status(s, id)\n if not status:\n message = \"This panel has changes which cannot be viewed here as they have not been made live yet, if you have permission you can view these by editing the panel\"\n else:\n message = None\n panel_details = get_vpanel_details_by_id(s, id)\n for i in panel_details:\n if not version:\n version = panel_details.current_version\n panel_name = panel_details.name\n project_id = panel_details.project_id\n panel = get_regions_by_vpanelid(s, id, version)\n result = []\n rows = list(panel)\n if len(rows) != 0:\n bed = ''\n for i in rows:\n row = dict(zip(i.keys(), i))\n result.append(row)\n panel_name = i.panel_name\n current_version = i.current_version\n else:\n message = \"This Panel has no regions yet & may also have changes that have not been made live yet\"\n bed = 'disabled'\n current_version = version\n print(type(version))\n current_version = round(current_version, 1)\n version = round(float(version), 1)\n\n if check_user_has_permission(s, current_user.id, project_id):\n edit = ''\n else:\n edit = 'disabled'\n\n form = ViewPanel()\n v_list = get_prev_versions_vp(s, id)\n choices = []\n for i in v_list:\n choices.append((i, i))\n\n if (current_version, current_version) not in choices:\n choices.append((current_version, current_version))\n\n form.versions.choices = choices\n form.versions.default = current_version\n form.process()\n\n table = []\n\n for i in result:\n line = []\n line.append(i['chrom'])\n line.append(str(i['region_start']))\n line.append(str(i['region_end']))\n line.append(i['gene_name'])\n line.append(i['name'].replace(',', ' '))\n table.append(line)\n\n return render_template('panel_view.html', table=json.dumps(table), panel=table, panel_name=panel_name,\n edit=edit, bed=bed,\n version=version, panel_id=id, message=message, url=url_for('panels.view_vpanel'),\n scope='Virtual', form=form)\n\n else:\n return redirect(url_for('panels.view_virtual_panels'))", "def view_panel():\n id = request.args.get('id')\n try:\n version = request.form[\"versions\"]\n except KeyError:\n version = None\n if id:\n status = check_panel_status(s, id)\n if not status:\n message = \"This panel has changes which cannot be viewed here as they have not been made live yet, if you have permission you can view these by editing the panel\"\n else:\n message = None\n panel_details = get_panel_details_by_id(s, id)\n if not version:\n version = panel_details.current_version\n panel_name = panel_details.name\n panel = get_regions_by_panelid(s, id, version)\n project_id = get_project_id_by_panel_id(s, id)\n result = []\n rows = list(panel)\n if len(rows) != 0:\n bed = ''\n for i in rows:\n row = dict(zip(i.keys(), i))\n result.append(row)\n # panel_name = i.panel_name\n current_version = i.current_version\n else:\n message = \"This Panel has no regions yet & may also have changes that have not been made live\"\n bed = 'disabled'\n current_version = version\n\n if check_user_has_permission(s, current_user.id, project_id):\n edit = ''\n else:\n edit = 'disabled'\n\n form = ViewPanel()\n v_list = range(1, current_version + 1)\n choices = []\n for i in v_list:\n choices.append((i, i))\n form.versions.choices = choices\n form.versions.default = version\n form.process()\n\n table = []\n\n for i in result:\n line = []\n line.append(i['chrom'])\n line.append(str(i['region_start']))\n 
line.append(str(i['region_end']))\n line.append(i['gene_name'])\n line.append(i['name'].replace(',', ' '))\n table.append(line)\n return render_template('panel_view.html', scope='Panel', table=json.dumps(table), panel=table,\n panel_name=panel_name, edit=edit, bed=bed,\n version=version, panel_id=id, project_id=project_id, message=message,\n url=url_for('panels.view_panel'),\n form=form)\n\n else:\n return redirect(url_for('panels.view_panels'))", "def edit_panel_process():\n if request.method == \"POST\":\n make_live = request.form['make_live']\n panel_id = request.args.get('id')\n project_id = get_project_id_by_panel_id(s, panel_id)\n preftx_id = get_preftx_id_by_project_id(s, project_id)\n tx_version = get_current_preftx_version(s, preftx_id)\n panel_version = get_current_version(s, panel_id)\n if not tx_version:\n tx_version = 0\n if make_live == \"on\":\n print('make_live')\n make_preftx_live(s, preftx_id, tx_version + 1, current_user.id)\n make_panel_live(s, panel_id, panel_version + 1, current_user.id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_panel') + \"?id=\" + panel_id)\n elif request.method == \"GET\":\n panel_id = request.args.get('id')\n form = EditPanelProcess()\n panel_info = get_panel_info(s, panel_id)\n project_id = panel_info.project_id\n form.project.choices = [(project_id, panel_info.project_name), ]\n form.panelname.data = panel_info.name\n\n lock_panel(s, current_user.id, panel_id)\n\n genes = get_genes_by_panelid_edit(s, panel_id, panel_info.current_version)\n html = \"\"\n buttonlist = \"\"\n print('hello')\n for gene in genes:\n gene_id = gene.id\n gene_name = gene.name\n preftx_id = get_preftx_by_gene_id\n upcoming_preftx = get_upcoming_preftx_by_gene_id(s, project_id, gene_id)\n all_tx = get_tx_by_gene_id(s, gene_id)\n\n buttonlist += render_template(\"gene_button.html\", gene_name=gene_name, gene_id=gene_id, added=True)\n tx_html = render_template(\"tx_list.html\", gene_name=gene_name, all_tx=all_tx, preftx=preftx_id,\n upcoming=upcoming_preftx, disabled=True)\n html += tx_html\n\n return render_template('panel_createprocess.html', form=form, genes=html, genelist=buttonlist,\n panel_id=panel_id,\n url=url_for('panels.edit_panel_process') + \"?id=\" + panel_id)", "def make_virtualpanel_live():\n vpanelid = request.args.get('id')\n panelid = get_panel_by_vp_id(s, vpanelid)\n locked = check_if_locked(s, panelid)\n if locked:\n if current_user.id == get_locked_user(s, panelid):\n make_vp_panel_live(s, vpanelid)\n add_to_starlims(vpanelid)\n return redirect(url_for('panels.view_virtual_panels'))\n else:\n make_vp_panel_live(s, vpanelid)\n add_to_starlims(vpanelid)\n return redirect(url_for('panels.view_virtual_panels'))", "def create_virtual_panel_process():\n form = CreateVirtualPanelProcess()\n\n if request.method == \"POST\":\n make_live = request.form['make_live']\n vp_id = request.args.get('id')\n if make_live == \"on\":\n make_vp_panel_live(s, vp_id)\n add_to_starlims(vp_id)\n panel_id = get_panel_by_vp_id(s, vp_id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_vpanel') + \"?id=\" + vp_id)\n elif request.method == \"GET\":\n form.panel.choices = get_panel_choices(s, current_user.id)\n url = url_for('panels.create_virtual_panel_process')\n return render_template('virtualpanels_createprocess.html', form=form, url=url, vp_id=\"main\")", "def view_virtual_panels(id=None):\n if not id:\n id = request.args.get('id')\n if id:\n panels = get_virtual_panels_by_panel_id(s, id)\n else:\n panels = 
get_virtual_panels_simple(s)\n result = []\n panel_name = \"Virtual\"\n for i in panels:\n row = dict(zip(i.keys(), i))\n\n row[\"current_version\"] = round(row[\"current_version\"], 1)\n\n status = check_virtualpanel_status(s, row[\"id\"])\n row[\"status\"] = status\n permission = check_user_has_permission(s, current_user.id, row[\"projectid\"])\n locked = check_if_locked_by_user_vpanel(s, current_user.id, row[\"panelid\"])\n\n row['permission'] = permission\n row['locked'] = locked\n\n status = check_virtualpanel_status(s, row[\"id\"])\n row[\"status\"] = status\n\n if id:\n panel_name = row['panelname'] + ' Virtual'\n # if check_user_has_permission(s, current_user.id, row[\"projectid\"]):\n # result.append(row)\n result.append(row)\n table = ItemTableVPanels(result, classes=['table', 'table-striped'])\n return render_template('panels.html', panels=table, project_name=panel_name,\n message='Virtual Panels are locked if their parent panel is being edited')", "def make_live():\n panelid = request.args.get('id')\n locked = check_if_locked(s, panelid)\n if locked:\n unlock_panel_query(s, panelid)\n current_version = get_current_version(s, panelid)\n if not current_version:\n current_version = 0\n new_version = current_version + 1\n make_panel_live(s, panelid, new_version, current_user.id)\n return redirect(url_for('panels.view_panels'))", "def add_vp():\n vp_name = request.json['vp_name']\n panel_id = request.json['panel_id']\n vp_id = create_virtualpanel_query(s, vp_name, panel_id)\n if vp_id != -1:\n lock_panel(s, current_user.id, panel_id)\n return jsonify(vp_id)", "def create_panel_process():\n form = CreatePanelProcess()\n if request.method == \"POST\":\n make_live = request.form['make_live']\n panel_id = request.args.get('id')\n project_id = get_project_id_by_panel_id(s, panel_id)\n preftx_id = get_preftx_id_by_project_id(s, project_id)\n version = get_current_preftx_version(s, preftx_id)\n if not version:\n version = 0\n if make_live == \"on\":\n make_preftx_live(s, preftx_id, version + 1, current_user.id)\n make_panel_live(s, panel_id, 1, current_user.id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_panel') + \"?id=\" + panel_id)\n elif request.method == \"GET\":\n form.project.choices = get_project_choices(s, current_user.id)\n return render_template('panel_createprocess.html', form=form, panel_id=\"main\",\n url=url_for('panels.create_panel_process'))", "def unlock_panel():\n panelid = request.args.get('panelid')\n unlock_panel_query(s, panelid)\n\n return redirect(url_for('panels.view_panels'))", "def get_edit_form(self, data):\n self.add_success(data)\n rv = self.get((data[self.id_field], self.edit_url))\n assert not is_404(rv)\n assert in_response(rv, 'Edit {}'.format(data[self.name_field]))\n for field, name in self.fields:\n assert in_response(rv, name)\n return rv", "def update(self):\n context_state = getMultiAdapter((self.context, self.request), name=u'plone_context_state')\n self.manageUrl = '%s/@@manage-footerportlet' % context_state.view_url()\n \n ## This is the way it's done in plone.app.portlets.manager, so we'll do the same\n mt = getToolByName(self.context, 'portal_membership')\n self.canManagePortlets = mt.checkPermission('Portlets: Manage portlets', self.context)", "def voter_edit_process_view(request):\n authority_required = {'admin'} # admin, verified_volunteer\n if not voter_has_authority(request, authority_required):\n return redirect_to_sign_in_page(request, authority_required)\n\n voter_on_stage = Voter()\n 
at_least_one_value_changed = False\n\n voter_id = request.POST.get('voter_id', 0)\n voter_id = convert_to_int(voter_id)\n first_name = request.POST.get('first_name', False)\n last_name = request.POST.get('last_name', False)\n twitter_handle = request.POST.get('twitter_handle', False)\n email = request.POST.get('email', False)\n password_text = request.POST.get('password_text', False)\n\n # Check to see if this voter is already being used anywhere\n voter_on_stage_found = False\n try:\n voter_query = Voter.objects.filter(id=voter_id)\n if len(voter_query):\n voter_on_stage = voter_query[0]\n voter_on_stage_found = True\n except Exception as e:\n handle_record_not_found_exception(e, logger=logger)\n\n if voter_on_stage_found:\n try:\n # Update existing voter\n if first_name is not False:\n voter_on_stage.first_name = first_name\n at_least_one_value_changed = True\n if last_name is not False:\n voter_on_stage.last_name = last_name\n at_least_one_value_changed = True\n if twitter_handle is not False:\n voter_on_stage.twitter_screen_name = twitter_handle\n at_least_one_value_changed = True\n if email is not False:\n voter_on_stage.email = email\n at_least_one_value_changed = True\n if password_text is not False:\n voter_on_stage.set_password(password_text)\n at_least_one_value_changed = True\n\n if at_least_one_value_changed:\n voter_on_stage.save()\n\n if password_text:\n # Check to see if a login has already been created\n pass\n messages.add_message(request, messages.INFO, 'Voter information updated.')\n except Exception as e:\n handle_record_not_saved_exception(e, logger=logger)\n messages.add_message(request, messages.ERROR, 'Could not save voter.')\n else:\n try:\n # Create new\n voter_on_stage = Voter.objects.create_user(email, email, password_text)\n\n # Update new voter\n if first_name is not False:\n voter_on_stage.first_name = first_name\n at_least_one_value_changed = True\n if last_name is not False:\n voter_on_stage.last_name = last_name\n at_least_one_value_changed = True\n if twitter_handle is not False:\n voter_on_stage.twitter_screen_name = twitter_handle\n at_least_one_value_changed = True\n if email is not False:\n voter_on_stage.email = email\n at_least_one_value_changed = True\n\n if at_least_one_value_changed:\n voter_on_stage.save()\n\n messages.add_message(request, messages.INFO, 'Added new Voter.')\n except Exception as e:\n messages.add_message(request, messages.ERROR, 'Could not save voter.')\n\n return HttpResponseRedirect(reverse('voter:voter_edit', args=(voter_id,)))", "def adpanel():\n if 'user_id' not in session or session['user_id'] != 'admin':\n return redirect(url_for('login'))\n return render_template('adminpanel.html')", "def edit_plante(id):\n plante = get_plante(id)\n form = PlanteForm(plante)\n return render_template(\n \"create-plante.html\",\n title = plante.get_name()+\" - edit\",\n form = form,\n plante = plante,\n param = \"modif\")", "def management_update(request, slug, id):\n #verifies if the company exists if not returns a 404 page\n company =get_object_or_404(Company,slug=slug)\n management_reference = get_object_or_404(Management, id=id,company=company)\n management_form = ManagementForm(instance=management_reference)\n\n #verifies the person has access to the company or is an incubator employee\n edit = validate_user_company_access_or_redirect(request,company)\n\n #if the request is GET presents info, \n if request.method == 'GET':\n return render_to_response('management_form.html',{'form':management_form, 'info': 
management_reference},context_instance=RequestContext(request))\n else:\n management_form = ManagementForm(request.POST, instance=management_reference)\n #if is POST Validates the form is well filled and save it redirecting to the company page \n if management_form.is_valid():\n management_form.save()\n\n return HttpResponseRedirect('/company/'+str(slug))\n #if not well filled redirect to the original update page and display error\n else:\n return render_to_response('management_form.html', \n {'form': management_form, 'form_errors': management_form.errors, 'info': management_reference},\n context_instance=RequestContext(request))", "def show_and_edit_pet_page(pet_id):\n \n pet = Pet.query.get(pet_id)\n\n form = EditPetPage(obj=pet)\n\n if form.validate_on_submit():\n pet.photo_url = form.photo_url.data\n pet.notes = form.notes.data\n pet.available = form.available.data\n\n db.session.commit()\n\n return redirect('/')\n\n else:\n return render_template('display_pet.html', pet=pet, form=form)", "def view_panels(id=None):\n if not id:\n id = request.args.get('id')\n\n if id:\n panels = get_panels_by_project_id(s, id)\n else:\n panels = get_panels(s)\n result = []\n project_name = \"All\"\n for i in panels:\n row = dict(zip(i.keys(), i))\n status = check_panel_status(s, row[\"panelid\"])\n row[\"status\"] = status\n permission = check_user_has_permission(s, current_user.id, row[\"projectid\"])\n locked = check_if_locked(s, row[\"panelid\"])\n row['permission'] = permission\n row['locked'] = locked\n\n if id:\n project_name = row['projectname']\n # if check_user_has_permission(s, current_user.id, row[\"projectid\"]):\n # result.append(row)\n result.append(row)\n table = ItemTablePanels(result, classes=['table', 'table-striped'])\n return render_template('panels.html', panels=table, project_name=project_name)", "def editPanels(self, panels, **properties):\n\n if type(panels) is not list:\n panels = [panels]\n\n panels.reverse()\n\n panelsXML = []\n for panel in panels:\n panelXML = ET.fromstring(panel)\n\n multiTrackXML = panelXML.find(\"MultiTrackElements\")\n if multiTrackXML is not None:\n self.__saveSetupData(multiTrackDataXMLS=ET.tostring(multiTrackXML), properties=panelXML.attrib)\n\n panelsXML.append({\"properties\":panelXML.attrib, \"multiTrackXML\":multiTrackXML})\n\n if panelsXML:\n if (OSUtils.type == OSUtils.LINUX):\n paths = []\n for panel in panelsXML:\n properties = panel['properties'].copy()\n properties['frame'] = '#'\n mode = Mode(properties.get('show', None), properties.get('sequence', None))\n path = mode.get('[recipeCompedFile]', properties)\n paths.append(path)\n if not self.fileServiceLocal.exists(path):\n raise utils.FlixException(msg=\"Missing File: %s\"%path)\n command = Mode().get(\"[editImageCommand]\")\n log('Edit command %s' % command)\n os.system(command + \" \" + ' '.join(paths))\n else:\n Photoshop().createPhotoshopFileForPanels(panelsXML)\n\n return \"Done\"", "def edit(self, id, *args, **kw):\n atras = \"/rolesplantilla/\"\n if (not kw['contexto']):\n redirect('../')\n elif (kw['contexto'] == \"proyecto\"):\n selector = SelectorPermisosPlantillaProy\n elif (kw['contexto'] == \"fase\"):\n selector = SelectorPermisosPlantillaFase\n elif (kw['contexto'] == \"ti\"):\n kw[\"contexto\"] = u\"Tipo de Ítem\"\n selector = SelectorPermisosPlantillaTi\n \n self.edit_form = RolPlantillaEditForm(DBS=DBSession, selector=selector) \n tmpl_context.widget = self.edit_form\n rol_plantilla_edit_form = self.edit_form\n \n \n page=u\"Editar Rol Plantilla de 
{contexto}\".format(contexto=kw['contexto'])\n \n value = self.edit_filler.get_value(values={'id_rol': int(id)})\n \n #agregado\n if value[\"tipo\"].find(\"Plantilla\") < 0:\n page=u\"Editar Rol de {contexto}\".format(contexto=kw['contexto'])\n atras = \"/roles/\"\n \n return dict(value=value, page=page, atras=atras)", "def onEditToolClicked(self, event):\n i_selected = self.db_listBox.GetSelection()\n if i_selected >= 0:\n manager = self.getManager()\n if manager:\n try:\n data = manager.getData()\n db_record = data['records'][i_selected]\n manager.editDB(parent=self, db_guid=db_record['guid'])\n\n self.refreshDBList()\n except:\n log_func.fatal(u'Error edit DB engine')\n event.Skip()", "def edit(self):\n current_user_roles = get_user_roles()\n\n if current_user_roles[\"STFADM\"] or request.args.get(\"user_id\", None) == current_user.net_id:\n folder_path = \"{0}/user_uploads/{1}/{2}/\".format(self.__APP_PATH__, request.args.get(\"user_id\", None), request.args.get(\"request_id\", None))\n\n with open(\"{0}submission.json\".format(folder_path), mode=\"r\") as request_json:\n request_details = json.load(request_json)\n\n request_saved = path.exists(\"{0}/request.saved\".format(folder_path))\n request_returned = path.exists(\"{0}/request.returned\".format(folder_path))\n\n if request_saved or request_returned:\n valid_accounts = [acc for acc in PIAccounts.query.filter((PIAccounts.net_id == current_user.net_id) & (((datetime.now() - timedelta(days=2*365)) < PIAccounts.end_date) | (PIAccounts.end_date.is_(None)))).with_entities(PIAccounts.end_date, PIAccounts.account_number, PIAccounts.description)]\n\n return render_template(\"{0}/edit_request.html\".format(self.__APP_DIR__), user_accounts=valid_accounts, request_details=request_details, request_saved=request_saved, request_returned=request_returned)\n return abort(403)", "def toggle_locked():\n panel_id = request.args.get('id')\n json = False\n if not panel_id:\n json = True\n panel_id = request.json['id']\n project_id = get_project_id_by_panel_id(s, panel_id)\n if current_user.id == get_locked_user(s, panel_id) and json:\n unlock_panel_query(s, panel_id)\n return jsonify(\"complete\")\n elif check_user_has_permission(s, current_user.id, project_id):\n unlock_panel_query(s, panel_id)\n return manage_locked(message=\"Panel Unlocked\")\n else:\n return manage_locked(message=\"Hmmmm you don't have permission to do that\")", "def office_update(request, slug, id):\n #verifies if the company exists if not returns a 404 page\n company =get_object_or_404(Company,slug=slug)\n office_reference = get_object_or_404(Office, id=id,company=company)\n office_form = OfficeForm(instance=office_reference)\n\n #verifies the person has access to the company or is an incubator employee\n edit = validate_user_company_access_or_redirect(request,company)\n\n #if the request is GET presents info, \n if request.method == 'GET':\n return render_to_response('office_form.html',{'form':office_form, 'info': office_reference},context_instance=RequestContext(request))\n else:\n office_form = OfficeForm(request.POST, instance=office_reference)\n #if is POST Validates the form is well filled and save it redirecting to the company page \n if office_form.is_valid():\n office_form.save(commit = False)\n\n return HttpResponseRedirect('/company/'+str(slug))\n #if not well filled redirect to the original update page and display error\n else:\n return render_to_response('office_form.html', \n {'form': office_form, 'form_errors': office_form.errors, 'info': office_reference},\n 
context_instance=RequestContext(request))", "def check_virtualpanel_status(s, id):\n panels = check_virtualpanel_status_query(s, id)\n status = True\n for i in panels:\n if i.intro > i.current_version:\n status = False\n break\n if i.last is not None:\n if i.last == i.current_version:\n status = False\n break\n\n return status", "def edit_employee(request, employee_id):\n employee = Employee.objects.get(pk=int(employee_id))\n current_employee = Employee.objects.get(user__pk=request.user.pk)\n\n assert isinstance(employee, Employee)\n assert isinstance(current_employee, Employee)\n\n # if not current_employee.isEnsoUser() and current_employee.company.pk != employee.company.pk:\n # raise PermissionDenied()\n\n if not current_employee.hasAccessTo(employee):\n raise PermissionDenied()\n\n form = EditEmployeeForm(request.user, employee, {\n 'first_name': employee.user.first_name,\n 'last_name': employee.user.last_name,\n 'email': employee.user.email,\n 'manager': employee.manager.id if employee.manager else 0,\n 'language_code': employee.language_code,\n # 'development_plan_type': employee.development_plan_type.id,\n 'is_manager': employee.is_manager\n })\n if 'manager' in form.fields:\n managerQS = Employee.objects.filter(is_manager=True, company__pk=employee.company.pk)\n form.fields['manager'].queryset = managerQS\n # form.fields['development_plan_type'].queryset = DevelopmentPlanType.objects.filter(\n # Q(company__pk=employee.company.pk) | Q(company__isnull=True)\n # )\n is_me = employee.user.pk == request.user.pk\n return TemplateResponse(\n request,\n 'mus/edit_employee_form.html',\n {\n 'edit_employee_form': form,\n 'employee_id': employee_id,\n 'me': is_me,\n 'name': employee.user.get_full_name()\n }\n )", "def office_edit_process_view(request):\n authority_required = {'verified_volunteer'} # admin, verified_volunteer\n if not voter_has_authority(request, authority_required):\n return redirect_to_sign_in_page(request, authority_required)\n\n office_id = convert_to_int(request.POST.get('office_id', 0))\n office_name = request.POST.get('office_name', False)\n google_civic_office_name = request.POST.get('google_civic_office_name', False)\n google_civic_election_id = request.POST.get('google_civic_election_id', 0)\n primary_party = request.POST.get('primary_party', False)\n state_code = request.POST.get('state_code', False)\n\n election_state = ''\n if state_code is not False:\n election_state = state_code\n elif google_civic_election_id:\n election_manager = ElectionManager()\n results = election_manager.retrieve_election(google_civic_election_id)\n if results['election_found']:\n election = results['election']\n election_state = election.get_election_state()\n\n # Check to see if this office is already in the database\n office_on_stage_found = False\n try:\n office_query = ContestOffice.objects.filter(id=office_id)\n if len(office_query):\n office_on_stage = office_query[0]\n office_on_stage_found = True\n except Exception as e:\n handle_record_not_found_exception(e, logger=logger)\n\n try:\n if office_on_stage_found:\n # Update\n # Removed for now: convert_to_int(office_on_stage.google_civic_election_id) >= 1000000 and\n if office_name is not False:\n office_on_stage.office_name = office_name\n if google_civic_office_name is not False:\n office_on_stage.google_civic_office_name = google_civic_office_name\n if primary_party is not False:\n office_on_stage.primary_party = primary_party\n if positive_value_exists(election_state):\n office_on_stage.state_code = election_state\n 
office_on_stage.save()\n office_on_stage_id = office_on_stage.id\n messages.add_message(request, messages.INFO, 'Office updated.')\n google_civic_election_id = office_on_stage.google_civic_election_id\n\n return HttpResponseRedirect(reverse('office:office_summary', args=(office_on_stage_id,)) +\n \"?google_civic_election_id=\" + str(google_civic_election_id))\n else:\n # Create new\n office_on_stage = ContestOffice(\n office_name=office_name,\n google_civic_election_id=google_civic_election_id,\n state_code=election_state,\n )\n # Removing this limitation: convert_to_int(office_on_stage.google_civic_election_id) >= 1000000 and\n if primary_party is not False:\n office_on_stage.primary_party = primary_party\n office_on_stage.save()\n messages.add_message(request, messages.INFO, 'New office saved.')\n\n # Come back to the \"Create New Office\" page\n return HttpResponseRedirect(reverse('office:office_new', args=()) +\n \"?google_civic_election_id=\" + str(google_civic_election_id))\n except Exception as e:\n handle_record_not_saved_exception(e, logger=logger)\n messages.add_message(request, messages.ERROR, 'Could not save office.')\n\n return HttpResponseRedirect(reverse('office:office_list', args=()) +\n \"?google_civic_election_id=\" + google_civic_election_id)", "def admin_panel(request):\n if not request.user.is_superuser: # security to redirect user that aren't admin\n return redirect(reverse('accueil'))\n context = context_general()\n return render(request, 'administration/admin_panel.html', context)", "def vineyard_edit(request):\n\n if request.method != 'POST':\n return HttpResponseNotAllowed(['POST'])\n\n data = json.loads(request.body.decode('utf-8'))\n\n auth_token = str(data.get('auth_token', ''))\n edit_vineyard_info = data.get('edit_vineyard_info', '')\n vineyard_id = str(edit_vineyard_info.get('vineyard_id', ''))\n is_enable = edit_vineyard_info.get('enable', '')\n\n try:\n if not verify_admin(auth_token):\n raise PlantalyticsAuthException(ADMIN_INVALID)\n\n message = (\n 'Attempting to edit info for vineyard id: {}.'\n ).format(vineyard_id)\n logger.info(message)\n invalid = (\n vineyard_id == '' or\n int(vineyard_id) < 0\n )\n if invalid:\n raise PlantalyticsDataException(VINEYARD_ID_INVALID)\n if is_enable != '':\n if not isinstance(is_enable, bool):\n raise PlantalyticsDataException(DATA_INVALID)\n cassy.edit_vineyard(edit_vineyard_info)\n message = (\n 'Successfully edited info for vineyard id: {}.'\n ).format(vineyard_id)\n logger.info(message)\n body = {\n 'errors': {}\n }\n return HttpResponse(\n json.dumps(body),\n content_type='application/json'\n )\n except PlantalyticsException as e:\n message = (\n 'Error attempting to edit vineyard info. 
Error code: {}'\n ).format(str(e))\n logger.warn(message)\n error = custom_error(str(e))\n return HttpResponseForbidden(error, content_type='application/json')\n except Exception as e:\n message = (\n 'Unknown error occurred while attempting '\n 'to edit vineyard info:'\n )\n logger.exception(message)\n error = custom_error(UNKNOWN, str(e))\n return HttpResponseServerError(error, content_type='application/json')", "def office_edit_process_view(request):\n status = ''\n success = True\n # admin, analytics_admin, partner_organization, political_data_manager, political_data_viewer, verified_volunteer\n authority_required = {'verified_volunteer'}\n if not voter_has_authority(request, authority_required):\n return redirect_to_sign_in_page(request, authority_required)\n\n ballotpedia_office_id = request.POST.get('ballotpedia_office_id', False) # Related to office_held\n ballotpedia_race_id = request.POST.get('ballotpedia_race_id', False) # Related to contest_office\n ballotpedia_race_office_level = request.POST.get('ballotpedia_race_office_level', False)\n ballotpedia_office_name = request.POST.get('ballotpedia_office_name', False)\n ballotpedia_is_marquee = request.POST.get('ballotpedia_is_marquee', False)\n ctcl_uuid = request.POST.get('ctcl_uuid', False)\n district_id = request.POST.get('district_id', False)\n google_civic_office_name = request.POST.get('google_civic_office_name', False)\n google_civic_office_name2 = request.POST.get('google_civic_office_name2', False)\n google_civic_office_name3 = request.POST.get('google_civic_office_name3', False)\n google_civic_office_name4 = request.POST.get('google_civic_office_name4', False)\n google_civic_office_name5 = request.POST.get('google_civic_office_name5', False)\n google_civic_election_id = request.POST.get('google_civic_election_id', 0)\n ocd_division_id = request.POST.get('ocd_division_id', False)\n office_held_we_vote_id = request.POST.get('office_held_we_vote_id', False)\n office_id = convert_to_int(request.POST.get('office_id', 0))\n office_name = request.POST.get('office_name', False)\n primary_party = request.POST.get('primary_party', False)\n state_code = request.POST.get('state_code', False)\n vote_usa_office_id = request.POST.get('vote_usa_office_id', False)\n is_battleground_race = request.POST.get('is_battleground_race', False)\n remove_duplicate_process = request.POST.get('remove_duplicate_process', False)\n redirect_to_contest_office_list = convert_to_int(request.POST.get('redirect_to_contest_office_list', 0))\n\n election_state = ''\n if state_code is not False:\n election_state = state_code\n elif google_civic_election_id:\n election_manager = ElectionManager()\n results = election_manager.retrieve_election(google_civic_election_id)\n if results['election_found']:\n election = results['election']\n election_state = election.get_election_state()\n\n # Check to see if this office is already in the database\n office_on_stage_found = False\n office_on_stage = None\n try:\n office_query = ContestOffice.objects.filter(id=office_id)\n if len(office_query):\n office_on_stage = office_query[0]\n office_on_stage_found = True\n except Exception as e:\n handle_record_not_found_exception(e, logger=logger)\n success = False\n\n if success:\n try:\n if office_on_stage_found:\n office_on_stage_id = office_on_stage.id\n google_civic_election_id = office_on_stage.google_civic_election_id\n else:\n # Create new\n office_on_stage = ContestOffice(\n office_name=office_name,\n google_civic_election_id=google_civic_election_id,\n 
state_code=election_state,\n )\n office_on_stage_id = office_on_stage.id\n google_civic_election_id = office_on_stage.google_civic_election_id\n office_on_stage_found = True\n if office_on_stage_found:\n # Update\n # Removing this limitation: convert_to_int(office_on_stage.google_civic_election_id) >= 1000000 and\n office_on_stage.ballotpedia_is_marquee = positive_value_exists(ballotpedia_is_marquee)\n if ballotpedia_office_id is not False:\n office_on_stage.ballotpedia_office_id = convert_to_int(ballotpedia_office_id)\n if ballotpedia_office_name is not False:\n office_on_stage.ballotpedia_office_name = ballotpedia_office_name\n if ballotpedia_race_id is not False:\n office_on_stage.ballotpedia_race_id = convert_to_int(ballotpedia_race_id)\n if ballotpedia_race_office_level is not False:\n office_on_stage.ballotpedia_race_office_level = ballotpedia_race_office_level\n if ctcl_uuid is not False:\n office_on_stage.ctcl_uuid = ctcl_uuid\n if district_id is not False:\n office_on_stage.district_id = district_id\n if positive_value_exists(election_state):\n office_on_stage.state_code = election_state\n if google_civic_office_name is not False:\n office_on_stage.google_civic_office_name = google_civic_office_name\n if google_civic_office_name2 is not False:\n office_on_stage.google_civic_office_name2 = google_civic_office_name2\n if google_civic_office_name3 is not False:\n office_on_stage.google_civic_office_name3 = google_civic_office_name3\n if google_civic_office_name4 is not False:\n office_on_stage.google_civic_office_name4 = google_civic_office_name4\n if google_civic_office_name5 is not False:\n office_on_stage.google_civic_office_name5 = google_civic_office_name5\n # Save office is_battleground_race for this year, and then prepare to update all related objects\n office_on_stage.is_battleground_race = positive_value_exists(is_battleground_race)\n election_day_text = office_on_stage.get_election_day_text()\n year = 0\n years_false_list = []\n years_true_list = []\n if positive_value_exists(election_day_text):\n date_as_integer = convert_we_vote_date_string_to_date_as_integer(election_day_text)\n year = date_as_integer // 10000\n if positive_value_exists(year):\n if positive_value_exists(is_battleground_race):\n years_false_list = []\n years_true_list = [year]\n else:\n years_false_list = [year]\n years_true_list = []\n years_list = list(set(years_false_list + years_true_list))\n if ocd_division_id is not False:\n office_on_stage.ocd_division_id = ocd_division_id\n if office_held_we_vote_id is not False:\n office_on_stage.office_held_we_vote_id = office_held_we_vote_id\n from office_held.models import OfficeHeldManager\n office_held_manager = OfficeHeldManager()\n office_held_results = office_held_manager.retrieve_office_held(\n office_held_we_vote_id=office_held_we_vote_id,\n read_only=True)\n if office_held_results['office_held_found']:\n office_held = office_held_results['office_held']\n office_on_stage.office_held_name = office_held.office_held_name\n if office_name is not False:\n office_on_stage.office_name = office_name\n if primary_party is not False:\n office_on_stage.primary_party = primary_party\n if vote_usa_office_id is not False:\n office_on_stage.vote_usa_office_id = vote_usa_office_id\n\n office_on_stage.save()\n office_on_stage_id = office_on_stage.id\n office_on_stage_we_vote_id = office_on_stage.we_vote_id\n messages.add_message(request, messages.INFO, 'Office updated.')\n # ##################################\n # Update \"is_battleground_race\" for candidates under this 
office through the link CandidateToOfficeLink\n # We can't automatically update all of these candidates with the office's setting,\n # because we may be saving a primary election office which isn't a battleground race,\n # and the candidate may have made it through to the general election which\n # *is* a battleground.\n # from candidate.controllers import update_candidates_with_is_battleground_race\n # results = update_candidates_with_is_battleground_race(office_we_vote_id=office_on_stage.we_vote_id)\n if positive_value_exists(office_on_stage_we_vote_id) and len(years_list) > 0:\n from politician.controllers import update_parallel_fields_with_years_in_related_objects\n results = update_parallel_fields_with_years_in_related_objects(\n field_key_root='is_battleground_race_',\n master_we_vote_id_updated=office_on_stage_we_vote_id,\n years_false_list=years_false_list,\n years_true_list=years_true_list,\n )\n if not results['success']:\n status += results['status']\n status += \"FAILED_TO_UPDATE_PARALLEL_FIELDS_FROM_OFFICE \"\n messages.add_message(request, messages.ERROR, status)\n\n return HttpResponseRedirect(reverse('office:office_summary', args=(office_on_stage_id,)) +\n \"?google_civic_election_id=\" + str(google_civic_election_id) +\n \"&state_code=\" + str(state_code))\n except Exception as e:\n handle_record_not_saved_exception(e, logger=logger)\n messages.add_message(request, messages.ERROR, 'Could not save office (create new): ' + str(e))\n else:\n messages.add_message(request, messages.ERROR, 'Could not save office, success = False from above: ' + status)\n\n if redirect_to_contest_office_list:\n return HttpResponseRedirect(reverse('office:office_list', args=()) +\n '?google_civic_election_id=' + str(google_civic_election_id) +\n '&state_code=' + str(state_code))\n\n if remove_duplicate_process:\n return HttpResponseRedirect(reverse('office:find_and_merge_duplicate_offices', args=()) +\n \"?google_civic_election_id=\" + str(google_civic_election_id) +\n \"&state_code=\" + str(state_code))\n else:\n return HttpResponseRedirect(reverse('office:office_edit', args=(office_id,)))" ]
[ "0.72431135", "0.6876697", "0.660208", "0.64722663", "0.64284134", "0.626455", "0.592217", "0.5538486", "0.5535285", "0.5225926", "0.5187769", "0.5110371", "0.5093875", "0.50041443", "0.49602246", "0.49470374", "0.49301475", "0.4923213", "0.4913949", "0.49104533", "0.4892809", "0.48711905", "0.48621714", "0.48528862", "0.48394752", "0.48359683", "0.48275566", "0.48244944", "0.47885314", "0.47854152" ]
0.74897695
0
Method to add virtual panel to DB.
def add_vp(): vp_name = request.json['vp_name'] panel_id = request.json['panel_id'] vp_id = create_virtualpanel_query(s, vp_name, panel_id) if vp_id != -1: lock_panel(s, current_user.id, panel_id) return jsonify(vp_id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_panel(self, panel):\n assert panel.PANEL_ID not in self.panels\n assert not self.tools, \"tools must be added after panels\"\n self.panels[panel.PANEL_ID] = panel\n panel.register_panel(self)", "def create_virtual_panel_process():\n form = CreateVirtualPanelProcess()\n\n if request.method == \"POST\":\n make_live = request.form['make_live']\n vp_id = request.args.get('id')\n if make_live == \"on\":\n make_vp_panel_live(s, vp_id)\n add_to_starlims(vp_id)\n panel_id = get_panel_by_vp_id(s, vp_id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_vpanel') + \"?id=\" + vp_id)\n elif request.method == \"GET\":\n form.panel.choices = get_panel_choices(s, current_user.id)\n url = url_for('panels.create_virtual_panel_process')\n return render_template('virtualpanels_createprocess.html', form=form, url=url, vp_id=\"main\")", "def add_panel():\n panel_name = request.json['panel_name']\n project_id = request.json['project_id']\n panel_id = create_panel_query(s, project_id, panel_name, current_user.id)\n return jsonify(panel_id)", "def make_virtualpanel_live():\n vpanelid = request.args.get('id')\n panelid = get_panel_by_vp_id(s, vpanelid)\n locked = check_if_locked(s, panelid)\n if locked:\n if current_user.id == get_locked_user(s, panelid):\n make_vp_panel_live(s, vpanelid)\n add_to_starlims(vpanelid)\n return redirect(url_for('panels.view_virtual_panels'))\n else:\n make_vp_panel_live(s, vpanelid)\n add_to_starlims(vpanelid)\n return redirect(url_for('panels.view_virtual_panels'))", "def edit_virtual_panel_process():\n form = EditVirtualPanelProcess()\n\n vp_id = request.args.get('id')\n panel_id = get_panel_by_vp_id(s, vp_id)\n if request.method == \"POST\":\n if request.form['make_live'] == \"on\":\n make_vp_panel_live(s, vp_id)\n add_to_starlims(vp_id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_vpanel') + \"?id=\" + vp_id)\n elif request.method == \"GET\":\n lock_panel(s, current_user.id, panel_id)\n panel_info = get_panel_details_by_id(s, panel_id)\n panel_name = panel_info.name\n form.panel.choices = [(panel_id, panel_name), ]\n\n panel_version = get_current_version(s, panel_id)\n panel_genes = get_genes_by_panelid(s, panel_id, panel_version)\n vp_info = get_vpanel_details_by_id(s, vp_id)\n vp_version = vp_info.current_version\n vp_name = vp_info.name\n form.vpanelname.data = vp_name\n vp_genes = get_genes_by_vpanelid_edit(s, vp_id, vp_version)\n genelist = \"\"\n vp_list = []\n for i in vp_genes:\n vp_list.append(i.id)\n\n genes = []\n print('new method')\n for i in panel_genes:\n if i.id in vp_list:\n genes.append({\"name\": i.name, \"id\": i.id, \"vp_list\": True})\n button = render_template(\"gene_button.html\", gene_name=i.name, gene_id=i.id, added=True)\n genelist += button\n\n else:\n genes.append({\"name\": i.name, \"id\": i.id, \"vp_list\": False})\n\n gene_html = render_template(\"panel_genes.html\", panel_genes=genes)\n\n url = url_for('panels.edit_virtual_panel_process') + '?id=' + str(vp_id)\n return render_template('virtualpanels_createprocess.html', form=form, genes=gene_html, genelist=genelist,\n vp_id=vp_id, panel_name=vp_name, current_version=vp_version, url=url)", "def view_virtual_panels(id=None):\n if not id:\n id = request.args.get('id')\n if id:\n panels = get_virtual_panels_by_panel_id(s, id)\n else:\n panels = get_virtual_panels_simple(s)\n result = []\n panel_name = \"Virtual\"\n for i in panels:\n row = dict(zip(i.keys(), i))\n\n row[\"current_version\"] = round(row[\"current_version\"], 1)\n\n status = 
check_virtualpanel_status(s, row[\"id\"])\n row[\"status\"] = status\n permission = check_user_has_permission(s, current_user.id, row[\"projectid\"])\n locked = check_if_locked_by_user_vpanel(s, current_user.id, row[\"panelid\"])\n\n row['permission'] = permission\n row['locked'] = locked\n\n status = check_virtualpanel_status(s, row[\"id\"])\n row[\"status\"] = status\n\n if id:\n panel_name = row['panelname'] + ' Virtual'\n # if check_user_has_permission(s, current_user.id, row[\"projectid\"]):\n # result.append(row)\n result.append(row)\n table = ItemTableVPanels(result, classes=['table', 'table-striped'])\n return render_template('panels.html', panels=table, project_name=panel_name,\n message='Virtual Panels are locked if their parent panel is being edited')", "def panel(self):\n data_to_track = {}\n for possession in self.possessions_to_track_panel:\n data_to_track[possession] = self._haves[possession]\n\n for variable in self.variables_to_track_panel:\n try:\n data_to_track[variable] = self.__dict__[variable]\n except KeyError:\n pass\n self.database_connection.put([\"panel\",\n data_to_track,\n str(self.id),\n self.group,\n str(self.round)])", "def addViewToDb(self,name):\n\t\tsql = \"INSERT INTO hudson_views(viewname) VALUES (%s)\"\n\t\tcsr = self.db.cursor()\n\t\tcsr.execute(sql,[name])", "def onNewToolClicked(self, event):\n manager = self.getManager()\n if manager:\n try:\n manager.newDB(parent=self)\n\n self.refreshDBList()\n except:\n log_func.fatal(u'Error new DB engine')\n event.Skip()", "def get_vpanel(self):\n if not self.vpanel:\n self.vpanel = VerticalPanel()\n RootPanel().add(self.vpanel)\n return self.vpanel", "def create_panel(self):\n return\n # return Panel(self)", "def view_vpanel():\n id = request.args.get('id')\n try:\n version = request.form[\"versions\"]\n except KeyError:\n version = None\n if id:\n status = check_virtualpanel_status(s, id)\n if not status:\n message = \"This panel has changes which cannot be viewed here as they have not been made live yet, if you have permission you can view these by editing the panel\"\n else:\n message = None\n panel_details = get_vpanel_details_by_id(s, id)\n for i in panel_details:\n if not version:\n version = panel_details.current_version\n panel_name = panel_details.name\n project_id = panel_details.project_id\n panel = get_regions_by_vpanelid(s, id, version)\n result = []\n rows = list(panel)\n if len(rows) != 0:\n bed = ''\n for i in rows:\n row = dict(zip(i.keys(), i))\n result.append(row)\n panel_name = i.panel_name\n current_version = i.current_version\n else:\n message = \"This Panel has no regions yet & may also have changes that have not been made live yet\"\n bed = 'disabled'\n current_version = version\n print(type(version))\n current_version = round(current_version, 1)\n version = round(float(version), 1)\n\n if check_user_has_permission(s, current_user.id, project_id):\n edit = ''\n else:\n edit = 'disabled'\n\n form = ViewPanel()\n v_list = get_prev_versions_vp(s, id)\n choices = []\n for i in v_list:\n choices.append((i, i))\n\n if (current_version, current_version) not in choices:\n choices.append((current_version, current_version))\n\n form.versions.choices = choices\n form.versions.default = current_version\n form.process()\n\n table = []\n\n for i in result:\n line = []\n line.append(i['chrom'])\n line.append(str(i['region_start']))\n line.append(str(i['region_end']))\n line.append(i['gene_name'])\n line.append(i['name'].replace(',', ' '))\n table.append(line)\n\n return 
render_template('panel_view.html', table=json.dumps(table), panel=table, panel_name=panel_name,\n edit=edit, bed=bed,\n version=version, panel_id=id, message=message, url=url_for('panels.view_vpanel'),\n scope='Virtual', form=form)\n\n else:\n return redirect(url_for('panels.view_virtual_panels'))", "def add_visualization(self, visualization, size_x=6, size_y=3, col=0, row=0):\n new_panel_index = self.get_max_index()+1\n if col and row:\n new_panel = {\n 'col': col, 'row': row,\n 'size_x': size_x, 'size_y': size_y,\n 'panelIndex': new_panel_index,\n 'type': 'visualization',\n 'id': visualization.id\n }\n self.panels.append(new_panel)\n return new_panel\n else:\n new_panel = append_panel(self.panels, size_x, size_y)\n if new_panel:\n new_panel['id'] = visualization.id\n new_panel['panelIndex'] = new_panel_index\n new_panel['type'] = 'visualization'\n return new_panel", "def register(self, panel):\n new_instance = panel()\n new_event_type = new_instance._meta.event_type\n if new_event_type in self.__class__._panels:\n raise Exception(\"Two panels with the same event type: %s\" % \\\n new_event_type)\n self.__class__._panels[new_event_type] = new_instance\n self.storage.register_event(new_event_type, new_instance._meta.dimensions.keys())", "def add(self):\n\n db.session.add(self)\n db.session.commit()", "def add(self):\n\n db.session.add(self)\n db.session.commit()", "def add_to_starlims(vpanelid):\n details = get_vpanel_details_by_id(s, vpanelid)\n print(details)\n version = round(details.current_version,1)\n panel_name = 'Analysis: ' + details.name + ' v' + str(version) + ' (' + details.panel_name + ')'\n print(len(panel_name))\n if len(panel_name) > 50:\n #todo do something with the name here!\n pass\n gene_result = get_genes_by_vpanelid(s, vpanelid, version)\n gene_list = list()\n for g in gene_result:\n gene_list.append(g.name)\n starlims = StarLims.StarLimsApi(test=True)\n testcode = starlims.add_new_test(panel_name, 'NGS Analysis', details.project_name, gene_list)\n if testcode > 0:\n add_testcode(s, vpanelid, details.current_version, testcode)\n\n return testcode", "def add_virtual_element(self, name, value=None, add_indicator=None):\n value_handler=values_module.VirtualValueHandler(value)\n if add_indicator is None:\n add_indicator=self.add_indicator\n indicator_handler=values_module.VirtualIndicatorHandler if add_indicator else None\n self._add_widget(name,self.ParamRow(None,None,value_handler,indicator_handler))", "def add_plant(db_path: str, plant: Plant) -> None:\n query = f'INSERT INTO plants (name, family_name, metadata) VALUES (\"{str(plant.name)}\", \"{str(plant.family_name)}\", \"{str(plant.metadata)}\")'\n\n conn: Connection = sqlite3.connect(path.join(db_path, 'company_data.db'))\n curr: Cursor = conn.cursor()\n try:\n curr.execute(query)\n except sqlite3.IntegrityError:\n raise ValueError(\"Error, plant already exists in database.\")\n\n conn.commit()\n curr.close()\n conn.close()", "def add_panel_regions():\n version_ids = request.json['id_ext']\n panel_id = request.json['panel_id']\n project_id = request.json['project_id']\n gene_name = request.json['gene_name']\n\n try:\n tx_id = request.json['pref_tx_id']\n add_preftxs_to_panel(s, project_id, [{\"gene\": gene_name, \"tx_id\": tx_id}, ])\n except KeyError:\n pass\n\n for i in version_ids:\n if i[\"ext_5\"] == 0:\n ext_5 = None\n else:\n ext_5 = i[\"ext_5\"]\n\n if i[\"ext_3\"] == 0:\n ext_3 = None\n else:\n ext_3 = i[\"ext_3\"]\n add_region_to_panel(s, i[\"id\"], panel_id, ext_3=ext_3, ext_5=ext_5)\n s.commit()\n 
return jsonify(\"complete\")", "def add_view(self, *args, **kwargs):\r\n if not kwargs.get(\"extra_context\"):\r\n kwargs[\"extra_context\"] = {}\r\n kwargs[\"extra_context\"].update({\r\n \"insert_classes\": self.admin_site.insert_classes,\r\n \"form_url\": \"herp\"\r\n })\r\n return super(ServeeModelAdmin, self).add_view(*args, **kwargs)", "def add_view_pl_button(self):\n self.view_pl = QPushButton(\"View Playlist\")\n self.view_pl.clicked.connect(self.view_pl_btn_push)\n self.hbtnbox.addWidget(self.view_pl)", "def create_main_panel(self):\n self.panel = wx.Panel(self)\n\n self.init_plot()\n self.canvas = FigCanvas(self.panel, -1, self.fig)\n\n self.control_box = VSControlBox(self.panel, -1, 'Information board')\n\n self.vbox = wx.BoxSizer(wx.VERTICAL)\n self.vbox.Add(self.canvas, 1, wx.LEFT | wx.TOP | wx.GROW)\n self.vbox.Add(self.control_box, 0, wx.ALIGN_LEFT | wx.TOP | wx.EXPAND)\n \n self.panel.SetSizer(self.vbox)\n self.vbox.Fit(self)", "def add_create_pl_btn(self):\n self.create_pl = QPushButton(\"Add to playlist\")\n self.create_pl.clicked.connect(self.pl_btn_push)\n self.hbtnbox.addWidget(self.create_pl)", "def createPanel(self, LibraryID, Name, **kwargs):\n if self.request(\"createPanel\", LibraryID=LibraryID, Name=Name, **kwargs) is None:\n return None\n return self.json_response[\"Result\"][\"PanelID\"]", "def add_virtualsource(self, name, **kwargs):\n return self._auraliser._add_object(name, Virtualsource, self.name, **kwargs)\n #obj = Virtualsource(self, name=name)\n #self._virtualsources.append(obj)\n #return self.get_virtualsource(obj.name)", "def enableEditorDrop():\n\n perspPanel = cmds.getPanel( withLabel='Persp View')\n panelControl = cmds.panel( perspPanel, query=True, control=True)\n cmds.control(panelControl, edit=True, dropCallback=panelDropLoad)\n global viewportHeight\n viewportHeight = cmds.control(panelControl, query=True, h=True)", "def add_plant(self, plant: 'Plant') -> None:\r\n self.plants.append(plant)\r\n self.centers.append(plant.get_center())\r\n self.make_rows()", "def save_data(self):\n db.session.add(self)\n db.session.commit( )", "def hdri_adding_panel(self, context):\r\n \r\n AM = context.window_manager.asset_m\r\n layout = self.layout\r\n \r\n box = layout.box()\r\n row = box.row()\r\n row.prop(AM, \"existing_thumb\", text = \"Use existing Thumbnails\")\r\n \r\n row = box.row()\r\n row.label(\"Thumbnail extention:\")\r\n row = box.row(align = True)\r\n row.prop(AM, \"thumb_ext\", expand = True)\r\n \r\n row = box.row(align = True)\r\n \r\n row.operator(\"wm.ibl_importer\", text=\"OK\", icon='FILE_TICK')\r\n row.operator(\"object.cancel_panel_choise\", text=\"Cancel\", icon='X')" ]
[ "0.655961", "0.5985063", "0.5937451", "0.576908", "0.559643", "0.5505857", "0.5472811", "0.5391672", "0.5387777", "0.5306345", "0.5272926", "0.52606857", "0.52213275", "0.5207247", "0.5133307", "0.5133307", "0.5127811", "0.51272595", "0.5119451", "0.5064431", "0.5005942", "0.49431825", "0.49383253", "0.49226654", "0.4918987", "0.48763034", "0.48597226", "0.4843575", "0.48395568", "0.4839391" ]
0.6955528
0
Method to add all regions for a gene to versions table
def add_all_regions_vp(): gene_id = request.json['gene_id'] vpanel_id = request.json['vpanel_id'] panel_id = request.json['panel_id'] add_all_regions_to_vp(s, panel_id, gene_id, vpanel_id) return jsonify({"genes": [gene_id, ]})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_all_regions():\n gene_id = request.json['gene_id']\n panel_id = request.json['panel_id']\n tx_id = request.json['tx_id']\n gene_name = request.json['gene_name']\n project_id = get_project_id_by_panel_id(s, panel_id)\n\n add_preftxs_to_panel(s, project_id, [{\"gene\": gene_name, \"tx_id\": tx_id}, ])\n add_genes_to_panel_with_ext(s, panel_id, gene_id)\n return jsonify({\"genes\": [gene_id, ]})", "def add_panel_regions():\n version_ids = request.json['id_ext']\n panel_id = request.json['panel_id']\n project_id = request.json['project_id']\n gene_name = request.json['gene_name']\n\n try:\n tx_id = request.json['pref_tx_id']\n add_preftxs_to_panel(s, project_id, [{\"gene\": gene_name, \"tx_id\": tx_id}, ])\n except KeyError:\n pass\n\n for i in version_ids:\n if i[\"ext_5\"] == 0:\n ext_5 = None\n else:\n ext_5 = i[\"ext_5\"]\n\n if i[\"ext_3\"] == 0:\n ext_3 = None\n else:\n ext_3 = i[\"ext_3\"]\n add_region_to_panel(s, i[\"id\"], panel_id, ext_3=ext_3, ext_5=ext_5)\n s.commit()\n return jsonify(\"complete\")", "def vcf_add_gene(vrecs,gi_by_chr_map):\n def _overlap(gi,vr):\n return gi['end_i'] >= vr.start and gi['start_i'] <= vr.end\n def _genes_for_vr(vr,gi_by_chr_map):\n cm = gi_by_chr_map[vr.CHROM]\n genes = [ gi['gene'] for gi in cm if _overlap(gi,vr) ] \n return genes\n for vr in vrecs:\n vr.genes = _genes_for_vr(vr,gi_by_chr_map)", "def regions(self, regions):\n self._regions = regions", "def geneExonicRegions(self, df):\n scaffold = df.iloc[0].scaffold\n strand = df.iloc[0].strand\n gene_type = df.iloc[0].gene_type\n gene_id = df.iloc[0].gene_id\n gene_name = df.iloc[0].gene_name\n start = df.start.min()\n end = df.end.max()\n bp = [False] * (end - start + 1)\n for i in range(df.shape[0]):\n s = df.iloc[i]['start'] - start\n e = df.iloc[i]['end'] - start + 1\n bp[s:e] = [True] * (e - s)\n regions = list(range(start, end + 1))\n groups = []\n\n for i, j in groupby(bp):\n groups.append((i, len(list(j))))\n e_start = 0\n\n for i in groups:\n e_end = e_start + i[1]\n if i[0]:\n record = Record(scaffold=scaffold, start=regions[e_start],\n end=regions[e_end - 1], gene_type=gene_type, gene_id=gene_id,\n gene_name=gene_name, strand=strand)\n yield record\n e_start += i[1]", "def add_regions(self, regions, **options):\n \n options.setdefault(\"col\", color(0,0,1))\n options.setdefault(\"style\", \"box\")\n options.setdefault(\"height\", 0.5)\n \n return self.add_track(RegionTrack, -.5, regions, **options)", "def _add_transform_genes(self):\n self._alleles.add(pu.make_int_gene(1, 1, 10, 1)) # 'AR' backshift (p)\n self._alleles.add(pu.make_choice_gene(1, [0, 1, 2])) # 'I' backshift (d) \n self._alleles.add(pu.make_choice_gene(1, [1, 2, 3])) # 'MA' backshift (q)\n self._alleles.add(pu.make_int_gene(1, 1, 10, 1)) # Seasonal 'AR' backshift (p)\n self._alleles.add(pu.make_choice_gene(1, [0, 1, 2])) # Seasonal 'I' backshift (d) \n self._alleles.add(pu.make_choice_gene(1, [1, 2, 3])) # Seasonal 'MA' backshift (q)\n self._loci_list += ['AR_order', 'I_order', 'MA_order',\n 'ssn_AR_order', 'ssn_I_order', 'ssn_MA_order']", "def ensembl_genes(species,log=0):\n \n t1=time.time()\n #open ace genes by- ensembl region\n path=os.environ['PYDATA']+'/'+species+'/aceview/'+species+'_genes_by_ensembl_region.bkdb'\n aceGeneDB=bsddb.btopen(path,'r') \n \n path=os.environ['PYDATA']+\"/\"+species+\"/ensembl/\"+species+'_genes_by_region.bkdb'\n otherGenesDB=bsddb.btopen(path,'r')\n '''create ensembl genes by ace gene'''\n path=os.environ['PYDATA']+'/'+species+'/aceview/'+species+'_ensembl_genes_by_gene.bkdb'\n 
otherGenesByGeneDB=bsddb.btopen(path,'w')\n geneNb=0\n\n for region in aceGeneDB.keys():\n if region not in otherGenesDB.keys():\n print 'region %s not in ensembl/%s_genes_by_region.bkdb'%(region,species)\n else: \n print 'processing region ',region\n aceGenes=cPickle.loads(aceGeneDB[region])\n otherGenes=cPickle.loads(otherGenesDB[region])\n #construct dictionnary of ace genes containing ensembl genes\n ace={}\n '''process each ensembl gene in the current region'''\n for ensemblGeneIndex in range(len(otherGenes.IDs)): \n '''find the index of the ace genes that contains either the start or the end of the current ensembl gene'''\n aceGeneIndexes=[]\n indexes=bintools.owningStructure(otherGenes.starts[ensemblGeneIndex],otherGenes.strands[ensemblGeneIndex],aceGenes.starts,aceGenes.ends,aceGenes.strands,1)\n aceGeneIndexes.extend(indexes) \n indexes=bintools.owningStructure(otherGenes.ends[ensemblGeneIndex],otherGenes.strands[ensemblGeneIndex],aceGenes.starts,aceGenes.ends,aceGenes.strands,1)\n aceGeneIndexes.extend(indexes) \n '''register the current ensembl gene index in the ace dictionnary'''\n for aceGeneIndex in aceGeneIndexes: \n try:\n ace[aceGenes.IDs[aceGeneIndex]].append(ensemblGeneIndex)\n except: \n ace[aceGenes.IDs[aceGeneIndex]]=[ensemblGeneIndex] \n \n '''process each ace gene in the current region''' \n for aceGeneIndex in range(len(aceGenes.IDs)): \n '''find the index of the ensembl genes that contains either the start or the end of the current ace gene'''\n ensemblGeneIndexes=[]\n indexes=bintools.owningStructure(aceGenes.starts[aceGeneIndex],aceGenes.strands[aceGeneIndex],otherGenes.starts,otherGenes.ends,otherGenes.strands,1) \n ensemblGeneIndexes.extend(indexes)\n indexes=bintools.owningStructure(aceGenes.ends[aceGeneIndex],aceGenes.strands[aceGeneIndex],otherGenes.starts,otherGenes.ends,otherGenes.strands,1) \n ensemblGeneIndexes.extend(indexes)\n '''add the ensembl gene indexes found in the previous for loop processing ensembl genes'''\n try:\n ensemblGeneIndexes.extend(ace[aceGenes.IDs[aceGeneIndex]])\n except:\n pass \n ensemblGeneIndexes=set(ensemblGeneIndexes)\n '''recover the corresponding ensembl gene IDs'''\n ensemblGeneIDs=[] \n for i in range(len(ensemblGeneIndexes)):\n ensemblGeneIDs.append(otherGenes.IDs[ensemblGeneIndexes.pop()]) \n '''register the current ace gene'''\n ensemblGeneIDs.sort()\n otherGenesByGeneDB[aceGenes.IDs[aceGeneIndex]]=cPickle.dumps(ensemblGeneIDs,protocol=-1)\n geneNb=geneNb+1\n \n otherGenesByGeneDB.close() \n otherGenesDB.close()\n aceGeneDB.close()\n t2=time.time()\n if log!=0:\n log.write('%s\\t%s\\t\\t17\\t%s_ensembl_genes_by_gene.bkdb\\taceview\\t%u\\t%.2f\\n'%(date.today(),species,species,geneNb,t2-t1))", "def add_liftover_file(in_file, regions):\n with open(in_file) as in_handle:\n for line in in_handle:\n if not line.startswith(\"#\"):\n chrom, start, end = line.strip().split()\n key = (chrom.replace(\"chr\", \"\"), int(start), int(end))\n regions[key] += 1\n return regions", "def regions(self):\n\n class RegionIter(object):\n def __init__(self, region_based):\n self._region_based = region_based\n\n def __len__(self):\n return self._region_based._region_len()\n\n def __iter__(self):\n return self()\n\n def _fix_chromosome(self, regions):\n for r in regions:\n r.fix_chromosome(copy=True)\n\n def __call__(self, key=None, *args, **kwargs):\n fix_chromosome = kwargs.pop('fix_chromosome', False)\n\n if key is None:\n iterator = self._region_based._region_iter(*args, **kwargs)\n else:\n if isinstance(key, string_types) or 
isinstance(key, GenomicRegion):\n iterator = self._region_based.region_subset(key, *args, **kwargs)\n else:\n iterator = self._region_based._get_regions(key, *args, **kwargs)\n\n if fix_chromosome:\n return self._fix_chromosome(iterator)\n else:\n return iterator\n\n def __getitem__(self, item):\n if isinstance(item, string_types) or isinstance(item, GenomicRegion):\n return self._region_based.region_subset(item)\n return self._region_based._get_regions(item)\n\n return RegionIter(self)", "def add_sr_ebi_brc4_names(self,\n seq_region_file: str,\n seq_region_map: dict,\n attrib_type_map: dict,\n work_dir: str,\n unversion: bool = False):\n os.makedirs(work_dir, exist_ok=True)\n\n # return if there's nothing to add\n if not seq_region_file: return\n\n # technical / optimization. get atttib_type_id(s) for \"(EBI|BRC4)_seq_region_name\"\n tagged_sr_name_attrib_id = {\n tag : self.id_from_map_or_die(f\"{tag}_seq_region_name\", attrib_type_map, \"attrib_type_map\") for tag in [\"EBI\", \"BRC4\"]\n }\n\n # load BRC4/EBI name from seq_region file\n brc4_ebi_name_attrib_trios = [] # [ (seq_region_id, attrib_id, value)... ] list of trios for inserting into db \n with open(seq_region_file) as in_file:\n seq_regions = list(json.load(in_file))\n for seq_region in seq_regions:\n # get seq_region_id (perhaps, by using unversioned name)\n seq_region_name, seq_region_id, unversioned_name = \\\n self.name_and_id_from_seq_region_item(seq_region, seq_region_map, try_unversion = unversion)\n # append attribs to the brc4_ebi_name_attrib_trios list\n for tag in [\"BRC4\", \"EBI\"]:\n attrib_name = f\"{tag}_seq_region_name\"\n attrib_id = tagged_sr_name_attrib_id[tag]\n value = seq_region.get(attrib_name, seq_region_name)\n brc4_ebi_name_attrib_trios.append( (seq_region_id, attrib_id, self.quote_or_null(value)) )\n\n # run insertion SQL\n self.insert_to_db(\n brc4_ebi_name_attrib_trios,\n \"seq_region_attrib\",\n [\"seq_region_id\", \"attrib_type_id\", \"value\"],\n self.pjc(work_dir, \"brc4_ebi_seq_region_synonyms\"),\n ignore = True\n )", "def _add_transform_genes(self):\n self._alleles.add(pu.make_int_gene(1, 1, 10, 1)) # 'AR' backshift (p)\n self._alleles.add(pu.make_choice_gene(1, [0, 1, 2])) # 'I' backshift (d) \n self._alleles.add(pu.make_choice_gene(1, [1, 2, 3])) # 'MA' backshift (q)\n self._loci_list += ['AR_order', 'I_order', 'MA_order']", "def sync_region(self, region_id):\n self.init_structures()\n con = SimConnection()\n con.connect(self.gridinfo._url)\n scenedata = con._con.ogrescene_list({\"RegionID\":region_id})[\"res\"]\n objects = editor.getSelected()\n if not objects:\n objects = bpy.data.objects\n for obj in objects:\n obj_uuid = str(self.get_uuid(obj))\n if obj_uuid:\n if obj_uuid in scenedata:\n self.import_group(obj_uuid, scenedata[obj_uuid], 10)", "def RegionList(self):\n command = \"\"\"\n IPython.notebook.kernel.execute(\"RegionList=\" + JSON.stringify(JS9.GetShapes(\"regions\", {{display: '{wid}JS9'}})));\n \"\"\".format(wid=self.wid)\n get_ipython().run_cell_magic('javascript', '', command)", "def get_regions_from_genes(gene_list, gtf_pr):\n\n regions = []\n\n bad_genes = []\n for gene in gene_list:\n\n gene_pr = gtf_pr[gtf_pr.gene_name == gene]\n\n if gene_pr.empty:\n bad_genes.append(gene)\n continue\n\n chrom = gene_pr.df.Chromosome.to_list()[0]\n start = gene_pr.df.Start.min() - 100\n end = gene_pr.df.End.max() + 100\n\n regions.append(\"{}:{}-{}\".format(chrom, start, end))\n\n if bad_genes:\n print(\n \"\\n!!ERROR!! At least one gene from the list was not found in the gtf file. 
Please make sure the gene symbol provided is correct and in the gtf file. If the symbol is a correct symbol, check for alternative gene symbols in the gtf file.\"\n )\n print(\"Bad Gene(s):\\n\\t- {}\\n\".format(\"\\n\\t- \".join(bad_genes)))\n sys.exit(1)\n\n return regions", "def AddRegions(self, **kwargs):\n # Addregions use pixel coordinates. listRegions and SaveRegions use RA and Dec.\n n_objs = 0\n objs = []\n # default shape is circle\n if not 'shape' in kwargs:\n kwargs['shape'] = ['circle']\n for k in kwargs.keys():\n n_objs = max(n_objs, len(kwargs[k]))\n for j in range(n_objs):\n temp = {}\n for k in kwargs.keys():\n try:\n temp[k] = kwargs[k][j]\n except IndexError:\n if k == 'shape': \n temp[k] = 'circle'\n objs.append(temp)\n self.all_objs = json.dumps(objs)\n command = \"JS9.AddRegions({objs}, {{display:'{wid}{suffix}'}})\".format(objs=self.all_objs, wid=self.wid, suffix=self.suffix)\n get_ipython().run_cell_magic('javascript', '', command)", "def _import_insee_region(self, cr, uid, ids, data_dir, context=None):\n if context is None:\n context = {}\n filepath = os.path.abspath(os.path.join(data_dir, 'reg2011.csv'))\n region_obj = self.pool.get('insee.region')\n with open(filepath, 'rb') as regfile:\n reader = csv.DictReader(regfile)\n for row in reader:\n values = {\n 'region': row['REGION'],\n 'cheflieu': row['CHEFLIEU'],\n 'tncc': row['TNCC'],\n 'ncc': row['NCC'],\n 'nccenr': row['NCCENR'],\n }\n region_obj.create(cr, uid, values, context=context)", "def write_regions(pathfolder, key_firms, regions, methodvalues):\n ## Generate namefile\n namefile = generate_namefile(pathfolder, methodvalues)\n\n ## Writting\n db = shelve.open(namefile)\n db['nif'] = key_firms\n db['regions'] = regions\n db['methodvalues'] = methodvalues\n db.close()", "def _add_transform_genes(self):\n pass", "def __init__(self):\n self.regions = []", "def add(cls, name, supply_centers, excel_table):\r\n # Make and register a lovely new variant\r\n variant = Variant(name, supply_centers)\r\n cls.registry.append(variant)\r\n\r\n # Split string into a list of lists\r\n excel_rows = [line.split('\\t') for line in excel_table.split('\\n')]\r\n\r\n # Remove leading blank rows\r\n while excel_rows[0] == ['']:\r\n excel_rows.pop(0)\r\n \r\n # Remove and store header in CSV format\r\n if excel_rows[0][0] == '':\r\n excel_rows[0].pop(0) # Remove leading tab\r\n variant.header = ','.join(excel_rows.pop(0)).strip()\r\n\r\n # Iterate through pre-computed country data\r\n for row in excel_rows:\r\n if row == ['']:\r\n continue\r\n \r\n # Remove \"Sample \" prefix if William still has it in the country name for some reason\r\n row[0] = row[0][len(\"Sample \"):] if row[0].startswith(\"Sample \") else row[0]\r\n\r\n # Store pre-computed information as CSV string in a `countries` dict with the country name as the key\r\n variant.countries[row.pop(0)] = ','.join(row)", "def create_genes_table(self, fn_genes):\n log.info(\"Creating table with information about the genes ...\")\n gene_record = GeneParser.GeneRecord()\n names = gene_record.fields_names\n types = gene_record.fields_types\n if len(names) != len(types):\n raise ValueError, \"The number of fields is different from the \"\\\n \"number of types\"\n self.create_table(self.GenesTable,names,types)\n fh = open(fn_genes, \"r\")\n log.debug(\"Reading file %s\",fn_genes)\n reader = csv.reader(fh, delimiter=\"\\t\")\n reader.next() # discard first line\n data = []\n for row in reader:\n if row[0] == \"\":\n continue\n g = GeneParser.GeneRecord()\n g.read(reader, row)\n 
data.append(g.get_values())\n self.store_data(self.GenesTable,data)", "def add_karyotype_bands(self,\n seq_region_file: str,\n seq_region_map: dict,\n attrib_type_map: dict,\n work_dir: str,\n unversion: bool = False,\n karyotype_bands_property = \"karyotype_bands\") -> list: # [ (seq_region_name, seq_region_id, unversioned_name) ]\n os.makedirs(work_dir, exist_ok=True)\n\n # return if there's nothing to add\n if not seq_region_file: return\n\n # resulting list of seq regions with bands\n seq_regions_with_karyotype_bands = [] # [ ( seq_region_name, seq_region_id, unversioned_name )... ] \n\n # load BRC4/EBI name from seq_region file\n band_tuples = [] # [ (seq_region_id, seq_region_start, seq_region_end, band|\"NULL\", stain|\"NULL\")... ] list of tuples for inserting into db \n with open(seq_region_file) as in_file:\n seq_regions = list(json.load(in_file))\n for seq_region in filter(lambda sr: sr.get(karyotype_bands_property, False), seq_regions):\n # iterate through all seq_regions having non-empty \"karyotype_bands\" \n\n # get seq_region_id (perhaps, by using unversioned name)\n seq_region_name, seq_region_id, unversioned_name = \\\n self.name_and_id_from_seq_region_item(seq_region, seq_region_map, try_unversion = unversion)\n\n #append trio to the resulting list \n seq_regions_with_karyotype_bands.append( (seq_region_name, seq_region_id, unversioned_name) )\n\n # append bands to the band_tuples list\n for band in seq_region[ karyotype_bands_property ]:\n # print(\"BAND: \" + str(band), file = sys.stderr)\n # coords\n seq_region_start = band[\"start\"]\n seq_region_end = band[\"end\"]\n # band_name and stain\n band_name = band.get(\"name\", None)\n stain = band.get(\"stain\", None)\n # special cases for stain\n structure = band.get(\"structure\", None)\n if structure == \"telomere\":\n stain = \"TEL\"\n elif structure == \"centromere\":\n stain = \"ACEN\"\n\n # append tuple\n band_tuples.append( (seq_region_id, seq_region_start, seq_region_end, self.quote_or_null(band_name), self.quote_or_null(stain)) )\n\n # run insertion SQL\n self.insert_to_db(\n band_tuples,\n \"karyotype\",\n [\"seq_region_id\", \"seq_region_start\", \"seq_region_end\", \"band\", \"stain\"],\n self.pjc(work_dir, \"karyotype_insertion\"),\n ignore = True\n )\n\n # return resulting list of regions with bands trios\n return seq_regions_with_karyotype_bands", "def update (self) :\n for met in self.gene :\n met(self)", "def _discoverNewZincRegions(self):\n newRegionCount = 0\n zincChildRef = self._zincRegion.getFirstChild()\n while zincChildRef.isValid():\n childName = zincChildRef.getName()\n neonChild = self._findChildByName(childName)\n if not neonChild:\n neonChild = NeonRegion(childName, zincChildRef, self)\n neonChild._ancestorModelSourceCreated = True\n self._children.append(neonChild)\n newRegionCount += (1 + neonChild._discoverNewZincRegions())\n zincChildRef = zincChildRef.getNextSibling()\n return newRegionCount", "def add_gene_ids(self, genes_list):\n orig_num_genes = len(self.genes)\n\n for g in list(set(genes_list)):\n if not self.genes.has_id(g):\n new_gene = GenePro(id=g, pdb_file_type=self.pdb_file_type, root_dir=self.genes_dir)\n if self.model:\n self.model.genes.append(new_gene)\n else:\n self.genes.append(new_gene)\n\n log.info('Added {} genes to GEM-PRO project'.format(len(self.genes)-orig_num_genes))", "def initialize_region(self):\n self.new_region_name = \"\"\n self.map.regions.create_new_region()", "def create_panel_custom_regions():\n panel_id = request.json[\"panel_id\"]\n chrom = 
request.json[\"chrom\"]\n start = request.json[\"start\"]\n end = request.json[\"end\"]\n name = request.json[\"name\"]\n regions = select_region_by_location(s, chrom, start, end) # if region already exists, return current entry\n if regions:\n for i in regions:\n add_region_to_panel(s, i.id, panel_id)\n s.commit()\n continue\n else:\n create_custom_region(s, panel_id, chrom, start, end, name)\n\n return jsonify(\"complete\")", "def _informRegionChange(self, treeChange):\n rootRegion = self\n while rootRegion._parent:\n rootRegion = rootRegion._parent\n for callback in rootRegion._regionChangeCallbacks:\n callback(self, treeChange)", "def generateAllRegionVectors():\n\tregionVectors = []\n\tfor i in range(NUM_REGION_VECTORS):\n\t\tregionVectors.append('{0:04x}'.format(i))\n\treturn regionVectors" ]
[ "0.66544604", "0.6378839", "0.5810066", "0.5718972", "0.557586", "0.54156053", "0.5386809", "0.53579724", "0.5317539", "0.5312527", "0.5252136", "0.52013516", "0.51998466", "0.51921564", "0.51599437", "0.51550764", "0.5132998", "0.5104016", "0.5098529", "0.50962406", "0.5076107", "0.50634766", "0.5029969", "0.5025251", "0.50160724", "0.5011248", "0.49889308", "0.49489906", "0.4938177", "0.49292248" ]
0.6766564
0
Method to add a new test to StarLIMS. The method calls the bioinfoweb API method through the commonlibs producers; the TESTCODE value returned is added to the virtual panels table.
def add_to_starlims(vpanelid): details = get_vpanel_details_by_id(s, vpanelid) print(details) version = round(details.current_version,1) panel_name = 'Analysis: ' + details.name + ' v' + str(version) + ' (' + details.panel_name + ')' print(len(panel_name)) if len(panel_name) > 50: #todo do something with the name here! pass gene_result = get_genes_by_vpanelid(s, vpanelid, version) gene_list = list() for g in gene_result: gene_list.append(g.name) starlims = StarLims.StarLimsApi(test=True) testcode = starlims.add_new_test(panel_name, 'NGS Analysis', details.project_name, gene_list) if testcode > 0: add_testcode(s, vpanelid, details.current_version, testcode) return testcode
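A minimal usage sketch for the method above. The panel id 42 is an illustrative assumption; add_to_starlims() and its return convention (a positive TESTCODE on success) come from the snippet itself.

# Hypothetical call site: the virtual panel id is made up for illustration.
testcode = add_to_starlims(42)
if testcode > 0:
    print("StarLIMS registered the test with code", testcode)
else:
    print("StarLIMS did not return a valid test code")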
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def addTest(self, test):\r\n self.tests.append(test)\r\n return", "def add_test(self, testsuite, test):\n self.tests[testsuite].append(TestCase(test, self))\n self.num_tests += 1", "def add_test(self,test):\n l = test.id.split('.')\n s_obj = self\n while len(l) > 0:\n s_name = l.pop(0)\n if len(l) > 0:\n if s_name in s_obj.suites:\n s_obj = s_obj.suites[s_name]\n else:\n new_suite = Suite(s_name,parent=s_obj)\n s_obj.suites[s_name] = new_suite\n s_obj = new_suite\n s_obj.tests.append(test)", "def test_instrument_add_adds_instrument(self):\n test_instrument = {'abbv': 'TEST',\n 'name': 'SELENIUM TEST INSTRUMENT',\n 'type': 'TEST_RADAR',\n 'vendor': 'SELENIUM',\n 'description': 'Selenium Injected Instrument'}\n\n self.browser.get(self.warno_url)\n self.browser.find_element_by_link_text('Instruments').click()\n self.browser.find_element_by_id('new-instrument-redirect-button').click()\n\n for key in test_instrument.keys():\n element = self.browser.find_element_by_name(key)\n element.send_keys(test_instrument[key])\n\n site_select = Select(self.browser.find_element_by_name('site'))\n\n site_select.select_by_visible_text('OLI')\n\n self.browser.find_element_by_id('submit').click()\n # Should insert instrument, and returns us to instruments page.\n\n self.assertTrue('instruments' in self.browser.current_url,\n 'Did not redirect to instruments page after instrument insert')\n\n table_id = self.browser.find_element_by_id('instrument-table')\n rows = table_id.find_elements_by_tag_name('tr')\n test_row = []\n for row in rows[1:]:\n if row.find_elements_by_tag_name(\"td\")[1].text == test_instrument['abbv']:\n test_row = row\n\n # Now we make sure we get back out what we put in\n test_values = [td.text for td in test_row.find_elements_by_tag_name('td')]\n for key in test_instrument.keys():\n self.assertIn(test_instrument[key], test_values, 'Missing Value %s' % test_instrument[key])", "def newTestLibrary(self):\n self.newTab( extension = TestLibrary.TYPE, repoDest=UCI.REPO_UNDEFINED, newLib=True )", "def newTestAdapter(self):\n self.newTab( extension = TestAdapter.TYPE, repoDest=UCI.REPO_UNDEFINED, newAdp=True )", "def test_register(self):\n self._configure_testshib_provider()\n self._test_register()", "def newTestData(self):\n self.newTab( extension = TestData.TYPE, repoDest=UCI.REPO_UNDEFINED )", "def testAdd(self):\n kinds = ('tiny', 'small')\n # Tests a series of three index adds, where each load overlaps\n # the previous one and the data spans the 0/360 longitude angle\n # discontinuity. 
Compares the results to those obtained with\n # one shot index generation.\n in_tables = self.in_tables_v1[:3]\n actions = [(i, 1, t) for i, t in enumerate(in_tables)]\n self._run_actions(kinds, in_tables, actions, 'add1')\n # Tests a series of 6 index adds using the 3 loads above and\n # then replacing each load with a second version.\n in_tables = self.in_tables_v2[:3]\n actions.extend([(i, 2, t) for i, t in enumerate(in_tables)])\n self._run_actions(kinds, in_tables, actions, 'add2')", "def _new(self):\n return self.lib.iperf_new_test()", "def newTestUnit(self):\n self.newTab( extension = TestUnit.TYPE, repoDest=UCI.REPO_UNDEFINED )", "def test_code001(self, *test_code_args, **test_code_kwargs):\n print(\"This is test_code001 from TestDefinition #\", self.ID, \", test case #\", self.test_case_ID, sep='')", "def create_new_test_run():\n if debug:\n print('[DEBUG] Func: create_new_test_run...')\n\n new_test_run_url = \"https://eei.testrail.com/index.php?/api/v2/add_run/{0}=\".format(project_id)\n\n new_test_run_json = {\n \"suite_id\": suite_id,\n \"name\": suite_name,\n \"assignedto_id\": assignedto_id,\n \"include_all\": False,\n \"case_ids\": [testcase_id]\n }\n\n new_test_run = requests.post(new_test_run_url, auth=authorization, json=new_test_run_json)\n\n if str(new_test_run.status_code) != '200':\n print('[ERROR] new_test_run: non 200 status code... ' + str(new_test_run.status_code))\n print(str(new_test_run.json()))\n sys.exit(1)\n\n global new_test_run_id\n new_test_run_id = str(new_test_run.json()[\"id\"])", "def test_customize_test_loads(self):\n self.create_user_with_role(\n self.user.name, self.user.email, self.user.password, Role.tester)\n self.create_forktest(\"own-fork-commit\", TestPlatform.linux, regression_tests=[2])\n self.create_completed_regression_t_entries(3, [2])\n response = self.app.test_client().get('/test/3')\n self.assertEqual(response.status_code, 200)\n self.assert_template_used('test/by_id.html')\n regression_tests = RegressionTest.query.all()\n self.assertIn(regression_tests[1].command, str(response.data))\n self.assertNotIn(regression_tests[0].command, str(response.data))", "def test_code002(self, *test_code_args, **test_code_kwargs):\n print(\"This is test_code002 from TestDefinition #\", self.ID, \", test case #\", self.test_case_ID, sep='')", "def addCustomTests(self, tests):\n pass", "def add_test(self, id):\n id = int(id)\n workflow = Workflow.get(id)\n scufl = Scufl(workflow.data)\n match_types = [\"exact\", \"ignore\", \"regex\"]\n \n return dict(id=None, workflow_id=id, name=workflow.name,\n description=workflow.description, scufl=scufl,\n workflow_test=None, sources={},\n match_types = match_types,\n out_types = {},\n sinks={})", "def test_code010(self, *test_code_args, **test_code_kwargs):\n print(\"This is test_code010 from TestDefinition #\", self.ID, \", test case #\", self.test_case_ID, sep='')", "def addTestResult(self, val):\n conn = sqlite3.connect(self._db)\n conn.text_factory = str\n c = conn.cursor()\n c.execute('insert into test (test_module, test_type, test_dt_start,'\n 'test_dt_end, test_name, test_port, test_proto, test_payload,'\n 'test_sig_match, test_alert, test_flag)'\n 'values (?,?,?,?,?,?,?,?,?,?,?)', val)\n conn.commit()\n c.close()", "def test_new(self):", "def test_new(self):", "def add_test(self, test_name, xml_path, error_class, parsed_xml=None):\n test_set = (test_name, xml_path, error_class, parsed_xml)\n\n self.__test_set_list.append(test_set)", "def add(self, test_unique=True, setiotid=True):\n\n # get existing sets 
self.iotid\n if not test_unique or not self.get_existing(self.api_tag):\n self.logger.info('payload {}'.format(self.payload()))\n resp = requests.post('{}/{}'.format(self.base_url, self.api_tag),\n auth=('write', self._password),\n json=self.payload())\n self.logger.info('response {}'.format(resp.text))\n self.logger.info('headers {}'.format(resp.headers))\n if setiotid:\n m = IDREGEX.search(resp.headers.get('location', ''))\n\n if m:\n iotid = m.group('id')[1:-1]\n else:\n iotid = resp.json()['@iot.id']\n\n self.setiotid(iotid)", "def add(self, prompt, identifier, command, args: list,\n delays: list, result_query: list,\n initial_value_query: list, measurement_count: int):\n self.tests.append(Test(self.logger, prompt,\n identifier, command, args,\n delays, result_query, initial_value_query,\n measurement_count))", "def start_next_test(self):\n next_test_num = self.test_numbers.popleft()\n self.tests.append(\n self.TEST(\n process=Popen(COMMANDS[next_test_num],\n stdout=PIPE,\n stderr=PIPE),\n number=next_test_num))", "def addSuccess(self, test):\n self.passing.append(proto_test(test))", "def newTestPlan (self):\n self.newTab( extension = TestPlan.TYPE, repoDest=UCI.REPO_UNDEFINED )\n self.findWidget.setDisabled(True)", "def _AddTestData(self, test_name, rows, sheriff_key,\n improvement_direction=anomaly.UNKNOWN):\n testing_common.AddTests(\n ['ChromiumGPU'],\n ['linux-release'], {\n 'scrolling_benchmark': {\n test_name: {},\n },\n })\n test = utils.TestKey(\n 'ChromiumGPU/linux-release/scrolling_benchmark/' + test_name).get()\n test.improvement_direction = improvement_direction\n test_container_key = utils.GetTestContainerKey(test.key)\n\n sheriff_key = sheriff_key.get()\n if sheriff_key.patterns:\n sheriff_key.patterns.append(test.test_path)\n else:\n sheriff_key.patterns = [test.test_path]\n sheriff_key.put()\n\n for i, val in enumerate(rows):\n graph_data.Row(id=(i+1), value=val, parent=test_container_key).put()\n\n # Add test config.\n overridden_config = {\n 'min_relative_change': 0.1,\n 'min_absolute_change': 10.0\n }\n anomaly_config.AnomalyConfig(\n id='config_' + test_name, config=overridden_config,\n patterns=[test.test_path]).put()\n test.put()\n return test", "def newTestConfig(self):\n self.newTab( extension = TestConfig.TYPE, repoDest=UCI.REPO_UNDEFINED )\n self.findWidget.setDisabled(True)", "def addRemoteTestToTestplan(self, data, testParentId=0):\n path_file, name_file, ext_file, encoded_data, project = data\n tabId = self.tab.currentIndex()\n if tabId == -1:\n return False\n currentDoc = self.tab.widget(tabId)\n # \n if currentDoc.extension == TestPlan.TYPE :\n content = base64.b64decode(encoded_data)\n currentDoc.addRemoteSubItem(path_file, \n name_file, \n ext_file, \n content, \n project, \n testParentId=testParentId)" ]
[ "0.63559365", "0.6069125", "0.59046876", "0.5759549", "0.5705489", "0.5694319", "0.5653221", "0.5620037", "0.5569933", "0.55606675", "0.550797", "0.55018616", "0.5477621", "0.5447666", "0.54451877", "0.542421", "0.5414771", "0.5409606", "0.54056925", "0.5385775", "0.5385775", "0.5376005", "0.53692836", "0.53675103", "0.53352076", "0.5327096", "0.5312624", "0.53122246", "0.52979946", "0.52782583" ]
0.65358037
0
Given a panel ID, this method makes the panel live.
def make_virtualpanel_live(): vpanelid = request.args.get('id') panelid = get_panel_by_vp_id(s, vpanelid) locked = check_if_locked(s, panelid) if locked: if current_user.id == get_locked_user(s, panelid): make_vp_panel_live(s, vpanelid) add_to_starlims(vpanelid) return redirect(url_for('panels.view_virtual_panels')) else: make_vp_panel_live(s, vpanelid) add_to_starlims(vpanelid) return redirect(url_for('panels.view_virtual_panels'))
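A small sketch of the lock-ownership guard this view relies on. The helper names check_if_locked and get_locked_user are taken from the snippet; the session argument is assumed to be the same SQLAlchemy session s used above, and the helper name itself is hypothetical.

# Sketch only: True when the panel is unlocked, or locked by the acting user,
# mirroring the branch logic in make_virtualpanel_live() above.
def user_may_publish(session, user_id, panel_id):
    if not check_if_locked(session, panel_id):
        return True
    return get_locked_user(session, panel_id) == user_id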
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_live():\n panelid = request.args.get('id')\n locked = check_if_locked(s, panelid)\n if locked:\n unlock_panel_query(s, panelid)\n current_version = get_current_version(s, panelid)\n if not current_version:\n current_version = 0\n new_version = current_version + 1\n make_panel_live(s, panelid, new_version, current_user.id)\n return redirect(url_for('panels.view_panels'))", "def add_panel(self, panel):\n assert panel.PANEL_ID not in self.panels\n assert not self.tools, \"tools must be added after panels\"\n self.panels[panel.PANEL_ID] = panel\n panel.register_panel(self)", "def create_panel_process():\n form = CreatePanelProcess()\n if request.method == \"POST\":\n make_live = request.form['make_live']\n panel_id = request.args.get('id')\n project_id = get_project_id_by_panel_id(s, panel_id)\n preftx_id = get_preftx_id_by_project_id(s, project_id)\n version = get_current_preftx_version(s, preftx_id)\n if not version:\n version = 0\n if make_live == \"on\":\n make_preftx_live(s, preftx_id, version + 1, current_user.id)\n make_panel_live(s, panel_id, 1, current_user.id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_panel') + \"?id=\" + panel_id)\n elif request.method == \"GET\":\n form.project.choices = get_project_choices(s, current_user.id)\n return render_template('panel_createprocess.html', form=form, panel_id=\"main\",\n url=url_for('panels.create_panel_process'))", "def show(self):\n self.window.run_command(\"show_panel\", {\"panel\": self.full_name})", "def edit_panel_process():\n if request.method == \"POST\":\n make_live = request.form['make_live']\n panel_id = request.args.get('id')\n project_id = get_project_id_by_panel_id(s, panel_id)\n preftx_id = get_preftx_id_by_project_id(s, project_id)\n tx_version = get_current_preftx_version(s, preftx_id)\n panel_version = get_current_version(s, panel_id)\n if not tx_version:\n tx_version = 0\n if make_live == \"on\":\n print('make_live')\n make_preftx_live(s, preftx_id, tx_version + 1, current_user.id)\n make_panel_live(s, panel_id, panel_version + 1, current_user.id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_panel') + \"?id=\" + panel_id)\n elif request.method == \"GET\":\n panel_id = request.args.get('id')\n form = EditPanelProcess()\n panel_info = get_panel_info(s, panel_id)\n project_id = panel_info.project_id\n form.project.choices = [(project_id, panel_info.project_name), ]\n form.panelname.data = panel_info.name\n\n lock_panel(s, current_user.id, panel_id)\n\n genes = get_genes_by_panelid_edit(s, panel_id, panel_info.current_version)\n html = \"\"\n buttonlist = \"\"\n print('hello')\n for gene in genes:\n gene_id = gene.id\n gene_name = gene.name\n preftx_id = get_preftx_by_gene_id\n upcoming_preftx = get_upcoming_preftx_by_gene_id(s, project_id, gene_id)\n all_tx = get_tx_by_gene_id(s, gene_id)\n\n buttonlist += render_template(\"gene_button.html\", gene_name=gene_name, gene_id=gene_id, added=True)\n tx_html = render_template(\"tx_list.html\", gene_name=gene_name, all_tx=all_tx, preftx=preftx_id,\n upcoming=upcoming_preftx, disabled=True)\n html += tx_html\n\n return render_template('panel_createprocess.html', form=form, genes=html, genelist=buttonlist,\n panel_id=panel_id,\n url=url_for('panels.edit_panel_process') + \"?id=\" + panel_id)", "def view_panel():\n id = request.args.get('id')\n try:\n version = request.form[\"versions\"]\n except KeyError:\n version = None\n if id:\n status = check_panel_status(s, id)\n if not status:\n message = \"This panel has 
changes which cannot be viewed here as they have not been made live yet, if you have permission you can view these by editing the panel\"\n else:\n message = None\n panel_details = get_panel_details_by_id(s, id)\n if not version:\n version = panel_details.current_version\n panel_name = panel_details.name\n panel = get_regions_by_panelid(s, id, version)\n project_id = get_project_id_by_panel_id(s, id)\n result = []\n rows = list(panel)\n if len(rows) != 0:\n bed = ''\n for i in rows:\n row = dict(zip(i.keys(), i))\n result.append(row)\n # panel_name = i.panel_name\n current_version = i.current_version\n else:\n message = \"This Panel has no regions yet & may also have changes that have not been made live\"\n bed = 'disabled'\n current_version = version\n\n if check_user_has_permission(s, current_user.id, project_id):\n edit = ''\n else:\n edit = 'disabled'\n\n form = ViewPanel()\n v_list = range(1, current_version + 1)\n choices = []\n for i in v_list:\n choices.append((i, i))\n form.versions.choices = choices\n form.versions.default = version\n form.process()\n\n table = []\n\n for i in result:\n line = []\n line.append(i['chrom'])\n line.append(str(i['region_start']))\n line.append(str(i['region_end']))\n line.append(i['gene_name'])\n line.append(i['name'].replace(',', ' '))\n table.append(line)\n return render_template('panel_view.html', scope='Panel', table=json.dumps(table), panel=table,\n panel_name=panel_name, edit=edit, bed=bed,\n version=version, panel_id=id, project_id=project_id, message=message,\n url=url_for('panels.view_panel'),\n form=form)\n\n else:\n return redirect(url_for('panels.view_panels'))", "def create_virtual_panel_process():\n form = CreateVirtualPanelProcess()\n\n if request.method == \"POST\":\n make_live = request.form['make_live']\n vp_id = request.args.get('id')\n if make_live == \"on\":\n make_vp_panel_live(s, vp_id)\n add_to_starlims(vp_id)\n panel_id = get_panel_by_vp_id(s, vp_id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_vpanel') + \"?id=\" + vp_id)\n elif request.method == \"GET\":\n form.panel.choices = get_panel_choices(s, current_user.id)\n url = url_for('panels.create_virtual_panel_process')\n return render_template('virtualpanels_createprocess.html', form=form, url=url, vp_id=\"main\")", "def create_panel(self):\n return\n # return Panel(self)", "def liveview(self):\n if self.liveviewButton.isChecked():\n# self.save = False\n self.paramChangedInitialize()\n self.openShutter(\"red\")\n self.liveviewStart()\n\n else:\n self.liveviewStop()", "def load_panelapp_panel(adapter, panel_id=None, institute=\"cust000\", confidence=\"green\"):\n panel_ids = [panel_id]\n\n if not panel_id:\n LOG.info(\"Fetching all panel app panels\")\n panel_ids = _panelapp_panel_ids()\n\n for _ in panel_ids:\n parsed_panel = _parse_panelapp_panel(adapter, _, institute, confidence)\n\n if len(parsed_panel[\"genes\"]) == 0:\n LOG.warning(\"Panel %s is missing genes. 
Skipping.\", parsed_panel[\"display_name\"])\n continue\n\n try:\n adapter.load_panel(parsed_panel=parsed_panel, replace=True)\n except Exception as err:\n raise err", "def liveview(self):\n if self.liveviewButton.isChecked():\n self.save = False\n self.channelsOpen()\n self.liveviewStart()\n\n else:\n self.liveviewStop()", "def add_panel():\n panel_name = request.json['panel_name']\n project_id = request.json['project_id']\n panel_id = create_panel_query(s, project_id, panel_name, current_user.id)\n return jsonify(panel_id)", "def handle_panel_update(self, section_dict):", "def main():\n PanelDemo().mainloop()", "def createPanel(self, LibraryID, Name, **kwargs):\n if self.request(\"createPanel\", LibraryID=LibraryID, Name=Name, **kwargs) is None:\n return None\n return self.json_response[\"Result\"][\"PanelID\"]", "def panel_callback():\n if self.minwidth is None:\n self.minwidth = self.button_stack.winfo_width()\n\n if self.current_panel:\n self.sashpos = self.paned.sashpos(0)\n self.current_panel.forget()\n if isinstance(self.current_panel, class_obj):\n self.current_panel = None\n self.paned.sashpos(0, self.minwidth)\n return\n\n if class_obj.__name__ in self.panels:\n panel = self.panels[class_name]\n else:\n panel = self.panels[class_name] = class_obj(self, self.app)\n\n panel.pack(side=tk.LEFT, expand=1, fill=tk.BOTH)\n\n if self.sashpos is None:\n self.sashpos = 300\n\n self.paned.sashpos(0, self.sashpos)\n\n self.current_panel = panel", "def view_panels(id=None):\n if not id:\n id = request.args.get('id')\n\n if id:\n panels = get_panels_by_project_id(s, id)\n else:\n panels = get_panels(s)\n result = []\n project_name = \"All\"\n for i in panels:\n row = dict(zip(i.keys(), i))\n status = check_panel_status(s, row[\"panelid\"])\n row[\"status\"] = status\n permission = check_user_has_permission(s, current_user.id, row[\"projectid\"])\n locked = check_if_locked(s, row[\"panelid\"])\n row['permission'] = permission\n row['locked'] = locked\n\n if id:\n project_name = row['projectname']\n # if check_user_has_permission(s, current_user.id, row[\"projectid\"]):\n # result.append(row)\n result.append(row)\n table = ItemTablePanels(result, classes=['table', 'table-striped'])\n return render_template('panels.html', panels=table, project_name=project_name)", "def register(self, panel):\n new_instance = panel()\n new_event_type = new_instance._meta.event_type\n if new_event_type in self.__class__._panels:\n raise Exception(\"Two panels with the same event type: %s\" % \\\n new_event_type)\n self.__class__._panels[new_event_type] = new_instance\n self.storage.register_event(new_event_type, new_instance._meta.dimensions.keys())", "def edit_virtual_panel_process():\n form = EditVirtualPanelProcess()\n\n vp_id = request.args.get('id')\n panel_id = get_panel_by_vp_id(s, vp_id)\n if request.method == \"POST\":\n if request.form['make_live'] == \"on\":\n make_vp_panel_live(s, vp_id)\n add_to_starlims(vp_id)\n unlock_panel_query(s, panel_id)\n return redirect(url_for('panels.view_vpanel') + \"?id=\" + vp_id)\n elif request.method == \"GET\":\n lock_panel(s, current_user.id, panel_id)\n panel_info = get_panel_details_by_id(s, panel_id)\n panel_name = panel_info.name\n form.panel.choices = [(panel_id, panel_name), ]\n\n panel_version = get_current_version(s, panel_id)\n panel_genes = get_genes_by_panelid(s, panel_id, panel_version)\n vp_info = get_vpanel_details_by_id(s, vp_id)\n vp_version = vp_info.current_version\n vp_name = vp_info.name\n form.vpanelname.data = vp_name\n vp_genes = 
get_genes_by_vpanelid_edit(s, vp_id, vp_version)\n genelist = \"\"\n vp_list = []\n for i in vp_genes:\n vp_list.append(i.id)\n\n genes = []\n print('new method')\n for i in panel_genes:\n if i.id in vp_list:\n genes.append({\"name\": i.name, \"id\": i.id, \"vp_list\": True})\n button = render_template(\"gene_button.html\", gene_name=i.name, gene_id=i.id, added=True)\n genelist += button\n\n else:\n genes.append({\"name\": i.name, \"id\": i.id, \"vp_list\": False})\n\n gene_html = render_template(\"panel_genes.html\", panel_genes=genes)\n\n url = url_for('panels.edit_virtual_panel_process') + '?id=' + str(vp_id)\n return render_template('virtualpanels_createprocess.html', form=form, genes=gene_html, genelist=genelist,\n vp_id=vp_id, panel_name=vp_name, current_version=vp_version, url=url)", "def set_upgrade_panel(self):\n lot = self.city_graphics.get_clicked_lot(pg.mouse.get_pos())\n if lot.construct:\n self.upgrade_panel.set_lot(lot)\n self.upgrade_panel.enable()\n else:\n self.upgrade_panel.disable()", "def panel(self):\n data_to_track = {}\n for possession in self.possessions_to_track_panel:\n data_to_track[possession] = self._haves[possession]\n\n for variable in self.variables_to_track_panel:\n try:\n data_to_track[variable] = self.__dict__[variable]\n except KeyError:\n pass\n self.database_connection.put([\"panel\",\n data_to_track,\n str(self.id),\n self.group,\n str(self.round)])", "def run(self):\n\n self.window.run_command(\"show_panel\", {\"panel\": \"output.reg_replace\"})", "def panel(*args, control: bool=True, copy: AnyStr=\"\", createString: bool=True, defineTemplate:\n AnyStr=\"\", docTag: Union[AnyStr, bool]=\"\", editString: bool=True, exists: bool=True,\n init: bool=True, isUnique: bool=True, label: Union[AnyStr, bool]=\"\",\n menuBarRepeatLast: bool=True, menuBarVisible: bool=True, needsInit: bool=True,\n parent: AnyStr=\"\", popupMenuProcedure: Union[Script, bool]=None, replacePanel:\n AnyStr=\"\", tearOff: bool=True, tearOffCopy: AnyStr=\"\", tearOffRestore: bool=True,\n unParent: bool=True, useTemplate: AnyStr=\"\", q=True, query=True, e=True, edit=True,\n **kwargs)->Union[None, Any]:\n pass", "def get_panels(config):\n\n task = TaskPanels(config)\n task.execute()\n\n task = TaskPanelsMenu(config)\n task.execute()\n\n logging.info(\"Panels creation finished!\")", "def live(self):\n curses.wrapper(self.__liveActually)", "def add_to_starlims(vpanelid):\n details = get_vpanel_details_by_id(s, vpanelid)\n print(details)\n version = round(details.current_version,1)\n panel_name = 'Analysis: ' + details.name + ' v' + str(version) + ' (' + details.panel_name + ')'\n print(len(panel_name))\n if len(panel_name) > 50:\n #todo do something with the name here!\n pass\n gene_result = get_genes_by_vpanelid(s, vpanelid, version)\n gene_list = list()\n for g in gene_result:\n gene_list.append(g.name)\n starlims = StarLims.StarLimsApi(test=True)\n testcode = starlims.add_new_test(panel_name, 'NGS Analysis', details.project_name, gene_list)\n if testcode > 0:\n add_testcode(s, vpanelid, details.current_version, testcode)\n\n return testcode", "def shopify_instances_onboarding_panel(self):\n\n current_company_id = request.httprequest.cookies.get('cids').split(',') if request.httprequest.cookies.get(\n 'cids', []) else []\n company = False\n if len(current_company_id) > 0 and current_company_id[0] and current_company_id[0].isdigit():\n company = request.env['res.company'].sudo().search([('id', '=', int(current_company_id[0]))])\n if not company:\n company = request.env.company\n 
hide_panel = company.shopify_onboarding_toggle_state != 'open'\n btn_value = 'Create More Shopify Instance' if hide_panel else 'Hide On boarding Panel'\n shopify_manager_group = request.env.ref(\"shopify_ept.group_shopify_manager_ept\")\n if request.env.uid not in shopify_manager_group.users.ids:\n return {}\n return {\n 'html': request.env.ref('shopify_ept.shopify_instances_onboarding_panel_ept')._render({\n 'company': company,\n 'toggle_company_id': company.id,\n 'hide_panel': hide_panel,\n 'btn_value': btn_value,\n 'state': company.get_and_update_shopify_instances_onboarding_state(),\n 'is_button_active': company.is_create_shopify_more_instance\n })\n }", "def update_panel_displays(self):\n #\n # The individual gauges\n for name in self.people:\n person = self.people[name]\n panel = self.health_panels[name]\n panel.set_health(person.health)\n panel.set_water(person.water)\n #\n # The vessel of water\n self.vessel.new_value = self.water_container.amount", "def adpanel():\n if 'user_id' not in session or session['user_id'] != 'admin':\n return redirect(url_for('login'))\n return render_template('adminpanel.html')", "def create(self):\n self.panel = pg.rect.Rect(self.position, self.dimensions)" ]
[ "0.7494106", "0.6255343", "0.62282526", "0.6057684", "0.5991913", "0.5888008", "0.57380563", "0.573186", "0.5687909", "0.5653041", "0.56348544", "0.561588", "0.5607468", "0.56046987", "0.554078", "0.553629", "0.55315703", "0.55211776", "0.5510027", "0.5504226", "0.54796404", "0.5373594", "0.536908", "0.53678626", "0.53645426", "0.52907807", "0.52824014", "0.52608126", "0.5212082", "0.5174176" ]
0.69726217
1
Toggles the locked status of a panel. Useful if someone has forgotten they have left a panel locked: an admin can unlock it.
def toggle_locked(): panel_id = request.args.get('id') json = False if not panel_id: json = True panel_id = request.json['id'] project_id = get_project_id_by_panel_id(s, panel_id) if current_user.id == get_locked_user(s, panel_id) and json: unlock_panel_query(s, panel_id) return jsonify("complete") elif check_user_has_permission(s, current_user.id, project_id): unlock_panel_query(s, panel_id) return manage_locked(message="Panel Unlocked") else: return manage_locked(message="Hmmmm you don't have permission to do that")
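A compact sketch of the id-resolution pattern the view uses, falling back from the query string to the JSON body. It assumes the same Flask request object as the snippet; the helper name is hypothetical.

# Sketch: resolve the panel id from either source and report whether the
# request arrived as JSON, so the response type can match.
def resolve_panel_id(request):
    panel_id = request.args.get('id')
    if panel_id is not None:
        return panel_id, False
    return request.json['id'], True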
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def lockSliderPanel(self, flag): \n\t\tself.doLockSliderPanel = flag", "def unlock_panel():\n panelid = request.args.get('panelid')\n unlock_panel_query(s, panelid)\n\n return redirect(url_for('panels.view_panels'))", "def ToggleLock(self, event):\n pass", "def toggleFrontPanelLock(self, unitCode=0):\n resp = self.XAPCommand('LFP', 2, unitCode=unitCode)\n return bool(int(resp))", "def action_lock(self):\n self.state = 'locked'", "def toggle(self, id):\n e = self.objectmanager.objects.get(id=id)\n e.enabled = not e.enabled\n e.save()\n return render({\"id\": id, \"status\": e.enabled})", "async def toggle(self, ctx):\r\n server = ctx.guild\r\n if self._logs[str(server.id)][\"toggle\"] == True:\r\n self._logs[str(server.id)][\"toggle\"] = False\r\n dataIO.save_json(self._logs_file, self._logs)\r\n await ctx.send(\"Modlogs are now disabled.\")\r\n return\r\n if self._logs[str(server.id)][\"toggle\"] == False:\r\n self._logs[str(server.id)][\"toggle\"] = True\r\n dataIO.save_json(self._logs_file, self._logs)\r\n await ctx.send(f\"Modlogs are now enabled {self.bot.get_emoji(470063310386233344)}\")\r\n return", "def setFrontPanelLock(self, isLocked=True, unitCode=0):\n resp = self.XAPCommand('LFP', (\"1\" if isLocked else \"0\"), unitCode=unitCode)\n return bool(int(resp))", "def change_player_state(self):\n if self.active_player.get() is True:\n # Get game phase and unlock respective buttons?\n # or should game do that\n pass\n else:\n pass\n #self.disable_all_buttons()", "def lock_table(self):\n\n self.status = 'Locked'", "def f_unlock(self):\n self._locked = False", "def pilotUnlock (self):\n return self.unlock()", "def set_lock_status(use_lock):\r\n get_lock.lock_is_enabled = use_lock", "def toggle_login(self, **kwargs):\n should_enable = kwargs['enable']\n\n if should_enable is not None:\n if should_enable:\n self._is_login_enabled = True\n else:\n self._is_login_enabled = False\n else:\n self._is_login_enabled = not self._is_login_enabled", "async def lockdown(self, ctx, action=None):\r\n try:\r\n if not action:\r\n return await ctx.send(\"Lockdown command:\\n*;lockdown [on/off]*\")\r\n if action.lower() == 'on':\r\n msg = await ctx.send(\"Locking down the channel...\")\r\n for x in ctx.guild.members:\r\n await ctx.channel.set_permissions(x, send_messages=False)\r\n return await msg.edit(content=\"The channel has been successfully locked down. :lock: \")\r\n elif action.lower() == 'off':\r\n msg = await ctx.send(\"Unlocking the channel...\")\r\n for x in ctx.guild.members:\r\n await ctx.channel.set_permissions(x, send_messages=True)\r\n return await msg.edit(content=\"The channel has been successfully unlocked. 
:unlock: \")\r\n else:\r\n return await ctx.send(\"Lockdown command:\\n*;lockdown [on/off]*\")\r\n except discord.Forbidden:\r\n await ctx.send(\"I need to have the permission: Manage Server\")", "async def locked(self):\n return not \"not\" in await self.ask(\"locked\")", "def toggle_numlock(turn_on):\n\n is_on = 0\n keys = []\n\n is_on = windll.user32.GetKeyState(VK_NUMLOCK) & 1\n\n if is_on != turn_on:\n windll.user32.keybd_event(\n VK_NUMLOCK,\n 69,\n KEYEVENTF_EXTENDEDKEY | 0,\n 0\n )\n windll.user32.keybd_event(\n VK_NUMLOCK,\n 69,\n KEYEVENTF_EXTENDEDKEY | KEYEVENTF_KEYUP,\n 0\n )\n\n return is_on", "def is_locked(self, obj):\n html = ('<span id=\"locking-{obj_id}\" data-object-id=\"{obj_id}\" class=\"locking-status\">'\n '</span>')\n return mark_safe(html.format(obj_id=obj.pk))", "def is_locked(self):\r\n pass", "def toggle(self):\n if self.is_enabled:\n self.disable()\n else:\n self.enable()", "def workflow_unlock(self, cr, uid, ids, context=None):\n\n all_locked = True\n for sched in self.browse(cr, uid, ids, context=context):\n if not self.details_locked(cr, uid, [sched.id], context):\n self.write(\n cr, uid, sched.id, {'state': 'unlocked'}, context=context)\n else:\n all_locked = False\n\n return all_locked is False", "def set_Off(self):\n if not(self._locked):\n self.__dict__['statusOn']=False\n self._undo_action()\n else:\n self._log.info('The JobProperty %s is blocked', self.__name__)", "def locked(self):\n with self._block:\n status = repr(self).split(maxsplit=1)[0][1:]\n assert status in ('locked', 'unlocked')\n return status == 'locked'", "def set_locked(self, *args):\n return _ida_hexrays.vdui_t_set_locked(self, *args)", "async def admin_disable(self, ctx: commands.Context):\n if ctx.guild.id not in self.guilds:\n await ctx.send('Team management is already disabled in this guild.')\n return\n await self._disable_guild(guild=ctx.guild)\n await ctx.send('Team management disabled.')", "def set_status(self, locked=None, exclusive=None):\n self.locked = locked\n self.exclusive = exclusive", "def unlock_instance(self, instance_name, check=True):\n with self.page_instances().table_instances.row(\n name=instance_name).dropdown_menu as menu:\n menu.button_toggle.click()\n menu.item_unlock.click()\n\n if check:\n self.close_notification('success')", "async def toggle(self, ctx):\r\n serverid = ctx.message.server.id\r\n if self.adkillr[serverid]['toggle'] is True:\r\n self.adkillr[serverid]['toggle'] = False\r\n e = discord.Embed(description='**AntiAdv is now disabled.**')\r\n await self.bot.say(embed=e)\r\n elif self.adkillr[serverid]['toggle'] is False:\r\n self.adkillr[serverid]['toggle'] = True\r\n e = discord.Embed(description='**AntiAdv is now enabled.**')\r\n await self.bot.say(embed=e)\r\n dataIO.save_json(\"data/adkillr/adkillr.json\", self.adkillr)", "def ToggleSpinner(event, state, widget):\n if state == True:\n widget.Enable()\n else:\n widget.Disable()\n event.Skip()", "def locked(challenge):\n \n if not challenge or challenge.type != 'locking':\n return False\n team = Teams.query.filter_by(id=session['id']).first()\n if not team:\n return True\n if team.score() >= challenge.unlock_at:\n return False\n return True" ]
[ "0.6745608", "0.6713125", "0.6654994", "0.65814847", "0.6289808", "0.620875", "0.611685", "0.59749204", "0.59507865", "0.5889249", "0.586993", "0.5864686", "0.5826866", "0.5818667", "0.5771882", "0.57535344", "0.57477957", "0.5727761", "0.57048416", "0.5700825", "0.56826514", "0.56816375", "0.5672122", "0.5601997", "0.55998296", "0.5549113", "0.5533922", "0.5517758", "0.5506463", "0.54999256" ]
0.8498502
0
Scrape the Goodreads quote page for the mood submitted.
def how_you_feelin(mood): to_scrape = "https://www.goodreads.com/quotes/tag?utf8=✓&id=" + mood scraped = requests.get(to_scrape) soup = BeautifulSoup(scraped.content, 'html.parser') cleaned = soup.prettify() quote_mess = soup.find_all(class_='quoteText') return quote_mess
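An illustrative call to the scraper above; the tag string "happiness" is an assumption and must match a real Goodreads quote tag. get_text() is standard BeautifulSoup.

# Hypothetical usage: fetch the quote blocks for a mood tag and print the
# first few as plain text.
for block in how_you_feelin("happiness")[:3]:
    print(block.get_text(strip=True))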
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def retrieving_data():\n for x in range(1):\n page_number=random.randint(1,500)\n page_num=str(page_number)\n url = 'http://www.tastespotting.com/browse/'+page_num\n req = http.request('GET', url)\n data = BeautifulSoup(req.data,'html.parser')\n for each_div in data.find_all(\"div\", { \"class\": \"trendspotted-item\"}):\n for each_recipe in each_div.find_all('a', href=True):\n \"\"\"links starting with /clicks are the links of recipe to their original sites, so just retrieve those links\"\"\"\n if each_recipe['href'].startswith('/click'):\n retrieving_data.recipe_link=each_recipe['href'][16:-12]\n for each_img in each_recipe.find_all('img', alt=True):\n retrieving_data.recipe_image=each_img['src']\n for each_caption in each_div.find(\"p\", { \"class\": \"photo_caption\"}):\n retrieving_data.recipe_title=each_caption", "def _scrape(self):", "def scrape():\n\n #Setting the URL\n my_url = 'https://en.wikiquote.org/wiki/Wikiquote:Quote_of_the_day'\n \n try:\n #Opening up connection with url, get info and close\n request = urllib.request.urlopen(my_url)\n page_html = request.read()\n request.close()\n \n #HTML parsing\n page_soup = soup(page_html,'html.parser')\n \n #Get date and qoute\n date = page_soup.find_all('center')[1]\n qoute = page_soup.find('i')\n author = page_soup.find('td', style='font-size:smaller;')\n \n #Get text from date\n date_txt = date.get_text()\n qoute_txt = qoute.get_text()\n author_txt = author.get_text()\n except Exception as e:\n print(e.message, e.args)\n return 1; \n\n #Remove '~\\n' & ' ' from str for formatting \n author_txt = author_txt.replace('~\\n', '')\n date_txt = date_txt.replace('~\\n', '') \n qoute_txt = qoute_txt.replace('~\\n', '').replace(' ', ' ') \n \n #Result\n message = date_txt + ' *** ' + qoute_txt + ' *** ' + author_txt\n \n return message", "def scrape(self):\n pass", "def extract_donald_trump_quotes():\n browser = webdriver.Firefox()\n browser.get('http://www.brainyquote.com/quotes/authors/d/donald_trump.html')\n while True:\n try:\n all_quotes = browser.find_elements_by_css_selector(BrainQuotePage.donald_trump_quotes)\n all_quote_links = browser.find_elements_by_css_selector(BrainQuotePage.donald_trump_links)\n for quotes, quote_link in zip(all_quotes, all_quote_links):\n # donald trump quote and link\n print (quotes.text, quote_link.get_attribute('href'))\n next_page = browser.find_element_by_css_selector(BrainQuotePage.donald_trump_next_page)\n next_page.click()\n except:\n # we have reached the last page\n break\n browser.quit()", "def process_poem(url):\r\n\r\n response = get(url)\r\n html_soup = BeautifulSoup(response.text, 'html.parser')\r\n beyts = html_soup.find_all('span', class_ = 'verse')\r\n beyts = [beyt.text for beyt in beyts]\r\n info_dict = process_key_items(html_soup)\r\n info_dict['beyts'] = beyts\r\n\r\n return info_dict", "def scrape_one_book(self, url):\n\n if url in self.url_to_explore:\n self.url_to_explore.remove(url)\n req = requests.get(url, headers = self.headers).content\n soup = BeautifulSoup(req, 'html5lib')\n soupbody = soup.body\n\n book_data = {}\n # get book url\n book_url = url\n book_data[\"url\"] = book_url\n\n # get book title\n book_title = soupbody.find('h1', attrs={'id':'bookTitle'}).text.strip()\n if book_title:\n book_data[\"title\"] = book_title\n\n # # get book id\n reg = 'https://www.goodreads.com/book/show/([0-9]+)'\n book_id = re.search(reg, url).group(1)\n book_data[\"id\"] = book_id\n\n # get book ISBN\n book_databox = soupbody.find('div', attrs={'id':'bookDataBox'})\n if book_databox:\n 
all_float_divs = book_databox.find_all('div',\n attrs = {'class' : 'clearFloats'})\n book_isbn = ''\n for div in all_float_divs:\n title = div.find('div',\n attrs = {'class':'infoBoxRowTitle'}).text.strip()\n if title == 'ISBN':\n book_isbn = div.find('div',\n attrs = {'class':'infoBoxRowItem'}).contents[0].strip()\n book_data[\"ISBN\"] = book_isbn\n\n # get book author url and author name\n author_name_container = soupbody.find('div',\n attrs = {'class':\"authorName__container\"})\n if author_name_container:\n all_authors = author_name_container.find_all('a',\n href = True, attrs = {'class':\"authorName\"})\n cur_author_url = []\n cur_author_name = []\n for author in all_authors:\n cur_author_url.append(author['href'])\n name = author.find('span', attrs = {'itemprop':'name'}).text.strip()\n cur_author_name.append(name)\n book_data[\"authorURLs\"] = cur_author_url\n book_data[\"author_names\"] = cur_author_name\n\n # get book rating and review\n book_meta = soupbody.find('div', attrs = {'id':'bookMeta'})\n if book_meta:\n rating = book_meta.find('span',\n attrs = {'itemprop':'ratingValue'}).text.strip()\n book_data[\"rating\"] = rating\n\n book_rating_count_container = book_meta.find('meta',\n attrs = {'itemprop':'ratingCount'})\n if book_rating_count_container:\n book_rating_count = book_rating_count_container['content']\n book_data[\"rating_count\"] = book_rating_count\n\n book_review_count_container = book_meta.find('meta',\n attrs = {'itemprop':'reviewCount'})\n if book_review_count_container:\n book_review_count = book_review_count_container['content']\n book_data[\"review_count\"] = book_review_count\n\n # get book image\n image_tag = soupbody.find('img', attrs = {'id':'coverImage'})\n if image_tag:\n image_src = image_tag['src']\n book_data[\"bookImage\"] = image_src\n # print(authorLink.span.text)\n\n # get related_books\n related_works_container = soupbody.find('div', id=re.compile('relatedWorks-'))\n if related_works_container:\n related_books_div = related_works_container.find('div', class_='bigBoxBody')\n if related_books_div:\n related_books_carousel = related_books_div.find('div', class_='bookCarousel')\n if related_books_carousel:\n carousel_row = related_books_carousel.find('div', class_='carouselRow')\n if carousel_row:\n related_books_list_li = carousel_row.find('ul').find_all('li')\n related_books = []\n for item in related_books_list_li:\n link = item.find('a', href = True)['href']\n self.url_to_explore.add(link)\n related_books.append(link)\n book_data[\"similar_books\"] = related_books\n\n self.data_collection.push_to_collection(book_data)\n print(\"Book successfully scraped: \" + book_title)", "def get_quote_and_movie_name():\n html_content = urlopen(MOVIE_QUOTE_SOURCE).read().decode('utf-8')\n soup = BeautifulSoup(html_content, 'html.parser')\n results = soup.find_all(attrs={'class': 'col-xs-9 col-lg-10'})\n quote_regex = re.compile('<blockquote>(.*?)</blockquote>')\n movie_regex = re.compile('</strong>(.*?)</span>')\n movie_em_regex = re.compile('<em>(.*?)</em>')\n movie_regex_second = re.compile('</strong>(.*?)</a>')\n last_results = []\n\n for result in results:\n\n quote_line = str(result.find('blockquote')).replace('\\n', '')\n quote = quote_regex.findall(quote_line)[0].strip()\n movie_line = str(result.find_all(attrs={'class': 'source'})[0])\n try:\n movie_name = movie_regex.findall(movie_line)[0].strip()\n except:\n movie_name = movie_regex_second.findall(movie_line)[0].strip()\n if '<em>' in movie_name:\n movie_name = 
movie_em_regex.findall(movie_name)[0].strip()\n\n last_results.append((quote, movie_name))\n\n return random.choice(last_results)", "def parse(self, response):\n\n #下面这种写法使用生成器方式比较好\n \"\"\" items = []\n for i in response.css('div.quote'):\n item = ScrapequoteItem()\n item['tag'] = i.css('span.text[itemprop]::text').get()\n item['author'] = i.css('small.author::text').get()\n items.append(item)\n return items \"\"\"\n\n for i in response.css('div.quote'):\n item = ScrapequoteItem()\n item['tag'] = i.css('span.text[itemprop]::text').get()\n item['author'] = i.css('small.author::text').get()\n yield item\n\n #以下循环获取其他页面\n next_page = response.css('li.next a::attr(href)').get()\n if next_page is not None:\n yield response.follow(next_page, callback=self.parse) #返回一个Request instance", "async def scrape(self):\n bulb = \"https://bulbapedia.bulbagarden.net/wiki/Kanto_Route_1\"\n http = urllib3.PoolManager()\n response = http.request('GET', bulb)\n soup = BeautifulSoup(response.data, 'html.parser')\n chance = soup.find_all('td')[0].get_text()\n print(chance.encode(\"utf-8\"))", "def search_goodreads(movie):\n\n driver.get(\"https://www.goodreads.com/\")\n \n # Set book default\n book = {'book_title': np.nan,\n 'author': np.nan,\n 'rating_value': np.nan,\n 'rating_count': np.nan,\n 'review_count': np.nan,\n 'page': np.nan,\n 'year': np.nan}\n\n # Search movie title\n search_bar = driver.find_element_by_xpath(\"//input[@name='query'][@type='text']\")\n search_bar.clear()\n search_bar.send_keys(movie)\n search_bar.send_keys(Keys.RETURN)\n time.sleep(5)\n\n actions = ActionChains(driver)\n actions.send_keys(Keys.RETURN)\n actions.perform()\n # Click on the first result\n try:\n elem = driver.find_element_by_class_name(\"bookTitle\").click()\n # Make soup and get data\n book_soup = BeautifulSoup(driver.page_source, \"lxml\")\n book = get_bookdata(book_soup)\n except:\n pass\n \n\n return book", "def get_song_html(self, url):\n request = urllib.request.Request(url)\n request.add_header(\"Authorization\", \"Bearer \" + self.client_access_token)\n request.add_header(\"User-Agent\",\n \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Safari/537.36'\")\n\n page = urllib.request.urlopen(request)\n html = BeautifulSoup(page, \"lxml\")\n print(\"Scraped: \" + url)\n return html", "def summon_text(quote_mess):\n\n # clean up to get an individual quote out of the poorly formatted soup\n # goodreads does not format their webpages in a way that is scraping-friendly\n quotes = []\n for quote in quote_mess:\n to_trim = str(quote)\n trimmed = to_trim[30:]\n this_quote = ''\n for char in trimmed:\n if char == '<':\n break\n else:\n this_quote = this_quote + char\n quotes.append(this_quote)\n\n # clean up the line breaks and unnecessary punctuation\n # without this step, we would end up with random punctuation and unreliable chaining\n quote_list = []\n for quote in quotes:\n cleaned = quote.replace(\"\\n\", '')\n purged = re.sub(r'(\\.|,|:|;|\\(|\\)|\\\"|\\?|”|“|!)', '', cleaned)\n quote_list.append(purged)\n\n # create a final clean list of all the words available\n word_list = []\n for index in range(0, len(quote_list) - 1):\n quote = quote_list[index]\n words = quote.split(' ')\n for word in words:\n # just checking if we have a first person word or not\n if word != 'I' or word[:2] != \"I'\":\n word_list.append(word.lower())\n else:\n word_list.append(word)\n\n return word_list", "def get_details(page, dataWriter):\n nextPage = True\n pageNo = 1\n while (nextPage and 
pageNo <= 5):\n response = read_url(page + 'page/' + str(pageNo))\n soup = BeautifulSoup(response, 'lxml')\n\n rows = soup.find_all('div', 'quote')\n if (len(rows) > 0):\n print(\"Page \",pageNo,\" Total Quotes Found \",len(rows))\n for row in rows:\n if row.find('span',attrs={'itemprop':'text'}):\n\n title = row.find(attrs={'itemprop':'text'}).text.strip()\n author = row.find(attrs={'itemprop':'author'}).text.strip()\n authorLink = row.find('a',href=re.compile(r'/author/')).get('href')\n tags = row.find('div','tags').find(itemprop=\"keywords\").get('content')\n print(title, ' : ', author,' : ',authorLink, ' : ',tags)\n\n if authorLink:\n authorLink = 'http://quotes.toscrape.com' + authorLink\n linkDetail = read_url(authorLink)\n soupInner = BeautifulSoup(linkDetail, 'lxml')\n\n born_date = soupInner.find('span','author-born-date').text.strip()\n born_location = soupInner.find('span','author-born-location').text.strip()\n\n # Write a list of values in file\n dataWriter.writerow([tags,authorLink,author,born_date,born_location.replace('in ',''),title])\n\n nextPage = True\n pageNo += 1\n else:\n print(\"Quotes Not Listed!\")", "def scrape_question(page_text, ans):\n\n sq = BeautifulSoup(page_text, 'html.parser')\n question = Question(sq, ans)\n\n return question.__dict__", "def _process_html(self) -> None:\n opinion_json = self.request[\"response\"].json()\n for case in opinion_json:\n url = self._get_url(case[\"docketNumber\"], case[\"docketEntryId\"])\n status = (\n \"Published\"\n if case[\"documentType\"] == \"T.C. Opinion\"\n else \"Unpublished\"\n )\n self.cases.append(\n {\n \"judge\": case[\"judge\"],\n \"date\": case[\"filingDate\"][:10],\n \"docket\": case[\"docketNumber\"],\n \"url\": url,\n \"name\": titlecase(case[\"caseCaption\"]),\n \"status\": status,\n }\n )", "def generate_quote():\n headers = {\n 'accept': 'text/plain'\n }\n response = requests.get(QUOTE_URL, headers=headers)\n quote = response.json()['starWarsQuote']\n card = make_card(title=quote, colour=0xF5C518, thumbnail=THUMBNAIL)\n return card", "def get(soup, data, dictionary):\n\n # Steg 1: Ting som alltid er sant:\n dictionary['fb_like'] = None #0\n dictionary['others_share'] = None #0\n dictionary['fb_share'] = len(soup.select(\".share-facebook\"))\n dictionary['googleplus_share'] = len(soup.select(\".share-googleplus\"))\n dictionary['twitter_share'] = len(soup.select(\".share-twitter\"))\n dictionary['email_share'] = len(soup.select(\".share-mail\"))\n\n # tror ikke disse har noen aside...\n dictionary['related_stories_box_les'] = len(soup.select(\"aside.articlewidgets article\"))\n\n # related thematic (found in footer part of page)\n dictionary['related_stories_box_thematic'] = 0\n # grab that footer part with data-relation-limit attr\n related_thematic = soup.find_all(has_data_relation_limit)\n # loop\n for el in related_thematic:\n #check divs\n for div in el.select(\"div\"):\n if has_data_id(div):\n dictionary['related_stories_box_thematic'] +=1\n\n # re related stories is the combined previous two\n dictionary['related_stories'] = dictionary['related_stories_box_les'] + dictionary['related_stories_box_thematic']\n \n\n # antall js dokumenter\n dictionary['js'] = count_js(soup, data, dictionary) # = len(re.findall(\"<iframe src=\", data)) # .js\n # remove javascript.\n [s.decompose() for s in soup.body.article('script')]\n # I believe this is what creates the somewhat awkward line-breaks in the soup\n\n # Find author(s)\n byline = soup.find('div', 'byline')\n authors = []\n try:\n for address, li in 
izip(byline.find_all('address'), byline.find_all('li', 'icon-email')):\n authorName = address.strong.text #address.find(class_='fn').string.encode('utf-8')\n # NRK is still trying to hide the email address from spammers. #href = li.a['href']\n authorMail = None # 'abandon this? too hard?'#unquote(href[21:-1])[7:] # Antakelsen er at epost vil holde seg til ASCII. \n authorRole = address.span.text #address.find(class_='role').string.strip().encode('utf-8')\n author = [authorName, authorMail, authorRole]\n authors.append(author)\n # and remove author image (so not to count it later..) \n address.figure.decompose()\n except AttributeError:\n # Finner ingen forfatter(e)\n new_logger.warn(\"Ingen forfattere \\\"{0}\\\". Oppgir \\\"<UKJENT>\\\" som forfatter\".format(dictionary['url']))\n #print \n authors.append([None, None, None])\n dictionary['authors'] = authors\n \n # Find published datetime\n try:\n dictionary['published'] = strptime(soup.time['datetime'][0:19], \"%Y-%m-%dT%H:%M:%S\")\n except TypeError:\n new_logger.info(\"finner ikke publiseringsdato\")\n dictionary['published'] = None\n\n new_logger.debug(\"published: %s\", type(dictionary['published']))\n # Find update datetime\n try:\n updated = soup.find('span', 'update-date')\n dictionary['updated'] = datetime.strptime(updated.time['datetime'][0:19], \"%Y-%m-%dT%H:%M:%S\")\n except:\n new_logger.info(\"finner ikke oppdateringsdato\")\n dictionary['updated'] = None\n\n # Find headline\n try:\n dictionary['headline'] = soup.body.article.find('h1').text.strip()\n #dictionary['headline'] = soup.header.find('div', 'articletitle').h1.text # .text gived unicode, .string gives 'bs4.element.NavigableString'\n except AttributeError:\n new_logger.debug( \"NB: bruker doc-title...\" )\n dictionary['headline'] = soup.title.text\n\n # Find fact-boxes :\n # Should be removes from body, but includes in LIX. 
Right?\n faktabokser = []\n #for boks in soup.find_all(\"section\", class_=\"articlewidget cf facts lp_faktaboks\"):\n for boks in soup.find_all(\"section\", class_=\"facts\"):\n faktaboks_text = boks.text.strip()\n lix = Lix(faktaboks_text)\n faktaboks_analysis = lix.analyzeText(faktaboks_text)\n faktabokser.append({\"text\":faktaboks_text, \"links\":boks.find_all(\"a\"), \"wordcount\":faktaboks_analysis['wordCount']})\n # and remove from soup\n #boks.decompose() #ikke fjern boks'n fra suppa.\n # NB, this also removes pictures if any in the fact-box\n dictionary['factbox'] = faktabokser\n\n new_logger.debug(\"faktabokser: %s\", len(dictionary['factbox']))\n\n # Find full text \n # article MINUS .universes OR is it .lp_related ?\n # remove the related section\n # try:\n # soup.body.article.find('section', 'lp_related').decompose()\n # except:\n # pass\n # # remove div.published (the top-bar)\n # soup.body.article.find('div', 'published').decompose()\n # # remove div.shareurl (the sharebar)\n # soup.body.article.find('div', 'sharing').decompose()\n\n # Find self declared url # get this before decomposing the header this is found in..\n dictionary['url_self_link'] = soup.select(\"time > a\")[0]['href']\n\n # remove header with sharing links and date\n soup.select(\".bulletin-header\")[0].decompose()\n # store body text\n dictionary['body'] = soup.body.article.text.strip() \n # .stripped_strings option?\n # soup.get_text(\"|\", strip=True) perhaps?\n\n # Find char count, line count, word count and Lix\n lix = Lix(dictionary['body']) \n\n analyse = lix.analyzeText(dictionary['body'])\n try:\n dictionary['char_count'] = len(dictionary['body'])\n dictionary['word_count'] = analyse['wordCount']\n dictionary['line_count'] = analyse['sentenceCount']\n dictionary['lesbahet'] = analyse['lixScore']\n except TypeError:\n new_logger.error(\"Kunne ikke kjøre lix\", dictionary['body']) \n dictionary['line_count'] = None\n dictionary['word_count'] = None\n dictionary['char_count'] = None\n dictionary['lesbahet'] = -1.0\n\n # look through the last part of the body text to find news bureau\n # add more in settings.py\n dictionary['news_bureau'] = matches_pattern(dictionary['body'].strip()[-200:], syndicators)\n\n\n # Find language. Defaults can be tampered with in settings.py\n (language, certainty) = langid.classify(soup.body.article.text)\n new_logger.debug( \"(language, certainty) (%s, %s)\" % (language, certainty))\n language_code = uncertain_language_string\n if (certainty > language_identification_threshold):\n language_code = language\n\n dictionary['language'] = language_code\n\n\n get_video(soup.body.article, data, dictionary)\n\n # flash (untested)\n dictionary['flash_file'] = get_flash(soup.body.article, data, dictionary)\n\n # Tell opp iframe. \n dictionary['iframe'] = count_iframes(soup, data, dictionary)\n \n # Tell opp css (karakterer)\n dictionary['css'] = count_css(soup, data, dictionary)\n\n\n # Finnes det en form for kommentarer her? I de nyere NRK sidene er det tydeligvis kun det på Ytring.\n # Men vi søker generelt nå, og håper på det beste. I verste fall vil et interessant krasj fortelle meg at dette ikke er tilfellet. 
–Haakon\n dictionary['comment_fields'] = 0\n dictionary['comment_number'] = 0\n if len(re.findall('<div id=\"disqus_thread\"', data)) != 0:\n dictionary['comment_fields'] = 1\n dictionary['comment_number'] = None # -9999#num_comments(dictionary)\n \n # tar seg av lenker i siden\n count_links(soup, data, dictionary)\n\n # antall bilder.\n # Beautiful Soup teller feil her og. Noe er galt.\n # Regex matching gir riktig resultat så vi får gå for det.\n #result = soup.article.find_all('figure', 'image')\n #print len(result)\n #new_logger.debug( \"antall bilder: %s\", len(re.findall(\"<img src=\\\"http:\", data)) )\n \n dictionary['images'] = count_images(soup.body.article, data, dictionary)\n \n # bildesamlinger\n dictionary['image_collection'] = len(soup.select(\".slideshow\")) # er dette nok?\n # Som diskutert med Eirik, dette henter ut bildetekstene og deler dem med pipe symboler.\n\n imgtagger = re.findall(u\"<img src=\\\"http.*\\n.*\", str(soup.body.article) )\n bildetekst = \"\"\n for imgtag in imgtagger:\n funn = re.findall(\"alt=\\\".*\\\"\", imgtag)\n if len(funn) > 0:\n bildetekst += ((funn[0])[5:-1] + \" | \")\n bildetekst = bildetekst[:-3] # Fjerner siste pipen\n dictionary['image_captions'] = bildetekst\n\n\n\n dictionary['map'] = count_map(soup.body.article, data, dictionary)\n dictionary['poll'] = None # -9999\n dictionary['game'] = None # -9999\n \n dictionary['interactive_elements'] = count_interactive( \\\n dictionary['comment_fields'] , dictionary['image_collection'] , \\\n dictionary['video_files'] , dictionary['video_files_nrk'] , \\\n dictionary['fb_like'] , dictionary['fb_share'] , \\\n dictionary['googleplus_share'] , dictionary['twitter_share'] , \\\n dictionary['others_share'] , dictionary['email_share'] , \\\n dictionary['map'] , dictionary['poll'] , dictionary['game'])\n \n\n\n\n\n return dictionary", "def get_products_from_page(url):\n\n def get_data_from_book(book):\n \"\"\"Return data from one book.\"\"\"\n src_img = book.find(\"img\").get(\"src\")\n src_img = src_img.replace(\"../\", \"\")\n image = \"http://books.toscrape.com/\" + src_img\n\n in_stock = False\n in_stock_or_not = book.find(\"p\", {\"class\", \"instock\"}).text\n if \"In stock\" in in_stock_or_not:\n in_stock = True\n\n name = book.find(\"h3\").find(\"a\").text\n\n price = book.find(\"p\", {\"class\", \"price_color\"}).text\n price = price.replace(\"Â\", \"\")\n\n rating = book.find(\"p\", {\"class\", \"star-rating\"}).get(\"class\")[1]\n rating = w2n.word_to_num(rating)\n\n return {\n \"image\": image,\n \"in_stock\": in_stock,\n \"name\": name,\n \"price\": price,\n \"rating\": rating,\n }\n\n r = requests.get(url)\n soup = BeautifulSoup(r.text, \"html.parser\")\n books = soup.find_all(\"article\", {\"class\", \"product_pod\"})\n\n result = list(map(get_data_from_book, books))\n return result", "async def quote(self, ctx: discord.ext.commands.Context):\n message_channel: discord.abc.Messageable = ctx.message.channel\n async with aiohttp.ClientSession() as session:\n async with session.get(\n \"https://quotesondesign.com/wp-json/wp/v2/posts/?orderby=rand\") as resp:\n json_resp = await resp.json()\n r = json_resp[random.randint(0, len(json_resp)-1)]\n await message_channel.send(\"**\" + BotMethods.cleanhtml(\"From \" + r['title']['rendered']) + \":**\" + html.unescape(\n BotMethods.cleanhtml(r['content']['rendered'])))", "def pizza():\n url = str(request.args.get('url'))\n html = urlopen(url).read()\n pizzas = parseHtml(html)\n return pizzas", "def page_data():\n return scrape()", "def 
get_random_quote(self):\n url = \"https://got-quotes.herokuapp.com/quotes\"\n q = Request(url)\n q.add_header('User-Agent', 'curl/7.51.0')\n q.add_header('Accept', 'application/json')\n\n result = urlopen(q).read().decode('utf-8')\n data = json.loads(result)\n\n return data", "async def quotes(self, ctx):\n\n\t\tawait self.message_leaderboard(ctx, \"quotes\")", "def get_cur_quotes(self):\n self.form_url_str()\n if self.__print_url: print self.cur_quotes_full_url\n self.downloading_csv(self.cur_quotes_full_url)\n self.cur_quotes_create_dataframe()", "def HTMLparser(self):\n soup = self.getHTML()\n \n # Sort through all the text in the html:\n for text in soup.find_all('p'):\n try:\n paragraphNo = int(text.parent.p['id'][14:])\n \n # Only grab paragraphs in \"On the Social Contract\"\n if paragraphNo < self.START_PARAGRAPH or paragraphNo > self.END_PARAGRAPH:\n continue\n \n elif text.string:\n \n # Ignore those \"paragraphs\" in the html that simply outline different chapters/books\n if re.search('^(CHAPTER|BOOK)(.*):', text.string):\n continue\n \n else:\n \n # Want to read in the document by sentence (for RousseauBot to use individually later on)\n tempList = re.split('(?<!etc)\\.\\s(?!.*\\\")|\\!', text.string)\n for sentence in tempList:\n \n # When a \"paragraph\" is just a single sentence, re's .split() returns the sentence and a ''\n # Also, remove overly long quotes - Twitter has char limit\n if sentence != '' and len(sentence.strip()) < self.TWITTER_LIMIT:\n self.quotes.append(sentence.strip())\n \n except KeyError:\n \n # BS throws KeyError when <p>'s id field is blank; ignore - all paragraphs I need has an id\n continue", "def get_certain_joke(html):\n soup = BeautifulSoup(html, 'lxml')\n\n # for i in len(soup.select('div.content')):\n # print(soup.select('div.content')[i].get_text())\n\n joke_content = soup.select('div.content')[0].get_text()\n\n return joke_content", "def extract_from_html(self, data, url):\n\n records = [ ]\n self.logger.debug(\"Found %d recipes in %s\" % (len(data.xpath(self.scope)), url))\n for rcp in data.xpath(self.scope):\n\n record = { }\n for prop in [ \"name\", \"recipeYield\", \"author\" ]:\n record[prop] = self.extract_text(prop, rcp)\n for prop in [ \"image\" ]:\n record[prop] = self.extract_attribute(prop, [ \"content\", \"src\" ], data)\n for prop in [ \"totalTime\", \"prepTime\", \"cookTime\", \"datePublished\" ]:\n record[prop] = self.extract_attribute(prop, [ \"content\" ], rcp)\n # I have no idea if cookingMethod should be text or a list because I've never seen it so\n # using the most general option for it\n for prop in [ \"recipeIngredient\", \"recipeInstructions\", \"cookingMethod\",\n \"recipeCategory\", \"recipeCuisine\" ]:\n record[prop] = self.extract_list(prop, rcp)\n\n # Older versions of the schema use \"ingredients\" rather than \"recipeIngredient\"\n if not record[\"recipeIngredient\"]:\n record[\"recipeIngredient\"] = self.extract_list(\"ingredients\", rcp)\n\n record = dict([ (k, v) for k, v in record.items() if k in self.store_fields ])\n record[\"url\"] = url\n record[\"collect_time\"] = datetime.utcnow()\n\n if self.validate(record):\n records.append(record)\n\n return records", "def scrape_listings():\n scraped_listings = [] # Used to store apartment listings\n links = [] # Used to store links to apartment listings (seperate tag)\n\n # Download \"The Canon\" website\n URL = \"https://www.thecannon.ca/classifieds/housing\"\n headers = program_features.HEADERS\n page = requests.get(URL, headers=headers)\n\n # Parse document\n 
soup = BeautifulSoup(page.content, \"html.parser\")\n page_listings = list(soup.find_all('td')) # Find all listing information, and store as list\n\n # Used to find URL parameters for each apartment listing\n for link in soup.find_all(\"a\"):\n # URL Format Example: \"<a href=\"/page.php?cid=347306&amp;id=26&amp;t=housing\">1219 Gordon St, Guelph</a>\"\n if link.has_attr('href') and (\"t=housing\" in link.attrs['href']):\n links.append(\"https://www.thecannon.ca\" + link.attrs['href'])\n\n # Iterate list \n for i, listing in enumerate(page_listings, 1):\n # Group every 10 elements into a listing object\n if i % 10 == 0:\n index = int(i / 10) - 1 # Calculate index of link that matches the current listing \n\n # Append listing object to array\n scraped_listings.append(\n # Create listing object\n WebListing(\n page_listings[i - 10].get_text().strip(), # Date post was created\n page_listings[i - 9].get_text().strip(), # Date apartment is available\n page_listings[i - 8].get_text().strip(), # Offering type\n page_listings[i - 7].get_text().strip(), # Housing type\n page_listings[i - 6].get_text().strip(), # Address \n page_listings[i - 5].get_text().strip(), # Price\n page_listings[i - 4].get_text().strip(), # Distance \n page_listings[i - 3].get_text().strip(), # Sublet permission\n page_listings[i - 2].get_text().strip(), # Number of rooms\n page_listings[i - 1].get_text().strip(), # Features\n links[index][38:44], # Listing ID (stored in link)\n links[index] # Listing Link\n )\n )\n\n return scraped_listings # Return listings array", "def get_all_grammy_pages():\n res = urllib2.urlopen(GRAMMY_DATA_URL)\n html = res.read()\n\n lines = [line.strip() for line in html.split(\"\\n\") if \"More Winners\" in line]\n urls = [re.search('\\\".*\\\"',line).group(0).replace('\"','') for line in lines]\n return urls" ]
[ "0.6069228", "0.6050294", "0.59508425", "0.58939606", "0.589346", "0.58812517", "0.5798072", "0.5686617", "0.5512013", "0.5441507", "0.5422525", "0.54129744", "0.5399996", "0.5375564", "0.5344539", "0.5341562", "0.5331536", "0.5272266", "0.525688", "0.5251573", "0.52459145", "0.5245627", "0.52438354", "0.5209525", "0.51868224", "0.5124121", "0.51144797", "0.51098037", "0.5105222", "0.5098849" ]
0.73806167
0
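A note on the negative passages in the record above: they are all BeautifulSoup scrapers, and nearly every one repeats the same guarded-lookup idiom — find a tag, check that it exists, and only then read its text or attributes. A minimal self-contained sketch of that idiom (the HTML string, tag names, and attrs below are illustrative, not taken from any one snippet):

    from bs4 import BeautifulSoup

    html = '<div id="bookMeta"><span itemprop="ratingValue"> 4.2 </span></div>'
    soup = BeautifulSoup(html, "html.parser")

    # Guard each lookup: find() returns None on a miss, so dereferencing
    # .text or ['attr'] without the check raises on NoneType.
    rating = None
    meta = soup.find("div", attrs={"id": "bookMeta"})
    if meta:
        span = meta.find("span", attrs={"itemprop": "ratingValue"})
        if span:
            rating = span.text.strip()

    print(rating)  # "4.2"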
Prepares the directory of a target located at dst for copying and returns its full location as specified below. src can be the location of a source file target, which is used, for example, by file copy or move operations. When dst is already a directory, calling this method has no effect and the dst path is returned, optionally joined with the basename of src. When dst is a file, the absolute dst path is returned. Otherwise, when dst does not exist yet, it is interpreted as a file path and missing directories are created (when the create_file_dir flag is set).
def _prepare_dst_dir(self, dst, src=None, perm=None, **kwargs): if self.isdir(dst): full_dst = os.path.join(dst, os.path.basename(src)) if src else dst elif self.isfile(dst): full_dst = dst else: # interpret dst as a file name, create missing dirs dst_dir = self.dirname(dst) if dst_dir and self.create_file_dir and not self.isdir(dst_dir): self.mkdir(dst_dir, perm=perm, recursive=True) full_dst = dst return full_dst
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _prepare_dst_dir(self, dst, src=None, perm=None, **kwargs):\n rstat = self.exists(dst, stat=True)\n\n if rstat:\n if self.file_interface.isdir(dst, stat=rstat) and src:\n full_dst = os.path.join(dst, os.path.basename(src))\n else:\n full_dst = dst\n\n else:\n # interpret dst as a file name, create missing dirs\n dst_dir = self.dirname(dst)\n if dst_dir and self.create_file_dir and not self.isdir(dst_dir):\n self.mkdir(dst_dir, perm=perm, recursive=True, **kwargs)\n full_dst = dst\n\n return full_dst", "def dst_to_src(self,dst_file):\n rel_path=os.path.relpath(dst_file,start=self.dst_root)\n if (rel_path == '.'):\n rel_path=''\n else:\n rel_path= '/'+rel_path\n if (os.sep != '/'):\n # if directoty path sep isn't / then translate for URI \n rel_path=rel_path.replace(os.sep,'/')\n return(self.src_root+rel_path)", "def copy_dir(src: Text, dst: Text) -> None:\n\n if tf.io.gfile.exists(dst):\n tf.io.gfile.rmtree(dst)\n tf.io.gfile.makedirs(dst)\n\n for dir_name, sub_dirs, leaf_files in tf.io.gfile.walk(src):\n for leaf_file in leaf_files:\n leaf_file_path = os.path.join(dir_name, leaf_file)\n new_file_path = os.path.join(dir_name.replace(src, dst, 1), leaf_file)\n tf.io.gfile.copy(leaf_file_path, new_file_path)\n\n for sub_dir in sub_dirs:\n tf.io.gfile.makedirs(os.path.join(dir_name.replace(src, dst, 1), sub_dir))", "def prepare(self, dst, options):\n self.checkExisting(dst)\n self.makedirs(dst.parent())", "def _copy_dir(src, dst):\n if os.path.isdir(src):\n os.makedirs(dst, exist_ok=True)\n for item in os.listdir(src):\n s = os.path.join(src, item)\n d = os.path.join(dst, item)\n\n if os.path.isdir(s):\n _copy_dir(s, d)\n else:\n shutil.copy2(s, d)\n\n else:\n os.makedirs(os.path.dirname(dst), exist_ok=True)\n _delete_file(dst)\n shutil.copy2(src, dst)", "def clone(src: str, dst: str):\n if dst is None:\n dst = getcwd()\n destination = path.abspath(dst)\n # TODO: replace with false this is just for testing:\n makedirs(destination, exist_ok=True)\n\n sync_chunk(src, destination)\n copy(src, destination)", "def safecopy(src, dst):\r\n abs_src = os.path.abspath(src)\r\n abs_dst = os.path.abspath(dst)\r\n if (abs_src != abs_dst) \\\r\n and os.path.isfile(abs_src): \r\n dirname = os.path.dirname(abs_dst)\r\n recurse_mkdir(dirname)\r\n shutil.copy(abs_src, abs_dst)", "def copy(self, src, dst, label=None):\n self._tag(dst, label)\n self._mkdir_for(dst)\n shutil.copyfile(self._rootjoin(src), os.path.join(self.chroot, dst))", "def make_relative_path(source, dest, dest_is_directory=True):\n source = os.path.dirname(source)\n if not dest_is_directory:\n dest_filename = os.path.basename(dest)\n dest = os.path.dirname(dest)\n else:\n dest_filename = None\n dest = os.path.normpath(os.path.abspath(dest))\n source = os.path.normpath(os.path.abspath(source))\n dest_parts = dest.strip(os.path.sep).split(os.path.sep)\n source_parts = source.strip(os.path.sep).split(os.path.sep)\n while dest_parts and source_parts and dest_parts[0] == source_parts[0]:\n dest_parts.pop(0)\n source_parts.pop(0)\n full_parts = [\"..\"] * len(source_parts) + dest_parts\n if not dest_is_directory and dest_filename is not None:\n full_parts.append(dest_filename)\n if not full_parts:\n # Special case for the current directory (otherwise it'd be '')\n return \"./\"\n return os.path.sep.join(full_parts)", "def copy(self, src, dst, label=None):\r\n self._tag(dst, label)\r\n self._mkdir_for(dst)\r\n shutil.copyfile(self._rootjoin(src), os.path.join(self.chroot, dst))", "def copy(self, src, dst, label=None):\r\n self._tag(dst, 
label)\r\n self._mkdir_for(dst)\r\n shutil.copyfile(self._rootjoin(src), os.path.join(self.chroot, dst))", "def copy_dir(src, dst):\n try:\n debug.log(\"copy dir from \"+ src, \"to \"+ dst)\n shutil.copytree(src, dst)\n except Exception as e:\n debug.log(\"Error: happened while copying!\\n%s\\n\"%e)", "def src_to_dst(self,src_uri):\n m=re.match(self.src_root+\"(.*)$\",src_uri)\n if (m is None):\n raise \"FIXME - Does not match\"\n rel_path=m.group(1)\n if (os.sep != '/'):\n # if directoty path sep isn't / then translate for URI \n rel_path=rel_path.replace('/',os.sep)\n return(self.dst_root+rel_path)", "def CopyPath(options, src, dst):\n if options.includes:\n if not IncludeFiles(options.includes, [src]):\n return\n\n if options.excludes:\n if not ExcludeFiles(options.excludes, [src]):\n return\n\n if options.verbose:\n print('cp %s %s' % (src, dst))\n\n # If the source is a single file, copy it individually\n if os.path.isfile(src):\n # We can not copy over a directory with a file.\n if os.path.exists(dst):\n if not os.path.isfile(dst):\n msg = \"cp: cannot overwrite non-file '%s' with file.\" % dst\n raise OSError(msg)\n # If the destination exists as a file, remove it before copying to avoid\n # 'readonly' issues.\n os.remove(dst)\n\n # Now copy to the non-existent fully qualified target\n shutil.copy(src, dst)\n return\n\n # Otherwise it's a directory, ignore it unless allowed\n if os.path.isdir(src):\n if not options.recursive:\n print(\"cp: omitting directory '%s'\" % src)\n return\n\n # We can not copy over a file with a directory.\n if os.path.exists(dst):\n if not os.path.isdir(dst):\n msg = \"cp: cannot overwrite non-directory '%s' with directory.\" % dst\n raise OSError(msg)\n else:\n # if it didn't exist, create the directory\n os.makedirs(dst)\n\n # Now copy all members\n for filename in os.listdir(src):\n srcfile = os.path.join(src, filename)\n dstfile = os.path.join(dst, filename)\n CopyPath(options, srcfile, dstfile)\n return", "def install(src, dst):\n try:\n dst = os.path.join(install_dir, dst, os.path.basename(src))\n src = os.path.join(source_dir, src)\n assert os.path.isfile(src)\n assert not os.path.isdir(dst)\n if not os.path.isdir(os.path.dirname(dst)):\n os.makedirs(os.path.dirname(dst))\n shutil.copy(src, dst)\n print 'Installed', dst\n except Exception:\n print 'Could not install', dst", "def get_shortest_path(self, src, dst):\n \n return self.get_sorted_paths(src, dst)[0]", "def _copy_file_with_parents(src, dst, ignore_no_src=False):\n if not os.path.isfile(src) and ignore_no_src:\n return\n\n dst_dir = os.path.dirname(dst)\n create_directories(dst_dir)\n\n copyfile(src, dst)", "def copydir(src, dst):\n for item in os.listdir(src):\n s, d = os.path.join(src, item), os.path.join(dst, item)\n if os.path.isdir(s):\n if not os.path.isdir(d):\n os.mkdir(d)\n copydir(s, d)\n else:\n shutil.copy(s, d)", "def path_src_to_dest(src_pathname, dest_filename_suffix=None):\n src_relpath = Path(src_pathname).relative_to(config[\"topdir\"])\n dest_pathname = Path(config[\"outdir\"]).joinpath(src_relpath)\n if dest_filename_suffix:\n dest_pathname = dest_pathname.with_suffix(dest_filename_suffix)\n return dest_pathname", "def copy(src, dst):\n os.makedirs(os.path.dirname(dst), exist_ok=True)\n shutil.copy2(src, dst)", "def copy_file(filename, dst):\n # Create dir if needed\n dir_path = os.path.dirname(os.path.expanduser(dst))\n if not os.path.isdir(dir_path):\n os.makedirs(dir_path)\n\n src = os.path.join(get_data(''), filename)\n dst = os.path.expanduser(dir_path)\n 
shutil.copy2(src, dst)", "def copyDir(self, src, subpath):\n dst = self.output_path + \"/\" + subpath\n shutil.copytree(src, dst)", "def copy_deep(src: str, dst: str, create_dst_dir: bool = False) -> None:\n system_is_darwin = platform.system().lower() == \"darwin\"\n if create_dst_dir:\n mkdir_p(os.path.dirname(dst))\n src_is_link = os.path.islink(src)\n dst_exists = os.path.lexists(dst)\n if os.path.isdir(src) and not src_is_link:\n logging.debug(\"Copying directory {} to {}\".format(src, dst))\n mkdir_p(dst)\n for name in os.listdir(src):\n copy_deep(os.path.join(src, name), os.path.join(dst, name))\n elif src_is_link:\n if dst_exists:\n return\n target = os.readlink(src)\n logging.debug(\"Creating symlink {} -> {}\".format(dst, target))\n os.symlink(target, dst)\n else:\n if dst_exists:\n if not system_is_darwin:\n return\n # Only overwrite the file if the source is newer than the destination.\n if os.path.getmtime(src) <= os.path.getmtime(dst):\n return\n logging.debug(\"Copying file {} to {}\".format(src, dst))\n # Preserve the file attributes.\n shutil.copy2(src, dst)", "def rel(dest, curdir):\n\n sc = splitpath(curdir)\n sd = splitpath(dest)\n\n while len(sc) > 0 and len(sd) > 0:\n if sc[0] != sd[0]:\n break\n sc = sc[1:]\n sd = sd[1:]\n\n if len(sc) == 0 and len(sd) == 0:\n out = \"\"\n elif len(sc) == 0:\n out = apply(join, sd)\n elif len(sd) == 0:\n out = apply(join, map(lambda x: os.pardir, sc))\n else:\n out = apply(join, map(lambda x: os.pardir, sc) + list(sd))\n\n # make sure the path is suitable for html consumption\n return out", "def copy_dir(src=\"\", dst=\"\", header=\"\", footer=\"\", clip=0, ext=\"\", test=False):\n failed = []\n nfiles = 0\n if not os.path.exists(dst):\n os.makedirs(dst)\n if not os.path.exists(src):\n raise argparse.ArgumentError(\"source does not exist! It must be a directory.\")\n else:\n for root, dirs, files in os.walk(src, topdown=False):\n for name in files:\n name_wo_ext, file_ext = os.path.splitext(name)\n\n src_path = os.path.join(root, name)\n dstfilename = header + os.path.join(root[len(src)+1:], name_wo_ext[clip:]) + footer + file_ext\n dst_path = os.path.join(dst, dstfilename)\n\n dst_pdir = os.path.dirname(dst_path)\n if not os.path.exists(dst_pdir):\n os.makedirs(dst_pdir)\n\n if not os.path.exists(dst_path):\n if ext == \"\" or ext == file_ext[1:]:\n try:\n shutil.copy(src_path, dst_path)\n except:\n failed.append(src_path)\n print(f\"... {src_path} failed\")\n else:\n print(f\"... {dst_path} already exists'. 
Skipping\")\n nfiles += 1\n\n if test:\n break\n if test:\n break\n print(f\"{nfiles - len(failed)} / {nfiles} files were copied.\")\n return failed", "def buildDestination(self, mapping, options, src):\n prefixTemplate = options['prefix']\n if prefixTemplate is None:\n prefixTemplate = self.defaultPrefixTemplate\n\n if prefixTemplate is not None:\n prefix = os.path.expanduser(\n prefixTemplate.safe_substitute(mapping))\n else:\n prefixTemplate = string.Template(src.dirname())\n prefix = prefixTemplate.template\n\n ext = src.splitext()[-1]\n\n nameTemplate = options['name']\n if nameTemplate is None:\n nameTemplate = self.defaultNameTemplate\n\n filename = nameTemplate.safe_substitute(mapping)\n logging.msg(\n 'Building filename: prefix=%r name=%r mapping=%r' % (\n prefixTemplate.template, nameTemplate.template, mapping),\n verbosity=3)\n return FilePath(prefix).child(filename).siblingExtension(ext)", "def copyAsset(self, src, dst, **kw):\n if self.isfile(src):\n self.copyfile(src, dst)\n else:\n # copy folder\n if not self.exists(dst):\n self.makedirs(dst)\n for name in self.listdir(src):\n self.copyAsset(self.joinpath(src, name), self.joinpath(dst, name), copycache=0)\n\n # copy cache\n cache_src = self.cache_path(src)\n if not os.path.exists(cache_src):\n return\n\n cache_dst = self.cache_path(dst)\n cache_dst_parent = os.path.dirname(cache_dst)\n if not os.path.exists( cache_dst_parent ):\n os.makedirs(cache_dst_parent )\n if not os.path.exists(cache_dst):\n ucopytree(cache_src, cache_dst)", "def copy(src, dst):\n try:\n shutil.copytree(src, dst)\n except OSError as exc:\n if exc.errno == errno.ENOTDIR:\n shutil.copy(src, dst)\n else:\n raise", "def copy(src, dst):\n try:\n shutil.copytree(src, dst)\n except OSError as exc:\n if exc.errno == errno.ENOTDIR:\n shutil.copy(src, dst)\n else:\n raise", "def move(self,src,dst):\n src = os.path.join(self.testpath,src)\n dst = os.path.join(self.testpath,dst)\n directory = os.path.split(dst)[0]\n try:\n os.makedirs(directory)\n except OSError:\n pass\n\n shutil.move(src,dst)" ]
[ "0.7756741", "0.66299", "0.6474231", "0.63675195", "0.63613135", "0.63447046", "0.62880516", "0.62483966", "0.62306374", "0.6207867", "0.6207867", "0.6200936", "0.61562246", "0.615402", "0.61524415", "0.60604155", "0.6053617", "0.6051028", "0.6029516", "0.59734154", "0.593691", "0.59244937", "0.5919101", "0.58693165", "0.5865745", "0.58336025", "0.57903606", "0.57276183", "0.57276183", "0.5727572" ]
0.77938974
0
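Both the positive document and the highest-scored negative in this record implement the same destination-preparation contract. A dependency-free sketch of that contract using only os (a module-level function standing in for the instance methods; the real implementations dispatch through their own isdir/isfile/mkdir):

    import os

    def prepare_dst_dir(dst, src=None, create_file_dir=True):
        # dst is an existing directory: optionally append the source basename.
        if os.path.isdir(dst):
            return os.path.join(dst, os.path.basename(src)) if src else dst
        # dst is an existing file: return it unchanged.
        if os.path.isfile(dst):
            return dst
        # dst does not exist: treat it as a file path, create missing parents.
        dst_dir = os.path.dirname(dst)
        if dst_dir and create_file_dir and not os.path.isdir(dst_dir):
            os.makedirs(dst_dir, exist_ok=True)
        return dst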
Constructs a Datastore key for a phone
def phone_key(phone_id=None): return db.Key.from_path('Pictbook', phone_id or 'default_phone')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_key ():", "def make_external_key(self, data):\n return data['key']", "def build_key(cls, song_id):\n return ndb.Key(cls, song_id)", "def ds_key(ip, port):\n return DS_PREFIX + ip + ':' + str(port)", "def keygen(cls, device, **kwargs):\n key = '%s/%s' % (cls.__name__, device)\n for v in kwargs.values():\n key += '/' + str(v)\n return key", "def _create_key(chip):\n try:\n suffix = chip['version'][0]\n except IndexError:\n suffix = ''\n\n if chip['classification'] == 'secret':\n classification = 'z'\n else:\n classification = chip['classification'][0]\n\n return '%s-%s%s%s' % (chip['game'], classification, chip['indice'], suffix)", "def MakeKey(self, string, string_1, string_2):\n ...", "def get_key_id(self):", "def key(self):\n def validate(name):\n '''Compute the key if necessary and validate'''\n found = getattr(self, name)\n value = found() if callable(found) else found\n if value is None:\n raise BadKeyError(\"The key for %s cannot be None\" % self)\n return str(value) \n if self.__key is None:\n namespace, kind, key = Schema.Get(self)\n self.__id = key\n value = validate(key)\n self.__key = Key(namespace, kind, value)\n else:\n self.__key.id = validate(self.__id)\n return self.__key", "async def build_key(self, attribute, value, record_id='*'):\n self.key = '{var1}:{var2}:{var3}:{var4}:{var5}'.format(var1=record_id, var2=self.industry, var3=self.merchant,\n var4=attribute, var5=value)", "def generate_key(self):\n return str(uuid4())", "def database_key(database_name='mnddb01'):\n return ndb.Key('db', database_name)", "def build_key(model, id):\n return \"{}.{}\".format(model.__name__, id)", "def generate_key():\n return str(uuid.uuid4())", "def make_embedded_key(uid):\n return \"embedded-{0}\".format(uid)", "def get_datastore_key(model, pk):\n\n kind = get_top_concrete_parent(model)._meta.db_table\n return Key.from_path(kind, pk)", "def dbmodel_key(model_name=DATABASE_NAME):\n return ndb.Key('ModelDB', model_name)", "def generate_key():\n # generate random key\n key = get_random_string()\n\n # if it's already taken, generate another\n if EmailManager.objects.filter(key=key).exists():\n return EmailManager.generate_key()\n\n # return it\n return key", "def get_key(self) -> str:\n return f'{self.address}_{self.port}'", "def game_key(proto_obj):\n return game_key_full(proto_obj.id_str)", "def _create_key(_type, name):\n return \"{}{}{}\".format(_type, DiagnosticManager._type_separator, name)", "def GenerateKey(self):\n self.key_name = self.key_name or str(uuid.uuid4())\n if self.key is None or not self.key.id():\n self.key = ndb.Key(self._get_kind(), self.key_name)\n return True\n return False", "def key(self):\n return self._key if self._key else self.factory().key", "def query_key(self):\r\n query_db_string = u'qs:%s::db:%s' % (self.query_string, self.db)\r\n return make_key(query_db_string, with_locale=False)", "def key_from_path(db_table, value):\r\n if isinstance(value, (int, long)):\r\n ValidateInteger(value, 'id')\r\n return Key.from_path(db_table, value)", "def generate_discovery_cache_key(name, ext):\n\n return 'wopi_' + name + '_' + ext", "def generate_prototype_key(self):\n return str(uuid.uuid5(UUID_XYZ_NAMESPACE, str((self.X, self.Y, self.Z))))", "def key(self)->str:\n return \"{}:{}.{}.{}\".format(self.source, self.db, self.ed, self.rec)", "def keyify(content_type_pk, pk):\n return '%s:%s' % (content_type_pk, pk)", "def meme_dbkey(meme_dbname=MEME_DB_NAME):\n return ndb.Key('meme_db', meme_dbname)" ]
[ "0.70503455", "0.6636935", "0.64906794", "0.6414169", "0.62262106", "0.62203693", "0.61674833", "0.6158786", "0.6078497", "0.59785026", "0.59565264", "0.5950048", "0.5886137", "0.58423275", "0.58145356", "0.5807809", "0.5783175", "0.57496804", "0.5748418", "0.57472914", "0.5743136", "0.5734063", "0.5729307", "0.5718008", "0.57087874", "0.5704833", "0.5700175", "0.5696428", "0.56753457", "0.5670179" ]
0.7932787
0
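The positive document above builds its key with the legacy App Engine db.Key.from_path(kind, name) call, while several of the negatives (ds_key, keygen, get_key) skip the datastore APIs and simply concatenate a kind prefix with an identifier. A dependency-free sketch of that string-key pattern (the function name and separator are assumptions, not from any snippet):

    def make_string_key(kind, ident=None, default="default", sep=":"):
        # Same shape as from_path(kind, id or default): one kind prefix,
        # one identifier segment, with a fallback name when ident is None.
        return f"{kind}{sep}{ident or default}"

    assert make_string_key("Pictbook", None, default="default_phone") == "Pictbook:default_phone"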
Update the API key. This can be used when you've regenerated your service API key and want to update long-lived clients.
def update_key(self, key): self._api_key = key
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def SetAPIKey(self, api_key):\n self._api_key = api_key", "def SetAPIKey(self, api_key):\n self._api_key = api_key", "def set_api_key(new_api_key):\n global api_key\n api_key = new_api_key", "def update_api_key(self, apikey_id, **kwargs):\n\n all_params = ['apikey_id', 'api_key']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method update_api_key\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'apikey_id' is set\n if ('apikey_id' not in params) or (params['apikey_id'] is None):\n raise ValueError(\"Missing the required parameter `apikey_id` when calling `update_api_key`\")\n\n resource_path = '/apikeys/{apikeyId}'.replace('{format}', 'json')\n path_params = {}\n if 'apikey_id' in params:\n path_params['apikeyId'] = params['apikey_id']\n\n query_params = {}\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'api_key' in params:\n body_params = params['api_key']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['privileges', 'apikey']\n\n response = self.api_client.call_api(resource_path, 'PUT',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='ApiKeyWithPrivileges',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def api_key(self, api_key):\n\n self._api_key = api_key", "def SetAPIKey(self, api_key):\n self._analyzer.SetAPIKey(api_key)", "async def statset_apikey(self, key):\n self._set_api_key(key)\n await self.bot.say(\"API key successfully set.\")", "def update_api_key(api_key, enable):\n if enable is not None:\n api.update(api_key, enable)\n else:\n click.echo(\"Example usage: lecli update apikey 12345678-aaaa-bbbb-1234-1234cb123456 \"\n \"--enable\")\n click.echo(\"Example usage: lecli update apikey 12345678-aaaa-bbbb-1234-1234cb123456 \"\n \"--disable\")", "def reset_api_key(request):\r\n user = request.user\r\n # Generate new api key and assign it to user's api key\r\n user.api_key = User.gen_api_key()\r\n return _api_response(request, {\r\n 'api_key': user.api_key,\r\n 'message': 'Api Key was successfully changed',\r\n })", "def api_key_set(self, api_key):\n self.request('/v1.1/auth_key', 'POST', body={'auth_key': api_key})", "def api_key(self, value):\n self.__creds.api_key_v2 = value", "def set_api_key(self, api_key):\n self.api_key = api_key\n self.session.auth = (\"api\", api_key)", "def set_api_key(api_key):\n Movie.__api_key = api_key", "def regenerate_API_key(self) -> None:\n session = create_session()\n new_key = generate_random_string(24)\n # Check if there is any user with exact same API key as just generated\n if new_key not in session.query(User.API_KEY).all():\n self.API_KEY = new_key\n session.merge(self)\n session.commit()\n else:\n while new_key in session.query(User.API_KEY).all():\n new_key = generate_random_string(24)\n self.API_KEY = new_key\n session.merge(self)\n session.commit()", "async def osukey(self, ctx, key):\r\n\r\n # Load config\r\n config_boards = await 
self.config.apikey()\r\n\r\n # Set new config\r\n await self.config.apikey.set(key)\r\n await ctx.send(\"The apikey has been added.\")", "def set_APIKey(self, value):\n super(UpdateTriggerInputSet, self)._set_input('APIKey', value)", "def reset_api_key(self):\r\n url = '{0}/{1}'.format(self.get_url(), 'api_key')\r\n\r\n return http.Request('POST', url, None), parsers.parse_json", "def api_key(self, api_key):\n if api_key is None:\n raise ValueError(\"Invalid value for `api_key`, must not be `None`\") # noqa: E501\n\n self._api_key = api_key", "def API_KEY(self):\n return 10", "def reset_api_key(name):\r\n user = User.query.filter_by(name=name).first()\r\n if not user:\r\n return abort(404)\r\n if current_user.name != user.name:\r\n return abort(403)\r\n\r\n title = (\"User: %s &middot; Settings\"\r\n \"- Reset API KEY\") % current_user.fullname\r\n user = db.session.query(model.user.User).get(current_user.id)\r\n user.api_key = model.make_uuid()\r\n db.session.commit()\r\n cached_users.delete_user_summary(user.name)\r\n msg = gettext('New API-KEY generated')\r\n flash(msg, 'success')\r\n return redirect(url_for('account.profile', name=name))", "async def apikey_bing(self, ctx, key):\n settings = loadauth()\n settings['apikey'] = key\n saveauth(settings)\n return await self.bot.say(\"Bing API key saved.\")", "def set_apikey(self, apikey):\n self.apikey = apikey\n self.__init_submodules(apikey)", "def set_api_key(self, force):\n\n if self.first_run == 'yes' and force:\n raise ValueError('Need redmine API key!')\n elif self.first_run == 'yes':\n input_api_key = 'y'\n elif not self.first_run == 'yes' and force:\n input_api_key = 'n'\n else:\n self.timelog.time_print(\"Would you like to set the redmine api key? (y/n)\")\n input_api_key = input()\n\n if input_api_key == 'y':\n self.timelog.time_print(\"Enter your redmine api key (will be encrypted to file)\")\n self.api_key = input()\n # Encode and send to json file\n self.config_loader.redmine_api_key_encrypted = Encryption.encode(Values.encryption_key,\n self.api_key).decode('utf-8')\n self.config_loader.first_run = 'no'\n self.config_loader.dump(self.config_json)\n else:\n # Import and decode from file\n self.timelog.time_print(\"Used Redmine API key from the json file.\")\n self.api_key = Encryption.decode(Values.encryption_key, self.api_key)\n\n import re\n if not re.match(r'^[a-z0-9]{40}$', self.api_key):\n self.timelog.time_print(\"Invalid Redmine API key!\")\n exit(1)", "def API_KEY(self):\n return 11", "def API_KEY(self):\n return 2", "def API_KEY(self):\n return 12", "def API_KEY(self):\n raise NotImplementedError()", "def api_key(self):\n return copy(self._api_key)", "def API_KEY(self):\n return 9", "def API_KEY(self):\n return 1" ]
[ "0.7447722", "0.7447722", "0.74372375", "0.7170894", "0.71437603", "0.70787835", "0.69710577", "0.6895766", "0.6862906", "0.6694672", "0.66607916", "0.657263", "0.6552993", "0.6464547", "0.64579034", "0.64280456", "0.6423422", "0.6378686", "0.6366006", "0.63227934", "0.6321924", "0.6291807", "0.61960953", "0.61743253", "0.6158551", "0.6151528", "0.61448336", "0.6144451", "0.6128186", "0.6111889" ]
0.8198307
0
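The positive document's update_key is a bare setter; for long-lived clients shared across threads, the swap is usually guarded so a rotation cannot interleave with a request that is mid-way through building its auth header. A minimal sketch of that design (the class, header format, and locking are assumptions, not any particular library's API):

    import threading

    class ApiClient:
        def __init__(self, api_key):
            self._lock = threading.Lock()
            self._api_key = api_key

        def update_key(self, key):
            # Swap the stored key under the lock so readers never observe
            # a half-rotated state.
            with self._lock:
                self._api_key = key

        def auth_header(self):
            with self._lock:
                return {"Authorization": f"Bearer {self._api_key}"}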
Create a TURN allocation.
async def connect(self): request = stun.Message(message_method=stun.Method.ALLOCATE, message_class=stun.Class.REQUEST) request.attributes['LIFETIME'] = self.lifetime request.attributes['REQUESTED-TRANSPORT'] = 0x11000000 try: response, _ = await self.request(request, self.server) except exceptions.TransactionFailed as e: response = e.response if response.attributes['ERROR-CODE'][0] == 401: # update long-term credentials self.nonce = response.attributes['NONCE'] self.realm = response.attributes['REALM'] self.integrity_key = hashlib.md5( ':'.join([self.username, self.realm, self.password]).encode('utf8')).digest() # retry request with authentication request.transaction_id = random_transaction_id() self.__add_authentication(request) response, _ = await self.request(request, self.server) relayed_address = response.attributes['XOR-RELAYED-ADDRESS'] logger.info('TURN allocation created %s', relayed_address) # periodically refresh allocation self.refresh_handle = asyncio.ensure_future(self.refresh()) return relayed_address
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_connection(location=None, bandwidth=None, connectionName=None):\n pass", "def createTMsender():\n global s_tmSender\n nctrsTMfields = GRND.NCTRS.NCTRStmFields()\n nctrsTMfields.spacecraftId = int(UTIL.SYS.s_configuration.SPACECRAFT_ID)\n s_tmSender = TMsender(\n portNr=int(UTIL.SYS.s_configuration.NCTRS_TM_SERVER_PORT),\n nctrsTMfields=nctrsTMfields)\n if not s_tmSender.openConnectPort(UTIL.SYS.s_configuration.HOST):\n sys.exit(-1)", "def createAllocation(name, tag, facility):\n return Allocation(Cuebot.getStub('allocation').Create(\n facility_pb2.AllocCreateRequest(name=name, tag=tag, facility=facility),\n timeout=Cuebot.Timeout).allocation)", "def create_outbound(self, addr, use_new_connection=False):", "def create(self):\n logging.warning(\n \"IRC back-end does not support explicit creation, joining room \"\n \"instead to ensure it exists.\"\n )\n self.join()", "def create_tournament():\n data = view.create_tournament_view()\n tournament = Tournament(data['name'], data['place'], data['description'], data['turn_number'])\n return tournament", "def create():", "def create():", "def create(self):\n\t\treturn handle_to_object(call_sdk_function('PrlPortFwd_Create'))", "async def new(ctx):\n if ctx.message.channel.name.lower() not in tod_channels:\n return\n\n room = ctx.message.channel.name.lower()\n host = ctx.message.author\n if room not in tod_games:\n tod_games[room] = {'host': host.name, 'host_id': host.name, 'participants': {}, 'last': None}\n tod_games[room]['current'] = host.name\n tod_games[room]['last'] = host.name\n tod_games[room]['participants'][host.name.lower()] = {'spins': 0}\n await amor_manager.say(\"New Game of Truth Or Dare started in {}\".format(room))\n else:\n host = tod_games[room]['host']\n await amor_manager.say(\"Truth or Dare already in progress in {}. 
Game host: {}\".format(room, host))", "def create(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVirtNet_Create'))", "def allocate_for_instance(self, context, instance, **kwargs):\n args = kwargs\n args['instance_id'] = instance['id']\n args['project_id'] = instance['project_id']\n args['host'] = instance['host']\n args['instance_type_id'] = instance['instance_type_id']\n\n return rpc.call(context, FLAGS.network_topic,\n {'method': 'allocate_for_instance',\n 'args': args})", "async def create_turn_endpoint(protocol_factory, server_addr, username, password, lifetime=600):\n loop = asyncio.get_event_loop()\n _, inner_protocol = await loop.create_datagram_endpoint(\n lambda: TurnClientProtocol(server_addr,\n username=username,\n password=password,\n lifetime=lifetime),\n family=socket.AF_INET)\n\n protocol = protocol_factory()\n transport = TurnTransport(protocol, inner_protocol)\n await transport._connect()\n\n return transport, protocol", "def create(self, params):\n return self.make_client_call(\n 'create_vpn_connection_route', params)", "def create_connection(\n self,\n from_id: str,\n to_id: str\n ):\n raise NotImplementedError", "def create_session(self, transport):\n session_id = self.session_id_allocator.allocate()\n session = self.SESSION_CLS(self, transport, session_id)\n self.sessions[session.id] = session\n return session", "def makeConnection(self, transport):\n pass", "def create_acct_packet(self, **args):\n return host.Host.create_acct_packet(self, secret=self.secret, **args)", "def create_session(self, transport):\n session_id = self.session_id_allocator.allocate()\n session = self.SESSION_CLS(self, transport, session_id, self.message_mgr)\n self.sessions[session.id] = session\n return session", "def create_bgp_peer(virtualInterfaceId=None, newBGPPeer=None):\n pass", "def alloc_request():\n return SentmanRequest(SentmanRequest.ALLOCATE_SENTINEL)", "async def create_accounts(self):\n self._logger.info(\"Creating accounts...\")\n\n validator_peer_id = ((self.my_id - 1) % self.num_validators) + 1\n host, _ = self.experiment.get_peer_ip_port_by_id(validator_peer_id)\n horizon_uri = \"http://%s:%d\" % (host, 19000 + validator_peer_id)\n\n root_keypair = Keypair.from_secret(\"SDJ5AQWLIAYT22TCYSKOQALI3SNUMPAR63SEL73ASALDP6PYDN54FARM\")\n async with Server(horizon_url=horizon_uri, client=AiohttpClient()) as server:\n root_account = await server.load_account(root_keypair.public_key)\n self.root_seq_num = root_account.sequence\n self._logger.info(\"Setting root sequence number to %d\", self.root_seq_num)\n\n builder = TransactionBuilder(\n source_account=root_account,\n network_passphrase=\"Standalone Pramati Network ; Oct 2018\"\n )\n\n async def append_create_account_op(builder, root_keypair, receiver_pub_key, amount):\n builder.append_create_account_op(receiver_pub_key, amount, root_keypair.public_key)\n if len(builder.operations) == 100:\n self._logger.info(\"Sending create transaction ops...\")\n tx = builder.build()\n tx.sign(root_keypair)\n response = requests.get(\"http://%s:%d/tx?blob=%s\" % (host, 11000 + validator_peer_id,\n quote_plus(tx.to_xdr())))\n self._logger.info(\"Received response for create accounts request: %s\", response.text)\n\n await sleep(2)\n\n self.root_seq_num += 1\n\n partial_root_acc = Account(root_keypair.public_key, self.root_seq_num)\n builder = TransactionBuilder(\n source_account=partial_root_acc,\n network_passphrase=\"Standalone Pramati Network ; Oct 2018\"\n )\n\n return builder\n\n for client_index in range(self.num_validators + 1, 
self.num_validators + self.num_clients + 1):\n receiver_keypair = Keypair.random()\n builder = await append_create_account_op(builder, root_keypair, receiver_keypair.public_key, \"10000000\")\n self.experiment.send_message(client_index, b\"receive_account_seed\", receiver_keypair.secret.encode())\n\n # Create the sender accounts\n for account_ind in range(self.num_accounts_per_client):\n sender_keypair = Keypair.random()\n builder = await append_create_account_op(builder, root_keypair, sender_keypair.public_key, \"10000000\")\n self.experiment.send_message(client_index, b\"send_account_seed_%d\" % account_ind,\n sender_keypair.secret.encode())\n\n # Send the remaining operations\n if builder.operations:\n self._logger.info(\"Sending remaining create transaction ops...\")\n tx = builder.build()\n tx.sign(root_keypair)\n response = requests.get(\"http://%s:%d/tx?blob=%s\" % (host, 11000 + validator_peer_id,\n quote_plus(tx.to_xdr())))\n self._logger.info(\"Received response for create accounts request: %s\", response.text)\n self.root_seq_num += 1", "def create_chaussette(self):\n ch = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n ch.connect((self.address, self.port))\n\n return ch", "def Create(self):\n\n gateway = None\n netmask = None\n\n self._AcquireNetworkDetails()\n\n if self.is_vpc:\n # Create a VPC first\n\n cidr = '10.0.0.0/16'\n vpc = self.cs.create_vpc(self.vpc_name,\n self.zone_id,\n cidr,\n self.vpc_offering_id,\n self.project_id)\n self.vpc_id = vpc['id']\n gateway = '10.0.0.1'\n netmask = '255.255.255.0'\n\n acl = self.cs.get_network_acl('default_allow', self.project_id)\n assert acl, \"Default allow ACL not found\"\n\n\n # Create the network\n network = self.cs.create_network(self.network_name,\n self.network_offering_id,\n self.zone_id,\n self.project_id,\n self.vpc_id,\n gateway,\n netmask,\n acl['id'])\n\n\n\n assert network, \"No network could be created\"\n\n self.network_id = network['id']\n self.id = self.network_id", "async def new_tx(request: Request) -> dict:\n peer = request.client.host\n tx = await request.json()\n tx = Transaction(**tx)\n chain.mempool.put_nowait(tx)\n return {\"sender\": peer, \"receipt\": tx.receipt()}", "def create_transaction(self, receiver, amount, comment=\"\"):\n new_tx = Transaction.new(sender=self.pubkey, receiver=receiver,\n amount=amount, privkey=self.privkey,\n comment=comment)\n tx_json = new_tx.to_json()\n msg = \"t\" + json.dumps({\"tx_json\": tx_json})\n self.add_transaction(tx_json)\n self.broadcast_message(msg)\n return new_tx", "def create_transfer(self, src, dest, **kw):\n _submission_id = self._get_submission_id()\n if not _submission_id:\n logger.error(\"Globus: Unable to obtain Globus transfer submission ID\")\n return None\n _transfer = api_client.Transfer(_submission_id, src, dest,\n notify_on_succeeded=False, notify_on_failed=False, notify_on_inactive=False, **kw)\n self.transfer = _transfer\n return _transfer", "def remote_createTunnel(self, name, targetIP):\r\n if name not in self._bridges:\r\n raise InternalError('Bridge does not exist.')\r\n\r\n key = (name, targetIP)\r\n\r\n if key in self._uid:\r\n raise InternalError('Tunnel already exists.')\r\n\r\n while 1:\r\n uid = randomString(self._UID_LEN)\r\n\r\n if uid not in self._uid.itervalues():\r\n break\r\n\r\n self._uid[key] = uid\r\n port = 'gre-{0}'.format(uid)\r\n\r\n return execute(('/usr/bin/ovs-vsctl', 'add-port', 'br-{0}'.format(name),\r\n port, '--', 'set', 'interface', port, 'type=gre',\r\n 'options:remote_ip={0}'.format(targetIP)),\r\n 
reactor=self._reactor)", "def create(self):\n\t\tself.creating += 1\n\t\ttry:\n\t\t\tself.adopt(self.factory())\n\t\tfinally:\n\t\t\tself.creating -= 1", "def new_connection(self, transport):\n assert transport not in self.clients\n self.clients[transport] = Client(transport, self)" ]
[ "0.5665241", "0.5590401", "0.555418", "0.5452348", "0.54044276", "0.5399572", "0.5387226", "0.5387226", "0.5319276", "0.5272297", "0.5236255", "0.5185947", "0.5185715", "0.5166859", "0.51485175", "0.5109881", "0.5103653", "0.50794905", "0.50685924", "0.5062864", "0.505619", "0.5026404", "0.50212103", "0.50119776", "0.50094384", "0.5008326", "0.50034326", "0.49717394", "0.4969704", "0.49678767" ]
0.69904995
0
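The MD5 derivation inside connect() above is the STUN long-term credential mechanism (RFC 5389): on the first 401 the server supplies REALM and NONCE, the client derives key = MD5(username ":" realm ":" password), and only then can it retry the ALLOCATE with MESSAGE-INTEGRITY. A standalone sketch of just the key derivation (SASLprep of the password is omitted here, as it is in the document above):

    import hashlib

    def long_term_key(username, realm, password):
        # RFC 5389 long-term credentials: the HMAC key for MESSAGE-INTEGRITY
        # is the MD5 of "username:realm:password".
        return hashlib.md5(":".join([username, realm, password]).encode("utf8")).digest()

    print(long_term_key("alice", "example.org", "s3cret").hex())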
Periodically refresh the TURN allocation.
async def refresh(self): while True: await asyncio.sleep(5/6 * self.lifetime) request = stun.Message(message_method=stun.Method.REFRESH, message_class=stun.Class.REQUEST) request.attributes['LIFETIME'] = self.lifetime self.__add_authentication(request) await self.request(request, self.server)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def release(self):\n if self.refresh_handle:\n self.refresh_handle.cancel()\n self.refresh_handle = None\n\n request = stun.Message(message_method=stun.Method.REFRESH,\n message_class=stun.Class.REQUEST)\n request.attributes['LIFETIME'] = 0\n self.__add_authentication(request)\n await self.request(request, self.server)\n\n logger.info('TURN allocation released')\n if self.receiver:\n self.receiver.connection_lost(None)", "def refresh(self):\n\n self._refreshed_on = time.time() * 1000", "async def _timein_refresh(self):\n\t\t\n\t\tawait self.refresh_cache()", "def refresh(self) -> None:\n pass", "def refresh(self) -> None:\n pass", "def refresh(self) -> None:\n pass", "def refresh(self):\n pass", "def refresh(self):\n pass", "def refresh(self):\n self.active_member_count\n self.description\n self.lbmethod\n self.members\n self.minimum_active_member\n self.minimum_up_member\n self.slow_ramp_time\n self.statistics", "def refresh(self):\n self.fetch(False)", "def _refresh_discovery(self):\n if self.terminate_flag:\n return\n\n self.devices = discover_drones(self.ip_range, self.skyserve_port)\n time.sleep(self.refresh_interval / 1000)\n self._refresh_discovery()", "def refresh(self):\n self.lease = self.blazar.lease.get(self.id)", "def refresh(self):\n self.__refresh()", "def refresh(self):\n self._list_of_points = []\n self._add_points()", "def start_timer(self):\n print \"Timer Object Started. Will update ADC Information every %s seconds\" % self.refreshTime\n self.timer=Timer(float(self.refreshTime)*1000, self._refresh_Visible_channels)", "def _refresh(self):\n resp = self._cb.get_object(self._build_api_request_uri())\n self._info = resp\n self._last_refresh_time = time.time()\n return True", "def _refresh(self):\n resp = self._cb.get_object(self._build_api_request_uri())\n self._info = resp\n self._last_refresh_time = time.time()\n return True", "def update(self):\n if self._refreshed_at is None or (\n self._refreshed_at + self._refresh_rate <= datetime.datetime.now()):\n\n self.run()", "async def connect(self):\n request = stun.Message(message_method=stun.Method.ALLOCATE,\n message_class=stun.Class.REQUEST)\n request.attributes['LIFETIME'] = self.lifetime\n request.attributes['REQUESTED-TRANSPORT'] = 0x11000000\n\n try:\n response, _ = await self.request(request, self.server)\n except exceptions.TransactionFailed as e:\n response = e.response\n if response.attributes['ERROR-CODE'][0] == 401:\n # update long-term credentials\n self.nonce = response.attributes['NONCE']\n self.realm = response.attributes['REALM']\n self.integrity_key = hashlib.md5(\n ':'.join([self.username, self.realm, self.password]).encode('utf8')).digest()\n\n # retry request with authentication\n request.transaction_id = random_transaction_id()\n self.__add_authentication(request)\n response, _ = await self.request(request, self.server)\n\n relayed_address = response.attributes['XOR-RELAYED-ADDRESS']\n logger.info('TURN allocation created %s', relayed_address)\n\n # periodically refresh allocation\n self.refresh_handle = asyncio.ensure_future(self.refresh())\n\n return relayed_address", "def Refresh(self):\n pass", "def update(self):\n now = farc.Framework._event_loop.time()\n # Collect and prune expired neighbors\n expired_ngbrs = []\n for ngbr_addr, ngbr_data in self._ngbrs.items():\n frame = ngbr_data[\"BCN_FRAME\"]\n rx_time = frame.rx_meta[0]\n if now > rx_time + self._EXPIRATION_PRD:\n expired_ngbrs.append(ngbr_addr)\n for ngbr_addr in expired_ngbrs:\n del self._ngbrs[ngbr_addr]", "async def 
periodic_refresh(self):\n started_cycle = time()\n while True:\n now = time()\n if (now - started_cycle) < CYCLE_DELAY:\n await asyncio.sleep(CYCLE_DELAY - (now - started_cycle) + 1)\n started_cycle = time()\n await self.refresh_cache()\n # Never exits.", "def refresh(self):\r\n # todo, use vid_info as property instead of this\r\n # reset properties and rebuild streams\r\n self.setup()", "def LocalUpdate(self):\n\n # Get current timestamp in miliseconds from unix epoch\n t = int(time.time() * 1000)\n\n # Number of times refill has occured\n lstrefil = self.status['timestamp'] - (60000 - self.status['refillIn'])\n nrefil = (t - lstrefil) / 60000.0\n\n if nrefil > 1:\n self.status['tokensLeft'] += self.status['refillRate'] * \\\n int(nrefil)\n\n if self.status['tokensLeft'] > 60 * self.status['refillRate']:\n self.status['tokensLeft'] = 60 * self.status['refillRate']\n\n # Update timestamps\n self.status['timestamp'] = t\n self.status['refillIn'] = int((1 - nrefil % 1) * 60000)", "def refresh(self):\n raise NotImplementedError", "def refresh(self):\n raise NotImplementedError", "async def _updateFlow(self):\n while True:\n self._flow = (self._count - self._lastCount)/self._ppl*6\n self._lastCount = self._count\n await asyncio.sleep(10)", "def refresh(self) -> None:\n self._itempage.get()", "def run(self):\n last_time = time.time()\n while self.running:\n now_time = time.time()\n interval = now_time - last_time\n last_time = now_time\n self.update(interval)\n time.sleep(Options['update interval'])", "def refresh(self):\n raise NotImplementedError(\"To be implemented\")" ]
[ "0.68299145", "0.6135103", "0.5778847", "0.57332826", "0.57332826", "0.57332826", "0.5703909", "0.5703909", "0.5681639", "0.56502575", "0.5648896", "0.5647276", "0.560292", "0.55943274", "0.55868673", "0.55836076", "0.55836076", "0.5580496", "0.5553782", "0.5540401", "0.5504945", "0.5483464", "0.54787576", "0.54651904", "0.543585", "0.543585", "0.54182374", "0.5393253", "0.538623", "0.5381187" ]
0.6948456
0
Releases the TURN allocation.
async def release(self):
    if self.refresh_handle:
        self.refresh_handle.cancel()
        self.refresh_handle = None

    request = stun.Message(message_method=stun.Method.REFRESH,
                           message_class=stun.Class.REQUEST)
    request.attributes['LIFETIME'] = 0
    self.__add_authentication(request)
    await self.request(request, self.server)

    logger.info('TURN allocation released')
    if self.receiver:
        self.receiver.connection_lost(None)
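A minimal, self-contained sketch of the lifecycle this snippet implements: cancel the pending periodic-refresh task, then tear the allocation down. The `DummyAllocation` class and its names are hypothetical; a real client would additionally send the REFRESH request with LIFETIME=0 over the network.

```python
import asyncio

# Hedged sketch of the release pattern above, with hypothetical names:
# cancel the pending refresh task, then perform the (here simulated)
# LIFETIME=0 refresh that tells the TURN server to drop the allocation.
class DummyAllocation:
    def __init__(self):
        self.refresh_handle = None

    async def _refresh_loop(self):
        while True:
            await asyncio.sleep(60)  # a real client re-arms the allocation here

    async def connect(self):
        self.refresh_handle = asyncio.ensure_future(self._refresh_loop())

    async def release(self):
        if self.refresh_handle:
            self.refresh_handle.cancel()
            self.refresh_handle = None
        # a real client would now send REFRESH with LIFETIME=0 and await the reply

async def main():
    alloc = DummyAllocation()
    await alloc.connect()
    await alloc.release()

asyncio.run(main())
```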
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def release(self):\n if self._tr is not None:\n self._tr.release()", "async def release(self) -> None:\n ...", "async def release(self) -> None:\n ...", "async def release(self) -> None:\n ...", "def release(self):\r\n pass", "def release(self):\n self.stream.release()", "def release(self):\n if not self._acquired:\n return\n\n logger.debug(\"Releasing the media transport.\")\n self._proxy.proxy.Release()\n self._acquired = False", "def release(self):\n\n if self.session is not None:\n self.session.close()", "def release(self):\n self.acquired = False", "def release(self):\n del self.ref\n self.ref = None\n gc.collect()", "def release(self):\r\n self._pool.release(self.resource)\r\n self._pool = None", "def release(self):", "def api_release(self):\n\n self._api_release_lock_with_timer()", "def __del__(self):\n self.video.release()", "def __del__(self):\n\n if self._needs_release:\n send_message(self, \"release\", restype=objc_id, argtypes=[])", "def __del__(self):\n self.vid.release()", "def __del__(self):\n self.release()", "def __del__(self):\n self.release()", "def __del__(self):\n self.release()", "def __del__(self):\n self.release()", "def __del__(self):\n self.release()", "def __del__(self):\n self.release()", "def release(self):\n self._needs_release = False\n send_message(self, \"release\", restype=objc_id, argtypes=[])", "def __del__(self):\r\n self.release()", "def release(self):\n self.__wrapped__ = ReleasedResource()", "def release(local):\n local.__release__()", "def __del__(self):\n if self.video:\n self.video.release()", "def free(self):\n libruss.russ_sconn_free(self._ptr)\n self._ptr = None", "def release(self) -> DeprecatedAwaitable:\n self._lock.release()\n return DeprecatedAwaitable(self.release)", "def release_resource(self, uri):\n request = concert_service_msgs.CaptureResourceRequest()\n request.rocon_uri = uri\n request.release = True\n self.captured_resource_uri = request.rocon_uri\n msg_id = self._pair_capture_resource(request, timeout=rospy.Duration(self._capture_timeout), callback=self._release_resource_callback, error_callback=self._error_resource_callback)\n self._service_pair_msg_q.append(msg_id)" ]
[ "0.6602504", "0.6601743", "0.6601743", "0.6601743", "0.6311859", "0.6118295", "0.6088195", "0.6069963", "0.60375977", "0.60316527", "0.60200906", "0.59722954", "0.5802416", "0.57991636", "0.57692593", "0.57690936", "0.57463837", "0.57463837", "0.57463837", "0.57463837", "0.57463837", "0.57463837", "0.57257026", "0.5714316", "0.5699856", "0.569341", "0.55941", "0.5552198", "0.5540563", "0.54974717" ]
0.7737725
0
Execute a STUN transaction and return the response.
async def request(self, request, addr):
    assert request.transaction_id not in self.transactions
    transaction = stun.Transaction(request, addr, self)
    self.transactions[request.transaction_id] = transaction
    try:
        return await transaction.run()
    finally:
        del self.transactions[request.transaction_id]
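The key idea above is a registry of in-flight transactions keyed by transaction id, cleaned up in a `finally` block so that failed transactions do not leak entries. A self-contained sketch of that pattern (the `Client` class is hypothetical and the network round-trip is stubbed out with an immediately resolved future):

```python
import asyncio
import os

# Hedged sketch of the transaction-registry pattern: register by id,
# resolve (here immediately; normally when the response arrives), and
# always deregister in `finally`, even if the transaction fails.
class Client:
    def __init__(self):
        self.transactions = {}

    async def request(self, txn_id):
        assert txn_id not in self.transactions
        future = asyncio.get_running_loop().create_future()
        self.transactions[txn_id] = future
        try:
            future.set_result("response")  # stand-in for the network reply
            return await future
        finally:
            del self.transactions[txn_id]

async def main():
    print(await Client().request(os.urandom(12)))

asyncio.run(main())
```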
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def transaction(self, transaction):\n # Allow for a list of blocks..\n transaction = utils.request_type(transaction)\n\n res = r.get(self.url + self.tx_info + str(transaction))\n return self.execute(res)", "def _run_transaction(operation_name, txn):\n if not txn:\n return None\n\n response = None\n try:\n response = requests.put(PoliciesOutput.CONSUL_TRANSACTION_URL, json=txn)\n except requests.exceptions.RequestException as ex:\n ctx.logger.error(\n \"RequestException - failed to {0} at {1}: {2} on txn={3}\"\n .format(operation_name, PoliciesOutput.CONSUL_TRANSACTION_URL,\n str(ex), json.dumps(txn)))\n return None\n\n if response.status_code != requests.codes.ok:\n ctx.logger.error(\n \"failed {0} for {1} {2}: text={3} txn={4}\"\n .format(response.status_code, operation_name,\n PoliciesOutput.CONSUL_TRANSACTION_URL, response.text, json.dumps(txn)))\n return None\n ctx.logger.info(\n \"response {0} for {1} {2}: text={3} txn={4}\"\n .format(response.status_code, operation_name,\n PoliciesOutput.CONSUL_TRANSACTION_URL, response.text, json.dumps(txn)))\n return True", "def run(self, statement, parameters=None):\n if self.transaction:\n raise ProtocolError(\"Statements cannot be run directly on a session with an open transaction;\"\n \" either run from within the transaction or use a different session.\")\n return run(self.connection, statement, parameters)", "def post_transaction():\n tx_dict = encode_transaction(\"gautham=awesome\") \n print(tx_dict)\n\n tendermint_host = 'localhost'\n tendermint_port = 26657\n endpoint = 'http://{}:{}/'.format(tendermint_host, tendermint_port)\n\n payload = {\n 'method': 'broadcast_tx_commit',\n 'jsonrpc': '2.0',\n #'params': [encode_transaction(tx_dict)],\n 'params': [tx_dict],\n 'id': str(uuid4())\n }\n # TODO: handle connection errors!\n print(payload)\n return requests.post(endpoint, json=payload)", "def execute_transaction(self, statement, mapset=None):\n if mapset is None:\n mapset = self.current_mapset\n\n mapset = decode(mapset)\n if mapset not in self.tgis_mapsets.keys():\n self.msgr.fatal(_(\"Unable to execute transaction. 
\" +\n self._create_mapset_error_message(mapset)))\n\n return self.connections[mapset].execute_transaction(statement)", "def _send_execute_command(self):\n client = SBusClient(self.storlet_pipe_path)\n try:\n resp = client.execute(self.srequest.params, self.remote_fds)\n if not resp.status:\n raise StorletRuntimeException(\"Failed to send execute command\")\n\n if not resp.task_id:\n raise StorletRuntimeException(\"Missing task id\")\n else:\n self.task_id = resp.task_id\n except SBusClientException:\n raise StorletRuntimeException(\"Failed to send execute command\")", "def transaction():\n data = jsonpickle.decode(request.get_data())\n address = data[\"address\"]\n amount = int(data[\"amount\"])\n keyname = data[\"keyname\"]\n\n pkplus, pkminus = wallet.keys(keyname)\n\n my_balance = p2p.query(\"/balance\", address=pkplus)[\"balance\"]\n if my_balance < amount:\n abort(404, description=\"Not enough funds.\")\n\n my_utxo = p2p.query(\"/find-utxos\", address=pkplus, amount=amount)[\"utxos\"]\n rem = sum(utxo.amount for utxo in my_utxo) - amount\n address_amount = [(address, amount)]\n\n assert rem >= 0\n\n if rem > 0:\n address_amount.append((pkplus, rem))\n\n tx = build_transaction(my_utxo, address_amount, pkminus)\n try:\n p2p.broadcast(\"/transaction-pool\", transaction=tx)\n return SUCCESSFUL_PATCH\n except UnsuccessfulPatch:\n payload = jsonpickle.encode(\n {\"message\": \"Transaction wasn't accepted by the network.\"})\n return payload, 420, {\"ContentType\": \"application/json\"}", "def local_push(tx, rpc_user=None, rpc_password=None):\n\n rpc_connection = AuthServiceProxy(\"http://\"+rpc_user+\":\"+rpc_password+\"@127.0.0.1:18332\")\n\n try:\n tx_hash = rpc_connection.sendrawtransaction(tx)\n code = 200\n print \"Transaction broadcast \" + tx_hash\n except JSONRPCException as e:\n print e.message\n tx_hash = None\n code = 500\n\n return tx_hash, code", "def sendrawtransaction(self, hexstring):\n return self.proxy.sendrawtransaction(hexstring)", "def cypher_transaction():\n session = cypher.Session(HOST)\n return session.create_transaction()", "def send_transaction_and_return_status(self, transaction):\n self.net.send_tx(transaction)\n stati = []\n\n for status in self.net.tx_status_stream(transaction):\n stati.append(status)\n\n return stati", "def execute(self):\n self.logger.info(\"Executing '{0}' on {1}\".format(self.command, self.salt_target))\n return self.salt_client.cmd(*self.executor_args, full_return=True)", "def _query_transaction():\n # if not mode or mode not in mode_list:\n # raise ValidationError('Mode must be one of the following {}.'\n # .format(', '.join(mode_list)))\n\n tx_dict = \"gautham=awesome\"\n\n tendermint_host = 'localhost'\n tendermint_port = 26657\n endpoint = 'http://{}:{}/'.format(tendermint_host, tendermint_port)\n\n payload = {\n \"method\": \"abci_query\",\n \"jsonrpc\": \"2.0\",\n #\"params\": [None, tx_dict, None, None],\n \"params\": [None, encode_transaction(tx_dict), None, False],\n #\"params\": [None, encode_transaction(tx_dict), None],\n \"id\": str(uuid4())\n }\n\n # TODO: handle connection errors!\n print(payload)\n return requests.post(endpoint, json=payload)", "def execute_transaction(self, statement, mapset=None):\n connected = False\n if not self.connected:\n self.connect()\n connected = True\n\n sql_script = \"\"\n sql_script += \"BEGIN TRANSACTION;\\n\"\n sql_script += statement\n sql_script += \"END TRANSACTION;\"\n\n try:\n if self.dbmi.__name__ == \"sqlite3\":\n self.cursor.executescript(statement)\n else:\n 
self.cursor.execute(statement)\n self.connection.commit()\n except:\n if connected:\n self.close()\n self.msgr.error(_(\"Unable to execute transaction:\\n %(sql)s\" %\n {\"sql\": statement}))\n raise\n\n if connected:\n self.close()", "def start_transaction(self) -> \"Transaction\":\n response = super().start_transaction()\n self._set_transaction_id(response.id)\n return response", "def send_response(self, transaction):\n host, port = transaction.request.source\n key_token = hash(str(host) + str(port) + str(transaction.request.token))\n if (key_token in self._block2_receive and transaction.response.payload is not None) or \\\n (transaction.response.payload is not None and len(transaction.response.payload) > defines.MAX_PAYLOAD):\n if key_token in self._block2_receive:\n\n byte = self._block2_receive[key_token].byte\n size = self._block2_receive[key_token].size\n num = self._block2_receive[key_token].num\n\n else:\n byte = 0\n num = 0\n size = defines.MAX_PAYLOAD\n m = 1\n\n self._block2_receive[key_token] = BlockItem(byte, num, m, size)\n\n if len(transaction.response.payload) > (byte + size):\n m = 1\n else:\n m = 0\n transaction.response.payload = transaction.response.payload[byte:byte + size]\n del transaction.response.block2\n transaction.response.block2 = (num, m, size)\n\n self._block2_receive[key_token].byte += size\n self._block2_receive[key_token].num += 1\n if m == 0:\n del self._block2_receive[key_token]\n\n return transaction", "def execute_command(self, command: str, **parameters: str) -> etree.Element:\n # Prepare command\n # Need to specify no extra headers cause otherwise its funky\n # even though the headers param is supposed to be optional\n # (the ** splat causes the funkiness i think)\n prepare = self.shards_response(\n c=command, headers=None, mode=\"prepare\", **parameters\n )\n # response Returns <NATION id=\"name\"><SUCCESS/ERROR></SUCCESS/ERROR></NATION> format\n # we should probably throw an error if SUCCESS is not returned,\n # but too lazy / not sure what kind of error to throw\n # (should maybe create a custom tree?)\n node = as_xml(prepare.text)[0]\n if node.tag != \"SUCCESS\":\n raise ValueError(\n f\"Command 'command={command}' {parameters} was not succesful.\"\n f\" Got message: '{node.text}'\"\n )\n token = node.text if node.text else \"\"\n # Execute command using the returned token\n execute = self.shards_response(\n c=command, headers=None, mode=\"execute\", token=token, **parameters\n )\n return as_xml(execute.text)", "async def test_txn_get(self):\n self.stream.preset_response(transaction=Mocks.make_txns('1')[0])\n\n response = await self.get_assert_200('/transactions/1')\n self.stream.assert_valid_request_sent(transaction_id='1')\n\n self.assertNotIn('head', response)\n self.assert_has_valid_link(response, '/transactions/1')\n self.assertIn('data', response)\n self.assert_txns_well_formed(response['data'], '1')", "def send_transaction(self, signd_txn):\n return self.web3.eth.send_raw_transaction(signd_txn.rawTransaction).hex()", "def exec(self):\n if self._transaction_state in (None, \"watch\"):\n raise ValueError(\"EXEC without MULTI\")\n handler_list, self._transaction_state = self._transaction_state, None\n return self._command(b'EXEC', handler=handler_list)", "def send_raw_transaction(signed_tx):\n try:\n txid = subprocess.check_output([\"litecoin-cli\", \"sendrawtransaction\", signed_tx])\n except:\n sys.exit(1)\n return txid.strip()", "def transact(self, msg, timeout=-1):\n\n if msg.xid == None:\n msg.xid = util.gen_xid()\n\n 
self.logger.debug(\"Running transaction %d\" % msg.xid)\n\n with self.xid_cv:\n if self.xid:\n self.logger.error(\"Can only run one transaction at a time\")\n return (None, None)\n\n self.xid = msg.xid\n self.xid_response = None\n self.message_send(msg)\n\n self.logger.debug(\"Waiting for transaction %d\" % msg.xid)\n util.timed_wait(self.xid_cv, lambda: self.xid_response, timeout=timeout)\n\n if self.xid_response:\n (resp, pkt) = self.xid_response\n self.xid_response = None\n else:\n (resp, pkt) = (None, None)\n\n if resp is None:\n self.logger.warning(\"No response for xid \" + str(self.xid))\n return (resp, pkt)", "def get_transaction(tx):\n global INVOKE_COUNTER\n INVOKE_COUNTER = INVOKE_COUNTER + 1\n if INVOKE_COUNTER % 3 == 0:\n return \"\"\n else:\n raise_connection_error()", "def process_response(self, request, response):\r\n if transaction.is_managed():\r\n if transaction.is_dirty():\r\n transaction.commit()\r\n transaction.leave_transaction_management()\r\n return response", "def _assemble_and_send_request(self):\r\n # Fire off the query.\r\n response = self.client.service.processShipment(WebAuthenticationDetail=self.WebAuthenticationDetail,\r\n ClientDetail=self.ClientDetail,\r\n TransactionDetail=self.TransactionDetail,\r\n Version=self.VersionId,\r\n RequestedShipment=self.RequestedShipment)\r\n return response", "def _assemble_and_send_request(self):\r\n client = self.client\r\n # Fire off the query.\r\n response = client.service.deleteShipment(WebAuthenticationDetail=self.WebAuthenticationDetail,\r\n ClientDetail=self.ClientDetail,\r\n TransactionDetail=self.TransactionDetail,\r\n Version=self.VersionId,\r\n ShipTimestamp = datetime.now(), \r\n TrackingId=self.TrackingId,\r\n DeletionControl=self.DeletionControlType)\r\n\r\n return response", "def error(transaction, code): # pragma: no cover\n transaction.block_transfer = True\n transaction.response = Response()\n transaction.response.destination = transaction.request.source\n transaction.response.type = defines.Types[\"RST\"]\n transaction.response.token = transaction.request.token\n transaction.response.code = code\n return transaction", "def createrawtransaction(self, inputs, outputs):\n return self.proxy.createrawtransaction(inputs, outputs)", "def execute(self, command, block=True):\n\n if not self.is_authenticated:\n raise AuthenticationError\n request = self.request(Message.SERVERDATA_EXECCOMAND, unicode(command))\n if block:\n with self.response_to(request):\n pass\n return request", "def commit(self):\n token = self.transaction\n self.transaction = None\n if token:\n return self.client.commit(self.creds, token, self.environment)\n else:\n return False" ]
[ "0.6163789", "0.59890455", "0.57953054", "0.57152027", "0.56557715", "0.56381756", "0.556138", "0.55261993", "0.5462677", "0.54577535", "0.5392021", "0.538656", "0.5381371", "0.53764015", "0.5374454", "0.5368948", "0.5350154", "0.5341703", "0.5334163", "0.5331038", "0.53301734", "0.5319638", "0.52294856", "0.5200312", "0.5182125", "0.5176485", "0.51718616", "0.5163315", "0.5139663", "0.5133691" ]
0.6468085
0
Send a STUN message.
def send_stun(self, message, addr):
    logger.debug('%s > %s %s', self, addr, message)
    self.transport.sendto(bytes(message), addr)
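For illustration, the same serialise-and-send step with a plain UDP socket. The payload and address below are placeholders: the four bytes are only the start of a STUN Binding Request header, not a complete message.

```python
import socket

# Illustrative only: turn a message into bytes and hand it to a datagram
# socket, as transport.sendto(bytes(message), addr) does above.
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.sendto(b"\x00\x01\x00\x00", ("127.0.0.1", 3478))  # placeholder server
sock.close()
```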
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def send_message(stdscr, username=None):\n # Show the cursor and echo output.\n curses.curs_set(1)\n curses.echo()\n stdscr.clear()\n stdscr.refresh()\n if username is None:\n safe_put(stdscr, \"Recipient username: \", (0, 0))\n username = stdscr.getstr(0, 20)\n stdscr.clear()\n stdscr.refresh()\n tnu = taunet.users.by_name(username)\n if tnu == None:\n print(\"No such user. Known users: \" + \", \".join(sorted([u.name for u in taunet.users.all()])))\n return\n if not is_online(tnu):\n print(\"Couldn't connect to that user's host.\")\n return\n safe_put(stdscr, \"Message:\", (0, 0))\n message = stdscr.getstr(0, 9)\n stdscr.clear()\n stdscr.refresh()\n ship_tnm(tnu, taunet.TauNetMessage().outgoing(tnu.name, message))", "def ship_tnm(tnu, tnm):\n user_string = \"{user} ({host}:{port})\".format(user=tnu.name, host=tnu.host, port=str(tnu.port))\n sender = socket.socket()\n sender.settimeout(1)\n try:\n sender.connect((tnu.host, tnu.port))\n sender.send(tnm.ciphertext)\n sender.shutdown(socket.SHUT_RDWR)\n except (socket.error, socket.timeout) as e:\n # Commented out to save it for the message queue later.\n # print(\"Unable to reach {user}: {reason}\".format(user=user_string, reason=str(e)))\n if tnm.ciphertext:\n # Only log for real messages, not status checks\n logger.error(\"Failed to send a message to {user}: {reason}\".format(user=user_string, reason=str(e)))\n sender.close()\n return False\n else:\n if tnm.ciphertext:\n logger.info(\"Sent a message to {user}.\".format(user=user_string))\n filesystem.write_message(tnu.name, tnm)\n sender.close()\n return True", "def sendPDU(self, pdu):\n # this operation does not verify the contents of the DU\n self.send(pdu.getBufferString())", "def sendPDU(self, pdu):\n # this operation does not verify the contents of the PDU\n self.send(pdu.getBufferString())", "def sendMessage(self):\n #print('sendMessage\\r')\n self.pub.publish(Twist(linear=self.linearVector, angular=self.angularVector))", "def transmit(self, msg):\r\n # send our message to the client\r\n self.conn.sendall(msg)", "def _send_syn(self):\n syn_packet = packet.Packet.from_data(\n self._get_next_sequence_number(),\n self.dest_addr,\n self.own_addr,\n ack=self._next_expected_seqnum,\n syn=True\n )\n self._schedule_send_in_order(syn_packet, constants.PACKET_TIMEOUT)", "def _send(self, data: str):\n try:\n self._sock.sendto(data.encode(\"ascii\"), self._addr)\n except (socket.error, RuntimeError):\n # No time for love, Dr. 
Jones!\n pass", "def send_sus_list(key):\n while True:\n if not receive_sus():\n signature = key.create_signature(json.dumps(SUS) + '2')\n\n pack_send = Ether(dst='98:98:98:22:22:22') / \\\n IP(dst='172.16.104.16') / \\\n UDP(dport=2223, sport=2223) / \\\n DB(len_sign=len(signature), cmd=2,\n send_num=5, param=signature + json.dumps(SUS).encode())\n\n conf.iface = 'eth0'\n sendp(pack_send)", "async def send_msg(self, message: str) -> None:\n await self.socket.sendall(message.encode())", "def sendMessage(self):\n print(\"sendMessage\")\n self.pub.publish(Twist(linear=self.linearVector, angular=self.angularVector))", "def sendMessage(self):\n print('sendMessage')\n self.pub.publish(Twist(linear=self.linearVector, angular=self.angularVector))", "def send(message):\n\tmessage = message.encode()\n\tconn.send(message)", "async def send(self, message):", "def _send(self, message):\n self.sock.sendall('%s\\n' % message)", "def send(self):\n if(self.target):\n try:\n self.message = self.message +\"\\r\\n\"\n self.target[0].send(self.message)\n except socket.error, err:\n print err", "async def onSend(self, stream, stanza):\n pass", "def send(self,message):\n self.transport.write(message, (\"228.0.0.5\", udpbport))", "def send(self, s):\n self.port.write(bytes(s, 'latin-1'))\n sys.stdout.write(s)", "def sendShout(self, user, message):\n pass", "def send_command(command):\n\tmessage = json.dumps (command)\n\tbottle = local_out_port.prepare()\n\tbottle.clear()\n\tbottle.addString(message)\n\tlocal_out_port.write(False)", "def send_udp_message(socket, address, type, data=None):\n if data:\n data = json.dumps({\"type\": type, \"payload\": data})\n else:\n data = json.dumps({\"type\": type})\n socket.sendto(data.encode('utf-8'), address)", "def send_as_server(self, command, msg):\n self._write(f':{self.server.name} {command} {msg}')", "def _send_via_transport(self, message):\n\n self.message_interface.send(message)", "def sendCmdAnsw(self, message):\n LOG_INFO(\"EDEN.Server.sendCmdAnsw\")\n pdu = EGSE.EDENPDU.PDU()\n pdu.pduType = EGSE.EDENPDU.PDU_TYPE_CMD\n pdu.subType = EGSE.EDENPDU.SUB_TYPE_ANSW\n pdu.setDataField(message)\n self.sendPDU(pdu)", "def _send(self, p: str, s) -> bytes:\n\n b = self._to_netstring(p.encode('ascii'))\n\n failure = s.sendall(b)\n if failure is not None:\n self._send(p, s)\n return b", "def send(self, data):\n if UdpTransport._sendNeedsStr:\n # This version of sendall can't use a memoryview, etc., so convert.\n self._socket.sendto(str(bytearray(data)), self._address)\n else:\n try:\n self._socket.sendto(data, self._address)\n except TypeError:\n # Assume we need to convert to a str.\n UdpTransport._sendNeedsStr = True\n self.send(data)", "def send(msg): # event is passed by binders.\n # print(\"i sended: \" + msg)\n msg = msg + \";\"\n client_socket.send(bytes(msg, \"utf8\"))", "def send(self, message):\n self.sock.send(message)", "def send_message(self, message):\r\n\t\tself.__tcpSocket.write(message.encode('utf8'))" ]
[ "0.61616313", "0.6152894", "0.58543134", "0.5825989", "0.5467833", "0.5367065", "0.5363118", "0.53626806", "0.5354391", "0.5348102", "0.53142506", "0.5306756", "0.52913076", "0.5251448", "0.5242878", "0.5239616", "0.52390784", "0.52369165", "0.522766", "0.5224386", "0.5217544", "0.52056706", "0.5201497", "0.5194779", "0.519334", "0.51905304", "0.51869243", "0.5185079", "0.5177573", "0.51641536" ]
0.72028255
0
Close the transport. After the TURN allocation has been released, the protocol's `connection_lost()` method will be called with None as its argument.
def close(self):
    asyncio.ensure_future(self.__inner_protocol.release())
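`close()` is synchronous, so it cannot await the release coroutine directly; it schedules it on the event loop instead. A runnable sketch of that pattern (class and method names hypothetical):

```python
import asyncio

# Sketch: a synchronous close() that schedules an async cleanup task
# rather than awaiting it, mirroring the snippet above.
class Transport:
    async def _release(self):
        print("released")

    def close(self):  # may be called from non-async code
        asyncio.ensure_future(self._release())

async def main():
    Transport().close()
    await asyncio.sleep(0)  # give the scheduled task a chance to run

asyncio.run(main())
```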
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def close(self) -> None:\n self.transport.close()", "def close(self):\n if self.transport.isOpen():\n self.transport.close()", "def close(self) -> None:\n if self.client is None:\n return\n\n if logger is not None:\n # If called from __del__(), module variables may no longer exist.\n logger.debug(f\"Closing Thrift transport to {self.host}:{self.port}\")\n\n self.client.close()\n self.client = None", "def close(self):\n with self._transport_lock:\n self._transport.close()\n\n with self._store_lock:\n self._packet_store.clear_all()", "async def _close_transport(transport: WebsocketsTransport):\n is_closing = transport.close_task is not None\n already_closed = transport.websocket is None\n\n if not already_closed and not is_closing:\n logger.info(\"Closing websocket connection.\")\n await transport.close()", "def close_device(self):\n raise NotImplementedError(\"implement in derived transport class\")", "def close(self):\n try:\n self.tunnel.close()\n self.transport.close()\n except Exception as exp:\n logging.error('Unable to close the device handle: %s' % exp)\n raise TobyException('Unable to close the device handle: %s' % exp)\n return True", "def close(self):\n self.cmd_channel.debug(\"PassiveDTP.close()\")\n asyncore.dispatcher.close(self)", "async def close(self):\n self.debug(\"Closing connection\")\n self.real_local_key = self.local_key\n if self.heartbeater is not None:\n self.heartbeater.cancel()\n try:\n await self.heartbeater\n except asyncio.CancelledError:\n pass\n self.heartbeater = None\n if self.dispatcher is not None:\n self.dispatcher.abort()\n self.dispatcher = None\n if self.transport is not None:\n transport = self.transport\n self.transport = None\n transport.close()", "async def close(self, noreply_wait = True):\n if hasattr(self._tl, \"conn\"):\n if self._tl.conn.is_open():\n await self._tl.conn.close(noreply_wait)\n del self._tl.conn", "def close(self):\n self._client.close()\n self._transport.close()", "def close(self):\n self._relay.close()\n super(EventRouterHTTPC, self).close()\n return", "def close(self):\n if self._closing or self._handle.closed:\n return\n elif self._protocol is None:\n raise TransportError('transport not started')\n # If the write buffer is empty, close now. 
Otherwise defer to\n # _on_write_complete that will close when the buffer is empty.\n if self._write_buffer_size == 0:\n self._handle.close(self._on_close_complete)\n assert self._handle.closed\n else:\n self._closing = True", "def close(self) -> None:\n self.transport._data_channel_close(self)", "def close(self):\n try:\n if not self.telnet:\n return\n self.telnet.close()\n self.telnet = None\n except (ConnectionError, Exception):\n error_message = \"Remote device is disconnected abnormally\"\n LOG.error(error_message, error_no=\"00401\")", "def close(self):\n if self.channel_is_closed:\n raise StandardError(\"channel is closed\")\n try:\n self.channel.close()\n except GtpTransportError, e:\n raise GtpTransportError(\"error closing %s:\\n%s\" % (self.name, e))\n self.channel_is_closed = True", "def _end_attacker_transport(self, transport_pair: TransportPair):\n try:\n transport_pair.attacker_transport.close()\n except Exception as exc:\n logger.exception(\"Failed to close attacker transport\", exc_info=exc)", "def disconnect(self):\r\n self._manual_disconnect = True\r\n self.transport.close()", "def close(self):\n self.cmd_channel.debug(\"ActiveDTP.close()\")\n asyncore.dispatcher.close(self)", "async def close(self) -> None:\n\n # for conn_handle in self._conn_handles:\n # await agent.agent_close_connection(conn_handle)\n # self._conn_handles.clear()\n await wallet.close_wallet(self.wallet_handle)", "def shutdown(self):\n ts.client.transport.close()", "def close_relay():\n \n ser_relay.close()", "def close(self) -> None:\n self.relay(\"close\")()", "def close(self):\n self.context['socket'].close()", "def close(self):\n self.log.debug(\"Stopping...\")\n total = len(TransportTypes)\n cnt = 0\n for trsp in self.transports:\n try:\n if trsp is not None:\n trsp.close()\n cnt += 1\n except Exception as err:\n self.log.warning(\"Exception when stopping transport %s: %r\",\n transport, err)\n if self.mesgloop:\n self.mesgloop.stop()\n self.started = False\n self.initialized = False\n self.log.debug(\"%d/%d transports stopped.\", cnt, total)", "def _close(self):\n \n # Close device\n logger.debug(\"%s: TCP port closing started...\" % \\\n self.__class__.__name__)\n self._router = None\n self._platform = None\n self._tcp_socket.close()\n logger.debug(\"%s: ...TCP port closing complete.\" % \\\n self.__class__.__name__)", "def close(self):\n self.connection.close()\n print(\"Connection on port \" + str(self.port) + \" closed.\")", "def close(self):\n if self.socket is None or self._is_connected is False:\n return\n\n try:\n self.socket.shutdown(socket.SHUT_RDWR)\n except socket.error:\n pass\n\n self.socket.close()\n self.socket = None\n self._is_connected = False\n # Evt17: Transport connection closed\n self.event_queue.put('Evt17')", "def close(self) -> None:\n self._socket.close()", "def close(self):\n if self.sftp_open:\n self.sftp.close()\n self.sftp_open = False\n self.transport.close()" ]
[ "0.7232531", "0.69609827", "0.6923589", "0.6796147", "0.6639552", "0.65796196", "0.64923614", "0.6464494", "0.6451994", "0.6403425", "0.6360436", "0.6339348", "0.63273036", "0.6292693", "0.6274413", "0.6261059", "0.6217416", "0.6202988", "0.6185388", "0.615356", "0.6153044", "0.61515445", "0.6146947", "0.6133199", "0.61212647", "0.6098568", "0.609666", "0.6095042", "0.6019262", "0.5996849" ]
0.71569246
1
Sends the `data` bytes to the remote peer given `addr`. This will bind a TURN channel as necessary.
def sendto(self, data, addr):
    asyncio.ensure_future(self.__inner_protocol.send_data(data, addr))
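Same fire-and-forget shape as `close()` above: the datagram API is synchronous, so the awaitable channel-data send is scheduled rather than awaited. A sketch with hypothetical names:

```python
import asyncio

# Hedged sketch: sendto() returns immediately; the actual relaying
# (which may first bind a TURN channel) runs as a scheduled task.
class ChannelTransport:
    async def _send_data(self, data, addr):
        print(f"would relay {len(data)} bytes to {addr}")

    def sendto(self, data, addr):
        asyncio.ensure_future(self._send_data(data, addr))

async def main():
    ChannelTransport().sendto(b"hello", ("192.0.2.1", 5000))
    await asyncio.sleep(0)

asyncio.run(main())
```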
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sendto(self, data, addr=None):\n if not isinstance(data, (bytes, bytearray, memoryview)):\n raise TypeError(\"data: expecting a bytes-like instance, got {!r}\"\n .format(type(data).__name__))\n self._check_status()\n if not self._writable:\n raise TransportError('transport is not writable')\n try:\n self._handle.send(addr, data, self._on_send_complete)\n except pyuv.error.UVError as e:\n error = TransportError.from_errno(e.args[0])\n # Try to discern between permanent and transient errors. Permanent\n # errors close the transport. This list is very likely not complete.\n if error.errno != pyuv.errno.UV_EBADF:\n raise error\n self._error = error\n self.abort()\n self._write_buffer_size += 1\n self._maybe_pause_protocol()", "def send_data_xbee(data, dest_addr=None):\n try:\n if dest_addr is None:\n device.send_data_broadcast(data)\n else:\n device.send_data(RemoteXBeeDevice(device, XBee64BitAddress.from_hex_string(dest_addr)), data)\n return True\n except XBeeException as e:\n print_error(\"Could not send data: {}\".format(str(e)))\n\n return False", "def send_data(self, data):\r\n try:\r\n self.sock.sendto(data, self.addr)\r\n except Exception:\r\n print(\"Cant't send a package\")", "def sendMessage(self, data, (host, port)):\n\t\tdef send_to_ip(IPaddrs):\n\t\t\tself.transport.write(data, (IPaddrs, port))\n\t\t\tself.resolvedAdrs[host] = IPaddrs\n\t\ttry:\n\t\t\tself.transport.write(data, (self.resolvedAdrs[host], port))\n\t\texcept KeyError, e:\n\t\t\t# Resolve and call the send function\n\t\t\treactor.resolve(host).addCallback(send_to_ip)", "def send(self, data):\n print \"Attempting to send packet of size %d to %s\" % (len(data), self.hostname)\n self.sock.sendto(data, (self.dst_ip, 0))", "def SendPacket(self, endpoint_addr, data):\n _, _, pipe = self._ep_fds[endpoint_addr]\n pipe.send(data)", "def true_sendto(conn, data, address, special=False):\n if special:\n data = pickle.dumps(data)\n else:\n data = data.encode()\n conn.sendto(key.encrypt(data), (address[0], address[1])) # (ip_dst, dport)", "def sendto(self, data: bytes, address: Tuple) -> int:\n ...", "def send_stun(self, message, addr):\n logger.debug('%s > %s %s', self, addr, message)\n self.transport.sendto(bytes(message), addr)", "def send(self,data,address,isBinary=False):\n if DEBUG: print \"In class Server, function, send\"\n #dest = self.resolve_address(address)\n peer_to_send_message = None\n #or uid in self.clientManager.peer_servers:\n #pdb.set_trace()\n #peer_server = self.clientManager.connections[uid]\n #if peer_server.ip == address:\n #peer_to_send_message = peer_server\n #pdb.set_trace()\n return self.connection_manager.send(data,address,isBinary)\n \n #for client in self.clientManager.connections.keys():\n #pdb.set_trace()\n #self.clientManager.connections[client].sendMessage(\"------From RBAnalysis---Hi\")", "def send_data(self, data):\n self._transport.write(data)", "def send_bytes(self, data):\n raw_data = bytes(data)\n\n attempts = 0\n while True:\n try:\n self._sock.sendall(raw_data)\n return\n except (socket.timeout, BrokenPipeError):\n print('in socket exeption....')\n if (attempts < self._retries):\n attempts += 1\n self._sock.close()\n self._sock.connect((self._ip, self._port))\n else:\n raise", "def send_data_on_udp(ip_address, port, data):\n sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n try:\n sock.sendto(data.encode(\"utf-8\"), (ip_address, port))\n except: # noqa\n LOGGER.exception(\"Failed to send trace to X-Ray Daemon\")\n finally:\n sock.close()", "def _send(self, data: bytes):\n 
if self._pre_send is not None:\n data = self._pre_send(data)\n if data is None:\n return\n\n self._transport.sendto(data, self._peer)", "def send(self, data):\n self.sock.send(data)", "def send(self, data):\n self.sock.send(data)", "def _send_frame(self, dest, data):\n self._log.debug(\"write {} to {}\".format(len(data), dest)) \n # send to endpoint\n self._conn.sendto(data, (dest,0))", "def send(self, data):\n self.socket.sendall(data)", "def send(self, data):\n print(\"sending: {}\".format(data))\n self.forward_in_sock.send_string(\"{}\\n\".format(data))", "def send_data(self, data, data_size, acpi, dest_group_addr):\n\n # -----------------------------------\n # -> (0) Establish connection\n # -----------------------------------\n\n self.establish_connection(data, acpi, data_size, dest_group_addr)\n\n # -----------------------------------\n # -> (1) Send tunneling ack request\n # -----------------------------------\n\n self.tunneling_ack_request()\n\n print('-----------------------------------')\n\n # -----------------------------------\n # -> (2) Disconnect request\n # -----------------------------------\n\n disconnect_resp_object = self.disconnect_request()\n\n # <- Retrieving data from disconnect request\n disconnect_channel_id = disconnect_resp_object.channel_id\n disconnect_status = disconnect_resp_object.status\n print('Channel ID: ', disconnect_channel_id)\n print('Channel status: ', disconnect_status)\n\n print('-----------------------------------')", "def __send_message(self, data):\n if RemotePlayerProxy.DEBUG:\n print(f'[RPP] [SEND] -> [{self.name}]: {data}')\n\n try:\n self.__socket.sendall(bytes(data, 'ascii'))\n except Exception as e:\n if RemotePlayerProxy.DEBUG:\n print(e)", "def send(ip=None, port=0, data=None):\n\tsock = socket.socket(socket.AF_INET, # Internet\n socket.SOCK_DGRAM) # UDP\n\n\tsock.sendto(data, (ip, port))\n\n\treturn True", "def write(self, data):\n with self._write_lock:\n self.socket.send(data)", "def send(self, address, data):\n if not self.__closed:\n # get the socket to send to\n sock = self.__connected[address]\n # add the socket and data to the 'to_send' list\n self.__to_send.append((sock, data))\n else:\n print \"ERROR: The Server Is Closed.\"", "def send_bytes(self, data: bytes) -> None:", "def send_to_data_channel(self, sock, data):\n resp = sock.send(data)\n print_debug(resp)\n self.logger.log(\"Sent: %s\" % data)\n return resp", "def send(self, dst, data):\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n\n for tries in range(self.MAX_RETRIES):\n try:\n sock.connect((dst, self.port))\n sock.setblocking(0)\n sock.sendall(data + \"\\n\")\n except EnvironmentError as exc :\n if exc.errno == errno.ECONNREFUSED:\n time.sleep(5)\n else:\n raise\n else:\n break\n finally:\n sock.close()\n break", "def send(self, data):\n if UdpTransport._sendNeedsStr:\n # This version of sendall can't use a memoryview, etc., so convert.\n self._socket.sendto(str(bytearray(data)), self._address)\n else:\n try:\n self._socket.sendto(data, self._address)\n except TypeError:\n # Assume we need to convert to a str.\n UdpTransport._sendNeedsStr = True\n self.send(data)", "def _send(self, data: str):\n try:\n self._sock.sendto(data.encode(\"ascii\"), self._addr)\n except (socket.error, RuntimeError):\n # No time for love, Dr. 
Jones!\n pass", "def sendData(data):\n\n\tslen = struct.pack('<I', len(data))\n\t#connSock.sendall(slen + data)\n\tconnSock.sendall(slen)\n\tconnSock.sendall(data)\n\n\treturn 0" ]
[ "0.69670016", "0.66431624", "0.63675064", "0.63365537", "0.6225546", "0.6202067", "0.61579084", "0.61440784", "0.60198617", "0.6018413", "0.59791154", "0.5967798", "0.5965921", "0.596309", "0.59398365", "0.59398365", "0.5916635", "0.5850885", "0.58379453", "0.5826466", "0.5824075", "0.58167875", "0.58058065", "0.5797865", "0.5775475", "0.57735354", "0.5717557", "0.5717426", "0.5697913", "0.5683922" ]
0.7759187
0
Get host info from request META
def get_host(req):
    return req.META["HTTP_HOST"].split(":")[0]
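A quick check of the split, with a plain dict standing in for a Django `request.META`. Note that Django's own `request.get_host()` is the more robust alternative, since it also validates the header against `ALLOWED_HOSTS`.

```python
# Stand-in dict for request.META; the split drops an optional :port.
meta = {"HTTP_HOST": "example.com:8000"}
host = meta["HTTP_HOST"].split(":")[0]
assert host == "example.com"
```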
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getHostInfo():", "def get_host(request):\n return request.META[\"HTTP_HOST\"].split(\":\")[0]", "def _get_host(request):\n return request.headers.get('X-Forwarded-Host', request.headers['Host'])", "def getHost():", "def getHost():", "def get_host_info(hass: HomeAssistant) -> dict[str, Any] | None:\n return hass.data.get(DATA_HOST_INFO)", "def getRequestHostname():", "def request_host(request):\n host = urlsplit(request.url).hostname\n if host == \"\":\n host = request.get_header(\"Host\", \"\").partition(\":\")[0]\n\n # remove port, if present\n return host.lower()", "def get_headers(request: Dict[str, str]) -> Tuple[str, Dict[str, str]]:\n host = None\n headers = {}\n for name, value in request['META'].items():\n if name == \"HTTP_HOST\":\n host = value\n continue # comment to preserve host header, but eventual output contains host twice.\n if name.startswith('HTTP_'):\n headers[convert_header_names(name[5:])] = value.replace('\"', r'\\\"')\n assert host is not None, \"HTTP_HOST not found in request headers.\"\n return host, headers", "def getHost(self):\n host = self.url[self.host_head:self.host_tail]\n return host", "def host_info(self, host):\n\n endpoint = '/Domain/Host/Info'\n\n params = {\n 'Host' : host,\n }\n \n response = self.__perform_get_request(endpoint, params)\n\n if response.status_code == 200:\n parsed_response = response.json()\n return parsed_response", "def host(self):\r\n return self._environ.get('HTTP_HOST', '')", "def get_host(self):\r\n return self.host", "def get_host_name(self):\n if self.have_metadata is False:\n self._get_metadata()\n self.have_metadata = True\n\n try:\n return self.keyinfo['tracking_id'].attrs['hostname']\n except:\n return None\n\n if self.have_metadata is False:\n self._get_metadata()\n self.have_metadata = True", "def host(self):\n return self._environ.get('HTTP_HOST', '')", "def get_host_info(self, args, get_all=False):\n return None", "def process_request(self, request):\n\n try:\n request.get_host()\n except DisallowedHost:\n if not request.META['HTTP_HOST'] == '45.56.115.140':\n logger.critical(request.META)", "def eff_request_host(request):\n erhn = req_host = request_host(request)\n if req_host.find(\".\") == -1 and not cookiejar.IPV4_RE.search(req_host):\n erhn = req_host + \".local\"\n return req_host, erhn", "def getRemoteHost():", "def head_host(self) -> str:\n return self.head_args.host if self.head_args else None", "def host(self):\n if self.url.startswith(\"dns:\"):\n return self.url[4:]\n else:\n return urlparse(self.url).hostname", "def getHost(self):\n\n\t\treturn HOST", "def getHost(self):\n return self._host", "def get_host(self):\n return self.host", "def get_remote_host(request):\n \n return utilities.get_remote_host(request)", "def get_current_request_hostname():\r\n hostname = None\r\n request = get_current_request()\r\n if request:\r\n hostname = request.META.get('HTTP_HOST')\r\n\r\n return hostname", "def gethost(self):\n return self.__host", "def get_hostname(self):\n module = 'hostname'\n method = 'GET'\n response = self.axapi_call(module, method)\n hostname = response.json()['hostname']['value']\n print(self.device + ' Device hostname is: ' + hostname)", "def getHost(self):\n return self._host", "def get_host(self):\n self.server_name = subprocess.check_output(['hostname', '-s']).split('\\n')[0]\n self.ip_addr = subprocess.check_output(['hostname', '-i']).split('\\n')[0]\n return self.server_name, self.ip_addr" ]
[ "0.7799314", "0.71128947", "0.7056515", "0.7014446", "0.7014446", "0.69996834", "0.68279314", "0.6817718", "0.6813407", "0.6598185", "0.6563933", "0.64535975", "0.64207745", "0.6417893", "0.6402575", "0.63990295", "0.6384519", "0.6373008", "0.63592684", "0.63389564", "0.63380617", "0.6336828", "0.62895244", "0.6244936", "0.62380135", "0.62244284", "0.61962706", "0.6193261", "0.6187632", "0.6143561" ]
0.75950074
1
Read 6dcmd txt files to numpy array.
def read_file(self, path, is_6dcmd=True):
    if not os.path.isfile(path):
        raise ValueError(f'Error: File not exist! {path}')
    data = list()
    txtFile = open(path)
    if is_6dcmd:
        cmd = list()
        for row in txtFile:
            row = row.split()
            data.append([float(row[2]), float(row[3]), float(row[4]),
                         float(row[5]), float(row[6]), float(row[7])])
            cmd.append([row[0], row[1], row[8], row[9]])
        return np.array(data), np.array(cmd)
    else:
        for row in txtFile:
            data.append([float(i) for i in row.split()])
        return np.array(data)
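From the indexing above, the assumed 6dcmd row layout is: two leading command tokens, six floats (the 6-DOF pose), then two trailing tokens. A round-trip sketch with a made-up row (the row contents are purely illustrative):

```python
import numpy as np

# Hypothetical 6dcmd row: tokens 0-1 and 8-9 are command fields,
# tokens 2-7 are the six pose floats, matching the reader above.
rows = ["MOVL P1 0.0 1.0 2.0 0.5 0.5 0.5 V=100 Z=0".split()]
data = np.array([[float(v) for v in r[2:8]] for r in rows])
cmd = np.array([[r[0], r[1], r[8], r[9]] for r in rows])
print(data.shape, cmd.shape)  # (1, 6) (1, 4)
```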
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_txt_file(txtfile):\n array = np.genfromtxt(txtfile)\n return array", "def _parse_txt(path, n_channels):\n f = open(path)\n lines = f.readlines()\n f.close()\n\n geom = np.zeros((0, 2))\n\n for i, line in zip(range(n_channels), lines):\n line = line.replace('\\r', '')\n line = line.replace('\\n', '')\n row = line.split(' ')\n geom = np.vstack((geom, row[:2])).astype('float')\n\n return geom", "def read_arrays(path, delimiter=','):\n arrays = np.genfromtxt(path, delimiter=delimiter)\n arrays = arrays[1:]\n arrays = arrays[:, 4]\n return arrays", "def read_data(self, path):\n if self.data_format == 'twenty': \n length = 20\n else: raise ValueError(\"self.data_format = '%s' unknown.\" % \n self.data_format)\n data = []\n with open(path,'r') as f:\n for line in f:\n data.append([float(line[k:(k + length)]) for k in range(\n 0, len(line.strip('\\n')),length)])\n return np.array(data)", "def load_text(path):\n\n with open(path) as f:\n read_text = f.read().splitlines()\n \n return np.array(read_text)", "def read_data(path, d=','):\r\n arr = numpy.genfromtxt(path, delimiter=d, dtype=None)\r\n length = len(arr)\r\n x = numpy.zeros(shape=(length, 2))\r\n t = numpy.zeros(length, dtype=int)\r\n for i, (x1, x2, tv) in enumerate(arr):\r\n x[i, 0] = x1\r\n x[i, 1] = x2\r\n t[i] = int(tv)\r\n return x, t", "def load_txt(file_path):\n lines = load_lines(file_path)\n\n if 'E' in lines[0]:\n dtype = np.float32\n else:\n dtype = np.int32\n\n data = list(map(str.split, lines))\n array = np.array(data, dtype=dtype)\n return array", "def read_file(self,file_name):\r\n data = np.genfromtxt(file_name)\r\n return data;", "def numpy_read_features(path):\n import numpy\n # read table as a structured array (each row is a tuple)\n feature_array = numpy.genfromtxt(path, delimiter='\\t', names=True, dtype=None)\n source = feature_array['source']\n target = feature_array['target']\n status = feature_array['status']\n feature_names = numpy.array(feature_array.dtype.names[3: ])\n features = feature_array[feature_names]\n # convert from structured array to normal ndarray\n features = features.view((numpy.float, len(features.dtype.names)))\n return source, target, status, features, feature_names", "def get_data():\n return np.genfromtxt(FILENAME, delimiter=',', skip_header=1)", "def read_data(filepath, d = ','):\n return np.genfromtxt(filepath, delimiter=d, dtype=None)", "def read_positions():\n return np.genfromtxt(\"POSITIONS.OUT\").transpose()", "def read_data(path):\n fnames = sorted([f for f in glob.glob(os.path.join(path, 'pos', '*.txt'))])\n data = [(1, open(f).readlines()[0]) for f in sorted(fnames)]\n fnames = sorted([f for f in glob.glob(os.path.join(path, 'neg', '*.txt'))])\n data += [(0, open(f).readlines()[0]) for f in sorted(fnames)]\n data = sorted(data, key=lambda x: x[1])\n return np.array([d[1] for d in data]), np.array([d[0] for d in data])", "def read_data(path):\n fnames = sorted([f for f in glob.glob(os.path.join(path, 'pos', '*.txt'))])\n data = [(1, open(f).readlines()[0]) for f in sorted(fnames)]\n fnames = sorted([f for f in glob.glob(os.path.join(path, 'neg', '*.txt'))])\n data += [(0, open(f).readlines()[0]) for f in sorted(fnames)]\n data = sorted(data, key=lambda x: x[1])\n return np.array([d[1] for d in data]), np.array([d[0] for d in data])", "def textread(filepath):\n return np.array(pd.read_csv(filepath, \n sep = \"\\s+|\\t+|\\s+\\t+|\\t+\\s+\",\n header=None,\n comment='#',\n engine='python'))", "def cast_txt_to_numpy(iuput_file):\n # Load the txt file\n with open(iuput_file, 
'r') as tmpfile:\n lines = tmpfile.readlines()\n\n # Restore the numpy array\n holder = []\n for line in lines:\n holder.append([float(x) for x in line.split(' ')])\n\n # Construct the numpy array\n holder = np.array(holder)\n\n return holder", "def readin():\r\n nodes = np.loadtxt('Vnodes.txt', ndmin=2)\r\n mats = np.loadtxt('Vmater.txt', ndmin=2)\r\n elements = np.loadtxt('Veles.txt', ndmin=2)\r\n loads = np.loadtxt('Vloads.txt', ndmin=2)\r\n return nodes, mats, elements, loads", "def loadtxt(filename):\n txt = []\n with open(filename, \"r\") as f:\n for line in f:\n txt.append(line.strip())\n return np.asarray(txt)", "def read_file(path_file):\n with open(path_file, 'r') as f:\n L = f.readlines()\n if len(L[0]) == 9:\n #Y file\n matrix = np.zeros(len(L)-1)\n for index, l in enumerate(L):\n if index > 0:\n matrix[index-1] = 2*int(l.split(',')[1])-1\n elif len(L[0]) == 7:\n #X file\n matrix = np.chararray((len(L)-1,100))\n for index, l in enumerate(L):\n if index > 0:\n matrix[index-1,:] = list(l.split(',')[1][:-2])\n elif len(L[0]) > 100:\n #X_mat100 file\n matrix = np.zeros((len(L),100))\n for index, l in enumerate(L):\n matrix[index, :] = list(map(float, l.split(\" \")))\n else:\n assert('ERROR')\n return(matrix)", "def load_data(file_to_read):\n\n data = np.recfromtxt(file_to_read)\n data = np.asarray(data)\n\n return data", "def read_mhd_and_raw(path, numpyFlag=True):\n img = sitk.ReadImage(path)\n if not numpyFlag:\n return img\n\n nda = sitk.GetArrayFromImage(img) # (img(x,y,z)->numpyArray(z,y,x))\n return nda", "def read_file(self, fullname):\n\n data = np.genfromtxt(fullname, dtype=None, names=True, skip_header=0)\n return data", "def Read_RMCA_basic(Complete_Path):\n fid = open(Complete_Path,'r')\n S = []\n while 1: \n line = fid.readline()\n if line =='': \n break \n else :\n S.append(float(line))\n #R.append(float(line[27:-2]))\n return np.array(S)", "def read_pattern_file(file_path: str) -> np.ndarray:\n\n # Check if the example file exists\n if not os.path.isfile(file_path):\n return None\n\n rows = 0\n cols = 0\n with open(file_path) as f:\n for i, l in enumerate(f):\n if l[0] != \"!\":\n rows += 1\n if len(l) > cols:\n cols = len(l) - 1 # Exclude the end of line char from the column count\n\n grid = np.zeros((rows, cols), dtype=np.uint8)\n\n skip_rows = 0\n with open(file_path) as f:\n for j, line in enumerate(f):\n for k, c in enumerate(line):\n if c == \"!\" and k == 0:\n skip_rows += 1\n break\n elif c == \"O\":\n grid[j - skip_rows, k] = 1\n\n return grid", "def load_labels(path):\n with open(path, 'r', encoding='utf-8') as f:\n lines = f.readlines()\n labels = []\n for row_number, content in enumerate(lines):\n pair = re.split(r'[:\\s]+', content.strip(), maxsplit=1)\n #if len(pair) == 2 and pair[0].strip().isdigit():\n labels.append(np.array([int(pair[0].strip()),pair[1].strip()]))\n #else:\n # labels.append(pair[0].strip())\n return np.array(labels)", "def load_raw(fname):\n # Read all the data from the file\n ctd = []\n with open(fname) as ctdfile:\n \n for line in ctdfile:\n \n if (line.find('*') < 0) and (line.find('#') < 0):\n \n # This line contains data; parse the line\n entries = line.strip().split()\n # Convert data to float64\n entries = [np.float64(entries[i]) \n for i in range(len(entries))]\n # Append to list\n ctd.append(entries)\n \n # Return the raw data as an numpy array\n return np.array(ctd)", "def readData():\n\tN = 800\n\tD = 28*28\n\tX = np.zeros((N, D), dtype=np.uint8)\n\n\tf = open(\"data/a012_images.dat\", 'rb')\n\n\tfor i in range(0, 
N):\n\t\tX[i, :] = np.fromstring(f.read(D), dtype='uint8')\n\n\tf.close()\n\n\treturn X", "def _read_datafile(self,path):\n \tlabels, images = [], []\n \twith gzip.GzipFile(path) as f:\n \t for line in f:\n \t vals = line.strip().split()\n \t labels.append(float(vals[0]))\n \t images.append([float(val) for val in vals[1:]])\n \tlabels = np.array(labels, dtype=np.int32)\n \tlabels[labels == 10] = 0 # fix weird 0 labels\n \timages = np.array(images, dtype=np.float32).reshape(-1, 16, 16, 1)\n \timages = (images + 1) / 2\n \treturn images, labels", "def parse_data(fn):\n data = []\n with open(fn, \"rb\") as f:\n for line in f:\n if py_ver == 3:\n # Python 3 code in this block\n dline = \"\".join(filter(lambda char: char != '\"', line.decode())).split(\",\")\n else:\n # Python 2 code in this block\n dline = line.translate(None, '\"').split(\",\")\n \n if len(dline) == 11 and dline[0].isdigit():\n data.append([float(i) for i in dline])\n\n return np.array(data)", "def read_KNN_dataFile(file):\n A = np.genfromtxt(file)\n return A" ]
[ "0.68975633", "0.67013437", "0.66498756", "0.64822465", "0.6397323", "0.63771707", "0.631287", "0.6249159", "0.62446815", "0.6187702", "0.6152389", "0.609308", "0.60849917", "0.60849917", "0.60615635", "0.6027606", "0.6026791", "0.60097194", "0.5984443", "0.598411", "0.59762305", "0.59751314", "0.59745634", "0.5956971", "0.5948583", "0.5926429", "0.59162897", "0.58715236", "0.58698523", "0.5857067" ]
0.75817925
0
Saves the current ball state and puts all the general values in the simArgs dictionary; subclasses extend this with their own parameters.
def saveParameters(self):
    # save current scale values into the ball state for the current ball
    x = self.initialXScale.get()
    y = self.initialYScale.get()
    xVel = self.initialXVelScale.get()
    yVel = self.initialYVelScale.get()
    self.ballStates[self.currentBall] = [x, y, xVel, yVel]
    # set new currentBall if changed
    self.currentBall = int(self.ballSelector.get())
    # the states of all the balls to be simulated
    self.simArgs['balls'] = self.ballStates
    self.simArgs['playbackSpeed'] = self.playbackSpeedScale.get()
    self.simArgs['trace'] = self.toTrace.get()
    self.simArgs['friction'] = self.friction.get()
    # get number of balls from formation string
    self.simArgs['nBalls'] = self.nBalls
    # for s in self.numberOfBallsSelector.get().split():
    #     if s.isdigit():
    #         self.simArgs['nBalls'] = int(s)
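Stripped of the Tkinter widgets, the state being collected is just a dict. A minimal sketch of the resulting simArgs, with plain values standing in for the Scale/selector widget reads (all values made up):

```python
# Hedged sketch: what simArgs looks like after saveParameters().
ball_states = {0: [0.5, 0.5, 1.0, -1.0]}  # x, y, xVel, yVel per ball
sim_args = {
    "balls": ball_states,
    "playbackSpeed": 1.0,
    "trace": True,
    "friction": 0.0,
    "nBalls": len(ball_states),
}
print(sim_args)
```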
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def writeState(self, saveState: ghidra.framework.options.SaveState) -> None:\n ...", "def save_state(self):\n pass", "def saveGame(self) -> None:\n self.state[\"phase\"] = self._phase\n\n state_as_string = json.dumps(self.state)\n with open(self.save_location, \"w\") as File:\n File.write(state_as_string)", "def save_state(self) -> None:\n raise NotImplementedError(\"Save state is is not implemented.\")", "def saveState(self,filename=None):\n # For now we just use pickle for convenience. In the future, could use np.savez or HDF5 (or FITS)\n if filename is None:\n if self.statefile:\n filename = self.statefile\n else:\n filename = self.filename + '.cysolve.pkl'\n orig_statefile = self.statefile\n orig_ar = self.ar\n self.ar = None\n fh = open(filename,'w')\n cPickle.dump(self,fh,protocol=-1)\n fh.close()\n self.ar = orig_ar\n self.statefile = orig_statefile\n print \"Saved state in:\", filename", "def saveState(self) -> None:\n # TODO: Saves State\n pass", "def SaveProgramState(self, sess=None, global_step=None):\n pass", "def saveParameters(self):\n super(BuminTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = Buminovich.Buminovich(**self.simArgs)", "def savestate(self, state):\n pass", "def _save_state(self) -> None:\n state_file = self._get_state_file()\n logger.info(\"Saving state to %s\", state_file)\n\n data = {}\n data[\"version\"] = mopidy.__version__\n data[\"state\"] = CoreState(\n tracklist=self.tracklist._save_state(),\n history=self.history._save_state(),\n playback=self.playback._save_state(),\n mixer=self.mixer._save_state(),\n )\n storage.dump(state_file, data)\n logger.debug(\"Saving state done\")", "def save_state():\n logger.debug(\"called\")\n pwd_gate.save()\n preferences.save()\n shareBuffer.save()\n contacts.save()\n secrets.save()", "def save_snapshot(self,save_dir):\n if not os.path.exists(save_dir):\n os.mkdir(save_dir)\n keys=[\n 'dna_size',\n 'pop_size',\n 'cross_rate',\n 'mutate_rate',\n 'eta_c',\n 'eta_m',\n 'mp_size',\n 'elitism',\n 'generations',\n 'pop',\n 'fitness',\n 'fitness_rank',\n 'mp',\n 'offspring',\n 'start_state',\n 'iters'\n ]\n running_states={}\n for key in keys:\n assert hasattr(self,key)\n running_states[key]=getattr(self,key)\n th.save(running_states,os.path.join(save_dir,'state_gen%d'%(self.iters)) )", "def saveParameters(self):\n super(CircTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = circle.CircleTable(**self.simArgs)", "def saveParameters(self):\n super(LorentzTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = Lorentz.Lorentz(**self.simArgs)", "def _save_state(self, saver, session, data, checkpts_path):\n # Save variable state\n if checkpts_path:\n logging.info('Saving cotrain checkpoint at %s.', checkpts_path)\n saver.save(session, checkpts_path, write_meta_graph=False)\n\n # Save dataset state.\n if self.data_dir:\n logging.info('Saving self-labeled dataset backup.')\n data.save_state_to_file(self.data_dir)", "def saveParameters(self):\n super(LTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = 
Ltab.LTable(**self.simArgs)", "def saveState(self, name):\n return self.parameters()", "def save(self, *args, **kwargs):\n if self.state: self.state.save()", "def make_save(self):\n\t\tsave = {}\n\t\tsave['p'] = self.p\n\t\tsave['injail'] = self.injail.copy()\n\t\tsave['tile'] = self.tile.copy()\n\t\tsave['bal'] = self.bal.copy()\n\t\tsave['goojf'] = self.goojf.copy()\n\t\tsave['isalive'] = self.isalive.copy()\n\t\tsave['jailturn'] = self.jailturn.copy()\n\t\tsave['ownedby'] = self.ownedby.copy()\n\t\tsave['numhouse'] = self.numhouse.copy()\n\t\tsave['ismortgaged'] = self.ismortgaged.copy()\n\t\tsave['num'] = self.num\n\t\tsave['numalive'] = self.numalive\n\t\tsave['uid'] = self.uid.copy()\n\t\tsave['freeparkingsum'] = self.freeparkingsum\n\t\tself.autosave = save", "def _save(trainer, checkpoint_dir, state_dict_key_name, world_rank=None):\n\n # save current model parameters as a checkpoint\n makedir(checkpoint_dir)\n checkpoint_file_name = 'checkpoint{}.ortcp'.format('' if world_rank is None else str(world_rank))\n trainer.save_checkpoint(os.path.join(checkpoint_dir, checkpoint_file_name))\n state_dict = trainer.state_dict()\n with open(os.path.join(checkpoint_dir, state_dict_key_name+'.pkl'), \"wb\") as f:\n pickle.dump({state_dict_key_name : state_dict}, f)", "def saveState(self, name):\n import copy\n d = copy.deepcopy(self._params)\n d['pulses'] = None\n return self._params", "def save_to_checkpoint(self, chkpt):\n chkpt[self.name] = self.state_dict()", "def save():", "def save(self):\n data = (\n self.Joints,\n self.Links,\n self.joint_syms,\n self.global_syms,\n self.name,\n self.sym_prefix,\n )\n cloudpickle.dump(data, open(self.save_filename, \"wb\"))", "def saveParameters(self):\n super(RectTab,self).saveParameters()\n self.simArgs['width'] = self.width.get()\n self.simArgs['height'] = self.height.get()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = rect.RectTable(**self.simArgs)", "def save_state(self):\n\t\tself._history['time'].append(self.t)\n\t\tstate = np.array(self.x[np.newaxis,:,:])\n\t\tself._history['state'] = np.vstack([self._history['state'],state])", "def _save_state(self, config_path=\"\"):\n state = {\n 'output_dir': str(self.ui.lineEdit_output_path.text()),\n 'device': str(self.ui.comboBox_device.currentText()),\n 'event_bits': str(self.ui.lineEdit_data_bits.text()),\n 'freq': str(self.ui.lineEdit_pulse_freq.text()),\n 'labels': self.digital_labels,\n 'timestamp': self.ui.checkBox_timestamp.isChecked(),\n 'comm_interface': self.ui.checkBox_comm.isChecked(),\n 'analog_on': self.ui.checkBox_analog_channels.isChecked(),\n 'analog_channels': eval(str(self.ui.lineEdit_analog_channels.text())),\n 'analog_sample_rate': self.ui.comboBox_analog_freq.currentIndex(),\n 'analog_dtype': self.ui.comboBox_dtype.currentIndex(),\n 'analog_labels': self.analog_labels,\n 'counter_input_terminal': str(self.ui.comboBox_ci.currentText()),\n 'counter_input_on': self.ui.checkBox_ci.isChecked(),\n }\n if not config_path:\n config_path = LAST_SESSION\n with open(config_path, 'wb') as f:\n pickle.dump(state, f)", "def save():\n pass", "def save_states(self, checkpoint):\n raise NotImplementedError()", "def Save(self):\n if not self.simFilePath:\n path = self.PromptPathSaveAs()\n if not path: return\n else: self.simFilePath = path\n \n #Why bother doing new code if the cmd interface does it already\n if self.shell.interp != self.sim42interp:\n self.UseCommandInterface(True)\n 
self.shell.run('store %s' %self.simFilePath)\n #self.sim42interp.cmd.Store(self.simFilePath)\n\n self.SetTitle('Simulation --> ' + self.simFilePath)" ]
[ "0.6960438", "0.68173647", "0.6787115", "0.6763044", "0.67436576", "0.66671205", "0.6664326", "0.6580951", "0.6552863", "0.6498651", "0.64759964", "0.6463431", "0.6384361", "0.63266534", "0.6282891", "0.6272506", "0.6190858", "0.61732596", "0.6162837", "0.6148459", "0.6140278", "0.61367494", "0.61261654", "0.6104393", "0.61022663", "0.6098878", "0.6098092", "0.609427", "0.6091976", "0.6071356" ]
0.79439396
0
Saves parameters, generates the preview, and displays it on the canvas
def generatePreview(self):
    self.saveParameters()
    image = self.simulation.generatePreview()
    # convert pil image to a tkinter image
    self.photo = ImageTk.PhotoImage(image)
    # display image
    self.preview.create_image(0, 0, anchor='nw', image=self.photo)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw(canvas_result,automatic_save,manual_save):\r\n if canvas_result is not None and canvas_result.image_data is not None and (automatic_save or manual_save):\r\n # Receive the user's drawing with the dimensions: 512X512X4\r\n img_data = canvas_result.image_data\r\n # the user's drawing is in RGBA mode with floats instead of integers - convert to uint8 type and to RGB format\r\n im = Image.fromarray(img_data.astype(np.uint8)[:,:,:3]).convert('RGB') # convert to dimensions 512X512X3\r\n # initialize a copy of the user's drawing.\r\n add_bg = np.array(im, dtype='uint8') # initalize a copy\r\n # allow the user to know that the saving is in progress.\r\n with st.spinner(\"Saving image...\"):\r\n # the drawing is lack of the GauGAN background because streamlit_drawable_canvas library doesn't allow it yet.\r\n # Because of that the background will be added manually - o(n^3) at the moment.\r\n for i in range(add_bg.shape[0]):\r\n for j in range(add_bg.shape[1]):\r\n if list(add_bg[i,j]) != [0,0,0]: # if the current RGB value is not (0,0,0) (black) -\r\n for k in range(add_bg.shape[2]): # then make sure we don't have white values (255)\r\n if add_bg[i,j][k] == 255: # we will fill them with the relevant background color position\r\n add_bg[i,j][k] = colors['Sky'][k] if i<300 else colors['Sea'][k]\r\n else: # else, we do indeed have RGB value of (0,0,0), then replace it by its entirety to the relevant\r\n # background color.\r\n add_bg[i,j] = colors['Sky'] if i<300 else colors['Sea']\r\n\r\n # Create PIL object of the manually added background with drawing on the canvas\r\n add_bg = Image.fromarray(add_bg)\r\n # Assign the path where the file will be saved\r\n if not os.path.exists(\"tmp/\"):\r\n os.makedirs(\"tmp/\")\r\n file_path = f\"tmp/pic%s.png\"%(len(counter))\r\n # Increase the counter by adding dummy element into the counter list\r\n counter.append(0)\r\n # Save the drawing in PNG format\r\n\r\n add_bg.save(file_path, \"PNG\")\r\n st.success(\"Image saved successfully. 
Keep drawing!!\")", "def on_save_parameters(self):\n obj_points = self.get_object_points()\n cam_pos = self.get_camera_position()\n distortion = self.get_distortion_coeeficients()\n\n d = {\n 'object positions': obj_points,\n 'camera positions': cam_pos,\n 'distortion coefficients': distortion\n }\n\n jsn = json.dumps(d)\n h = hashlib.sha1(jsn.encode('utf-8')).hexdigest()\n fn = f'{h}.json'\n\n with open(fn, 'w') as f:\n f.write(jsn)\n\n self.statusBar().showMessage(f'Parameters have been save to {fn}.')\n self.param_file = fn", "def save(self, *args, **kwargs):\n super(Preview, self).save(*args, **kwargs)\n self.use_effect()", "def update_display(self):\n self.lick_plot_0.setData(self.k+self.T,self.buffer[:,1]) \n self.lick_plot_1.setData(self.k+self.T,self.buffer[:,2]) \n self.breathing_plot.setData(self.k+self.T,self.buffer[:,0]) \n \n if self.settings.movie_on.value():\n self.camera_image.setImage(self.camera.read())\n if self.settings.save_movie.value():\n self.camera.write()\n \n #print(self.buffer_h5.size)", "def preview(self,*args,**kwargs):\n self.cam.start_preview(*args,**kwargs)", "def preview():\n return render_template(\"controls/preview.html\")", "def save_params():\n file_name = filedialog.asksaveasfilename(\n filetypes=[\n (\"JSON\", \"*.json\")\n ],\n initialdir=os.getcwd())\n if file_name: # save option not cancelled by user\n self.parent_class.classes[\"fractal\"].curve.store_curve_tofile(\n file_name)", "def init_save_curve_params_button(self):\n def save_params():\n \"\"\"\n function to invoke different save routines\n \"\"\"\n file_name = filedialog.asksaveasfilename(\n filetypes=[\n (\"JSON\", \"*.json\")\n ],\n initialdir=os.getcwd())\n if file_name: # save option not cancelled by user\n self.parent_class.classes[\"fractal\"].curve.store_curve_tofile(\n file_name)\n\n self.buttons[\"btn_save_params\"] = Button(\n self.frame, text=\"Save Parameters\", command=save_params)\n self.buttons[\"btn_save_params\"].grid(row=4, column=1)", "def plot_preview_png():\n name = request.args.get('prev_instance')\n name = str(name)\n fig = create_preview(name)\n output = io.BytesIO()\n FigureCanvas(fig).print_png(output)\n return Response(output.getvalue(), mimetype='image/png')", "def save(self):\n\n # TODO:Find place to save data, write logic to save images(Filter out video?)", "def state_preview_do(cfg, app, win, events):", "def saveParameters (self , filePath):\r\n # productive #onButton\r\n profprint()\r\n widget = slicer.modules.NeedleFinderWidget\r\n config = ConfigParser.RawConfigParser()\r\n config.add_section('NeedleFinder Parameters')\r\n config.add_section('BooleanSection')\r\n config.add_section('IntegerSection')\r\n\r\n config.set('BooleanSection', 'autoCorrectTip', widget.autoCorrectTip.isChecked())\r\n config.set('BooleanSection', 'invertedContrast', widget.invertedContrast.isChecked())\r\n config.set('BooleanSection', 'gradient', widget.gradient.isChecked())\r\n config.set('BooleanSection', 'filterControlPoints', widget.filterControlPoints.isChecked())\r\n config.set('BooleanSection', 'drawFiducialPoints', widget.drawFiducialPoints.isChecked())\r\n config.set('BooleanSection', 'autoStopTip', widget.autoStopTip.isChecked())\r\n config.set('BooleanSection', 'extendNeedle', widget.extendNeedle.isChecked())\r\n config.set('BooleanSection', 'maxLength', widget.maxLength.isChecked())\r\n config.set('BooleanSection', 'gaussianAttenuationButton', widget.gaussianAttenuationButton.isChecked())\r\n\r\n config.set('IntegerSection', 'realNeedleLength', 
widget.realNeedleLength.value)\r\n config.set('IntegerSection', 'sigmaValue', widget.sigmaValue.value)\r\n config.set('IntegerSection', 'gradientPonderation', widget.gradientPonderation.value)\r\n config.set('IntegerSection', 'exponent', widget.exponent.value)\r\n config.set('IntegerSection', 'distanceMax', widget.radiusMax.value)\r\n config.set('IntegerSection', 'nbRotatingIterations', widget.nbRotatingIterations.value)\r\n config.set('IntegerSection', 'numberOfPointsPerNeedle', widget.numberOfPointsPerNeedle.value)\r\n config.set('IntegerSection', 'lenghtNeedleParameter', widget.lenghtNeedleParameter.value)\r\n config.set('IntegerSection', 'radiusNeedleParameter', widget.radiusNeedleParameter.value)\r\n config.set('IntegerSection', 'algoVersParameter', widget.algoVersParameter.value)\r\n\r\n # Writing our configuration file to 'example.cfg'\r\n with open(filePath, 'wb') as configfile:\r\n config.write(configfile)", "def saveParameters (self ,filePath):\n #productive #onButton\n profprint()\n widget = slicer.modules.NeedleFinderWidget\n config = ConfigParser.RawConfigParser()\n config.add_section('NeedleFinder Parameters')\n config.add_section('BooleanSection')\n config.add_section('IntegerSection')\n\n config.set('BooleanSection', 'autoCorrectTip', widget.autoCorrectTip.isChecked())\n config.set('BooleanSection', 'invertedContrast', widget.invertedContrast.isChecked())\n config.set('BooleanSection', 'gradient', widget.gradient.isChecked())\n config.set('BooleanSection', 'filterControlPoints', widget.filterControlPoints.isChecked())\n config.set('BooleanSection', 'drawFiducialPoints', widget.drawFiducialPoints.isChecked())\n config.set('BooleanSection', 'autoStopTip', widget.autoStopTip.isChecked())\n config.set('BooleanSection', 'extendNeedle', widget.extendNeedle.isChecked())\n config.set('BooleanSection', 'maxLength', widget.maxLength.isChecked())\n config.set('BooleanSection', 'gaussianAttenuationButton', widget.gaussianAttenuationButton.isChecked())\n\n config.set('IntegerSection', 'realNeedleLength', widget.realNeedleLength.value)\n config.set('IntegerSection', 'sigmaValue', widget.sigmaValue.value)\n config.set('IntegerSection', 'gradientPonderation', widget.gradientPonderation.value)\n config.set('IntegerSection', 'exponent', widget.exponent.value)\n config.set('IntegerSection', 'distanceMax', widget.distanceMax.value)\n config.set('IntegerSection', 'nbRotatingIterations', widget.nbRotatingIterations.value)\n config.set('IntegerSection', 'numberOfPointsPerNeedle', widget.numberOfPointsPerNeedle.value)\n config.set('IntegerSection', 'lenghtNeedleParameter', widget.lenghtNeedleParameter.value)\n config.set('IntegerSection', 'radiusNeedleParameter', widget.radiusNeedleParameter.value)\n config.set('IntegerSection', 'algoVersParameter', widget.algoVersParameter.value)\n\n # Writing our configuration file to 'example.cfg'\n with open(filePath, 'wb') as configfile:\n config.write(configfile)", "def para_saver(self):\n\n para_list = []\n path = self.lineEdit_params.text()\n # we need the 'image' button to be checked.\n radio_is_circle = self.radioButton_circle.isChecked()\n\n if radio_is_circle:\n self.radioButton_image.setChecked(True)\n\n file = open(path, 'wb')\n # we go over all instances of the sliderclass to save their respective parameters\n for clazz in SliderClass.all_sliders:\n para1 = clazz._params_image\n para_list.append(para1)\n if clazz.radio_image is not None:\n para2 = clazz._params_circle\n para_list.append(para2)\n # manually add the morph state, since it is no slider 
class yet they are parameters\n para_list.append(self.morph_state)\n para_list.append(self.coords)\n para_list.append(self.checkBox_segment.isChecked())\n para_list.append(radio_is_circle)\n\n pickle.dump(para_list, file)\n file.close()\n\n if radio_is_circle:\n self.radioButton_circle.setChecked(True)", "def save_plot(self, ):\n pass", "def save(self, filename):\n print(\"Saving...\", end=\"\\r\")\n canvas = self.canvas[self.N:self.S,self.W:self.E]\n cv2.imwrite(\"./Output/\"+filename, canvas)\n print(\"Saved:\",filename)", "def save(self, fileName, *args):\n if self._renderRequired:\n self.render()\n self.qimage.save(fileName, *args)", "def save(self):\n dyad = self.video.dyad\n comment = self.comment_text\n video_number = self.video.camera\n start_frame = self.framenumber #correct for default half size pic for selection:\n coordinates = [int(self.coordinates.top()*2),int(self.coordinates.left()*2),int(self.coordinates.bottom()*2),int(self.coordinates.right()*2)]\n if coordinates[0] > coordinates[2]:#ensure to have coordinates correct\n v = coordinates[0]\n coordinates[0] = coordinates[2]\n coordinates[2] = v\n if coordinates[1] > coordinates[3]:\n v = coordinates[1]\n coordinates[1] = coordinates[3]\n coordinates[3] = v\n mother = self.mother\n child = self.child\n self.video.database.add_roi(dyad, video_number, start_frame, coordinates, mother, child, comment)\n self.video.database.save_as_json()\n QMessageBox.about(self, \"Saved successfully\", \"The data was added to the database\")", "def save_particles_image(self):\n base_filename = self.config['info']['filename_microscope']\n self.save_image_microscope_camera(base_filename)", "def _update_preview_content(self):\n if self.index == -1:\n preview_str = \"(no result selected)\"\n else:\n record = self.results[self.index]\n\n id_str = f\"RECORD_ID:\\t\\t{record['record_id']}\\n\"\n _kw_str = \", \".join(sorted(record[\"keywords\"]))\n kw_str = f\"KEYWORDS:\\t\\t{_kw_str}\\n\"\n date = datetime.fromtimestamp(record[\"utc_last_access\"])\n _date_str = datetime.strftime(date, \"%Y-%m-%d %H:%M:%S\")\n access_date_str = f\"LAST ACCESS:\\t{_date_str}\\n\"\n summary_str = f\"\\n{record['record_summary']}\"\n preview_str = id_str + kw_str + access_date_str + summary_str\n\n self.preview_textcontrol.buffer.text = preview_str", "def preview_camera(self):\n self.statusbar.clearMessage()\n self.statusbar.showMessage('Previewing the camera. 
Press the Enter key to exit.')\n self.buttonPreview.setText('Press Enter\\nto finish.')\n self.comboCamera.setEnabled(False)\n self.buttonSelectColor.setEnabled(False)\n self.buttonLogSet.setEnabled(False)\n cap = webcam.initiate_camera(self.comboCamera.currentIndex())\n while True:\n _, frame = cap.read()\n frame = np.rot90(frame, self.comboRotation.currentIndex())\n cv2.imshow('Camera Preview', frame)\n key = cv2.waitKey(1) & 0xFF\n if key == ord('\\r'):\n break\n cap.release()\n cv2.destroyAllWindows()\n self.buttonPreview.setText('Preview')\n self.comboCamera.setEnabled(True)\n self.buttonSelectColor.setEnabled(True)\n self.buttonLogSet.setEnabled(True)\n self.statusbar.clearMessage()", "def saveSettings(self):\n self.genFiles.applyData()\n self.genGraph.applyData()", "def visualise(self):\n\n scores, education = self.get_data()\n self.write_data(scores, education)\n\n return True", "def upd_preview(self):\n\n if self.data_type != \"layer\":\n self.dlg.uLabelImgPreview.clear()\n self.dlg.uLabelImgPreview.setText(\"No preview available\")\n return\n\n if self.get_preview(\"300x200\", 0.5):\n return\n if self.get_preview(\"150x100\", 5):\n return\n\n self.dlg.uLabelImgPreview.clear()\n self.dlg.uLabelImgPreview.setText(\"No preview available\")", "def savePicture(self):\n self.file_name = QtGui.QFileDialog.getSaveFileName(self, \n \"Save as... (specify extension)\", \"\")\n cv2.imwrite(self.file_name, self.frame)", "def create_preview(message):", "def render_and_save():\n\n rendering_config = configuration.get_config()\n rendering_config = ml_collections.FrozenConfigDict(rendering_config)\n aspect_ratio = rendering_config.aspect_ratio\n height = rendering_config.height\n width = int(aspect_ratio * height)\n\n scene_camera = build_camera(rendering_config, aspect_ratio)\n world = build_world(rendering_config)\n\n # Render.\n logging.info(\"Tracing rays...\")\n render_image_fn = jax.jit(\n render.generate_image,\n static_argnames=[\"height\", \"width\", \"config\"])\n image = render_image_fn(height, width, scene_camera, world, rendering_config)\n\n image = render.correct_gamma(image, gamma=rendering_config.gamma_correction)\n\n logging.info(\"Saving to file...\")\n output.export_as_ppm(image, rendering_config.output_file)\n\n return image", "def saveParameters(self):\n super(RectTab,self).saveParameters()\n self.simArgs['width'] = self.width.get()\n self.simArgs['height'] = self.height.get()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = rect.RectTable(**self.simArgs)", "def display(self, canvas, x, y, width, height):\n pass", "def dynPaintEditor(*args, activeOnly: bool=True, autoSave: bool=True, camera: Union[AnyStr,\n bool]=\"\", canvasMode: bool=True, canvasUndo: bool=True, changeCommand:\n Union[List[AnyStr, AnyStr, AnyStr, AnyStr], bool]=None, clear: List[float,\n float, float]=None, control: bool=True, currentCanvasSize: bool=True,\n defineTemplate: AnyStr=\"\", displayAppearance: Union[AnyStr, bool]=\"\",\n displayFog: bool=True, displayImage: Union[int, bool]=0, displayLights:\n Union[AnyStr, bool]=\"\", displayStyle: Union[AnyStr, bool]=\"\",\n displayTextures: bool=True, docTag: Union[AnyStr, bool]=\"\", doubleBuffer:\n bool=True, drawAxis: bool=True, drawContext: bool=True, exists: bool=True,\n fastUpdate: int=0, fileName: Union[AnyStr, bool]=\"\", filter: Union[AnyStr,\n bool]=\"\", forceMainConnection: Union[AnyStr, bool]=\"\", highlightConnection:\n Union[AnyStr, 
bool]=\"\", iconGrab: bool=True, loadImage: AnyStr=\"\",\n lockMainConnection: bool=True, mainListConnection: Union[AnyStr, bool]=\"\",\n menu: AnyStr=\"\", nbImages: bool=True, newImage: Union[List[int, int, float,\n float, float], bool]=None, paintAll: float=0.0, panel: Union[AnyStr,\n bool]=\"\", parent: Union[AnyStr, bool]=\"\", redrawLast: bool=True, refresh:\n bool=True, refreshMode: Union[int, bool]=0, removeAllImages: bool=True,\n removeImage: bool=True, rollImage: List[float, float]=None, saveAlpha:\n bool=True, saveBumpmap: Union[AnyStr, bool]=\"\", saveImage: bool=True,\n scaleBlue: Union[float, bool]=0.0, scaleGreen: Union[float, bool]=0.0,\n scaleRed: Union[float, bool]=0.0, selectionConnection: Union[AnyStr,\n bool]=\"\", singleBuffer: bool=True, snapShot: bool=True, stateString:\n bool=True, swap: int=0, tileSize: int=0, unParent: bool=True, undoCache:\n bool=True, unlockMainConnection: bool=True, updateMainConnection: bool=True,\n useTemplate: AnyStr=\"\", wrap: List[bool, bool]=None, writeImage: AnyStr=\"\",\n zoom: Union[float, bool]=0.0, q=True, query=True, e=True, edit=True,\n **kwargs)->Union[AnyStr, Any]:\n pass" ]
[ "0.62056845", "0.61532867", "0.61443603", "0.6139312", "0.606531", "0.59494275", "0.5938108", "0.58969593", "0.58924055", "0.5826511", "0.5789883", "0.5724899", "0.56783843", "0.5656564", "0.5616673", "0.5614471", "0.55980814", "0.55951744", "0.5587141", "0.55860794", "0.5567963", "0.5542513", "0.5499675", "0.54988444", "0.5488363", "0.5476885", "0.5470247", "0.5448692", "0.5432791", "0.54309005" ]
0.6919115
0
Changes which balls can be selected to change values for. Gets called when the number of balls is changed. Also saves the parameters
def changeFormation(self, *args):
    # get the number of balls
    # formation = self.numberOfBallsSelector.get(first=None, last=None)
    newNBalls = int(self.numberOfBallsSelector.get())

    if newNBalls >= self.nBalls:
        for i in range(self.nBalls, newNBalls):
            # for some reason self.initballstate gets edited if ballstates gets edited
            self.ballStates[i] = [1.5, 0.75, 0, 0]
            # TODO: change this later after we figure out proper initial ball locations
            self.ballStates[i][2] += random.uniform(-3, 3)
            self.ballStates[i][3] += random.uniform(-3, 3)

    self.nBalls = newNBalls
    self.balls = tuple(map(str, range(self.nBalls)))
    # self.saveParameters()
    # recreate combobox with updated number of balls
    self.ballSelector = Pmw.ComboBox(self, label_text='Choose Ball',
                                     labelpos='nw', selectioncommand=self.changeBall,
                                     scrolledlist_items=self.balls, dropdown=1)
    self.ballSelector.grid(column=0, row=3)
    self.ballSelector.selectitem(self.currentBall)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def changeBall(self, *args):\n\n # set sliders to state for new ball\n self.currentBall = int(self.ballSelector.get())\n newState = self.ballStates[self.currentBall]\n self.initialXScale.set(newState[0])\n self.initialYScale.set(newState[1])\n self.initialXVelScale.set(newState[2])\n self.initialYVelScale.set(newState[3])\n self.saveParameters()", "def changeBarriers(self, value):\r\n\t\tself.numBarriers = value", "def saveParameters(self):\n # save current scale values into the ball state for the current ball\n x = self.initialXScale.get()\n y = self.initialYScale.get()\n xVel = self.initialXVelScale.get()\n yVel = self.initialYVelScale.get()\n self.ballStates[self.currentBall] = [x, y, xVel, yVel]\n # set new currentBall if changed\n self.currentBall = int(self.ballSelector.get())\n # the states of all the balls to be simulated\n self.simArgs['balls']=self.ballStates\n self.simArgs['playbackSpeed'] = self.playbackSpeedScale.get()\n self.simArgs['trace'] = self.toTrace.get()\n self.simArgs['friction'] = self.friction.get()\n # get number of balls from formation string\n self.simArgs['nBalls'] = self.nBalls\n # for s in self.numberOfBallsSelector.get().split():\n # if s.isdigit():\n # self.simArgs['nBalls']=int(s)", "def updateBall(self):\n \n self._ball.moveBall()\n self._ball.collideBallPaddle(self._paddle)\n for a in self._bricks:\n self._ball.collideBallBrick(a)", "def changeInitValues(self, betas):\n\n if self.name in betas:\n self.initValue = betas[self.name]", "def updatemaxbombs(self):\n tiles: int = int(self.widthbox.get()) * int(self.heightbox.get())\n self.bombsbox.configure(to=tiles/2)", "def change(self):\r\n\r\n # If checkboxes are available, check status and set boat speed reference line visibility accordingly.\r\n if self.cb:\r\n if self.cb_bt.checkState() == QtCore.Qt.Checked:\r\n for item in self.bt:\r\n item.set_visible(True)\r\n else:\r\n for item in self.bt:\r\n item.set_visible(False)\r\n # GGA\r\n if self.cb_gga.checkState() == QtCore.Qt.Checked:\r\n for item in self.gga:\r\n item.set_visible(True)\r\n # self.gga[0].set_visible(True)\r\n elif self.gga is not None:\r\n for item in self.gga:\r\n item.set_visible(False)\r\n # self.gga[0].set_visible(False)\r\n # VTG\r\n if self.cb_vtg.checkState() == QtCore.Qt.Checked:\r\n for item in self.vtg:\r\n item.set_visible(True)\r\n # self.vtg[0].set_visible(True)\r\n elif self.vtg is not None:\r\n for item in self.vtg:\r\n item.set_visible(False)\r\n # self.vtg[0].set_visible(False)\r\n\r\n # Draw canvas\r\n self.canvas.draw()", "def changeInitValues(self, betas):\n\n for e in self.children:\n e.changeInitValues(betas)", "def _set_bet_limit(self) -> None:\n for i, ratio in enumerate(BET_LIMIT_RATIOS):\n self._bet_limits[i] = self._treasury_min.get() // ratio", "def checkball(self):\r\n for ball in self.overlapping_sprites:\r\n ball.bottom=self.top\r\n if math.fabs(ball.x-self.x)<math.fabs(ball.x-self.left) and math.fabs(ball.x-self.x)<math.fabs(ball.x-self.right):\r\n ball.vertbounce()\r\n if math.fabs(ball.x-self.left)<math.fabs(ball.x-self.x) and math.fabs(ball.x-self.left)<math.fabs(ball.x-self.right):\r\n ball.leftbounce()\r\n if math.fabs(ball.x-self.right)<math.fabs(ball.x-self.left) and math.fabs(ball.x-self.right)<math.fabs(ball.x-self.x):\r\n ball.rightbounce()\r\n self.points.value+=10\r\n if self.points.value==500:\r\n ball.ballchange()\r\n elif self.points.value==2000:\r\n ball.ballchange()\r\n elif self.points.value==4500:\r\n ball.ballchange()\r\n elif self.points.value==10000:\r\n ball.ballchange()", "def 
updateSize(self, *args):\n width = self.width.get()\n height = self.height.get()\n self.initialXScale.config(to=width)\n self.initialYScale.config(to=height)\n # error check that state is not outside bounds\n for ball, state in self.ballStates.items():\n if state[0] > width:\n state[0] = width\n if state[1] > height:\n state[1] = height", "def bayesian_update(self):\n for p in self.parameters:\n p.bayesian_update()", "def set_legs(self, number_of_legs):\n self.legs = number_of_legs", "def updateBricks(self):\n for a in self._bricks:\n if (a.collidesTopBottom(self._ball) == True) or \\\n (a.collidesSide(self._ball) == True):\n self._score = self._score + 1\n self._bricks.remove(a)", "def cb_changeN(val):\n num_samples = int(sN.val)\n alpha_update = [sAlpha0.val, sAlpha1.val, sAlpha2.val]\n\n # ML\n CatML.set_param(lambda0)\n global x_cat\n x_cat = CatML.sampling(num_samples) # Samplingし直し\n global lambda_ML\n lambda_ML = CatML.MLinfer(x_cat)\n\n axLikelihood.cla()\n draw_likelihood_contours(axLikelihood, x_cat, CatML)\n\n axML.cla()\n drawBarGraph( axML, \"ML\", lambda_ML, bar_y_max, col_ML ) # Draw Bar graph\n\n\n # MAP\n CatMAP.set_param(lambda0)\n global lambda_MAP\n lambda_MAP = CatMAP.MAPinfer(x_cat, dirichlet)\n\n axMAP.cla()\n drawBarGraph( axMAP, \"MAP\", lambda_MAP, bar_y_max, col_MAP ) # Draw Bar Graph\n\n # Bayes\n posteriorDirichlet.set_param(alpha_update)\n posteriorDirichlet.calcPosterior(x_cat)\n draw_pdf_contours(axPosteriorDirichlet, posteriorDirichlet) # Draw Posterior Dirichlet\n lambda_Bayes = np.zeros(3)\n for k in range(3):\n lambda_Bayes[k] = posteriorDirichlet.BayesInfer(k)\n axBayes.cla()\n drawBarGraph( axBayes, \"Bayes\", lambda_Bayes, bar_y_max, col_Bayes ) # Draw Bar Graph\n\n print('Change N')\n print('lambda_ML =', lambda_ML)\n print('lambda_MAP =', lambda_MAP)\n print('lambda_Bayes=', lambda_Bayes)\n\n axDirichlet.cla()\n draw_pdf_contours(axDirichlet, dirichlet, True) # Draw Dirichlet\n axDirichlet.text(0.5, 0.9, 'Prior', ha='center', va='center', fontsize=14)\n\n draw_point(axDirichlet, lambda_ML, col_ML)\n draw_point(axDirichlet, lambda_MAP, col_MAP)\n draw_point(axDirichlet, lambda_Bayes, col_Bayes)\n draw_point(axLikelihood, lambda_ML, col_ML)\n draw_point(axPosteriorDirichlet, lambda_MAP, col_MAP)\n draw_point(axPosteriorDirichlet, lambda_Bayes, col_Bayes)\n\n fig.canvas.draw_idle()", "def playfield_switch_hit(self, **kwargs):\n if (not self.balls or (kwargs.get('balls') and self.balls - kwargs['balls'] < 0)):\n self.mark_playfield_active()\n\n if not self.num_balls_requested:\n if self.machine.game:\n self.unexpected_balls = 1\n\n if self.machine.config['machine']['glass_off_mode']:\n self.log.debug(\"Playfield_active switch hit with no balls \"\n \"expected. glass_off_mode is enabled, so \"\n \"this will be ignored.\")\n else:\n self.log.debug(\"Playfield_active switch hit with no balls \"\n \"expected. 
glass_off_mode is not enabled, \"\n \"setting playfield ball count to 1\")\n\n self.balls = 1\n self.machine.events.post('unexpected_ball_on_' + self.name)", "def __init__(self):\n super(RobinBoundary, self).__init__()\n self.value = RobinBoundary.value\n RobinBoundary.value += 1\n self.update(param=\"1\")", "def reset(self, balls=None):\r\n if balls is None:\r\n balls = self.balls\r\n for ball in balls:\r\n ball.reset()", "def numBinsChanged(self, val):\n self.numBins = val", "def cb_update(val):\n alpha_update = [sAlpha0.val, sAlpha1.val, sAlpha2.val]\n\n # update Dirichlet's parameters alpha\n dirichlet.set_param(alpha_update)\n draw_pdf_contours(axDirichlet, dirichlet, True) # Draw Dirichlet\n\n # MAP\n lambda_MAP = CatMAP.MAPinfer(x_cat, dirichlet)\n axMAP.cla()\n drawBarGraph( axMAP, \"MAP\", lambda_MAP, bar_y_max, col_MAP ) # Draw Bar graph\n\n # Bayes\n posteriorDirichlet.set_param(alpha_update)\n posteriorDirichlet.calcPosterior(x_cat)\n draw_pdf_contours(axPosteriorDirichlet, posteriorDirichlet) # Draw Posterior Dirichlet\n lambda_Bayes = np.zeros(3)\n for k in range(3):\n lambda_Bayes[k] = posteriorDirichlet.BayesInfer(k)\n\n axBayes.cla()\n drawBarGraph( axBayes, \"Bayes\", lambda_Bayes, bar_y_max, col_Bayes ) # Draw Bar graph\n\n print('Update')\n print('lambda_ML =', lambda_ML)\n print('lambda_MAP =', lambda_MAP)\n print('lambda_Bayes=', lambda_Bayes)\n draw_point(axDirichlet, lambda_ML, col_ML)\n draw_point(axDirichlet, lambda_MAP, col_MAP)\n draw_point(axDirichlet, lambda_Bayes, col_Bayes)\n draw_point(axPosteriorDirichlet, lambda_MAP, col_MAP)\n draw_point(axPosteriorDirichlet, lambda_Bayes, col_Bayes)\n\n fig.canvas.draw_idle()", "def updateParameters(self, parameters):\r\n #return\r\n parameters[2].enabled = 0\r\n parameters[3].enabled = 0\r\n parameters[4].enabled = 0", "def updateParameters(self, parameters):\r\n #return\r\n parameters[2].enabled = 0\r\n parameters[3].enabled = 0\r\n parameters[4].enabled = 0", "def set_parameters(self,params):\n K3Supervisor.set_parameters(self,params)\n self.blending.set_parameters(self.parameters)", "def _onSetParameterB(self, value):\n self._parameters['b'] = min(max(value, self._parameters['lower']), self._parameters['upper']) # Limit at upper and lower\n self._logger.info(\"Parameter ba' of function '{}' changed to {}\".format(self._function, value))\n self.functionChanged.emit(self._dim, self._function, self._parameters.copy())", "def ball_collision_update(self):\r\n ball_pairs = self.balls_colliding()\r\n for ball_pair in ball_pairs:\r\n b1,b2 = ball_pair\r\n self.ball_pair_collision_update(b1,b2)", "def _activate(self):\n # Remember the original speed of the ball.\n self._orig_speed = self.game.ball.base_speed\n\n # Slow the balls down.\n for ball in self.game.balls:\n ball.speed = self._SLOW_BALL_SPEED\n ball.base_speed = self._SLOW_BALL_SPEED", "def spinBoxChanged(self):\n sdr = self.sender()\n\n if sdr == self.ui.spinBoxSpectraToCut:\n self.parameters['cut_m'] = self.ui.spinBoxSpectraToCut.value()\n elif sdr == self.ui.spinBoxEveryNSpectra:\n self.parameters['every_n'] = self.ui.spinBoxEveryNSpectra.value()\n elif sdr == self.ui.spinBoxOffset:\n self.parameters['offset'] = self.ui.spinBoxOffset.value()\n\n self.changed.emit()", "def _playfield_switch_hit(self, **kwargs):\n if self.balls <= 0 or (kwargs.get('balls') and self.balls - kwargs['balls'] < 0):\n self._mark_playfield_active()\n\n if not self.num_balls_requested:\n self.debug_log(\"Playfield was activated with no balls expected.\")\n 
self.machine.events.post('unexpected_ball_on_' + self.name)\n '''event: unexpected_ball_on_(name)\n desc: The playfield named (name) just had a switch hit,\n meaning a ball is on it, but that ball was not expected.\n '''\n\n self.ball_search.reset_timer()", "def updateParameters(self, parameters):\n if parameters[1].value == True:\n parameters[2].enabled = True;\n else:\n parameters[2].enabled = False; \n \n if parameters[3].value == True:\n parameters[4].enabled = True;\n \n else:\n parameters[4].enabled = False; \n return", "def updateParameters(self, parameters):\n if parameters[0].value:\n parameters[1].enabled = True\n parameters[2].enabled = True\n else:\n parameters[1].enabled = False\n parameters[2].enabled = False\n return" ]
[ "0.6785369", "0.5880804", "0.58548313", "0.5546707", "0.5517481", "0.5512108", "0.5484628", "0.5483159", "0.5383988", "0.537589", "0.5337195", "0.5318962", "0.5283062", "0.52816105", "0.5265503", "0.5248229", "0.5230199", "0.5221469", "0.52191705", "0.5217716", "0.5216568", "0.5216568", "0.5214385", "0.5213733", "0.520737", "0.51928455", "0.5189107", "0.5175431", "0.5175236", "0.5174068" ]
0.75181323
0
Run when a different ball is selected. Saves the parameters and sets the sliders to the settings for the new ball.
def changeBall(self, *args):
    # set sliders to state for new ball
    self.currentBall = int(self.ballSelector.get())
    newState = self.ballStates[self.currentBall]
    self.initialXScale.set(newState[0])
    self.initialYScale.set(newState[1])
    self.initialXVelScale.set(newState[2])
    self.initialYVelScale.set(newState[3])
    self.saveParameters()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def saveParameters(self):\n # save current scale values into the ball state for the current ball\n x = self.initialXScale.get()\n y = self.initialYScale.get()\n xVel = self.initialXVelScale.get()\n yVel = self.initialYVelScale.get()\n self.ballStates[self.currentBall] = [x, y, xVel, yVel]\n # set new currentBall if changed\n self.currentBall = int(self.ballSelector.get())\n # the states of all the balls to be simulated\n self.simArgs['balls']=self.ballStates\n self.simArgs['playbackSpeed'] = self.playbackSpeedScale.get()\n self.simArgs['trace'] = self.toTrace.get()\n self.simArgs['friction'] = self.friction.get()\n # get number of balls from formation string\n self.simArgs['nBalls'] = self.nBalls\n # for s in self.numberOfBallsSelector.get().split():\n # if s.isdigit():\n # self.simArgs['nBalls']=int(s)", "def onResetParameters(self):\r\n # productive #button\r\n profprint()\r\n fileName = pathToScene = slicer.modules.needlefinder.path.replace(\"NeedleFinder.py\", \"Config/default.cfg\")\r\n self.logic.loadParameters(fileName)", "def changeFormation(self, *args):\n\n # get the number of balls\n # formation = self.numberOfBallsSelector.get(first=None, last=None)\n newNBalls = int(self.numberOfBallsSelector.get())\n\n if newNBalls >= self.nBalls:\n for i in range(self.nBalls, newNBalls):\n # for some reason self.initballstate gets edited if ballstates gets edited\n self.ballStates[i] = [1.5, 0.75, 0, 0]\n\n # TODO: change this later after we figure out proper initial ball locations\n self.ballStates[i][2] += random.uniform(-3, 3)\n self.ballStates[i][3] += random.uniform(-3, 3)\n\n self.nBalls = newNBalls\n self.balls = tuple(map(str, range(self.nBalls)))\n # self.saveParameters()\n # recreate combobox with updated number of balls\n self.ballSelector = Pmw.ComboBox(self, label_text='Choose Ball',\n labelpos='nw', selectioncommand=self.changeBall,\n scrolledlist_items=self.balls, dropdown=1)\n self.ballSelector.grid(column=0, row=3)\n self.ballSelector.selectitem(self.currentBall)", "def para_loader(self, stdpath=True):\n if stdpath is True:\n path = self.lineEdit_params.text()\n else:\n path = \"./data/parameters/parameters_blur.pcl\"\n self.lineEdit_params.setText(path)\n\n file = open(path, 'rb')\n loaded_p_list = pickle.load(file)\n file.close()\n counter = 0\n\n radio_is_image = self.radioButton_circle.isChecked()\n\n if radio_is_image:\n self.radioButton_image.setChecked(True)\n\n for clazz in SliderClass.all_sliders:\n\n # ## if you added a new sliderclass and want to load your old parameters, you can use this little hack\n # classname = 'linefinder'\n # #print(loaded_p_list[counter])\n # #print(f\"the latest keyword = {clazz.keyword}\")\n # if clazz.keyword == [classname]:\n # warnings.warn(\"parameters are messed up! save new ones\")\n # break\n\n try:\n clazz.settr(loaded_p_list[counter])\n counter += 1\n if clazz.radio_image is not None:\n clazz._params_circle = loaded_p_list[counter]\n counter += 1\n except IndexError:\n # again a small hack for when you add an extra element to the class and this is not yet implemented\n # in your parameter saves.\n warnings.warn(\"parameters are messed up! 
save new ones\")\n counter += 1\n print(f\"should be morph: {loaded_p_list[counter]}\")\n # in the save, manually set the morph_state as the final thing to load in\n self.morph_state = loaded_p_list[counter]\n self.checkBox_morph.setChecked(self.morph_state[0][0])\n self.textEdit_morph.setText(self.morph_state[0][1])\n\n self.coords = loaded_p_list[counter + 1]\n self.checkBox_segment.setChecked(loaded_p_list[counter + 2])\n self.lineEdit_coords.setText(str(self.coords))\n radio_is_circle = loaded_p_list[counter + 3]\n\n if radio_is_circle:\n self.radioButton_circle.setChecked(True)\n self.radioButton_circle.click()", "def onReset(self):\n #productive\n profprint()\n fileName = pathToScene = slicer.modules.needlefinder.path.replace(\"NeedleFinder.py\",\"Config/default.cfg\")\n self.logic.loadParameters(fileName)", "def spinBoxChanged(self):\n sdr = self.sender()\n\n if sdr == self.ui.spinBoxSpectraToCut:\n self.parameters['cut_m'] = self.ui.spinBoxSpectraToCut.value()\n elif sdr == self.ui.spinBoxEveryNSpectra:\n self.parameters['every_n'] = self.ui.spinBoxEveryNSpectra.value()\n elif sdr == self.ui.spinBoxOffset:\n self.parameters['offset'] = self.ui.spinBoxOffset.value()\n\n self.changed.emit()", "def set_config(self): # called from button_set object \n self.settings['lights_on'] = self.lights_on.get()\n self.settings['lights_off'] = self.lights_off.get()\n self.settings['ambient_min'] = self.ambient_min.get()\n self.settings['soil_1'] = self.smc1.get()\n self.settings['soil_2'] = self.smc2.get()\n self.settings['soil_3'] = self.smc3.get()\n self.settings['soil_4'] = self.smc4.get()\n self.settings['overhead_level'] = self.overhead_level.get()\n\n # Save settings to config file in case of reboot / power-loss\n print \"UPDATING SETTINGS FILE\"\n with open(self.settings_path, 'w') as jsonfile:\n jsonfile.write(json.dumps(self.settings, indent=4))\n self.active_changes = True # (flag) changes are active!", "def cb_reset(event):\n axDirichlet.cla()\n # Reset Sliders\n sAlpha0.reset() # resetが駄目!一番最初に戻ってしまう\n sAlpha1.reset()\n sAlpha2.reset()\n alpha_update = [sAlpha0.val, sAlpha1.val, sAlpha2.val]\n print('alpha_update=', alpha_update)\n\n # ML\n lambda_ML = CatML.MLinfer(x_cat)\n\n axML.cla()\n drawBarGraph( axML, \"ML\", lambda_ML, bar_y_max, col_ML ) # Draw Bar graph\n\n\n # MAP\n dirichlet.set_param(alpha_update)\n lambda_MAP = CatMAP.MAPinfer(x_cat, dirichlet)\n\n axMAP.cla()\n drawBarGraph( axMAP, \"MAP\", lambda_MAP, bar_y_max, col_MAP ) # Draw Bar Graph\n\n # Bayes\n posteriorDirichlet.set_param(alpha_update)\n posteriorDirichlet.calcPosterior(x_cat)\n lambda_Bayes = np.zeros(3)\n for k in range(3):\n lambda_Bayes[k] = posteriorDirichlet.BayesInfer(k)\n\n axBayes.cla()\n drawBarGraph( axBayes, \"Bayes\", lambda_Bayes, bar_y_max, col_Bayes ) # Draw Bar Graph\n\n draw_pdf_contours(axDirichlet, dirichlet, True) # Draw Dirichlet\n\n print('Reset')\n print('lambda_ML =', lambda_ML)\n print('lambda_MAP =', lambda_MAP)\n print('lambda_Bayes=', lambda_Bayes)\n draw_point(axDirichlet, lambda_ML, col_ML)\n draw_point(axDirichlet, lambda_MAP, col_MAP)\n draw_point(axDirichlet, lambda_Bayes, col_Bayes)\n draw_point(axLikelihood, lambda_ML, col_ML)\n draw_point(axPosteriorDirichlet, lambda_MAP, col_MAP)\n draw_point(axPosteriorDirichlet, lambda_Bayes, col_Bayes)\n\n fig.canvas.draw_idle()", "def sliderChange(self):\n for rdout, sldr in zip(self.joint_slider_rdouts, self.joint_sliders):\n rdout.setText(str(sldr.value()))\n\n self.ui.rdoutTorq.setText(str(self.ui.sldrMaxTorque.value()) + \"%\")\n 
self.ui.rdoutSpeed.setText(str(self.ui.sldrSpeed.value()) + \"%\")\n\n # Do nothing if the rexarm is not initialized\n if self.rexarm.initialized:\n self.rexarm.set_torque_limits([self.ui.sldrMaxTorque.value() / 100.0] * self.rexarm.num_joints)\n self.rexarm.set_speeds_normalized_all(self.ui.sldrSpeed.value() / 100.0)\n joint_positions = np.array([sldr.value() * D2R for sldr in self.joint_sliders])\n # Only send the joints that the rexarm has\n self.rexarm.set_positions(joint_positions[0:self.rexarm.num_joints])", "def setup_mode():\n status_label.color = WHITE\n status_label.text = \"-SET-\"\n\n ave_label.color = BLACK # Turn off average label and value display\n ave_value.color = BLACK\n\n max_value.text = str(MAX_RANGE_F) # Display maximum range value\n min_value.text = str(MIN_RANGE_F) # Display minimum range value\n\n time.sleep(0.8) # Show SET status text before setting parameters\n status_label.text = \"\" # Clear status text\n\n param_index = 0 # Reset index of parameter to set\n\n setup_state = \"SETUP\" # Set initial state\n while setup_state == \"SETUP\":\n # Select parameter to set\n setup_state = \"SELECT_PARAM\" # Parameter selection state\n while setup_state == \"SELECT_PARAM\":\n param_index = max(0, min(2, param_index))\n status_label.text = SETUP_COLORS[param_index][0]\n image_group[param_index + 226].color = BLACK\n status_label.color = BLACK\n time.sleep(0.25)\n image_group[param_index + 226].color = SETUP_COLORS[param_index][1]\n status_label.color = WHITE\n time.sleep(0.25)\n\n param_index -= get_joystick()\n\n _buttons = panel.events.get()\n if _buttons and _buttons.pressed:\n if _buttons.key_number == BUTTON_UP: # HOLD button pressed\n param_index = param_index - 1\n if _buttons.key_number == BUTTON_DOWN: # SET button pressed\n param_index = param_index + 1\n if _buttons.key_number == BUTTON_HOLD: # HOLD button pressed\n play_tone(1319, 0.030) # Musical note E6\n setup_state = \"ADJUST_VALUE\" # Next state\n if _buttons.key_number == BUTTON_SET: # SET button pressed\n play_tone(1319, 0.030) # Musical note E6\n setup_state = \"EXIT\" # Next state\n\n # Adjust parameter value\n param_value = int(image_group[param_index + 230].text)\n\n while setup_state == \"ADJUST_VALUE\":\n param_value = max(32, min(157, param_value))\n image_group[param_index + 230].text = str(param_value)\n image_group[param_index + 230].color = BLACK\n status_label.color = BLACK\n time.sleep(0.05)\n image_group[param_index + 230].color = SETUP_COLORS[param_index][1]\n status_label.color = WHITE\n time.sleep(0.2)\n\n param_value += get_joystick()\n\n _buttons = panel.events.get()\n if _buttons and _buttons.pressed:\n if _buttons.key_number == BUTTON_UP: # HOLD button pressed\n param_value = param_value + 1\n if _buttons.key_number == BUTTON_DOWN: # SET button pressed\n param_value = param_value - 1\n if _buttons.key_number == BUTTON_HOLD: # HOLD button pressed\n play_tone(1319, 0.030) # Musical note E6\n setup_state = \"SETUP\" # Next state\n if _buttons.key_number == BUTTON_SET: # SET button pressed\n play_tone(1319, 0.030) # Musical note E6\n setup_state = \"EXIT\" # Next state\n\n # Exit setup process\n status_label.text = \"RESUME\"\n time.sleep(0.5)\n status_label.text = \"\"\n\n # Display average label and value\n ave_label.color = YELLOW\n ave_value.color = YELLOW\n return int(alarm_value.text), int(max_value.text), int(min_value.text)", "def update(self):\n #update checkboxes\n self.average_check_box.SetValue(self.parent.fftsink.average)\n 
self.use_persistence_check_box.SetValue(self.parent.fftsink.use_persistence)\n self.peak_hold_check_box.SetValue(self.parent.fftsink.peak_hold)\n #update radio buttons\n try:\n index = list(DIV_LEVELS).index(self.parent.fftsink.y_per_div)\n self.radio_buttons[index].SetValue(True)\n except: pass", "def update(self, *args):\n #Fetches slider information\n s1=self.s1.get()\n s2=self.s2.get()\n r1=self.r1.get()\n r2=self.r2.get()\n p=self.p.get()\n\n #Changes the number next to the bar\n self.r1_string.configure(text=\"%.2f\"% r1)\n self.r2_string.configure(text=\"%.2f\"% r2)\n self.s1_string.configure(text=\"%.2f\"% s1)\n self.s2_string.configure(text=\"%.2f\"% s2)\n self.p_string.configure(text=\"%.2f\"% self.p.get())\n\n #Creates two asset objects\n self.I1 = Instrument(r1, s1, \"Asset 1\", \"Equity\")\n self.I2 = Instrument(r2, s2, \"Asset 2\", \"Bond\")\n\n #Builds a portfolio object\n self.port = Portfolio([self.I1, self.I2])\n self.port.addcorr([[0,p]])\n\n #Displays the new graph to the graph frame\n fff =Frame(height=400, width=400, bd=10, bg='white')\n Chart(self.port, 0.02).scatter(fff)\n fff.grid(row=1, column=0)", "def saveParameters (self ,filePath):\n #productive #onButton\n profprint()\n widget = slicer.modules.NeedleFinderWidget\n config = ConfigParser.RawConfigParser()\n config.add_section('NeedleFinder Parameters')\n config.add_section('BooleanSection')\n config.add_section('IntegerSection')\n\n config.set('BooleanSection', 'autoCorrectTip', widget.autoCorrectTip.isChecked())\n config.set('BooleanSection', 'invertedContrast', widget.invertedContrast.isChecked())\n config.set('BooleanSection', 'gradient', widget.gradient.isChecked())\n config.set('BooleanSection', 'filterControlPoints', widget.filterControlPoints.isChecked())\n config.set('BooleanSection', 'drawFiducialPoints', widget.drawFiducialPoints.isChecked())\n config.set('BooleanSection', 'autoStopTip', widget.autoStopTip.isChecked())\n config.set('BooleanSection', 'extendNeedle', widget.extendNeedle.isChecked())\n config.set('BooleanSection', 'maxLength', widget.maxLength.isChecked())\n config.set('BooleanSection', 'gaussianAttenuationButton', widget.gaussianAttenuationButton.isChecked())\n\n config.set('IntegerSection', 'realNeedleLength', widget.realNeedleLength.value)\n config.set('IntegerSection', 'sigmaValue', widget.sigmaValue.value)\n config.set('IntegerSection', 'gradientPonderation', widget.gradientPonderation.value)\n config.set('IntegerSection', 'exponent', widget.exponent.value)\n config.set('IntegerSection', 'distanceMax', widget.distanceMax.value)\n config.set('IntegerSection', 'nbRotatingIterations', widget.nbRotatingIterations.value)\n config.set('IntegerSection', 'numberOfPointsPerNeedle', widget.numberOfPointsPerNeedle.value)\n config.set('IntegerSection', 'lenghtNeedleParameter', widget.lenghtNeedleParameter.value)\n config.set('IntegerSection', 'radiusNeedleParameter', widget.radiusNeedleParameter.value)\n config.set('IntegerSection', 'algoVersParameter', widget.algoVersParameter.value)\n\n # Writing our configuration file to 'example.cfg'\n with open(filePath, 'wb') as configfile:\n config.write(configfile)", "def _handle_gui_set(self,p_name):\n if self._live_update:\n self._update_param_from_tkvar(p_name)", "def update(self):\n self.plot.draw()\n \n func=str(self.edit1b.currentText())\n if self.win.test()==0:\n x=np.linspace(0,10,200)\n elif self.win.test()==1:\n x=np.linspace(0,0.40,200)\n \n pattern1=r'Steel'\n pattern2=r'Aluminium'\n pattern3=r'[\\d]+'\n \n if (func!='Comparison Chart'):\n 
self.edit2b.setDisabled(False)\n self.edit3b.setDisabled(False)\n self.edit4b.setDisabled(False)\n if (func=='Quenched/Tempered Steel'):\n alpha = 0.0025\n elif (func=='Annealed Steel'):\n alpha = 0.01\n elif (func=='Steel (input Su)'):\n S = str(self.edit2b.text())\n if (self.win.test()==0):\n S = str(float(S)/6.895)\n alpha = notch.alpha(eval(S))\n elif (func=='Aluminium Alloy 356.0 as cast'):\n rho = 0.08\n elif (func=='Aluminium Alloy 6061'):\n rho = 0.025\n elif (func=='Aluminium Alloy 7075'):\n rho = 0.015\n elif (func=='Material dropdown'):\n pass\n \n y1=[]\n if re.search(pattern1,func):\n Su=notch.su_s(alpha)\n if (self.win.test()==0):\n Su = Su*6.895\n for i in range(len(x)):\n y1.append(notch.nsp(alpha,x[i],self.win.test()))\n y=np.asarray(y1)\n if (re.search(pattern3,str(self.edit3b.text()))):\n r=eval(str(self.edit3b.text()))\n self.edit4b.setText(str(notch.nsp(alpha,r,self.win.test())))\n elif re.search(pattern2,func):\n Su=notch.su_a(rho)\n if (self.win.test()==0):\n Su = Su*6.895\n for i in range(len(x)):\n y1.append(notch.nsn(rho,x[i],self.win.test()))\n y=np.asarray(y1)\n if (re.search(pattern3,str(self.edit3b.text()))):\n r=eval(str(self.edit3b.text()))\n self.edit4b.setText(str(notch.nsn(rho,r,self.win.test())))\n \n self.edit2b.setText(str(Su))\n func1 = 'Steel (Su='+str(self.edit2b.text())+')'\n if (func!='Steel (input Su)'):\n self.plot.redraw(x,y,func, self.xlabel)\n elif (func=='Steel (input Su)'):\n self.plot.redraw(x,y,func1, self.xlabel)\n \n elif (func=='Comparison Chart'):\n self.edit2b.setText(\"\")\n self.edit2b.setDisabled(True)\n self.edit3b.setText(\"\")\n self.edit3b.setDisabled(True)\n self.edit4b.setText(\"\")\n self.edit4b.setDisabled(True)\n self.plot.draw_comp(self.xlabel, self.win.test())", "def on_reload_button_cicked_(self):\n self.pause_subscriber = True\n\n self._load_robot_description()\n controllers = self.get_current_controllers()\n\n self.joints = self._create_joints(controllers)\n\n self.synergy = self._create_synergy(controllers)\n\n self.delete_old_sliders_()\n\n # self._widget.sliderReleaseCheckBox.setCheckState(Qt.Unchecked)\n\n self.load_new_synergy_sliders_()\n\n # self.load_new_sliders_()\n\n self._update_synergy_viewer()\n\n self.pause_subscriber = False", "def on_click(event):\n ax = event.inaxes\n \n if ax is None:\n # Occurs when a region not in an axis is clicked...\n return\n \n if self.current_plot == 'single':\n if event.button is 1:\n if not self.ax_zoomed:\n # Change over to a single baseline plot\n try:\n self.ax_zoomed = True\n self.current_ax = ax\n ax.set_position([0.1, 0.05, 0.85, 0.80])\n ax.set_xlabel(\"Frequency\")\n #ax.set_ylabel(\"Time\")\n \n for axis in self.sp_fig.axes:\n if axis is not ax:\n axis.set_visible(False)\n \n except ValueError:\n raise\n self.sp_fig.canvas.mpl_disconnect(self.fig_connect)\n \n elif event.button is 3:\n if self.ax_zoomed:\n self.ax_zoomed = False\n #self.sp_fig.canvas.mpl_disconnect(self.fig_connect)\n self.updatePlot()\n \n else:\n # No need to re-draw the canvas if it's not a left or right click\n return\n \n elif self.current_plot == 'multi':\n if ax is None:\n # Occurs when a region not in an axis is clicked...\n return\n if event.button is 1:\n if not self.ax_zoomed:\n # Change over to a single baseline plot\n try:\n ant1, ant2 = ax.get_title().split(\" \")\n except:\n ant1 = int(ax.get_title().strip('Tile').strip('Antenna').strip('Stand'))\n ant2 = ant1 \n try:\n self.spin_ref_ant.setValue(int(ant1))\n self.spin_ref_ant2.setValue(int(ant2))\n 
self.plot_select.setCurrentIndex(0)\n self.current_plot = 'single'\n \n self.updatePlot()\n except:\n raise\n self.sp_fig.canvas.mpl_disconnect(self.fig_connect)\n \n elif event.button is 3:\n if not self.ax_zoomed:\n ax.set_position([0.1, 0.1, 0.85, 0.85])\n # TODO: fix labelling of zoom plots\n ax.set_xlabel(\"Frequency\")\n #ax.set_ylabel(\"Time\")\n self.orig_position = ax.get_position()\n for axis in event.canvas.figure.axes:\n # Hide all the other axes...\n if axis is not ax:\n axis.set_visible(False)\n self.ax_zoomed=True\n else:\n self.updatePlot()\n \n else:\n # No need to re-draw the canvas if it's not a left or right click\n return\n \n event.canvas.draw()", "def saveParameters (self , filePath):\r\n # productive #onButton\r\n profprint()\r\n widget = slicer.modules.NeedleFinderWidget\r\n config = ConfigParser.RawConfigParser()\r\n config.add_section('NeedleFinder Parameters')\r\n config.add_section('BooleanSection')\r\n config.add_section('IntegerSection')\r\n\r\n config.set('BooleanSection', 'autoCorrectTip', widget.autoCorrectTip.isChecked())\r\n config.set('BooleanSection', 'invertedContrast', widget.invertedContrast.isChecked())\r\n config.set('BooleanSection', 'gradient', widget.gradient.isChecked())\r\n config.set('BooleanSection', 'filterControlPoints', widget.filterControlPoints.isChecked())\r\n config.set('BooleanSection', 'drawFiducialPoints', widget.drawFiducialPoints.isChecked())\r\n config.set('BooleanSection', 'autoStopTip', widget.autoStopTip.isChecked())\r\n config.set('BooleanSection', 'extendNeedle', widget.extendNeedle.isChecked())\r\n config.set('BooleanSection', 'maxLength', widget.maxLength.isChecked())\r\n config.set('BooleanSection', 'gaussianAttenuationButton', widget.gaussianAttenuationButton.isChecked())\r\n\r\n config.set('IntegerSection', 'realNeedleLength', widget.realNeedleLength.value)\r\n config.set('IntegerSection', 'sigmaValue', widget.sigmaValue.value)\r\n config.set('IntegerSection', 'gradientPonderation', widget.gradientPonderation.value)\r\n config.set('IntegerSection', 'exponent', widget.exponent.value)\r\n config.set('IntegerSection', 'distanceMax', widget.radiusMax.value)\r\n config.set('IntegerSection', 'nbRotatingIterations', widget.nbRotatingIterations.value)\r\n config.set('IntegerSection', 'numberOfPointsPerNeedle', widget.numberOfPointsPerNeedle.value)\r\n config.set('IntegerSection', 'lenghtNeedleParameter', widget.lenghtNeedleParameter.value)\r\n config.set('IntegerSection', 'radiusNeedleParameter', widget.radiusNeedleParameter.value)\r\n config.set('IntegerSection', 'algoVersParameter', widget.algoVersParameter.value)\r\n\r\n # Writing our configuration file to 'example.cfg'\r\n with open(filePath, 'wb') as configfile:\r\n config.write(configfile)", "def state_chosen_do(cfg, app, win, events):", "def changeValue(self):\r\n # productive #onUpDnArrow\r\n profprint()\r\n widget = slicer.modules.NeedleFinderWidget\r\n # widget.scrollPointButton.setText('Scroll Point for Needle ' + str(widget.editNeedleTxtBox.value) + ' (pt: ' + str(self.ptNumber) + ')')\r\n self.lockControlPoints(widget.editNeedleTxtBox.value)\r\n self.unlockControlPoints(widget.editNeedleTxtBox.value)\r\n widget.drawValidationNeedlesButton.text = \"Render Manual Needle \" + str(widget.editNeedleTxtBox.value)", "def _selection_changed(self, i):\n\n # Check if we have disconnected\n if i < 0:\n return\n # First check if we need to stop the old block\n if self._started_previous and self._previous_config:\n logger.debug(\"Should stop config [%s], stopping!\",\n 
self._previous_config.name)\n self._previous_config.delete()\n\n # Remove our callback for the previous config\n if self._previous_config:\n self._previous_config.data_received_cb.remove_callback(\n self._log_data_signal_wrapper)\n self._previous_config.error_cb.remove_callback(\n self._log_error_signal_wrapper)\n\n lg = self._model.get_config(i)\n if not lg.started:\n logger.debug(\"Config [%s] not started, starting!\", lg.name)\n self._started_previous = True\n lg.start()\n else:\n self._started_previous = False\n self._plot.removeAllDatasets()\n color_selector = 0\n\n self._plot.set_title(lg.name)\n\n self.avgsumvalue = dict();\n self.avgsumnumb = 0;\n for d in lg.variables:\n self._plot.add_curve(d.name, self.colors[\n color_selector % len(self.colors)])\n color_selector += 1\n self.avgsumvalue[d.name]=0;\n lg.data_received_cb.add_callback(self._log_data_signal_wrapper)\n lg.error_cb.add_callback(self._log_error_signal_wrapper)\n\n self._previous_config = lg", "def load_params(self, event):\n \n self.robot_type = rospy.get_param(\"robot_type\" , 'pendulum' )\n self.robot_config = rospy.get_param(\"robot_config\", 'wrist-only' )\n self.robot_ctl = rospy.get_param(\"controller\", 'RfixCTC' )\n self.fixed_mode = rospy.get_param(\"fixed_mode\", 1 )\n \n \n ###############################################\n # Load robot model for the right configuration\n if self.robot_config == 'wrist-only':\n self.R = Proto.SingleRevoluteDSDM()\n \n elif self.robot_config == 'dual-plane' :\n self.R = Proto.TwoPlanarSerialDSDM()\n \n else:\n self.R = None\n \n ###############################################\n # Load controller\n if self.robot_ctl == 'RfixCTC' :\n self.Ctl = RminCTC.RfixComputedTorqueController( self.R , self.fixed_mode )\n \n elif self.robot_ctl == 'RminCTC' :\n self.Ctl = RminCTC.RminComputedTorqueController( self.R )\n \n elif self.robot_ctl == 'RfixSLD' :\n self.Ctl = RminCTC.RfixSlidingModeController( self.R , self.fixed_mode )\n \n elif self.robot_ctl == 'RminSLD' :\n self.Ctl = RminCTC.RminSlidingModeController( self.R )\n \n elif self.robot_ctl == 'RollCTC' :\n self.Ctl = RollCTC.RolloutComputedTorqueController( self.R )\n \n elif self.robot_ctl == 'RollSLD' :\n self.Ctl = RollCTC.RolloutSlidingModeController( self.R )\n \n else:\n self.Ctl = None\n \n \n if self.robot_config == 'wrist-only':\n self.Ctl.n_gears = rospy.get_param(\"n_gears\", 2 )\n self.x_d = np.array( rospy.get_param(\"goal\", [0,0] ) )\n \n elif self.robot_config == 'dual-plane' :\n self.Ctl.n_gears = rospy.get_param(\"n_gears\", 4 )\n self.x_d = np.array( rospy.get_param(\"goal\", [0.0,0.0,0.0,0.0] ) )\n #self.x_d = np.array( [-3.14 , 0 , 0 , 0] )\n \n # Gen ctl params\n self.Ctl.hysteresis = rospy.get_param(\"hysteresis\", True )\n self.Ctl.min_delay = rospy.get_param(\"min_delay\", 0.5 )\n \n self.Ctl.w0 = rospy.get_param(\"w0\", 1 )\n self.Ctl.zeta = rospy.get_param(\"zeta\", 0.7 )\n \n self.Ctl.lam = rospy.get_param(\"lam\", 1 )\n self.Ctl.nab = rospy.get_param(\"nab\", 1 )\n self.Ctl.D = rospy.get_param(\"D\", 0 )\n \n self.Ctl.horizon = rospy.get_param(\"horizon\", 0.5 )\n self.Ctl.sim_dt = rospy.get_param(\"sim_dt\", 0.1 )\n \n self.Ctl.domain_check = rospy.get_param(\"domain_check\", False )\n \n # Base policy param for roll \n if self.robot_ctl == 'RollCTC' :\n self.Ctl.FixCtl.lam = self.Ctl.lam\n \n elif self.robot_ctl == 'RollSLD' :\n self.Ctl.FixCtl.lam = self.Ctl.lam \n self.Ctl.FixCtl.nab = self.Ctl.nab \n self.Ctl.FixCtl.D = self.Ctl.D", "def change(self):\r\n\r\n # If checkboxes are available, check 
status and set boat speed reference line visibility accordingly.\r\n if self.cb:\r\n if self.cb_bt.checkState() == QtCore.Qt.Checked:\r\n for item in self.bt:\r\n item.set_visible(True)\r\n else:\r\n for item in self.bt:\r\n item.set_visible(False)\r\n # GGA\r\n if self.cb_gga.checkState() == QtCore.Qt.Checked:\r\n for item in self.gga:\r\n item.set_visible(True)\r\n # self.gga[0].set_visible(True)\r\n elif self.gga is not None:\r\n for item in self.gga:\r\n item.set_visible(False)\r\n # self.gga[0].set_visible(False)\r\n # VTG\r\n if self.cb_vtg.checkState() == QtCore.Qt.Checked:\r\n for item in self.vtg:\r\n item.set_visible(True)\r\n # self.vtg[0].set_visible(True)\r\n elif self.vtg is not None:\r\n for item in self.vtg:\r\n item.set_visible(False)\r\n # self.vtg[0].set_visible(False)\r\n\r\n # Draw canvas\r\n self.canvas.draw()", "def slider_action(self, sender):\n self.r = self.rslider.value\n self.g = self.gslider.value\n self.b = self.bslider.value\n self.preview.background_color = self.rgb\n self.colorlabel.text = self.hexcode", "def run(self):\n MAX_ANGULAR_VELOCITY = 3.14/2 * 0.5\n\n # After 1.5 meters, we don't care about how far the ball is. It doesn't make us\n # approach it any faster.\n DISTANCE_THRESHOLD = 1.5\n \n # Factor to multiply thresholded distance by to get a maximum value equal to one\n DISTANCE_CONSTANT = 2/3.\n \n # Ball pursing thresholds\n MAX_FORWARD_VELOCITY = .75\n MIN_FORWARD_VELOCITY = 0.50\n \n if self.getTime() > 2.0:\n self.postSignal(\"restart\")\n \n ball = memory.world_objects.getObjPtr(core.WO_BALL)\n if not ball.seen:\n return\n \n # Reset the timer to act as a failsafe against losing the ball\n self.reset()\n \n # Ball in the bottom frame?\n if not ball.fromTopCamera:\n self.finish()\n \n # Ball coordinates\n ball_x, ball_y = ball.imageCenterX, ball.imageCenterY\n \n # Calculate forward velocity\n ball_distance = ball.visionDistance / 1000\n# print('Ball distance: {}'.format(ball_distance))\n ball_distance = min(ball_distance, DISTANCE_THRESHOLD)\n \n # Cache the ball distances\n PursueBall.ball_distances = (PursueBall.ball_distances + [ball_distance])[-30:]\n# print('Ball distances: {}'.format(PursueBall.ball_distances))\n slope = sum(PursueBall.ball_distances[-10:])/10 - sum(PursueBall.ball_distances[:10])/10\n# print('Slope: {} - {} = {}'.format(sum(PursueBall.ball_distances[-10:]) / 10,\n# sum(PursueBall.ball_distances[:10]) / 10,\n# slope))\n# print('Input: {}'.format(1 / slope if slope else 1))\n \n \n # Get the maximum velocity to be 1\n forward_vel = ball_distance * DISTANCE_CONSTANT\n forward_vel *= MAX_FORWARD_VELOCITY\n forward_vel = max(MIN_FORWARD_VELOCITY, forward_vel)\n# print('forward velocity: {}'.format(forward_vel))\n \n # Calculate sideways velocity\n angular_vel = -(ball_x-160.0) / 160.0 * MAX_ANGULAR_VELOCITY\n# print('Sideways Amount: {}'.format(angular_vel))\n \n commands.setWalkVelocity(forward_vel, 0, angular_vel)", "def update_change(self):\n self.meas_selected_number = self.__input_meas.value()\n self.meas_selected_series = self.__input_series.value()\n self.selected_data = self.__input_select.value()\n self.noice_reduction_number = self.__input_noice.value()\n self.particle_size_number = self.__input_p_size.value()\n self.particle_density_number = self.__input_p_density.value()\n self.saturation_percentage = self.__input_saturation_percentage.value()\n\n # print(\"Selected series:\", self.meas_selected_series)\n # print(\"Selected measurement:\", self.meas_selected_number)\n\n if not 1 <= self.meas_selected_number <= 
17: # In series 2 there's measurement 0, but that's a copy of the 8th\n raise ValueError\n if self.noice_reduction_number < 0:\n raise ValueError\n if self.particle_size_number <= 0:\n raise ValueError\n if self.particle_density_number < 0:\n raise ValueError\n if not 0 <= self.saturation_percentage <= 100:\n raise ValueError\n\n meas_index = self.meas_selected_number - 1\n if self.meas_selected_series == 1:\n self.measurement = self.meas_pressure[meas_index]\n elif self.meas_selected_series == 2:\n self.measurement = self.meas_mixture[meas_index]\n else:\n raise ValueError\n\n if self.selected_data == 1:\n self.data = toolbox_2.remove_noise(self.measurement.p_diff, self.noice_reduction_number)\n elif self.selected_data == 2:\n self.data = toolbox_2.remove_noise(self.measurement.p_abs, self.noice_reduction_number)\n elif self.selected_data == 3:\n self.data = toolbox_2.remove_noise(\n toolbox_2.flip_and_normalize(self.measurement.ext), self.noice_reduction_number)\n self.plot_zoom.setYRange(-0.1, 1, padding=0)\n elif self.selected_data == 4:\n self.data = toolbox_2.remove_noise(self.measurement.ext, self.noice_reduction_number)\n else:\n raise ValueError\n\n self.index_of_drop = toolbox_2.find_drop_index(self.measurement.p_diff)\n # time vector starts from the beginning of pressure drop\n time = self.measurement.time - self.measurement.time[self.index_of_drop]\n\n self.update_distribution()\n\n if self.first_update:\n self.curve_select = self.plot_select.plot(time, self.data)\n self.curve_zoom = self.plot_zoom.plot(self.measurement.time, self.data)\n self.curve_simulate = self.plot_zoom.plot(pen=pg.mkPen((100, 255, 200)))\n\n self.curve_distribution_cumulative = self.plot_distribution.plot(\n pen=pg.mkPen((100, 200, 255)),\n name=\"´ Kumulatiivinen pitoisuus\",\n symbolBrush=(80, 160, 201), symbolPen='w')\n self.curve_distribution = self.plot_distribution.plot(\n name=\"Pitoisuusjakauma\", symbolBrush=(50, 50, 255), symbolPen='w')\n self.curve_rotatometer = self.plot_rotatometer.plot(\n name=\"Mitattu pitoisuus\", symbolBrush=(50, 50, 255), symbolPen='w')\n self.curve_rotatometer_fit = self.plot_rotatometer.plot(\n pen=pg.mkPen((100, 255, 200)), name=\"´ Ideaalinen pitoisuus\")\n\n self.first_update = False\n else:\n self.curve_select.setData(time, self.data)\n self.curve_zoom.setData(time, self.data)\n\n self.curve_distribution.setData(self.particle_distribution_x, self.particle_distribution_y*1e-10)\n self.curve_distribution_cumulative.setData(self.smallest_particles, self.number_counts*1e-10)\n self.curve_rotatometer.setData(np.array([4, 6, 8, 10, 12, 14, 16, 18]), self.number_counts_2*1e-10)\n x = np.linspace(3.5, 20, 100)\n self.curve_rotatometer_fit.setData(x, self.number_counts_2[0]*4*(1/x)*1e-10)\n\n if self.simulate_bool:\n self.simulation()\n\n self.set_labels()\n\n # set the graphs to the point of pressure drop, units are in seconds\n self.plot_select.setXRange(-2, 4, padding=0)\n self.plot_zoom.setXRange(0, 0.3, padding=0)\n self.line.setX(0.1)\n\n self.update_zoom_region()\n # self.update_zoom_plot()", "def changeFridge(self,*args):\n self.selectedADR = self.adrSelect.get()\n # clear temps plot\n self.stage60K.set_xdata([])\n self.stage60K.set_ydata([])\n self.stage03K.set_xdata([])\n self.stage03K.set_ydata([])\n self.stageGGG.set_xdata([])\n self.stageGGG.set_ydata([])\n self.stageFAA.set_xdata([])\n self.stageFAA.set_ydata([])\n # load saved temp data\n # We have to sleep for 0.5s here because it seems like it takes\n # a moment for the connected server to register in 
self.cxn, even\n # though all this starts because a message is received saying it\n # is connected :\\\n time.sleep(0.5)\n startDateTime = yield self.cxn[self.selectedADR].get_start_datetime()\n try:\n reg = self.cxn.registry\n yield reg.cd(ADR_SETTINGS_BASE_PATH + [self.selectedADR])\n logPath = yield reg.get('Log Path')\n tempDataChest = dataChest(logPath)\n ds = dateStamp()\n dset = '%s_temperatures'%ds.dateStamp(startDateTime.isoformat())\n tempDataChest.openDataset(dset)\n\n n = tempDataChest.getNumRows()\n # load approximately the last 6 hours of data\n pastTempData = tempDataChest.getData(max(0,n-6*60*60),None )\n for newRow in pastTempData:\n # change utc time to local\n utc = newRow[0] # (float)\n utc = datetime.datetime.utcfromtimestamp(utc)\n utc = utc.replace(tzinfo=tz.tzutc())\n newRow[0] = mpl.dates.date2num(utc)\n # add old data from file into plot\n self.stage60K.set_xdata(numpy.append(self.stage60K.get_xdata(),newRow[0]))\n self.stage60K.set_ydata(numpy.append(self.stage60K.get_ydata(),newRow[1]))\n self.stage03K.set_xdata(numpy.append(self.stage03K.get_xdata(),newRow[0]))\n self.stage03K.set_ydata(numpy.append(self.stage03K.get_ydata(),newRow[2]))\n self.stageGGG.set_xdata(numpy.append(self.stageGGG.get_xdata(),newRow[0]))\n self.stageGGG.set_ydata(numpy.append(self.stageGGG.get_ydata(),newRow[3]))\n self.stageFAA.set_xdata(numpy.append(self.stageFAA.get_xdata(),newRow[0]))\n self.stageFAA.set_ydata(numpy.append(self.stageFAA.get_ydata(),newRow[4]))\n except IOError:\n # file not created yet if adr server just opened\n print( 'temp file not created yet?' )\n self.updatePlot()\n # clear and reload last 20 messages of log\n self.log.clear()\n logMessages = yield self.cxn[self.selectedADR].get_log(20)\n for (t,m,a) in logMessages:\n self.updateLog(t,m,a)\n # update instrument status stuff: delete old, create new\n for widget in self.instrumentStatusFrame.winfo_children():\n widget.destroy()\n returnStatus = yield self.cxn[self.selectedADR].get_instrument_state()\n self.instrumentStatuses = {}\n for name,status in returnStatus:\n self.instrumentStatuses[name] = Tkinter.Label(self.instrumentStatusFrame,\n text=name,\n relief=Tkinter.RIDGE,\n bg='gray70')\n self.instrumentStatuses[name].pack(side=Tkinter.LEFT,\n expand=True,\n fill=Tkinter.X)\n # update field limits and button statuses\n self.setFieldLimits()\n self.magUpButton.configure(state=Tkinter.NORMAL)\n self.regulateButton.configure(state=Tkinter.NORMAL)\n self.compressorButton.configure(state=Tkinter.DISABLED)\n mUp = yield self.cxn[self.selectedADR].get_state_var('maggingUp')\n reg = yield self.cxn[self.selectedADR].get_state_var('regulating')\n if mUp:\n self.magUpButton.configure(text='Stop Magging Up',\n command=self.cancelMagUp)\n self.regulateButton.configure(state=Tkinter.DISABLED)\n if reg:\n self.regulateButton.configure(text='Stop Regulating',\n command=self.cancelRegulate)\n self.magUpButton.configure(state=Tkinter.DISABLED)\n # update heat switch buttons\n HSAvailable = yield self.cxn[self.selectedADR].get_instrument_state(['Heat Switch'])\n if HSAvailable[0][1][0]:\n self.HSCloseButton.configure(state=Tkinter.NORMAL)\n self.HSOpenButton.configure(state=Tkinter.NORMAL)\n else:\n self.HSCloseButton.configure(state=Tkinter.DISABLED)\n self.HSOpenButton.configure(state=Tkinter.DISABLED)\n # refresh interface\n self.updateInterface()", "def loadParameters (self, filePath):\n #productive #onButton\n profprint()\n widget = slicer.modules.NeedleFinderWidget\n config = ConfigParser.RawConfigParser()\n 
config.read(filePath)\n\n autoCorrectTip = config.getboolean('BooleanSection', 'autoCorrectTip')\n invertedContrast = config.getboolean('BooleanSection', 'invertedContrast')\n gradient = config.getboolean('BooleanSection', 'gradient')\n filterControlPoints = config.getboolean('BooleanSection', 'filterControlPoints')\n drawFiducialPoints = config.getboolean('BooleanSection', 'drawFiducialPoints')\n autoStopTip = config.getboolean('BooleanSection', 'autoStopTip')\n extendNeedle = config.getboolean('BooleanSection', 'extendNeedle')\n maxLength = config.getboolean('BooleanSection', 'maxLength')\n gaussianAttenuationButton = config.getboolean('BooleanSection', 'gaussianAttenuationButton')\n\n realNeedleLength = config.getint('IntegerSection', 'realNeedleLength')\n sigmaValue = config.getint('IntegerSection', 'sigmaValue')\n gradientPonderation = config.getint('IntegerSection', 'gradientPonderation')\n exponent = config.getint('IntegerSection', 'exponent')\n distanceMax = config.getint('IntegerSection', 'distanceMax')\n nbRotatingIterations = config.getint('IntegerSection', 'nbRotatingIterations')\n numberOfPointsPerNeedle = config.getint('IntegerSection', 'numberOfPointsPerNeedle')\n lenghtNeedleParameter = config.getint('IntegerSection', 'lenghtNeedleParameter')\n radiusNeedleParameter = config.getint('IntegerSection', 'radiusNeedleParameter')\n algoVersParameter = config.getint('IntegerSection', 'algoVersParameter')\n \n widget.autoCorrectTip.checked = autoCorrectTip\n widget.invertedContrast.checked = invertedContrast\n widget.gradient.checked = gradient \n widget.filterControlPoints.checked = filterControlPoints\n widget.drawFiducialPoints.checked = drawFiducialPoints\n widget.autoStopTip.checked = autoStopTip\n widget.extendNeedle.checked = extendNeedle\n widget.maxLength.checked = maxLength\n widget.gaussianAttenuationButton.checked = gaussianAttenuationButton\n\n widget.realNeedleLength.value = realNeedleLength\n widget.sigmaValue.value = sigmaValue\n widget.gradientPonderation.value = gradientPonderation\n widget.exponent.value = exponent\n widget.distanceMax.value = distanceMax\n widget.nbRotatingIterations.value = nbRotatingIterations\n widget.numberOfPointsPerNeedle.value = numberOfPointsPerNeedle\n widget.lenghtNeedleParameter.value = lenghtNeedleParameter\n widget.radiusNeedleParameter.value = radiusNeedleParameter\n widget.algoVersParameter.value = algoVersParameter\n print \"algoVers: \",algoVersParameter\n print \"Parameters successfully loaded!\"", "def apply_changes(self,event):\n newspeed = float(self.speedSound_txtBox.GetValue())\n newpath = self.path_txtBox.GetValue()\n try:\n if newspeed != data.speed:\n data.changespeed(newspeed)\n pub.sendMessage('update_feed',\n msg=\"Speed of Sound changed to: \"\n +str(data.speed)+\"m/s\\n\",\n arg2='wx.DEFAULT')\n if newpath != data.path:\n data.changepath(newpath)\n pub.sendMessage('update_feed',\n msg=\"Log File Path changed to: \"\n +str(data.path)+\"\\n\",\n arg2='wx.DEFAULT')\n except ValueError:\n pub.sendMessage('update_feed',\n msg=\"Error Changing Preferences.\\n\",\n arg2='wx.RED')\n self.MakeModal(False)\n self.Close()\n event.Skip()", "def cb_update(val):\n alpha_update = [sAlpha0.val, sAlpha1.val, sAlpha2.val]\n\n # update Dirichlet's parameters alpha\n dirichlet.set_param(alpha_update)\n draw_pdf_contours(axDirichlet, dirichlet, True) # Draw Dirichlet\n\n # MAP\n lambda_MAP = CatMAP.MAPinfer(x_cat, dirichlet)\n axMAP.cla()\n drawBarGraph( axMAP, \"MAP\", lambda_MAP, bar_y_max, col_MAP ) # Draw Bar graph\n\n # Bayes\n 
posteriorDirichlet.set_param(alpha_update)\n posteriorDirichlet.calcPosterior(x_cat)\n draw_pdf_contours(axPosteriorDirichlet, posteriorDirichlet) # Draw Posterior Dirichlet\n lambda_Bayes = np.zeros(3)\n for k in range(3):\n lambda_Bayes[k] = posteriorDirichlet.BayesInfer(k)\n\n axBayes.cla()\n drawBarGraph( axBayes, \"Bayes\", lambda_Bayes, bar_y_max, col_Bayes ) # Draw Bar graph\n\n print('Update')\n print('lambda_ML =', lambda_ML)\n print('lambda_MAP =', lambda_MAP)\n print('lambda_Bayes=', lambda_Bayes)\n draw_point(axDirichlet, lambda_ML, col_ML)\n draw_point(axDirichlet, lambda_MAP, col_MAP)\n draw_point(axDirichlet, lambda_Bayes, col_Bayes)\n draw_point(axPosteriorDirichlet, lambda_MAP, col_MAP)\n draw_point(axPosteriorDirichlet, lambda_Bayes, col_Bayes)\n\n fig.canvas.draw_idle()" ]
[ "0.6481118", "0.616664", "0.59381336", "0.5876839", "0.576337", "0.5656577", "0.5645619", "0.5610462", "0.5600084", "0.55788124", "0.55068946", "0.5481602", "0.5478214", "0.54687715", "0.5447701", "0.54400563", "0.54133815", "0.5406938", "0.539074", "0.5374289", "0.5361342", "0.5359383", "0.5357042", "0.5339366", "0.5323757", "0.5281663", "0.5278987", "0.52361786", "0.52271307", "0.5224135" ]
0.7484556
0
Saves parameters and starts simulation.
def startSimulation(self):
    self.saveParameters()
    self.simulation.main()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_simulation(self):\n pass", "def performSimulation(self):\n \n if self.parameters['verbose']:\n print(\"=====================\\nStarting simulation with parameters\\n\",self.parameters)\n print(\"=====================\\nInitial Graph\\n\")\n self.showState()\n print(\"=====================\")\n\n while self.parameters['steps'] > 0:\n if self.parameters['verbose']: print(\"Performing step\")\n self.performStep()\n if self.parameters['verbose']: self.showState()\n\n if self.parameters['verbose']:\n print(\"=====================\\nFinished Simulation\\n\\nResult graph:\")\n self.showState()\n #self.showGraph(self.parameters['file_name'])\n #self.showState()\n #self.showStats()", "def saveParameters(self):\n super(LorentzTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = Lorentz.Lorentz(**self.simArgs)", "def saveParameters(self):\n super(BuminTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = Buminovich.Buminovich(**self.simArgs)", "def run_simulator(self):\n\n self.update_settings()\n\n # Pass in the progress bar and the master so that the simulator can\n # update the progress bar and then refresh the screen when the progress\n # checkpoints are hit\n\n self.sim_results = self.sim.run(self.progress_bar, self.master)\n self.graph_results()", "def saveParameters(self):\n super(LTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = Ltab.LTable(**self.simArgs)", "def run():\n\n # Set up environment and agent\n e = Environment() # create environment (also adds some dummy traffic)\n a = e.create_agent(LearningAgent) # create agent\n e.set_primary_agent(a, enforce_deadline=False) # set agent to track\n\n # Now simulate it\n sim = Simulator(e, update_delay=0.0) # reduce update_delay to speed up simulation\n sim.run(n_trials=num_of_experiments) # press Esc or close pygame window to quit\n \n pd.Series(a.success).to_pickle('success_' + exp_id + '.pickle')\n a.Q_table.to_pickle('qtable_' + exp_id + '.pickle')\n pd.Series(a.q_delta_avg).to_pickle('convergence_' + exp_id + '.pickle')\n pd.Series(a.t_total).to_pickle('steps_' + exp_id + '.pickle')", "def run_simulation(self):\n\n # Create agents for simulation\n self.spawn_agents(self.num_agents)\n\n if self.force_personalities != None:\n self.force_personalities(self)\n\n if self.visualizer == True:\n V.Visualizer.createVisualizer(types=self.visualizerOptions, showAtEnd=True)\n\n TM.TimeManager.createManager()\n for x in range (self.time_to_run):\n for agent in self.agents:\n agent.take_turn()\n while self.agents_to_settle:\n self.agents_to_settle.pop().settle_reposts()\n if self.data_collector != None:\n self.data_collector.collector_turn(x, agent)\n if self.visualizer == True:\n self.generate_visualizations(x)\n TM.TimeManager.sharedManager.increaseTime()\n if self.data_collector != None:\n self.data_collector.collector_round(x)\n self.generate_statistics(x)\n\n if self.visualizer == True:\n V.Visualizer.sharedVisualizer.updateEverything()\n\n if self.data_collector != None:\n self.data_collector.finalize()", "def run(sim_attr_generator):\n#TODO: clean\n#TODO: integrate analyses\n def analyze_and_save(simulation,simulation_attributes):\n#? 
Ugly conf file analyses integration.\n if simulation_attributes.analyses and Args.output_file != None:\n verbose_print(\"Saving analyses for {0}.\".format(simulation_attributes.id_name),2)\n results = analyze_datas(\n simulation.result,\n simulation_attributes.analyses\n )\n plotables = ana_results_to_plotables(\n results,\n simulation_attributes.analyses\n )\n#TODO error handling for save\n analysis_save_dm(\n results,\n plotables,\n simulation_attributes.analyses,\n simulation_attributes.id_name\n )\n\n def save_simulation(simulation,simulation_attributes):\n if not simulation_attributes.analyses and Args.output_file != None:\n verbose_print(\"Saving simulation datas of {0}.\".format(\n simulation_attributes.id_name\n ),2) \n try:\n np.save(\n simulation_attributes.id_name,\n simulation.result\n )\n except:\n raise EnvironmentError(\"Can't save data to {}.\".format(\n simulation_attributes.id_name\n ))\n\n verbose_print(\"Starting simulation run.\",1)\n for i,simulation_attributes in enumerate(sim_attr_generator):\n verbose_print(\"Starting simulation number {0}: {1}\".format(\n i,\n simulation_attributes.id_name\n ),2)\n simulation = Simulation(\n SimulationVariables(simulation_attributes)\n )\n simulation.start()\n save_simulation(simulation,simulation_attributes)\n analyze_and_save(simulation,simulation_attributes)", "def saveParameters(self):\n super(CircTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = circle.CircleTable(**self.simArgs)", "def run_simulation(**kwargs):\n print(\"executing run_simulation() in file\", __file__)\n print(\"got the dictionary kwargs =\", kwargs)\n\n # HERE is where you would usually run your simulation (e.g. 
DMRG).\n # simulate some heavy calculations:\n for i in range(30):\n print(\"step \", i, flush=True) # (remove `flush=True` for Python 2)\n # the flush=True makes the output appear immediately\n time.sleep(5)\n\n results = {'kwargs': kwargs, 'example_data': np.random.random((2, 2))}\n\n output_filename = kwargs['output_filename']\n print(\"save results to \", output_filename)\n with open(output_filename, 'wb') as f:\n pickle.dump(results, f)", "def Save(self):\n if not self.simFilePath:\n path = self.PromptPathSaveAs()\n if not path: return\n else: self.simFilePath = path\n \n #Why bother doing new code if the cmd interface does it already\n if self.shell.interp != self.sim42interp:\n self.UseCommandInterface(True)\n self.shell.run('store %s' %self.simFilePath)\n #self.sim42interp.cmd.Store(self.simFilePath)\n\n self.SetTitle('Simulation --> ' + self.simFilePath)", "def Main():\n numberOfPopulation = 350\n numberOfDays = 60\n \n simulation = Simulation(Covid19(), numberOfPopulation, numberOfDays, \"Covid 19 Simulation\")\n simulation.run() \n simulation = Simulation(Ebola(), numberOfPopulation, numberOfDays, \"Ebola Simulation\")\n simulation.run()", "def start() -> None:\n\n # PREPARE\n clone_game_files()\n\n # SIMULATE\n turns = run.simulation()\n\n # LOG\n logs = read.combine_logs(turns)\n\n # CALCULATE\n results = calculate.results(logs)\n\n # DISPLAY\n visualize.charts(results)\n\n # CLEAN\n remove_cloned_files()", "def start_simulation(self):\n regime_name = str(self.regime_list.item(self._current_regime_index).text())\n self.statusLabel.setText(u\"simulating {}\".format(regime_name))\n self._logger.info(u\"Simulating: {}\".format(regime_name))\n\n self.actSimulate.setDisabled(True)\n self.shortRunSimulation.setEnabled(False)\n self.shortRunRegimeBatch.setEnabled(False)\n self.actExecuteRegimes.setDisabled(True)\n self.guiProgress = QtGui.QProgressBar(self)\n self.sim.simulationProgressChanged.connect(self.guiProgress.setValue)\n self.statusBar().addWidget(self.guiProgress)\n self.runSimulation.emit()", "def run():\n\n # Set up environment and agent\n e = Environment() # create environment (also adds some dummy traffic)\n a = e.create_agent(LearningAgent) # create agent\n e.set_primary_agent(a, enforce_deadline=True) # set agent to track\n\n # Now simulate it\n sim = Simulator(e, update_delay=0.5) # reduce update_delay to speed up simulation\n sim.run(n_trials=100) # press Esc or close pygame window to quit", "def run():\n\n # Set up environment and agent\n e = Environment() # create environment (also adds some dummy traffic)\n a = e.create_agent(LearningAgent) # create agent\n e.set_primary_agent(a, enforce_deadline= True ) # set agent to track\n\n # Now simulate it\n sim = Simulator(e, update_delay=0.5) # reduce update_delay to speed up simulation\n sim.run(n_trials=100) # press Esc or close pygame window to quit", "def _setup_simulation(self\n ) -> None:\n pass", "def start_simulation(parameters):\n # If there's a sphere file to load, we need to load it. 
Otherwise,\n # we need to initialize the sphere.\n if parameters.filename:\n triangle_ids = init.load_sphere_from_file(parameters.filename)\n else:\n triangle_ids = init.initialize_sphere(parameters.target_area)\n\n # With the sphere initialized, we need to make an initialized\n # metropolis algorithm class instance.\n algorithm = parameters.algorithm\n if algorithm == monte_carlo.select_for_curvature:\n metro = algorithm(parameters.target_area,parameters.target_std,\n parameters.area_damping_strength,\n parameters.std_damping_strength)\n elif algorithm == monte_carlo.select_for_area:\n metro = algorithm(parameters.target_area,\n parameters.area_damping_strength)\n else: #raise an error\n raise TypeError(\"The algorithm \"+algorithm+\" is not valid.\")\n\n # Now gather data!\n parameters.gather_data_function(metro,\n parameters.final_sweep,\n parameters.v5damping,\n parameters.v6damping,\n parameters.current_sweep,\n parameters.save_every_n_sweeps)", "def __init__(self, name=None, params=None, params_from_file=False, params_from_user=False):\n\n print(\"\")\n if name:\n self._name = name\n else:\n self._name = input(\"Simulation Name : \")\n\n print(\"Name : \"+str(self._name))\n\n self.plot_path = os.getcwd()+'/session/'+self._name+'_plots/'\n try:\n os.mkdir(self.plot_path)\n except (FileExistsError, FileNotFoundError):\n beep = lambda x: os.system(\"echo '\\a';sleep 0.5;\" * x)\n beep(1)\n print(\"WARNING : FOLDER PATH ALREADY EXISTS\")\n print(self.plot_path)\n print(\"WRITING OVER\")\n for fn in os.listdir(self.plot_path):\n os.remove(self.plot_path+fn)\n\n if params:\n self.params = params\n else:\n if params_from_file:\n self.params = load_input_pickle(params_from_file)\n elif params_from_user:\n self.params = get_user_params()\n else:\n #Define default params\n self.params = load_input_pickle('default')\n\n self.default_runs = [] # array of simulation runs with default parameters\n self.mod_runs = [] # array of tuples that contain 0) a list of simulation runs\n # and 1) a dictionary clarifying which parameter was given\n # which value for each run. 
(for convenience, can also\n # determine by comparing the simulation_run.params\n # directly\n\n\n print(\"Running Model with Default Parameters...\")\n self.run_default()\n print(\"\")", "def simulation_fixture():\n input_data = {\n \"Tools\": {\"ForwardEuler\": {}},\n \"Clock\": {\"start_time\": 0, \"end_time\": 1, \"dt\": 1},\n \"PhysicsModules\": {},\n \"Diagnostics\": {},\n }\n simulation = Simulation(input_data)\n simulation.prepare_simulation()\n return simulation", "def Main():\n EnigmaSim = simulation() #Creates the simulation object\n EnigmaSim.Run() #Runs the simulation", "def run():\n\n # Set up environment and agent\n e = Environment() # create environment (also adds some dummy traffic)\n a = e.create_agent(LearningAgent) # create agent\n e.set_primary_agent(a, enforce_deadline=True) # specify agent to track\n # NOTE: You can set enforce_deadline=False while debugging to allow longer trials\n\n # Now simulate it\n sim = Simulator(e, update_delay=0.00000001, display=False) # create simulator (uses pygame when display=True, if available)\n # NOTE: To speed up simulation, reduce update_delay and/or set display=False\n\n sim.run(n_trials=100) # run for a specified number of trials\n # NOTE: To quit midway, press Esc or close pygame window, or hit Ctrl+C on the command-line", "def run():\n\n # Set up environment and agent\n e = Environment() # create environment (also adds some dummy traffic)\n a = e.create_agent(LearningAgent) # create agent\n e.set_primary_agent(a, enforce_deadline=True) # specify agent to track\n # NOTE: You can set enforce_deadline=False while debugging to allow longer trials\n\n # Now simulate it\n sim = Simulator(e, update_delay=0.001, display=True) # create simulator (uses pygame when display=True, if available)\n # NOTE: To speed up simulation, reduce update_delay and/or set display=False\n\n sim.run(n_trials=100) # run for a specified number of trials\n # NOTE: To quit midway, press Esc or close pygame window, or hit Ctrl+C on the command-line", "def simulation():\n#TODO: Handle errors.\n verbose_print(\"Start simulation attributes generation.\",1)\n generator = input_simulation_attributes_generator()\n verbose_print(\"Simulation attributes generated.\",1)\n Args.simulation_function(generator)", "def testSimParamsStored(self):\n params = self.tree.get_simulation_parameters()\n actual_sim_parameters = dict(\n seed=5,\n task=4,\n output_dir=\"output\",\n speciation_rate=0.1,\n sigma=4.0,\n tau=1.0,\n deme=0.25,\n sample_size=1.0,\n max_time=3600.0,\n dispersal_relative_cost=1.0,\n min_num_species=1,\n habitat_change_rate=0.0,\n gen_since_historical=0.0,\n time_config_file=\"null\",\n coarse_map_file=\"sample/SA_sample_coarse.tif\",\n coarse_map_x=35,\n coarse_map_y=41,\n coarse_map_x_offset=11,\n coarse_map_y_offset=14,\n coarse_map_scale=1.0,\n fine_map_file=\"sample/SA_sample_fine.tif\",\n fine_map_x=13,\n fine_map_y=13,\n fine_map_x_offset=0,\n fine_map_y_offset=0,\n sample_file=\"null\",\n grid_x=13,\n grid_y=13,\n sample_x=13,\n sample_y=13,\n sample_x_offset=0,\n sample_y_offset=0,\n historical_coarse_map=\"none\",\n historical_fine_map=\"none\",\n sim_complete=1,\n dispersal_method=\"normal\",\n m_probability=0.0,\n cutoff=0.0,\n landscape_type=\"closed\",\n protracted=0,\n min_speciation_gen=0.0,\n max_speciation_gen=0.0,\n dispersal_map=\"none\",\n )\n for key in params.keys():\n self.assertEqual(\n params[key],\n actual_sim_parameters[key],\n msg=\"Error in {}: {}!={}\".format(key, params[key], actual_sim_parameters[key]),\n )\n 
self.assertEqual(self.tree.get_job()[0], 5, msg=\"Seed not stored correctly.\")\n self.assertEqual(self.tree.get_job()[1], 4, msg=\"Job number not stored correctly.\")", "def testSimParamsStored(self):\n params = self.tree.get_simulation_parameters()\n actual_sim_parameters = dict(\n seed=7,\n task=4,\n output_dir=\"output\",\n speciation_rate=0.1,\n sigma=4.0,\n tau=1.0,\n deme=0.01,\n sample_size=1.5,\n max_time=3600.0,\n dispersal_relative_cost=1.0,\n min_num_species=1,\n habitat_change_rate=0.0,\n gen_since_historical=0.0,\n time_config_file=\"null\",\n coarse_map_file=\"sample/SA_sample_coarse.tif\",\n coarse_map_x=35,\n coarse_map_y=41,\n coarse_map_x_offset=11,\n coarse_map_y_offset=14,\n coarse_map_scale=1.0,\n fine_map_file=\"sample/SA_sample_fine.tif\",\n fine_map_x=13,\n fine_map_y=13,\n fine_map_x_offset=0,\n fine_map_y_offset=0,\n sample_file=\"null\",\n grid_x=13,\n grid_y=13,\n sample_x=13,\n sample_y=13,\n sample_x_offset=0,\n sample_y_offset=0,\n historical_coarse_map=\"none\",\n historical_fine_map=\"none\",\n sim_complete=1,\n dispersal_method=\"normal\",\n m_probability=0.0,\n cutoff=0.0,\n landscape_type=\"closed\",\n protracted=0,\n min_speciation_gen=0.0,\n max_speciation_gen=0.0,\n dispersal_map=\"none\",\n )\n for key in params.keys():\n self.assertEqual(\n params[key],\n actual_sim_parameters[key],\n msg=\"Error in {}: {}!={}\".format(key, params[key], actual_sim_parameters[key]),\n )\n self.assertEqual(self.tree.get_job()[0], 7, msg=\"Seed not stored correctly.\")\n self.assertEqual(self.tree.get_job()[1], 4, msg=\"Job number not stored correctly.\")", "def initialise_sim(self):\n pass", "def testSimParamsStored(self):\n params = self.tree.get_simulation_parameters()\n actual_sim_parameters = dict(\n seed=3,\n task=3,\n output_dir=\"output\",\n speciation_rate=0.1,\n sigma=4.0,\n tau=4.0,\n deme=1,\n sample_size=0.1,\n max_time=2.0,\n dispersal_relative_cost=1.0,\n min_num_species=1,\n habitat_change_rate=0.0,\n gen_since_historical=0.0,\n time_config_file=\"null\",\n coarse_map_file=\"none\",\n coarse_map_x=13,\n coarse_map_y=13,\n coarse_map_x_offset=0,\n coarse_map_y_offset=0,\n coarse_map_scale=1.0,\n fine_map_file=\"sample/SA_sample_fine.tif\",\n fine_map_x=13,\n fine_map_y=13,\n fine_map_x_offset=0,\n fine_map_y_offset=0,\n sample_file=\"null\",\n grid_x=13,\n grid_y=13,\n sample_x=13,\n sample_y=13,\n sample_x_offset=0,\n sample_y_offset=0,\n historical_coarse_map=\"none\",\n historical_fine_map=\"none\",\n sim_complete=1,\n dispersal_method=\"normal\",\n m_probability=0.0,\n cutoff=0.0,\n landscape_type=\"closed\",\n protracted=0,\n min_speciation_gen=0.0,\n max_speciation_gen=0.0,\n dispersal_map=\"none\",\n )\n for key in params.keys():\n self.assertEqual(params[key], actual_sim_parameters[key], msg=\"Error in {}\".format(key))\n self.assertEqual(self.tree.get_job()[0], 3)\n self.assertEqual(self.tree.get_job()[1], 3)", "def test_simulation(self):\n\t\tprint \"Simulation is being tested\"\n\n\t\tif toggles.DEBUG_FLAG:\n\t\t\tprint \"Debug Flag Set!\"\n\t\t\tprint self.getConfig()\n\n\t\tif toggles.PACKING:\n\t\t\ttoggles.OUTPUT_PATH = toggles.OUTPUT_PATH+toggles.RUN_NAME+'/'\n\t\t\tpackageMaker(toggles.OUTPUT_PATH,self.getConfig())\n\t\tif toggles.IDEAL_GRID:\n\t\t\tself.consensusGrid()\n\n\t\tif toggles.REAL_DATA:\n\t\t\tsampleData = self.load_data()\n\t\t\tif toggles.RUN_DATA_STATS:\n\t\t\t\tself.output_data_stats(sampleData)\n\t\t\t\tself.reset_database()\n\t\t\tif 
toggles.RUN_AVERAGE_COST:\n\t\t\t\tself.sim_average_cost(sampleData)\n\t\t\t\tself.reset_database()\n\t\t\tif toggles.RUN_SINGLE_PAIR:\n\t\t\t\tself.sim_single_pair_cost(sampleData, pending_eddy(self.pick_worker([0], [0])))\n\t\t\t\tself.reset_database()\n\t\telse:\n\t\t\tsampleData = {}\n\t\t\tsyn_load_data()\n\n\t\tif toggles.RUN_ITEM_ROUTING and not (toggles.RUN_TASKS_COUNT or toggles.RUN_MULTI_ROUTING):\n\t\t\tif toggles.DEBUG_FLAG:\n\t\t\t\tprint \"Running: item Routing\"\n\t\t\tself.run_sim(deepcopy(sampleData))\n\t\t\tself.reset_database()\n\n\t\tif PRED_SCORE_COUNT and not (RUN_TASKS_COUNT or RUN_MULTI_ROUTING):\n\t\t\tif DEBUG_FLAG:\n\t\t\t\tprint \"Running: Pred Score count\"\n\t\t\tself.run_sim(sampleData)\n\t\t\tself.reset_database()\n\n\n\n\t\tif toggles.COUNT_TICKETS and not (toggles.RUN_TASKS_COUNT or toggles.RUN_MULTI_ROUTING):\n\t\t\tif toggles.DEBUG_FLAG:\n\t\t\t\tprint \"Running: ticket counting\"\n\t\t\tself.run_sim(deepcopy(sampleData))\n\t\t\tself.reset_database()\n\n\t\tif toggles.SELECTIVITY_GRAPH and not (toggles.RUN_TASKS_COUNT or toggles.RUN_MULTI_ROUTING):\n\t\t\tif toggles.DEBUG_FLAG:\n\t\t\t\tprint \"Running: selectivity amounts over time\"\n\t\t\tself.run_sim(sampleData)\n\t\t\tself.reset_database()\n\n\t\t#____FOR LOOKING AT ACCURACY OF RUNS___#\n\t\tif toggles.TEST_ACCURACY and toggles.REAL_DATA:\n\t\t\tcorrectAnswers = self.get_correct_answers(toggles.INPUT_PATH + toggles.ITEM_TYPE + '_correct_answers.csv')\n\t\t\tpassedItems = self.get_passed_items(correctAnswers)\n\n\n\t\tif toggles.RUN_OPTIMAL_SIM:\n\t\t\tcountingArr=[]\n\t\t\tself.reset_database()\n\t\t\tfor i in range(toggles.NUM_SIM):\n\t\t\t\tprint \"running optimal_sim \" +str(i)\n\t\t\t\tself.num_tasks = self.optimal_sim(sampleData)\n\t\t\t\tcountingArr.append(self.num_tasks)\n\t\t\t\tself.reset_database()\n\t\t\tdest = toggles.OUTPUT_PATH+toggles.RUN_NAME+'_optimal_tasks'\n\t\t\tgeneric_csv_write(dest+'.csv',[countingArr])\n\t\t\tif toggles.DEBUG_FLAG:\n\t\t\t\tprint \"Wrote File: \" + dest+'.csv'\n\n\n\n\t\tif toggles.RUN_TASKS_COUNT or toggles.RUN_MULTI_ROUTING or toggles.RUN_CONSENSUS_COUNT:\n\t\t\tif toggles.RUN_TASKS_COUNT:\n\t\t\t\t#print \"Running: task_count\"\n\t\t\t\t#f = open(toggles.OUTPUT_PATH + toggles.RUN_NAME + '_tasks_count.csv', 'a')\n\t\t\t\t#f1 = open(toggles.OUTPUT_PATH + toggles.RUN_NAME + '_incorrect_count.csv', 'a')\n\n\t\t\t\tif toggles.GEN_GRAPHS:\n\t\t\t\t\toutputArray = []\n\n\t\t\trunTasksArray = []\n\t\t\tgoodArray, badArray = [], []\n\t\t\tgoodPoints, badPoints = [], []\n\t\t\taccCount = []\n\t\t\tlocArray = [[],[],[],[]]\n\n\t\t\tfor i in range(toggles.NUM_SIM):\n\t\t\t\tprint \"running simulation \" + str(i+1)\n\t\t\t\tself.run_sim(deepcopy(sampleData))\n\t\t\t\trunTasksArray.append(self.num_tasks)\n\n\t\t\t\t#____FOR LOOKING AT ACCURACY OF RUNS___#\n\t\t\t\tif toggles.TEST_ACCURACY and toggles.REAL_DATA:\n\t\t\t\t\tnum_incorrect = self.final_item_mismatch(passedItems)\n\t\t\t\t\taccCount.append(num_incorrect)\n\t\t\t\tif toggles.RUN_CONSENSUS_COUNT or toggles.VOTE_GRID:\n\t\t\t\t\tdonePairs = IP_Pair.objects.filter(Q(num_no__gt=0)|Q(num_yes__gt=0))\n\t\t\t\t\tif toggles.TEST_ACCURACY:\n\t\t\t\t\t\tgoodPairs, badPairs = [], []\n\t\t\t\t\t\tfor pair in donePairs:\n\t\t\t\t\t\t\tval = bool((pair.num_yes-pair.num_no)>0)\n\t\t\t\t\t\t\tif toggles.REAL_DATA:\n\t\t\t\t\t\t\t\tcorrect = ((correctAnswers[(pair.item,pair.predicate)]) == val)\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tcorrect = (pair.true_answer == val)\n\t\t\t\t\t\t\tif 
correct:\n\t\t\t\t\t\t\t\tgoodArray.append(pair.num_no+pair.num_yes)\n\t\t\t\t\t\t\t\tgoodPoints.append((pair.num_no,pair.num_yes))\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tbadArray.append(pair.num_no+pair.num_yes)\n\t\t\t\t\t\t\t\tbadPoints.append((pair.num_no,pair.num_yes))\n\t\t\t\t\telse:\n\t\t\t\t\t\tfor pair in donePairs:\n\t\t\t\t\t\t\tgoodArray.append(pair.num_no + pair.num_yes)\n\t\t\t\t\t\t\tgoodPoints.append((pair.num_no,pair.num_yes))\n\n\t\t\t\t\t#print \"This is number of incorrect items: \", num_incorrect\n\n\t\t\t\tself.reset_database()\n\n\t\t\tif toggles.RUN_TASKS_COUNT:\n\t\t\t\tgeneric_csv_write(toggles.OUTPUT_PATH+toggles.RUN_NAME+'_tasks_count.csv',[runTasksArray])\n\t\t\t\tif toggles.DEBUG_FLAG:\n\t\t\t\t\tprint \"Wrote File: \" + toggles.OUTPUT_PATH + toggles.RUN_NAME + '_tasks_count.csv'\n\t\t\t\tif toggles.GEN_GRAPHS:\n\t\t\t\t\tif len(runTasksArray)>1:\n\t\t\t\t\t\tdest = toggles.OUTPUT_PATH + toggles.RUN_NAME + '_tasks_count.png'\n\t\t\t\t\t\ttitle = toggles.RUN_NAME + ' Cost distribution'\n\t\t\t\t\t\thist_gen(runTasksArray, dest, labels = ('Cost','Frequency'), title = title)\n\t\t\t\t\t\tif toggles.DEBUG_FLAG:\n\t\t\t\t\t\t\tprint \"Wrote File: \" + dest\n\t\t\t\t\telif toggles.DEBUG_FLAG:\n\t\t\t\t\t\tprint \"only ran one sim, not running hist_gen\"\n\n\t\t\tif toggles.RUN_MULTI_ROUTING:\n\t\t\t\t\tdest = toggles.OUTPUT_PATH + toggles.RUN_NAME + '_Eddy_sys_' + str(toggles.EDDY_SYS) + '_multi_routing.png'\n\t\t\t\t\ttitle = toggles.RUN_NAME + ' Average Predicate Routing'\n\t\t\t\t\tquestions = toggles.CHOSEN_PREDS\n\t\t\t\t\tarrayData = []\n\t\t\t\t\tfor i in range(len(questions)):\n\t\t\t\t\t\tarrayData.append([])\n\t\t\t\t\tfor routingL in ROUTING_ARRAY:\n\t\t\t\t\t\tfor i in range(len(questions)):\n\t\t\t\t\t\t\tarrayData[i].append(routingL[i])\n\t\t\t\t\tmrsavefile = open(toggles.OUTPUT_PATH+toggles.RUN_NAME+'_multi_routing.csv','w')\n\t\t\t\t\tmrwriter = csv.writer(mrsavefile)\n\t\t\t\t\tmrwriter.writerow(questions)\n\t\t\t\t\tfor row in arrayData:\n\t\t\t\t\t\tmrwriter.writerow(row)\n\t\t\t\t\tmrsavefile.close()\n\t\t\t\t\tif toggles.DEBUG_FLAG:\n\t\t\t\t\t\tprint \"Wrote File: \"+toggles.OUTPUT_PATH+toggles.RUN_NAME+'_multi_routing.csv'\n\t\t\t\t\tif toggles.GEN_GRAPHS:\n\t\t\t\t\t\tstats_bar_graph_gen(arrayData, questions, dest, labels = ('Predicate','# of Items Routed'), title = title)\n\t\t\t\t\t\tif toggles.DEBUG_FLAG:\n\t\t\t\t\t\t\tprint \"Wrote File: \" + toggles.OUTPUT_PATH+toggles.RUN_NAME+'_multi_routing.png'\n\t\t\tif toggles.ACCURACY_COUNT:\n\t\t\t\tdest = toggles.OUTPUT_PATH+toggles.RUN_NAME+'_acc_count'\n\t\t\t\tgeneric_csv_write(dest+'.csv',[accCount])\n\t\t\t\tif toggles.GEN_GRAPHS:\n\t\t\t\t\thist_gen(accCount, dest+'.png')\n\n\t\t\tif toggles.RUN_CONSENSUS_COUNT:\n\t\t\t\tdest = toggles.OUTPUT_PATH + toggles.RUN_NAME+'_consensus_count'\n\t\t\t\tif len(goodArray)>1:\n\t\t\t\t\tif len(badArray) == 0:\n\t\t\t\t\t\tgeneric_csv_write(dest+'.csv',[goodArray])\n\t\t\t\t\t\t#print goodArray\n\t\t\t\t\telse:\n\t\t\t\t\t\tgeneric_csv_write(dest+'.csv',[goodArray,badArray])\n\t\t\t\t\t\t#print goodArray,badArray\n\t\t\t\t\tif toggles.DEBUG_FLAG:\n\t\t\t\t\t\tprint \"Wrote File: \" + dest + '.csv'\n\t\t\t\t\tif toggles.GEN_GRAPHS:\n\t\t\t\t\t\ttitle = 'Normalized Distribution of Tasks before Consensus'\n\t\t\t\t\t\tlabels = ('Number of Tasks', 'Frequency')\n\t\t\t\t\t\tif len(badArray) < 2:\n\t\t\t\t\t\t\thist_gen(goodArray, dest+'.png',labels=labels,title=title)\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tleg = ('Correctly Evaluated IP pairs','Incorrectly 
Evaluated IP pairs')\n\t\t\t\t\t\t\tmulti_hist_gen([goodArray,badArray],leg,dest+'.png',labels=labels,title=title)\n\t\t\t\telif toggles.DEBUG_FLAG:\n\t\t\t\t\tprint \"only ran one sim, ignoring results\"\n\t\t\tif toggles.VOTE_GRID:\n\t\t\t\tdest = toggles.OUTPUT_PATH + toggles.RUN_NAME+'_vote_grid'\n\t\t\t\tif len(goodPoints)>1:\n\t\t\t\t\tif len(badPoints)==0:\n\t\t\t\t\t\tgeneric_csv_write(dest+'.csv',goodPoints)\n\t\t\t\t\telse:\n\t\t\t\t\t\tgeneric_csv_write(dest+'_good.csv',goodPoints)\n\t\t\t\t\t\tgeneric_csv_write(dest+'_bad.csv',badPoints)\n\t\t\t\t\tif toggles.GEN_GRAPHS:\n\t\t\t\t\t\ttitle = \"Vote Grid Graph\"\n\t\t\t\t\t\tlabels = (\"Number of No Votes\",\"Number of Yes Votes\")\n\t\t\t\t\t\tif len(badPoints)==0:\n\t\t\t\t\t\t\txL,yL=zip(*goodPoints)\n\t\t\t\t\t\t\tline_graph_gen(xL,yL,dest+'.png',title=title,labels=labels,scatter=True,square=True)\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tgX,gY = zip(*goodPoints)\n\t\t\t\t\t\t\tbX,bY = zip(*badPoints)\n\t\t\t\t\t\t\tmulti_line_graph_gen((gX,bX),(gY,bY),('Correct','Incorrect'),dest+'_both.png',title=title,labels=labels,scatter=True,square=True)\n\t\t\t\t\t\t\tline_graph_gen(gX,gY,dest+'_good.png',title=title+\" goodPoints\",labels=labels,scatter=True,square=True)\n\t\t\t\t\t\t\tline_graph_gen(bX,bY,dest+'_bad.png',title=title+\" badPoints\",labels=labels,scatter=True,square=True)\n\t\tif toggles.TIME_SIMS:\n\t\t\tself.timeRun(sampleData)\n\n\t\tif toggles.RUN_ABSTRACT_SIM:\n\t\t\tself.abstract_sim(sampleData, toggles.ABSTRACT_VARIABLE, toggles.ABSTRACT_VALUES)" ]
[ "0.688976", "0.6853158", "0.681872", "0.67933166", "0.66235477", "0.65973157", "0.6543065", "0.65319335", "0.65228367", "0.64699984", "0.64614207", "0.6456239", "0.64392173", "0.64163697", "0.63972443", "0.6383207", "0.63710415", "0.6326187", "0.632123", "0.6271647", "0.6254463", "0.62337005", "0.622191", "0.6216871", "0.62017983", "0.6201399", "0.61864257", "0.61799115", "0.6174716", "0.6166085" ]
0.85764873
0
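A minimal runnable sketch of the save-then-run pattern in the positive document above. The Simulation stand-in and the 'speed' parameter are invented here purely for illustration and are not part of the dataset; only the startSimulation structure mirrors the original.

class Simulation:
    # stand-in for the real simulation object; assumed interface only
    def main(self):
        print("simulation running")

class Tab:
    def __init__(self):
        self.simArgs = {}
        self.simulation = Simulation()

    def saveParameters(self):
        # snapshot the current parameter values before the run starts
        self.simArgs['speed'] = 1.0

    def startSimulation(self):
        self.saveParameters()   # persist parameters first
        self.simulation.main()  # then launch the run

Tab().startSimulation()  # prints: simulation running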
Initializes the superclass and adds the height and width scales for the Rectangle.
def initialize(self):
    super(RectTab, self).initialize()
    # special tkinter variables that will be changed with the scales
    self.width = tk.IntVar()
    self.height = tk.IntVar()
    # make width scale
    self.widthScale = tk.Scale(self, from_=1, to=5, orient=tk.HORIZONTAL,
                               label='Width', resolution=1,
                               variable=self.width, command=self.updateSize)
    self.widthScale.grid(column=2, row=6, columnspan=1, sticky='W' + 'E')
    self.widthScale.set(2)
    # make height scale
    self.heightScale = tk.Scale(self, from_=1, to=5, orient=tk.HORIZONTAL,
                                label='Height', resolution=1,
                                variable=self.height, command=self.updateSize)
    self.heightScale.grid(column=2, row=7, columnspan=1, sticky='W' + 'E')
    self.heightScale.set(2)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, height, width):\n\n\t\t# _width and _height are internal (private) Rectangle Instance's attributes. This is something\n\t\t# We keep to ourselves to make sure the User can't just update these attrs randomly and also\n\t\t# so that the code has backward compatibility.\n\t\tself._width = None\n\t\tself._height = None\n\n\t\t# Lets now use the SETTER Method the width and height of the newly initialised Rectangle Class\n\t\tself.width = width\n\t\tself.height = height", "def __init__(self, width, height):\n super(GRect, self).__init__()\n self.__init__(0, 0, width, height)", "def __init___(self, x, y, width, height):\n super(GRect, self).__init__()\n frameWidth = width\n frameHeight = height\n setLocation(x, y)", "def update(self, *args, **kwargs):\n if args and len(args) > 0:\n if len(args) == 1:\n Base.__init__(self, args[0])\n elif len(args) == 2:\n Base.__init__(self, args[0])\n self.__width = args[1]\n elif len(args) == 3:\n Base.__init__(self, args[0])\n self.__width = args[1]\n self.__height = args[2]\n elif len(args) == 4:\n Base.__init__(self, args[0])\n self.__width = args[1]\n self.__height = args[2]\n self.__x = args[3]\n elif len(args) == 5:\n Base.__init__(self, args[0])\n self.__width = args[1]\n self.__height = args[2]\n self.__x = args[3]\n self.__y = args[4]\n else:\n for key, value in kwargs.items():\n if key == 'width':\n Rectangle.width.__set__(self, value)\n elif key == 'height':\n Rectangle.height.__set__(self, value)\n elif key == 'x':\n Rectangle.x.__set__(self, value)\n elif key == 'y':\n Rectangle.y.__set__(self, value)\n elif key == 'id':\n Base.__init__(self, value)", "def __init__(self, width, height, x=0, y=0, id=None):\n super().__init__(id)\n self.width = width\n self.height = height\n self.x = x\n self.y = y", "def __init__(self, width, height, x=0, y=0, id=None):\n super().__init__(id)\n self.width = width\n self.height = height\n self.x = x\n self.y = y", "def __init__(self, width, height, x=0, y=0, id=None):\n self.width = width\n self.height = height\n self.x = x\n self.y = y\n super().__init__(id)", "def __init__(self, width, height, x=0, y=0, id=None):\n self.width = width\n self.height = height\n self.x = x\n self.y = y\n super().__init__(id)", "def __init__(self, width, height, x=0, y=0, id=None):\n self.width = width\n self.height = height\n self.x = x\n self.y = y\n super().__init__(id)", "def __init__(self, width, height, x=0, y=0, id=None):\n\n super().__init__(id)\n self.width = width\n self.height = height\n self.x = x\n self.y = y", "def __init__(self, width, height):\n self.integer_validator(\"width\", width)\n self.__width = width\n self.integer_validator(\"height\", height)\n self.__height = height", "def __init__(self, width, height):\n self.integer_validator(\"width\", width)\n self.integer_validator(\"height\", height)\n self.__width = width\n self.__height = height", "def __init__(self, width, height, x=0, y=0, id=None):\n Base.__init__(self, id)\n self.width = width\n self.height = height\n self.x = x\n self.y = y", "def set_size(self, width, height):\r\n \r\n self.image = pygame.transform.scale(self.image, (width, height))\r\n self.rect = self.image.get_rect()", "def __init__(self, scale=False):\n self.scale = scale", "def __init__(self, min_x, min_y, max_x=0, max_y=0,\n width=0, height=0):\n self.min_x = min_x\n self.min_y = min_y\n if width > 0:\n self.max_x = min_x + width\n else:\n self.max_x = max_x\n if height > 0:\n self.max_y = min_y + height\n else:\n self.max_y = max_y", "def __init__(self,scale):\n 
self.scale = scale", "def __init__(self, width, height, x, y):\n self.w = width\n self.h = height\n self.x = x\n self.y = y", "def __init__(self, y: int):\n self.y = y\n self.x = 0\n self.first_x = 0\n self.second_x = Base.Width\n self.rect = pygame.Rect(self.x, self.y, Base.Width, Base.Height)", "def __init__(self, size, x=0, y=0, id=None):\n super().__init__(size, size, x, y, id)\n self.size = size", "def __init__(self, width, height):\n self.width = width\n self.height = height\n self.pos_x = START_X\n self.pos_y = START_Y\n self.col_d = False\n self.col_l = False\n self.col_r = False", "def __init__(self, width, height, x=0, y=0, id=None):\n __dict_args = {\"width\": width, \"height\": height, \"x\": x, \"y\": y}\n self.input_validator(__dict_args)\n self.__width = width\n self.__height = height\n self.__x = x\n self.__y = y\n super().__init__(id)", "def __init__(self) -> None:\n super().__init__()\n self.dimensions = 2", "def __init__(self, dim, rect, scale=4):\n self.surface = pygame.Surface(dim)\n self.rect = rect\n self.width = dim[0] // scale\n self.height = dim[1] // scale\n self.scale = scale\n # initialize values\n # scaled down surface\n self.drawsurface = pygame.Surface((self.width, self.height))\n self.drawsurface.fill((0, 0, 0))\n self.array2d = None\n self.fire = None\n self.palette = None\n self.initialize()", "def __init__(self, xRange, yData, max_width, min_width, max_gap):\n\n super(Classic, self).__init__(xRange, yData)\n self.max_width = max_width\n self.min_width = min_width\n self.max_gap = max_gap", "def __init__(self):\n\n # Call the parent class (sprite) constructor\n super().__init__()\n # Create image of block and fill with color.\n self.image = pygame.Surface([20, 20])\n self.image.fill(BLACK)\n\n # Fetch rectangle object that has dimensions of image. Update position of object by setting values\n # of rect.x and rect.y\n self.rect = self.image.get_rect()", "def __init__(self, dimensions, lower=0.0, upper=1.0, name=None):\n super().__init__((dimensions,), name=name)\n self.dimensions = dimensions\n self.lower = lower\n self.upper = upper", "def __init__(self, *args):\n _snap.TFltRect_swiginit(self, _snap.new_TFltRect(*args))", "def __init__(self, size, x=0, y=0, id=None):\n\n super().__init__(size, size, x, y, id)", "def __init__(self, size, x=0, y=0, id=None):\n super().__init__(size, size, x, y, id)" ]
[ "0.78380597", "0.7689887", "0.7388933", "0.6878956", "0.6873641", "0.6873641", "0.68665445", "0.68665445", "0.68665445", "0.686135", "0.68408537", "0.68150795", "0.68055373", "0.67979264", "0.6774993", "0.67658263", "0.67010826", "0.6605518", "0.6600063", "0.6598783", "0.6586661", "0.65571797", "0.65363574", "0.6528782", "0.65227526", "0.6517133", "0.6514371", "0.64839536", "0.6466819", "0.64395624" ]
0.8060661
0
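The widget wiring in the positive document above can be exercised on its own. A minimal sketch assuming only the standard-library tkinter; the grid coordinates are arbitrary, and the updateSize callback is omitted since it lives in the original class.

import tkinter as tk

root = tk.Tk()
width = tk.IntVar()
# a horizontal Scale bound to an IntVar, mirroring widthScale above
widthScale = tk.Scale(root, from_=1, to=5, orient=tk.HORIZONTAL,
                      label='Width', resolution=1, variable=width)
widthScale.grid(column=2, row=6, sticky='W' + 'E')
widthScale.set(2)         # moves the slider and writes the linked IntVar
root.update_idletasks()
print(width.get())        # -> 2
# root.mainloop()         # uncomment to actually display the widget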
Saves superclass parameters as well as height and width. Also initializes the simulation if not already created, or updates it otherwise.
def saveParameters(self):
    super(RectTab, self).saveParameters()
    self.simArgs['width'] = self.width.get()
    self.simArgs['height'] = self.height.get()
    # updates simulation if it exists
    # makes one if it doesn't
    try:
        self.simulation.update(**self.kwargs)
    except AttributeError:
        self.simulation = rect.RectTable(**self.simArgs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def saveParameters(self):\n super(BuminTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = Buminovich.Buminovich(**self.simArgs)", "def saveParameters(self):\n super(CircTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = circle.CircleTable(**self.simArgs)", "def saveParameters(self):\n super(LTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = Ltab.LTable(**self.simArgs)", "def saveParameters(self):\n super(LorentzTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = Lorentz.Lorentz(**self.simArgs)", "def saveParameters(self):\n # save current scale values into the ball state for the current ball\n x = self.initialXScale.get()\n y = self.initialYScale.get()\n xVel = self.initialXVelScale.get()\n yVel = self.initialYVelScale.get()\n self.ballStates[self.currentBall] = [x, y, xVel, yVel]\n # set new currentBall if changed\n self.currentBall = int(self.ballSelector.get())\n # the states of all the balls to be simulated\n self.simArgs['balls']=self.ballStates\n self.simArgs['playbackSpeed'] = self.playbackSpeedScale.get()\n self.simArgs['trace'] = self.toTrace.get()\n self.simArgs['friction'] = self.friction.get()\n # get number of balls from formation string\n self.simArgs['nBalls'] = self.nBalls\n # for s in self.numberOfBallsSelector.get().split():\n # if s.isdigit():\n # self.simArgs['nBalls']=int(s)", "def __init__(self, screen_size, grid_size):\n super(MainScreen, self).__init__(screen_size)\n self.gamegrid = QuadraticGrid(grid_size[0], grid_size[1])\n self.grid_width = grid_size[0]\n self.grid_height = grid_size[1]\n self.block_width = screen_size[0] / grid_size[0]\n self.block_height = screen_size[1] / grid_size[1]\n print str(self.block_width) + \" \" + str(self.block_height)\n \n self.game_model = GameModel(grid_size)\n self.dragon_group = pygame.sprite.Group()\n self.gun_group = pygame.sprite.Group()\n self.hat_group = pygame.sprite.Group()", "def __init__(self):\n\n self.width = 10\n self.height = 10\n self.new_game()", "def _save_size(self):\n if self.width_key is not None:\n (width, height) = self.window.get_size()\n config.set(self.width_key, width)\n config.set(self.height_key, height)\n config.save()", "def save(self, path: str) -> None:\n torch.save({\"state_dict\": self.state_dict(),\n \"data\": self._get_constructor_parameters()}, path)", "def __init__(self, width, height):\n Game.__init__(self, width, height)", "def attributes_to_save(self):\r\n return ['initial_pos', 'height', 'width']", "def startSimulation(self):\n self.saveParameters()\n self.simulation.main()", "def __init__(self, game_size):\n\n self.game_size = game_size\n self.screen_size = 10 * self.game_size, 20 * self.game_size\n self.game_board = GameBoard()\n self.background_color = 55, 55, 40\n self.tetrino_set = dict()\n self.tetrino_id = 1\n self.score = 0", "def __init__(self, width, height, rand_seed=None, filepath=None):\r\n self.height = height\r\n self.width = width\r\n\r\n if rand_seed:\r\n seed(rand_seed)", "def save(self, *args, **kwargs):\n self.small_particles = 
(int(self.dylos_bin_1) +\n int(self.dylos_bin_2) + int(self.dylos_bin_3))\n self.big_particles = int(self.dylos_bin_4)\n super(Dylos, self).save(*args, **kwargs)", "def __init__(self) -> None:\n super().__init__()\n self.dimensions = 2", "def __init__(self, name=None, params=None, params_from_file=False, params_from_user=False):\n\n print(\"\")\n if name:\n self._name = name\n else:\n self._name = input(\"Simulation Name : \")\n\n print(\"Name : \"+str(self._name))\n\n self.plot_path = os.getcwd()+'/session/'+self._name+'_plots/'\n try:\n os.mkdir(self.plot_path)\n except (FileExistsError, FileNotFoundError):\n beep = lambda x: os.system(\"echo '\\a';sleep 0.5;\" * x)\n beep(1)\n print(\"WARNING : FOLDER PATH ALREADY EXISTS\")\n print(self.plot_path)\n print(\"WRITING OVER\")\n for fn in os.listdir(self.plot_path):\n os.remove(self.plot_path+fn)\n\n if params:\n self.params = params\n else:\n if params_from_file:\n self.params = load_input_pickle(params_from_file)\n elif params_from_user:\n self.params = get_user_params()\n else:\n #Define default params\n self.params = load_input_pickle('default')\n\n self.default_runs = [] # array of simulation runs with default parameters\n self.mod_runs = [] # array of tuples that contain 0) a list of simulation runs\n # and 1) a dictionary clarifying which parameter was given\n # which value for each run. (for convenience, can also\n # determine by comparing the simulation_run.params\n # directly\n\n\n print(\"Running Model with Default Parameters...\")\n self.run_default()\n print(\"\")", "def set_parameters(self, population_size=540, extern_arc_rate=2.6, pbest_factor=0.11, hist_mem_size=6, **kwargs):\n super().set_parameters(population_size=population_size,\n individual_type=kwargs.pop('individual_type', SolutionSHADE), **kwargs)\n self.extern_arc_rate = extern_arc_rate\n self.pbest_factor = pbest_factor\n self.hist_mem_size = hist_mem_size", "def setup_scene(self):\n\n # read map\n options, landscapes, statics, dynamics, trees, hero, hare = read_map('test.map')\n self.num_of_blocks_X, self.num_of_blocks_Y = options['size']\n with self.canvas:\n # init landscapes\n block_x = 0\n for i in xrange(self.num_of_blocks_X):\n block_y = 0\n for j in xrange(self.num_of_blocks_Y):\n class_name = landscapes[i][j]\n if class_name is not None:\n clazz = eval(class_name.capitalize())\n else:\n clazz = Grass\n block = clazz(pos=(block_x, block_y),\n size=(self.block_width, self.block_height), border=(0, 0))\n self.blocks[i][j] = block\n block_y += self.block_height \n block_x += self.block_width\n\n # init dynamics\n for x, y, class_name in dynamics:\n if 'dynamics_as_blocks' in options and options['dynamics_as_blocks']:\n x, y = (x + 0.5) * self.block_width, (y + 0.5) * self.block_height\n eval(class_name.capitalize())(x, y)\n \n with self.canvas:\n # draw or hero\n HeroRabbit(BLOCK_SIZE[0]*(hero[0] + 0.5), BLOCK_SIZE[1]*(hero[1] + 0.5))\n Hare(BLOCK_SIZE[0]*(hare[0] + 0.5), BLOCK_SIZE[1]*(hare[1] + 0.5))\n\n # init statics\n def _is_mountain(i, j):\n return int(0 <= i < self.num_of_blocks_X and 0 <= j <= self.num_of_blocks_Y and\n statics[i][j] == 'mountain')\n\n def _get_mountain_type(i, j):\n opensides = (_is_mountain(i - 1, j), _is_mountain(i, j + 1),\n _is_mountain(i + 1, j), _is_mountain(i, j - 1)) # left, top, right, bottom\n opensides_to_type = {\n (1, 1, 1, 1): 'center',\n (1, 0, 1, 0): 'horizontal_center',\n (0, 1, 0, 1): 'vertical_center',\n (1, 0, 0, 0): 'horizontal_right',\n (0, 1, 0, 0): 'vertical_bottom',\n (0, 0, 1, 0): 'horizontal_left',\n (0, 0, 
0, 1): 'vertical_top',\n }\n return opensides_to_type.get(opensides, 'horizontal_center')\n \n _mountains = []\n _bushes= []\n \n for i in xrange(self.num_of_blocks_X):\n for j in xrange(self.num_of_blocks_Y):\n class_name = statics[i][j]\n if class_name is not None:\n pos = (i + 0.5) * self.block_width, (j + 0.5) * self.block_height\n if class_name == 'bush':\n #Bush(*pos)\n _bushes.append(pos)\n elif class_name == 'mountain':\n _mountains.append((pos, _get_mountain_type(i, j)))\n #Mountain(*pos, type=_get_mountain_type(i, j))\n \n for tree_pos in trees:\n Tree(BLOCK_SIZE[0]*(tree_pos[0] + 0.5), BLOCK_SIZE[1]*(tree_pos[1] + 0.5))\n \n with self.canvas:\n for pos in _bushes:\n Bush(*pos)\n \n for pos, type in _mountains:\n Mountain(*pos, type=type)\n\n HolyCarrot(13.5*self.block_width, 7.5*self.block_height)\n # This should be called at the end\n self.reindex_graphics()", "def make_save(self):\n\t\tsave = {}\n\t\tsave['p'] = self.p\n\t\tsave['injail'] = self.injail.copy()\n\t\tsave['tile'] = self.tile.copy()\n\t\tsave['bal'] = self.bal.copy()\n\t\tsave['goojf'] = self.goojf.copy()\n\t\tsave['isalive'] = self.isalive.copy()\n\t\tsave['jailturn'] = self.jailturn.copy()\n\t\tsave['ownedby'] = self.ownedby.copy()\n\t\tsave['numhouse'] = self.numhouse.copy()\n\t\tsave['ismortgaged'] = self.ismortgaged.copy()\n\t\tsave['num'] = self.num\n\t\tsave['numalive'] = self.numalive\n\t\tsave['uid'] = self.uid.copy()\n\t\tsave['freeparkingsum'] = self.freeparkingsum\n\t\tself.autosave = save", "def save_config(self):\n self.config.app_w = self.width()\n self.config.app_h = self.height()\n self.config.splitter = self.splitter.saveState()\n self.config.save()", "def update(self, *args, **kwargs):\n if args and len(args) > 0:\n if len(args) == 1:\n Base.__init__(self, args[0])\n elif len(args) == 2:\n Base.__init__(self, args[0])\n self.__width = args[1]\n elif len(args) == 3:\n Base.__init__(self, args[0])\n self.__width = args[1]\n self.__height = args[2]\n elif len(args) == 4:\n Base.__init__(self, args[0])\n self.__width = args[1]\n self.__height = args[2]\n self.__x = args[3]\n elif len(args) == 5:\n Base.__init__(self, args[0])\n self.__width = args[1]\n self.__height = args[2]\n self.__x = args[3]\n self.__y = args[4]\n else:\n for key, value in kwargs.items():\n if key == 'width':\n Rectangle.width.__set__(self, value)\n elif key == 'height':\n Rectangle.height.__set__(self, value)\n elif key == 'x':\n Rectangle.x.__set__(self, value)\n elif key == 'y':\n Rectangle.y.__set__(self, value)\n elif key == 'id':\n Base.__init__(self, value)", "def __init__(self):\n self.robot = None\n self.humans = None\n self.global_time = None\n self.human_times = None\n # Simulation configuration\n self.config = None\n self.time_limit = None\n self.time_step = None\n self.end_on_collision = True\n self.side = None\n self.pixel_side = None\n self.closed = None\n self.goal_radius = None\n self.max_humans = None\n self.min_humans = None\n self.human_num_mode = None\n self.human_num = None\n self.perpetual = None\n self.rotate_path = None\n self.randomize_attributes = None\n self.square_width = None\n self.circle_radius = None\n # Reward function\n self.success_reward = None\n self.collision_penalty = None\n self.discomfort_dist = None\n self.discomfort_scale = None\n self.discomfort_penalty_factor = None\n self.group_discomfort_penalty = None\n self.time_penalty = None\n self.progress_reward = None\n self.initial_distance = None\n self.previous_distance = None\n # Internal environment configuration\n self.case_capacity = 
None\n self.case_size = None\n self.case_counter = None\n self.parallel = None\n self.max_tries = None\n self.train_val_sim = None\n self.test_sim = None\n # For visualization\n self.force_list = [\n \"desired_force\",\n \"social_force\",\n \"obstacle_force\",\n \"group_coherence_force\",\n \"group_repulsive_force\",\n \"group_gaze_force\",\n ] # TODO Configure this?\n self.forces = None\n self.states = None\n self.action_values = None\n self.attention_weights = None\n # For information return\n self.obs_history = np.array([])\n self.episode_info = dict()\n self.movie_file = \"\"\n\n self.scene_manager = None\n self.use_groups = None\n self.min_group_num = None\n self.max_group_num = None\n self.centralized_planning = None\n self.centralized_planner = None\n\n self.enable_intent = None\n self.intent_type = None\n\n self.obstacles = [] # xmin,xmax,ymin,ymax\n\n self.app = None", "def __init__(self, width, height, speed, change_rate, visibility):\n assert change_rate >= 0, \"Snell `change_rate` must be non-negative\"\n\n super().__init__(width=width, height=height, visibility=visibility, render_value=(235, 76, 52))\n self.speed = speed\n self._initial_speed = speed\n self.change_rate = change_rate", "def __init__(self, initial_x:int, initial_y:int, width:int, height:int, power_type:str, time_to_live:int, debug:bool = False):\n\n #Call the superclass contructor\n super().__init__(initial_x, initial_y, width, height, PowerUp.sprites[power_type], debug)\n\n #Store variables\n self.power_type = power_type\n self.ttl = time_to_live\n\n #Scale the image\n self.scale(30,30)", "def __init__(self, sim_size=20, test_size=0.25):\n super().__init__(sim_size, test_size)\n self.setting = 'n neighbors'\n self.leaf_size = 30\n self.p = 2\n self.metric = 'minkowski'", "def __call__(self):\n properties = dict(self.parameters['properties'], **self.properties)\n topo.sim[str(self)]=self.sheet_type(**dict(self.parameters,\n properties=properties))", "def __init__(self):\n super().__init__()\n self.dimensionality = 1\n self.distType = 'Continuous'", "def save_snapshot(self,save_dir):\n if not os.path.exists(save_dir):\n os.mkdir(save_dir)\n keys=[\n 'dna_size',\n 'pop_size',\n 'cross_rate',\n 'mutate_rate',\n 'eta_c',\n 'eta_m',\n 'mp_size',\n 'elitism',\n 'generations',\n 'pop',\n 'fitness',\n 'fitness_rank',\n 'mp',\n 'offspring',\n 'start_state',\n 'iters'\n ]\n running_states={}\n for key in keys:\n assert hasattr(self,key)\n running_states[key]=getattr(self,key)\n th.save(running_states,os.path.join(save_dir,'state_gen%d'%(self.iters)) )", "def make_simulation(self):\n pass" ]
[ "0.66938424", "0.6504182", "0.6381987", "0.6361401", "0.6132515", "0.6020849", "0.58820665", "0.5859305", "0.5638845", "0.56341285", "0.5596021", "0.5587229", "0.55682236", "0.5510473", "0.544595", "0.5442569", "0.5404173", "0.54035205", "0.5392418", "0.53837395", "0.53671896", "0.5366003", "0.5365159", "0.5344655", "0.5336357", "0.5331216", "0.53283584", "0.53195363", "0.5313747", "0.530198" ]
0.76072294
0
checks if the y position set will be outside the circle and will hold it at that point
def checkYPos(self, *args): x = self.initialXScale.get() y = self.initialYScale.get() if x ** 2 + y ** 2 > self.radius**2: if y > 0: self.initialYScale.set(np.sqrt(self.radius**2 - x ** 2)) else: self.initialYScale.set(-np.sqrt(self.radius**2 - x ** 2))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def check_boundary(self):\n\n\t\tif self.Bubble_initial_pos[0] <= self.Bubble_radius or self.Bubble_initial_pos[0] >= self.tk_pic.width - self.Bubble_radius:\n\t\t\tself.Bubble_vel[0] = -self.Bubble_vel[0]", "def point_inside_circle(x,y,center_x,center_y,radius):\n return (x-center_x)**2 + (y - center_y)**2 < radius**2", "def incircle(self,xpos,ypos,cellx,celly):\n xcell, ycell = self.getcellcenter(cellx,celly)\n if ((xpos - xcell)**2 + (ypos - ycell)**2) < self.crad2:\n return True\n return False\n\n return cellx, celly", "def isOutsideBorder(self):\n if (self.posX < -self.myGalaxy.worldWidth or self.posX > self.myGalaxy.worldWidth or\n self.posY < -self.myGalaxy.worldHeight or self.posY > self.myGalaxy.worldHeight):\n return 1\n return 0", "def joy_axis_y_up(event: EventType, widget: WidgetType) -> bool:\n return event.axis == JOY_AXIS_Y and event.value < -JOY_DEADZONE", "def check_pos(self, x, y):\n if x >= WINDOWWIDTH or y >= WINDOWHEIGHT or x <=0 or y <= 0:\n return True", "def outside(self, x, y):\n bbox = self.canvas.coords(self.container) # get image area\n if bbox[0] < x < bbox[2] and bbox[1] < y < bbox[3]:\n return False # point (x,y) is inside the image area\n else:\n return True # point (x,y) is outside the image area", "def outside(self, x, y):\n bbox = self.canvas_image.coords(self.container) # get image area\n if bbox[0] < x < bbox[2] and bbox[1] < y < bbox[3]:\n return False # point (x,y) is inside the image area\n else:\n return True # point (x,y) is outside the image area", "def isInCircle(self,x1,y1,radius1):\r\n if(distance(self.x,x1,self.y,y1) < (self.radius+radius1)):\r\n return True\r\n return False", "def joy_axis_y_down(event: EventType, widget: WidgetType) -> bool:\n return event.axis == JOY_AXIS_Y and event.value > JOY_DEADZONE", "def isOutside(self, point):\n return 1-self.isInside(point)", "def check_inside(self, pos):\n x,y = pos\n return x >= self.posx and x <= self.posx + self.sizex and y >= self.posy and y <= self.posy + self.sizey", "def in_circle(x0, y0, x, y, r):\n return ((x - x0) ** 2 + (y - y0) ** 2) <= (r ** 2)", "def get_y_position(self): \n return self.rect.y", "def _on_post(self, x, y):\n return x == self._x and (y < self._top or y > (3 - self._bottom))", "def is_inside(self, x: int, y: int) -> bool:\n pass", "def check_reached(self):\n m_x, m_y = self.destination.get_pos()\n m_radius = self.destination.radius\n distance_centre = math.sqrt((m_x - self.x)**2 + (m_y - self.y)**2)\n sum_radii = m_radius + self.radius\n if distance_centre < sum_radii:\n self.color = pygame.colordict.THECOLORS['green']\n self.has_reached = True", "def get_y(self):\n\t\treturn self._collision_rect.y + 25", "def is_point_within(self, x, y):\n return abs(x - self._x_position) <= self._x_length / 2 and abs(y - self._y_position) <= self._y_length / 2", "def test_check_y(self):\n r1 = Rectangle(10, 2)\n self.assertEqual(r1.y, 0)\n\n r2 = Rectangle(2, 10, 6, 4)\n self.assertEqual(r2.y, 4)\n\n r3 = Rectangle(5, 2, 3, 9, 12)\n self.assertEqual(r3.y, 9)\n\n r4 = Rectangle(5, 2, 3, 0, 12)\n self.assertEqual(r4.y, 0)", "def clicked(self, x_pos, y_pos):\n img = self.tower_imgs\n if self.x - img.get_width() // 2 + self.width >= x_pos >= self.x - img.get_width() // 2:\n if self.y + self.height - img.get_height() // 2 >= y_pos >= self.y - img.get_height() // 2:\n return True\n return False", "def _inside(self, x, y):\n wx, wy, w, h = self._raw_graph_window_dim()\n if wx <= x < wx + w and wy <= y < wy + h:\n return True\n return False", "def if_overlap(self, x, y) -> bool:\n if 
self.pos[y][x] != '-':\n print('此坐标已有棋子,请仔细观察棋盘')\n return True\n return False", "def parse_area(x,y):\r\n # if (x,y) in gone :\r\n # return False\r\n # print(\"!\",end=\"\")\r\n # made useless thanks to the loop's conditions\r\n gone.add((x,y))\r\n if (x,y) in pos_turtle.values() :\r\n return True\r\n else :\r\n for (i,j) in [(x-UNIT,y), (x+UNIT,y), (x,y-UNIT), (x,y+UNIT)] :\r\n if (i,j) in pos_tracker or (i,j) in gone or abs(i)>=RAY or abs(j)>=RAY :\r\n continue\r\n if parse_area(i,j) :\r\n return True\r\n return False", "def check_contained(self,x,y):\n if self.active:\n self.reset()\n #if in horizontal bounds\n if x > self.left and x < self.right:\n slope = 1/sqrt(3)\n #use to set create verticle bounds\n if x - self.center_x <= 0:\n slope *= -1\n\n ################\n x_rel = x - self.center_x #bounds depends on x location of the mouse \n bottom_bound = self.bottom - (x_rel*slope)\n top_bound = self.top - (x_rel*-slope)\n ################\n\n if y >= top_bound and y <= bottom_bound:\n if Ctrl_Vars.Left_MouseDown:\n self.press() # if all conditions are met use functionality", "def bounce_y(self):\n result = super(GPolygon, self).bounce_y()\n self.move_inc(self.move.x, self.move.y)\n return result", "def is_valid_move(self, side_color, x, y):\n return self.disc_place(side_color, x, y, check_only=True)", "def isinsidearcXY(c,p):\n\n x = c[0]\n r = c[1][0]\n if dist(x,p) > r:\n return False\n if iscircle(c):\n return True\n start = c[1][1]%360.0\n end = c[1][2]%360.0\n if end < start:\n end+= 360.0\n p2 = sub(p,x)\n ang = (atan2(p2[1],p2[0]) % pi2)*360/pi2\n\n if end <= 360.0:\n return (ang >= start and ang <= end)\n else:\n return ang >= start or ang <= (end-360.0)", "def y(self):\r\n return self.position.y", "def click_car(self, pos):\n a = self.h / 50\n x, y = pos\n if ((x > self.x) and (x < self.x + 260 * a) and (y > self.y - 40 * a)\n and (y < self.y + self.h + 25 * a)):\n self.x = randint(200, 500)\n self.y = randint(200, 500)\n self.h = randint(10, 50)\n self.dir = 1\n self.speed_x = randint(10, 200)\n return True\n else:\n return False" ]
[ "0.6409995", "0.64027065", "0.6278436", "0.62465423", "0.6142362", "0.6125373", "0.60288906", "0.6002736", "0.5950301", "0.59413105", "0.5895492", "0.5886205", "0.5884686", "0.5869564", "0.5853532", "0.58382905", "0.5837317", "0.58295673", "0.579188", "0.57895184", "0.5787874", "0.5775529", "0.57555676", "0.57551605", "0.5750643", "0.5745688", "0.57433224", "0.5725615", "0.5710039", "0.5706457" ]
0.72029173
0
checks if the x position set will be outside the circle and will hold it at that point
def checkXPos(self, *args): x = self.initialXScale.get() y = self.initialYScale.get() if x ** 2 + y ** 2 > self.radius**2: if x > 0: self.initialXScale.set(np.sqrt(self.radius**2 - y ** 2)) else: self.initialXScale.set(-np.sqrt(self.radius**2 - y ** 2))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def incircle(self,xpos,ypos,cellx,celly):\n xcell, ycell = self.getcellcenter(cellx,celly)\n if ((xpos - xcell)**2 + (ypos - ycell)**2) < self.crad2:\n return True\n return False\n\n return cellx, celly", "def point_inside_circle(x,y,center_x,center_y,radius):\n return (x-center_x)**2 + (y - center_y)**2 < radius**2", "def check_contained(self,x,y):\n if self.active:\n self.reset()\n #if in horizontal bounds\n if x > self.left and x < self.right:\n slope = 1/sqrt(3)\n #use to set create verticle bounds\n if x - self.center_x <= 0:\n slope *= -1\n\n ################\n x_rel = x - self.center_x #bounds depends on x location of the mouse \n bottom_bound = self.bottom - (x_rel*slope)\n top_bound = self.top - (x_rel*-slope)\n ################\n\n if y >= top_bound and y <= bottom_bound:\n if Ctrl_Vars.Left_MouseDown:\n self.press() # if all conditions are met use functionality", "def on_press(self, event):\n if event.inaxes is None:\n return\n mX = event.xdata\n mY = event.ydata\n index = None\n for i in range(len(self.x)):\n if self.is_inside(mX, mY, (self.x[i], self.y[i])):\n index = i\n break\n self.current_point = index", "def check_pos(self, x, y):\n if x >= WINDOWWIDTH or y >= WINDOWHEIGHT or x <=0 or y <= 0:\n return True", "def check_boundary(self):\n\n\t\tif self.Bubble_initial_pos[0] <= self.Bubble_radius or self.Bubble_initial_pos[0] >= self.tk_pic.width - self.Bubble_radius:\n\t\t\tself.Bubble_vel[0] = -self.Bubble_vel[0]", "def isInCircle(self,x1,y1,radius1):\r\n if(distance(self.x,x1,self.y,y1) < (self.radius+radius1)):\r\n return True\r\n return False", "def is_inside(self, x: int, y: int) -> bool:\n pass", "def check_inside(self, pos):\n x,y = pos\n return x >= self.posx and x <= self.posx + self.sizex and y >= self.posy and y <= self.posy + self.sizey", "def _inside(self, x, y):\n wx, wy, w, h = self._raw_graph_window_dim()\n if wx <= x < wx + w and wy <= y < wy + h:\n return True\n return False", "def isInside(self, point):\n # we rotate back the point to the frame parallel to the axis of the ellipse\n rotatedPoint = self.rotatePoint(point)\n # we check if each point is inside the associated liquid drop\n return ((rotatedPoint[:, :, 0]/self.axisA[:, None])**2 + (rotatedPoint[:, :, 1]/self.axisB[:, None])**2 < 1)", "def clicked(self, x_pos, y_pos):\n img = self.tower_imgs\n if self.x - img.get_width() // 2 + self.width >= x_pos >= self.x - img.get_width() // 2:\n if self.y + self.height - img.get_height() // 2 >= y_pos >= self.y - img.get_height() // 2:\n return True\n return False", "def joy_axis_x_left(event: EventType, widget: WidgetType) -> bool:\n return event.axis == JOY_AXIS_X and event.value < -JOY_DEADZONE", "def inside(self, x, on_boundary):\n return bool((near(x[0], xmin) or near(x[1], ymin)) and \\\n (not ((near(x[0], xmin) and near(x[1], ymax)) \\\n or (near(x[0], xmax) and near(x[1], ymin)))) \\\n and on_boundary)", "def outside(self, x, y):\n bbox = self.canvas.coords(self.container) # get image area\n if bbox[0] < x < bbox[2] and bbox[1] < y < bbox[3]:\n return False # point (x,y) is inside the image area\n else:\n return True # point (x,y) is outside the image area", "def inside_unit_circle(point):\n distance = math.sqrt(point[0] ** 2 + point[1] ** 2)\n return distance < 1", "def in_circle(x0, y0, x, y, r):\n return ((x - x0) ** 2 + (y - y0) ** 2) <= (r ** 2)", "def is_valid_position(self, x, y):\n if (x > self.n_cols-1 or y > self.n_rows-1) or (x < 0 or y < 0):\n return False\n\n elif self.grid[x][y] == 3:\n return False\n\n return True", "def 
checkCanvasBoundsAndWrap(self):\n #check along the x axis\n if (self.xPos<0):\n self.setXPos(self.canvasIGetDrawnOnsWidth)\n \n elif (self.xPos>self.canvasIGetDrawnOnsWidth):\n self.setXPos(0)\n #check along the y axis\n if (self.yPos<0):\n self.setYPos(self.canvasIGetDrawnOnsHeight)\n \n elif (self.yPos>self.canvasIGetDrawnOnsHeight):\n self.setYPos(0)", "def contains(self, position):\n return np.linalg.norm(position - self._center) < self._radius", "def check_value(self, pos):\n # Check the X gantry\n if self.xgantry.decoupled.get():\n raise PermissionError(\"Can not move the horizontal gantry is \"\n \"uncoupled\")\n # Allow StatePositioner to check the state\n return super().check_value(pos)", "def _on_post(self, x, y):\n return x == self._x and (y < self._top or y > (3 - self._bottom))", "def outside(self, x, y):\n bbox = self.canvas_image.coords(self.container) # get image area\n if bbox[0] < x < bbox[2] and bbox[1] < y < bbox[3]:\n return False # point (x,y) is inside the image area\n else:\n return True # point (x,y) is outside the image area", "def _point_in_circle(self, df):\n if df.empty:\n return df\n df = df[df.apply(self._is_point_in_range, axis=1)]\n return df", "def is_position_allowed(new_x, new_y):\n\n return min_x <= new_x <= max_x and min_y <= new_y <= max_y", "def is_valid_move(self, side_color, x, y):\n return self.disc_place(side_color, x, y, check_only=True)", "def isOutsideBorder(self):\n if (self.posX < -self.myGalaxy.worldWidth or self.posX > self.myGalaxy.worldWidth or\n self.posY < -self.myGalaxy.worldHeight or self.posY > self.myGalaxy.worldHeight):\n return 1\n return 0", "def xle(y, span, radius):\n py = y * span\n yarc = span - radius\n if py < yarc:\n return 0;\n else:\n dy = py - yarc\n rad = radius**2 - dy**2\n if rad<0: return 0\n x = radius - math.sqrt(rad)\n return x", "def contains ( self, pos ):\n dr2 = (pos[0, :]-self.x)**2 + (pos[1, :]-self.y)**2\n # which points are in the circle?\n if self.include_border:\n inds = (dr2 - self.r**2) < self.abs_tol\n else:\n inds = (dr2 - self.r**2) < -self.abs_tol\n \n \n # if there's no poit inside\n if ~inds.any() and self.default_nearest: \n inds[argmin(dr2)] = True\n \n return inds", "def point_in_map(self, x, y):\r\n return 0 <= x < self.width and 0 <= y < self.height and (x,y) not in self.walls" ]
[ "0.6634102", "0.65800136", "0.6350049", "0.63221407", "0.6293997", "0.62786114", "0.62291116", "0.6227815", "0.6144363", "0.6010454", "0.59905386", "0.5946691", "0.58729887", "0.58541554", "0.5845365", "0.584352", "0.5834603", "0.5824528", "0.57955706", "0.577924", "0.5777117", "0.5776547", "0.5772091", "0.5760206", "0.575172", "0.5745992", "0.5743065", "0.57362854", "0.56927407", "0.5681098" ]
0.72299653
0
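The two rows above (checkYPos and checkXPos) clamp a slider coordinate so the point (x, y) never leaves a circle of the given radius; the x and y variants are mirror images of each other. Below is a minimal standalone sketch of the same clamping rule, with an extra guard for |x| > radius that the originals leave to the paired check — all names here are illustrative, not taken from the dataset:

import math

def clamp_y_to_circle(x, y, radius):
    # If (x, y) falls outside x**2 + y**2 = radius**2, pull y back to the
    # boundary while preserving x and the sign of y (as checkYPos does).
    if x ** 2 + y ** 2 > radius ** 2:
        bound = math.sqrt(max(radius ** 2 - x ** 2, 0.0))  # guard |x| > radius
        return bound if y > 0 else -bound
    return y

# The clamped point lands exactly on the circle boundary:
assert clamp_y_to_circle(3.0, 5.0, 4.0) == math.sqrt(7.0)
assert clamp_y_to_circle(1.0, 1.0, 4.0) == 1.0  # already inside: unchanged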
Saves super class parameters. Updates or initializes the simulation
def saveParameters(self): super(BuminTab,self).saveParameters() # updates simulation if it exists # makes one if it doesn't try: self.simulation.update(**self.kwargs) except AttributeError: self.simulation = Buminovich.Buminovich(**self.simArgs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def saveParameters(self):\n super(LorentzTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = Lorentz.Lorentz(**self.simArgs)", "def saveParameters(self):\n super(LTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = Ltab.LTable(**self.simArgs)", "def saveParameters(self):\n super(CircTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = circle.CircleTable(**self.simArgs)", "def saveParameters(self):\n super(RectTab,self).saveParameters()\n self.simArgs['width'] = self.width.get()\n self.simArgs['height'] = self.height.get()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = rect.RectTable(**self.simArgs)", "def __init__(self, initial_params, save_name=\"model_param.joblib\"):\n super().__init__()\n self.initial_params = initial_params\n self.save_name = save_name", "def saveParameters(self):\n # save current scale values into the ball state for the current ball\n x = self.initialXScale.get()\n y = self.initialYScale.get()\n xVel = self.initialXVelScale.get()\n yVel = self.initialYVelScale.get()\n self.ballStates[self.currentBall] = [x, y, xVel, yVel]\n # set new currentBall if changed\n self.currentBall = int(self.ballSelector.get())\n # the states of all the balls to be simulated\n self.simArgs['balls']=self.ballStates\n self.simArgs['playbackSpeed'] = self.playbackSpeedScale.get()\n self.simArgs['trace'] = self.toTrace.get()\n self.simArgs['friction'] = self.friction.get()\n # get number of balls from formation string\n self.simArgs['nBalls'] = self.nBalls\n # for s in self.numberOfBallsSelector.get().split():\n # if s.isdigit():\n # self.simArgs['nBalls']=int(s)", "def updateData(self, *args):\n # if self.move_next_option == \"R\":\n # self.restSampling()\n # elif self.move_next_option == \"A\":\n # self.addExtra()\n # else:\n # self.continueReview()\n for name, value in self.parameter_inputs.items():\n self.parameters[name] = value.value\n # directly change the value of class variables\n logMsg((\"update settings: \", self.ml_classifier_cls, name, value.value))\n setattr(self.ml_classifier_cls, name, value.value)\n\n pass", "def _update(self):\n self.all_params = {}\n self._update_experiment_params()\n self._update_preprocessing_params()\n self._update_model_params()", "def startSimulation(self):\n self.saveParameters()\n self.simulation.main()", "def __init__(self, **variables):\n vars(self).update(variables)", "def set_instance_vars(self, new_vars=None):\n if new_vars is None:\n new_vars = Bird.default_player_values\n\n vars(self).update(new_vars)", "def make_save(self):\n\t\tsave = {}\n\t\tsave['p'] = self.p\n\t\tsave['injail'] = self.injail.copy()\n\t\tsave['tile'] = self.tile.copy()\n\t\tsave['bal'] = self.bal.copy()\n\t\tsave['goojf'] = self.goojf.copy()\n\t\tsave['isalive'] = self.isalive.copy()\n\t\tsave['jailturn'] = self.jailturn.copy()\n\t\tsave['ownedby'] = self.ownedby.copy()\n\t\tsave['numhouse'] = self.numhouse.copy()\n\t\tsave['ismortgaged'] = self.ismortgaged.copy()\n\t\tsave['num'] = self.num\n\t\tsave['numalive'] = self.numalive\n\t\tsave['uid'] = 
self.uid.copy()\n\t\tsave['freeparkingsum'] = self.freeparkingsum\n\t\tself.autosave = save", "def __init__(self) -> None:\n self.simulation = None\n self.update_time = 0.1\n self.time = None\n self.config = None", "def update_parameters(self):\n self.alignment_factor = rospy.get_param('/dyn_reconf/alignment_factor')\n self.cohesion_factor = rospy.get_param('/dyn_reconf/cohesion_factor')\n self.separation_factor = rospy.get_param('/dyn_reconf/separation_factor')\n self.avoid_factor = rospy.get_param('/dyn_reconf/avoid_factor')\n self.max_speed = rospy.get_param('/dyn_reconf/max_speed')\n self.max_force = rospy.get_param('/dyn_reconf/max_force')\n self.friction = rospy.get_param('/dyn_reconf/friction')\n self.crowd_radius = rospy.get_param('/dyn_reconf/crowd_radius')\n self.search_radius = rospy.get_param('/dyn_reconf/search_radius')\n\n rospy.loginfo(rospy.get_caller_id() + \" -> Parameters updated\")\n if DEBUG:\n print('alignment_factor: ', self.alignment_factor)\n print('cohesion_factor: ', self.cohesion_factor)\n print('separation_factor: ', self.separation_factor)\n print('avoid_factor: ', self.avoid_factor)\n print('max_speed: ', self.max_speed)\n print('max_force: ', self.max_force)\n print('friction: ', self.friction)\n print('crowd_radius: ', self.crowd_radius)\n print('search_radius: ', self.search_radius)", "def params(self,new):\n self._params = new\n self._config_set()\n self._make_model()", "def save(self, path: str) -> None:\n torch.save({\"state_dict\": self.state_dict(),\n \"data\": self._get_constructor_parameters()}, path)", "def __init__(self, to_save):\r\n self.mesh = to_save.mesh\r\n self.max_T = to_save.max_T\r\n self.d_T = to_save.d_T\r\n # Data retention\r\n self.saved_data = to_save.saved_data\r\n self.node_map = to_save.node_map", "def save_data(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save (self):\n pass", "def save(self):\n SignalPlug.save(self)\n self.config.set(\"port\", self._port)\n self.config.set(\"maxrand\", self._maxrand)\n self.config.set(\"minrand\", self._minrand)", "def _save_base(self):\n #make sure this object gets the permissions set in the form\n self.setpermissions(self.permission_lvl)", "def set_attributes(self):\n s = _setter(oself=self, e1=NameError, e2=AttributeError)\n\n s('oself.coef_ = oself.model.coef_')\n s('oself.intercept_ = oself.model.intercept_')\n\n self.time_prepare = None\n s('oself.time_prepare = oself.model.time_prepare')\n self.time_upload_data = None\n s('oself.time_upload_data = oself.model.time_upload_data')\n self.time_fitonly = None\n s('oself.time_fitonly = oself.model.time_fitonly')", "def save(self):\n\n pass", "def save(self):\n raise NotImplementedError", "def save(self):\n raise NotImplementedError" ]
[ "0.73744684", "0.72086346", "0.69888663", "0.6818995", "0.63069427", "0.62870556", "0.6189242", "0.60028684", "0.5987332", "0.5974823", "0.58391196", "0.58210707", "0.5813146", "0.58054054", "0.580224", "0.57546777", "0.5752146", "0.5731866", "0.5727853", "0.5727853", "0.5727853", "0.5727853", "0.5727853", "0.5721526", "0.5707944", "0.5699469", "0.5694122", "0.5691344", "0.56901675", "0.56901675" ]
0.73409647
1
Saves super class parameters. Updates or initializes the simulation
def saveParameters(self): super(LorentzTab,self).saveParameters() # updates simulation if it exists # makes one if it doesn't try: self.simulation.update(**self.kwargs) except AttributeError: self.simulation = Lorentz.Lorentz(**self.simArgs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def saveParameters(self):\n super(BuminTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = Buminovich.Buminovich(**self.simArgs)", "def saveParameters(self):\n super(LTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = Ltab.LTable(**self.simArgs)", "def saveParameters(self):\n super(CircTab,self).saveParameters()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = circle.CircleTable(**self.simArgs)", "def saveParameters(self):\n super(RectTab,self).saveParameters()\n self.simArgs['width'] = self.width.get()\n self.simArgs['height'] = self.height.get()\n # updates simulation if it exists\n # makes one if it doesn't\n try:\n self.simulation.update(**self.kwargs)\n except AttributeError:\n self.simulation = rect.RectTable(**self.simArgs)", "def __init__(self, initial_params, save_name=\"model_param.joblib\"):\n super().__init__()\n self.initial_params = initial_params\n self.save_name = save_name", "def saveParameters(self):\n # save current scale values into the ball state for the current ball\n x = self.initialXScale.get()\n y = self.initialYScale.get()\n xVel = self.initialXVelScale.get()\n yVel = self.initialYVelScale.get()\n self.ballStates[self.currentBall] = [x, y, xVel, yVel]\n # set new currentBall if changed\n self.currentBall = int(self.ballSelector.get())\n # the states of all the balls to be simulated\n self.simArgs['balls']=self.ballStates\n self.simArgs['playbackSpeed'] = self.playbackSpeedScale.get()\n self.simArgs['trace'] = self.toTrace.get()\n self.simArgs['friction'] = self.friction.get()\n # get number of balls from formation string\n self.simArgs['nBalls'] = self.nBalls\n # for s in self.numberOfBallsSelector.get().split():\n # if s.isdigit():\n # self.simArgs['nBalls']=int(s)", "def updateData(self, *args):\n # if self.move_next_option == \"R\":\n # self.restSampling()\n # elif self.move_next_option == \"A\":\n # self.addExtra()\n # else:\n # self.continueReview()\n for name, value in self.parameter_inputs.items():\n self.parameters[name] = value.value\n # directly change the value of class variables\n logMsg((\"update settings: \", self.ml_classifier_cls, name, value.value))\n setattr(self.ml_classifier_cls, name, value.value)\n\n pass", "def _update(self):\n self.all_params = {}\n self._update_experiment_params()\n self._update_preprocessing_params()\n self._update_model_params()", "def startSimulation(self):\n self.saveParameters()\n self.simulation.main()", "def __init__(self, **variables):\n vars(self).update(variables)", "def set_instance_vars(self, new_vars=None):\n if new_vars is None:\n new_vars = Bird.default_player_values\n\n vars(self).update(new_vars)", "def make_save(self):\n\t\tsave = {}\n\t\tsave['p'] = self.p\n\t\tsave['injail'] = self.injail.copy()\n\t\tsave['tile'] = self.tile.copy()\n\t\tsave['bal'] = self.bal.copy()\n\t\tsave['goojf'] = self.goojf.copy()\n\t\tsave['isalive'] = self.isalive.copy()\n\t\tsave['jailturn'] = self.jailturn.copy()\n\t\tsave['ownedby'] = self.ownedby.copy()\n\t\tsave['numhouse'] = self.numhouse.copy()\n\t\tsave['ismortgaged'] = self.ismortgaged.copy()\n\t\tsave['num'] = self.num\n\t\tsave['numalive'] = self.numalive\n\t\tsave['uid'] = 
self.uid.copy()\n\t\tsave['freeparkingsum'] = self.freeparkingsum\n\t\tself.autosave = save", "def __init__(self) -> None:\n self.simulation = None\n self.update_time = 0.1\n self.time = None\n self.config = None", "def update_parameters(self):\n self.alignment_factor = rospy.get_param('/dyn_reconf/alignment_factor')\n self.cohesion_factor = rospy.get_param('/dyn_reconf/cohesion_factor')\n self.separation_factor = rospy.get_param('/dyn_reconf/separation_factor')\n self.avoid_factor = rospy.get_param('/dyn_reconf/avoid_factor')\n self.max_speed = rospy.get_param('/dyn_reconf/max_speed')\n self.max_force = rospy.get_param('/dyn_reconf/max_force')\n self.friction = rospy.get_param('/dyn_reconf/friction')\n self.crowd_radius = rospy.get_param('/dyn_reconf/crowd_radius')\n self.search_radius = rospy.get_param('/dyn_reconf/search_radius')\n\n rospy.loginfo(rospy.get_caller_id() + \" -> Parameters updated\")\n if DEBUG:\n print('alignment_factor: ', self.alignment_factor)\n print('cohesion_factor: ', self.cohesion_factor)\n print('separation_factor: ', self.separation_factor)\n print('avoid_factor: ', self.avoid_factor)\n print('max_speed: ', self.max_speed)\n print('max_force: ', self.max_force)\n print('friction: ', self.friction)\n print('crowd_radius: ', self.crowd_radius)\n print('search_radius: ', self.search_radius)", "def params(self,new):\n self._params = new\n self._config_set()\n self._make_model()", "def save(self, path: str) -> None:\n torch.save({\"state_dict\": self.state_dict(),\n \"data\": self._get_constructor_parameters()}, path)", "def __init__(self, to_save):\r\n self.mesh = to_save.mesh\r\n self.max_T = to_save.max_T\r\n self.d_T = to_save.d_T\r\n # Data retention\r\n self.saved_data = to_save.saved_data\r\n self.node_map = to_save.node_map", "def save_data(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save(self):\n pass", "def save (self):\n pass", "def save(self):\n SignalPlug.save(self)\n self.config.set(\"port\", self._port)\n self.config.set(\"maxrand\", self._maxrand)\n self.config.set(\"minrand\", self._minrand)", "def _save_base(self):\n #make sure this object gets the permissions set in the form\n self.setpermissions(self.permission_lvl)", "def set_attributes(self):\n s = _setter(oself=self, e1=NameError, e2=AttributeError)\n\n s('oself.coef_ = oself.model.coef_')\n s('oself.intercept_ = oself.model.intercept_')\n\n self.time_prepare = None\n s('oself.time_prepare = oself.model.time_prepare')\n self.time_upload_data = None\n s('oself.time_upload_data = oself.model.time_upload_data')\n self.time_fitonly = None\n s('oself.time_fitonly = oself.model.time_fitonly')", "def save(self):\n\n pass", "def save(self):\n raise NotImplementedError", "def save(self):\n raise NotImplementedError" ]
[ "0.73409647", "0.72086346", "0.69888663", "0.6818995", "0.63069427", "0.62870556", "0.6189242", "0.60028684", "0.5987332", "0.5974823", "0.58391196", "0.58210707", "0.5813146", "0.58054054", "0.580224", "0.57546777", "0.5752146", "0.5731866", "0.5727853", "0.5727853", "0.5727853", "0.5727853", "0.5727853", "0.5721526", "0.5707944", "0.5699469", "0.5694122", "0.5691344", "0.56901675", "0.56901675" ]
0.73744684
0
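Both saveParameters documents above (and several of their negatives) rely on the same update-or-create idiom: try to update self.simulation in place, and build a fresh one when the attribute does not exist yet. A stripped-down sketch of that idiom follows — Simulation and the keyword arguments are placeholders, not classes from the dataset:

class Simulation:
    def __init__(self, **params):
        self.params = params

    def update(self, **params):
        self.params.update(params)

class Tab:
    def save_parameters(self, **sim_args):
        try:
            self.simulation.update(**sim_args)  # update if it already exists
        except AttributeError:
            self.simulation = Simulation(**sim_args)  # first save: create it

tab = Tab()
tab.save_parameters(trace=True)   # creates the simulation
tab.save_parameters(trace=False)  # updates it in place
print(tab.simulation.params)      # {'trace': False}

One caveat with the try/except form: an AttributeError raised inside update() itself would be silently caught and trigger a re-create; checking hasattr(self, "simulation") first sidesteps that.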
Converts raw Smartsheet Sheet objects into a nice and tidy pandas DataFrame, just like mum used to make
def sheet_to_df(access_token, sheet_id): api_prefix_url = 'https://api.smartsheet.com/2.0/sheets/' # base Smartsheet api url for Requests url = api_prefix_url + str(sheet_id) # full url for requests header = { # header for requests 'Authorization': 'Bearer ' + access_token, 'Content-Type': 'application/json', 'cache-control': 'no-cache' } r = requests.get(url, headers=header) # create requests Response object of sheet's json sheet_dic = r.json() # convert json to a dictionary col_list = [] for c in sheet_dic['columns']: # for all columns in the sheet dictionary col_list.append(c['title']) # add title value to col_list df = pd.DataFrame(columns=col_list) # create an empty DataFrame with the col_list for r in sheet_dic['rows']: # iterate through all cells, place in rows and cols values = [] # initialize values list for each row for c in r['cells']: if c.get('value'): # politely handle empty cells values.append(c['value']) else: values.append('') df = df.append(dict(zip(col_list, values)), ignore_index=True) # zip joins col_list and values return df
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def pandas_convert(self):\n data = {}\n\n for names in self.data[0]:\n col_values = []\n\n if names in objects:\n for items in self.data[0][names]:\n col_values = []\n\n col_name = names + \"_\" + items\n\n for i in range(len(self.data)):\n col_values.append(self.data[i][names][items])\n\n data[col_name] = col_values\n else:\n for i in range(len(self.data)):\n col_values.append(self.data[i][names])\n \n data[names] = col_values\n\n self.pandas_df = pd.DataFrame(data=data)\n self.__clean_df()\n\n return self.pandas_df", "def parse_xls_sheet_to_df(sheet: opxl.workbook,\n min_row: Optional[int] = 1,\n relevant_cols: Optional[list] = None,\n irrelevant_cols: Optional[list] = None) -> pd.DataFrame:\n parsed_sheet_dict = {}\n\n for col in sheet.iter_cols(min_row=min_row):\n col_label = col[0].value\n\n if ((relevant_cols is not None and col_label in relevant_cols) or (\n irrelevant_cols is not None and col_label not in irrelevant_cols)):\n parsed_sheet_dict[col_label] = [munge_cell(cell.value) for cell in col[1:]]\n\n return pd.DataFrame.from_dict(parsed_sheet_dict)", "def get_sheet_data(self) -> List[SheetData]:\n result: List[SheetData] = []\n\n for c in range(self.columnCount() - 1):\n s = SheetData(self.horizontalHeaderItem(c + 1).text())\n for r, v in enumerate(sheet_infos):\n wgt = self.cellWidget(r, c + 1)\n if type(wgt) is QLineEdit:\n s[v] = wgt.text()\n elif type(wgt) is QCheckBox:\n s[v] = wgt.isChecked()\n else:\n raise TypeError(\"There is no matched data type for \" + str(type(wgt)))\n result.append(s)\n\n return result", "def sourceToDataframe(self):\n df = pd.read_excel(self.filename)\n df.columns = df.iloc[10]\n df = df.drop(df.index[:11])\n self.df = df #makes this df accessible to the whole class now\n self.insertODN()\n display(df.head())", "def convert_to_df(data):\r\n ans = pd.DataFrame(data)\r\n return ans", "def __load( self, raw_content ):\n return( pd.read_excel( BytesIO( raw_content ) ) )", "def gsheet2df():\n gsheet = get_data()\n header = gsheet.get('values', [])[0] # Assumes first line is header!\n values = gsheet.get('values', [])[1:] # Everything else is data.\n if not values:\n print('No data found.')\n else:\n all_data = []\n for col_id, col_name in enumerate(header):\n column_data = []\n for row in values:\n column_data.append(row[col_id])\n ds = pd.Series(data=column_data, name=col_name)\n all_data.append(ds)\n df = pd.concat(all_data, axis=1)\n return df", "def as_dataframe(self) -> \"pd.DataFrame\":\n import pandas as pd\n\n df = pd.DataFrame([row.as_series() for row in self.rows])\n return df", "def as_dataframe(self, force=False):\n _, content = self._data_export_helper('csv', force)\n return self._as_dataframe(content)", "def as_DF(self):\n\n hc_df = pd.DataFrame(self.s, index=self.s_names)\n hc_df.columns.name = 'type'\n hc_df.index.name = 's'\n\n return hc_df", "def _sheet_to_df(columns_config_url_or_path):\n url = columns_config_url_or_path.replace(\"edit#gid=\", \"export?format=csv&gid=\")\n try:\n return pd.read_csv(StringIO(requests.get(url, timeout=10).content.decode(\"utf-8\")))\n except Exception as e:\n raise BaseDosDadosException(\n \"Check if your google sheet Share are: Anyone on the internet with this link can view\"\n ) from e", "def get_dataframe(config: ModelSettings):\n df = pd.read_excel(config.df_data_source_path, engine=\"openpyxl\")\n\n # only use volumes with more than 30 slices\n if \"z\" in df.columns:\n ddf = df[(df[\"z\"] >= 30)]\n return df", "def normalize_excelSheet(self, sheetname, conversion_dictionary):\n\n sheet = 
self.wb.sheet_by_name(sheetname)\n\n ami_data = []\n\n date_headers = [\"bibliographic.date\", \"technical.dateCreated\"]\n time_headers = [\"technical.durationHuman\"]\n\n #copy everything from the 3rd row to the last row with a filename\n for rownum in range(2, sheet.nrows):\n if sheet.cell(rownum, 0):\n ami_data.append(sheet.row_values(rownum))\n\n for i in range(0, sheet.ncols):\n #normalize header\n header_entry = self.get_headerEntryAsString(sheetname, i)\n ami_data[0][i] = self.normalize_headerEntry(\n header_entry,\n conversion_dictionary)\n\n #convert excel dates\n if ami_data[0][i] in date_headers:\n for j in range(3, sheet.nrows):\n if sheet.cell(j, i).ctype == 3:\n value = sheet.cell(j, i).value\n ami_data[j-2][i] = self.convert_excelDateTime(value, \"date\")\n\n #convert excel times\n if ami_data[0][i] in time_headers:\n for j in range(3, sheet.nrows):\n if sheet.cell(j, i).ctype == 3:\n value = sheet.cell(j, i).value\n ami_data[j-2][i] = self.convert_excelDateTime(value, \"time\")\n\n ami_df = self.normalize_values(ami_data)\n\n return ami_df", "def get_sheet(sheet, doc):\n scope = [\"https://spreadsheets.google.com/feeds\", \"https://www.googleapis.com/auth/drive\"]\n\n credentials = ServiceAccountCredentials.from_json_keyfile_name(SECRET_FILE, scope)\n\n gc = gspread.authorize(credentials)\n wks = gc.open(doc)\n sheet = wks.worksheet(sheet)\n data = sheet.get_all_values()\n h1 = ffill(data[0])\n\n # remove extra whitespace\n h1 = [k.strip() for k in h1]\n h2 = [k.strip() for k in data[1]]\n\n # create a multiindex\n columns = MultiIndex.from_tuples(zip(h1, h2))\n\n # populate the dataframe\n df = DataFrame(data[2:], columns=columns)\n return df", "def __object_demapper(self, data: list) -> pd.DataFrame:\n data = pd.DataFrame.from_records([s.to_dict() for s in data])\n\n return data", "def make_tables(self):\n return [XLSRowSet(name, self.workbook.sheet_by_name(name), self.window)\n for name in self.workbook.sheet_names()]", "def as_frame(df_like: DataFrameLike) -> pd.DataFrame:\n try:\n return df_like.to_frame()\n except AttributeError:\n return df_like", "def prepareDataframeForTable(self, result):\n df = result\n if isinstance(df, pd.Series):\n df = pd.DataFrame({\"values\": df})\n\n if self._isIndexedDataframe(df):\n if df.size == 0:\n df[\"values\"] = np.nan\n elif len(df.columns) > 1:\n if isinstance(df.columns, pd.MultiIndex):\n df.columns = df.columns.map(' | '.join)\n df = df.stack()\n if isinstance(df, pd.Series):\n df = pd.DataFrame({\"values\": df})\n current_columns_name = list(df.index.names)\n current_columns_name[len(current_columns_name)-1] = \"Measures\"\n df.index.names = current_columns_name\n\n return df", "def to_df(self):\n from ..df import DataFrame\n\n return DataFrame(self.table).filter_parts(self)", "def df(self) -> \"pandas.DataFrame\":\n titles = []\n comments = []\n alternative_codes = []\n for cat in self.values():\n titles.append(cat.title)\n comments.append(cat.comment)\n alternative_codes.append(cat.codes[1:])\n return pandas.DataFrame(\n index=list(self.keys()),\n data={\n \"title\": titles,\n \"comment\": comments,\n \"alternative_codes\": alternative_codes,\n },\n )", "def glass_pandas(self):\n # pandas.set_option('display.width', 120)\n # TODO timeit (git_implementation) vs (my_implementation)\n # * df = pd.DataFrame(json.loads(r.text))\n # * df = df.set_index('t')\n # * df.index = pd.to_datetime(df.index, unit='s')\n # * df = df.sort_index()\n # * s = df.v\n # * s.name = '_'.join(url.split('/')[-2:])\n # * return s\n # for elem in 
self.loaded:\n # _metric, _data = elem[1]['_metrics'], elem[1]['_data']\n # try:\n # frame_keys = ['t'] + list(_data[0]['o'].keys())\n # framed = pandas.DataFrame(\n # data=[{k: (_data[iters]['t'] if k in 't' else _data[iters]['o'][k])\n # for k in frame_keys} for iters in range(len(_data))],\n # columns=frame_keys)\n # except KeyError:\n # framed = pandas.DataFrame(_data)\n # framed.set_index('t', inplace=True)\n # framed.index = pandas.to_datetime(\n # framed.index.to_flat_index(), unit='s', infer_datetime_format=True)\n # framed.sort_index(inplace=True)\n # framed.name = _metric\n # print(framed.name)\n # print(framed)", "def dataframe():\n headers = get_headers()\n headers = {'headers': headers}\n headers = pd.DataFrame.from_dict(headers, orient='index')\n headers = headers.replace(r'\\n', ' ', regex=True)\n headers = headers.replace(r'\\r', ' ', regex=True)\n headers = headers.replace(r'\\t', ' ', regex=True)\n headers = headers.replace(r'\\\\t', ' ', regex=True)\n headers = headers.replace(r' ', ' ', regex=True)\n headers = headers.replace(r' ', ' ', regex=True)\n\n paragraphs = get_paragraphs()\n paragraphs = {'paragraphs': paragraphs}\n paragraphs = pd.DataFrame.from_dict(paragraphs, orient='index')\n paragraphs = paragraphs.replace(r'\\n', ' ', regex=True)\n paragraphs = paragraphs.replace(r'\\r', ' ', regex=True)\n paragraphs = paragraphs.replace(r'\\t', ' ', regex=True)\n paragraphs = paragraphs.replace(r'\\\\t', ' ', regex=True)\n paragraphs = paragraphs.replace(r' ', ' ', regex=True)\n paragraphs = paragraphs.replace(r' ', ' ', regex=True)\n\n return headers.to_csv('headers.csv', index=False), paragraphs.to_csv('paragraphs.csv', index=False)", "def to_frame(self) -> pd.DataFrame:\n df = pd.DataFrame(data={\n 'Name': [p.name for p in self],\n 'Description': [p.desc for p in self],\n 'Value': [p.value for p in self],\n 'Hyper-Space': [p.hyper_space for p in self]\n }, columns=['Name', 'Description', 'Value', 'Hyper-Space'])\n return df", "def _make_body_cells_df(body_cells_response):\n body_cells_df = util.make_dataframe(body_cells_response)\n if not \"attributes.type\" in body_cells_df.columns.to_list():\n body_cells_df[\"attributes.type\"] = None\n body_cells_df[\"attributes.text\"] = None\n body_cells_df = body_cells_df[\n [\"text\", \"column_index_begin\", \"column_index_end\", \"row_index_begin\", \"row_index_end\", \"cell_id\",\n \"column_header_ids\", \"column_header_texts\", \"row_header_ids\", \"row_header_texts\",\n \"attributes.text\", \"attributes.type\"]]\n return body_cells_df", "def to_dataframe(self, include_metadata: bool = True) -> pd.DataFrame:\n # Get all our data first with async\n # Note that all our pandas work will tax CPU so we wouldn't expect any\n # performance gains from doing the data parsing as a callback\n records = self.to_dict()\n data = []\n for series in records:\n df = pd.DataFrame(series.pop(\"data\"), columns=[\"period\", \"value\"])\n if include_metadata:\n df = df.assign(**series)\n data.append(df)\n return pd.concat(data, ignore_index=True)", "def to_dataframe(self):\n return df_util.to_dataframe(requests.get(self.__url).json())", "def get_data_from_source_sheet(sheet):\n source_data = []\n for row in sheet.iter_rows(min_row=2, values_only=True):\n if row[0]:\n item = Item(\n name=row[COL_ITEM_NAME],\n level=row[COL_LEVEL],\n raw_material=row[COL_RAW_MATERIAL],\n quantity=row[COL_QUANTITY],\n unit=row[COL_UNIT],\n )\n source_data.append(item)\n return source_data", "def to_pandas(self):\n self.meta = pd.DataFrame(self.meta)\n return", 
"def get_dataframe(path, sheet, header_row, data_row_start, data_row_end): \n \n skip_footer_nrows = get_skip_footer_length(path, sheet, header_row, data_row_end, verbose = SHOW_MESSAGES)\n \n # read data from sheet \n df = pd.read_excel(path, sheet, index_col=0, header=header_row-1, na_values=NA_LIST, skip_footer = skip_footer_nrows)\n \n # cut rows between 'header_row' and 'data_row_start'\n row_slice_index = data_row_start - header_row - 1 \n try: \n df = df[row_slice_index:]\n except:\n # assignment above fails on NaN values in index, so we change index to sequence of intergers in this case\n df = filter_index_column(df, verbose = SHOW_MESSAGES)\n df = df[row_slice_index:] \n return df", "def from_mars(df: \"mars.DataFrame\") -> Dataset[ArrowRow]:\n import mars.dataframe as md\n\n return md.to_ray_dataset(df)" ]
[ "0.63097835", "0.59798735", "0.5798734", "0.5794549", "0.57478553", "0.5717758", "0.569768", "0.5680193", "0.5665196", "0.5657219", "0.5641252", "0.56317824", "0.5627899", "0.5621413", "0.55911493", "0.5583642", "0.55697036", "0.5564284", "0.554248", "0.5540877", "0.5535269", "0.55189496", "0.55122674", "0.55096775", "0.55016875", "0.5501373", "0.5475854", "0.5474772", "0.5472889", "0.5464418" ]
0.7096664
0
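A note on the sheet_to_df document above: it grows the frame with DataFrame.append inside the row loop, a method pandas deprecated in 1.4 and removed in 2.0. Here is a sketch of the same cells-to-rows conversion that collects plain dicts and builds the frame once — sheet_dic mirrors the JSON shape assumed by that code, not a documented Smartsheet schema:

import pandas as pd

def rows_to_df(sheet_dic):
    col_list = [c["title"] for c in sheet_dic["columns"]]  # column titles
    records = []
    for row in sheet_dic["rows"]:
        # Same empty-cell handling as the original: falsy values become ''.
        values = [c["value"] if c.get("value") else "" for c in row["cells"]]
        records.append(dict(zip(col_list, values)))
    return pd.DataFrame(records, columns=col_list)  # one construction, no append

sample = {
    "columns": [{"title": "Task"}, {"title": "Owner"}],
    "rows": [{"cells": [{"value": "Ship it"}, {}]}],
}
print(rows_to_df(sample))  # Owner is '' for the empty cell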
Consolidate deflines and sequences from FASTA as dictionary
def fasta_to_dict(fasta_file): deflines = [] sequences = [] sequence = "" with open(fasta_file, "r") as file: for line in file: if line.startswith(">"): deflines.append(line.rstrip().lstrip('>')) if sequence: sequences.append(sequence) sequence = "" else: sequence += line.rstrip() sequences.append(sequence) fasta_dict = {} for x, defline in enumerate(deflines): fasta_dict[defline]=sequences[x] return fasta_dict
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_fasta(fasta_file):\n\n seq_dict = dict() # Declare a new dictionary\n\n with open(fasta_file,'r') as f:\n lines = f.readlines()\n defline = \"\"\n for li in lines:\n li = li.rstrip() # remove newlines\n if '>' in li:\n defline = li # if i use 'id' it is blue; why?\n seq_dict[defline] = \"\"\n else:\n li = li.upper() # just to clean up sequence\n seq_dict[defline] += li\n\n return seq_dict", "def read_fasta_to_dictionary(genome_file):\n filename = genome_file\n dct = {}\n\n id_name = \"\"\n sequence = \"\"\n first_pass = 1\n\n read_fh = open(filename, 'r')\n for i, line in enumerate(read_fh):\n line = line.rstrip()\n if re.search(r'^>(\\S+)(\\s+)(\\S+)(\\s+)(\\S+)(\\s+)(\\S+)(\\s+)(\\S+)(\\s+)(\\S+)(.*)', line):\n\n match_obj = re.search(r'^>(\\S+)(\\s+)(\\S+)(\\s+)(\\S+)(\\s+)(\\S+)(\\s+)(\\S+)(\\s+)(\\S+)(.*)', line)\n if not first_pass:\n dct[id_name] = sequence\n id_name = match_obj.group(1)\n id_name = re.sub(r',', \"\", id_name)\n first_pass = 0\n sequence = \"\"\n\n elif re.search(r'^>(\\S+)(.*)', line):\n\n match_obj = re.search(r'^>(\\S+)(.*)', line)\n if not first_pass:\n dct[id_name] = sequence\n id_name = match_obj.group(1)\n id_name = re.sub(r'(\\d+)_', \"\", id_name)\n id_name = re.sub(r'.*\\|', \"\", id_name)\n first_pass = 0\n sequence = \"\"\n else:\n sequence += line\n dct[id_name] = sequence\n\n return dct", "def FASTAfile_to_dict(FASTAfile):\n FASTADict = {}\n for line in FASTAfile:\n if '>' in line:\n FASTALabel = line\n FASTADict[FASTALabel] = \"\"\n else:\n FASTADict[FASTALabel] += line\n return FASTADict", "def sequenceDict(self):\n\t\twith open(self.ff) as fastaFile:\n\t\t\tsequences = {}\n\t\t\tfor name, seq in self.readFasta(fastaFile):\n\t\t\t\tsequences[name] = seq\n\t\treturn sequences", "def Parse_Fasta(filename):\n dic = {}\n name = None\n seq = ''\n with open(filename) as F:\n for line in F:\n if line.startswith('>'):\n if name is not None:\n dic[name] = seq\n seq = ''\n name = line.strip()\n else:\n seq += line\n if not name in dic:\n dic[name] = seq\n return dic", "def return_fasta_dic(file):\n seq_dict = {rec.id: rec.seq for rec in SeqIO.parse(file, \"fasta\")}\n return seq_dict", "def fasta_iter(fh: io.TextIOWrapper) -> dict:\n # ditch the boolean (x[0]) and just keep the header or sequence since\n # we know they alternate.\n faiter = (x[1] for x in groupby(fh, lambda line: line[0] == \">\"))\n for header in faiter:\n # drop the \">\"\n header = next(header)[1:].strip()\n # join all sequence lines to one.\n seq = \"\".join(s.strip() for s in next(faiter))\n yield {\"header\": header, \"seq\": seq}", "def get_ref_seq_dict(ref_seq):\n return SeqIO.to_dict(SeqIO.parse(ref_seq, 'fasta')) if ref_seq else None", "def parse_fasta(fasta_file):\n\n fasta_dct = {}\n\n with open(fasta_file,'r') as text:\n label = ''\n for line in text:\n if line.startswith('>'):\n if label in fasta_dct.keys():\n fasta_dct[current_line] = str(''.join(fasta_dct[current_line]))\n label = line.strip()[1:]\n fasta_dct[label] = []\n current_line = label\n else:\n fasta_dct[current_line].append(line.strip())\n fasta_dct[current_line] = str(''.join(fasta_dct[current_line]))\n\n return fasta_dct", "def parse_fasta(dataset):\n\n records = {}\n record_id = None\n\n for line in [lines.strip() for lines in dataset.splitlines()]:\n \n if line.startswith('>'):\n records_id = line[1:]\n records[records_id] = \"\"\n else:\n records[records_id] += line\n #print records\n \n return records", "def safe_collapse(self, inputs, fasta_filepath=None):\n\n seqdict = {}\n superseqs, incomplete = 
self.load(inputs, parseheaders=False)\n for genome in superseqs:\n seqlist = [ a.seq() for a in superseqs[genome].iteralleles() ]\n if len(seqlist) > 1:\n raise Exception(\"Multiple alleles for genome {}\".format(genome))\n seqdict[genome] = seqlist[0]\n \n if fasta_filepath:\n with open(fasta_filepath, 'w') as f:\n for name,seq in seqdict.iteritems():\n f.write(\">{}\\n{}\\n\".format(name, seq))\n \n return seqdict", "def premrna_desc(gff3, fasta):\n seqs = {}\n for defline, seq in LocusPocus.fasta.parse(fasta):\n seqid = defline[1:].split(' ')[0]\n if seqid not in seqs:\n seqs[seqid] = seq\n\n mrnaacc = ''\n mrnalen = 0\n gccontent = 0.0\n gcskew = 0.0\n ncontent = 0.0\n exoncount = 0\n introncount = 0\n utr5plen = 0\n utr3plen = 0\n for entry in gff3:\n if '\\tmRNA\\t' in entry:\n fields = entry.rstrip().split('\\t')\n assert len(fields) == 9\n mrnaacc = re.search(r'accession=([^;\\n]+)', fields[8]).group(1)\n mrnalen = int(fields[4]) - int(fields[3]) + 1\n mrnaseq = seqs[mrnaacc]\n if len(mrnaseq) != mrnalen:\n message = 'pre-mRNA \"%s\": length mismatch' % mrnaacc\n message += ' (gff3=%d, fa=%d)' % (mrnalen, len(mrnaseq))\n message += '; most likely a duplicated accession, discarding'\n print(message, file=sys.stderr)\n mrnaacc = ''\n gccontent = gc_content(mrnaseq)\n gcskew = gc_skew(mrnaseq)\n ncontent = n_content(mrnaseq)\n elif '\\texon\\t' in entry:\n exoncount += 1\n elif '\\tintron\\t' in entry:\n introncount += 1\n elif '\\tfive_prime_UTR\\t' in entry:\n fields = entry.rstrip().split('\\t')\n assert len(fields) == 9\n utr5plen += int(fields[4]) - int(fields[3]) + 1\n elif '\\tthree_prime_UTR\\t' in entry:\n fields = entry.rstrip().split('\\t')\n assert len(fields) == 9\n utr3plen += int(fields[4]) - int(fields[3]) + 1\n elif entry.startswith('###'):\n if mrnaacc != '':\n values = '%s %d %.3f %.3f %.3f %d %d %d %d' % (\n mrnaacc, mrnalen, gccontent, gcskew, ncontent,\n exoncount, introncount, utr5plen, utr3plen)\n yield values.split(' ')\n mrnaacc = ''\n mrnalen = 0\n gccontent = 0.0\n gcskew = 0.0\n ncontent = 0.0\n exoncount = 0\n exonlen = 0\n introncount = 0\n utr5plen = 0\n utr3plen = 0", "def parse_fasta(fasta_filename):\n\n sequences = {}\n\n with open(fasta_filename, \"r\") as fasta:\n\n # do our best to accept any input that looks vaguely valid\n for line in fasta:\n \n if line.startswith(\">\"):\n # take everything up to the first space as the id\n # get rid of the leading >\n # and get rid of the newline\n fasta_id = line.split(\" \")[0].replace(\">\", \"\", 1).rstrip('\\n')\n \n seq = []\n wholeseq = ''\n if fasta_id == \"\":\n raise Exceptions.MissingId(\"invalid if there is no fasta_id\")\n \n else:\n seq.append(line.rstrip('\\n'))\n # handle sequences on multiple lines\n wholeseq = \"\".join(seq)\n if len(wholeseq) == 0:\n raise Exceptions.MissingSequence(\"invalid if there is no sequence\")\n sequences[fasta_id] = wholeseq\n\n if len(sequences) == 0:\n raise Exceptions.EmptyFasta(\"invalid if there is nothing in the fasta file\")\n\n return sequences", "def readFastaFile(filename):\n if os.path.exists(filename)==False:return {}\n sequences={}\n fhr=open(filename,\"r\")\n for line in fhr:\n if line[0]==\">\":\n sequences[line.strip()[1:].split()[0]]=fhr.readline().strip()\n fhr.close()\n return sequences", "def read_fasta_file(filename):\n sequences_lines = {}\n current_sequence_lines = None\n with open(filename) as fp:\n for line in fp:\n line = line.strip()\n if line.startswith(';') or not line:\n continue\n if line.startswith('>'):\n sequence_name = 
line.lstrip('>')\n current_sequence_lines = []\n sequences_lines[sequence_name] = current_sequence_lines\n else:\n if current_sequence_lines is not None:\n current_sequence_lines.append(line)\n sequences = {}\n for name, lines in sequences_lines.items():\n sequences[name] = ''.join(lines)\n return sequences", "def fasta_parser(filename):\n fasta = {}\n with open(filename) as f:\n contents = f.read()[1:].split('\\n>')\n for section in contents:\n sample = section.split('\\n')\n sample_id = sample[0]\n seq = ''.join(sample[1:]).strip()\n fasta[sample_id] = seq\n return fasta", "def read_fasta_file(filename):\n sequences_lines = {}\n current_sequence_lines = None\n with open(filename) as fp:\n for line in fp:\n line = line.strip()\n if line.startswith(';') or not line:\n continue\n if line.startswith('>'):\n sequence_name = line.lstrip('>')\n current_sequence_lines = []\n sequences_lines[sequence_name] = current_sequence_lines\n else:\n if current_sequence_lines is not None:\n current_sequence_lines.append(line)\n sequences = {}\n for name, lines in sequences_lines.items():\n sequences[name] = ''.join(lines)\n return sequences", "def read_fasta_file(path):\n with open(path) as data_file:\n output = {}\n sequence_name = None\n for line in data_file.readlines():\n if line.startswith(\">\"):\n sequence_name = line[1:].strip()\n else:\n output.setdefault(sequence_name, \"\")\n line = \"\".join(re.findall(\"[acgtACGT]+\", line))\n\n output[sequence_name]+=line.upper()\n return output", "def getseq(genomefasta):\n genomedict = {}\n for i in SeqIO.parse(open(genomefasta), \"fasta\"):\n genomedict[i.id] = str(i.seq)\n return genomedict", "def mrna_desc(gff3, fasta):\n seqs = {}\n for defline, seq in LocusPocus.fasta.parse(fasta):\n seqid = defline[1:].split(' ')[0]\n if seqid not in seqs:\n seqs[seqid] = seq\n\n mrnaacc = ''\n mrnalen = 0\n for entry in gff3:\n if '\\tmRNA\\t' in entry:\n fields = entry.rstrip().split('\\t')\n assert len(fields) == 9\n mrnalen += int(fields[4]) - int(fields[3]) + 1\n accmatch = re.search(r'accession=([^;\\n]+)', fields[8])\n assert accmatch, 'Unable to parse mRNA accession: %s' % fields[8]\n mrnaacc = accmatch.group(1)\n elif entry.startswith('###'):\n mrnaseq = seqs[mrnaacc]\n if len(mrnaseq) != mrnalen:\n message = 'mature mRNA \"%s\": length mismatch' % mrnaacc\n message += ' (gff3=%d, fa=%d)' % (mrnalen, len(mrnaseq))\n message += '; most likely a duplicated accession, discarding'\n print(message, file=sys.stderr)\n else:\n gccontent = gc_content(mrnaseq)\n gcskew = gc_skew(mrnaseq)\n ncontent = n_content(mrnaseq)\n values = '%s %d %.3f %.3f %.3f' % (\n mrnaacc, mrnalen, gccontent, gcskew, ncontent)\n yield values.split(' ')\n mrnaacc = ''\n mrnalen = 0", "def get_fasta_dict(input_fasta_path):\n\n\ttry:\n\t\tnew_file = open(input_fasta_path, \"rU\")\n\t\tsequence_record_dict = SeqIO.to_dict(SeqIO.parse(new_file, \"fasta\"))\n\t\tnew_file.close()\n\t\treturn sequence_record_dict\n\texcept IOError as e:\n\t\tprint(str(e))\n\t\tsys.exit(1) # Aborts program. 
(exit(1) indicates that an error occurred)", "def get_fastg_seqs_dict(fastg_name, G):\n fp = open(fastg_name, 'r')\n seqs = {}\n for name,seq,qual in readfq(fp):\n name_parts = re.sub('[:,]',\" \", name[:-1]).split()\n node = name_parts[0]\n seqs[node] = seq\n return seqs", "def read_fasta_to_dict(path_to_file):\n if options.verbose:\n syserr(\"Reading sequences from %s \\n\" % (path_to_file))\n try:\n seq_obj = open(path_to_file, 'Ur')\n seqs = {}\n for seq in SeqIO.parse(seq_obj, 'fasta'):\n seqs[str(seq.id)] = str(seq.seq)\n except IOError:\n raise IOError('Cannot read from %s' % (path_to_file))\n\n return seqs", "def MinimalBpseqParser(lines):\n result = {'HEADER':[], 'SEQ_STRUCT':[]}\n \n for line in lines:\n if line.startswith('Filename') or line.startswith('Organism') or\\\n line.startswith('Accession') or line.startswith('Citation') or\\\n \":\" in line:\n result['HEADER'].append(line.strip())\n elif len(line.split()) == 3:\n result['SEQ_STRUCT'].append(line.strip())\n else:\n continue #unknown\n return result", "def parse_fasta(fasta_f, contig_data):\n\n basen = os.path.basename(fasta_f)\n [soil, ecotype, media] = basen.split(\"_\")[:3]\n\n with open(fasta_f, 'rU') as IN:\n for record in SeqIO.parse(IN, \"fasta\"):\n contig_data[record.description] = {'length': len(record.seq), 'soil': soil, 'ecotype': ecotype, 'media': media}", "def fastaDictionary(inFile, chrName=None):\n\n d = {}\n for (title, seq) in FastaIterator(inFile):\n title = title.split()[0]\n if not chrName:\n d[title] = seq\n elif chrName == title:\n d[title] = seq\n return d\n\n if chrName:\n print \"NOT ABLE TO FIND!\", chrName\n return d", "def parse_fasta(self, filename):\n id = ''\n desc = ''\n tempseq = []\n try:\n seqfile = open(filename,'r')\n for line in seqfile:\n if line.startswith('>'):\n if not id is '':\n yield { 'id': id.strip(), 'desc': desc.strip(), 'dna': ''.join(tempseq) }\n if ' ' in line:\n (id, desc) = line[1::].split(' ', 1)\n else:\n id = line[1::].strip()\n desc = ''\n tempseq = []\n elif not line.startswith('>'):\n tempseq.append(line.rstrip())\n if not id is '':\n yield { 'id': id.strip(), 'desc': desc.strip(), 'dna': ''.join(tempseq) }\n except OSError:\n raise PathError(''.join(['ERROR: cannot open', refseqpath]))", "def extract_seqs(seq_filepath):\n seqs = {}\n for record in SeqIO.parse(seq_filepath.as_posix(), \"fasta\"):\n seqs[record.id] = record\n return seqs", "def read_fasta_to_dicts(fname, CONFIG):\n from Bio import SeqIO\n logger.info('Reading in FASTA from {}.'.format(fname))\n\n data_dicts = []\n with open(fname, \"rU\") as f:\n\n for record in SeqIO.parse(f, \"fasta\"):\n data = {}\n if record.description in CONFIG[\"fasta_header_swaps\"]:\n record.description = CONFIG[\"fasta_header_swaps\"][record.description]\n head = record.description.split(CONFIG['fasta_separator_character'])\n\n if len(head) != len(CONFIG[\"fasta_headers\"]):\n logger.warn(\"Skipping {} which had {} fields (expected {})\".format(record.description, len(head), len(CONFIG[\"fasta_headers\"])))\n continue\n for i in range(len(CONFIG[\"fasta_headers\"])):\n try:\n data[CONFIG[\"fasta_headers\"][i]] = head[i]\n data['sequence'] = str(record.seq)\n except KeyError:\n logger.critical(\"Error parsing FASTA header. Header: {}. 
CONFIG specifies: {}\".format(head, CONFIG[\"fasta_headers\"])); sys.exit(2)\n data_dicts.append(data)\n return data_dicts", "def read_cDNA_file_to_dict(filename):\n \n #initialize dictionary\n cDNA_dictionary = {}\n\n #open file\n with open(cDNA_file) as f:\n \n #loop through file line by line\n for line in f:\n\n #remove newline\n line = line.rstrip()\n \n #get gene name\n if line.startswith(\">\"):#If the line starts with the character \">\" then,\n gene_name = line.split(\"|\")[1]#I separate the line by the character \"|\" and assign index 1 to gene_name\n \n #read in sequence in uppercase\n if not line.startswith(\">\"):#If the line does not start with the character \">\" then,\n line = line.upper()#I make all of the characters within the line uppercase\n\n #put name and sequence in dictionary\n cDNA_dictionary[gene_name] = line#I assign the gene_name as the key and the line (sequence) as the value\n\n #return dictionary \n return cDNA_dictionary" ]
[ "0.72562164", "0.70544577", "0.6914604", "0.68844557", "0.6714899", "0.6711567", "0.66311735", "0.66147506", "0.6589891", "0.65830344", "0.6581496", "0.6561404", "0.6552191", "0.65519196", "0.6500437", "0.6498833", "0.64926106", "0.64874583", "0.6474808", "0.6440401", "0.63113225", "0.62788886", "0.62094146", "0.6179839", "0.6179035", "0.61652726", "0.6113585", "0.6110887", "0.6096501", "0.60891694" ]
0.7505833
0
Perform a multiple sequence alignment on each group in grouped_hits.txt and isolate a consensus sequence from each alignment
import subprocess as sp
from glob import glob

import Bio.AlignIO
from Bio.Align import AlignInfo


def isolate_consensus(groups, fasta):
    # Each line of `groups` names the member sequences of one cluster;
    # `fasta` maps sequence names to their sequences.
    with open(groups, "r") as f:
        group_lines = f.readlines()

    sequences_in_out = open("sequences_in_out.table", "w")
    consensus_sequences = open("consensus_sequences.fasta", "w")

    for consensus_num, line in enumerate(group_lines):
        group_sequences = line.rsplit()
        open("temp_sequences", "w").close()
        # Write group of sequences to temporary file
        for group in group_sequences:
            sequences_in_out.write("{0}\t".format(group))
            with open("temp_sequences", 'a') as output:
                output.write(">{0}\n{1}\n".format(group, fasta[group]))
        # Run MAFFT alignment (cluster-specific MAFFT path)
        sp.call(['/apps/mafft/7.127/bin/mafft', '--adjustdirection',
                 '--clustalout', '--preservecase', 'temp_sequences'],
                stdout=open('temp_alignment.txt', 'w'),
                stderr=open('temp_mafft_log', 'w'))
        # cat MAFFT log files
        sp.call(['cat', 'temp_mafft_log'],
                stdout=open("all_alignments.log", 'a+'))
        # cat MAFFT alignment files
        sp.call(['cat', 'temp_alignment.txt'],
                stdout=open("all_alignments.faa", 'a+'))

        # Record the group members and the consensus they produced
        sequences_in_out.write("||\t>Consensus_{0}\n".format(consensus_num + 1))
        align = Bio.AlignIO.read("temp_alignment.txt", "clustal")
        summary_align = AlignInfo.SummaryInfo(align)
        consensus = summary_align.dumb_consensus(threshold=0.51, ambiguous='N')
        consensus_sequences.write(">Consensus_{0}\n{1}\n".format(
            consensus_num + 1, consensus))

    sequences_in_out.close()
    consensus_sequences.close()
    # Remove the per-group temporary files
    temp_files = glob("*temp*")
    sp.call(['rm'] + temp_files)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main(argv):\n \n ### gets data from csv, sets variables\n seq1, seq2 = get_seqs('../data/seq.csv')\n \n \n # Assign the longer sequence to s1, and the shorter to s2\n l1, l2 = len(seq1), len(seq2)\n if l1 >= l2:\n s1, s2 = ((l2 - 1) * \".\" + seq1 + (l2 - 1) * \".\"), seq2\n #puts l2-1 \".\"s both sides of l1, allows alignment of all overlap combos\n else:\n s1, s2 = ((l1 - 1) * \".\" + seq2 + (l1 - 1) * \".\"), seq1\n l1, l2 = l2, l1 \n\n # writes alignment(s) with highest score into output file\n my_best_score = -1 #so 0 beats best score\n for i in range(l1 + l2 -1):\n score, matched, shift, end_shift = calculate_score(s1, s2, l1, l2, i)\n #assigns returns from calc_score function to these variables\n if score > my_best_score:\n my_best_score = score\n statement = \"This alignment occurs when the smaller strand (\" + \\\n str(l2) + \"nt in length) attaches from base \" + str(i - l2 + 2) + \\\n \" of the larger strand, with the highest score of \" + str(score) + \\\n \":\\n\"\n #statement explaining the alignment in detail\n best_comparison_highSP = (shift + matched + (l2 - 1) * \".\" + \"\\n\")\n best_comparison_lowSP = (shift + matched + end_shift + \"\\n\")\n best_s2, best_s1 = (shift + s2 + end_shift + \"\\n\"), (s1 + \"\\n\\n\\n\")\n #formats the matching, s1 and s2 lines to line-up neatly\n if i < l1 - 1:\n best_alignment = (str(statement) + str(best_comparison_lowSP) \\\n + str(best_s2) + str(best_s1))\n else:\n best_alignment = (str(statement) + str(best_comparison_highSP) \\\n + str(best_s2) + str(best_s1))\n # uses returned variables to write a statement about the alignment \n # giving its score and startpoint, and assigns 3 lines of alignment \n # (s1, s2 and matching bases) to a variable each for later printing\n f = open('../results/seqs_align.txt', 'w')\n f.write(best_alignment)\n f.close()\n print(\"Done!\")\n return None", "def align_groups(groups_dir, prog, align_dir):\n ra = RunAlign()\n ra.run_for_all_in(groups_dir, prog, align_dir)", "def parse_sam(rows):\n row1, row2 = rows\n mseqs = {}\n failed_list = []\n insert_list = []\n rname = row1['rname']\n qname = row1['qname']\n cigar1 = row1['cigar']\n cigar2 = row2['cigar']\n\n # filtering criteria\n reason = None\n if cigar1 == '*':\n reason = 'R1 unmapped'\n if int(row1['mapq']) < read_mapping_cutoff:\n reason = 'R1 low mapq'\n\n if cigar2 == '*':\n reason = 'R2 unmapped'\n if int(row2['mapq']) < read_mapping_cutoff:\n reason = 'R2 low mapq'\n\n genotype1, genotype2 = None, None\n try:\n genotype1 = row1['rname'].split('-')[1][0]\n genotype2 = row2['rname'].split('-')[1][0]\n except:\n reason = 'discordant map'\n pass\n\n if genotype1 != genotype2:\n reason = 'map conflict'\n\n if reason:\n failed_list.append({'qname': qname,\n 'rname1': row1['rname'],\n 'rname2': row2['rname'],\n 'reason': reason})\n else:\n pos1 = int(row1['pos'])-1 # convert 1-index to 0-index\n _, seq1, qual1, inserts = apply_cigar(cigar1, row1['seq'], row1['qual'])\n \n # report insertions relative to sample consensus\n for left, (iseq, iqual) in inserts.iteritems():\n insert_list.append({'qname': qname,\n 'fwd_rev': 'F' if is_first_read(row1['flag']) else 'R',\n 'refname': rname,\n 'pos': pos1+left,\n 'insert': iseq,\n 'qual': iqual})\n \n seq1 = '-'*pos1 + seq1 # pad sequence on left\n qual1 = '!'*pos1 + qual1 # assign lowest quality to gap prefix so it does not override mate\n \n \n # now process the mate\n pos2 = int(row2['pos'])-1 # convert 1-index to 0-index\n _, seq2, qual2, inserts = apply_cigar(cigar2, row2['seq'], row2['qual'])\n 
for left, (iseq, iqual) in inserts.iteritems():\n insert_list.append({'qname': qname,\n 'fwd_rev': 'F' if is_first_read(row2['flag']) else 'R',\n 'refname': rname,\n 'pos': pos2+left,\n 'insert': iseq,\n 'qual': iqual})\n seq2 = '-'*pos2 + seq2\n qual2 = '!'*pos2 + qual2\n \n # merge reads\n for qcut in sam2aln_q_cutoffs:\n mseq = merge_pairs(seq1, seq2, qual1, qual2, qcut)\n prop_N = mseq.count('N') / float(len(mseq.strip('-')))\n if prop_N > max_prop_N:\n # fail read pair\n failed_list.append({'qname': qname,\n 'reason': 'merge failure'})\n continue\n mseqs[qcut] = mseq\n\n return rname, mseqs, insert_list, failed_list", "def build_alignment(self,score,pieces):\n\t \t# build text\n\t\tself.open_seqs()\n\t\ttext1 = text2 = \"\"\n\t\tend1 = end2 = None\n\t\tfor (start1,start2,length,pctId) in pieces:\n\t\t\tif (end1 != None):\n\t\t\t\tif (start1 == end1): # insertion in sequence 2\n\t\t\t\t\ttext1 += self.seq1_gap * (start2-end2)\n\t\t\t\t\ttext2 += self.seq2_file.get(end2,start2-end2)\n\t\t\t\telse: # insertion in sequence 1\n\t\t\t\t\ttext1 += self.seq1_file.get(end1,start1-end1)\n\t\t\t\t\ttext2 += self.seq2_gap * (start1-end1)\n\n\t\t\ttext1 += self.seq1_file.get(start1,length)\n\t\t\ttext2 += self.seq2_file.get(start2,length)\n\t\t\tend1 = start1 + length\n\t\t\tend2 = start2 + length\n\t\t# create alignment\n\t\tstart1 = pieces[0][0]\n\t\tstart2 = pieces[0][1]\n\t\tend1 = pieces[-1][0] + pieces[-1][2]\n\t\tend2 = pieces[-1][1] + pieces[-1][2]\n\t\tsize1 = end1 - start1\n\t\tsize2 = end2 - start2\n\t\ta = Alignment(score=score,species_to_lengths=self.species_to_lengths)\n\t\t#if (self.seq1_strand == \"-\"): start1 = self.seq1_file.length - end1\n\t\ta.add_component(Component(self.seq1_src,start1,size1,self.seq1_strand,text=text1))\n\t\t#if (self.seq2_strand == \"-\"): start2 = self.seq2_file.length - end2\n\t\ta.add_component(Component(self.seq2_src,start2,size2,self.seq2_strand,text=text2))\n\t\treturn a", "def isolate_ungrouped_contigs(fasta_dict, groups):\n with open(groups) as f:\n grouped_contigs = list(chain.from_iterable(\n [i.split() for i in f.readlines()]\n )\n )\n db_deflines = fasta_dict.keys()\n ungrouped_contigs = [i for i in db_deflines if i not in grouped_contigs]\n with open(\"ungrouped_contigs.fasta\",'w') as output:\n for contig in ungrouped_contigs:\n output.write(\">{0}\\n{1}\\n\".format(contig, fasta_dict[contig])) \n sp.call(['cat','consensus_sequences.fasta','ungrouped_contigs.fasta'], \n stdout=open(\"full_assembly_after_consensus.fasta\",'w'))", "def process_results(percentage_ids, alignment_lengths, percentage_ids_other,\n alignment_lengths_other, best_hits):\n\n len_percentage_ids = len(percentage_ids)\n len_alignment_lengths = len(alignment_lengths)\n results = []\n\n for i, j in [(i,j) for i in range(len_percentage_ids) for j in range(len_alignment_lengths)]:\n filename = \"p1_%d-a1_%d_p2_%d-a2_%d\" % (percentage_ids[i],\n alignment_lengths[j], percentage_ids_other[i], alignment_lengths_other[j])\n results.append({ 'filename': filename, 'db_interest': 0, 'db_other': 0,\n 'perfect_interest': 0, 'equal': 0, 'summary': ['#SeqId\\tFirst\\t'\n 'Second'], 'db_seqs_counts': {'a': {}, 'b': {} } })\n\n for seq_name, values in best_hits.items():\n seq_name = seq_name.split(' ')[0].strip()\n for i, vals in enumerate(values):\n if not vals:\n continue\n\n # Validating duplicated results in the databases\n #@@@ Do this step in a different script early in the pipeline\n if vals['a']['subject_id'] not in results[i]['db_seqs_counts']['a']:\n 
results[i]['db_seqs_counts']['a'][vals['a']['subject_id']]=0\n if vals['a']['subject_id'] == results[i]['db_seqs_counts']['b']:\n raise Warning, \"%s is in both databases\" % vals['a']['subject_id']\n if vals['b']['subject_id'] not in results[i]['db_seqs_counts']['b']:\n results[i]['db_seqs_counts']['b'][vals['b']['subject_id']]=0\n if vals['b']['subject_id'] == results[i]['db_seqs_counts']['a']:\n raise Warning, \"%s is in both databases\" % vals['b']['subject_id']\n\n # Comparing bit_scores to create outputs\n if vals['a']['bit_score']==vals['b']['bit_score']:\n results[i]['equal'] += 1\n results[i]['summary'].append('%s\\t%s\\t%s' % (seq_name, vals['a']['subject_id'], vals['b']['subject_id']))\n results[i]['db_seqs_counts']['a'][vals['a']['subject_id']] += 1\n results[i]['db_seqs_counts']['b'][vals['b']['subject_id']] += 1\n elif vals['a']['bit_score']>vals['b']['bit_score']:\n if not vals['b']['subject_id']:\n results[i]['perfect_interest'] += 1 \n results[i]['summary'].append('%s\\t%s\\t' % (seq_name, vals['a']['subject_id']))\n results[i]['db_seqs_counts']['a'][vals['a']['subject_id']] += 1\n else:\n results[i]['db_other'] += 1\n results[i]['summary'].append('%s\\n\\t%s' % (seq_name, ''))\n results[i]['db_seqs_counts']['b'][vals['b']['subject_id']] += 1\n\n return results", "def muscle_alignment(seqrecs, preserve_order=True, **kwargs):\n\n def commandline(file_obj, **kwargs):\n cline = MuscleCommandline(input=file_obj.name, **kwargs)\n return _generic_aligner_commandline_stdout(cline)\n\n # Muscle reorders alignment by default, but don't overwrite 'group' if already set\n if \"group\" not in kwargs:\n kwargs[\"group\"] = not preserve_order\n return _generic_alignment(commandline, seqrecs, preserve_order=preserve_order, **kwargs)", "def generateAlignment(seqs):\n \"\"\"Create temporary file for MUSCLE\"\"\"\n inFn = tempfile.mktemp(prefix='tmp_align', suffix='.fasta', dir=None)\n outFn = tempfile.mktemp(prefix='tmp_align', suffix='.fasta', dir=None)\n \n \n \"\"\"Creates an align object or pd.Series() with indexing to preserve order but does not appyl padding\"\"\"\n align = padAlignment(seqs, applyPadding=False)\n \"\"\"Put alignments in the tempfiles\"\"\"\n align2fasta(seqs, inFn, applyPadding=False)\n\n muscleCommand = ['muscle', '-in', inFn, '-out', outFn]\n result = subprocess.call(muscleCommand)\n\n \"\"\"If MUSCLE was successful\"\"\"\n if not result:\n outAlign = fasta2align(outFn)\n else:\n print(\"Error in MUSCLE!\")\n raise Exception(\"MUSCLEError\")\n \"\"\"Remove the temporary files\"\"\"\n os.remove(inFn)\n os.remove(outFn)\n \n \"\"\"MUSCLE seqs need to be reorderd using the original index\"\"\"\n outAlign = outAlign.loc[[str(i) for i in align.index]]\n \"\"\"Index was str() through FASTA files so reset index with original index\"\"\"\n outAlign.index = align.index\n \n \"\"\"Check that all seqs are being returned in the correct order\"\"\"\n badSeqs = 0\n if not len(seqs) == len(outAlign):\n print('Different number of output seqs!')\n badSeqs+=1\n\n for i, s1, s2 in zip(np.arange(len(seqs)), seqs, outAlign):\n if not s1.replace('-', '') == s2.replace('-', ''):\n print('%d: %s != %s' % (i, s1, s2))\n badSeqs+=1\n if badSeqs>0:\n raise Exception('Output seqs are different than input seqs! 
(%d)' % badSeqs)\n\n return outAlign", "def BLAST_alignment(species, index_query, index_alignment, index_identity, prot):\n alignments = {}\n seq_id = []\n boo = True\n with open(blastpPath + '/BLAST_%s_mouse' % species) as f:\n for line in f:\n if boo:\n if line[0] != '#':\n query = re.split(\"\\||\\t\", line)[index_query]\n iden = float(re.split(\"\\||\\t\", line)[index_identity])\n if query in prot:\n seq_id.append(iden)\n boo = False\n if line[0] == '#':\n boo = True\n\n return np.array(seq_id)", "def get_read_alignments(sam_f):\n sparser = samparser.SamParser(sam_f=sam_f, aligned_only=True, mapq=20, mismatches=1)\n \n # parse all the hits into this to make sure multi mapping hits map to the same contig\n hit_dict = {}\n ambig_reads = 0\n processed_reads = 0\n for hit in sparser.parse_sam_file():\n processed_reads += 1\n if hit_dict.get(hit['qname'], 0):\n if hit_dict[hit['qname']] != hit['rname']:\n print(\"Warning read: {} aligns to two different contigs\".format(hit['qname']), file=sys.stderr)\n ambig_reads += 1\n else:\n continue\n else:\n hit_dict[hit['qname']] = hit['rname']\n\n print(\"{} of {} processed reads were ambiguous.\".format(ambig_reads, processed_reads))\n\n # condense the hit dict into a contig dict\n contig_dict = {}\n for read, contig in hit_dict.items():\n if contig_dict.get(contig, 0):\n contig_dict[contig].append(read)\n else:\n contig_dict[contig] = [read]\n\n return contig_dict", "def main(filein, min_reads=150, n_group=2000):\n from random import sample\n assert os.path.exists(filein)\n hq = filter_reads(filein)\n logging.info('remove matching reads')\n no_pol = remove_matching_reads(hq, cont_file)\n # no_pol = 'clean_reads.fasta'\n no_pol_reads = list(SeqIO.parse(no_pol, 'fasta'))\n no_pol_reads = sample(no_pol_reads, k=len(no_pol_reads))\n covering_reads = set([])\n logging.info('blast reads in batches until enough are found')\n total_blasted = 0\n for i, group in enumerate(grouper(n_group, no_pol_reads)):\n if i > 2 and len(covering_reads) < 20:\n sys.exit('not enough reads covering V3 were found')\n logging.info('blast call %d', i + 1)\n _ = blast_reads(group)\n covering_reads.update(_)\n total_blasted += n_group\n logging.info('this blast: %d covering out of %d total - %3.2f %%', len(_), n_group,\n 100 * float(len(_)) / n_group)\n logging.info('cumulative: %d covering out of %d total - %3.2f %%', len(covering_reads), total_blasted,\n 100 * float(len(covering_reads)) / total_blasted)\n if len(covering_reads) >= min_reads:\n break\n\n logging.info('covering_reads used in MSA: %d out of %d blasted (%3.2f %%)', len(covering_reads), total_blasted,\n 100 * float(len(covering_reads)) / total_blasted)\n cov_reads, n_fwd, n_rev = extract_reads(covering_reads, no_pol)\n\n SeqIO.write(cov_reads, 'v3reads.fasta', 'fasta')\n logging.info('%d covering reads in forward orientation', n_fwd)\n logging.info('%d covering reads in reverse orientation', n_rev)\n if n_fwd + n_rev < min_reads:\n logging.error('Not enough reads: %d', n_fwd + n_rev)\n sys.exit('Not enough reads: %d' % (n_fwd + n_rev))\n\n no_singleton_reads = [s for s in SeqIO.parse('v3reads.fasta', 'fasta') if int(s.id.split('_')[-1]) > 1]\n SeqIO.write(no_singleton_reads, 'v3reads_no_singleton.fasta', 'fasta')\n\n cml = shlex.split('muscle -in v3reads_no_singleton.fasta -out msa.fasta -quiet')\n subprocess.call(cml)\n\n df, haplotypes, support = msa_2_df('msa.fasta')\n logging.info('Haplotypes supported by %d reads out of %d: %3.1f%%',\n support, n_fwd + n_rev, 100.0 * support / (n_fwd + n_rev))\n cons_seq = 
df_2_ambiguous_sequence(df)\n SeqIO.write([SeqRecord(Seq(cons_seq), id='v3_consensus', description='')], 'v3cons.fasta', 'fasta')\n\n haps = []\n hi = 1 # counter for haplotypes, used in fasta file\n accounted_f = 0.0 # keep track of the cumulative accounted frequency\n tot_reads = sum(haplotypes.values())\n for h, support in haplotypes.most_common():\n f = round(float(support) / tot_reads, 2)\n accounted_f += f\n sr = SeqRecord(Seq(h), id='v3_haplotype_%d-support_%3.2f' % (hi, f), description='')\n haps.append(sr)\n hi += 1\n\n SeqIO.write(haps, 'v3haplotypes.fasta', 'fasta')\n for f in ['high_quality.fastq', 'clean_reads.fasta']:\n os.remove(f)\n logging.info('Haplotypes written to haplotypes.fasta')", "def question2():\n \n # load sequences and scoring matrix\n score_matrix = read_scoring_matrix(PAM50_URL)\n human_seq = \"HSGVNQLGGVFVNGRPLPDSTRQKIVELAHSGARPCDISRILQVSNGCVSKILGRYYETGSIRPRAIGGSKPRVATPEVVSKIAQYKRECPSIFAWEIRDRLLSEGVCTNDNIPSVSSINRVLRNLASEKQQ\"\n frfly_seq = \"HSGVNQLGGVFVGGRPLPDSTRQKIVELAHSGARPCDISRILQVSNGCVSKILGRYYETGSIRPRAIGGSKPRVATAEVVSKISQYKRECPSIFAWEIRDRLLQENVCTNDNIPSVSSINRVLRNLAAQKEQQ\"\n consensus_pax = read_protein(CONSENSUS_PAX_URL)\n \n # compute human and fruitfly global alignment matrix with consensus pax\n human_align_matrix = student.compute_alignment_matrix(human_seq, consensus_pax, score_matrix, True)\n frfly_align_matrix = student.compute_alignment_matrix(frfly_seq, consensus_pax, score_matrix, True)\n \n # compute human and fruitfly global alignment sequences\n score_human, human_align, consensus_align = student.compute_global_alignment(human_seq, consensus_pax, \n score_matrix, human_align_matrix)\n score_fly, frfly_align, consensus_align_2 = student.compute_global_alignment(frfly_seq, consensus_pax,\n score_matrix, frfly_align_matrix)\n \n # compute percentages match for human and fruitfly\n human_count = 0.0\n for index in range(len(human_align)):\n if human_align[index] == consensus_align[index]:\n human_count += 1\n \n frfly_count = 0.0\n for index in range(len(frfly_align)):\n if frfly_align[index] == consensus_align_2[index]:\n frfly_count += 1\n \n print \"% Human: \" + str(human_count / len(human_align) * 100)\n print \"Hmn: \" + human_align\n print \"PAX: \" + consensus_align\n \n print \"\"\n \n print \"% FrFly: \" + str(frfly_count / len(frfly_align) * 100)\n print \"Fly: \" + frfly_align\n print \"PAX: \" + consensus_align_2", "def alignScore():\n matrix = mapMatrix(\"BLOSUM62\")\n \n path = \"./data/\"\n for file in os.listdir(path):\n if file.endswith(\".fa\") or file.endswith(\".fasta\"):\n sequences = []\n input_sequences = SeqIO.parse(path + file, \"fasta\", \\\n IUPAC.protein)\n\n for record in input_sequences:\n seq = str(record.seq)\n sequences.append(seq) \n \n SumOfPairs = 0\n for pair in combinations(sequences, 2): \n SumOfPairs += pairwiseScore(pair[0], pair[1], matrix)\n \n print SumOfPairs", "def test_overlapping_alignments_2():\n generate_bam_file(gqd.sam_content, gqd.sam_bam_prefix)\n gqd.gene_wise_quantification._min_overlap = 5\n sam = pysam.Samfile(gqd.sam_bam_prefix + \".bam\")\n # 1 overlapping base in the 5' end of the reads => not enough\n assert mapping_ids(gqd.gene_wise_quantification._overlapping_alignments(\n sam, Gff3EntryMoc(\"chrom\", 1, 10))) == []\n # 4 overlapping base in the 5' end of the reads => not enough\n assert mapping_ids(gqd.gene_wise_quantification._overlapping_alignments(\n sam, Gff3EntryMoc(\"chrom\", 1, 13))) == []\n # 5 overlapping base in the 5' end of the reads => okay\n assert 
mapping_ids(gqd.gene_wise_quantification._overlapping_alignments(\n sam, Gff3EntryMoc(\"chrom\", 1, 14))) == [\n \"myread:01\", \"myread:02\", \"myread:03\", \"myread:04\", \"myread:05\"]\n # 1 overlapping base in the 3' end of the reads => not enough\n assert mapping_ids(gqd.gene_wise_quantification._overlapping_alignments(\n sam, Gff3EntryMoc(\"chrom\", 19, 23))) == []\n # 4 overlapping base in the 3' end of the reads => not enough\n assert mapping_ids(gqd.gene_wise_quantification._overlapping_alignments(\n sam, Gff3EntryMoc(\"chrom\", 16, 23))) == []\n # 5 overlapping base in the 3' end of the reads => not enough\n assert mapping_ids(gqd.gene_wise_quantification._overlapping_alignments(\n sam, Gff3EntryMoc(\"chrom\", 15, 23))) == [\n \"myread:01\", \"myread:02\", \"myread:03\", \"myread:04\", \"myread:05\"]", "def align(args) :\n from aligner import align_reads\n align_reads(args)", "def do_aligns(in_fastq, references, errors, outdir, picard, scriptdir):\n for ref in references:\n out_sam = run_bowtie(in_fastq, ref[\"file\"], os.path.basename(ref[\"file\"]),\n errors, out_dir=outdir)\n cur_bams = []\n for strand in [True, False]:\n out_bam = to_strand_bam(out_sam, strand)\n sort_bam = run.picard_sort(picard, out_bam)\n cur_bams.append(sort_bam)\n bam_to_wig(sort_bam)\n if ref.get(\"chr_dist\", False):\n plot_chr_dist(cur_bams, ref, picard, scriptdir)\n if ref.get(\"feature_prep\", False):\n genbank_to_gff(\"%s.gb\" % ref[\"file\"])", "def align(aligner, reads):\n counter = 0\n for read in SeqIO.parse(reads, \"fasta\"): \n try:\n alignInfo = next(aligner.map(str(read.seq)))\n print(alignInfo) \n except StopIteration:\n print(read.format(\"fasta\"), end='')", "def align(self):\n\n # load the alignment parameters into the align_params object\n self.align_params.load_params_from_file(self.input_file)\n\n # populate the score matrices based on the input parameters\n self.populate_score_matrices()\n\n # perform a traceback and write the output to an output file\n\n ### FILL IN ###", "def perform_alignment(transcript: str, google_output: object, verbosity: int,\n alignment_parameters: Dict[str, Any]) -> Dict[str, Any]:\n alignments = Bio.pairwise2.align.localms(\n google_output,\n transcript,\n alignment_parameters[\"algorithm\"][\"match_reward\"],\n alignment_parameters[\"algorithm\"][\"mismatch_penalty\"],\n alignment_parameters[\"algorithm\"][\"gap_penalty\"],\n alignment_parameters[\"algorithm\"][\"gap_penalty\"],\n penalize_end_gaps=(True, True),\n one_alignment_only=True,\n )\n\n alignment = alignments[0]\n\n return {\n \"google\": alignment[0],\n \"transcript\": alignment[1],\n \"score\": alignment[2]\n }", "def find_matching_seqs_from_alignment(sequences, ref_sequence):\n\n # if the first sequence (gaps removed) in MSA matches with reference,\n # return this sequence.\n first_seq_in_alignment = sequences[0] \n #first_seq_in_alignment_gaps_removed = first_seq_in_alignment.replace('-','')\n first_seq_in_alignment_gaps_removed = find_and_replace(first_seq_in_alignment, '-','')\n if first_seq_in_alignment_gaps_removed == ref_sequence:\n print('\\n\\tFirst sequence in alignment (gaps removed) matches reference,'\n '\\n\\tSkipping regorous search for matching sequence'\n )\n first_seq = list()\n first_seq.append(first_seq_in_alignment)\n return first_seq\n pairwise_scores = []\n for seq_indx, seq in enumerate(sequences):\n #seq_gaps_removed = seq.replace('-','')\n seq_gaps_removed = find_and_replace(seq, '-', '')\n print(seqs_gaps_removed)\n\n score = align_pairs_local(\n ref_sequence,\n 
seq_gaps_removed,\n score_only = True,\n )\n score_at_indx = (seq_indx, score)\n pairwise_scores.append(score_at_indx)\n\n seq_indx, max_score = max(pairwise_scores, key=lambda x: x[1])\n matching_seqs_indx = [\n indx for indx, score in pairwise_scores if score == max_score\n ]\n\n best_matching_seqs = [\n sequences[indx] for indx in matching_seqs_indx\n ]\n num_matching_seqs = len(best_matching_seqs)\n if num_matching_seqs > 1 :\n print('\\n\\tFound %d sequences in MSA that match the reference'\n '\\n\\tThe first sequence is taken as matching'% num_matching_seqs\n )\n return best_matching_seqs", "def process_align_output(suffix='_t'):\n for ii in range(1, 4+1):\n s = starset.StarSet('align_{0}{1}'.format(ii, suffix))\n print 'Loaded starset ', ii\n\n n_users = 3\n n_frames = 3\n n_stars = len(s.stars)\n\n x = np.zeros((n_users, n_frames, n_stars), dtype=float)\n y = np.zeros((n_users, n_frames, n_stars), dtype=float)\n m = np.zeros((n_users, n_frames, n_stars), dtype=float)\n\n for uu in range(n_users):\n for ff in range(n_frames):\n icol = ff*n_frames + uu\n\n x[uu, ff, :] = s.getArrayFromEpoch(icol, 'xpix')\n y[uu, ff, :] = s.getArrayFromEpoch(icol, 'ypix')\n m[uu, ff, :] = s.getArrayFromEpoch(icol, 'mag')\n\n cat = np.array((x, y, m))\n pyfits.writeto('catalog_quad_{0}{1}.fits'.format(ii, suffix), cat,\n output_verify='silentfix', clobber=True)", "def _align_single_end_reads(self):\n read_aligner = ReadAligner(self._args.segemehl_bin, self._args.progress)\n if self._file_needs_to_be_created(self._pathcreator.index_path):\n read_aligner.build_index(\n self._pathcreator.ref_seq_path_list,\n self._pathcreator.index_path,\n )\n for read_path, output_path, nomatch_path in zip(\n self._pathcreator.processed_read_paths,\n self._pathcreator.read_alignment_bam_paths,\n self._pathcreator.unaligned_reads_paths,\n ):\n if not self._file_needs_to_be_created(output_path):\n continue\n\n read_aligner.run_alignment(\n read_path,\n self._pathcreator.index_path,\n self._pathcreator.ref_seq_path_list,\n output_path,\n nomatch_path,\n int(self._args.processes),\n int(self._args.segemehl_accuracy),\n float(self._args.segemehl_evalue),\n self._args.split,\n paired_end=False,\n )", "def build_mm_df(sralist):\n\n def convert_to_codon(nts_array):\n \"\"\"\n pysam output is in nucleotides resolution, but scikit_curated_df uses codon resolution.\n This function converts nucleotide arrays to codon length (nts to codon resolution):\n \"\"\"\n \n nts_array = np.array(nts_array)\n codon_array = np.sum( np.reshape(A, (int(np.floor(nts_array[1]/3)),3) ), 1)/3.\n\n return codon_array\n\n\n def compute_mm(mmdata):\n \"\"\"\n get per gene average multi-mapping score\n \"\"\"\n\n mm_df = pd.DataFrame(columns=['ORF', 'MM'])\n counter = 0\n\n for gene in mmdata.keys():\n current_matrix = mmdata[gene]\n current_avrg = np.mean( np.sum(current_matrix, 1) / current_matrix.shape[1] )\n mm_df.loc[counter] = [gene, current_avrg]\n counter += 1\n\n return mm_df\n\n\n mm_mat = {}\n mm_pct = {}\n\n N = len(sralist)\n\n for ix, dataset in enumerate(sralist):\n samfile = pysam.AlignmentFile(TMP_DIR+'/ambiguous_reads/'+dataset+'_STAR_transcriptome_multi_mapped_sorted.bam', 'rb')\n genes_list = list(samfile.references)\n print(ix, dataset)\n\n for geneID in genes_list:\n # count the coverage of genomic positions by reads in region.\n # Returns: four array.arrays of the same length in order A C G T\n # The coverage is computed per-base [ACGT]\n cov = samfile.count_coverage(geneID, read_callback='nofilter')\n # Summ all 4 arrays\n cov_sum 
= np.sum(cov, axis=0)\n #print(geneID, cov_sum)\n codon_cov = convert_to_codon(cov_sum)\n codon_bool = np.asarray([1 if i > 0 else 0 for i in codon_cov])\n \n M = len(codon_bool)\n\n if ix == 0:\n \tmm_mat[geneID] = np.zeros((N,M)) * np.nan\n \n current_matrix = mm_mat[geneID]\n current_matrix[ix,:] = np.copy(codon_bool)\n mm_mat[geneID] = current_matrix\n\n\n mm_avrg = compute_mm(mm_mat)\n #mm_avrg.to_json('yeast_mm.json')\n #mm_avrg.to_csv('yeast_mm.txt', header=True, index=False, sep='\\t')\n\n \n mm_profile = {}\n theta_mm = 5\n for orf in mm_mat.keys():\n current_mat = mm_mat[orf]\n current_bool = np.sum(current_mat, 0) <= theta_mm\n mm_profile[orf] = current_bool\n\n with open('../data/processed/mm_consensus.pkl', 'wb') as f_mm:\n pickle.dump(mm_profile, f_mm)\n\n\n return mm_mat, mm_avrg, mm_profile", "def test_align():\n target = ('TAAATAAATATCTGGTGTTTGAGGCAAAAAGGCAGACTTAAATTCTAAATCACACCTGTGCTT'\n 'CCAGCACTACCTTCAAGCGCAGGTTCGAGCCAGTCAGGCAGGGTACATAAGAGTCCATTGTGC'\n 'CTGTATTATTTTGAGCAATGGCTAAAGTACCTTCACCCTTGCTCACTGCTCCCCCACTTCCTC'\n 'AAGTCTCATCGTGTTTTTTTTAGAGCTAGTTTCTTAGTCTCATTAGGCTTCAGTCACCAT')\n query = ('TCTGGTGTTTGAGGCAAAAAGGCAGACTTAAATTCTAAATCACACCTGTGCTTCCAGCACTACC'\n 'TTCAAGCGCAGGTTCGAGCCAGTCAGGACTGCTCCCCCACTTCCTCAAGTCTCATCGTGTTTTT'\n 'TTTAGAGCTAGTTTCTTAGTCTCATTAGGCTTCAGTCACCATCATTTCTTATAGGAATACCA')\n assert kevlar.align(target, query) == ('10D91M69D79M20I', 155)", "def mergeChainedAlignedSegments(chainedAlignedSegments, refSequence, readSequence):\n cAR = pysam.AlignedSegment()\n aR = chainedAlignedSegments[0]\n cAR.query_name = aR.query_name\n \n #Parameters we don't and therefore set properly\n #cAR.flag = aR.flag\n #cAR.mapq = aR.mapq\n #cAR.mrnm = 0\n #cAR.mpos=0\n #cAR.isize=0\n #cAR.qual = \"<\" * len(readSequence)\n #cAR.tags = aR.tags \n cAR.next_reference_id = -1\n cAR.reference_start = aR.reference_start #Reference start\n cAR.is_reverse = aR.is_reverse\n cAR.query_sequence = reverseComplement(readSequence) if cAR.is_reverse else readSequence\n cAR.reference_id = aR.reference_id\n cigarList = []\n pPos = aR.reference_start\n #Iterate from the other end of the sequence if reversed\n pQPos = -(len(readSequence)-1) if cAR.is_reverse else 0 \n \n for aR in chainedAlignedSegments:\n assert cAR.is_reverse == aR.is_reverse\n #Add a deletion representing the preceding unaligned reference positions\n assert aR.reference_start >= pPos\n if aR.reference_start > pPos:\n cigarList.append((2, aR.reference_start - pPos))\n pPos = aR.reference_start \n \n #Add an insertion representing the preceding unaligned read positions\n #make it a soft clip if it is the first chained alignment\n qPos = getFirstNonClippedPositionInRead(aR, readSequence)\n assert qPos >= pQPos\n if qPos > pQPos:\n cigarList.append((4 if aR == chainedAlignedSegments[0] else 1, qPos - pQPos)) \n pQPos = qPos\n \n #Add the operations of the cigar, filtering hard and soft clipping\n for op, length in aR.cigar:\n assert op in (0, 1, 2, 4, 5)\n if op in (0, 1, 2):\n cigarList.append((op, length))\n if op in (0, 2): #Is match or deletion\n pPos += length\n if op in (0, 1): #Is match or insertion\n pQPos += length\n \n assert pPos <= len(refSequence)\n \n #Set reference end coordinate (which is exclusive)\n #cAR.reference_end = pPos #We don't do this because it is set by cigar string\n \n #Now add any trailing, necessary soft clipping\n if cAR.is_reverse:\n assert pQPos <= 1\n if pQPos < 1:\n cigarList.append((4, -pQPos + 1))\n else:\n assert pQPos <= len(readSequence)\n if pQPos < len(readSequence):\n 
cigarList.append((4, len(readSequence) - pQPos))\n \n cAR.cigar = tuple(cigarList)\n \n #Check ops\n for op, length in cAR.cigar: #We should have no hard clipped ops\n assert op in (0, 1, 2, 4)\n \n #Reference sequence check coordinates\n assert sum([ length for op, length in cigarList if op in (0, 2)]) == cAR.reference_end - cAR.reference_start\n assert cAR.reference_start >= 0 and cAR.reference_start < len(refSequence)\n assert cAR.reference_end >= 0 and cAR.reference_end <= len(refSequence)\n \n #Read sequence check coordinates\n assert cAR.query_alignment_start >= 0 and cAR.query_alignment_start < len(readSequence)\n assert cAR.query_alignment_end >= 0 and cAR.query_alignment_end <= len(readSequence)\n assert cAR.query_alignment_start + sum([ length for op, length in cigarList if op in (0, 1)]) == cAR.query_alignment_end\n \n return cAR", "def bam_output(args):\n\n for strand in ['watson', 'crick']:\n merged_sam = os.path.join(args.output_dir, '%s_mergedAligned.out.sam' % strand)\n joined_sam = os.path.join(args.output_dir, '%s_joinedAligned.out.sam' % strand)\n out_sam = tempfile.NamedTemporaryFile(prefix=strand, suffix='.sam', dir=args.output_dir)\n #rewrite sam file merged and joined for watson and crick\n parse_sam(merged_sam, out_sam.name, 'merged', strand)\n #TODO: determine why joined reads have more soft-clips or single read matches\n parse_sam(joined_sam, out_sam.name, 'joined', strand)\n #convert to sorted and indexed bam\n cmd = 'cat %s %s |samtools view -@ 4 -Shb |sambamba sort -m 4GB --tmpdir %s -t %s -o %s /dev/stdin'%(args.header,\n out_sam.name,args.tmpdir, args.threads,\n os.path.join(args.output_dir,'%s.bam' % strand) )\n log = \"make sorted bam file\"\n run_subprocess([cmd], args, log)\n out_sam.close()\n return args", "def readalign(self, opt, fh):\n## print \"entering readalign:\", opt\n edgeInfo = {}\n for p in opt:\n (key, value) = p.split('=')\n edgeInfo[key] = value\n\n s = fh.readline().split()\n## print s;\n if(len(s) == 7 and s[0] == 's'):\n vseq = self._vseq(len(s[6]))\n self.mAlign += vseq\n while len(s) == 7 and s[0] == 's':\n # Add the sequence name to the dictionary,\n # then add a corresponding node to the mapping.\n if s[1] not in self.sequences:\n self.sequences[s[1]] = AnonSequence(int(s[5]), s[1])\n self.mAlign += self.sequences[s[1]]\n\n # PROCESS THE KNOWN INTERVALS\n if(s[4] == '-'):\n ns = self.sequences[s[1]][-int(s[2]):-int(s[2]) - int(s[3])]\n self.sequences[s[1]].seqsplice(reverse_complement(\n s[6].replace('-', '')), ns.start, ns.stop)\n else:\n ns = self.sequences[s[1]][int(s[2]):int(s[2]) + int(s[3])]\n self.sequences[s[1]].seqsplice(s[6].replace('-', ''),\n ns.start, ns.stop)\n\n for inter in refIntervals(s[6]):\n self.mAlign[vseq[inter[0]:inter[1]]][ns[inter[2]:inter[3]]] = \\\n (inter[4])\n self.mAlign[ns[inter[2]:inter[3]]][vseq[inter[0]:inter[1]]] = \\\n (inter[4])\n\n s = fh.readline().split()", "def align_reads(read_fp, # FASTQ file path\n db_fp, # Local path to DB\n temp_folder, # Folder for results\n query_gencode=11, # Genetic code\n threads=1, # Threads\n min_score=20, # Minimum alignment score\n blocks=4, # Memory block size\n top=10, # Report alignments >10% from max\n min_id=80, # Minimum alignment identity\n qcov=95): # Minimum query coverage\n\n align_fp = \"{}.aln\".format(read_fp)\n logging.info(\"Input reads: {}\".format(read_fp))\n logging.info(\"Reference database: {}\".format(db_fp))\n logging.info(\"Genetic code: {}\".format(query_gencode))\n logging.info(\"Threads: {}\".format(threads))\n logging.info(\"Output: 
{}\".format(align_fp))\n\n run_cmds([\n \"diamond\",\n \"blastx\",\n \"--query\", read_fp, # Input FASTQ\n \"--out\", align_fp, # Alignment file\n \"--threads\", str(threads), # Threads\n \"--db\", db_fp, # Reference database\n \"--outfmt\", \"6\", # Output format\n \"qseqid\", \"sseqid\",\n \"pident\", \"length\",\n \"mismatch\", \"gapopen\",\n \"qstart\", \"qend\",\n \"sstart\", \"send\",\n \"evalue\", \"bitscore\",\n \"qlen\", \"slen\",\n \"--min-score\", str(min_score), # Minimum alignment score\n \"--query-cover\", str(qcov), # Minimum query coverage\n \"--id\", str(min_id), # Minimum alignment identity\n \"--top\", str(top), # Report alignments >10% from max\n \"--block-size\", str(blocks), # Memory block size\n \"--query-gencode\", # Genetic code\n str(query_gencode),\n \"--unal\", \"0\", # Don't report unaligned reads\n ])\n\n return align_fp", "def gen_unaligned_bam(bam_filename, analysis_id, metadata, specimen_dict, work_dir, output_dir, num_processes=4, logger=default_logger ):\n\n read_group_sam = os.path.join(output_dir, 'rg_header.sam')\n\n #get the read groups from the original sample level BAM\n exit_code = os.system(\"samtools view -H %s | grep \\\"@RG\\\" > %s\" %(bam_filename, read_group_sam))\n if exit_code != 0:\n print \"Failure in bam splitting during read group extraction from %s\" % bam_filename\n return 1\n \n\n rg_file = open(read_group_sam, \"r\")\n\n #create the read group fastqs\n try:\n cmd = \"bamtofastq outputperreadgroup=1 gz=1 level=1 inputbuffersize=2097152000 tryoq=1 outputdir=%s T=`mktemp -p %s bamtofastq_XXXXXXXXX` < %s\" %(work_dir, work_dir, bam_filename)\n logger.info(\"Running %s\" % cmd)\n subprocess.check_call(cmd, shell=True)\n except:\n print \"Failure in bam splitting\"\n return 1\n \n\n if header_utils.is_valid_analysis(metadata) or FORCE_RUN:\n pool = multiprocessing.Pool(processes=num_processes)\n results = []\n for line in rg_file:\n rg_dict = header_utils.get_read_group_info(line)\n header = header_utils.create_header(output_dir, metadata, rg_dict, specimen_dict)\n r = pool.apply_async(process_rg, (analysis_id, rg_dict, header, work_dir, output_dir))\n results.append(r)\n\n rg_file.close()\n \n out = []\n for r in results:\n out.append(r.get())\n \n utils.clean_up_dir(output_dir)\n if not all( a[0] for a in out ):\n #one of the read group bamtofastq failed\n return 1\n with open(os.path.join(output_dir, \"results.list\"), \"w\") as out_handle:\n for ok, file_name in out:\n out_handle.write(\"%s\\n\" % (file_name))\n\n else:\n print \"Invalid header/metadata for BAM\" % bam_filename\n return 1\n return 0", "def _generic_alignment(cline, seqrecs, preserve_order=True, **kwargs):\n # convert iterator to list, so that we can extract keys and still run the alignment\n unaligned = list(seqrecs)\n # if alignment sequences from NCBI Blast, id will include spaces\n keys = [seqrec.id.split()[0] for seqrec in unaligned]\n # execute alignment\n aligned = _generic_aligner_commandline_file(cline, unaligned, **kwargs)\n if preserve_order:\n aligned = SeqIO.to_dict(aligned)\n aligned = MultipleSeqAlignment(aligned[key] for key in keys)\n # make all alignment uppercase\n return MultipleSeqAlignment([seqrec.upper() for seqrec in aligned])" ]
[ "0.6396494", "0.6360484", "0.61174154", "0.6071524", "0.6041381", "0.60301906", "0.6022921", "0.5965225", "0.5931068", "0.5860453", "0.580393", "0.57993835", "0.57979983", "0.5702784", "0.57027584", "0.5686417", "0.5671133", "0.5648115", "0.56212074", "0.55878824", "0.5586022", "0.55856866", "0.55759835", "0.55566126", "0.55564785", "0.5552464", "0.5514451", "0.55140185", "0.55052394", "0.5504079" ]
0.7334227
0
Determine which transcripts were not grouped, then concatenate contigs_collapsed.fasta and the ungrouped contigs into a new transcriptome
from itertools import chain
import subprocess as sp


def isolate_ungrouped_contigs(fasta_dict, groups):
    # Collect every contig name that appears in any group; a set makes the
    # membership tests below O(1) instead of rescanning a list each time.
    with open(groups) as f:
        grouped_contigs = set(chain.from_iterable(
            line.split() for line in f
        ))
    db_deflines = fasta_dict.keys()
    ungrouped_contigs = [i for i in db_deflines if i not in grouped_contigs]
    # Write the contigs that were never assigned to a group to their own FASTA
    with open("ungrouped_contigs.fasta", 'w') as output:
        for contig in ungrouped_contigs:
            output.write(">{0}\n{1}\n".format(contig, fasta_dict[contig]))
    # Append the consensus sequences and the ungrouped contigs into the
    # final assembly
    sp.call(['cat', 'consensus_sequences.fasta', 'ungrouped_contigs.fasta'],
            stdout=open("full_assembly_after_consensus.fasta", 'w'))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def isolate_consensus(groups, fasta):\n with open(groups,\"r\") as f:\n file = f.readlines()\n\n sequences_in_out = open(\"sequences_in_out.table\",\"w\")\n consensus_sequences = open(\"consensus_sequences.fasta\",\"w\")\n\n for consensus_num, line in enumerate(file):\n group_sequences = line.rsplit()\n open(\"temp_sequences\",\"w\").close()\n # Write group of sequences to temporary file\n for group in group_sequences:\n sequences_in_out.write(\"{0}\\t\".format(group))\n with open(\"temp_sequences\",'a') as output:\n output.write(\">{0}\\n{1}\\n\".format(group, fasta[group]))\n # Run MAFFT alignment\n sp.call(['/apps/mafft/7.127/bin/mafft', '--adjustdirection', \n '--clustalout', '--preservecase', 'temp_sequences'],\n stdout=open('temp_alignment.txt','w'), \n stderr=open('temp_mafft_log','w') )\n # cat MAFFT log files\n sp.call(['cat','temp_mafft_log'], \n stdout = open(\"all_alignments.log\",'a+'))\n # cat MAFFT alignment files\n sp.call(['cat','temp_alignment.txt'], \n stdout = open(\"all_alignments.faa\",'a+'))\n\n #output group_sequences and the consensus generated in a file\n sequences_in_out.write(\"||\\t>Consensus_{0}\\n\".format(consensus_num + 1))\n align=Bio.AlignIO.read(\"temp_alignment.txt\",\"clustal\")\n summary_align = AlignInfo.SummaryInfo(align)\n consensus = summary_align.dumb_consensus(threshold=0.51, ambiguous='N')\n consensus_sequences.write(\">Consensus_{0}\\n{1}\\n\".format(\n consensus_num + 1,consensus))\n sequences_in_out.close()\n temp_files = glob(\"*temp*\")\n sp.call(['rm'] + temp_files)", "def safe_collapse(self, inputs, fasta_filepath=None):\n\n seqdict = {}\n superseqs, incomplete = self.load(inputs, parseheaders=False)\n for genome in superseqs:\n seqlist = [ a.seq() for a in superseqs[genome].iteralleles() ]\n if len(seqlist) > 1:\n raise Exception(\"Multiple alleles for genome {}\".format(genome))\n seqdict[genome] = seqlist[0]\n \n if fasta_filepath:\n with open(fasta_filepath, 'w') as f:\n for name,seq in seqdict.iteritems():\n f.write(\">{}\\n{}\\n\".format(name, seq))\n \n return seqdict", "def concatenate_detected_verified(fasta_name, PATH_FASTA_DETECTED, PATH_FASTA_VERIFIED, INFO_folder, PATH_FASTA_CONCATENATED):\n\n\tprint \"\\n#################\"\n\tprint \"# Concatetaned file\"\n\tprint \"#################\\n\"\n\n\t# NOTE Dictionaire avec en clef l'id espèce/système et en value une liste\n\t# NOTE [\"l'id espèce/système du verifié qui correspond\", [liste des sequences ATPase, IM ...]]\n\tdict_remove = {}\n\n\tprint \"\\n------------------------------------------\"\n\tprint \"| First read : Creation of the dictionnary\"\n\tprint \"------------------------------------------\\n\"\n\n\tfor fasta_file in fasta_name :\n\t\tverified_fasta=os.path.join(PATH_FASTA_VERIFIED, fasta_file)\n\t\tdetected_fasta=os.path.join(PATH_FASTA_DETECTED, fasta_file)\n\t\tconcatenated_fasta=os.path.join(PATH_FASTA_CONCATENATED, fasta_file)\n\n\t\tlist_seq_verified = list(SeqIO.parse(verified_fasta, \"fasta\"))\n\t\tlist_id_verified = [seq.id for seq in list_seq_verified]\n\t\tlist_seq_verified = [seq.seq for seq in list_seq_verified]\n\n\t\tseq_parser = SeqIO.parse(detected_fasta, \"fasta\")\n\t\tnumber_seq = len(list(seq_parser))\n\t\tprogression = 1\n\n\t\tseq_parser = SeqIO.parse(detected_fasta, \"fasta\")\n\n\t\t# IDEA Il faut tester au moins une fois pour voir si lors de la concatenation, je ne me retrouve pas avec des systems ou je n'ai pas tous enlevé. 
Exemple l'ATPase de X n'est pas la même que celle de Y mais l'IMplatform l'ai si c'est le cas X est a enlevé aussi pour son ATPase\n\t\t# IDEA Si idea précédente vrai alors il faut faire des fichiers temporaires des sequences que l'on garde et concatener par \"cat\" à la fin le fichier temporaire et son homonyme en verifié.\n\n\t\t# NOTE Il y avait un problème : le nom/id de l'epèce + système ne doit pas contenir le _NumX_ car ce Num fait référence au nombre de duplicat de la protéine (exemple deux ATPase gspE)\n\t\t# NOTE Quelques systèmes on des sequences qui sont similaire pour toutes les protéines sauf une exemple ESCO3 et NC_011993 qui sont identique pour tous sauf ATPase (98% seulement)\n\n\t\tfor seq in seq_parser :\n\n\t\t\tsys.stdout.write(\"File : {} -> {:.2f}% : {}/{} sequences detected read\\r\".format(fasta_file, progression/float(number_seq)*100, progression,number_seq))\n\t\t\tsys.stdout.flush()\n\t\t\tprogression += 1\n\n\t\t\tid_seq=seq.id.split(\"_\")\n\t\t\tid_seq=re.sub(\"Num[0-9]_\", \"\", \"_\".join(id_seq[:id_seq.index(\"D\")]))\n\n\t\t\tif id_seq in dict_remove :\n\t\t\t\tcontinue\n\n\t\t\telif seq.seq in list_seq_verified :\n\t\t\t\tindex=list_seq_verified.index(seq.seq)\n\n\t\t\t\tid_seq_verif = list_id_verified[index].split(\"_\")\n\t\t\t\tid_seq_verif = re.sub(\"Num[0-9]_\", \"\", \"_\".join(id_seq_verif[:id_seq_verif.index(\"V\")]))\n\n\t\t\t\t# NOTE dans le dictionnaire je met le système vérifié en premier, toutes les séquences du système identitique en deuxième et la séquence qui en est la cause en troisème\n\t\t\t\tdict_remove[id_seq]=[id_seq_verif,[], seq.id]\n\n\t\tprint\n\t\tprint(\"File : {} -> Done!\".format(fasta_file))\n\n\tprint \"\\n-----------------------------\"\n\tprint \"| Second read : Writing files\"\n\tprint \"-----------------------------\\n\"\n\n\tfor fasta_file in fasta_name :\n\t\tverified_fasta=os.path.join(PATH_FASTA_VERIFIED, fasta_file)\n\t\tdetected_fasta=os.path.join(PATH_FASTA_DETECTED, fasta_file)\n\t\tconcatenated_fasta=os.path.join(PATH_FASTA_CONCATENATED, fasta_file)\n\n\t\tos.system('cat \"{}\" > \"{}\"'.format(verified_fasta, concatenated_fasta))\n\n\t\tseq_parser = SeqIO.parse(detected_fasta, \"fasta\")\n\t\tnumber_seq = len(list(seq_parser))\n\t\tprogression = 1\n\n\t\tseq_parser = SeqIO.parse(detected_fasta, \"fasta\")\n\n\t\twith open(concatenated_fasta, \"a\") as w_file :\n\t\t\tfor seq in seq_parser :\n\n\t\t\t\tsys.stdout.write(\"File : {} -> {:.2f}% : {}/{} sequences detected read\\r\".format(fasta_file, progression/float(number_seq)*100, progression,number_seq))\n\t\t\t\tsys.stdout.flush()\n\t\t\t\tprogression += 1\n\n\t\t\t\tid_seq=seq.id.split(\"_\")\n\t\t\t\tid_seq=re.sub(\"Num[0-9]_\", \"\", \"_\".join(id_seq[:id_seq.index(\"D\")]))\n\n\t\t\t\tif id_seq in dict_remove :\n\t\t\t\t\tdict_remove[id_seq][1].append(seq)\n\n\t\t\t\telse :\n\t\t\t\t\tSeqIO.write(seq, w_file, \"fasta\")\n\t\tprint\n\t\tprint(\"File : {} -> Done!\".format(fasta_file))\n\n\t# NOTE Dict remove complete and all concatenate write\n\twrite_remove_concatenate(dict_remove, INFO_folder)\n\n\treturn", "def isoformAln(aln, o):\n\n logger = logging.getLogger(\"main.alignment\")\n logger.info(\"Clustering isoforms.\")\n\n dRem={} #for remaining sequences\n dId2Seq={} #for remaining sequences\n laln=0 #alignement length\n for fasta in SeqIO.parse(open(aln),'fasta'):\n post=fasta.id.find(\"_\")\n if post!=-1: #regular format\n sp=fasta.id[:post]\n tag=fasta.id[post+1:]\n if not sp in dId2Seq:\n dId2Seq[sp]={}\n dId2Seq[sp][tag]=str(fasta.seq)\n if 
laln==0:\n laln=len(fasta.seq)\n else:\n dRem[fasta.id]=str(fasta.seq)\n\n \n outCov = o+aln.split(\"/\")[-1].split(\".\")[0]+\"_clustiso.fasta\"\n clustok=False #flag to check if a cluster has occured\n for sp,dtagseq in dId2Seq.items():\n lclust=[list(dtagseq)] #list of clusters of tags to be split\n for pos in range(laln):\n lclust2=[]\n for clust in lclust:\n dlet={tag:dtagseq[tag][pos] for tag in clust}\n llet=set([x for x in dlet.values() if x!=\"-\"])\n if len(llet)<=1: #one letter at most, keep all\n lclust2.append(clust)\n continue\n else:\n for x in llet:\n lclust2.append([tag for tag in clust if dlet[tag]==x])\n lind=[tag for tag in clust if dlet[tag]==\"-\"] #conservative, do not know wether to merge, may be improved\n if len(lind)!=0:\n lclust2.append(lind)\n lclust=lclust2\n \n #now merge sequences in each cluster\n for clust in lclust:\n if len(clust)==1:\n dRem[sp+\"_\"+clust[0]]=dtagseq[clust[0]]\n else:\n clustok=True\n ntag=clust[-1]+\"_clust\"\n logger.info(\"Clustered sequences \" + sp+\"_\" + (\", %s_\"%(sp)).join(clust) + \" into %s_\"%(sp)+ntag)\n nseq=\"\".join([max([dtagseq[tag][pos] for tag in clust]) for pos in range(laln)])\n dRem[sp+\"_\"+ntag]=nseq\n\n if clustok:\n with open(outCov, \"w\") as outC:\n \t outC.write(FastaResFunc.dict2fasta(dRem))\n \t outC.close()\n\t\n return(outCov)\n else:\n return(aln)", "def _collapse(in_file):\n out_file = append_stem(in_file, \".trimming\").replace(\".gz\", \"\")\n if file_exists(out_file):\n return out_file\n seqs = collapse(in_file)\n write_output(out_file, seqs, minimum=1, size=16)\n return out_file", "def split_by_contigs(self, output_dir: Path = None) -> None:\n if output_dir is None:\n output_dir = (\n Path(self._input_file.parent) / \"split_\" + self._input_file.name\n )\n else:\n output_dir = Path(output_dir)\n output_dir.mkdir(parents=True, exist_ok=True)\n contigs = pyfastx.Fasta(\n self.file_path.as_posix(), build_index=False, full_name=True\n )\n for contig_name, seq in contigs:\n output_file = (\n output_dir / f\"{contig_name.split(' ')[0]}{self._input_file.suffix}\"\n )\n with open(output_file, \"w+\", encoding=\"UTF-8\") as outfile:\n outfile.write(f\">{contig_name}\\n\")\n outfile.write(seq + \"\\n\")", "def catAlignments(alignA, alignB):\n\n \"\"\"Create temporary files for MUSCLE to work on the two alignments\"\"\"\n aFn = tempfile.mktemp(prefix='tmp_align', suffix='.fasta', dir=None)\n bFn = tempfile.mktemp(prefix='tmp_align', suffix='.fasta', dir=None)\n outFn = tempfile.mktemp(prefix='tmp_align', suffix='.fasta', dir=None)\n\n \n \"\"\"Make sure alignments have the same length and are Series objects\"\"\"\n alignA = padAlignment(alignA)\n alignB = padAlignment(alignB)\n\n \"\"\"Put alignments in the tempfiles\"\"\"\n align2fasta(alignA, aFn)\n align2fasta(alignB, bFn)\n\n muscleCommand = ['muscle', '-profile', '-in1', aFn, '-in2', bFn, '-out', outFn]\n result = subprocess.call(muscleCommand)\n\n \"\"\"If MUSCLE was successful\"\"\"\n if not result:\n outAlign = fasta2align(outFn)\n else:\n print(\"Error in MUSCLE!\")\n raise Exception(\"MUSCLEError\")\n \n \"\"\"\n except:\n pass\n os.remove(aFn)\n os.remove(bFn)\n os.remove(outFn)\n raise\n \"\"\"\n \"\"\"Remove the temporary files\"\"\"\n os.remove(aFn)\n os.remove(bFn)\n os.remove(outFn)\n\n return outAlign", "def MotcorCatenate(self, info, base, anat_tgt):\n# First compute the transformation matrices due to epi-to-epi motion.\n fmt = '3dvolreg -prefix NULL -1Dmatrix_save %s -twopass ' + \\\n '-verbose -base %s+orig[%s] -dfile %s %s+orig'\n cmd = 
fmt % (info['matfile_m'], info['basefile'], base, \\\n info['mot_file'], info['imgfile_t'])\n self.CheckExec(cmd, [info['matfile_m']])\n\n# Catenate with transformation from epi base image to the anatomical.\n cmd = 'cat_matvec -ONELINE %s -P %s -P > %s' % \\\n (self.info[anat_tgt]['matfile'], info['matfile_m'], \\\n info['matfile_mcat'])\n self.CheckExec(cmd, [info['matfile_mcat']])\n\n# Interpolate the data to the new grid.\n fmt = '3dAllineate -prefix %s -interp cubic -1Dmatrix_apply %s ' + \\\n '-warp shift_rotate -base %s+orig[%s] %s+orig'\n cmd = fmt % (info['imgfile_m'], info['matfile_mcat'], info['basefile'], \\\n base, info['imgfile_t'])\n self.CheckExec(cmd, ['%s+orig.BRIK'%info['imgfile_m'], \\\n '%s+orig.HEAD'%info['imgfile_m']])", "def new_resolve_unique_contigs(scaffold_list, unique_contigs_list):\n \n contig_location = {}\n s_l = copy.deepcopy(scaffold_list)\n \n #first deal with any scaffolds that have more than one copy of a unique contig\n to_remove = []\n for scaf in s_l: \n for contig in unique_contigs_list:\n if scaf.count(contig) > 1:\n scaffold_parts = split_siamese(contig, scaf)\n to_remove.append(scaf)\n s_l.extend(scaffold_parts)\n break \n for scaf in to_remove:\n s_l.remove(scaf) \n\n\n for contig in unique_contigs_list:\n #if contig[:4] == \"five\": \n finds = find_unique_contig(contig, s_l)\n\n if len(finds) > 1:\n contig_location[contig] = finds\n\n sc_ov = {}\n sc_ov = make_scaff_overlap_dict(contig_location)\n\n #This is the new bit that takes just the first conflicted contig \n first_k = list(sc_ov.items())[0:1]\n first_sc_ov = dict(first_k)\n new_scaffold_list = combine_overlapping_contigs(first_sc_ov, s_l)\n\n #Split off unique scaffolds attached by their 3' ends to multiple scaffolds\n \n for contig in contig_location:\n if contig[:5] == \"three\":\n for scaf in contig_location[contig]:\n conflict = False\n if scaf.index(contig) == 1:\n conflict = True\n new_left_scaf = scaf[:3]\n new_right_scaf = scaf[3:]\n if scaf.index(contig) == len(scaf) - 2:\n conflict = True\n new_left_scaf = scaf[:-3]\n new_right_scaf = scaf[-3:]\n if conflict:\n new_left_scaf.append(\"link_conflict6\")\n new_right_scaf.insert(0,\"link_conflict6\")\n if len(new_left_scaf) >= 4: \n new_scaffold_list.append(new_left_scaf)\n if len(new_right_scaf) >= 4:\n new_scaffold_list.append(new_right_scaf)\n if scaf in new_scaffold_list:\n new_scaffold_list.remove(scaf)\n\n return new_scaffold_list", "def reduce_and_save():\n ### Get the signature information\n sig_info = pd.read_csv(join(FILE_PATH, \"GSE92742_Broad_LINCS_sig_info.txt\"), sep=\"\\t\")\n ### Columns are:\n ### Index([u'sig_id', u'pert_id', u'pert_iname', u'pert_type', u'cell_id',\n ### u'pert_dose', u'pert_dose_unit', u'pert_idose', u'pert_time',\n ### u'pert_time_unit', u'pert_itime', u'distil_id'],\n ### dtype='object')\n\n ### Filter for signature ids for small molecule pertubagens\n small_mol_sigs = sig_info['sig_id'][sig_info['pert_type'] == \"trt_cp\"]\n ### Results in 205034 signatures\n\n ### Read in the gene info\n gene_info = pd.read_csv(join(FILE_PATH, \"GSE92742_Broad_LINCS_gene_info.txt\"), sep='\\t')\n ### Index([u'pr_gene_id', u'pr_gene_symbol', u'pr_gene_title', u'pr_is_lm',\n ### u'pr_is_bing'],\n ### dtype='object')\n\n landmark_gene_ids = gene_info['pr_gene_id'][gene_info['pr_is_lm'] == 1] #Filters for directly measured transcripts\n ### Results in the 978 landmark pr_gene_ids\n\n ### LOAD in the main file filtering the columns so that only the small molecules signatures are loaded and the\n ### rows such 
that only the landmark genes are loaded into their custom gctoo container type\n relevent_sigs_gctoo = parse(join(FILE_PATH, \"GSE92742_Broad_LINCS_Level5_COMPZ.MODZ_n473647x12328.gctx\"),\n cid=small_mol_sigs, rid=landmark_gene_ids)\n # print small_mol_sigs.data_df.shape\n ### Should write an intermediate file with dimensions (978, 205034)\n write_gctx.write(relevent_sigs_gctoo, join(FILE_PATH, \"lm_sm_aggz\"))", "def read_tcr_data(\n organism,\n contig_annotations_csvfile,\n consensus_annotations_csvfile,\n include_gammadelta = False,\n allow_unknown_genes = False,\n verbose = False\n):\n from all_genes import all_genes\n\n expected_gene_names = all_genes[organism].keys()\n\n #from cdr3s_human import all_align_fasta\n\n gene_suffix = '*01' # may not be used\n\n\n # read the contig annotations-- map from clonotypes to barcodes\n # barcode,is_cell,contig_id,high_confidence,length,chain,v_gene,d_gene,j_gene,c_gene,full_length,productive,cdr3,cdr3_nt,reads,umis,raw_clonotype_id,raw_consensus_id\n # AAAGATGGTCTTCTCG-1,True,AAAGATGGTCTTCTCG-1_contig_1,True,695,TRB,TRBV5-1*01,TRBD2*02,TRBJ2-3*01,TRBC2*01,True,True,CASSPLAGYAADTQYF,TGCGCCAGCAGCCCCCTAGCGGGATACGCAGCAGATACGCAGTATTTT,9427,9,clonotype14,clonotype14_consensus_1\n assert exists( contig_annotations_csvfile )\n\n _, lines = parse_csv_file(contig_annotations_csvfile)\n clonotype2barcodes = {}\n clonotype2tcrs_backup = {} ## in case we dont have a consensus_annotations_csvfile\n for l in lines:\n bc = l['barcode']\n clonotype = l['raw_clonotype_id']\n if clonotype =='None':\n if l['productive'] not in [ 'None','False' ]:\n assert l['productive'] == 'True'\n #print 'clonotype==None: unproductive?',l['productive']\n continue\n if clonotype not in clonotype2barcodes:\n clonotype2barcodes[clonotype] = []\n if bc in clonotype2barcodes[clonotype]:\n pass\n #print 'repeat barcode'\n else:\n clonotype2barcodes[clonotype].append( bc )\n\n if not clonotype:\n print 'empty clonotype id:', l\n continue\n assert clonotype\n ## experimenting here ########################################3\n if l['productive'].lower() != 'true':\n continue\n if l['cdr3'].lower() == 'none' or l['cdr3_nt'].lower() == 'none':\n continue\n\n chain = l['chain']\n if chain not in ['TRA','TRB']:\n continue\n ab = chain[2]\n if clonotype not in clonotype2tcrs_backup:\n clonotype2tcrs_backup[ clonotype ] = {'A':Counter(), 'B':Counter() }\n # stolen from below\n vg = l['v_gene']\n if '*' not in vg:\n vg += gene_suffix\n if 'DV' in vg and vg not in expected_gene_names:\n #print 'DV?',vg\n vg = vg[:vg.index('DV')]+'/'+vg[vg.index('DV'):]\n jg = l['j_gene']\n if '*' not in jg:\n jg += gene_suffix\n\n if vg not in expected_gene_names:\n print 'unrecognized V gene:', organism, vg\n if not allow_unknown_genes:\n continue\n if jg not in expected_gene_names:\n print 'unrecognized J gene:', organism, jg\n if not allow_unknown_genes:\n continue\n #assert vg in all_align_fasta[organism]\n #assert jg in all_align_fasta[organism]\n\n tcr_chain = ( vg, jg, l['cdr3'], l['cdr3_nt'].lower() )\n\n clonotype2tcrs_backup[clonotype][ab][tcr_chain] += int(l['umis'])\n\n for id in clonotype2tcrs_backup:\n for ab in 'AB':\n for t1,count1 in clonotype2tcrs_backup[id][ab].iteritems():\n for t2, count2 in clonotype2tcrs_backup[id][ab].iteritems():\n if t2<=t1:continue\n if t1[3] == t2[3]:\n print 'repeat??', count1, count2, t1, t2\n\n\n\n if consensus_annotations_csvfile is None:\n clonotype2tcrs = clonotype2tcrs_backup\n else:\n\n ## now read details on the individual chains for each clonotype\n # ==> 
tcr/human/JCC176_TX2_TCR_consensus_annotations.csv <==\n # clonotype_id,consensus_id,length,chain,v_gene,d_gene,j_gene,c_gene,full_length,productive,cdr3,cdr3_nt,reads,umis\n # clonotype100,clonotype100_consensus_1,550,TRB,TRBV24-1*01,TRBD1*01,TRBJ2-7*01,TRBC2*01,True,True,CATSDPGQGGYEQYF,TGTGCCACCAGTGACCCCGGACAGGGAGGATACGAGCAGTACTTC,8957,9\n\n assert exists(consensus_annotations_csvfile)\n _, lines = parse_csv_file( consensus_annotations_csvfile )\n\n\n ## first get clonotypes with one alpha and one beta\n clonotype2tcrs = {}\n\n for l in lines:\n if l['productive'] == 'True':\n id = l['clonotype_id']\n if id not in clonotype2tcrs:\n # dictionaries mapping from tcr to umi-count\n clonotype2tcrs[id] = { 'A':Counter(), 'B':Counter() } #, 'G':[], 'D': [] }\n assert id in clonotype2barcodes\n\n ch = l['chain']\n if not ch.startswith('TR'):\n print 'skipline:', consensus_annotations_csvfile, ch, l['v_gene'], l['j_gene']\n continue\n ab = ch[2]\n if ab not in 'AB':\n print 'skipline:', consensus_annotations_csvfile, ch, l['v_gene'], l['j_gene']\n continue\n vg = l['v_gene']\n if '*' not in vg:\n vg += gene_suffix\n if 'DV' in vg and vg not in expected_gene_names:\n #print 'DV?',vg\n vg = vg[:vg.index('DV')]+'/'+vg[vg.index('DV'):]\n jg = l['j_gene']\n if '*' not in jg:\n jg += gene_suffix\n # if vg in tcr_gene_remap[organism]:\n # vg = tcr_gene_remap[organism][vg]\n # if jg in tcr_gene_remap[organism]:\n # jg = tcr_gene_remap[organism][jg]\n\n if vg not in expected_gene_names:\n print 'unrecognized V gene:', organism, vg\n if not allow_unknown_genes:\n continue\n if jg not in expected_gene_names:\n print 'unrecognized J gene:', organism, jg\n if not allow_unknown_genes:\n continue\n #assert vg in all_align_fasta[organism]\n #assert jg in all_align_fasta[organism]\n tcr_chain = ( vg, jg, l['cdr3'], l['cdr3_nt'].lower() )\n\n if tcr_chain not in clonotype2tcrs[id][ab]:\n umis = int( l['umis'] )\n clonotype2tcrs[id][ab][ tcr_chain ] = umis\n old_umis = clonotype2tcrs_backup[id][ab][tcr_chain]\n if umis != old_umis:\n print 'diff_umis:',umis, old_umis, id,ab,tcr_chain\n else:\n print 'repeat?',id,ab,tcr_chain\n else:\n if l['productive'] not in [ 'None','False' ]:\n print 'unproductive?',l['productive']\n\n\n if verbose:\n idl1 = sorted( clonotype2tcrs_backup.keys())\n idl2 = sorted( clonotype2tcrs.keys())\n print 'same ids:', len(idl1), len(idl2), idl1==idl2\n for id in clonotype2tcrs_backup:\n if id in clonotype2tcrs:\n for ab in 'AB':\n tl1 = sorted(clonotype2tcrs_backup[id][ab].keys())\n tl2 = sorted(clonotype2tcrs[id][ab].keys())\n if tl1 != tl2:\n print 'diffids:',id,ab,tl1,tl2\n\n\n return clonotype2tcrs, clonotype2barcodes", "def sort(self):\n \n ct=[]\n rt=[]\n wr=[]\n # search for tags that aren't in the right position\n for i in range(len(self.contigs)):\n c = self.contigs[i]\n if c.wa:\n if not self.wa:\n self.wa=[]\n self.wa.extend(c.wa)\n if c.ct:\n newcts=[ct_tag for ct_tag in c.ct if ct_tag.name!=c.name]\n map(self.contigs[i].ct.remove,newcts)\n ct.extend(newcts)\n for j in range(len(c.reads)):\n r = c.reads[j]\n if r.rt:\n newrts=[rt_tag for rt_tag in r.rt if rt_tag.name!=r.rd.name]\n map(self.contigs[i].reads[j].rt.remove,newrts)\n rt.extend(newrts)\n if r.wr:\n newwrs=[wr_tag for wr_tag in r.wr if wr_tag.name!=r.rd.name]\n map(self.contigs[i].reads[j].wr.remove,newwrs)\n wr.extend(newwrs)\n # now sort them into their proper place\n for i in range(len(self.contigs)):\n c = self.contigs[i]\n for ct_tag in ct:\n if ct_tag.name==c.name:\n if self.contigs[i].ct is None:\n 
self.contigs[i].ct=[]\n self.contigs[i].ct.append(ct_tag)\n if rt or wr:\n for j in range(len(c.reads)):\n r = c.reads[j]\n for rt_tag in rt:\n if rt_tag.name==r.rd.name:\n if self.contigs[i].reads[j].rt is None:\n self.contigs[i].reads[j].rt=[]\n self.contigs[i].reads[j].rt.append(rt_tag)\n for wr_tag in wr:\n if wr_tag.name==r.rd.name:\n if self.contigs[i].reads[j].wr is None:\n self.contigs[i].reads[j].wr=[]\n self.contigs[i].reads[j].wr.append(wr_tag)", "def merge_ncRNA_fastas():\n\tmerge_cmnd = \"cat %s %s %s > %s\" % (rRNA_fasta_file, fasta_outfile, tRNA_fasta_outfile, ncRNA_output_fasta)\n\tprint merge_cmnd\n\tos.system(merge_cmnd)\n\n\tindex_cmnd = \"samtools faidx %s\" % (ncRNA_output_fasta)\n\tprint index_cmnd\n\tos.system(index_cmnd)", "def all_possible_gene_transcription(dna: str):\n result = set()\n for dna in (dna, reverse_complement(dna)):\n rna = dna_to_rna(dna)\n start = find_motif(rna, START_CODON)\n for s in start:\n r = rna_to_protein(rna, start=s, end=True)\n if r:\n result.add(r)\n return result", "def combine_overlapping_contigs(sc_ov, scaffold_list): \n for k in sc_ov:\n \n conflict = False\n nos = len(sc_ov[k])\n sca_lis = []\n l_length = {}\n r_length = {}\n for n in range(nos):\n \n sca_lis.append(sc_ov[k][n])\n p = sca_lis[n].index(k)\n l_length[n] = p+1\n r_length[n] = len(sca_lis[n]) - p-1\n \n l_longest = max(l_length, key=l_length.get)\n r_longest = max(r_length, key=r_length.get) \n new_scaff = sca_lis[l_longest][:l_length[l_longest]] + sca_lis[r_longest][-r_length[r_longest]:]\n \n alt_scaff = []\n for n in range(nos):\n if str(sca_lis[n][1:-1])[1:-1] not in str(new_scaff): \n conflict = True \n split_scaffs = split_at_conflict(new_scaff, sca_lis[n], k)\n for scaff in split_scaffs:\n if scaff not in alt_scaff:\n alt_scaff.append(scaff)\n\n if not conflict:\n scaffold_list.append(new_scaff)\n else: \n alt_scaff2 = purge_redundancy(alt_scaff) \n for new_scaff in alt_scaff2:\n if len(new_scaff) > 2: #exclude empty scaffolds\n scaffold_list.append(new_scaff)\n \n for scaff in sca_lis:\n if scaff in scaffold_list:\n scaffold_list.remove(scaff)\n else:\n scaff.reverse()\n if scaff in scaffold_list:\n scaffold_list.remove(scaff)\n \n return scaffold_list", "def combine_corpus(include_whole_abstracts=False):\r\n\tif load_combined_corpus_maps():\r\n\t\treturn\r\n\t\r\n\tglobal gensim_dictionary\r\n\tglobal search_criteria_dict, solution_dict, linked_abstracts_dict, abstracts_dict\r\n\tglobal common_corpus_list, question_map, abstract_map, abstract_whole_map\r\n\tglobal corpus_layout\r\n\t\r\n\tprint('\\nbuilding common corpus')\r\n\t\r\n\tcommon_corpus_list = list()\r\n\tquestion_map = dict()\r\n\tabstract_map = dict()\r\n\tabstract_whole_map = dict()\r\n\t\r\n\tsentence_count = 0\r\n\t\r\n\tcorpus_layout = []\r\n\t\r\n\tfor qi,qv in search_criteria_dict.items():\r\n\t\tquestions = qv\r\n\t\t\r\n\t\tsentence_incr = len(questions)\r\n\t\tquestion_map[qi] = (sentence_count,sentence_count + sentence_incr)\r\n\t\tsentence_count += sentence_incr\r\n\t\t\r\n\t\tfor st in questions:\r\n\t\t\tcommon_corpus_list.append(st)\r\n\t\r\n\tcorpus_layout.append(('Q',sentence_count))\r\n\t\r\n\tfor ai,av in abstracts_dict.items():\r\n\t\tabstract = av\r\n\t\t\r\n\t\tsentence_incr = len(abstract)\r\n\t\tabstract_map[ai] = (sentence_count,sentence_count + sentence_incr)\r\n\t\tsentence_count += sentence_incr\r\n\r\n\t\tfor st in abstract:\r\n\t\t\tcommon_corpus_list.append(st)\r\n\t\r\n\tcorpus_layout.append(('A',sentence_count))\r\n\r\n\tif include_whole_abstracts:\r\n\t\tfor ai,av 
in abstracts_dict.items():\r\n\t\t\tabstract = av\r\n\t\t\t\r\n\t\t\tabstract_whole_map[ai] = (sentence_count,sentence_count + 1)\r\n\t\t\tsentence_count += 1\r\n\r\n\t\t\tst = tuple('\\n'.join(s) for s in zip(*abstract))\r\n\t\t\tif len(st) < 2:\r\n\t\t\t\tst = ('','')\r\n\r\n\t\t\tcommon_corpus_list.append(st)\r\n\t\t\r\n\t\tcorpus_layout.append(('W',sentence_count))\r\n\t\r\n\tpickle.dump(question_map,open(paths.path_data_map_questions_pickle,\"wb\"))\r\n\tpickle.dump(abstract_map,open(paths.path_data_map_abstracts_pickle,\"wb\"))\r\n\tpickle.dump(abstract_whole_map,open(paths.path_data_map_abstracts_whole_pickle,\"wb\"))\t\r\n\tpickle.dump(common_corpus_list,open(paths.path_data_corpus_pickle,\"wb\"))\r\n\tpickle.dump(corpus_layout,open(paths.path_data_corpus_layout_pickle,\"wb\"))", "def collapse_and_write(choice_protos,\n output_base,\n array_name,\n array_prefix,\n copies_per_seq=10):\n count_proto = 0\n count_seq = 0\n count_spot = 0\n\n # There will be duplicate sequences in choice_protos. Write each seq once.\n previous_seqs = set()\n with gfile.GFile(os.path.join(output_base, array_name), 'w') as f:\n for p in choice_protos:\n count_proto += 1\n seq = p.aptamer_sequence\n if seq in previous_seqs:\n continue\n previous_seqs.add(seq)\n count_seq += 1\n for i in range(copies_per_seq):\n count_spot += 1\n probe_id = '%s_%s_%d_of_%d' % (array_prefix, seq, i + 1, copies_per_seq)\n f.write(\n ('%s\\t%s\\n' % (probe_id, seq_to_array_seq(seq))).encode('utf-8'))\n print(('There are %d protos with %d unique sequences, yielding %d array '\n 'spots' % (count_proto, count_seq, count_spot)))\n\n return previous_seqs", "def aggregate_translations(wildcards):\n checkpoint_output = checkpoints.align.get(**wildcards).output.translations\n return expand(build_dir + \"/{build_name}/{segment}/nextalign/masked.gene.{gene}.fasta\",\n build_name=wildcards.build_name,\n segment=wildcards.segment,\n gene=GENES[wildcards.segment])", "def complementary_seq(self):\n if not self.data['DNAseq']:\n self.complement_seq_var.set(0)\n self.warning('No DNA sequence loaded','You have to load a DNA sequence first')\n return\n compl={'A':'T','T':'A','C':'G','G':'C'}\n comDNA=''\n for base in self.data['DNAseq']:\n comDNA=comDNA+compl[base]\n self.data['DNAseq']=comDNA\n\n # Update\n self.update_sequence_window()\n return", "def get_transcript_sgrnas(target_region_seq_df, context_len, pam_start, pam_len,\n sgrna_start, sgrna_len, pams, sg_positions):\n sgrna_df_list = []\n meta_columns = ['object_type', 'strand', 'transcript_id', 'seq_region_name', 'region_id', 'start', 'end']\n for i, row in target_region_seq_df.iterrows():\n seq_start = row['expanded_start']\n seq_end = row['expanded_end']\n sequence = row['seq']\n # Sequences on the positive strand\n pos_sgrna_df = tile.build_sgrna_df(sequence, context_len=context_len, pam_start=pam_start,\n pam_len=pam_len, sgrna_start=sgrna_start,\n sgrna_len=sgrna_len, pams=pams)\n pos_sgrna_df = get_sgrna_global_indices(pos_sgrna_df, seq_start, seq_end, 1, sg_positions)\n # assuming the target_region_seq_df is oriented on the positive sgRNA strand\n pos_sgrna_df['sgrna_strand'] = 1\n # Sequences on the negative strand\n rev_comp_seq = reverse_compliment(sequence)\n neg_sgrna_df = tile.build_sgrna_df(rev_comp_seq, context_len=context_len, pam_start=pam_start,\n pam_len=pam_len, sgrna_start=sgrna_start,\n sgrna_len=sgrna_len, pams=pams)\n neg_sgrna_df = get_sgrna_global_indices(neg_sgrna_df, seq_start, seq_end, -1, sg_positions)\n neg_sgrna_df['sgrna_strand'] = -1\n # Combine and 
filter sgrna_dfs\n sgrna_df = pd.concat([pos_sgrna_df, neg_sgrna_df])\n for col in meta_columns:\n sgrna_df[col] = row[col]\n sgrna_df_list.append(sgrna_df)\n concatenated_sgrna_dfs = (pd.concat(sgrna_df_list)\n .rename({'strand': 'transcript_strand',\n 'start': 'region_start',\n 'end': 'region_end',\n 'seq_region_name': 'chromosome'}, axis=1))\n return concatenated_sgrna_dfs", "def add_locus_in_fasta(self, fasta, output_file):\n fasta_record = FastA(fasta)\n ids_list = self._get_seq_ids()\n\n # check if both files have same number of contigs\n if len(fasta_record) != len(ids_list): # pragma: no cover\n print(\n \"fasta and annotation files don't have the same number of \"\n \"contigs. Found {} and {}\".format(len(fasta_record), len(ids_list))\n )\n sys.exit(1)\n\n # check if directory exist\n output_dir = os.path.dirname(output_file)\n os.makedirs(output_dir, exist_ok=True)\n\n if sorted(fasta_record.names) == sorted(ids_list):\n logger.info(\"Files have same sequence id.\")\n if os.path.isfile(output_file): # pragma: no cover\n os.remove(output_file)\n os.symlink(os.path.realpath(fasta), output_file)\n return\n else:\n logger.info(\n \"fasta and GFF seem to have different IDs. Creating a\"\n \"new coherent fasta file assuming the chromsome names appear \"\n \"in the same order in the fasta and gff\"\n )\n\n with open(output_file, \"w\") as fp:\n # write fasta with seqid of annotation file\n for n in range(len(fasta_record)):\n seq_id = \">{0} {1}\\n\".format(ids_list[n], fasta_record.names[n])\n seq = fasta_record.sequences[n]\n sequence = \"\\n\".join([seq[i : min(i + 80, len(seq))] for i in range(0, len(seq), 80)]) + \"\\n\"\n contigs = seq_id + sequence\n fp.write(contigs)", "def gtf_processing(genome=None, prefix='gencov'):\n all_bed = prefix + \".all.bed\"\n\n if not os.path.exists(all_bed) or os.stat(all_bed).st_size == 0:\n log.info(\"Preprocessing annotation...\")\n features = ('exon', 'gene', 'intron', 'intergenic')\n merged_exons, merged_genes = map(preprocess, features[:2])\n ins = {\n 'intron': [merged_genes, merged_exons],\n 'intergenic': [merged_genes, genome]\n }\n intron_bed, intergenic_bed = map(preprocess, features[2:], [ins, ins])\n\n log.info(\"Concatenate bed files for all elements...\")\n with open(all_bed, 'w') as out_bed:\n cat_all(merged_exons, merged_genes, intron_bed, intergenic_bed, out_bed=out_bed)\n\n for f in (merged_exons, merged_genes, intron_bed, intergenic_bed):\n os.remove(f)\n\n return all_bed", "def combine_features(c_dat):\n # They are keyed on transcript ID\n for tx in c_dat:\n for cds in c_dat[tx]:\n cds_pieces = c_dat[tx][cds]\n # If there fewer than 2 CDS chunks, then pull the tuple out of the\n # list.\n if len(cds_pieces) < 2:\n c_dat[tx][cds] = cds_pieces[0]\n else:\n # Join pieces\n locs = []\n ph = []\n for chunk in cds_pieces:\n c_loc = FeatureLocation(\n chunk[0].location.start,\n chunk[0].location.end,\n strand=chunk[0].strand)\n locs.append(c_loc)\n ph.append(chunk[2])\n # Sort them, according to strand. 
We assume that a CDS is not a\n # mixed-strand feature\n if cds_pieces[0][0].strand == 1:\n locs.sort(key=lambda x: x.start)\n else:\n locs.sort(key=lambda x: x.end, reverse=True)\n # Join them into a CompoundLocation\n full_loc = CompoundLocation(locs)\n # And then overwrite the input dictionary values\n full_feat = SeqFeature(full_loc, type='CDS',\n id=cds_pieces[0][0].id)\n full_feat.qualifiers['transl_tabl'] = [1]\n # Keep the phases!\n c_dat[tx][cds] = (full_feat, cds_pieces[0][1], ph)\n return c_dat", "def add_names_to_output_set(fasta_dict_a,fasta_dict_b,output_set):\n out_fasta_list = []\n fasta_contig_name = \"\"\n \n for fasta_data in output_set:\n if fasta_data in fasta_dict_a:\n fasta_contig_name = fasta_dict_a[fasta_data]\n out_fasta_list.append(\"\\n\"+fasta_contig_name+\"\\n\") \n out_fasta_list.append(fasta_data) \n elif fasta_data in fasta_dict_b:\n fasta_contig_name = fasta_dict_b[fasta_data]\n out_fasta_list.append(\"\\n\"+fasta_contig_name+\"\\n\") \n out_fasta_list.append(fasta_data)\n\n return ''.join(out_fasta_list)", "def _print_nonfrozen_transforms(self):\n print(\"TRANSFORMS:\\n\" + str(self._transform_groups))\n for dd in self._datasets:\n if isinstance(dd, AvalancheDataset):\n print(\"PARENT TRANSFORMS:\\n\")\n _print_nonfrozen_transforms(dd)", "def get_sequin_annots(sequin_path, ref_contigs, quiet=False):\n annots = defaultdict(list)\n \n # We need a dummy class to hold the current state while parsing\n # (otherwise the below private functions can't modify it; there's no \"nonlocal\" in python 2.x)\n class _:\n in_contig = None\n in_feature = None\n gene_name = None\n desc = None\n chrom_start = None\n chrom_end = None\n strand = None\n feature_seq_str = \"\"\n coding_blocks = []\n \n def _save_sequin_feature():\n # The only features we care about are the CDS features. 
Others get discarded during parsing.\n if _.in_feature == \"CDS\":\n if len(_.feature_seq_str) == 0:\n if not quiet: sys.stderr.write(\"WARN: 0-length CDS in contig %s\" % _.in_contig)\n elif _.gene_name is None or _.strand is None or _.chrom_start is None or _.chrom_end is None:\n if not quiet: sys.stderr.write(\"WARN: invalid CDS feature in contig %s\" % _.in_contig)\n else:\n gene_seq = Seq(_.feature_seq_str, generic_dna)\n if _.strand == '-':\n gene_seq = gene_seq.reverse_complement()\n gene_seq_record = SeqRecord(gene_seq, id=_.gene_name, name=_.gene_name, description=_.desc)\n annot = Annot(_.chrom_start, _.chrom_end, _.strand == '-', gene_seq_record, \n _.coding_blocks)\n annots[contig_to_vcf_chrom(_.in_contig)].append(annot)\n _.in_feature = _.gene_name = _.desc = _.chrom_start = _.chrom_end = _.strand = None\n _.feature_seq_str = \"\"\n _.coding_blocks = []\n \n def _update_sequin_feature(fields):\n if fields[0] != \"\" and fields[1] != \"\":\n # If the first two fields are present, this specifies a sequence range\n if not (fields[0].isdigit() and fields[1].isdigit()):\n # We will only attempt to utilize *complete* CDS features\n # (None of the start or end positions can be qualified by \">\" or \"<\")\n _.in_feature = \"CDS-partial\"\n return\n\n # Append the specified sequence to the `_.feature_seq_str`.\n # Note: Sequin table coordinates, like GenBank, are 1-indexed, right-closed.\n start = int(fields[0])\n end = int(fields[1])\n if _.strand is None: \n _.strand = '+' if start <= end else '-'\n elif _.strand != ('+' if start <= end else '-'):\n sys.stderr.write(\"WARN: strand changed direction, invalid CDS\")\n _.in_feature = \"CDS-partial\"\n return\n if _.strand == '-':\n start, end = end, start\n start -= 1\n ref_contig = ref_contigs[_.in_contig]\n seg = str(ref_contig.seq)[start:end]\n _.coding_blocks.append((start, end))\n _.feature_seq_str = seg + _.feature_seq_str if _.strand == '-' else _.feature_seq_str + seg\n _.chrom_start = min(start, _.chrom_start if _.chrom_start is not None else float('inf'))\n _.chrom_end = max(end, _.chrom_end if _.chrom_end is not None else float('-inf'))\n \n elif len(fields) >= 5:\n # If the first three fields are blank, this specifies a qualifier key + value\n if fields[3] == \"gene\":\n _.gene_name = fields[4]\n elif fields[3] == \"product\":\n _.desc = fields[4]\n \n with open(sequin_path) as f:\n for line in f:\n line = line.rstrip(\"\\n\")\n fields = line.split(\"\\t\", 4)\n if len(line.strip()) == 0:\n # Whitespace-only lines signal the end of feature data for a contig.\n # They may be followed by INFO: lines from the annotator, which we ignore.\n _save_sequin_feature()\n _.in_contig = None\n elif _.in_contig is None and line[0] == '>':\n # Lines that begin with \">Feature \" signal the start of feature data for a contig\n # Fields are separated by spaces; the second field is the full contig ID\n _save_sequin_feature()\n sp_fields = line[1:].split(' ')\n if sp_fields[0] == 'Feature' and len(sp_fields) >= 2:\n if ref_contigs.has_key(sp_fields[1]):\n _.in_contig = sp_fields[1]\n elif not quiet:\n sys.stderr.write(\"WARN: unknown contig in Sequin file: %s\" % sp_fields[1])\n elif _.in_contig is not None:\n if len(fields) < 3: \n if not quiet: sys.stderr.write(\"WARN: incomplete Sequin line: %s\" % line)\n next\n in_new_feature = fields[2].strip() != \"\"\n if _.in_feature is None or in_new_feature:\n _save_sequin_feature()\n _.in_feature = fields[2].strip()\n if _.in_feature == \"CDS\":\n _update_sequin_feature(fields)\n elif _.in_feature 
== \"CDS\":\n _update_sequin_feature(fields)\n \n return annots", "def annotation_csv_consolidator(filenames, sessions=[], subjects=[], sensors=[]):\n \n single_sess_annotats = []\n c = 0\n for filename in filenames:\n single_sess_annotat = annotation_csv_importer(filename)\n if np.iterable(sessions) and len(sessions) == len(filenames):\n single_sess_index = [sessions[c],]*single_sess_annotat.shape[0]\n single_sess_annotat['session'] = single_sess_index\n if np.iterable(subjects) and len(subjects) == len(filenames):\n single_subj_index = [subjects[c],]*single_sess_annotat.shape[0]\n single_sess_annotat['subject'] = single_subj_index\n if np.iterable(sensors) and len(sensors) == len(filenames):\n single_sensor_index = [sensors[c],]*single_sess_annotat.shape[0]\n single_sess_annotat['sensor'] = single_sensor_index\n c += 1\n single_sess_annotats.append(single_sess_annotat)\n consolidate_annotation_data = pd.concat(single_sess_annotats)\n # consolidate_annotation_data = consolidate_annotation_data.reset_index(drop=False)\n # consolidate_annotation_data = consolidate_annotation_data.rename(columns={\"index\":\"index per sensor\"})\n return consolidate_annotation_data", "def main(directory, csv_file, task_name):\n csv_data = pd.read_csv(csv_file)\n colnames = csv_data.columns.tolist()\n\n edat_files = glob.glob(directory + \"*.edat*\")\n text_files = glob.glob(directory + \"*-*.txt\")\n all_files = edat_files + text_files\n pairs = []\n paired_texts = []\n\n for text_file in text_files:\n [text_fname, _] = os.path.splitext(text_file)\n for edat_file in edat_files:\n [edat_fname, _] = os.path.splitext(edat_file)\n if text_fname == edat_fname:\n pairs.append([text_file, edat_file])\n\n for pair in pairs:\n paired_texts.append(pair[0])\n\n unpaired_texts = list(set(text_files) - set(paired_texts))\n three_files = []\n pop_idx = []\n\n # List of lists\n for i_file in range(len(unpaired_texts)):\n for j_pair in range(len(paired_texts)):\n if (unpaired_texts[i_file][:len(unpaired_texts[i_file])-6] in paired_texts[j_pair]):\n three_files.append([paired_texts[j_pair], pairs[j_pair][1],\n unpaired_texts[i_file]])\n pop_idx.append(i_file)\n\n for rm in reversed(pop_idx):\n unpaired_texts.pop(rm)\n\n # three_files is the text files and edats that form a triad (one edat, two\n # similarly named text files).\n for triad in three_files:\n for i_pair in reversed(range(len(pairs))):\n if triad[0:2] == pairs[i_pair]:\n pairs.pop(i_pair)\n\n two_texts = []\n all_two_texts = []\n two_text_pairs = []\n\n for i_file in range(len(unpaired_texts)):\n for j_file in range(i_file + 1, len(unpaired_texts)):\n if (unpaired_texts[i_file][:len(unpaired_texts[i_file])-6] in unpaired_texts[j_file]):\n all_two_texts.append(i_file)\n all_two_texts.append(j_file)\n two_text_pairs.append([i_file, j_file])\n\n all_two_texts = sorted(all_two_texts, reverse=True)\n\n # two_texts is the text files that pair with other text files.\n for i_pair in range(len(two_text_pairs)):\n two_texts.append([unpaired_texts[two_text_pairs[i_pair][0]],\n unpaired_texts[two_text_pairs[i_pair][1]]])\n\n for i_file in all_two_texts:\n unpaired_texts.pop(i_file)\n\n # one_text is the remaining un-paired text files.\n one_text = [[unpaired_texts[i_file]] for i_file in range(len(unpaired_texts))]\n\n # Determine subject IDs and timepoints for all files.\n # Assumes that files will be named according to convention\n # blahblahblah_[subj]-[tp].txt or blahblahblah-[subj]-[tp].txt.\n one_text_subjects = [get_subject(file_[0]) for file_ in one_text]\n 
one_text_timepoints = [get_timepoint(file_[0]) for file_ in one_text]\n two_text_subjects = [get_subject(pair[0]) for pair in two_texts]\n two_text_timepoints = [get_timepoint(pair[0]) for pair in two_texts]\n three_file_subjects = [get_subject(triad[0]) for triad in three_files]\n three_file_timepoints = [get_timepoint(triad[0]) for triad in three_files]\n pair_subjects = [get_subject(pair[0]) for pair in pairs]\n pair_timepoints = [get_timepoint(pair[0]) for pair in pairs]\n\n af_files = ([item for sublist in pairs for item in sublist] +\n [item for sublist in two_texts for item in sublist] +\n [item for sublist in three_files for item in sublist] +\n [item for sublist in one_text for item in sublist])\n\n one_edat = list(set(all_files) - set(af_files))\n one_edat = [[edat] for edat in one_edat]\n one_edat_subjects = [get_subject(file_[0]) for file_ in one_edat]\n one_edat_timepoints = [get_timepoint(file_[0]) for file_ in one_edat]\n\n all_subjects = (one_text_subjects + two_text_subjects + three_file_subjects +\n pair_subjects + one_edat_subjects)\n all_notetype = (([\"one_text\"] * len(one_text_subjects)) +\n ([\"two_texts\"] * len(two_text_subjects)) +\n ([\"three_files\"] * len(three_file_subjects)) +\n ([\"pair\"] * len(pair_subjects)) +\n ([\"one_edat\"] * len(one_edat_subjects)))\n all_timepoints = (one_text_timepoints + two_text_timepoints +\n three_file_timepoints + pair_timepoints +\n one_edat_timepoints)\n all_file_sets = one_text + two_texts + three_files + pairs + one_edat\n\n organized_dir = org_dir_dict.get(task_name)\n\n for i_subj in range(len(all_subjects)):\n month = timepoint_dict.get(task_name).get(all_timepoints[i_subj])\n files_note = note_dict.get(all_notetype[i_subj])\n if len(all_subjects) > 4:\n try:\n print(\"Successfully organized %s-%s\" % (all_subjects[i_subj], month))\n print(\"Moved:\")\n subject_id = all_subjects[i_subj]\n files = all_file_sets[i_subj]\n note = organize_files(subject_id, month, files, organized_dir)\n note.append(files_note)\n orged = 1\n orgedwhen = time.strftime(\"%Y/%m/%d\")\n orgedby = \"PY\"\n except IOError:\n print(\"%s-%s couldn't be organized.\" % (all_subjects[i_subj], all_timepoints[i_subj]))\n note = files_note\n orged = 0\n orgedwhen = \"\"\n orgedby = \"\"\n\n try:\n if all_notetype[i_subj] == \"pair\":\n print(\"Successfully converted %s-%s\" % (all_subjects[i_subj], all_timepoints[i_subj]))\n conved = 1\n convedwhen = time.strftime(\"%Y/%m/%d\")\n convedby = \"PY\"\n else:\n print(\"%s-%s couldn't be converted.\" % (all_subjects[i_subj], all_timepoints[i_subj]))\n conved = 0\n convedwhen = \"\"\n convedby = \"\"\n except IOError:\n print(\"%s-%s couldn't be converted.\" % (all_subjects[i_subj], all_timepoints[i_subj]))\n conved = 0\n convedwhen = \"\"\n convedby = \"\"\n else:\n print(\"%s-%s couldn't be organized.\" % (all_subjects[i_subj], all_timepoints[i_subj]))\n note = files_note\n orged = 0\n orgedwhen = \"\"\n orgedby = \"\"\n print(\"%s-%s couldn't be converted.\" % (all_subjects[i_subj], all_timepoints[i_subj]))\n conved = 0\n convedwhen = \"\"\n convedby = \"\"\n\n csv_data = add_subject(csv_data, all_subjects[i_subj],\n all_timepoints[i_subj], orged, orgedwhen, orgedby,\n conved, convedwhen, convedby, note)\n\n csv_data = csv_data[colnames]\n csv_data.to_csv(csv_file, index=False)", "def transcribe(dna):\n\n\n if isinstance(dna, Seq):\n return dna.transcribe()\n elif isinstance(dna, MutableSeq):\n return dna.toseq().transcribe()\n else:\n return dna.replace(\"T\", \"U\").replace(\"t\", \"u\")", "def 
fasta2csv(self, is_local_interpro):\n\t\tprint(\"Creating row for each protein with domain, please wait..\")\n\t\tdataset_name = \"toxin_dataset.csv\"\n\t\tnum_all_proteins = 0\n\t\tnum_proteins_with_domains = 0\n\t\tnum_remain_proteins = 0\n\t\tcsv_already_exists = True\n\t\tif not isfile(join(self.output_path, dataset_name)): # if csv not exists then firstly write header\n\t\t\tcsv_already_exists = False\n\t\tfor fasta_file in listdir(self.fasta_dir_path):\n\t\t\tshort_label = splitext(basename(fasta_file))[0].split(\".\")[0]\n\t\t\twith open(join(self.fasta_dir_path, fasta_file), 'r') as fasta_data, open(self.domains_path,\n\t\t\t 'r') as domains_data, open(\n\t\t\t\t\tjoin(self.output_path, dataset_name), 'a') as dataset_csv, open(\n\t\t\t\t\tjoin(self.output_path, \"toxin_remaining_seq\" + \"_\" + short_label + \".fasta\"),\n\t\t\t\t\t'a') as remain_seqs_file:\n\t\t\t\tproteins_dict = SeqIO.to_dict(SeqIO.parse(fasta_data, \"fasta\"))\n\t\t\t\tnum_all_proteins += len(proteins_dict)\n\t\t\t\tuniprot2prot = self.extract_uniprot4protein_keys(proteins_dict)\n\t\t\t\twriter = csv.writer(dataset_csv, delimiter=',')\n\t\t\t\tif not csv_already_exists: # if csv not exists then firstly write header\n\t\t\t\t\tproteins_domains_header = [\"uniprot_id\", \"toxin\", \"seq\", \"seq_len\", \"interpro_domains\",\n\t\t\t\t\t \"evidence_db_domains\"]\n\t\t\t\t\twriter.writerow(proteins_domains_header)\n\t\t\t\t\tcsv_already_exists = True\n\t\t\t\tbatch_num_lines = 10000\n\n\t\t\t\tfor i, batch in enumerate(batch_iterator(domains_data, batch_num_lines)):\n\t\t\t\t\tfor line in batch:\n\t\t\t\t\t\tline_split = line.strip().split(\"\\t\")\n\t\t\t\t\t\tassert len(line_split) == 3, \"AssertionError: {} does not have 3 tabs.\".format(line)\n\t\t\t\t\t\tuniprot_id = line_split[0]\n\t\t\t\t\t\tif uniprot_id == \"uniprot_id\":\n\t\t\t\t\t\t\tprint(\"Skipping first line\")\n\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\tif is_local_interpro:\n\t\t\t\t\t\t\tuniprot_id = uniprot_id.split(\"|\")[1]\n\t\t\t\t\t\tif uniprot_id in uniprot2prot:\n\t\t\t\t\t\t\tinterpro_ids = line_split[1]\n\t\t\t\t\t\t\tevidence_db_ids = line_split[2]\n\t\t\t\t\t\t\tlabel = self.get_labels(fasta_file)\n\t\t\t\t\t\t\t# make the row of the current protein\n\t\t\t\t\t\t\tprotein_row = [uniprot_id, label, str(uniprot2prot[uniprot_id].seq),\n\t\t\t\t\t\t\t len(str(uniprot2prot[uniprot_id].seq)), interpro_ids, evidence_db_ids]\n\t\t\t\t\t\t\twriter.writerow(protein_row)\n\t\t\t\t\t\t\tnum_proteins_with_domains += 1\n\t\t\t\t\t\t\t# remove found protein from the dictionary, to keep track of the remaining proteins\n\t\t\t\t\t\t\tuniprot2prot.pop(uniprot_id)\n\n\t\t\t\tnum_remain_proteins += len(uniprot2prot) # update num of remain proteins\n\t\t\t\tSeqIO.write(uniprot2prot.values(), remain_seqs_file, \"fasta\") # append remaining proteins to fasta\n\t\t\t\tprint(\"num of remaining proteins for {} label: {} saved on remaining fasta\".format(\n\t\t\t\t\tself.get_labels(fasta_file), len(uniprot2prot)))\n\t\tassert num_all_proteins == num_proteins_with_domains + num_remain_proteins, \"AssertionError: total num of proteins should be equal to proteins with domains + proteins without domains.\"\n\t\tprint(\"num of Toxin proteins: {}\".format(num_all_proteins))\n\t\tprint(\"num of Toxin proteins with found domains: {}\".format(num_proteins_with_domains))\n\t\tprint(\"num of remaining proteins with not found domains: {}\".format(num_remain_proteins))" ]
[ "0.5484953", "0.54577637", "0.53773695", "0.53639716", "0.5157762", "0.50608796", "0.50549304", "0.5024335", "0.5011397", "0.50003254", "0.49951127", "0.4984907", "0.4974743", "0.4969747", "0.49636707", "0.49043584", "0.4893198", "0.48811284", "0.48798373", "0.48621103", "0.48617107", "0.48425254", "0.48391378", "0.48330083", "0.47914118", "0.47657967", "0.47535914", "0.47512376", "0.47492865", "0.47433195" ]
0.62633854
0
Helper function for creating and running a Python subprocess executing the given string as Python code.
async def async_run_subproc_from_code(sub_proc_code: str) -> asyncio.subprocess.Process:
    return await asyncio.create_subprocess_exec(sys.executable, '-c', sub_proc_code,
                                                stdout=asyncio.subprocess.PIPE)
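A minimal usage sketch of this helper; the child-process one-liner and the printed marker string are illustrative assumptions, and the helper is restated so the snippet runs on its own:

import asyncio
import sys


async def async_run_subproc_from_code(sub_proc_code: str) -> asyncio.subprocess.Process:
    # Restated from the record above so this sketch is self-contained.
    return await asyncio.create_subprocess_exec(sys.executable, '-c', sub_proc_code,
                                                stdout=asyncio.subprocess.PIPE)


async def main() -> None:
    # Spawn a child interpreter running the given code and capture its stdout.
    proc = await async_run_subproc_from_code("print('hello from child')")
    stdout, _ = await proc.communicate()  # waits for exit and drains the pipe
    print(stdout.decode().strip())        # -> hello from child


asyncio.run(main())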
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def process_run(cmd_string, stdin=None):\n process_object=subprocess.Popen(shlex.split(cmd_string),\n stdin=stdin,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n return process_object", "def run_process(self, inp=\"\"):\n return subprocess.run(self.binary,\n input=inp,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE,\n universal_newlines=True)", "def call_command_line(string, **kwargs):\n return subprocess.run(string.split(\" \"), **kwargs)", "def run(self, method, input_string):\n process = subprocess.run(['python', __file__, method],\n stdout=subprocess.PIPE,\n input=input_string, encoding='ascii')\n self.return_code = process.returncode\n self.output = process.stdout", "def run_cmd(cmd_str, pfexec=False, as_arr=False):\n\n cmd_chunks = cmd_str.split()\n\n if not path.exists(cmd_chunks[0]):\n raise NotImplementedError(\"'%s' not found\" % cmd_chunks[0])\n\n if pfexec:\n if not path.exists('/bin/pfexec'):\n raise NotImplementedError('pfexec not found')\n\n cmd_chunks.insert(0, '/bin/pfexec')\n\n proc = subprocess.Popen(cmd_chunks,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n (out, err) = proc.communicate()\n\n if proc.returncode == 0:\n out = out.strip().split('\\n')\n\n if len(out) > 1 or as_arr is True:\n return out\n else:\n return out[0]\n else:\n raise Exception('error: %s' % err)", "def run_subprocess(text, args):\n proc = subprocess.run(\n args,\n input=text,\n encoding='utf-8',\n stdout=subprocess.PIPE)\n return proc.stdout.strip()", "def execute(cmd_string):\n pass", "def _shellrun(command_array, cwd=None, capture_ouput=None):\n script = command_array[0]\n if script == 'jinja2_gen.py':\n import jinja2_gen\n with patch('sys.argv', command_array):\n curcwd = os.getcwd()\n os.chdir(cwd)\n try:\n jinja2_gen.main()\n except:\n pass\n os.chdir(curcwd)\n else:\n return subprocess.run(command_array, cwd=cwd, capture_ouput=True)", "def run_subprocess(self, input_value):\n try:\n proc = Popen([\"python\", self.SCRIPT_NAME],\n stdin=PIPE,\n stdout=PIPE,\n stderr=PIPE)\n out_value, err_value = proc.communicate(\n input_value.encode(self.ENCODING),\n timeout=self.PROCESS_TIMEOUT)\n except TimeoutExpired:\n proc.kill()\n out_value, err_value = proc.communicate()\n return out_value.decode(self.ENCODING), err_value.decode(self.ENCODING)", "def shell_cmd(*args):\n proc = subprocess.run(args)\n returncode = proc.returncode\n if returncode != 0:\n raise RuntimeError(\n f\"Command {args} failed with return code {returncode}\")\n return proc", "def ez_run(cls, program_string):\n res = Cpu('test')\n res.load(program_string.split('\\n'))\n res.run()\n return res", "def string_to_run(self, qad, executable, stdin=None, stdout=None, stderr=None, exec_args=None):\n stdin = \"< \" + stdin if stdin is not None else \"\"\n stdout = \"> \" + stdout if stdout is not None else \"\"\n stderr = \"2> \" + stderr if stderr is not None else \"\"\n\n if exec_args:\n executable = executable + \" \" + \" \".join(list_strings(exec_args))\n\n basename = os.path.basename(self.name)\n if basename in [\"mpirun\", \"mpiexec\", \"srun\"]:\n if self.type is None:\n # $MPIRUN -n $MPI_PROCS $EXECUTABLE < $STDIN > $STDOUT 2> $STDERR\n num_opt = \"-n \" + str(qad.mpi_procs)\n cmd = \" \".join([self.name, self.options, num_opt, executable, stdin, stdout, stderr])\n else:\n raise NotImplementedError(\"type %s is not supported!\" % self.type)\n\n elif basename == \"runjob\":\n #runjob --ranks-per-node 2 --exp-env OMP_NUM_THREADS --exe $ABINIT < $STDIN > $STDOUT 2> $STDERR\n #runjob -n 2 --exp-env=OMP_NUM_THREADS 
--exe $ABINIT < $STDIN > $STDOUT 2> $STDERR\n # exe must be absolute path or relative to cwd.\n bg_size, rpn = qad.bgsize_rankspernode()\n #num_opt = \"-n \" + str(qad.mpi_procs)\n num_opt = \"--ranks-per-node \" + str(rpn)\n cmd = \" \".join([self.name, self.options, num_opt, \"--exp-env OMP_NUM_THREADS\",\n \"--exe `which \" + executable + \"` \", stdin, stdout, stderr])\n else:\n if qad.mpi_procs != 1:\n raise ValueError(\"Cannot use mpi_procs > when mpi_runner basename=%s\" % basename)\n cmd = \" \".join([executable, stdin, stdout, stderr])\n\n return cmd", "def run_python(parameters, ignore_return_code=False, stdin=None):\n cmd = [sys.executable]\n if sys.platform == 'win32':\n if isinstance(parameters, string_types):\n cmd = ' '.join(cmd) + ' ' + parameters\n else:\n cmd = ' '.join(cmd + parameters)\n else:\n if isinstance(parameters, string_types):\n parameters = shlex.split(parameters)\n cmd += parameters\n p = Popen(cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE)\n stdout, stderr = p.communicate(input=stdin)\n if not (p.returncode == 0 or ignore_return_code):\n raise OSError(bytes_to_str(stderr))\n return stdout.decode('utf8', 'replace'), stderr.decode('utf8', 'replace')", "def _run_shell(self, command_string: str, cwd: str = '/', print_command: bool = False) -> subprocess.Popen:\n if print_command:\n self.logger.info(command_string)\n return subprocess.Popen(command_string, shell=True, cwd=cwd)", "async def _run_subprocess(\n cmd: str,\n allow_params: bool,\n params: Dict[str, ParamValueT],\n) -> Dict[str, Any]:\n cmd_str = cmd\n if allow_params:\n if params[\"shell_params\"] == []:\n cmd_str = cmd.format([''])\n else:\n cmd_str = cmd.format(*params.get('shell_params', ['']))\n\n logging.info(\"Running command: %s\", cmd_str)\n\n cmd_list = shlex.split(cmd_str)\n\n process = await asyncio.create_subprocess_exec(\n *cmd_list,\n stdout=asyncio.subprocess.PIPE,\n stderr=asyncio.subprocess.PIPE,\n )\n\n stdout, stderr = await process.communicate()\n\n return {\n \"returncode\": process.returncode,\n \"stdout\": stdout.decode(),\n \"stderr\": stderr.decode(),\n }", "def exec_python(*args, **kwargs):\n cmdargs, kwargs = __wrap_python(args, kwargs)\n return exec_command(*cmdargs, **kwargs)", "def send_to_cmdline(string):\n retval=subprocess.check_output(string, shell=True)\n retval=str(retval) # Convert from byte string", "def run(cmd: str) -> None:\n subprocess.run(cmd, shell=True, check=True)", "def run_shell_command(program: str, args: list, separator = None):\n cmd = [program]\n\n for arg in args:\n cmd.append(arg)\n\n return subprocess.run(cmd, stdout=subprocess.PIPE).stdout.decode(\"utf-8\")", "def exec_and_return(execargs):\n return subprocess.call(execargs)", "def run_code():\n\n output = None\n code = request.json['code']\n\n cmd = 'python -c \"' + code +'\"'\n p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE,\n stderr=STDOUT, close_fds=True)\n output = p.stdout.read()\n\n return jsonify(output.decode('utf-8'))", "def _run(*args):\n return subprocess.run(\n args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, check=True,\n universal_newlines=True)", "def run_subprocess(cmd):\n subprocess.Popen(cmd, stdin =subprocess.PIPE,\n stderr=subprocess.PIPE,\n stdout=subprocess.PIPE,\n shell=True,)", "def py_exec(self, code_string):\n if not isinstance(code_string, str):\n print('py_exec: Error, the code must be a string `{}`!'.format(code_string))\n return False\n\n try: ret = eval(code_string, self.global_vars, self.global_vars)\n except Exception, e:\n print('py_exec: Error execution 
code `{}`! Exception `{}`!'.format(code_string, e))\n ret = False\n\n return ret", "def process(cmd_string, stdin=None):\n return process_results(process_run(cmd_string, stdin=stdin))", "def run_python(python_binary, command, extra_args=(), output=False,\n additional_env={}):\n args = [python_binary, os.path.join(CODE_PATH, command)] + list(extra_args)\n if output:\n command = check_output\n else:\n command = check_call\n env = os.environ.copy()\n env.update(additional_env)\n return command(args, env=env)", "def _run_command(args):\n subprocess.run(args, check=True)", "def run_subprocess(self, *cmd_and_args):\n\n command_line = \" \".join(cmd_and_args)\n self.logger.debug(\"Running: %s\", command_line)\n\n return subprocess.Popen(command_line, shell=True, close_fds=True)", "def runScript(path=None):\n if path:\n exec(compile(open(path, \"rb\").read(), path, 'exec'))", "def _run_cmd(*args):\n proc = Popen(\n args, stdin=PIPE, stdout=PIPE, stderr=PIPE,\n cwd=os.path.dirname(__file__))\n output, _ = proc.communicate()\n code = proc.returncode\n return code, output" ]
[ "0.6948954", "0.6686626", "0.6629013", "0.65176725", "0.6339211", "0.6338575", "0.6203015", "0.60847574", "0.6064163", "0.60511017", "0.60285914", "0.6006691", "0.59902215", "0.5979476", "0.5975541", "0.59750855", "0.5935459", "0.5928149", "0.5910659", "0.5901592", "0.58895797", "0.5873045", "0.5813116", "0.58053404", "0.58052105", "0.5779019", "0.5767785", "0.5700492", "0.56971014", "0.56896746" ]
0.6972163
0
Try to find the project root directory based on expected lists of files and/or directories it contains, recursively ascending from a given starting directory. Given a starting file assumed to be a descendant of the project root directory (or the root directory itself), examine that file to see if it is the project root. Determine this by checking whether it is a directory containing all of a specified list of child files and/or child directories. If it is the project root, return it; if not, recursively perform the same steps on the file's parent directory, until this has been done for a maximum number of levels or the filesystem root is reached (in which case ``None`` is returned).
def find_proj_root(self, descendant: Path, file_names: list, dir_names: list, max_levels: int = 25):
    count_test_files = len(file_names) if isinstance(file_names, list) else 0
    count_test_files += len(dir_names) if isinstance(dir_names, list) else 0
    if count_test_files == 0:
        raise RuntimeError("_find_proj_root() must be given at least one expected file/dir in project root")
    levels = 0
    if descendant.is_dir() and self._dir_contains(descendant, file_names) and self._dir_contains(descendant, dir_names, True):
        return descendant
    for d in descendant.parents:
        if max_levels < 1 or levels < max_levels:
            levels += 1
        else:
            break
        if self._dir_contains(d, file_names) and self._dir_contains(d, dir_names, True):
            return d
    return None
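A minimal standalone sketch of the same upward search; the original is an instance method that delegates membership checks to a private self._dir_contains helper, so the free-function form and the marker file names here are assumptions for illustration:

from pathlib import Path
from typing import List, Optional


def find_project_root(start: Path, markers: List[str], max_levels: int = 25) -> Optional[Path]:
    # Examine the starting directory itself, then up to max_levels ancestors.
    for candidate in [start] + list(start.parents)[:max_levels]:
        if all((candidate / name).exists() for name in markers):
            return candidate
    return None  # hit the filesystem root (or the level cap) without a match


# e.g. ascend from the current working directory looking for two marker files
root = find_project_root(Path.cwd(), ['setup.py', 'requirements.txt'])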
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _find_project_root_dir(target: Path) -> Path:\n p = Path(\".\").resolve()\n for _ in range(1000):\n try:\n candidate = _search_for_dir(target, p)\n return candidate.parent\n except DirectoryNotFoundException:\n p = p.parent\n raise DirectoryNotFoundException", "def find_project_directory(start=\".\", look_for=None):\n look_for = set(look_for or DEFAULT_PROJECT_INDICATORS)\n\n directory = path.path(start).abspath()\n\n while directory.parent != directory:\n items = os.listdir(directory)\n if any(i in look_for for i in items):\n return directory\n\n directory = directory.parent\n\n raise WatsonError('%s does not look like a project subdirectory' % start)", "def _find_root() -> pathlib.Path:\n cwd = pathlib.Path.cwd()\n while not (\n pathlib.Path(cwd, \"pyproject.toml\").exists() or\n pathlib.Path(cwd, \"poetry.lock\").exists() or\n pathlib.Path(\"/\") == cwd\n ):\n cwd = cwd.parent\n return cwd", "def _walk_to_root(path):\n if not os.path.exists(path):\n raise IOError('Starting path not found')\n\n if os.path.isfile(path):\n path = os.path.dirname(path)\n\n last_dir = None\n current_dir = os.path.abspath(path)\n while last_dir != current_dir:\n yield current_dir\n parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir))\n last_dir, current_dir = current_dir, parent_dir", "def find_root():\n\n curdir = os.path.curdir\n fs_root = \"/\"\n # Do as build/envsetup.sh does\n # if this files exists, we're at root\n root_clue = \"build/core/envsetup.mk\"\n found = False\n while not found and not os.path.samefile(fs_root, curdir):\n if os.path.exists(os.path.join(curdir, root_clue)):\n found = True\n break\n curdir = os.path.join(os.path.pardir, curdir)\n return curdir if found else None", "def FindInPathParents(path_to_find, start_path, test_func=None):\n if test_func is None:\n test_func = os.path.exists\n for path in IteratePathParents(start_path):\n target = os.path.join(path, path_to_find)\n if test_func(target):\n return target\n return None", "def ancestors(self):\r\n\r\n def find_parent(dir):\r\n parent = os.path.dirname(dir)\r\n buildfile = os.path.join(parent, BuildFile._CANONICAL_NAME)\r\n if os.path.exists(buildfile) and not os.path.isdir(buildfile):\r\n return parent, BuildFile(self.root_dir, os.path.relpath(buildfile, self.root_dir))\r\n else:\r\n return parent, None\r\n\r\n parent_buildfiles = OrderedSet()\r\n\r\n parentdir = os.path.dirname(self.full_path)\r\n visited = set()\r\n while parentdir not in visited and self.root_dir != parentdir:\r\n visited.add(parentdir)\r\n parentdir, buildfile = find_parent(parentdir)\r\n if buildfile:\r\n parent_buildfiles.update(buildfile.family())\r\n\r\n return parent_buildfiles", "def _lowest_ancestor_containing_relpath(relpath):\n cur_dir_path = os.path.abspath(os.path.dirname(__file__))\n while True:\n if os.path.exists(os.path.join(cur_dir_path, relpath)):\n return cur_dir_path\n\n next_dir_path = os.path.dirname(cur_dir_path)\n if next_dir_path != cur_dir_path:\n cur_dir_path = next_dir_path\n else:\n return None", "def root_directory(path):\n path = _resolve_path(path)\n found = False\n while not found:\n if path.parent == path: # parent is self\n return None\n valid = is_nonraw_object_directory(path)\n if not valid:\n path = path.parent\n continue\n\n meta_filename = path / META_FILENAME\n with meta_filename.open(\"r\", encoding=\"utf-8\") as meta_file:\n meta_data = yaml.YAML(typ=\"safe\", pure=True).load(meta_file)\n if EXDIR_METANAME not in meta_data:\n path = path.parent\n continue\n exdir_meta = 
meta_data[EXDIR_METANAME]\n if TYPE_METANAME not in exdir_meta:\n path = path.parent\n continue\n if FILE_TYPENAME != exdir_meta[TYPE_METANAME]:\n path = path.parent\n continue\n found = True\n return path", "def get_relative_project_dir(project_repo_name=None, partial=True):\n current_working_directory = Path.cwd()\n cwd_parts = current_working_directory.parts\n if partial:\n while project_repo_name not in cwd_parts[-1]:\n current_working_directory = current_working_directory.parent\n cwd_parts = current_working_directory.parts\n else:\n while cwd_parts[-1] != project_repo_name:\n current_working_directory = current_working_directory.parent\n cwd_parts = current_working_directory.parts\n return current_working_directory", "def _search_parent_dir(file_name):\n\n current_dir = os.getcwd()\n parent_dir = os.path.dirname(current_dir)\n while current_dir != parent_dir:\n if not os.path.splitdrive(current_dir)[-1]:\n return False\n file_list = os.listdir(current_dir)\n parent_dir = os.path.dirname(current_dir)\n\n if file_name in file_list:\n return current_dir\n\n else:\n current_dir = parent_dir\n return False", "def find_vc_root(filename, max_iterations=5):\n current_dir = os.path.dirname(filename)\n while True:\n files = os.listdir(current_dir)\n for dirname in VCS_DIRNAMES:\n if dirname in files:\n return current_dir\n\n current_dir = os.path.dirname(current_dir)\n max_iterations -= 1\n if max_iterations == 0:\n raise ValueError(\"Could not find version control root.\")", "def tree_resolveRoot():\n nonlocal str_rootDir\n if self.b_relativeDir:\n os.chdir(self.str_inputDir)\n str_rootDir = '.'\n else:\n str_rootDir = self.str_inputDir\n return str_rootDir", "def test_find_in_parent_path(self):\n directory = os.path.dirname(os.path.realpath(__file__))\n subdirectory = os.path.join(directory, 'fake')\n result = steptest.find_project_directory(subdirectory)\n self.assertEqual(directory, result)", "def IteratePathParents(start_path):\n path = os.path.abspath(start_path)\n yield path\n while path.strip('/'):\n path = os.path.dirname(path)\n yield path", "def find_directory_with_a_file(\n filename: str,\n cwd: Optional[Union[str, Path]] = None) -> Optional[Path]:\n if cwd is None:\n curr_dir = Path(os.getcwd()).absolute()\n else:\n curr_dir = Path(cwd).absolute()\n\n pathname = curr_dir / filename\n if pathname.exists():\n return curr_dir\n\n for work_dir in curr_dir.parents:\n pathname = work_dir / filename\n if pathname.exists():\n return work_dir\n\n return None", "def getProjectRoot(self):\n currentPath = os.getcwd()\n while(True):\n if \"DataStore\" in os.listdir(currentPath):\n break\n currentPath = \"/\".join(currentPath.split(\"/\")[:-1])\n return currentPath + \"/\"", "def get_repo_root_from_file_in_repo(path_to_file_in_repo):\n try:\n repo = Repo(path=path_to_file_in_repo, search_parent_directories=True)\n\n submodules_root = repo.git.rev_parse(\"--show-superproject-working-tree\")\n if submodules_root:\n return submodules_root\n\n git_root = repo.git.rev_parse(\"--show-toplevel\")\n return git_root\n except InvalidGitRepositoryError:\n return None", "def FindUpwardParent(start_dir, *desired_list):\n desired_path = os.path.join(*desired_list)\n last_dir = ''\n cur_dir = start_dir\n found_path = os.path.join(cur_dir, desired_path)\n while not os.path.exists(found_path):\n last_dir = cur_dir\n cur_dir = os.path.dirname(cur_dir)\n if last_dir == cur_dir:\n raise PathNotFound('Unable to find %s above %s' %\n (desired_path, start_dir))\n found_path = os.path.join(cur_dir, desired_path)\n # 
Strip the entire original desired path from the end of the one found\n # and remove a trailing path separator, if present (unless it's\n # filesystem/drive root).\n found_path = found_path[:len(found_path) - len(desired_path)]\n if found_path.endswith(os.sep) and os.path.dirname(found_path) != found_path:\n found_path = found_path[:len(found_path) - 1]\n return found_path", "def find_folder(startpath, folder_name, first_occurrence=False):\n candidates = []\n for root, dirs, files in os.walk(startpath):\n for d in dirs:\n if d == folder_name.strip('/'):\n if first_occurrence:\n candidates.append(os.path.abspath(root + '/' + d))\n return candidates\n candidates.append(os.path.abspath(root+'/'+d))\n return candidates", "def get_project_root(self):\n # Get current working directory\n cwd = os.getcwd()\n # Remove all children directories\n rd = os.path.join(cwd.split('stochastic-travel-demand-modelling/', 1)[0])\n # Make sure directory ends with project's name\n if not rd.endswith('stochastic-travel-demand-modelling'):\n rd = os.path.join(rd,'stochastic-travel-demand-modelling/')\n\n return rd", "def search_parents(name, cwd):\n for pdir in parents(cwd):\n if name in os.listdir(pdir):\n return os.path.join(pdir, name)\n\n return None", "def find_in_parent_dir(fname):\n p = os.path.abspath(os.path.curdir)\n \n while not os.path.exists(os.path.join(p, project_conf_name)):\n oldp, p = p, os.path.dirname(p)\n if p == oldp:\n return None\n \n return open(os.path.join(p, project_conf_name), 'r')", "def test_find_in_current_path(self):\n directory = os.path.dirname(os.path.realpath(__file__))\n result = steptest.find_project_directory(directory)\n self.assertEqual(directory, result)", "def find_root_in_parents(cfg, path_parts, roottype):\n\n # Easy out: caller wants rootname \"CVSROOT\", and we're hiding those.\n if path_parts[-1] == \"CVSROOT\" and cfg.options.hide_cvsroot:\n return None\n\n path_encoding, _ = get_repos_encodings(cfg, None)\n\n for pp in cfg.general.root_parents:\n path, context, repo_type = _parse_root_parent(pp)\n\n if repo_type != roottype:\n continue\n if context is not None:\n if not _path_starts_with(path_parts, context):\n continue\n rootidx = len(context)\n else:\n rootidx = 0\n\n if len(path_parts) <= rootidx:\n continue\n\n rootname = path_parts[rootidx]\n fullroot = _path_join(path_parts[0 : (rootidx + 1)])\n remain = path_parts[(rootidx + 1) :]\n\n rootpath = None\n if roottype == \"cvs\":\n rootpath = vclib.ccvs.find_root_in_parent(path, rootname,\n path_encoding)\n elif roottype == \"svn\":\n rootpath = vclib.svn.find_root_in_parent(path, rootname,\n path_encoding)\n\n if rootpath is not None:\n return fullroot, rootpath, remain\n return None, None, None", "def test_find_in_grandparent_path(self):\n directory = os.path.dirname(os.path.realpath(__file__))\n subdirectory = os.path.join(directory, 'fake', 'fake')\n result = steptest.find_project_directory(subdirectory)\n self.assertEqual(directory, result)", "def get_project_root():\n # Get current working directory\n cwd = os.getcwd()\n # Remove all children directories\n rd = os.path.join(cwd.split('stochastic-travel-demand-modelling/', 1)[0])\n # Make sure directory ends with project's name\n if not rd.endswith('stochastic-travel-demand-modelling'):\n rd = os.path.join(rd,'stochastic-travel-demand-modelling/')\n\n return rd", "def parent_directories(start, stop=None, strict=True):\n start = os.path.abspath(start)\n if stop is not None:\n stop = os.path.abspath(stop)\n\n if start == stop:\n return\n\n if not strict and 
os.path.isdir(start):\n yield start\n\n while True:\n parent = os.path.dirname(start)\n if parent == stop or parent == start:\n break\n yield parent\n start = parent", "def find(cls, target):\r\n target_path = os.path.relpath(target.address.buildfile.parent_path, get_buildroot())\r\n\r\n def _find():\r\n for root_dir, types in cls._TYPES_BY_ROOT.items():\r\n if target_path.startswith(root_dir): # The only candidate root for this target.\r\n # Validate the target type, if restrictions were specified.\r\n if types and not isinstance(target, tuple(types)):\r\n # TODO: Find a way to use the BUILD file aliases in the error message, instead\r\n # of target.__class__.__name__. E.g., java_tests instead of JavaTests.\r\n raise TargetDefinitionException(target,\r\n 'Target type %s not allowed under %s' % (target.__class__.__name__, root_dir))\r\n return root_dir\r\n return None\r\n\r\n # Try already registered roots\r\n root = _find()\r\n if root:\r\n return root\r\n\r\n # Fall back to searching the ancestor path for a root.\r\n # TODO(John Sirois): We currently allow for organic growth of maven multi-module layout style\r\n # projects (for example) and do not require a global up-front registration of all source roots\r\n # and instead do lazy resolution here. This allows for parse cycles that lead to surprising\r\n # runtime errors. Re-consider allowing lazy source roots at all.\r\n for buildfile in reversed(target.address.buildfile.ancestors()):\r\n if buildfile not in cls._SEARCHED:\r\n ParseContext(buildfile).parse()\r\n cls._SEARCHED.add(buildfile)\r\n root = _find()\r\n if root:\r\n return root\r\n\r\n # Finally, resolve files relative to the BUILD file parent dir as the target base\r\n return target_path", "def get_source_dir(filename):\n if not filename:\n return os.path.expanduser('~')\n # return os.getcwd()\n\n (cabal_dir, project_name) = get_cabal_project_dir_and_name_of_file(filename)\n if not cabal_dir:\n return os.path.dirname(filename)\n\n _project_name, cabal_file = get_cabal_in_dir(cabal_dir)\n exit_code, out, err = call_and_wait([CABAL_INSPECTOR_EXE_PATH, cabal_file])\n\n if exit_code == 0:\n info = json.loads(out)\n\n dirs = [\".\"]\n\n if 'error' not in info:\n # collect all hs-source-dirs\n if info['library']:\n dirs.extend(info['library']['info']['source-dirs'])\n for i in info['executables']:\n dirs.extend(i['info']['source-dirs'])\n for t in info['tests']:\n dirs.extend(t['info']['source-dirs'])\n\n paths = [os.path.abspath(os.path.join(cabal_dir, d)) for d in dirs]\n paths.sort(key = lambda p: -len(p))\n\n for p in paths:\n if filename.startswith(p):\n return p\n\n return os.path.dirname(filename)" ]
[ "0.6449977", "0.63446194", "0.59693354", "0.589468", "0.57563084", "0.57514924", "0.56900877", "0.5676506", "0.5509007", "0.54974484", "0.54620385", "0.5414883", "0.5392206", "0.5351022", "0.53417945", "0.533628", "0.53257906", "0.53238785", "0.52965724", "0.52953446", "0.5283511", "0.52354264", "0.5230426", "0.5230225", "0.51910436", "0.51572603", "0.5154022", "0.5145154", "0.51269567", "0.5123517" ]
0.6357597
1
Test the parse_request_type method of the RequestHandler on the basic test JOB request example, checking for auth.
def test_parse_request_type_1b(self):
    req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST,
                                        session_secret=self.session_secret,
                                        check_for_auth=True)
    self.assertEqual(req_type, MessageEventType.MAAS_REQUEST)
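A self-contained toy showing the shape of this test; the enum members, the parse function, and the 'model'/'session-secret' keys are illustrative assumptions, not the project's actual RequestHandler API:

import unittest
from enum import Enum


class MessageEventType(Enum):
    MAAS_REQUEST = 1
    INVALID = 2


def parse_request_type(data: dict, check_for_auth: bool = False) -> MessageEventType:
    # A request parses as MAAS_REQUEST only if it names a model and, when auth
    # checking is enabled, also carries a session secret.
    if 'model' not in data:
        return MessageEventType.INVALID
    if check_for_auth and 'session-secret' not in data:
        return MessageEventType.INVALID
    return MessageEventType.MAAS_REQUEST


class TestParseRequestType(unittest.TestCase):
    def test_valid_request_with_auth(self):
        data = {'model': 'nwm', 'session-secret': 'abc123'}
        self.assertEqual(parse_request_type(data, check_for_auth=True),
                         MessageEventType.MAAS_REQUEST)


if __name__ == '__main__':
    unittest.main()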
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_parse_request_type_1d(self):\n self.test_job_data.pop('model')\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret='some_string',\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_1c(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret='some_string',\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_1a(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret=self.session_secret)\n self.assertEqual(req_type, MessageEventType.MAAS_REQUEST)", "def test_parse_request_type_2e(self):\n self.test_auth_data.pop('username')\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_2f(self):\n self.test_auth_data.pop('user_secret')\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_2c(self):\n self.test_auth_data['username'] = 'short'\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def _parse_in_request(self, request):\n error = None\n self.logger.debug(\"Http method: %s\" % request.method)\n if request.method == 'GET':\n self._params = request.args.to_dict()\n self.logger.debug(\"Request params: %s\" % self._params)\n \n elif request.method == 'POST':\n self._params = request.form.to_dict()\n self.logger.debug(\"Request params: %s\" % self._params)", "def test_right_request_class(self):\n environ = complete_environ(REQUEST_METHOD=\"GET\", PATH_INFO=\"/\")\n\n def start_response(status, response_headers):\n pass\n\n self.handler(environ, start_response)\n\n ok_(isinstance(self.handler.request, DjangoWSGIRequest))", "def test_request(self, kasserver, kasapi):\n kasserver._request(self.REQUEST_TYPE, self.REQUEST_PARAMS)\n request = {\n \"KasUser\": USERNAME,\n \"KasAuthType\": \"plain\",\n \"KasAuthData\": PASSWORD,\n \"KasRequestType\": self.REQUEST_TYPE,\n \"KasRequestParams\": self.REQUEST_PARAMS,\n }\n kasapi.assert_called_once_with(json.dumps(request))", "def rest_api_request_handler(self, request_type):\n result = {}\n success_code = 0\n with self.resource_lock:\n if request_type == self.RestRequest.REST_MUTS:\n result = self.muts # Returns MUTs\n elif request_type == self.RestRequest.REST_TEST_SPEC:\n result = self.test_spec # Returns Test Specification\n elif request_type == self.RestRequest.REST_TEST_RESULTS:\n pass # Returns test results\n else:\n success_code = -1\n return json.dumps(self.get_rest_result_template(result, 'request/' + request_type, success_code), indent=4)", "def test_parse_request_type_2a(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.SESSION_INIT)", "def handle_request(self, request: HttpParser) -> None:\n raise NotImplementedError() # pragma: no cover", "def basicRequest(self):\n endpoint = \"/foo\"\n\n def verify(request):\n o(request.method).equals(\"GET\")(\"Checking basic 
request method.\")\n o(request.url).equals(endpoint)(\"Checking basic request url.\")\n request.respond(200)\n self.testServer.respondWith(verify)\n\n server.request(endpoint)\n self.testServer.respond()", "def parse_request(self, request):\n request.process_inputs()", "def test_api_content_type(httpretty, new_job):\n url = 'https://salesforce/services/async/34.0/job/THEJOBID'\n httpretty.register_uri('POST', url, status=201)\n new_job.request('post', url, data=b'hi', content_type='text/plain')\n assert httpretty.last_request().headers['Content-Type'] == 'text/plain'", "def test_parse_request_type_2d(self):\n self.test_auth_data['user_secret'] = 'short'\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_job_type_name(self):\n\n url = '/%s/jobs/?job_type_name=%s' % (self.api, self.job1.job_type.name)\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 1)\n self.assertEqual(result['results'][0]['job_type']['name'], self.job1.job_type.name)", "def parse_request(request):\n request_split = request.split()\n method = request_split[0]\n uri = request_split[1]\n protocol = request_split[2]\n print(\"Protocol: \" + protocol)\n headers = request_split[3]\n\n if method != \"GET\":\n raise RequestError(405, \"Method Not Allowed\")\n elif protocol != \"HTTP/1.1\":\n raise RequestError(505, \"HTTP Version Not Supported\")\n elif \"Host:\" not in headers:\n raise RequestError(400, \"Bad Request\")\n else:\n return uri", "def _parser(self, request, *args, **kwargs):\n\n self.request = request\n\n # parse header\n self.header = {k[5:]: v for k, v in request.META.items() if k.startswith('HTTP_')}\n self.header['CONTENT_TYPE'] = request.META.get('CONTENT_TYPE')\n\n # parse boby\n if request.method not in ['GET', 'HEAD']:\n\n # TODO: serve other body format\n if 'multipart/form-data' in self.header['CONTENT_TYPE']:\n self.body = request.POST.dict()\n\n else:\n # default: application/json\n if self.request.body:\n try:\n self.body = json.loads(self.request.body)\n except Exception as e:\n raise Exception('parse json body error')\n \n # parse query\n self.query = request.GET.dict()\n\n # parse cookie\n self.cookie = {k: v for k, v in request.COOKIES.items()}", "def testWholeRequest(self):\n body = self.protocol.encode_message(self.request_message)\n self.Reinitialize(input=body,\n content_type=self.content_type)\n self.factory.add_request_mapper(self.mapper())\n self.service_handler.handle('POST', '/my_service', 'method1')\n VerifyResponse(self,\n self.service_handler.response,\n '200',\n 'OK',\n self.protocol.encode_message(self.response_message),\n self.content_type)", "def test_api_default_content_type(httpretty, new_job):\n url = 'https://salesforce/services/async/34.0/job/THEJOBID'\n httpretty.register_uri('POST', url, status=201)\n new_job.request('post', url, data=b'hi')\n assert httpretty.last_request().headers['Content-Type'] == 'application/xml; charset=UTF-8'", "def parse_request(self):\r\n self.command = None # set in case of error on the first line\r\n self.request_version = version = self.default_request_version\r\n self.close_connection = 1\r\n requestline = self.raw_requestline\r\n # hack: quick and dirty fix for doubled request with bad data\r\n ok = 0\r\n if 
requestline.startswith(\"GET\"):\r\n ok += 1\r\n if requestline.startswith(\"POST\"):\r\n ok += 1\r\n if requestline.startswith(\"QUIT\"):\r\n ok += 1\r\n if ok == 0:\r\n return False\r\n # hack ends here\r\n requestline = requestline.rstrip('\\r\\n')\r\n self.requestline = requestline\r\n words = requestline.split()\r\n if len(words) == 3:\r\n command, path, version = words\r\n if version[:5] != 'HTTP/':\r\n self.send_error(400, \"Bad request version (%r)\" % version)\r\n return False\r\n try:\r\n base_version_number = version.split('/', 1)[1]\r\n version_number = base_version_number.split(\".\")\r\n # RFC 2145 section 3.1 says there can be only one \".\" and\r\n # - major and minor numbers MUST be treated as\r\n # separate integers;\r\n # - HTTP/2.4 is a lower version than HTTP/2.13, which in\r\n # turn is lower than HTTP/12.3;\r\n # - Leading zeros MUST be ignored by recipients.\r\n if len(version_number) != 2:\r\n raise ValueError\r\n version_number = int(version_number[0]), int(version_number[1])\r\n except (ValueError, IndexError):\r\n self.send_error(400, \"Bad request version (%r)\" % version)\r\n return False\r\n if version_number >= (1, 1) and self.protocol_version >= \"HTTP/1.1\":\r\n self.close_connection = 0\r\n if version_number >= (2, 0):\r\n self.send_error(505,\r\n \"Invalid HTTP Version (%s)\" % base_version_number)\r\n return False\r\n elif len(words) == 2:\r\n command, path = words\r\n self.close_connection = 1\r\n if command != 'GET':\r\n self.send_error(400,\r\n \"Bad HTTP/0.9 request type (%r)\" % command)\r\n return False\r\n elif not words:\r\n return False\r\n else:\r\n self.send_error(400, \"Bad request syntax (%r)\" % requestline)\r\n return False\r\n self.command, self.path, self.request_version = command, path, version\r\n\r\n # Examine the http_request_headers and look for a Connection directive\r\n self.headers = self.MessageClass(self.rfile, 0)\r\n\r\n conntype = self.headers.get('Connection', \"\")\r\n if conntype.lower() == 'close':\r\n self.close_connection = 1\r\n elif conntype.lower() == 'keep-alive' and self.protocol_version >= \"HTTP/1.1\":\r\n self.close_connection = 0\r\n return True", "def test_basic(self):\n request = fake_twisted_request(request_headers={\n b'x-foo': [b'bar'],\n })\n self.assertThat(\n _nevow_request_to_request_map(request),\n ContainsDict({\n 'content_type': Equals(b'application/octet-stream'),\n 'content_length': Equals(0),\n 'character_encoding': Is(None),\n 'headers': Equals({b'Content-Length': [0],\n b'X-Foo': [b'bar'],\n b'Host': [b'example.com']}),\n 'remote_addr': Equals(b'192.168.1.1'),\n 'request_method': Equals(b'GET'),\n 'server_name': Equals(b'example.com'),\n 'server_port': Equals(80),\n 'scheme': Equals(b'http'),\n 'uri': Equals(URL.from_text(u'/one'))}))", "def init_request(self):\n\t\tprint(\"Testes\")\n\t\treturn Request(url=self.login_page[0], callback=self.login)", "def test_parse_request_type_2b(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=False)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def parse_request(self, request):\n response=''\n http_code = 200\n\n request_line = request.splitlines()[0]\n request_method, path, request_version = request_line.split()\n\n #Try to split path into it's components: the operation requested and the keyvalue\n try:\n request_op, request_keyvalue = path.split('?')\n request_op = request_op[1:]\n\n #If request is a get we split in a different order than if it's a 
set\n if request_op == 'get':\n request_value, request_key = request_keyvalue.split('=')\n response, http_code = self.get_value(request_key)\n elif request_op == 'set':\n request_key, request_value = request_keyvalue.split('=')\n response, http_code = self.set_value(request_key, request_value)\n else:\n response = 'Unknown operation in URL. Must be either GET or SET.'\n http_code = 400\n\n except ValueError: #Catch any paths that don't match the form we're interested in\n response = dedent(\"\"\"Incorrect path (%s)\n Requested URL must take the form http://%s:%s/[operation]?[value]\"\"\" % (path, self.server_address, self.server_port))\n http_code = 400\n return response, http_code\n\n return response, http_code", "def test_basic_generic_request(self, api_instance):\n action = \"BasicGenericRequest\"\n\n # Send a basic payload.\n params = {\n \"ADateTime\": datetime.datetime(2020, 10, 12),\n \"ATrueBool\": True,\n \"AFalseBool\": False,\n \"NoneShouldNotExist\": None,\n }\n\n request_params = api_instance.generic_request(action=action, params=params)\n self.assert_common_params(request_params, action=\"BasicGenericRequest\")\n assert request_params[\"ADateTime\"] == \"2020-10-12T00:00:00\"\n assert request_params[\"ATrueBool\"] == \"true\"\n assert request_params[\"AFalseBool\"] == \"false\"\n assert \"NoneShouldNotExist\" not in request_params", "async def handle_request(self, request: aioweb.request.Request):", "def test_lti20_rest_good_dispatch(self):\r\n for ginput, expected in self.GOOD_DISPATCH_INPUTS:\r\n self.assertEquals(self.xmodule.parse_lti_2_0_handler_suffix(ginput), expected)", "def test_parse_request(cli_request, error, msg, uri):\n from server import parse_request\n\n if error:\n with pytest.raises(error) as e:\n parse_request(cli_request)\n assert e.args[0] == msg\n else:\n assert parse_request(cli_request) == uri" ]
[ "0.6846396", "0.65539205", "0.64534205", "0.62383395", "0.6118661", "0.6015404", "0.59169805", "0.59136623", "0.5874284", "0.5861507", "0.57382715", "0.5665503", "0.56611925", "0.5578788", "0.552031", "0.5486631", "0.5465408", "0.5462342", "0.545905", "0.54447556", "0.5372402", "0.5345684", "0.53295225", "0.53220475", "0.5319663", "0.5308844", "0.5284891", "0.52609426", "0.5240156", "0.5239357" ]
0.6644406
1
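The documents in the surrounding rows are unittest methods that all call a shared `_exec_parse` helper and compare the result against a `MessageEventType` enum, neither of which appears in this dataset. The sketch below is a speculative reconstruction inferred only from the assertions in those test strings; the enum members, the `session-secret` key, and the `MIN_CRED_LEN` threshold are assumptions, not the project's confirmed API.

```python
# Minimal, self-contained sketch of the classification logic these tests probe.
# All names and thresholds here (MessageEventType, MIN_CRED_LEN, the
# 'session-secret' key) are reconstructed guesses, not the real handler.
from enum import Enum

MIN_CRED_LEN = 8  # assumed lower bound; it only needs to make 'short' (5 chars) fail


class MessageEventType(Enum):
    MAAS_REQUEST = 1   # a job / model-run request
    SESSION_INIT = 2   # an authentication request
    INVALID = 3


def parse_request_type(data: dict, session_secret: str, check_for_auth: bool = False):
    """Classify a decoded request body, returning (event_type, error_messages)."""
    errors = []
    # A job request must carry a model payload and the known session secret.
    if 'model' in data:
        if data.get('session-secret') == session_secret:
            return MessageEventType.MAAS_REQUEST, errors
        errors.append('session secret does not match')
        return MessageEventType.INVALID, errors
    # Otherwise only a session-init (authentication) request can be valid,
    # and only when the caller asked for auth fields to be checked at all.
    if check_for_auth:
        username = data.get('username')
        user_secret = data.get('user_secret')
        if username is None or user_secret is None:
            errors.append('missing username or user_secret')
        elif len(username) < MIN_CRED_LEN or len(user_secret) < MIN_CRED_LEN:
            errors.append('username or user_secret too short')
        else:
            return MessageEventType.SESSION_INIT, errors
    else:
        errors.append('request did not match any known event type')
    return MessageEventType.INVALID, errors
```

Under this sketch, sending a wrong secret with a job payload (as in test 1c) or stripping the `model` key (as in test 1d below) both collapse to `INVALID`, which is exactly what the paired documents assert.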
Test the parse_request_type method of the RequestHandler on the basic test JOB request example, but modified to be missing the model.
def test_parse_request_type_1d(self):
    self.test_job_data.pop('model')
    req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret='some_string',
                                        check_for_auth=True)
    self.assertEqual(req_type, MessageEventType.INVALID)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_parse_request_type_1b(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.MAAS_REQUEST)", "def test_parse_request_type_1a(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret=self.session_secret)\n self.assertEqual(req_type, MessageEventType.MAAS_REQUEST)", "def test_parse_request_type_1c(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret='some_string',\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def parse_request(self, request):\n request.process_inputs()", "def rest_api_request_handler(self, request_type):\n result = {}\n success_code = 0\n with self.resource_lock:\n if request_type == self.RestRequest.REST_MUTS:\n result = self.muts # Returns MUTs\n elif request_type == self.RestRequest.REST_TEST_SPEC:\n result = self.test_spec # Returns Test Specification\n elif request_type == self.RestRequest.REST_TEST_RESULTS:\n pass # Returns test results\n else:\n success_code = -1\n return json.dumps(self.get_rest_result_template(result, 'request/' + request_type, success_code), indent=4)", "def test_job_type_name(self):\n\n url = '/%s/jobs/?job_type_name=%s' % (self.api, self.job1.job_type.name)\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 1)\n self.assertEqual(result['results'][0]['job_type']['name'], self.job1.job_type.name)", "def parse_send_request(body):\n # the model and domain are colon separated.\n model_domain, = split_fields(body, 1)\n model, _ = model_domain.split(':', 1)\n # make sure the model exists\n model, _ = validate_model(model)\n if model == 'spot':\n return parse_spot_request(body)\n else:\n return parse_forecast_request(body)", "def _parser(self, request, *args, **kwargs):\n\n self.request = request\n\n # parse header\n self.header = {k[5:]: v for k, v in request.META.items() if k.startswith('HTTP_')}\n self.header['CONTENT_TYPE'] = request.META.get('CONTENT_TYPE')\n\n # parse boby\n if request.method not in ['GET', 'HEAD']:\n\n # TODO: serve other body format\n if 'multipart/form-data' in self.header['CONTENT_TYPE']:\n self.body = request.POST.dict()\n\n else:\n # default: application/json\n if self.request.body:\n try:\n self.body = json.loads(self.request.body)\n except Exception as e:\n raise Exception('parse json body error')\n \n # parse query\n self.query = request.GET.dict()\n\n # parse cookie\n self.cookie = {k: v for k, v in request.COOKIES.items()}", "def test_api_default_content_type(httpretty, new_job):\n url = 'https://salesforce/services/async/34.0/job/THEJOBID'\n httpretty.register_uri('POST', url, status=201)\n new_job.request('post', url, data=b'hi')\n assert httpretty.last_request().headers['Content-Type'] == 'application/xml; charset=UTF-8'", "def handle_request(self, request: HttpParser) -> None:\n raise NotImplementedError() # pragma: no cover", "def test_request_without_type(self):\n request = StudySubjectsRequest(self.project_name, self.environment)\n self.assertFalse(\"subjectKeyType\" in request.url_path())", "def test_api_content_type(httpretty, new_job):\n url = 'https://salesforce/services/async/34.0/job/THEJOBID'\n httpretty.register_uri('POST', url, status=201)\n new_job.request('post', url, 
data=b'hi', content_type='text/plain')\n assert httpretty.last_request().headers['Content-Type'] == 'text/plain'", "def _parse_in_request(self, request):\n error = None\n self.logger.debug(\"Http method: %s\" % request.method)\n if request.method == 'GET':\n self._params = request.args.to_dict()\n self.logger.debug(\"Request params: %s\" % self._params)\n \n elif request.method == 'POST':\n self._params = request.form.to_dict()\n self.logger.debug(\"Request params: %s\" % self._params)", "def _process_request(self, request_type, params, marker_elems=None):\r\n response = self.make_request(request_type, params, verb='POST')\r\n return self._process_response(response, marker_elems)", "def test_parse_request_type_2e(self):\n self.test_auth_data.pop('username')\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_type_arg(self, parse_input):\n with pytest.warns(SyntaxWarning, match=\"Only keyword options of the form\"):\n parse_input(\"name testname\\nversion 1.0\\ntarget example (6)\\ntype example (42)\")", "def test_right_request_class(self):\n environ = complete_environ(REQUEST_METHOD=\"GET\", PATH_INFO=\"/\")\n\n def start_response(status, response_headers):\n pass\n\n self.handler(environ, start_response)\n\n ok_(isinstance(self.handler.request, DjangoWSGIRequest))", "async def handle_request(self, request: aioweb.request.Request):", "def test_job_type_id(self):\n\n url = '/%s/jobs/?job_type_id=%s' % (self.api, self.job1.job_type.id)\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 1)\n self.assertEqual(result['results'][0]['job_type']['id'], self.job1.job_type.id)", "def dummy_request():\n return DummyRequest()", "def testBuildRequest(self):\n expected_request = Request1()\n self.protocol.decode_message(Request1,\n 'my body').AndReturn(expected_request)\n\n self.mox.ReplayAll()\n\n mapper = service_handlers.RPCMapper(['POST'],\n 'my-content-type',\n self.protocol)\n\n request = mapper.build_request(self.handler, Request1)\n\n self.assertTrue(expected_request is request)", "def testUnparseableContentType(self):\n headers = Headers({'Content-Type': ['papier-mache']})\n request = FakeRequest(headers=headers)\n resource = TestResource(None, None)\n result = yield resource.deferred_render_POST(request)\n response = loads(result)\n self.assertEqual(JSONRPC_PARSE_ERROR, response['error']['code'])\n message = 'Unparseable Content-Type header.'\n self.assertEqual(message, response['error']['message'])\n self.assertIn(message, self.log.getvalue())\n self.assertIn('<Payload empty or unparseable>', self.log.getvalue())", "def _post_request_parser(self, tagname):\n\n wpsrequest = self\n\n def parse_post_getcapabilities(doc):\n \"\"\"Parse POST GetCapabilities request\n \"\"\"\n acceptedversions = xpath_ns(\n doc, '/wps:GetCapabilities/ows:AcceptVersions/ows:Version')\n acceptedversions = ','.join(\n map(lambda v: v.text, acceptedversions))\n wpsrequest.check_accepted_versions(acceptedversions)\n\n def parse_post_describeprocess(doc):\n \"\"\"Parse POST DescribeProcess request\n \"\"\"\n\n version = doc.attrib.get('version')\n wpsrequest.check_and_set_version(version)\n\n language = doc.attrib.get('language')\n wpsrequest.check_and_set_language(language)\n\n wpsrequest.operation = 
'describeprocess'\n wpsrequest.identifiers = [identifier_el.text for identifier_el in\n xpath_ns(doc, './ows:Identifier')]\n\n def parse_post_execute(doc):\n \"\"\"Parse POST Execute request\n \"\"\"\n\n version = doc.attrib.get('version')\n wpsrequest.check_and_set_version(version)\n\n language = doc.attrib.get('language')\n wpsrequest.check_and_set_language(language)\n\n wpsrequest.operation = 'execute'\n\n identifier = xpath_ns(doc, './ows:Identifier')\n\n if not identifier:\n raise MissingParameterValue(\n 'Process identifier not set', 'Identifier')\n\n wpsrequest.identifier = identifier[0].text\n wpsrequest.lineage = 'false'\n wpsrequest.store_execute = 'false'\n wpsrequest.status = 'false'\n wpsrequest.inputs = get_inputs_from_xml(doc)\n wpsrequest.outputs = get_output_from_xml(doc)\n wpsrequest.raw = False\n if xpath_ns(doc, '/wps:Execute/wps:ResponseForm/wps:RawDataOutput'):\n wpsrequest.raw = True\n # executeResponse XML will not be stored\n wpsrequest.store_execute = 'false'\n\n # check if response document tag has been set then retrieve\n response_document = xpath_ns(\n doc, './wps:ResponseForm/wps:ResponseDocument')\n if len(response_document) > 0:\n wpsrequest.lineage = response_document[\n 0].attrib.get('lineage', 'false')\n wpsrequest.store_execute = response_document[\n 0].attrib.get('storeExecuteResponse', 'false')\n wpsrequest.status = response_document[\n 0].attrib.get('status', 'false')\n\n if tagname == WPS.GetCapabilities().tag:\n self.operation = 'getcapabilities'\n return parse_post_getcapabilities\n elif tagname == WPS.DescribeProcess().tag:\n self.operation = 'describeprocess'\n return parse_post_describeprocess\n elif tagname == WPS.Execute().tag:\n self.operation = 'execute'\n return parse_post_execute\n else:\n raise InvalidParameterValue(\n 'Unknown request %r' % tagname, 'request')", "def test_parse_request_type_2f(self):\n self.test_auth_data.pop('user_secret')\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def __init__(self, type_=None, request_id=None, timestamp=None):\n default_attr = dict(type=str(),\n request_id=str(),\n timestamp=str())\n self.type = type_\n self.request_id = request_id\n self.timestamp = timestamp\n self._set_default_attr(default_attr)", "def _parse_request(self):\n if len(self.request.body) > 0:\n try:\n return tornado.escape.json_decode(self.request.body)\n except Exception:\n #Not Json, Using Form data\n return self.request.arguments\n else:\n return self.request.arguments", "def process(self, request, **kwargs):\n\n if len(request):\n\n REST_header = \"\"\n REST_verb = \"\"\n str_path = \"\"\n json_payload = \"\"\n\n self.dp.qprint(\"Listener ID - %s: process() - handling request\" % (self.worker_id))\n\n now = datetime.datetime.today()\n str_timeStamp = now.strftime('%Y-%m-%d %H:%M:%S.%f')\n self.dp.qprint(Colors.YELLOW)\n self.dp.qprint(\"***********************************************\")\n self.dp.qprint(\"***********************************************\")\n self.dp.qprint(\"%s incoming data stream\" % (str_timeStamp) )\n self.dp.qprint(\"***********************************************\")\n self.dp.qprint(\"len = %d\" % len(request))\n self.dp.qprint(\"***********************************************\")\n self.dp.qprint(Colors.CYAN + \"%s\\n\" % (request.decode()) + Colors.YELLOW)\n self.dp.qprint(\"***********************************************\" + Colors.NO_COLOUR)\n l_raw = 
request.decode().split('\\n')\n FORMtype = l_raw[0].split('/')[0]\n\n self.dp.qprint('Request = ...')\n self.dp.qprint(l_raw)\n REST_header = l_raw[0]\n REST_verb = REST_header.split()[0]\n str_path = REST_header.split()[1]\n json_payload = l_raw[-1]\n\n # remove trailing '/' if any on path\n if str_path[-1] == '/': str_path = str_path[0:-1]\n\n d_ret = {}\n d_ret['status'] = False\n d_ret['RESTheader'] = REST_header\n d_ret['RESTverb'] = REST_verb\n d_ret['action'] = \"\"\n d_ret['path'] = str_path\n d_ret['receivedByServer'] = l_raw\n\n if REST_verb == 'GET':\n d_ret['GET'] = self.DB_get(path = str_path)\n d_ret['status'] = True\n\n self.dp.qprint('json_payload = %s' % json_payload)\n d_ret['client_json_payload'] = json_payload\n d_ret['client_json_len'] = len(json_payload)\n if len(json_payload):\n d_payload = json.loads(json_payload)\n d_request = d_payload['payload']\n payload_verb = d_request['action']\n if 'meta' in d_request.keys():\n d_meta = d_request['meta']\n d_ret['payloadsize']= len(json_payload)\n\n if payload_verb == 'quit':\n self.dp.qprint('Shutting down server...')\n d_ret['status'] = True\n\n if payload_verb == 'run' and REST_verb == 'PUT':\n d_ret['action'] = payload_verb\n self.processPUT( request = d_request)\n d_ret['status'] = True\n\n if REST_verb == 'POST':\n self.processPOST( request = d_request,\n ret = d_ret)\n return d_ret\n else:\n return False", "def parse_from_request(self, name, request):\n # type: (str, Request) -> Any\n name_bytes = name.encode()\n if name_bytes not in request.args:\n if self.default is not None:\n return self.default\n if self.required:\n raise Error(BAD_REQUEST, message=b\"%s is required\" % name_bytes)\n else:\n return None\n\n if len(request.args[name_bytes]) != 1:\n raise Error(BAD_REQUEST, message=b\"Pass exactly one argument for %s\" % name_bytes)\n\n val = request.args[name_bytes][0]\n return self.parse(val)", "def test_keyword(self):\n\n url = '/%s/job-types/?keyword=%s' % (self.api, self.job_type1.name)\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 1)\n self.assertEqual(result['results'][0]['name'], self.job_type1.name)\n\n url = '/%s/job-types/?keyword=%s' % (self.api, 'job-type')\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 6)\n\n url = '/%s/job-types/?keyword=%s' % (self.api, 'job-type-for-view-test')\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 3)\n\n url = '/%s/job-types/?keyword=%s&keyword=%s' % (self.api, 'job-type-for-view-test', self.job_type1.name)\n response = self.client.generic('GET', url)\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n result = json.loads(response.content)\n self.assertEqual(len(result['results']), 4)", "def testWholeRequest(self):\n body = self.protocol.encode_message(self.request_message)\n self.Reinitialize(input=body,\n content_type=self.content_type)\n self.factory.add_request_mapper(self.mapper())\n self.service_handler.handle('POST', '/my_service', 'method1')\n VerifyResponse(self,\n self.service_handler.response,\n '200',\n 'OK',\n 
self.protocol.encode_message(self.response_message),\n self.content_type)" ]
[ "0.6338693", "0.5977034", "0.5941005", "0.59170467", "0.583181", "0.57952213", "0.5737453", "0.57250804", "0.5709618", "0.5697139", "0.56422365", "0.55725694", "0.5562102", "0.54347223", "0.5388378", "0.53538543", "0.5335091", "0.5228585", "0.52276796", "0.52191585", "0.52041745", "0.5192156", "0.5168033", "0.5166176", "0.51335037", "0.51262414", "0.51200294", "0.5113533", "0.5109407", "0.51083004" ]
0.7518315
0
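The `_exec_parse` helper itself is also absent from the dataset. A plausible standalone reconstruction, assuming the fixture names visible in the tests (`test_job_data`, `test_auth_data`, `session_secret`) and using a compact stand-in classifier so the block runs on its own, might look like this; treat it as illustration, not the repository's actual harness.

```python
# Speculative reconstruction of the shared test harness. Fixture names are taken
# from the test methods themselves; the compact classifier below is a stand-in
# so this block runs on its own, not the project's real handler.
import unittest
from enum import Enum


class MessageEventType(Enum):
    MAAS_REQUEST = 1
    SESSION_INIT = 2
    INVALID = 3


def parse_request_type(data, session_secret, check_for_auth=False):
    # Condensed version of the fuller sketch shown after the previous record.
    if 'model' in data:
        ok = data.get('session-secret') == session_secret
        return (MessageEventType.MAAS_REQUEST if ok else MessageEventType.INVALID), []
    creds_ok = min(len(data.get('username', '')), len(data.get('user_secret', ''))) >= 8
    if check_for_auth and creds_ok:
        return MessageEventType.SESSION_INIT, []
    return MessageEventType.INVALID, ['unrecognized or malformed request']


class RequestTypeTestBase(unittest.TestCase):
    session_secret = 'a-known-session-secret'  # assumed fixture value

    def setUp(self):
        # Baseline payloads that individual tests mutate (pop keys, shorten values).
        self.test_job_data = {'model': {'name': 'example-model'}}
        self.test_auth_data = {'username': 'longenoughuser',
                               'user_secret': 'longenoughsecret'}

    def _exec_parse(self, test_source, session_secret, check_for_auth=False):
        """Build the fixture payload matching test_source and classify it."""
        if test_source is MessageEventType.MAAS_REQUEST:
            body = dict(self.test_job_data)
            body['session-secret'] = session_secret  # caller controls the secret sent
        else:
            body = dict(self.test_auth_data)
        return parse_request_type(body, self.session_secret, check_for_auth)
```

Individual tests then mutate `self.test_job_data` or `self.test_auth_data` in place before calling `_exec_parse`, which is why popping a key or shortening a value in the `setUp`-provided fixtures is enough to flip the expected result.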
Test the parse_request_type method of the RequestHandler on the basic test AUTHENTICATION request example, but with the username modified to be too short.
def test_parse_request_type_2c(self):
    self.test_auth_data['username'] = 'short'
    req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,
                                        check_for_auth=True)
    self.assertEqual(req_type, MessageEventType.INVALID)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_parse_request_type_2e(self):\n self.test_auth_data.pop('username')\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_users_username_get(self):\n pass", "def test_users_username_token_post(self):\n pass", "def test_valid_username(self):\n self.valid_username = {'user': {\n \"username\": \"1@1#%^()+\",\n \"email\": \"remmytest.com\",\n \"password\": \"Password123\"\n }}\n\n response = self.client.post(\n self.reg_url,\n self.valid_username,\n format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertIn(b\"can only contain letters, numbers, -, _\",\n response.content)", "def test_long_username(self):\r\n\r\n self.url_params['username'] = 'username' * 4\r\n response = self.client.post(self.url, self.url_params)\r\n\r\n # Status code should be 400.\r\n self.assertEqual(response.status_code, 400)\r\n\r\n obj = json.loads(response.content)\r\n self.assertEqual(\r\n obj['value'],\r\n \"Username cannot be more than 30 characters long\",\r\n )", "def test_parse_request_type_2f(self):\n self.test_auth_data.pop('user_secret')\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def api_check_username(request):\n username = request.matchdict['username']\n check_username = RepositorioUsario.check_username(request, username)\n\n return check_username", "def test_login_with_empty_username(self):\n reply = self.admin_register()\n user = dict(\n username='',\n password='Andela8'\n )\n resp = self.client.post(\n '/api/v1/login',\n content_type='application/json',\n data=json.dumps(user)\n )\n\n reply = json.loads(resp.data.decode())\n\n\n self.assertEqual(reply['message'], 'Wrong username!')\n self.assertEqual(resp.status_code, 400)", "def test_02_get_api_with_user_username(self):\n print(\"Verify get users api with username query\")\n response = self.session.typicode_get_api_with_query(self.get_users_api, 'username=' + self.search_username)\n # Validates if response code is 200\n self.assertEqual(response.status_code, 200, \"Received status code \" + str(response.status_code) +\n \" instead of 200\")\n\n # Validates if response is not empty\n self.assertNotEqual(len(response.json()), 0, \"User response is empty\")\n\n # Validates if the username key is present in response\n username_response = get_response_for_given_path(response.json(), self.search_username_key)\n self.assertNotEquals(username_response, False, \"Searched string is not present\")\n\n # Validates if the given username is present in response\n user_present = check_given_value_present_in_list(username_response, self.search_username)\n self.assertTrue(user_present, \"Username \" + self.search_username + \"is not present in the response\")", "def test_optional_public_username(self):\n\n consumer = LTIConsumerFactory(slug=\"consumer\")\n passport = LTIPassportFactory(title=\"consumer1_passport1\", consumer=consumer)\n\n user_count = get_user_model().objects.count()\n\n new_user = self._authenticate(\n {\n \"user_id\": \"3fd0ff83-a62d-4a12-9716-4d48821ae24f\",\n \"lti_message_type\": \"basic-lti-launch-request\",\n \"lti_version\": \"LTI-1p0\",\n \"resource_link_id\": \"aaa\",\n \"context_id\": \"course-v1:fooschool+authbackend+0001\",\n \"lis_person_contact_email_primary\": \"[email 
protected]\",\n },\n passport,\n )\n\n self.assertEqual(\"\", new_user.public_username)\n self.assertEqual(consumer, new_user.lti_consumer)\n self.assertEqual(\"[email protected]\", new_user.email)\n self.assertEqual(\n \"3fd0ff83-a62d-4a12-9716-4d48821ae24f@consumer\", new_user.username\n )\n self.assertEqual(user_count + 1, get_user_model().objects.count())", "def test_username_validators(self):\n name = self.form.name_for_user\n field_source = self.form.fields if name in self.form.fields else self.form.base_fields\n field = field_source.get(name, None)\n self.assertIsNotNone(field)\n expected = 2\n count_strict = expected + 1\n original_strict = getattr(self.form, 'strict_username', None)\n self.form.strict_username = False\n func = self.form.name_for_user_validators\n actual = self.validators_applied_count(field, func, field_source)\n required_not_strict = self.validators_effect_required(field, func, field_source)\n self.form.strict_username = True\n actual_strict = self.validators_applied_count(field, func, field_source)\n required_strict = self.validators_effect_required(field, func, field_source)\n\n self.assertIsNone(required_not_strict)\n self.assertEqual(expected, actual)\n self.assertIsNone(required_strict)\n self.assertEqual(count_strict, actual_strict)\n\n self.form.strict_username = original_strict\n if original_strict is None:\n del self.form.strict_username", "def auth_request(phenny, input):\n admins = phenny.config.admins\n pattern = '(' + '|'.join([re.escape(x) for x in admins]) + ')'\n matches = re.findall(pattern, input)\n for x in matches:\n phenny.msg('NickServ', 'ACC ' + x)", "def _get_auth_string(self):", "def api_auth_validate_username():\n form = request.get_json(force=True)\n if \"username\" not in form:\n raise \"username is required\"\n return jsonify(\n userProvider.validate_username(\n form['username'].encode('utf8')\n )\n )", "def test_parse_request_type_1c(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret='some_string',\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_user_username_get(self):\n response = self.client.open('//user/{username}'.format(username='username_example'),\n method='GET')\n self.assert200(response, \"Response body is : \" + response.data.decode('utf-8'))", "def getUsername(session, req):\n\tif config.AUTH_TYPE=='NONE':\n\t\treturn None\n\telif config.AUTH_TYPE=='HTTP':\n\t\treturn req.user.lower() #(may be None)\n\telif config.AUTH_TYPE=='FORM':\n\t\ttry:\n\t\t\treturn session['username']\n\t\texcept KeyError:\n\t\t\treturn None\n\telse:\n\t\traise Exception(\"sanity check\")", "def test_user_information_request(self):\n pass", "def test_username(self):\n assert_equals(self.client.username, 'testuser')", "def test_empty_username():\n expect_error(register, InputError, \"\", \"abcdef\", \"A\", \"A\", \"A\")", "def enter_username(self):", "def username(self) -> undefined.UndefinedOr[str]:", "def test_user_cannot_register_with_short_username(self):\n response = self.client.post(SIGNUP_URL,\n data=json.dumps(\n {'username': 'dan', 'email': '[email protected]', 'password': 'pass12345'}),\n content_type='application/json')\n self.assertEqual(response.status_code, 400)\n result = json.loads(response.data.decode())\n self.assertEqual(result[\"message\"],\n \"Username should be atleast 4 characters\")", "def checkusername():\n username = request.get_json()[\"username\"]\n if is_username_registered(username):\n return \"0\"\n else:\n return \"1\"", 
"def test_parse_request_type_2d(self):\n self.test_auth_data['user_secret'] = 'short'\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_long_username():\n expect_error(register, InputError,\n \"a\" * (MAX_USERNAME + 1), \"abcdef\", \"a\", \"a\", \"a\")", "def username(self) -> str:", "def username(self) -> str:", "async def authenticate(self, request: web.Request) -> Dict[str, Any]:", "def test_login_with_wrong_username(self):\n reply = self.admin_register()\n user = dict(\n username='codjoe',\n password='Andela8'\n )\n resp = self.client.post(\n '/api/v1/login',\n content_type='application/json',\n data=json.dumps(user)\n )\n\n reply = json.loads(resp.data.decode())\n\n\n self.assertEqual(reply['message'], 'Wrong username!')\n self.assertEqual(resp.status_code, 400)" ]
[ "0.6582576", "0.6092927", "0.60866356", "0.5909042", "0.58209646", "0.5802963", "0.57239425", "0.5721285", "0.56978977", "0.5697544", "0.56666356", "0.5660978", "0.56190515", "0.56169957", "0.5615925", "0.5603473", "0.55940366", "0.5578598", "0.5573344", "0.5537729", "0.5526532", "0.55215377", "0.5481615", "0.5472665", "0.54659694", "0.54433304", "0.54413146", "0.54413146", "0.5439316", "0.5417641" ]
0.65109897
1
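Records 2c and 2d differ only in which credential is shortened to `'short'`; under the reconstruction above, both would trip the same minimum-length rule. A tiny standalone demo of that rule follows (the 8-character threshold is an assumption, the real limit is not visible in this data):

```python
# Hypothetical minimum-length rule behind tests 2c/2d; MIN_CRED_LEN is assumed.
MIN_CRED_LEN = 8


def credentials_long_enough(username: str, user_secret: str) -> bool:
    """Reject any credential shorter than the assumed threshold."""
    return len(username) >= MIN_CRED_LEN and len(user_secret) >= MIN_CRED_LEN


assert not credentials_long_enough('short', 'longenoughsecret')   # mirrors test 2c
assert not credentials_long_enough('longenoughuser', 'short')     # mirrors test 2d
assert credentials_long_enough('longenoughuser', 'longenoughsecret')
```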
Test the parse_request_type method of the RequestHandler on the basic test AUTHENTICATION request example, but with the user_secret modified to be too short.
def test_parse_request_type_2d(self):
    self.test_auth_data['user_secret'] = 'short'
    req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,
                                        check_for_auth=True)
    self.assertEqual(req_type, MessageEventType.INVALID)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_parse_request_type_2f(self):\n self.test_auth_data.pop('user_secret')\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_2e(self):\n self.test_auth_data.pop('username')\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_2c(self):\n self.test_auth_data['username'] = 'short'\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "async def authenticate(self, request: web.Request) -> Dict[str, Any]:", "def test_parse_request_type_2a(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.SESSION_INIT)", "def test_parse_request_type_1c(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret='some_string',\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_2b(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=False)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def authenticate(user, request):", "def _get_auth_string(self):", "def add_auth(self, http_request):\r\n pass", "def __init__(__self__, *,\n auth_type: pulumi.Input[str],\n name: Optional[pulumi.Input[str]] = None,\n secret: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"auth_type\", 'secret')\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if secret is not None:\n pulumi.set(__self__, \"secret\", secret)", "def check_auth():", "def pyramid_request(self, pyramid_request, lti_user):\n pyramid_request.params[\"code\"] = \"test_code\"\n pyramid_request.params[\"state\"] = \"test_state\"\n pyramid_request.session[\"oauth2_csrf\"] = \"test_csrf\"\n pyramid_request.lti_user = lti_user\n return pyramid_request", "def get_auth_token(self, request: Request, type=\"Bearer\") -> str:\n if \"Authorization\" not in request.headers:\n raise AuthenticationRequiredException\n try:\n auth_type, auth_code = request.headers[\"Authorization\"].split(' ')\n assert auth_type == type\n except Exception:\n raise AuthenticationSchemeInvalidException\n return auth_code", "def get_request_authentication():\n return os.urandom(16)", "def test_parse_request_type_1b(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.MAAS_REQUEST)", "def auth_user():\n\n logging.info(request.headers)\n validate(request)", "def wrong_oauth_config_bad_auth_type(url_base):\n return {\n \"credentials\": {\n \"client_secret\": \"test_client_secret\",\n \"client_id\": \"test_client_id\",\n \"refresh_token\": \"test_refresh_token\",\n },\n \"base_url\": url_base,\n }", "def test_auth_token_valid_user(self):\n self.url = \"/auth\"\n ok_pass_user = { **self.auth_user }\n response = self.client.post(self.url, ok_pass_user)\n self.assertEqual(200, response.status_code)", "def 
_get_auth_info_for_id_or_from_request(\n sub_type=str, user=None, username=None, db_session=None\n):\n db_session = db_session or current_app.scoped_session()\n\n # set default \"anonymous\" user_id and username\n # this is fine b/c it might be public data or a client token that is not\n # linked to a user\n final_user_id = None\n if sub_type == str:\n final_user_id = sub_type(ANONYMOUS_USER_ID)\n final_username = ANONYMOUS_USERNAME\n\n token = \"\"\n try:\n if user:\n final_username = user.username\n final_user_id = sub_type(user.id)\n elif username:\n result = query_for_user(db_session, username)\n final_username = result.username\n final_user_id = sub_type(result.id)\n else:\n token = validate_request(scope={\"user\"}, audience=config.get(\"BASE_URL\"))\n set_current_token(token)\n final_user_id = current_token[\"sub\"]\n final_user_id = sub_type(final_user_id)\n final_username = current_token[\"context\"][\"user\"][\"name\"]\n except Exception as exc:\n logger.info(\n f\"could not determine user auth info from request. setting anonymous user information. Details:\\n{exc}\"\n )\n\n client_id = \"\"\n try:\n if not token:\n token = validate_request(scope=[], audience=config.get(\"BASE_URL\"))\n set_current_token(token)\n client_id = current_token.get(\"azp\") or \"\"\n except Exception as exc:\n logger.info(\n f\"could not determine client auth info from request. setting anonymous client information. Details:\\n{exc}\"\n )\n\n if (\n not config.get(\"CLIENT_CREDENTIALS_ON_DOWNLOAD_ENABLED\")\n and final_username == ANONYMOUS_USERNAME\n and client_id != \"\"\n ):\n raise Forbidden(\"This endpoint does not support client credentials tokens\")\n\n return {\n \"user_id\": final_user_id,\n \"username\": final_username,\n \"client_id\": client_id,\n }", "def identify_request(request: RequestType):\n # Private encrypted JSON payload\n try:\n data = json.loads(decode_if_bytes(request.body))\n if \"encrypted_magic_envelope\" in data:\n return True\n except Exception:\n pass\n # Public XML payload\n try:\n xml = etree.fromstring(encode_if_text(request.body))\n if xml.tag == MAGIC_ENV_TAG:\n return True\n except Exception:\n pass\n return False", "def test_valid_flow__registration(self):\n\n test_env = {\n \"testapp_authority\": self.testapp_authority,\n \"testapp_app\": self.testapp_app,\n \"extra_environ_app\": {\n \"wsgi.url_scheme\": \"https\",\n \"HTTP_HOST\": \"app.example.com\",\n },\n \"extra_environ_authority\": {\n \"wsgi.url_scheme\": \"https\",\n \"HTTP_HOST\": \"authority.example.com\",\n },\n \"requests_session_app\": requests.Session(),\n \"requests_session_authority\": requests.Session(),\n }\n\n def callback__request_token(req, test_env=test_env):\n \"\"\"/authority/oauth1/request_token is visited by the Server\n\n py3 needs the 'unicode' wrapper to decode the bystring\n \"\"\"\n assert \"Authorization\" in req.headers\n assert req.headers[\"Authorization\"].decode(\"utf-8\").startswith(\"OAuth \")\n assert \"User-Agent\" in req.headers\n assert req.headers[\"User-Agent\"].decode(\"utf-8\") == \"CustomApiClient v0\"\n assert req.url == oauth1_utils.CustomApiClient.OAUTH1_SERVER_REQUEST_TOKEN\n\n # request as SERVER, no cookies\n with IsolatedTestapp(test_env[\"testapp_authority\"]) as testapp:\n res = testapp.get(\n \"/authority/oauth1/request_token\",\n headers=req.headers,\n extra_environ=test_env[\"extra_environ_authority\"],\n status=200,\n )\n\n # status is '200 OK'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, 
res.body)\n\n def callback__authenticate_get(req, test_env=test_env):\n \"\"\"/authority/oauth1/authorize is visited by the USER\"\"\"\n assert req.url.startswith(OAUTH1__URL_AUTHORITY_AUTHENTICATE)\n qs = req.url.split(\"?\")[1]\n qs = dict(parse_qsl(qs))\n\n testapp = test_env[\"testapp_authority\"]\n res = testapp.get(\n \"/authority/oauth1/authorize?oauth_token=%s\" % qs[\"oauth_token\"],\n headers=req.headers,\n extra_environ=test_env[\"extra_environ_authority\"],\n status=200,\n )\n test_env[\"requests_session_authority\"].cookies.update(\n testapp.cookies\n ) # update the session with the cookies from the response\n\n # status is '200 OK'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, res.body)\n\n def callback__authenticate_post(req, test_env=test_env):\n \"\"\"/authority/oauth1/authorize is visited by the USER\"\"\"\n assert req.url.startswith(OAUTH1__URL_AUTHORITY_AUTHENTICATE)\n payload = dict(parse_qsl(req.body))\n\n testapp = test_env[\"testapp_authority\"]\n res = testapp.post(\n \"/authority/oauth1/authorize\",\n payload,\n headers=req.headers,\n extra_environ=test_env[\"extra_environ_authority\"],\n status=302,\n )\n test_env[\"requests_session_authority\"].cookies.update(\n testapp.cookies\n ) # update the session with the cookies from the response\n\n # status is '200 OK'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, res.body)\n\n def callback__callback(req, test_env=test_env):\n \"\"\"/application/flow-register/authorized-callback is visited by the USER\"\"\"\n _path, _qs = req.url.split(\"?\")\n\n testapp = test_env[\"testapp_app\"]\n res = testapp.get(\n \"/application/flow-register/authorized-callback?%s\" % _qs,\n headers=req.headers,\n extra_environ=test_env[\"extra_environ_app\"],\n status=303,\n )\n test_env[\"requests_session_app\"].cookies.update(\n testapp.cookies\n ) # update the session with the cookies from the response\n\n # status is '303 See Other'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, res.body)\n\n def callback__access_token(req, test_env=test_env):\n \"\"\"/authority/oauth1/access_token is visited by the Server\"\"\"\n assert \"Authorization\" in req.headers\n assert req.headers[\"Authorization\"].decode(\"utf-8\").startswith(\"OAuth \")\n assert \"User-Agent\" in req.headers\n assert req.headers[\"User-Agent\"].decode(\"utf-8\") == \"CustomApiClient v0\"\n assert req.url == oauth1_utils.CustomApiClient.OAUTH1_SERVER_ACCESS_TOKEN\n\n # request as SERVER, no cookies\n with IsolatedTestapp(test_env[\"testapp_authority\"]) as testapp:\n _headers = string_headers(\n req.headers\n ) # these can end up being unicode in tests\n res = testapp.get(\n \"/authority/oauth1/access_token\",\n headers=_headers,\n extra_environ=test_env[\"extra_environ_authority\"],\n status=200,\n )\n\n # status is '200 OK'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, res.body)\n\n def callback__callback_success(req, test_env=test_env):\n \"\"\"/application/flow-register/authorized-callback-success is visited by the USER\"\"\"\n (_path, _qs) = parse_request_simple(req)\n\n testapp = test_env[\"testapp_application\"]\n _headers = string_headers(\n req.headers\n ) # these can end up being unicode in tests\n res = testapp.get(\n \"/application/flow-register/authorized-callback-success?%s\" % _qs,\n headers=_headers,\n 
extra_environ=test_env[\"extra_environ_app\"],\n status=200,\n )\n test_env[\"requests_session_application\"].cookies.update(\n testapp.cookies\n ) # update the session with the cookies from the response\n\n # status is '200 OK'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, res.body)\n\n with responses.RequestsMock() as rsps:\n rsps.add_callback(\n responses.GET,\n oauth1_utils.CustomApiClient.OAUTH1_SERVER_REQUEST_TOKEN, # /authority/oauth1/request_token\n callback=callback__request_token,\n )\n rsps.add_callback(\n responses.GET,\n oauth1_utils.CustomApiClient.OAUTH1_SERVER_ACCESS_TOKEN, # /authority/oauth1/access_token\n callback=callback__access_token,\n )\n\n # the following were originally handled via `requests.get` but migrated to direct webtest queries\n #\n # rsps.add_callback(\n # responses.GET, OAUTH1__URL_AUTHORITY_AUTHENTICATE, # /authority/oauth1/authorize\n # callback=callback__authenticate_get,\n # )\n # rsps.add_callback(\n # responses.POST, OAUTH1__URL_AUTHORITY_AUTHENTICATE, # /authority/oauth1/authorize\n # callback=callback__authenticate_post,\n # )\n # rsps.add_callback(\n # responses.GET, oauth1_model.OAUTH1__URL_APP_FLOW_REGISTER_CALLBACK, # https://example.com/application/flow-register/authorized-callback\n # callback=callback__callback,\n # )\n # rsps.add_callback(\n # responses.GET, oauth1_model.OAUTH1__URL_APP_FLOW_REGISTER_CALLBACK_SUCCESS, # https://example.com/application/flow-register/authorized-callback-success\n # callback=callback__callback_success,\n # )\n\n #\n # actual test flow...\n #\n\n # first we need to log into the oAuth1 Authority\n # the authority is the account which will be the oAuth identity provider (e.g. Twitter)\n\n # User visit\n res = self.testapp_authority.get(\n \"/authority/account/login-form\",\n extra_environ=test_env[\"extra_environ_authority\"],\n status=200,\n )\n assert res.text == \"authority|login-form\"\n test_env[\"requests_session_authority\"].cookies.update(\n self.testapp_authority.cookies\n ) # update the session with the cookies from the response\n\n # User visit\n res = self.testapp_authority.get(\n \"/authority/account/login-submit\",\n extra_environ=test_env[\"extra_environ_authority\"],\n status=303,\n )\n test_env[\"requests_session_authority\"].cookies.update(\n self.testapp_authority.cookies\n ) # update the session with the cookies from the response\n assert (\n res.text\n == \"\"\"303 See Other\\n\\nThe resource has been moved to /authority/account/home; you should be redirected automatically.\\n\\n\"\"\"\n )\n\n # User visit\n res = self.testapp_authority.get(\n \"/authority/account/home\",\n extra_environ=test_env[\"extra_environ_authority\"],\n status=200,\n )\n test_env[\"requests_session_authority\"].cookies.update(\n self.testapp_authority.cookies\n ) # update the session with the cookies from the response\n assert (\n res.text\n == \"authority|home|user=%s\" % oauth1_model.USERID_ACTIVE__AUTHORITY\n )\n\n #\n # now we want to visit the application\n #\n\n # User visit's the application\n #\n res = self.testapp_app.get(\n \"/application/flow-register\",\n extra_environ=test_env[\"extra_environ_app\"],\n status=303,\n )\n test_env[\"requests_session_app\"].cookies.update(\n self.testapp_app.cookies\n ) # update the session with the cookies from the response\n assert (\n res.text\n == \"\"\"303 See Other\\n\\nThe resource has been moved to /application/flow-register/oauth1/start; you should be redirected automatically.\\n\\n\"\"\"\n )\n\n # User 
visit\n # however, it makes a behind the scenes visit to\n # * /authority/oauth1/request_token\n res = self.testapp_app.get(\n \"/application/flow-register/oauth1/start\",\n extra_environ=test_env[\"extra_environ_app\"],\n status=303,\n )\n test_env[\"requests_session_app\"].cookies.update(\n self.testapp_app.cookies\n ) # update the session with the cookies from the response\n assert \"Location\" in res.headers\n url_auth = res.headers[\"Location\"]\n assert res.headers[\"Location\"].startswith(\n OAUTH1__URL_AUTHORITY_AUTHENTICATE\n )\n\n # resAuthInbound = test_env['requests_session_authority'].get(url_auth)\n # then the user is redirected to the authority to approve\n qs = url_auth.split(\"?\")[1]\n url_auth_local = \"/authority/oauth1/authorize?%s\" % qs\n resAuthInbound = self.testapp_authority.get(\n url_auth_local, extra_environ=test_env[\"extra_environ_authority\"]\n )\n assert (\n '<form action=\"/authority/oauth1/authorize\" method=\"POST\" id=\"app-action-authorize\">'\n in resAuthInbound.text\n )\n csrfs = re_csrf.findall(resAuthInbound.text)\n assert len(csrfs) == 2 # submit, deny\n tokens = re_token.findall(resAuthInbound.text)\n assert len(tokens) == 2 # submit, deny\n\n payload = {\n \"csrf_\": csrfs[0],\n \"oauth_token\": tokens[0],\n \"submit\": \"authorize\",\n }\n # payload = {'csrf_': csrfs[0], 'oauth_token': tokens[0], 'submit': 'authorize', }\n\n # visited by USER: Authorize the application on the Authority\n resAuthApprove = self.testapp_authority.post(\n \"/authority/oauth1/authorize\",\n payload,\n extra_environ=test_env[\"extra_environ_authority\"],\n status=302,\n )\n test_env[\"requests_session_authority\"].cookies.update(\n self.testapp_authority.cookies\n ) # update the session with the cookies from the response\n\n # visited by USER: redirected to the callback page on the APPLICATION\n assert \"Location\" in resAuthApprove.headers\n url_callback = resAuthApprove.headers[\"Location\"]\n assert url_callback.startswith(OAUTH1__URL_APP_FLOW_REGISTER_CALLBACK)\n qs = url_callback.split(\"?\")[1]\n url_callback_local = (\n \"/application/flow-register/authorized-callback?%s\" % qs\n )\n resAuthCallback = self.testapp_app.get(\n url_callback_local,\n extra_environ=test_env[\"extra_environ_app\"],\n status=303,\n )\n\n # visited by USER: redirected to the callback-success page on the APPLICATION\n assert \"Location\" in resAuthCallback.headers\n url_callback_success = resAuthCallback.headers[\"Location\"]\n assert url_callback_success.startswith(\n OAUTH1__URL_APP_FLOW_REGISTER_CALLBACK_SUCCESS\n )\n assert len(url_callback_success.split(\"?\")) == 1\n url_callback_success_local = (\n \"/application/flow-register/authorized-callback-success\"\n )\n resAuthCallbackSuccess = self.testapp_app.get(\n url_callback_success_local,\n extra_environ=test_env[\"extra_environ_app\"],\n status=200,\n )\n assert (\n resAuthCallbackSuccess.text\n == \"application|register|authorized-callback-success|user=%s\"\n % oauth1_model.USERID_ACTIVE__APPLICATION\n )\n\n # ensure logout, just to be safe\n res = self.testapp_authority.get(\n \"/authority/account/logout\",\n extra_environ=test_env[\"extra_environ_authority\"],\n status=303,\n )\n assert (\n res.text\n == \"\"\"303 See Other\\n\\nThe resource has been moved to /authority/account/login-form; you should be redirected automatically.\\n\\n\"\"\"\n )\n\n res = self.testapp_authority.get(\n \"/authority/account/home\",\n extra_environ=test_env[\"extra_environ_authority\"],\n status=303,\n )\n assert (\n res.text\n == \"\"\"303 See 
Other\\n\\nThe resource has been moved to /authority/account/login-form; you should be redirected automatically.\\n\\n\"\"\"\n )", "def authenticator():", "def post(self):\n\n data = request.get_json()\n # data = request.data\n print(\"data: \", data)\n\n arg_parser = reqparse.RequestParser()\n arg_parser.add_argument(\n \"exp\",\n default=15552000,\n help=\"Parameter must be an integer\",\n type=int\n )\n\n args = arg_parser.parse_args()\n\n print(args)\n\n auth = request.authorization\n print(\"auth req: \", auth)\n if not auth:\n # Try extracting from POST body\n print(\"here\")\n auth = request.get_json()\n print(\"here\")\n print(\"auth: \", auth)\n if not auth or not (\"email\" in auth and \"password\" in auth):\n abort(401, \"Missing authentication credentials\")\n\n # if auth[\"is_driver\"]:\n # # if it is a driver\n # user = Driver.identify(auth[\"email\"])\n # password = auth[\"password\"]\n\n # else:\n # # If it is a restaurant\n # user = Restaurant.identify(auth[\"email\"])\n # password = auth[\"password\"]\n\n is_driver = True\n\n user = Driver.identify(auth[\"email\"])\n password = auth[\"password\"]\n\n if not user:\n user = Restaurant.identify(auth[\"email\"])\n is_driver = False\n\n if not user or not user.verify_password(password):\n current_app.logger.warn(\n \"Incorrect credentials for {} from {}\".format(\n auth[\"email\"],\n *request.access_route\n )\n )\n abort(401, \"Incorrect email or password\")\n\n access_token = user.gen_access_token(args[\"exp\"])\n\n current_app.logger.info(\"[AUTH] User {} logged IN from {}\".format(\n user.email,\n *request.access_route\n ))\n\n access_token.update({\n \"is_driver\": is_driver\n })\n\n # return resp, 200\n return access_token", "def test_parse_request_type_1d(self):\n self.test_job_data.pop('model')\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret='some_string',\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_request(self, kasserver, kasapi):\n kasserver._request(self.REQUEST_TYPE, self.REQUEST_PARAMS)\n request = {\n \"KasUser\": USERNAME,\n \"KasAuthType\": \"plain\",\n \"KasAuthData\": PASSWORD,\n \"KasRequestType\": self.REQUEST_TYPE,\n \"KasRequestParams\": self.REQUEST_PARAMS,\n }\n kasapi.assert_called_once_with(json.dumps(request))", "def test_lti20_request_handler_bad_user(self):\r\n self.setup_system_xmodule_mocks_for_lti20_request_test()\r\n self.system.get_real_user = Mock(return_value=None)\r\n mock_request = self.get_signed_lti20_mock_request(self.GOOD_JSON_PUT)\r\n response = self.xmodule.lti_2_0_result_rest_handler(mock_request, \"user/abcd\")\r\n self.assertEqual(response.status_code, 404)", "def verify_oauth_body_sign(self, request, content_type='application/x-www-form-urlencoded'):\r\n\r\n client_key, client_secret = self.get_client_key_secret()\r\n headers = {\r\n 'Authorization': unicode(request.headers.get('Authorization')),\r\n 'Content-Type': content_type,\r\n }\r\n\r\n sha1 = hashlib.sha1()\r\n sha1.update(request.body)\r\n oauth_body_hash = base64.b64encode(sha1.digest())\r\n oauth_params = signature.collect_parameters(headers=headers, exclude_oauth_signature=False)\r\n oauth_headers = dict(oauth_params)\r\n oauth_signature = oauth_headers.pop('oauth_signature')\r\n mock_request = mock.Mock(\r\n uri=unicode(urllib.unquote(request.url)),\r\n http_method=unicode(request.method),\r\n params=oauth_headers.items(),\r\n signature=oauth_signature\r\n )\r\n\r\n if oauth_body_hash != 
oauth_headers.get('oauth_body_hash'):\r\n raise LTIError(\"OAuth body hash verification is failed.\")\r\n\r\n if not signature.verify_hmac_sha1(mock_request, client_secret):\r\n raise LTIError(\"OAuth signature verification is failed.\")", "def auth_request(phenny, input):\n admins = phenny.config.admins\n pattern = '(' + '|'.join([re.escape(x) for x in admins]) + ')'\n matches = re.findall(pattern, input)\n for x in matches:\n phenny.msg('NickServ', 'ACC ' + x)", "def test_generate_and_validate_token(self):\n\n audience = 'http://www.service.teletracking.com/'\n roles = {'role': ['admin', 'user'], 'audience': audience}\n secret = 'drMemxWrLen6fCXQA5jO6gXkK/UoZVzPGRDiff7ByPU='\n token = AuthenticationHandler.generate_auth_token(roles, secret)\n decoded_token = AuthenticationHandler.validate_and_decode_token(\n token=token, key=secret,\n audience=audience\n )\n self.assertTrue(decoded_token['role'][0] == 'admin')\n self.assertTrue(decoded_token['role'][1] == 'user')" ]
[ "0.6868755", "0.6550271", "0.6417075", "0.5802363", "0.5790747", "0.56437844", "0.5623763", "0.56027114", "0.5575092", "0.55328166", "0.55166173", "0.5513767", "0.55094415", "0.5427882", "0.5422552", "0.5422503", "0.54216367", "0.5387786", "0.5385843", "0.5295982", "0.52746737", "0.5274204", "0.52432144", "0.5233139", "0.5226115", "0.52173305", "0.52145964", "0.5210772", "0.5208643", "0.5200012" ]
0.68579394
1
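The next record (2e), and its companion 2f visible among the negatives, pop a credential key entirely rather than shortening it. A presence check along these lines would yield the `INVALID` classification they assert; the field names come from the tests, while the helper itself is purely illustrative:

```python
# Hypothetical presence check behind tests 2e/2f: a session-init payload must
# carry both credential keys before any length or value validation runs.
REQUIRED_AUTH_FIELDS = ('username', 'user_secret')


def has_required_auth_fields(data: dict) -> bool:
    return all(field in data for field in REQUIRED_AUTH_FIELDS)


auth = {'username': 'longenoughuser', 'user_secret': 'longenoughsecret'}
assert has_required_auth_fields(auth)

missing_username = dict(auth)
missing_username.pop('username')          # what test 2e does to its fixture
assert not has_required_auth_fields(missing_username)
```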
Test the parse_request_type method of the RequestHandler on the basic test AUTHENTICATION request example, but modified to be missing the username.
def test_parse_request_type_2e(self):
    self.test_auth_data.pop('username')
    req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,
                                        check_for_auth=True)
    self.assertEqual(req_type, MessageEventType.INVALID)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_parse_request_type_2c(self):\n self.test_auth_data['username'] = 'short'\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_2f(self):\n self.test_auth_data.pop('user_secret')\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_1c(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret='some_string',\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_1d(self):\n self.test_job_data.pop('model')\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret='some_string',\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_2d(self):\n self.test_auth_data['user_secret'] = 'short'\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "async def authenticate(self, request: web.Request) -> Dict[str, Any]:", "def test_parse_request_type_2a(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.SESSION_INIT)", "def test_parse_request_type_1a(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret=self.session_secret)\n self.assertEqual(req_type, MessageEventType.MAAS_REQUEST)", "def test_parse_request_type_1b(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.MAAS_REQUEST)", "def test_user_information_request(self):\n pass", "def add_auth(self, http_request):\r\n pass", "def test_users_username_token_post(self):\n pass", "def _parse_in_request(self, request):\n error = None\n self.logger.debug(\"Http method: %s\" % request.method)\n if request.method == 'GET':\n self._params = request.args.to_dict()\n self.logger.debug(\"Request params: %s\" % self._params)\n \n elif request.method == 'POST':\n self._params = request.form.to_dict()\n self.logger.debug(\"Request params: %s\" % self._params)", "def process_request(self, request, *args, **kwargs):\n if not settings.GSSAPI_ENABLED_OPTION:\n return None\n unauthorized = False\n if 'HTTP_AUTHORIZATION' in request.META:\n kind, initial_client_token = request.META['HTTP_AUTHORIZATION'].split(' ', 1)\n if kind != 'Negotiate':\n unauthorized = True\n else:\n unauthorized = True\n if unauthorized:\n response = HttpResponse(request, status=401)\n response['WWW-Authenticate'] = 'Negotiate'\n return response\n return None", "def parse_login(self, response):\n return FormRequest.from_response(\n response,\n formnumber=1,\n formdata={'quick_username': 'mahan', 'quick_password': '@123456'},\n )", "def test_login_with_empty_username(self):\n reply = self.admin_register()\n user = dict(\n username='',\n password='Andela8'\n )\n resp = self.client.post(\n '/api/v1/login',\n content_type='application/json',\n data=json.dumps(user)\n )\n\n reply = 
json.loads(resp.data.decode())\n\n\n self.assertEqual(reply['message'], 'Wrong username!')\n self.assertEqual(resp.status_code, 400)", "def parse_request(self, request):\n request.process_inputs()", "def authenticate(self, request):\n return None", "def test_request(self, kasserver, kasapi):\n kasserver._request(self.REQUEST_TYPE, self.REQUEST_PARAMS)\n request = {\n \"KasUser\": USERNAME,\n \"KasAuthType\": \"plain\",\n \"KasAuthData\": PASSWORD,\n \"KasRequestType\": self.REQUEST_TYPE,\n \"KasRequestParams\": self.REQUEST_PARAMS,\n }\n kasapi.assert_called_once_with(json.dumps(request))", "def auth_user():\n\n logging.info(request.headers)\n validate(request)", "def process_request(self, request):\n request_type, data = request.request_type, request.data\n if request_type is Request.Type.LOG_IN:\n return self._log_in(data)\n elif request_type is Request.Type.LOG_OUT:\n return self._log_out()\n else:\n return self._parent.handle_request_from(self._user, request)", "def customRequestParser(data):\n if data:\n if(len(data) > 30):\n return \"REQUEST EXCEED MAX ALLOWED\"\n\n requestArray = data.split()\n if(len(requestArray[0]) != 4 or not requestArray[0].isdigit()):\n return \"BAD REQUEST 4 DIGIT MISSING\"\n \n return reverseResponse(data)\n else:\n return \"BAD REQUEST\"", "def handle_request(self, request: HttpParser) -> None:\n raise NotImplementedError() # pragma: no cover", "def test_authenticate_user_with_no_username(self):\n data = {\n 'username': '',\n 'password': 'testpassword'\n }\n response = self.client.post(self.authenticate_url, data, format='json')\n\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n self.assertEqual(len(response.data['username']), 1)", "def parse_pre_auth(self, string, origin):\r\n # Tidy up our line\r\n string = self._check_line_is_good(string)\r\n \r\n # Break up into token and body\r\n high_level_parts = string.split(None, 1)\r\n command = high_level_parts[0]\r\n if not command.isupper() and not command.isdigit():\r\n raise ProtocolError('Command not in uppercase', string)\r\n params = self._parse_params(high_level_parts[1])\r\n \r\n # If this is an invalid command, pass it upwards\r\n try:\r\n self._pass_to_handler(origin, command, params)\r\n except ParseError, error:\r\n raise ParseError(error.value, string)", "def parse_request(request):\n request_split = request.split()\n method = request_split[0]\n uri = request_split[1]\n protocol = request_split[2]\n print(\"Protocol: \" + protocol)\n headers = request_split[3]\n\n if method != \"GET\":\n raise RequestError(405, \"Method Not Allowed\")\n elif protocol != \"HTTP/1.1\":\n raise RequestError(505, \"HTTP Version Not Supported\")\n elif \"Host:\" not in headers:\n raise RequestError(400, \"Bad Request\")\n else:\n return uri", "def _get_auth_string(self):", "def __init__(__self__, *,\n password: pulumi.Input[str],\n server: pulumi.Input[str],\n type: pulumi.Input[str],\n username: pulumi.Input[str]):\n pulumi.set(__self__, \"password\", password)\n pulumi.set(__self__, \"server\", server)\n pulumi.set(__self__, \"type\", 'BasicAuth')\n pulumi.set(__self__, \"username\", username)", "def __auth(username, password, type=\"basic\"): # pylint:disable=redefined-builtin\n # TODO: Handle encrypted passwords.\n if type.lower() == \"basic\":\n return HTTPBasicAuth(username, password)\n return HTTPDigestAuth(username, password)", "def sanitize_http_request(request_info: HttpRequestInfo):\n\n print(\"*\" * 50)\n print(\"[sanitize_http_request] Implement me!\")\n print(\"*\" * 50)\n return 
request_info" ]
[ "0.6654907", "0.63609856", "0.6065126", "0.5986271", "0.5873989", "0.5833712", "0.577218", "0.5721933", "0.5668901", "0.5649751", "0.5642403", "0.55796313", "0.5571298", "0.5557869", "0.55527097", "0.54972166", "0.5489206", "0.54751295", "0.54433227", "0.5423554", "0.5412894", "0.53951514", "0.53856146", "0.53702587", "0.5365906", "0.5363146", "0.53490555", "0.53465974", "0.5338702", "0.5337467" ]
0.6986922
0
Test the parse_request_type method of the RequestHandler on the basic test AUTHENTICATION request example, but modified to be missing user_secret
def test_parse_request_type_2f(self):
    self.test_auth_data.pop('user_secret')
    req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,
                                        check_for_auth=True)
    self.assertEqual(req_type, MessageEventType.INVALID)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_parse_request_type_2e(self):\n self.test_auth_data.pop('username')\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_2d(self):\n self.test_auth_data['user_secret'] = 'short'\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_2c(self):\n self.test_auth_data['username'] = 'short'\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_1c(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret='some_string',\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_2a(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.SESSION_INIT)", "async def authenticate(self, request: web.Request) -> Dict[str, Any]:", "def test_parse_request_type_1d(self):\n self.test_job_data.pop('model')\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret='some_string',\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def test_parse_request_type_1b(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret=self.session_secret,\n check_for_auth=True)\n self.assertEqual(req_type, MessageEventType.MAAS_REQUEST)", "def test_parse_request_type_2b(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.SESSION_INIT, session_secret=self.session_secret,\n check_for_auth=False)\n self.assertEqual(req_type, MessageEventType.INVALID)", "def add_auth(self, http_request):\r\n pass", "def pyramid_request(self, pyramid_request, lti_user):\n pyramid_request.params[\"code\"] = \"test_code\"\n pyramid_request.params[\"state\"] = \"test_state\"\n pyramid_request.session[\"oauth2_csrf\"] = \"test_csrf\"\n pyramid_request.lti_user = lti_user\n return pyramid_request", "def authenticate(user, request):", "def auth_user():\n\n logging.info(request.headers)\n validate(request)", "def test_parse_request_type_1a(self):\n req_type, errors = self._exec_parse(test_source=MessageEventType.MAAS_REQUEST, session_secret=self.session_secret)\n self.assertEqual(req_type, MessageEventType.MAAS_REQUEST)", "def _parse_in_request(self, request):\n error = None\n self.logger.debug(\"Http method: %s\" % request.method)\n if request.method == 'GET':\n self._params = request.args.to_dict()\n self.logger.debug(\"Request params: %s\" % self._params)\n \n elif request.method == 'POST':\n self._params = request.form.to_dict()\n self.logger.debug(\"Request params: %s\" % self._params)", "def test_request(self, kasserver, kasapi):\n kasserver._request(self.REQUEST_TYPE, self.REQUEST_PARAMS)\n request = {\n \"KasUser\": USERNAME,\n \"KasAuthType\": \"plain\",\n \"KasAuthData\": PASSWORD,\n \"KasRequestType\": self.REQUEST_TYPE,\n \"KasRequestParams\": self.REQUEST_PARAMS,\n }\n kasapi.assert_called_once_with(json.dumps(request))", "def identify_request(request: 
RequestType):\n # Private encrypted JSON payload\n try:\n data = json.loads(decode_if_bytes(request.body))\n if \"encrypted_magic_envelope\" in data:\n return True\n except Exception:\n pass\n # Public XML payload\n try:\n xml = etree.fromstring(encode_if_text(request.body))\n if xml.tag == MAGIC_ENV_TAG:\n return True\n except Exception:\n pass\n return False", "def auth_init(self, request):\n request_class = self.server.message_factory.get_request_type(\n \"authorization_endpoint\"\n )\n logger.debug(\"Request: '%s'\" % sanitize(request))\n # Same serialization used for GET and POST\n\n try:\n areq = self.server.parse_authorization_request(query=request)\n except (MissingRequiredValue, MissingRequiredAttribute, AuthzError) as err:\n logger.debug(\"%s\" % err)\n areq = request_class()\n areq.lax = True\n if isinstance(request, dict):\n areq.from_dict(request)\n else:\n areq.deserialize(request, \"urlencoded\")\n try:\n redirect_uri = self.get_redirect_uri(areq)\n except (RedirectURIError, ParameterError, UnknownClient) as err:\n return error_response(\"invalid_request\", \"%s\" % err)\n try:\n _rtype = areq[\"response_type\"]\n except KeyError:\n _rtype = [\"code\"]\n try:\n _state = areq[\"state\"]\n except KeyError:\n _state = \"\"\n\n return redirect_authz_error(\n \"invalid_request\", redirect_uri, \"%s\" % err, _state, _rtype\n )\n except KeyError:\n areq = request_class().deserialize(request, \"urlencoded\")\n # verify the redirect_uri\n try:\n self.get_redirect_uri(areq)\n except (RedirectURIError, ParameterError) as err:\n return error_response(\"invalid_request\", \"%s\" % err)\n except Exception as err:\n message = traceback.format_exception(*sys.exc_info())\n logger.error(message)\n logger.debug(\"Bad request: %s (%s)\" % (err, err.__class__.__name__))\n error = ErrorResponse(error=\"invalid_request\", error_description=str(err))\n return BadRequest(error.to_json(), content=\"application/json\")\n\n if not areq:\n logger.debug(\"No AuthzRequest\")\n return error_response(\"invalid_request\", \"Can not parse AuthzRequest\")\n\n if isinstance(areq, Response):\n return areq\n\n areq = self.filter_request(areq)\n\n if self.events:\n self.events.store(\"Protocol request\", areq)\n\n try:\n _cinfo = self.cdb[areq[\"client_id\"]]\n except KeyError:\n logger.error(\n \"Client ID ({}) not in client database\".format(areq[\"client_id\"])\n )\n return error_response(\"unauthorized_client\", \"unknown client\")\n else:\n try:\n _registered = [set(rt.split(\" \")) for rt in _cinfo[\"response_types\"]]\n except KeyError:\n # If no response_type is registered by the client then we'll\n # code which it the default according to the OIDC spec.\n _registered = [{\"code\"}]\n\n _wanted = set(areq[\"response_type\"])\n if _wanted not in _registered:\n return error_response(\n \"invalid_request\", \"Trying to use unregistered response_typ\"\n )\n\n logger.debug(\"AuthzRequest: %s\" % (sanitize(areq.to_dict()),))\n try:\n redirect_uri = self.get_redirect_uri(areq)\n except (RedirectURIError, ParameterError, UnknownClient) as err:\n return error_response(\n \"invalid_request\", \"{}:{}\".format(err.__class__.__name__, err)\n )\n\n try:\n keyjar = self.keyjar\n except AttributeError:\n keyjar = \"\"\n\n try:\n # verify that the request message is correct\n areq.verify(keyjar=keyjar, opponent_id=areq[\"client_id\"])\n except (MissingRequiredAttribute, ValueError, MissingRequiredValue) as err:\n return redirect_authz_error(\"invalid_request\", redirect_uri, \"%s\" % err)\n\n return {\"areq\": areq, 
\"redirect_uri\": redirect_uri}", "def process_request(self, request, *args, **kwargs):\n if not settings.GSSAPI_ENABLED_OPTION:\n return None\n unauthorized = False\n if 'HTTP_AUTHORIZATION' in request.META:\n kind, initial_client_token = request.META['HTTP_AUTHORIZATION'].split(' ', 1)\n if kind != 'Negotiate':\n unauthorized = True\n else:\n unauthorized = True\n if unauthorized:\n response = HttpResponse(request, status=401)\n response['WWW-Authenticate'] = 'Negotiate'\n return response\n return None", "def check_auth():", "def authenticate(self, request):\n return None", "def __init__(__self__, *,\n auth_type: pulumi.Input[str],\n name: Optional[pulumi.Input[str]] = None,\n secret: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"auth_type\", 'secret')\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if secret is not None:\n pulumi.set(__self__, \"secret\", secret)", "def get_auth_token(self, request: Request, type=\"Bearer\") -> str:\n if \"Authorization\" not in request.headers:\n raise AuthenticationRequiredException\n try:\n auth_type, auth_code = request.headers[\"Authorization\"].split(' ')\n assert auth_type == type\n except Exception:\n raise AuthenticationSchemeInvalidException\n return auth_code", "def before():\n app.logger.info(\"Local Timestamp: {}\".format(str(datetime.now())))\n app.logger.info(\"Request Method: {}\".format(request.method))\n app.logger.info(\"Request URL: {}\".format(request.url))\n app.logger.info(\"Request Access Route: {}\".format(request.access_route[0]))\n headers = \"\"\n for (key, value) in request.headers:\n # hide authorization header from logs\n if key == \"Authorization\":\n value = \"[provided]\" \n headers += \"{}: {}\\n\".format(key, value)\n app.logger.info(\"Request Headers:{}\\n{}\\n{}\".format(\"-\"*45,str(headers)[:-1], \"-\"*60))\n body = copy.deepcopy(request.json)\n if type(body) is dict and \"password\" in body:\n body['password'] = \"[provided]\"\n app.logger.info(\"Request Body: {}\".format(body))", "def _get_auth_info_for_id_or_from_request(\n sub_type=str, user=None, username=None, db_session=None\n):\n db_session = db_session or current_app.scoped_session()\n\n # set default \"anonymous\" user_id and username\n # this is fine b/c it might be public data or a client token that is not\n # linked to a user\n final_user_id = None\n if sub_type == str:\n final_user_id = sub_type(ANONYMOUS_USER_ID)\n final_username = ANONYMOUS_USERNAME\n\n token = \"\"\n try:\n if user:\n final_username = user.username\n final_user_id = sub_type(user.id)\n elif username:\n result = query_for_user(db_session, username)\n final_username = result.username\n final_user_id = sub_type(result.id)\n else:\n token = validate_request(scope={\"user\"}, audience=config.get(\"BASE_URL\"))\n set_current_token(token)\n final_user_id = current_token[\"sub\"]\n final_user_id = sub_type(final_user_id)\n final_username = current_token[\"context\"][\"user\"][\"name\"]\n except Exception as exc:\n logger.info(\n f\"could not determine user auth info from request. setting anonymous user information. Details:\\n{exc}\"\n )\n\n client_id = \"\"\n try:\n if not token:\n token = validate_request(scope=[], audience=config.get(\"BASE_URL\"))\n set_current_token(token)\n client_id = current_token.get(\"azp\") or \"\"\n except Exception as exc:\n logger.info(\n f\"could not determine client auth info from request. setting anonymous client information. 
Details:\\n{exc}\"\n )\n\n if (\n not config.get(\"CLIENT_CREDENTIALS_ON_DOWNLOAD_ENABLED\")\n and final_username == ANONYMOUS_USERNAME\n and client_id != \"\"\n ):\n raise Forbidden(\"This endpoint does not support client credentials tokens\")\n\n return {\n \"user_id\": final_user_id,\n \"username\": final_username,\n \"client_id\": client_id,\n }", "def test_valid_flow__registration(self):\n\n test_env = {\n \"testapp_authority\": self.testapp_authority,\n \"testapp_app\": self.testapp_app,\n \"extra_environ_app\": {\n \"wsgi.url_scheme\": \"https\",\n \"HTTP_HOST\": \"app.example.com\",\n },\n \"extra_environ_authority\": {\n \"wsgi.url_scheme\": \"https\",\n \"HTTP_HOST\": \"authority.example.com\",\n },\n \"requests_session_app\": requests.Session(),\n \"requests_session_authority\": requests.Session(),\n }\n\n def callback__request_token(req, test_env=test_env):\n \"\"\"/authority/oauth1/request_token is visited by the Server\n\n py3 needs the 'unicode' wrapper to decode the bystring\n \"\"\"\n assert \"Authorization\" in req.headers\n assert req.headers[\"Authorization\"].decode(\"utf-8\").startswith(\"OAuth \")\n assert \"User-Agent\" in req.headers\n assert req.headers[\"User-Agent\"].decode(\"utf-8\") == \"CustomApiClient v0\"\n assert req.url == oauth1_utils.CustomApiClient.OAUTH1_SERVER_REQUEST_TOKEN\n\n # request as SERVER, no cookies\n with IsolatedTestapp(test_env[\"testapp_authority\"]) as testapp:\n res = testapp.get(\n \"/authority/oauth1/request_token\",\n headers=req.headers,\n extra_environ=test_env[\"extra_environ_authority\"],\n status=200,\n )\n\n # status is '200 OK'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, res.body)\n\n def callback__authenticate_get(req, test_env=test_env):\n \"\"\"/authority/oauth1/authorize is visited by the USER\"\"\"\n assert req.url.startswith(OAUTH1__URL_AUTHORITY_AUTHENTICATE)\n qs = req.url.split(\"?\")[1]\n qs = dict(parse_qsl(qs))\n\n testapp = test_env[\"testapp_authority\"]\n res = testapp.get(\n \"/authority/oauth1/authorize?oauth_token=%s\" % qs[\"oauth_token\"],\n headers=req.headers,\n extra_environ=test_env[\"extra_environ_authority\"],\n status=200,\n )\n test_env[\"requests_session_authority\"].cookies.update(\n testapp.cookies\n ) # update the session with the cookies from the response\n\n # status is '200 OK'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, res.body)\n\n def callback__authenticate_post(req, test_env=test_env):\n \"\"\"/authority/oauth1/authorize is visited by the USER\"\"\"\n assert req.url.startswith(OAUTH1__URL_AUTHORITY_AUTHENTICATE)\n payload = dict(parse_qsl(req.body))\n\n testapp = test_env[\"testapp_authority\"]\n res = testapp.post(\n \"/authority/oauth1/authorize\",\n payload,\n headers=req.headers,\n extra_environ=test_env[\"extra_environ_authority\"],\n status=302,\n )\n test_env[\"requests_session_authority\"].cookies.update(\n testapp.cookies\n ) # update the session with the cookies from the response\n\n # status is '200 OK'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, res.body)\n\n def callback__callback(req, test_env=test_env):\n \"\"\"/application/flow-register/authorized-callback is visited by the USER\"\"\"\n _path, _qs = req.url.split(\"?\")\n\n testapp = test_env[\"testapp_app\"]\n res = testapp.get(\n \"/application/flow-register/authorized-callback?%s\" % _qs,\n headers=req.headers,\n 
extra_environ=test_env[\"extra_environ_app\"],\n status=303,\n )\n test_env[\"requests_session_app\"].cookies.update(\n testapp.cookies\n ) # update the session with the cookies from the response\n\n # status is '303 See Other'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, res.body)\n\n def callback__access_token(req, test_env=test_env):\n \"\"\"/authority/oauth1/access_token is visited by the Server\"\"\"\n assert \"Authorization\" in req.headers\n assert req.headers[\"Authorization\"].decode(\"utf-8\").startswith(\"OAuth \")\n assert \"User-Agent\" in req.headers\n assert req.headers[\"User-Agent\"].decode(\"utf-8\") == \"CustomApiClient v0\"\n assert req.url == oauth1_utils.CustomApiClient.OAUTH1_SERVER_ACCESS_TOKEN\n\n # request as SERVER, no cookies\n with IsolatedTestapp(test_env[\"testapp_authority\"]) as testapp:\n _headers = string_headers(\n req.headers\n ) # these can end up being unicode in tests\n res = testapp.get(\n \"/authority/oauth1/access_token\",\n headers=_headers,\n extra_environ=test_env[\"extra_environ_authority\"],\n status=200,\n )\n\n # status is '200 OK'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, res.body)\n\n def callback__callback_success(req, test_env=test_env):\n \"\"\"/application/flow-register/authorized-callback-success is visited by the USER\"\"\"\n (_path, _qs) = parse_request_simple(req)\n\n testapp = test_env[\"testapp_application\"]\n _headers = string_headers(\n req.headers\n ) # these can end up being unicode in tests\n res = testapp.get(\n \"/application/flow-register/authorized-callback-success?%s\" % _qs,\n headers=_headers,\n extra_environ=test_env[\"extra_environ_app\"],\n status=200,\n )\n test_env[\"requests_session_application\"].cookies.update(\n testapp.cookies\n ) # update the session with the cookies from the response\n\n # status is '200 OK'\n # return in a format tailored for `requests`\n return (int(res.status.split(\" \")[0]), res.headers, res.body)\n\n with responses.RequestsMock() as rsps:\n rsps.add_callback(\n responses.GET,\n oauth1_utils.CustomApiClient.OAUTH1_SERVER_REQUEST_TOKEN, # /authority/oauth1/request_token\n callback=callback__request_token,\n )\n rsps.add_callback(\n responses.GET,\n oauth1_utils.CustomApiClient.OAUTH1_SERVER_ACCESS_TOKEN, # /authority/oauth1/access_token\n callback=callback__access_token,\n )\n\n # the following were originally handled via `requests.get` but migrated to direct webtest queries\n #\n # rsps.add_callback(\n # responses.GET, OAUTH1__URL_AUTHORITY_AUTHENTICATE, # /authority/oauth1/authorize\n # callback=callback__authenticate_get,\n # )\n # rsps.add_callback(\n # responses.POST, OAUTH1__URL_AUTHORITY_AUTHENTICATE, # /authority/oauth1/authorize\n # callback=callback__authenticate_post,\n # )\n # rsps.add_callback(\n # responses.GET, oauth1_model.OAUTH1__URL_APP_FLOW_REGISTER_CALLBACK, # https://example.com/application/flow-register/authorized-callback\n # callback=callback__callback,\n # )\n # rsps.add_callback(\n # responses.GET, oauth1_model.OAUTH1__URL_APP_FLOW_REGISTER_CALLBACK_SUCCESS, # https://example.com/application/flow-register/authorized-callback-success\n # callback=callback__callback_success,\n # )\n\n #\n # actual test flow...\n #\n\n # first we need to log into the oAuth1 Authority\n # the authority is the account which will be the oAuth identity provider (e.g. 
Twitter)\n\n # User visit\n res = self.testapp_authority.get(\n \"/authority/account/login-form\",\n extra_environ=test_env[\"extra_environ_authority\"],\n status=200,\n )\n assert res.text == \"authority|login-form\"\n test_env[\"requests_session_authority\"].cookies.update(\n self.testapp_authority.cookies\n ) # update the session with the cookies from the response\n\n # User visit\n res = self.testapp_authority.get(\n \"/authority/account/login-submit\",\n extra_environ=test_env[\"extra_environ_authority\"],\n status=303,\n )\n test_env[\"requests_session_authority\"].cookies.update(\n self.testapp_authority.cookies\n ) # update the session with the cookies from the response\n assert (\n res.text\n == \"\"\"303 See Other\\n\\nThe resource has been moved to /authority/account/home; you should be redirected automatically.\\n\\n\"\"\"\n )\n\n # User visit\n res = self.testapp_authority.get(\n \"/authority/account/home\",\n extra_environ=test_env[\"extra_environ_authority\"],\n status=200,\n )\n test_env[\"requests_session_authority\"].cookies.update(\n self.testapp_authority.cookies\n ) # update the session with the cookies from the response\n assert (\n res.text\n == \"authority|home|user=%s\" % oauth1_model.USERID_ACTIVE__AUTHORITY\n )\n\n #\n # now we want to visit the application\n #\n\n # User visit's the application\n #\n res = self.testapp_app.get(\n \"/application/flow-register\",\n extra_environ=test_env[\"extra_environ_app\"],\n status=303,\n )\n test_env[\"requests_session_app\"].cookies.update(\n self.testapp_app.cookies\n ) # update the session with the cookies from the response\n assert (\n res.text\n == \"\"\"303 See Other\\n\\nThe resource has been moved to /application/flow-register/oauth1/start; you should be redirected automatically.\\n\\n\"\"\"\n )\n\n # User visit\n # however, it makes a behind the scenes visit to\n # * /authority/oauth1/request_token\n res = self.testapp_app.get(\n \"/application/flow-register/oauth1/start\",\n extra_environ=test_env[\"extra_environ_app\"],\n status=303,\n )\n test_env[\"requests_session_app\"].cookies.update(\n self.testapp_app.cookies\n ) # update the session with the cookies from the response\n assert \"Location\" in res.headers\n url_auth = res.headers[\"Location\"]\n assert res.headers[\"Location\"].startswith(\n OAUTH1__URL_AUTHORITY_AUTHENTICATE\n )\n\n # resAuthInbound = test_env['requests_session_authority'].get(url_auth)\n # then the user is redirected to the authority to approve\n qs = url_auth.split(\"?\")[1]\n url_auth_local = \"/authority/oauth1/authorize?%s\" % qs\n resAuthInbound = self.testapp_authority.get(\n url_auth_local, extra_environ=test_env[\"extra_environ_authority\"]\n )\n assert (\n '<form action=\"/authority/oauth1/authorize\" method=\"POST\" id=\"app-action-authorize\">'\n in resAuthInbound.text\n )\n csrfs = re_csrf.findall(resAuthInbound.text)\n assert len(csrfs) == 2 # submit, deny\n tokens = re_token.findall(resAuthInbound.text)\n assert len(tokens) == 2 # submit, deny\n\n payload = {\n \"csrf_\": csrfs[0],\n \"oauth_token\": tokens[0],\n \"submit\": \"authorize\",\n }\n # payload = {'csrf_': csrfs[0], 'oauth_token': tokens[0], 'submit': 'authorize', }\n\n # visited by USER: Authorize the application on the Authority\n resAuthApprove = self.testapp_authority.post(\n \"/authority/oauth1/authorize\",\n payload,\n extra_environ=test_env[\"extra_environ_authority\"],\n status=302,\n )\n test_env[\"requests_session_authority\"].cookies.update(\n self.testapp_authority.cookies\n ) # update the session with 
the cookies from the response\n\n # visited by USER: redirected to the callback page on the APPLICATION\n assert \"Location\" in resAuthApprove.headers\n url_callback = resAuthApprove.headers[\"Location\"]\n assert url_callback.startswith(OAUTH1__URL_APP_FLOW_REGISTER_CALLBACK)\n qs = url_callback.split(\"?\")[1]\n url_callback_local = (\n \"/application/flow-register/authorized-callback?%s\" % qs\n )\n resAuthCallback = self.testapp_app.get(\n url_callback_local,\n extra_environ=test_env[\"extra_environ_app\"],\n status=303,\n )\n\n # visited by USER: redirected to the callback-success page on the APPLICATION\n assert \"Location\" in resAuthCallback.headers\n url_callback_success = resAuthCallback.headers[\"Location\"]\n assert url_callback_success.startswith(\n OAUTH1__URL_APP_FLOW_REGISTER_CALLBACK_SUCCESS\n )\n assert len(url_callback_success.split(\"?\")) == 1\n url_callback_success_local = (\n \"/application/flow-register/authorized-callback-success\"\n )\n resAuthCallbackSuccess = self.testapp_app.get(\n url_callback_success_local,\n extra_environ=test_env[\"extra_environ_app\"],\n status=200,\n )\n assert (\n resAuthCallbackSuccess.text\n == \"application|register|authorized-callback-success|user=%s\"\n % oauth1_model.USERID_ACTIVE__APPLICATION\n )\n\n # ensure logout, just to be safe\n res = self.testapp_authority.get(\n \"/authority/account/logout\",\n extra_environ=test_env[\"extra_environ_authority\"],\n status=303,\n )\n assert (\n res.text\n == \"\"\"303 See Other\\n\\nThe resource has been moved to /authority/account/login-form; you should be redirected automatically.\\n\\n\"\"\"\n )\n\n res = self.testapp_authority.get(\n \"/authority/account/home\",\n extra_environ=test_env[\"extra_environ_authority\"],\n status=303,\n )\n assert (\n res.text\n == \"\"\"303 See Other\\n\\nThe resource has been moved to /authority/account/login-form; you should be redirected automatically.\\n\\n\"\"\"\n )", "def _get_unvalidated_auth_params(self, request):\n try:\n params = parse_authz_header(request)\n except ValueError:\n params = None\n if params is None:\n return None\n if params[\"scheme\"].lower() != \"srp-hmac\":\n return None\n return params", "def test_lti20_request_handler_bad_user(self):\r\n self.setup_system_xmodule_mocks_for_lti20_request_test()\r\n self.system.get_real_user = Mock(return_value=None)\r\n mock_request = self.get_signed_lti20_mock_request(self.GOOD_JSON_PUT)\r\n response = self.xmodule.lti_2_0_result_rest_handler(mock_request, \"user/abcd\")\r\n self.assertEqual(response.status_code, 404)", "def parse_authz_header(request, *default):\n # This outer try-except catches ValueError and\n # turns it into return-default if necessary.\n try:\n # Grab the auth header from the request, if any.\n authz = request.environ.get(\"HTTP_AUTHORIZATION\")\n if authz is None:\n raise ValueError(\"Missing auth parameters\")\n scheme, kvpairs_str = authz.split(None, 1)\n # Split the parameters string into individual key=value pairs.\n # In the simple case we can just split by commas to get each pair.\n # Unfortunately this will break if one of the values contains a comma.\n # So if we find a component that isn't a well-formed key=value pair,\n # then we stitch bits back onto the end of it until it is.\n kvpairs = []\n if kvpairs_str:\n for kvpair in kvpairs_str.split(\",\"):\n if not kvpairs or _AUTH_PARAM_RE.match(kvpairs[-1]):\n kvpairs.append(kvpair)\n else:\n kvpairs[-1] = kvpairs[-1] + \",\" + kvpair\n if not _AUTH_PARAM_RE.match(kvpairs[-1]):\n raise ValueError('Malformed auth 
parameters')\n # Now we can just split by the equal-sign to get each key and value.\n params = {\"scheme\": scheme}\n for kvpair in kvpairs:\n (key, value) = kvpair.strip().split(\"=\", 1)\n # For quoted strings, remove quotes and backslash-escapes.\n if value.startswith('\"'):\n value = value[1:-1]\n if _UNESC_QUOTE_RE.search(value):\n raise ValueError(\"Unescaped quote in quoted-string\")\n value = _ESCAPED_CHAR.sub(lambda m: m.group(0)[1], value)\n params[key] = value\n return params\n except ValueError:\n if default:\n return default[0]\n raise", "def test_user_information_request(self):\n pass" ]
[ "0.68752295", "0.6823082", "0.6565441", "0.6135257", "0.61087584", "0.60270864", "0.5897579", "0.5829118", "0.5743995", "0.5722806", "0.55755186", "0.5556302", "0.55216575", "0.55189157", "0.5506603", "0.5503659", "0.54570746", "0.5452072", "0.5439107", "0.54367673", "0.54231733", "0.542021", "0.5420005", "0.5384559", "0.5377673", "0.5377472", "0.5360954", "0.53579986", "0.53506577", "0.53331804" ]
0.70758444
0
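For reference, a minimal self-contained sketch of the classification pattern the tests in this record exercise. The names here (classify_auth_request, the 8-character minimum) are assumptions inferred from the 'short' and missing-field cases in the negatives above, not taken from the real RequestHandler:

from enum import Enum

class MessageEventType(Enum):
    SESSION_INIT = 1
    MAAS_REQUEST = 2
    INVALID = 3

def classify_auth_request(data, session_secret, expected_secret, check_for_auth=True):
    """Classify a would-be SESSION_INIT payload; returns (event_type, errors)."""
    errors = []
    if not check_for_auth:
        errors.append("auth checking disabled for this source")
    if session_secret != expected_secret:
        errors.append("bad session secret")            # the 1c-style failure
    for field in ("username", "user_secret"):
        value = data.get(field)
        if value is None:
            errors.append("missing " + field)          # the 2e/2f-style failures
        elif len(value) < 8:                           # assumed threshold: 'short' fails in 2c/2d
            errors.append(field + " too short")
    if errors:
        return MessageEventType.INVALID, errors
    return MessageEventType.SESSION_INIT, errors

payload = {"username": "longenoughname"}               # 'user_secret' deliberately absent
event, errs = classify_auth_request(payload, "s3cret", "s3cret")
assert event is MessageEventType.INVALID
assert any("user_secret" in e for e in errs)

Run as-is, the final asserts mirror test_parse_request_type_2f: a payload missing user_secret is classified INVALID.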
Returns the moves for the pawn at the given location
def get_pawn_moves(self, state):
    pawn_moves = []
    if self.color == cc.WHITE_ACTIVE:
        forward_1 = add_vectors(self.coord, cc.V_UP)
        forward_2 = add_vectors(self.coord, cc.V_UP_2)
        attacks = get_crawler_moves(self.coord, cc.W_PAWN_CAPTURE_VECTORS)
        starting_rank = cc.RANK_2
        promo_rank = cc.RANK_8
        promo_pieces = cc.WHITE_PROMO
        enemy_set = cc.BLACK_PIECES
    elif self.color == cc.BLACK_ACTIVE:
        forward_1 = add_vectors(self.coord, cc.V_DOWN)
        forward_2 = add_vectors(self.coord, cc.V_DOWN_2)
        attacks = get_crawler_moves(self.coord, cc.B_PAWN_CAPTURE_VECTORS)
        starting_rank = cc.RANK_7
        promo_rank = cc.RANK_1
        promo_pieces = cc.BLACK_PROMO
        enemy_set = cc.WHITE_PIECES
    else:
        raise Exception("get_pawn_moves: Invalid Piece Color")
    if validate_move(forward_1) and state.board[forward_1] == cc.NO_PIECE:
        if forward_1[0] == promo_rank:
            for p in promo_pieces:
                pawn_moves.append(cc.Action(self.string, self.coord, forward_1, promo=p))
        else:
            pawn_moves.append(cc.Action(self.string, self.coord, forward_1))
        if self.coord[0] == starting_rank and validate_move(forward_2) and state.board[forward_2] == cc.NO_PIECE:
            pawn_moves.append(cc.Action(self.string, self.coord, forward_2, en_p=forward_1))
    for attack in attacks:
        if state.board[attack] in enemy_set:
            if attack[0] == promo_rank:
                for p in promo_pieces:
                    pawn_moves.append(cc.Action(self.string, self.coord, attack, capture=True, promo=p))
            else:
                pawn_moves.append(cc.Action(self.string, self.coord, attack, capture=True))
        # Make sure Pawns can attack en_passant squares
        elif attack == state.en_passant:
            pawn_moves.append(cc.Action(self.string, self.coord, attack, capture=True))
    return pawn_moves
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_piece_moves(self, x, y):\n\n piece = self.get_piece(x, y)\n moves = []\n\n if not piece:\n return moves\n\n if piece.name == 'rook' or piece.name == 'queen':\n direcs = ['up', 'down', 'left', 'right']\n moves = [self._get_moves_indirection(x, y, direc) for direc in\n direcs]\n\n elif piece.name == 'bishop' or piece.name == 'queen':\n direcs = ['d1', 'd2', 'd3', 'd4']\n for direc in direcs:\n moves += self._get_moves_indirection(x, y, direc)\n\n elif piece.name == 'king':\n moves = [(x-1, y-1), (x-1, y), (x-1, y+1), (x, y-1),\n (x, y+1), (x+1, y-1), (x+1, y), (x+1, y+1)]\n\n elif piece.name == 'knight':\n moves = [(x-1, y-2), (x-2, y-1), (x-2, y+1), (x-1, y+2),\n (x+1, y+2), (x+2, y+1), (x+1, y-2), (x+2, y-1)]\n\n elif piece.name == 'pawn':\n if piece.color == ChessGame.BLACK:\n moves = [(x-1, y), (x-1, y-1), (x-1, y+1)]\n else:\n moves = [(x+1, y), (x+1, y-1), (x+1, y+1)]\n\n tmp = list(moves)\n for u, v in tmp:\n if v != y and not self.is_enemy(u, v, piece.color):\n moves.remove((u, v))\n\n if v == y and self.is_enemy(u, v, piece.color):\n moves.remove((u, v))\n\n mycolor = piece.color\n valid = set()\n for (u, v) in moves:\n if not self.in_bounds(u, v):\n continue\n\n if not self.get_piece(u, v): # board is blank\n valid.add((u, v))\n\n if self.is_enemy(u, v, mycolor):\n valid.add((u, v))\n\n return valid", "def get_legal_moves(self):\n moves = []\n if self.player_locations[self.whose_turn] is None:\n return self.get_blank_locations()\n matrix = [(1,0), (-1,0), (0,1), (0,-1), (1,1), (1,-1), (-1, 1), (-1,-1)]\n\n for dx, dy in matrix:\n x,y = self.player_locations[self.whose_turn]\n while x+dx <= xdim and x+dx >= 0 and y+dy <= ydim and y+dy >= 0:\n x = x+dx\n y = y+dx\n if self.board[x][y] : break\n moves.append((x,y))\n return moves", "def command_moves(board, locations):\n possible_moves = []\n buffers = [(1,0), (0,1), (-1,0), (0,-1)]\n\n for piece in locations:\n piece_moves = []\n\n for move in buffers:\n\n poss_move = return_valid_move(board, locations, piece, move)\n\n if poss_move:\n piece_moves.append(poss_move)\n\n possible_moves.append(piece_moves)\n\n return possible_moves", "def get_moves(self):", "def get_moves(self):\n return self.piece_behavior.get_moves(self.board, self.position)", "def get_pawn_moves(self, i, j, moves):\r\n # white pawn\r\n if self.turn_white:\r\n # one&two squares moves\r\n if self.board[i - 1][j] == \"--\":\r\n moves.append(Move((i, j), (i - 1, j), self.board))\r\n if i == 6 and self.board[i - 2][j] == \"--\":\r\n moves.append(Move((i, j), (i - 2, j), self.board))\r\n # capturing\r\n # left\r\n if j >= 1:\r\n if self.board[i - 1][j - 1][0] == \"b\":\r\n moves.append(Move((i, j), (i - 1, j - 1), self.board))\r\n elif (i - 1, j - 1) == self.enpas_pos:\r\n moves.append(Move((i, j), (i - 1, j - 1), self.board, enpassant=True))\r\n\r\n # right\r\n if j <= 6:\r\n if self.board[i - 1][j + 1][0] == \"b\":\r\n moves.append(Move((i, j), (i - 1, j + 1), self.board))\r\n elif (i - 1, j + 1) == self.enpas_pos:\r\n moves.append(Move((i, j), (i - 1, j + 1), self.board, enpassant=True))\r\n # black\r\n else:\r\n # one&two squares moves\r\n if self.board[i + 1][j] == \"--\":\r\n moves.append(Move((i, j), (i + 1, j), self.board))\r\n if i == 1 and self.board[i + 2][j] == \"--\":\r\n moves.append(Move((i, j), (i + 2, j), self.board))\r\n # capturing\r\n # left\r\n if j >= 1:\r\n if self.board[i + 1][j - 1][0] == \"w\":\r\n moves.append(Move((i, j), (i + 1, j - 1), self.board))\r\n elif (i + 1, j - 1) == self.enpas_pos:\r\n moves.append(Move((i, j), (i + 1, j - 
1), self.board, enpassant=True))\r\n # right\r\n if j <= 6:\r\n if self.board[i + 1][j + 1][0] == \"w\":\r\n moves.append(Move((i, j), (i + 1, j + 1), self.board))\r\n elif (i + 1, j + 1) == self.enpas_pos:\r\n moves.append(Move((i, j), (i + 1, j + 1), self.board, enpassant=True))", "def get_moves(character):\n if character == 'player':\n x_coord = get_locations()['player'][0]\n y_coord = get_locations()['player'][1]\n elif character == 'monster':\n x_coord = get_locations()['monster'][0]\n y_coord = get_locations()['monster'][1]\n if x_coord == 1 and y_coord == 1:\n return ['S', 'D']\n elif x_coord == STATUS['grid_size'] and y_coord == STATUS['grid_size']:\n return ['W', 'A']\n elif x_coord == 1 and y_coord == STATUS['grid_size']:\n return ['W', 'D']\n elif x_coord == STATUS['grid_size'] and y_coord == 1:\n return ['S', 'A']\n elif x_coord == 1:\n return ['W', 'D', 'S']\n elif y_coord == 1:\n return ['D', 'S', 'A']\n elif x_coord == STATUS['grid_size']:\n return ['W', 'S', 'A']\n elif y_coord == STATUS['grid_size']:\n return ['W', 'A', 'D']\n else:\n return ['W', 'D', 'S', 'A']", "def openMoves(self):\n arr = []\n for y in range(0,HEIGHT):\n for x in range(0,WIDTH):\n t = self.getPawn(x,y)\n if(t!=None):\n for z in range(-1,2):\n if(self.legalMove(t,z)):\n #move , #newState\n arr.append((t,z))\n return arr", "def moves(self):\n\n moves = list()\n\n for row in range(HEIGHT):\n for col in range(WIDTH):\n\n move = (row, col)\n\n if self.board[row][col] == 9:\n moves.append(move)\n\n if self.board[row][col] == 1 or self.board[row][col] == 2:\n\n move = (row - 1, col)\n\n if self.board[row - 1][col] == 1 or self.board[row - 1][col] == 2:\n\n pass\n\n else:\n\n moves.append(move)\n\n return moves", "def findPlacesToMove():\n movesDestinations = [];\n \n curY = curBlank[0];\n curX = curBlank[1];\n\n if(curY-1 >= 1): #UP\n movesDestinations.append((curY-1, curX));\n if(curY+1 <= n): #DOWN\n movesDestinations.append((curY+1, curX));\n if(curX-1 >= 1): #LEFT\n movesDestinations.append((curY, curX-1));\n if(curX+1 <= n): #RIGHT\n movesDestinations.append((curY, curX+1));\n \n return movesDestinations;", "def get_goat_possible_moves(self) -> List:\n moves = []\n for pos in self.get_all_positions():\n if pos.is_goat():\n addr_from = pos.address\n for addr_to in pos.piece.get_valid_moves():\n moves.append((addr_from, addr_to))\n\n return moves", "def moves(self, teleport=False):\n recv = [(self._size[y][x] - self._used[y][x], x, y)\n for x in range(self.xsize) for y in range(self.ysize)]\n recv.sort(reverse=True)\n send = [(self._used[y][x], x, y)\n for x in range(self.xsize) for y in range(self.ysize)\n if self._used[y][x] > 0]\n send.sort()\n # print(\"recv: {}...\".format(str(recv[:5])))\n # print(\"send: {}...\".format(str(send[:5])))\n moves = []\n for avail, x1, y1 in recv:\n for used, x0, y0 in send:\n if avail < used:\n break\n if teleport or (x0 == x1 and abs(y0 - y1) == 1) or (\n y0 == y1 and abs(x0 - x1) == 1):\n self.apply((x0, y0), (x1, y1))\n moves.append((self.score(), self.key(), self.save(), list(self.history)))\n self.undo()\n return moves", "def get_valid_moves(self):\n if self.king:\n valid_moves = [[self.row + 1, self.col + 1],\n [self.row + 1, self.col - 1],\n [self.row - 1, self.col - 1],\n [self.row - 1, self.col + 1]]\n else:\n if self.player == 1:\n valid_moves = [[self.row + 1, self.col + 1],\n [self.row + 1, self.col - 1]]\n else:\n valid_moves = [[self.row - 1, self.col - 1],\n [self.row - 1, self.col + 1]]\n return valid_moves", "def get_moves(self):\n moves = []\n i, j = 
self._get_coordinates(0) # blank space\n\n if i > 0:\n moves.append(Puzzle(self._swap(i, j, i - 1, j))) # move up\n\n if j < self.PUZZLE_NUM_COLUMNS - 1:\n moves.append(Puzzle(self._swap(i, j, i, j + 1))) # move right\n\n if j > 0:\n moves.append(Puzzle(self._swap(i, j, i, j - 1))) # move left\n\n if i < self.PUZZLE_NUM_ROWS - 1:\n moves.append(Puzzle(self._swap(i, j, i + 1, j))) # move down\n\n return moves", "def get_all_moves(self, board, player):\n result = []\n for startx in range(8):\n for starty in range(8):\n for destx in range(8):\n for desty in range(8):\n if self.is_legal_move(board, [startx, starty], [destx, desty], player):\n result.append([[startx, starty], [destx, desty]])\n return result", "def safe_moves(p, state):\n\n x, y = state['players'][p]['x'], state['players'][p]['y']\n\n moves = []\n actions = [(1, 0, 'east'),\n (-1, 0, 'west'),\n (0, -1, 'north'),\n (0, 1, 'south')]\n for dx, dy, move in actions:\n tx, ty = str(x + dx), str(y + dy)\n if tx not in state['cells'] or ty not in state['cells'][tx]:\n moves.append(move)\n\n return moves", "def moves(self, board_state):\n # pos_moves = generate_moves(board_state) # Naive moves function here\n blacks = board_state.search_board('B')\n # Generate the possible moves required to kill the first black piece\n # on the board\n pos_moves = sorted_generate_moves_piece(board_state, blacks[0])\n return pos_moves", "def get_available_moves(self):\n available = []\n row, col = tuple(self.current_pos)\n if row - 1 >= 0 and self.maze[row - 1][col] != 'x':\n available.append('n')\n if row + 1 < len(self.maze) and self.maze[row + 1][col] != 'x':\n available.append('s')\n if col - 1 >= 0 and self.maze[row][col - 1] != 'x':\n available.append('w')\n if col + 1 < len(self.maze[row]) and self.maze[row][col + 1] != 'x':\n available.append('e')\n return available", "def _get_valid_moves(self, piece):\n moves = {}\n left = piece.col - 1 # Left position\n right = piece.col + 1 # Right position\n row = piece.row # Current row\n\n if piece.get_player() == Player.white or piece.is_king():\n # Checks the movements from the bottom to the top\n moves.update(self._traverse_left(row - 1, max(row - 3, -1), -1, piece.get_player(), left))\n moves.update(self._traverse_right(row - 1, max(row - 3, -1), -1, piece.get_player(), right))\n\n if piece.get_player() == Player.black or piece.is_king():\n # Checks the movements from the top to the bottom\n moves.update(self._traverse_left(row + 1, min(row + 3, constant.BOARD_DIMENSION),\n 1, piece.get_player(), left))\n moves.update(self._traverse_right(row + 1, min(row + 3, constant.BOARD_DIMENSION),\n 1, piece.get_player(), right))\n\n return moves", "def getPossibleMoves(self): # called to get possible positions this piece can go\r\n \r\n moves = {}\r\n\r\n ids = []\r\n\r\n for piece in self.board.pieces.values():\r\n if piece.name == \"empty\":\r\n piece.glow = False\r\n piece.ready = False\r\n\r\n self.piece = self\r\n\r\n def check(direction=\"left\", heading=\"north\", x=None, y=None):\r\n piece = self.piece\r\n if direction == \"left\": x -= 50\r\n else: x += 50\r\n\r\n if heading == \"north\": y -= 50\r\n else: y += 50\r\n\r\n if (x, y) in self.board.pieces: # position is empty\r\n empty = self.board.getPiece((x, y))\r\n empty.glow = True\r\n old, new, obj = (direction, heading), (x, y), piece\r\n identity = self.getRandomID(ids) # get an ID for the move\r\n moves[identity] = old, new, obj\r\n\r\n if piece.isKing: # piece is a king, so go on\r\n check(direction, heading, x, y)\r\n else: # its not empty, so 
check if its comrade\r\n x1, y1 = x+25, y+25\r\n piece2 = self.board.getPiece((x1, y1))\r\n try:\r\n if piece.isComrade(piece2):# piece is comrade so return\r\n return\r\n else: # piece is not comrade, so check empty\r\n if direction == \"left\": x2 = x1-25-50\r\n else: x2 = x1-25+50\r\n\r\n if heading == \"north\": y2 = y1-25-50\r\n else: y2 = y1-25+50\r\n\r\n if (x2, y2) in self.board.pieces: # its empty, so notify player\r\n empty = self.board.getPiece((x2, y2))\r\n empty.glow = True\r\n empty.ready = True\r\n\r\n old, new, obj = (direction, heading), (x2, y2), piece2\r\n identity = self.getRandomID(ids)\r\n moves[identity] = old, new, obj\r\n\r\n check(direction, heading, piece2.x-25, piece2.y-25)\r\n check(direction, heading, x2, y2)\r\n \r\n # check empty or comrade again\r\n if direction == \"left\": x3 = x2-50\r\n else: x3 = x2+50\r\n\r\n if heading == \"north\": y3 = y2-50\r\n else: y3 = y2+50\r\n\r\n if (x3, y3) in self.board.pieces: # positon(address) is empty\r\n return\r\n else: # there is a piece, so check if comrade, stop, if not comrade continue\r\n x3+=25\r\n y3+= 25\r\n\r\n piece3 = self.board.getPiece((x3, y3))\r\n if piece3.isComrade(piece2): # comrades, so stop\r\n return\r\n else: # not comrades, so continue\r\n self.piece = piece3\r\n check(direction, heading, x, y)\r\n\r\n #self.piece = piece2\r\n \r\n #check(direction, heading, x2, y2) # keep searching\r\n else: # its not empty, so return\r\n return\r\n except:\r\n pass\r\n\r\n if self.piece.name == \"white\": direction = \"north\"\r\n else: direction = \"south\"\r\n \r\n check(\"left\", direction, self.piece.x-25, self.piece.y-25)\r\n check(\"right\", direction, self.piece.x-25, self.piece.y-25)\r\n \r\n if self.piece.isKing:\r\n if self.piece.name == \"white\": heading = \"south\"\r\n else: heading = \"north\"\r\n \r\n check(\"left\", heading, self.piece.x-25, self.piece.y-25)\r\n check(\"right\", heading, self.piece.x-25, self.piece.y-25)\r\n\r\n if self.piece.name == \"white\":\r\n eatMoves = self.board.game.thinkEatMoves(moves, \"person\")\r\n if eatMoves is not None:\r\n return eatMoves\r\n\r\n return moves", "def get_moves(self, board, position):\n current_piece = board.grid[position[1]][position[0]]\n\n moves = []\n\n # For each direction in which the piece can move...\n for direction in self.directions:\n # for each vector in that direction...\n # (once a piece is encountered in a direction,\n # further positions in that direction are unaccessible,\n # therefore break out of inner FOR loop)\n for vector in direction:\n new_position = (position[0] + vector[0], position[1] + vector[1])\n\n # Check if the proposed destination is inbounds\n if board._inbounds(new_position) is False:\n break\n\n other_piece = board.grid[new_position[1]][new_position[0]]\n\n # Check if the proposed destination is occupied by a friendly piece\n if other_piece != \"empty\" and other_piece.player == current_piece.player:\n break\n\n # Check other validity conditions, mainly for pawn\n if self._is_valid_move(vector, current_piece, other_piece) is False:\n break\n\n # The destination is viable, add the move\n moves.append(Move(position, current_piece, new_position, other_piece))\n\n # If there was an enemy piece on the square\n if other_piece != \"empty\":\n break\n\n return moves", "def get_move(moves):\n pass", "def get_moves(self):\n grid = self.model.grid\n # List of agents we can't overlap with\n no_overlap = [\"wall\", \"human\", \"zombie\"]\n\n if self.agent_type == \"zombie\" or \\\n (\"AvoidingZombie\" not in self.states and 
os.environ[\"mode\"] == \"5\"):\n no_overlap.append(\"road\")\n\n # Always give the option to stay on your current location(stand still)\n all_cells = self.neighbors()\n free_cells = [self.pos]\n\n # Get rid of cells that we may not move to by iterating through all\n # cells next to the agent, and only adding non-occupied cells\n for cell in all_cells:\n cell_occupied = False\n x, y = cell.pos\n # If there are agents in the current cell, and we are not allowed\n # to overlap with any of those agents, the cell is occupied.\n # Only add cells which are not occupied.\n if not grid.is_cell_empty((x, y)):\n for agent in grid[x][y]:\n if agent.agent_type in no_overlap:\n cell_occupied = True\n break\n if not cell_occupied:\n free_cells.append((x, y))\n return free_cells", "def get_moves(self, board, player):\r\n width, height = self.board_size\r\n return self.get_moves_c(board, player, width, height)", "def api_get_moves(self):\n return self.board.moves", "def get_move_pieces(self, player):\n self.mark_moves(player)\n moves = [piece for piece in self.pieces if piece.get_state() == MOVE]\n self.clear_moves()\n return moves", "def generate_possible_moves(self):\r\n\t\t# Moves:\r\n\t\t# 0 - North\r\n\t\t# 1 - East\r\n\t\t# 2 - South\r\n\t\t# 3 - West\r\n\r\n\t\tmoves = []\r\n\r\n\t\tif self.x != 0:\r\n\t\t\tmoves.append(0)\r\n\t\tif self.y != self.n-1:\r\n\t\t\tmoves.append(1)\r\n\t\tif self.x != self.n-1:\r\n\t\t\tmoves.append(2)\r\n\t\tif self.y != 0:\r\n\t\t\tmoves.append(3)\r\n\r\n\t\treturn moves", "def get_square_moves(self, moves):\r\n return self.board.get_square_moves(moves)", "def possibleMoves(self,i,j):\n piece = self.board[i][j].piece\n if(piece.pieceCode == \"None\"):\n return []\n \n if(piece.name == \"pawn\"):\n return self.pawnMoves(piece,self.board)\n elif(piece.name == \"king\"):\n return self.kingSteps(self.board,piece.color)\n else:\n return self.pieceMoves(piece,self.board)", "def available_moves(self):\n moves = []\n for x, y in self.available_boards:\n moves.extend([self.to_position(x, y, i, j) for (i, j)\n in self.boards[x][y].empty_squares])\n return moves" ]
[ "0.7060851", "0.69786894", "0.6970851", "0.691838", "0.6903573", "0.68918544", "0.6812156", "0.6720306", "0.6709956", "0.6701223", "0.6660928", "0.66348755", "0.65792215", "0.6563159", "0.65361404", "0.6500815", "0.64763135", "0.6462871", "0.64623004", "0.6432862", "0.64326966", "0.642712", "0.6417274", "0.6406815", "0.63908744", "0.63715357", "0.63698894", "0.6306416", "0.6285597", "0.6238693" ]
0.7350092
0
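The positive document above depends on helpers this dump does not include (the cc constants, add_vectors, get_crawler_moves, validate_move). A minimal runnable sketch of the same pawn-move logic on plain (rank, file) tuples, with promotion and en passant omitted for brevity; the dict-based board encoding is an assumption, not the source's:

def pawn_moves(board, coord, color):
    """Quiet pushes and diagonal captures for the pawn at coord.
    board maps (rank, file) -> 'w', 'b' or None on an 8x8 grid."""
    direction = 1 if color == "w" else -1      # white moves up the ranks
    start_rank = 1 if color == "w" else 6
    enemy = "b" if color == "w" else "w"
    rank, file = coord
    moves = []

    one = (rank + direction, file)
    if 0 <= one[0] <= 7 and board.get(one) is None:
        moves.append(one)
        two = (rank + 2 * direction, file)
        if rank == start_rank and board.get(two) is None:  # double push only if one-step is clear
            moves.append(two)

    for df in (-1, 1):                          # diagonal captures
        target = (rank + direction, file + df)
        if 0 <= target[0] <= 7 and 0 <= target[1] <= 7 and board.get(target) == enemy:
            moves.append(target)
    return moves

board = {(2, 3): "b"}                           # one enemy piece a diagonal step away
print(pawn_moves(board, (1, 4), "w"))           # [(2, 4), (3, 4), (2, 3)]

As in the original, the double push is nested under the single push: a pawn on its starting rank may advance two squares only when the square directly ahead is empty.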
Returns the moves for the knight at the given location
def get_knight_moves(self, state):
    knight_moves = []
    if self.color == cc.WHITE_ACTIVE:
        enemy_set = cc.BLACK_PIECES
    elif self.color == cc.BLACK_ACTIVE:
        enemy_set = cc.WHITE_PIECES
    else:
        raise Exception("get_knight_moves: Invalid Knight Color")
    possible_moves = get_crawler_moves(self.coord, cc.KNIGHT_VECTORS)
    for move in possible_moves:
        if state.board[move] == cc.NO_PIECE:
            knight_moves.append(cc.Action(self.string, self.coord, move))
        elif state.board[move] in enemy_set:
            knight_moves.append(cc.Action(self.string, self.coord, move, capture=True))
    return knight_moves
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_legal_moves(self):\n moves = []\n if self.player_locations[self.whose_turn] is None:\n return self.get_blank_locations()\n matrix = [(1,0), (-1,0), (0,1), (0,-1), (1,1), (1,-1), (-1, 1), (-1,-1)]\n\n for dx, dy in matrix:\n x,y = self.player_locations[self.whose_turn]\n while x+dx <= xdim and x+dx >= 0 and y+dy <= ydim and y+dy >= 0:\n x = x+dx\n y = y+dx\n if self.board[x][y] : break\n moves.append((x,y))\n return moves", "def get_king_moves(self, i, j, moves):\r\n directions = ((1, 1), (1, -1), (1, 0), (0, -1), (0, 1), (-1, -1), (-1, 1), (-1, 0))\r\n if self.turn_white:\r\n oponent = 'b'\r\n else:\r\n oponent = 'w'\r\n for d in directions:\r\n cri = i + d[0]\r\n crj = j + d[1]\r\n if 0 <= cri <= 7 and 0 <= crj <= 7:\r\n # empty\r\n if self.board[cri][crj][0] == '-':\r\n moves.append(Move((i, j), (cri, crj), self.board))\r\n\r\n # oponent\r\n elif self.board[cri][crj][0] == oponent:\r\n moves.append(Move((i, j), (cri, crj), self.board))", "def moves(self):\n\n moves = list()\n\n for row in range(HEIGHT):\n for col in range(WIDTH):\n\n move = (row, col)\n\n if self.board[row][col] == 9:\n moves.append(move)\n\n if self.board[row][col] == 1 or self.board[row][col] == 2:\n\n move = (row - 1, col)\n\n if self.board[row - 1][col] == 1 or self.board[row - 1][col] == 2:\n\n pass\n\n else:\n\n moves.append(move)\n\n return moves", "def get_moves(character):\n if character == 'player':\n x_coord = get_locations()['player'][0]\n y_coord = get_locations()['player'][1]\n elif character == 'monster':\n x_coord = get_locations()['monster'][0]\n y_coord = get_locations()['monster'][1]\n if x_coord == 1 and y_coord == 1:\n return ['S', 'D']\n elif x_coord == STATUS['grid_size'] and y_coord == STATUS['grid_size']:\n return ['W', 'A']\n elif x_coord == 1 and y_coord == STATUS['grid_size']:\n return ['W', 'D']\n elif x_coord == STATUS['grid_size'] and y_coord == 1:\n return ['S', 'A']\n elif x_coord == 1:\n return ['W', 'D', 'S']\n elif y_coord == 1:\n return ['D', 'S', 'A']\n elif x_coord == STATUS['grid_size']:\n return ['W', 'S', 'A']\n elif y_coord == STATUS['grid_size']:\n return ['W', 'A', 'D']\n else:\n return ['W', 'D', 'S', 'A']", "def knightMovesForNTimes(n, x0, y0):\n x_init = x0\n y_init = y0\n # last position of the knight.\n # This is needed to tackle situations when the knight moves out of the chessboard.\n x_last = x0\n y_last = y0\n for i in range(n):\n # last position of the knight.\n # This is needed to tackle situations when the knight moves out of the chessboard.\n #x_last = x0\n #y_last = y0\n # randomly select any of the possible 8 moves of the knight\n move_dir = randint(1, 8)\n x, y = getMovePosition(move_dir, x_last, y_last)\n # update the last position of the knight\n x_last = x\n y_last = y\n #if x != -100 or y != -100:\n # when the knight is inside the chessboard\n # x_last = x\n # y_last = y\n # if the knight moves out of the chessboard, then the move is invalid\n # make a new move valid move\n #if x == -100 and y == -100:\n #else:\n # print(\"The knight moves out of the chessboard.\")\n # x, y = getMovePosition(move_dir, x_last, y_last)\n #break\n #i = i -1\n print(\"The knight moves to position ({} {}) after move# {}\\n\".format(x, y, i+1))\n\n return x, y", "def get_moves(self):", "def findPlacesToMove():\n movesDestinations = [];\n \n curY = curBlank[0];\n curX = curBlank[1];\n\n if(curY-1 >= 1): #UP\n movesDestinations.append((curY-1, curX));\n if(curY+1 <= n): #DOWN\n movesDestinations.append((curY+1, curX));\n if(curX-1 >= 1): #LEFT\n 
movesDestinations.append((curY, curX-1));\n if(curX+1 <= n): #RIGHT\n movesDestinations.append((curY, curX+1));\n \n return movesDestinations;", "def command_moves(board, locations):\n possible_moves = []\n buffers = [(1,0), (0,1), (-1,0), (0,-1)]\n\n for piece in locations:\n piece_moves = []\n\n for move in buffers:\n\n poss_move = return_valid_move(board, locations, piece, move)\n\n if poss_move:\n piece_moves.append(poss_move)\n\n possible_moves.append(piece_moves)\n\n return possible_moves", "def _get_piece_moves(self, x, y):\n\n piece = self.get_piece(x, y)\n moves = []\n\n if not piece:\n return moves\n\n if piece.name == 'rook' or piece.name == 'queen':\n direcs = ['up', 'down', 'left', 'right']\n moves = [self._get_moves_indirection(x, y, direc) for direc in\n direcs]\n\n elif piece.name == 'bishop' or piece.name == 'queen':\n direcs = ['d1', 'd2', 'd3', 'd4']\n for direc in direcs:\n moves += self._get_moves_indirection(x, y, direc)\n\n elif piece.name == 'king':\n moves = [(x-1, y-1), (x-1, y), (x-1, y+1), (x, y-1),\n (x, y+1), (x+1, y-1), (x+1, y), (x+1, y+1)]\n\n elif piece.name == 'knight':\n moves = [(x-1, y-2), (x-2, y-1), (x-2, y+1), (x-1, y+2),\n (x+1, y+2), (x+2, y+1), (x+1, y-2), (x+2, y-1)]\n\n elif piece.name == 'pawn':\n if piece.color == ChessGame.BLACK:\n moves = [(x-1, y), (x-1, y-1), (x-1, y+1)]\n else:\n moves = [(x+1, y), (x+1, y-1), (x+1, y+1)]\n\n tmp = list(moves)\n for u, v in tmp:\n if v != y and not self.is_enemy(u, v, piece.color):\n moves.remove((u, v))\n\n if v == y and self.is_enemy(u, v, piece.color):\n moves.remove((u, v))\n\n mycolor = piece.color\n valid = set()\n for (u, v) in moves:\n if not self.in_bounds(u, v):\n continue\n\n if not self.get_piece(u, v): # board is blank\n valid.add((u, v))\n\n if self.is_enemy(u, v, mycolor):\n valid.add((u, v))\n\n return valid", "def get_knight_moves(self, i, j, moves):\r\n directions = ((2, 1), (2, -1), (1, 2), (1, -2), (-2, 1), (-2, -1), (-1, 2), (-1, -2))\r\n if self.turn_white:\r\n opon = 'b'\r\n else:\r\n opon = 'w'\r\n for d in directions:\r\n cri = i + d[0]\r\n crj = j + d[1]\r\n if 0 <= cri <= 7 and 0 <= crj <= 7:\r\n # empty\r\n if self.board[cri][crj][0] == '-':\r\n moves.append(Move((i, j), (cri, crj), self.board))\r\n\r\n # oponent\r\n elif self.board[cri][crj][0] == opon:\r\n moves.append(Move((i, j), (cri, crj), self.board))", "def _get_valid_moves(self, piece):\n moves = {}\n left = piece.col - 1 # Left position\n right = piece.col + 1 # Right position\n row = piece.row # Current row\n\n if piece.get_player() == Player.white or piece.is_king():\n # Checks the movements from the bottom to the top\n moves.update(self._traverse_left(row - 1, max(row - 3, -1), -1, piece.get_player(), left))\n moves.update(self._traverse_right(row - 1, max(row - 3, -1), -1, piece.get_player(), right))\n\n if piece.get_player() == Player.black or piece.is_king():\n # Checks the movements from the top to the bottom\n moves.update(self._traverse_left(row + 1, min(row + 3, constant.BOARD_DIMENSION),\n 1, piece.get_player(), left))\n moves.update(self._traverse_right(row + 1, min(row + 3, constant.BOARD_DIMENSION),\n 1, piece.get_player(), right))\n\n return moves", "def get_possible_moves(self, board: np.ndarray):\n board_size = board.shape[0]\n moves = []\n if abs(self.value) == 1:\n if self.start_row <= 2:\n directions = [np.array((1, -1)), np.array((1, 1))]\n else:\n directions = [np.array((-1, 1)), np.array((-1, -1))]\n else:\n directions = [np.array((-1, 1)), np.array((1, 1)), np.array((-1, -1)), np.array((1, 
-1))]\n for direction in directions:\n within_board = True\n i = 1\n while within_board:\n coord = self.coord + direction * i\n within_board = _check_if_position_on_board(coord, board_size)\n # break if first step is already out of board\n if not within_board:\n break\n value_board = board[coord[0], coord[1]]\n # break if there is a stone of them same player in the way\n if value_board < 0 and self.value < 0 or value_board > 0 and self.value > 0:\n break\n # if there is no stone, than add this to move list.\n if value_board == 0:\n moves += [{\"old_coord\": self.coord, \"new_coord\": coord, \"jumped_stones\": [], \"jumped_values\": 0,\n \"move_coords\": [coord]}]\n # if there is a stone of the enemy\n if (value_board < 0 < self.value) or (self.value < 0 < value_board):\n # check if it can be jumped\n coord_jump = coord + direction\n move_coords = [coord_jump.copy()]\n within_board_after_jump = _check_if_position_on_board(coord_jump, board_size)\n # break if place behind stone is out of border\n if not within_board_after_jump:\n break\n value_board_jump = board[coord_jump[0], coord_jump[1]]\n jumped_stones = []\n # break if there is no free place\n if value_board_jump != 0:\n break\n jumped_stones += [coord]\n moves_tmp = self.jump_chain(directions, board, coord_jump, value_board, jumped_stones, move_coords)\n if len(moves_tmp) > 0:\n moves += moves_tmp\n else:\n moves += [{\"old_coord\": self.coord, \"new_coord\": coord_jump, \"jumped_stones\": jumped_stones,\n \"jumped_values\": abs(value_board), \"move_coords\": [coord_jump]}]\n i += 1\n # break if normal stone, because they can only move one field\n if abs(self.value) == 1:\n break\n return moves", "def get_king_moves(self, state):\n #king_moves = []\n possible_moves = []\n if self.color == cc.WHITE_ACTIVE:\n enemy_color = cc.BLACK_ACTIVE\n enemy_pieces = cc.BLACK_PIECES\n elif self.color == cc.BLACK_ACTIVE:\n enemy_color = cc.WHITE_ACTIVE\n enemy_pieces = cc.WHITE_PIECES\n else:\n raise Exception(\"GameState: Invalid Active Color\")\n\n for vector in cc.KING_VECTORS:\n rank = self.coord[0] + vector[0]\n column = self.coord[1] + vector[1]\n if rank in cc.VALID_RANKS and column in cc.VALID_RANKS:\n if state.board[rank, column] == cc.NO_PIECE:\n possible_moves.append(cc.Action(self.string, self.coord, (rank, column)))\n elif state.board[rank, column] in enemy_pieces:\n possible_moves.append(cc.Action(self.string, self.coord, (rank, column), capture=True))\n \n # # Iterate over list of king moves, removing ones that are under attack\n # for move in possible_moves:\n # if not check.space_under_attack(state, move.end, enemy_color):\n # king_moves.append(move)\n\n return possible_moves", "def get_valid_moves(self):\n if self.king:\n valid_moves = [[self.row + 1, self.col + 1],\n [self.row + 1, self.col - 1],\n [self.row - 1, self.col - 1],\n [self.row - 1, self.col + 1]]\n else:\n if self.player == 1:\n valid_moves = [[self.row + 1, self.col + 1],\n [self.row + 1, self.col - 1]]\n else:\n valid_moves = [[self.row - 1, self.col - 1],\n [self.row - 1, self.col + 1]]\n return valid_moves", "def get_moves(self):\n return self.piece_behavior.get_moves(self.board, self.position)", "def moves(self):\n move_list = []\n for direction in Maze.possible_directions:\n move = Maze.dirs_to_moves[direction]\n if (0 <= (self.location[0]+move[0]) < len(self.grid) and\n 0 <= (self.location[1]+move[1]) < len(self.grid[0]) and\n self.grid[self.location[0]+move[0]][self.location[1]+move[1]] != 'X'):\n move_list.append(move)\n\n return move_list", "def 
get_goat_possible_moves(self) -> List:\n moves = []\n for pos in self.get_all_positions():\n if pos.is_goat():\n addr_from = pos.address\n for addr_to in pos.piece.get_valid_moves():\n moves.append((addr_from, addr_to))\n\n return moves", "def get_all_moves(self, board, player):\n result = []\n for startx in range(8):\n for starty in range(8):\n for destx in range(8):\n for desty in range(8):\n if self.is_legal_move(board, [startx, starty], [destx, desty], player):\n result.append([[startx, starty], [destx, desty]])\n return result", "def get_moves(self, board, player):\r\n width, height = self.board_size\r\n return self.get_moves_c(board, player, width, height)", "def api_get_moves(self):\n return self.board.moves", "def get_available_moves(self):\n available = []\n row, col = tuple(self.current_pos)\n if row - 1 >= 0 and self.maze[row - 1][col] != 'x':\n available.append('n')\n if row + 1 < len(self.maze) and self.maze[row + 1][col] != 'x':\n available.append('s')\n if col - 1 >= 0 and self.maze[row][col - 1] != 'x':\n available.append('w')\n if col + 1 < len(self.maze[row]) and self.maze[row][col + 1] != 'x':\n available.append('e')\n return available", "def moves(self, board_state):\n # pos_moves = generate_moves(board_state) # Naive moves function here\n blacks = board_state.search_board('B')\n # Generate the possible moves required to kill the first black piece\n # on the board\n pos_moves = sorted_generate_moves_piece(board_state, blacks[0])\n return pos_moves", "def generate_possible_moves(self):\r\n\t\t# Moves:\r\n\t\t# 0 - North\r\n\t\t# 1 - East\r\n\t\t# 2 - South\r\n\t\t# 3 - West\r\n\r\n\t\tmoves = []\r\n\r\n\t\tif self.x != 0:\r\n\t\t\tmoves.append(0)\r\n\t\tif self.y != self.n-1:\r\n\t\t\tmoves.append(1)\r\n\t\tif self.x != self.n-1:\r\n\t\t\tmoves.append(2)\r\n\t\tif self.y != 0:\r\n\t\t\tmoves.append(3)\r\n\r\n\t\treturn moves", "def get_square_moves(self, moves):\r\n return self.board.get_square_moves(moves)", "def test_find_knight_next_moves(self):\n result = _find_knight_next_moves('d4')\n self.assertEqual(['b3', 'b5', 'c2', 'c6', 'e2', 'e6', 'f3', 'f5'],\n result\n )", "def test_find_knight_next_moves_limit_of_board(self):\n result = _find_knight_next_moves('h1')\n self.assertEqual(['f2', 'g3'], result)", "def test_find_knight_move_for_next_2_turns(self):\n result = find_knight_moves_for_the_next_two_turns('h1')\n self.assertEqual(['d3', 'e2', 'e4', 'f2', 'f5', 'g3', 'g4', 'h3', 'h5'], result)", "def get_moves(self, board, position):\n current_piece = board.grid[position[1]][position[0]]\n\n moves = []\n\n # For each direction in which the piece can move...\n for direction in self.directions:\n # for each vector in that direction...\n # (once a piece is encountered in a direction,\n # further positions in that direction are unaccessible,\n # therefore break out of inner FOR loop)\n for vector in direction:\n new_position = (position[0] + vector[0], position[1] + vector[1])\n\n # Check if the proposed destination is inbounds\n if board._inbounds(new_position) is False:\n break\n\n other_piece = board.grid[new_position[1]][new_position[0]]\n\n # Check if the proposed destination is occupied by a friendly piece\n if other_piece != \"empty\" and other_piece.player == current_piece.player:\n break\n\n # Check other validity conditions, mainly for pawn\n if self._is_valid_move(vector, current_piece, other_piece) is False:\n break\n\n # The destination is viable, add the move\n moves.append(Move(position, current_piece, new_position, other_piece))\n\n # If there was an enemy 
piece on the square\n if other_piece != \"empty\":\n break\n\n return moves", "def possibleMoves(self,i,j):\n piece = self.board[i][j].piece\n if(piece.pieceCode == \"None\"):\n return []\n \n if(piece.name == \"pawn\"):\n return self.pawnMoves(piece,self.board)\n elif(piece.name == \"king\"):\n return self.kingSteps(self.board,piece.color)\n else:\n return self.pieceMoves(piece,self.board)", "def piece_moves(self, pos, line_movemant, diagonal_movemant, distance):\r\n enemy = (\"white\" if self.color == \"black\" else \"black\")\r\n board = self.board\r\n moves = []\r\n unchecked_moves = ()\r\n line = ((0, 1), (1, 0), (-1, 0), (0, -1))\r\n diag = ((1, 1), (-1, 1), (1, -1), (-1, -1))\r\n starting_pos = board.num_notation(pos.upper())\r\n if line_movemant and diagonal_movemant:\r\n unchecked_moves = line + diag\r\n elif diagonal_movemant:\r\n unchecked_moves = diag\r\n elif line_movemant:\r\n unchecked_moves = line\r\n for (x, y) in unchecked_moves:\r\n for single_move in range(1, distance + 1):\r\n \"\"\"iterating over all possible moves in a specific line/diagonal \"\"\"\r\n destination = starting_pos[0] + single_move * x, starting_pos[1] + single_move * y\r\n if board.alpha_notation(destination) not in board.occupied(self.color) and board.in_board(destination):\r\n moves.append(destination)\r\n if board.alpha_notation(destination) in board.occupied(enemy):\r\n break\r\n else:\r\n break\r\n if self.name == \"K\":\r\n moves = moves + self.castle(pos)\r\n return list(map(board.alpha_notation, moves))\r\n # instead of iterating over moves and using alpha_notation on its values we can use map\r" ]
[ "0.6674116", "0.64829564", "0.64223623", "0.6395059", "0.6389674", "0.6372455", "0.6314743", "0.6305518", "0.62892103", "0.6274054", "0.6225394", "0.6195604", "0.6182217", "0.6175854", "0.6158496", "0.61548793", "0.61211133", "0.61066663", "0.6094043", "0.6078156", "0.6041899", "0.59820443", "0.5981855", "0.59757465", "0.594252", "0.5913163", "0.5894555", "0.58929276", "0.58861697", "0.58521956" ]
0.6953485
0
Returns the moves for the king at the given location. Makes sure that the King doesn't put himself in check.
def get_king_moves(self, state): #king_moves = [] possible_moves = [] if self.color == cc.WHITE_ACTIVE: enemy_color = cc.BLACK_ACTIVE enemy_pieces = cc.BLACK_PIECES elif self.color == cc.BLACK_ACTIVE: enemy_color = cc.WHITE_ACTIVE enemy_pieces = cc.WHITE_PIECES else: raise Exception("GameState: Invalid Active Color") for vector in cc.KING_VECTORS: rank = self.coord[0] + vector[0] column = self.coord[1] + vector[1] if rank in cc.VALID_RANKS and column in cc.VALID_RANKS: if state.board[rank, column] == cc.NO_PIECE: possible_moves.append(cc.Action(self.string, self.coord, (rank, column))) elif state.board[rank, column] in enemy_pieces: possible_moves.append(cc.Action(self.string, self.coord, (rank, column), capture=True)) # # Iterate over list of king moves, removing ones that are under attack # for move in possible_moves: # if not check.space_under_attack(state, move.end, enemy_color): # king_moves.append(move) return possible_moves
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_valid_moves(self):\n if self.king:\n valid_moves = [[self.row + 1, self.col + 1],\n [self.row + 1, self.col - 1],\n [self.row - 1, self.col - 1],\n [self.row - 1, self.col + 1]]\n else:\n if self.player == 1:\n valid_moves = [[self.row + 1, self.col + 1],\n [self.row + 1, self.col - 1]]\n else:\n valid_moves = [[self.row - 1, self.col - 1],\n [self.row - 1, self.col + 1]]\n return valid_moves", "def _get_valid_moves(self, piece):\n moves = {}\n left = piece.col - 1 # Left position\n right = piece.col + 1 # Right position\n row = piece.row # Current row\n\n if piece.get_player() == Player.white or piece.is_king():\n # Checks the movements from the bottom to the top\n moves.update(self._traverse_left(row - 1, max(row - 3, -1), -1, piece.get_player(), left))\n moves.update(self._traverse_right(row - 1, max(row - 3, -1), -1, piece.get_player(), right))\n\n if piece.get_player() == Player.black or piece.is_king():\n # Checks the movements from the top to the bottom\n moves.update(self._traverse_left(row + 1, min(row + 3, constant.BOARD_DIMENSION),\n 1, piece.get_player(), left))\n moves.update(self._traverse_right(row + 1, min(row + 3, constant.BOARD_DIMENSION),\n 1, piece.get_player(), right))\n\n return moves", "def _get_piece_moves(self, x, y):\n\n piece = self.get_piece(x, y)\n moves = []\n\n if not piece:\n return moves\n\n if piece.name == 'rook' or piece.name == 'queen':\n direcs = ['up', 'down', 'left', 'right']\n moves = [self._get_moves_indirection(x, y, direc) for direc in\n direcs]\n\n elif piece.name == 'bishop' or piece.name == 'queen':\n direcs = ['d1', 'd2', 'd3', 'd4']\n for direc in direcs:\n moves += self._get_moves_indirection(x, y, direc)\n\n elif piece.name == 'king':\n moves = [(x-1, y-1), (x-1, y), (x-1, y+1), (x, y-1),\n (x, y+1), (x+1, y-1), (x+1, y), (x+1, y+1)]\n\n elif piece.name == 'knight':\n moves = [(x-1, y-2), (x-2, y-1), (x-2, y+1), (x-1, y+2),\n (x+1, y+2), (x+2, y+1), (x+1, y-2), (x+2, y-1)]\n\n elif piece.name == 'pawn':\n if piece.color == ChessGame.BLACK:\n moves = [(x-1, y), (x-1, y-1), (x-1, y+1)]\n else:\n moves = [(x+1, y), (x+1, y-1), (x+1, y+1)]\n\n tmp = list(moves)\n for u, v in tmp:\n if v != y and not self.is_enemy(u, v, piece.color):\n moves.remove((u, v))\n\n if v == y and self.is_enemy(u, v, piece.color):\n moves.remove((u, v))\n\n mycolor = piece.color\n valid = set()\n for (u, v) in moves:\n if not self.in_bounds(u, v):\n continue\n\n if not self.get_piece(u, v): # board is blank\n valid.add((u, v))\n\n if self.is_enemy(u, v, mycolor):\n valid.add((u, v))\n\n return valid", "def king_adjust(self, turn):\n\n opposite_turn = next_turn(turn)\n\n original_location_index = (piece_class.KING_LOCATION[turn][0] + piece_class.KING_LOCATION[turn][1] * 8)\n \n# if self.board[original_location_index] == self.empty:\n# print(\"yo\")\n \n self.board[original_location_index].possible_moves = [i for i in self.board[original_location_index].possible_moves if i not in self.long_dict[opposite_turn]]\n \n\n\n #king_path = [i for i in self.board[original_location_index].possible_moves if i not in self.path_dict[opposite_turn]]\n\n #removes moving into check from king path. 
Can remove this functionality from the move method now.\n #self.board[original_location_index].possible_moves = king_path\n #king_path_index = [(i[0] + i[1]*8) for i in self.board[original_location_index].possible_moves]\n\n \n #for i in king_path:\n # if i in self.long_dict[opposite_turn] or self.check_dict[opposite_turn]:\n # print(\"king path is: \", king_path)\n # king_path.remove(i)\n # print(\"king path is now: \", king_path)\n \n\n\n\n\n #for i in king_path_index:\n # enemy_piece = self.board[i]\n # self.board[i] = self.board[original_location_index]\n # self.board[original_location_index] = self.empty\n # self.loads_pathways(turn)\n # if self.coords[i] in self.path_dict[opposite_turn]:\n # print(\"yo\")\n \n #self.board[original_location_index] = self.board[i]\n #self.board[i] = enemy_piece", "def get_king_moves(self, i, j, moves):\r\n directions = ((1, 1), (1, -1), (1, 0), (0, -1), (0, 1), (-1, -1), (-1, 1), (-1, 0))\r\n if self.turn_white:\r\n oponent = 'b'\r\n else:\r\n oponent = 'w'\r\n for d in directions:\r\n cri = i + d[0]\r\n crj = j + d[1]\r\n if 0 <= cri <= 7 and 0 <= crj <= 7:\r\n # empty\r\n if self.board[cri][crj][0] == '-':\r\n moves.append(Move((i, j), (cri, crj), self.board))\r\n\r\n # oponent\r\n elif self.board[cri][crj][0] == oponent:\r\n moves.append(Move((i, j), (cri, crj), self.board))", "def get_legal_moves(self):\n moves = []\n if self.player_locations[self.whose_turn] is None:\n return self.get_blank_locations()\n matrix = [(1,0), (-1,0), (0,1), (0,-1), (1,1), (1,-1), (-1, 1), (-1,-1)]\n\n for dx, dy in matrix:\n x,y = self.player_locations[self.whose_turn]\n while x+dx <= xdim and x+dx >= 0 and y+dy <= ydim and y+dy >= 0:\n x = x+dx\n y = y+dx\n if self.board[x][y] : break\n moves.append((x,y))\n return moves", "def get_legal_moves(self):\n # for each square in the castle figure out if an moves can occur from it.\n moves = []\n allowed = [self.turn]\n if self.turn == DEFENDER:\n allowed.extend((KING, CASTLE_OCCUPIED))\n it = np.nditer(self.board_state, flags=['multi_index'])\n while not it.finished:\n index = it.multi_index\n curr_loc = it[0]\n if curr_loc in allowed:\n moves.extend(self.get_legal_move_piece(curr_loc, index))\n it.iternext()\n return moves", "def get_all_moves(self, castling_allowed=True):\n\n can_move = str.isupper if self.white_to_move else str.islower\n\n valid_moves = set()\n\n for row_num, row in enumerate(self.board):\n for col_num, piece in enumerate(row):\n if piece != EMPTY_SPACE and can_move(piece):\n\n location = (row_num, col_num)\n\n # Everything except the pawn movement\n if piece.lower() in NAME_TO_PIECE:\n valid_moves = valid_moves.union(self._get_standard_moves_for_piece(location, piece))\n\n # Pawn moves\n if piece.lower() == PAWN:\n valid_moves = valid_moves.union(self._get_pawn_moves(location, piece))\n\n # Castling\n if castling_allowed and piece.lower() == KING:\n valid_moves = valid_moves.union(self._get_possible_castles(piece))\n\n return valid_moves", "def get_king_attackers(self):\n # if the list is non-empty, we're in Check!\n other_side_color = PieceColor.BLACK if self._current_side_color == PieceColor.WHITE else PieceColor.WHITE\n king_position = self._pieces[self._current_side_color]['K'].position # 'K' is for King!\n\n return self.get_attackers(other_side_color, king_position)", "def command_moves(board, locations):\n possible_moves = []\n buffers = [(1,0), (0,1), (-1,0), (0,-1)]\n\n for piece in locations:\n piece_moves = []\n\n for move in buffers:\n\n poss_move = return_valid_move(board, locations, piece, 
move)\n\n if poss_move:\n piece_moves.append(poss_move)\n\n possible_moves.append(piece_moves)\n\n return possible_moves", "def get_available_moves(self):\n available = []\n row, col = tuple(self.current_pos)\n if row - 1 >= 0 and self.maze[row - 1][col] != 'x':\n available.append('n')\n if row + 1 < len(self.maze) and self.maze[row + 1][col] != 'x':\n available.append('s')\n if col - 1 >= 0 and self.maze[row][col - 1] != 'x':\n available.append('w')\n if col + 1 < len(self.maze[row]) and self.maze[row][col + 1] != 'x':\n available.append('e')\n return available", "def get_move_pieces(self, player):\n self.mark_moves(player)\n moves = [piece for piece in self.pieces if piece.get_state() == MOVE]\n self.clear_moves()\n return moves", "def get_all_valid_moves(self, player):\n moves = [] # Stores the possible moves\n capture_move_exists = False # Indicates if a capturing move is possible\n\n for piece in self.get_all_pieces(player):\n valid_moves = self._get_valid_moves(piece)\n\n for move, skip in valid_moves.items():\n moves.append([(piece.row, piece.col), move, skip])\n\n if len(skip) > 0:\n # Checks if there is a move that can capture a piece\n capture_move_exists = True\n\n if capture_move_exists:\n # Only gets the capturing moves if there is one\n eating_moves = []\n for move in moves:\n if len(move[2]) != 0:\n eating_moves.append(move)\n\n moves = eating_moves\n\n return moves", "def getPossibleMoves(self): # called to get possible positions this piece can go\r\n \r\n moves = {}\r\n\r\n ids = []\r\n\r\n for piece in self.board.pieces.values():\r\n if piece.name == \"empty\":\r\n piece.glow = False\r\n piece.ready = False\r\n\r\n self.piece = self\r\n\r\n def check(direction=\"left\", heading=\"north\", x=None, y=None):\r\n piece = self.piece\r\n if direction == \"left\": x -= 50\r\n else: x += 50\r\n\r\n if heading == \"north\": y -= 50\r\n else: y += 50\r\n\r\n if (x, y) in self.board.pieces: # position is empty\r\n empty = self.board.getPiece((x, y))\r\n empty.glow = True\r\n old, new, obj = (direction, heading), (x, y), piece\r\n identity = self.getRandomID(ids) # get an ID for the move\r\n moves[identity] = old, new, obj\r\n\r\n if piece.isKing: # piece is a king, so go on\r\n check(direction, heading, x, y)\r\n else: # its not empty, so check if its comrade\r\n x1, y1 = x+25, y+25\r\n piece2 = self.board.getPiece((x1, y1))\r\n try:\r\n if piece.isComrade(piece2):# piece is comrade so return\r\n return\r\n else: # piece is not comrade, so check empty\r\n if direction == \"left\": x2 = x1-25-50\r\n else: x2 = x1-25+50\r\n\r\n if heading == \"north\": y2 = y1-25-50\r\n else: y2 = y1-25+50\r\n\r\n if (x2, y2) in self.board.pieces: # its empty, so notify player\r\n empty = self.board.getPiece((x2, y2))\r\n empty.glow = True\r\n empty.ready = True\r\n\r\n old, new, obj = (direction, heading), (x2, y2), piece2\r\n identity = self.getRandomID(ids)\r\n moves[identity] = old, new, obj\r\n\r\n check(direction, heading, piece2.x-25, piece2.y-25)\r\n check(direction, heading, x2, y2)\r\n \r\n # check empty or comrade again\r\n if direction == \"left\": x3 = x2-50\r\n else: x3 = x2+50\r\n\r\n if heading == \"north\": y3 = y2-50\r\n else: y3 = y2+50\r\n\r\n if (x3, y3) in self.board.pieces: # positon(address) is empty\r\n return\r\n else: # there is a piece, so check if comrade, stop, if not comrade continue\r\n x3+=25\r\n y3+= 25\r\n\r\n piece3 = self.board.getPiece((x3, y3))\r\n if piece3.isComrade(piece2): # comrades, so stop\r\n return\r\n else: # not comrades, so continue\r\n self.piece = 
piece3\r\n check(direction, heading, x, y)\r\n\r\n #self.piece = piece2\r\n \r\n #check(direction, heading, x2, y2) # keep searching\r\n else: # its not empty, so return\r\n return\r\n except:\r\n pass\r\n\r\n if self.piece.name == \"white\": direction = \"north\"\r\n else: direction = \"south\"\r\n \r\n check(\"left\", direction, self.piece.x-25, self.piece.y-25)\r\n check(\"right\", direction, self.piece.x-25, self.piece.y-25)\r\n \r\n if self.piece.isKing:\r\n if self.piece.name == \"white\": heading = \"south\"\r\n else: heading = \"north\"\r\n \r\n check(\"left\", heading, self.piece.x-25, self.piece.y-25)\r\n check(\"right\", heading, self.piece.x-25, self.piece.y-25)\r\n\r\n if self.piece.name == \"white\":\r\n eatMoves = self.board.game.thinkEatMoves(moves, \"person\")\r\n if eatMoves is not None:\r\n return eatMoves\r\n\r\n return moves", "def getLegalMoves(cls, piece, layout):\n color = cls._getColorAt(piece, layout)\n if color == \"w\":\n direction = \"b\"\n else:\n direction = \"f\"\n\n unfiltered_moves = []\n\n if cls._isKing(piece, layout):\n left, right = cls.adjacency_matrix[piece][\"b\"]\n unfiltered_moves.extend([left, right])\n left, right = cls.adjacency_matrix[piece][\"f\"]\n unfiltered_moves.extend([left, right])\n else:\n left, right = cls._getMoves(piece, direction)\n unfiltered_moves.extend([left, right])\n\n # remove None destinations from the list (board edges)\n filtered_moves = list(filter(lambda x: x != None, unfiltered_moves))\n\n # move destination is an opponent\n possible_jumps = list(\n filter(lambda x: cls._isOpponent(color, x, layout), filtered_moves)\n )\n if possible_jumps:\n # process jumps first because you MUST jump\n unfiltered_jumps = list(\n map(lambda x: cls._canJump(x, piece, layout), possible_jumps)\n )\n jumps = list(filter(lambda x: x != None, unfiltered_jumps))\n # we don't care about moves if we have jumps\n if jumps:\n return {\"jumps\": jumps}\n\n # if we got here, process moves\n moves = list(\n filter(\n lambda x: cls.Pieces.NONE == cls._getColorAt(\n x, layout), filtered_moves\n )\n )\n return {\"moves\": list(moves)}", "def get_goat_possible_moves(self) -> List:\n moves = []\n for pos in self.get_all_positions():\n if pos.is_goat():\n addr_from = pos.address\n for addr_to in pos.piece.get_valid_moves():\n moves.append((addr_from, addr_to))\n\n return moves", "def moves(self):\n\n moves = list()\n\n for row in range(HEIGHT):\n for col in range(WIDTH):\n\n move = (row, col)\n\n if self.board[row][col] == 9:\n moves.append(move)\n\n if self.board[row][col] == 1 or self.board[row][col] == 2:\n\n move = (row - 1, col)\n\n if self.board[row - 1][col] == 1 or self.board[row - 1][col] == 2:\n\n pass\n\n else:\n\n moves.append(move)\n\n return moves", "def get_moves(self):\n grid = self.model.grid\n # List of agents we can't overlap with\n no_overlap = [\"wall\", \"human\", \"zombie\"]\n\n if self.agent_type == \"zombie\" or \\\n (\"AvoidingZombie\" not in self.states and os.environ[\"mode\"] == \"5\"):\n no_overlap.append(\"road\")\n\n # Always give the option to stay on your current location(stand still)\n all_cells = self.neighbors()\n free_cells = [self.pos]\n\n # Get rid of cells that we may not move to by iterating through all\n # cells next to the agent, and only adding non-occupied cells\n for cell in all_cells:\n cell_occupied = False\n x, y = cell.pos\n # If there are agents in the current cell, and we are not allowed\n # to overlap with any of those agents, the cell is occupied.\n # Only add cells which are not occupied.\n if not 
grid.is_cell_empty((x, y)):\n for agent in grid[x][y]:\n if agent.agent_type in no_overlap:\n cell_occupied = True\n break\n if not cell_occupied:\n free_cells.append((x, y))\n return free_cells", "def moves(self):\n move_list = []\n for direction in Maze.possible_directions:\n move = Maze.dirs_to_moves[direction]\n if (0 <= (self.location[0]+move[0]) < len(self.grid) and\n 0 <= (self.location[1]+move[1]) < len(self.grid[0]) and\n self.grid[self.location[0]+move[0]][self.location[1]+move[1]] != 'X'):\n move_list.append(move)\n\n return move_list", "def get_moves(self, board, position):\n current_piece = board.grid[position[1]][position[0]]\n\n moves = []\n\n # For each direction in which the piece can move...\n for direction in self.directions:\n # for each vector in that direction...\n # (once a piece is encountered in a direction,\n # further positions in that direction are unaccessible,\n # therefore break out of inner FOR loop)\n for vector in direction:\n new_position = (position[0] + vector[0], position[1] + vector[1])\n\n # Check if the proposed destination is inbounds\n if board._inbounds(new_position) is False:\n break\n\n other_piece = board.grid[new_position[1]][new_position[0]]\n\n # Check if the proposed destination is occupied by a friendly piece\n if other_piece != \"empty\" and other_piece.player == current_piece.player:\n break\n\n # Check other validity conditions, mainly for pawn\n if self._is_valid_move(vector, current_piece, other_piece) is False:\n break\n\n # The destination is viable, add the move\n moves.append(Move(position, current_piece, new_position, other_piece))\n\n # If there was an enemy piece on the square\n if other_piece != \"empty\":\n break\n\n return moves", "def moves(self, board_state):\n # pos_moves = generate_moves(board_state) # Naive moves function here\n blacks = board_state.search_board('B')\n # Generate the possible moves required to kill the first black piece\n # on the board\n pos_moves = sorted_generate_moves_piece(board_state, blacks[0])\n return pos_moves", "def get_all_moves(board, player):\n moves = []\n if not (player_has_won(board, player) or\n player_has_won(board, utils.get_opponent(player)) or\n (not is_valid_board(board))):\n for index in range(9):\n if board[index] == config.NO_PLAYER:\n moves += [index]\n return moves", "def get_square_moves(self, moves):\r\n return self.board.get_square_moves(moves)", "def get_moves(self):\n return self.piece_behavior.get_moves(self.board, self.position)", "def spaces_threatened_towards_king(self, list_of_directional_spaces_threatened):\n for lst in list_of_directional_spaces_threatened:\n for move in lst:\n if isinstance(THE_BOARD.coords_to_piece(move), King):\n return(lst)\n \n raise Exception(f\"{self.symbol} not threatening the enemy king\")", "def spaces_threatened_towards_king(self, list_of_directional_spaces_threatened):\n for lst in list_of_directional_spaces_threatened:\n for move in lst:\n if isinstance(THE_BOARD.coords_to_piece(move), King):\n return(lst)\n \n raise Exception(f\"{self.symbol} not threatening the enemy king\")", "def spaces_threatened_towards_king(self, list_of_directional_spaces_threatened):\n for lst in list_of_directional_spaces_threatened:\n for move in lst:\n if isinstance(THE_BOARD.coords_to_piece(move), King):\n return(lst)\n \n raise Exception(f\"{self.symbol} not threatening the enemy king\")", "def get_possible_moves(self, board: np.ndarray):\n board_size = board.shape[0]\n moves = []\n if abs(self.value) == 1:\n if self.start_row <= 2:\n directions = 
[np.array((1, -1)), np.array((1, 1))]\n else:\n directions = [np.array((-1, 1)), np.array((-1, -1))]\n else:\n directions = [np.array((-1, 1)), np.array((1, 1)), np.array((-1, -1)), np.array((1, -1))]\n for direction in directions:\n within_board = True\n i = 1\n while within_board:\n coord = self.coord + direction * i\n within_board = _check_if_position_on_board(coord, board_size)\n # break if first step is already out of board\n if not within_board:\n break\n value_board = board[coord[0], coord[1]]\n # break if there is a stone of them same player in the way\n if value_board < 0 and self.value < 0 or value_board > 0 and self.value > 0:\n break\n # if there is no stone, than add this to move list.\n if value_board == 0:\n moves += [{\"old_coord\": self.coord, \"new_coord\": coord, \"jumped_stones\": [], \"jumped_values\": 0,\n \"move_coords\": [coord]}]\n # if there is a stone of the enemy\n if (value_board < 0 < self.value) or (self.value < 0 < value_board):\n # check if it can be jumped\n coord_jump = coord + direction\n move_coords = [coord_jump.copy()]\n within_board_after_jump = _check_if_position_on_board(coord_jump, board_size)\n # break if place behind stone is out of border\n if not within_board_after_jump:\n break\n value_board_jump = board[coord_jump[0], coord_jump[1]]\n jumped_stones = []\n # break if there is no free place\n if value_board_jump != 0:\n break\n jumped_stones += [coord]\n moves_tmp = self.jump_chain(directions, board, coord_jump, value_board, jumped_stones, move_coords)\n if len(moves_tmp) > 0:\n moves += moves_tmp\n else:\n moves += [{\"old_coord\": self.coord, \"new_coord\": coord_jump, \"jumped_stones\": jumped_stones,\n \"jumped_values\": abs(value_board), \"move_coords\": [coord_jump]}]\n i += 1\n # break if normal stone, because they can only move one field\n if abs(self.value) == 1:\n break\n return moves", "def possible_moves(self, piece):\n def _index(orig, off):\n \"\"\"Helper function to find the new index.\"\"\"\n orig_x, orig_y = orig\n off_x, off_y = off\n return (orig_y - off_y) * self.ncols + (orig_x - off_x)\n\n p_x, p_y = piece\n p_i = _index(piece, (0, 0))\n\n # pass a list of the four corners first for basic possibles\n move_land = [((p_x + i, p_y + j), self.squares[_index(piece, (i, j))])\\\n for i in [-1, 1] for j in [-1, 1]]\n possibles = self.squares[p_i].can_move(piece, move_land)\n\n # next append the new list from jumps\n jump_land = [((p_x + i, p_y + j), self.squares[_index(piece, (i, j))])\\\n for j in [-2, 2] for i in [-2, 2]]\n possibles += self.squares[p_i].can_jump(piece, move_land, jump_land)\n\n # clean out the list of duplicates, although there should be none\n return [m for i, m in enumerate(possibles) if m not in possibles[:i]]", "def get_moves_for_square(self, square):\n (x,y) = square\n\n # determine the color of the piece.\n color = self[x][y]\n\n # skip empty source squares.\n if color==0:\n return []\n\n # search all possible directions.\n moves = []\n for direction in self.__directions:\n move = self._discover_move(square, direction)\n if move:\n # print(square,move,direction)\n moves.append(move)\n\n # return the generated move list\n return moves" ]
[ "0.6816272", "0.68159825", "0.6712147", "0.667524", "0.66631305", "0.6642847", "0.658078", "0.6434977", "0.64293206", "0.63750225", "0.6243731", "0.6235143", "0.622932", "0.62285656", "0.6218373", "0.6215783", "0.6200796", "0.6177311", "0.61664575", "0.6161181", "0.61515874", "0.6142152", "0.6141744", "0.61233246", "0.6114357", "0.6114357", "0.6114357", "0.611144", "0.6104963", "0.60931534" ]
0.708593
0
Returns a tuple of possible moves based on the starting position and the vectors. Used for Knights, Pawns, and Kings.
def get_crawler_moves(coord, vectors):
    possible_moves = []
    for vector in vectors:
        try:
            move = (coord[0]+vector[0], coord[1]+vector[1])
        except TypeError:
            # Report the bad input, then re-raise rather than falling through
            # with 'move' unbound (or stale from a previous iteration).
            print("coord: {}".format(coord))
            print("vector: {}".format(vector))
            raise
        if move[0] in cc.VALID_RANKS and move[1] in cc.VALID_RANKS:
            possible_moves.append(move)
    return tuple(possible_moves)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def get_possible_moves(self, current_x: int, current_y: int) -> List[(int, int)]:\n pass", "def generate_possible_moves(self):\r\n\t\t# Moves:\r\n\t\t# 0 - North\r\n\t\t# 1 - East\r\n\t\t# 2 - South\r\n\t\t# 3 - West\r\n\r\n\t\tmoves = []\r\n\r\n\t\tif self.x != 0:\r\n\t\t\tmoves.append(0)\r\n\t\tif self.y != self.n-1:\r\n\t\t\tmoves.append(1)\r\n\t\tif self.x != self.n-1:\r\n\t\t\tmoves.append(2)\r\n\t\tif self.y != 0:\r\n\t\t\tmoves.append(3)\r\n\r\n\t\treturn moves", "def find_moves(self):\n\n from itertools import product\n free_position = self.find_free()\n return [list(free_position+i) for i in [[0,1],[1,0],[-1,0],[0,-1]] if tuple(i+free_position) in product(range(self.size),repeat=2)]", "def possibleMovements(self,numIterations:int=50)->list[tuple]:\n x=random.randint(0,self._side-1); y=random.randint(0,self._side-1)\n possible_positions=[]\n positionsCovered=[(x,y)]\n for _ in range(numIterations):\n if x+2<self._side and y+1<self._side:\n possible_positions.append((x+2,y+1))\n \n if x+2<self._side and y-1<self._side and y-1>0:\n possible_positions.append((x+2,y-1))\n \n if x-2<self._side and y+1<self._side and x-2>0:\n possible_positions.append((x-2,y+1))\n \n if x-2<self._side and y-1<self._side and x-2>0 and y-1>0:\n possible_positions.append((x-2,y-1)) \n\n if x+1<self._side and y+2<self._side:\n possible_positions.append((x+1,y+2))\n \n if x+1<self._side and y-2<self._side and y-1>0:\n possible_positions.append((x+1,y-2))\n\n if x-1<self._side and y+2<self._side and x-1>0:\n possible_positions.append((x-1,y+2))\n \n if x-1<self._side and y-2<self._side and x-1>0 and y-2>0:\n possible_positions.append((x-1,y-2))\n\n newX,newY=random.choice(possible_positions) #choose randomly among the possible positions,and then repeat this \n x,y=newX,newY\n positionsCovered.append((newX,newY)) \n\n return positionsCovered", "def get_available_moves(self):\n available = []\n row, col = tuple(self.current_pos)\n if row - 1 >= 0 and self.maze[row - 1][col] != 'x':\n available.append('n')\n if row + 1 < len(self.maze) and self.maze[row + 1][col] != 'x':\n available.append('s')\n if col - 1 >= 0 and self.maze[row][col - 1] != 'x':\n available.append('w')\n if col + 1 < len(self.maze[row]) and self.maze[row][col + 1] != 'x':\n available.append('e')\n return available", "def findPlacesToMove():\n movesDestinations = [];\n \n curY = curBlank[0];\n curX = curBlank[1];\n\n if(curY-1 >= 1): #UP\n movesDestinations.append((curY-1, curX));\n if(curY+1 <= n): #DOWN\n movesDestinations.append((curY+1, curX));\n if(curX-1 >= 1): #LEFT\n movesDestinations.append((curY, curX-1));\n if(curX+1 <= n): #RIGHT\n movesDestinations.append((curY, curX+1));\n \n return movesDestinations;", "def legal_moves(self):\n moves = \"\"\n swappable = self.swappable_positions\n empty_position = self.get_position(0)\n\n for s in swappable:\n pos_diff = empty_position[0] - s[0], empty_position[1] - s[1]\n if pos_diff[0] > 0:\n moves += \"U\"\n elif pos_diff[0] < 0:\n moves += \"D\"\n 
elif pos_diff[1] > 0:\n moves += \"L\"\n elif pos_diff[1] < 0:\n moves += \"R\"\n\n return moves", "def get_goat_possible_moves(self) -> List:\n moves = []\n for pos in self.get_all_positions():\n if pos.is_goat():\n addr_from = pos.address\n for addr_to in pos.piece.get_valid_moves():\n moves.append((addr_from, addr_to))\n\n return moves", "def get_directions():\n return [(1, 0), (0, 1), (-1, 0), (0, -1)]", "def get_possible_moves(board):\n\n possible_moves = []\n\n ret_tuple_left = move_left(board)\n ret_tuple_right = move_right(board)\n ret_tuple_up = move_up(board)\n ret_tuple_down = move_down(board)\n\n if ret_tuple_left[0]:\n possible_moves.append(ret_tuple_left[1])\n if ret_tuple_right[0]:\n possible_moves.append(ret_tuple_right[1])\n if ret_tuple_up[0]:\n possible_moves.append(ret_tuple_up[1])\n if ret_tuple_down[0]:\n possible_moves.append(ret_tuple_down[1])\n\n return possible_moves", "def possible_moves(self, pos: Point) -> List[Point]:\n # logger.debug(f\"inside possible_moves {pos}\")\n available_squares = []\n for direction in Direction.cardinal():\n # logger.debug(f\"direction = {direction}\")\n neighbor = pos + direction\n # logger.debug(f\"neighbor = {neighbor}\")\n if neighbor.x < 1 or self.width - 2 < neighbor.x or neighbor.y < 1 or self.height - 2 < neighbor.y:\n # logger.debug(f\"{neighbor} not in bounds\")\n continue\n if self.can_carve(pos, direction):\n # logger.debug(f\"can_carve returned True pos={pos}, direction={direction}\")\n available_squares.append(neighbor)\n # logger.debug(f\"available squares:\")\n # for square in available_squares:\n # logger.debug(f\"square={square}\")\n # logger.add(\"debug.log\")\n return available_squares", "def pathfinder(starting_position: tuple, target_position: tuple, grid: np.ndarray) -> List[tuple] or None:\n moves_dict = {(1, 0): \"DOWN\", (-1, 0): \"UP\", (0, 1): \"RIGHT\", (0, -1): \"LEFT\"}\n\n moves = []\n path = []\n dead_ends = []\n\n def rate_position(current, target):\n \"\"\"\n Helper function to calculate distance to target\n \"\"\"\n return (target[0] - current[0]) ** 2 + (target[1] - current[1]) ** 2\n\n # Setting starting position\n current_position = starting_position\n while current_position != target_position:\n possible_moves = {}\n # Checking for each possible move and rating them\n for m in moves_dict.keys():\n if check_valid_move(grid, current_position, m):\n new_position = tuple(np.add(current_position, m))\n new_position_rating = rate_position(new_position, target_position)\n if new_position not in path and new_position not in dead_ends:\n possible_moves[new_position_rating] = m\n\n # if there are possible move, select the one, that would move us the closest to target\n if possible_moves:\n path.append(current_position) # save position to path\n moves.append(possible_moves[min(possible_moves)]) # save move to move list\n current_position = tuple(np.add(current_position, possible_moves[min(possible_moves)]))\n # if not, go back one move and add current position to dead ends\n else:\n # if no moves available from the start, return None\n if current_position == starting_position:\n return None\n dead_ends.append(current_position) # save position to dead ends\n current_position = path[-1] # move back one step\n path.pop(-1) # delete step from path\n moves.pop(-1) # delete move from move list\n\n return [tuple(moves_dict[move] for move in moves)]", "def get_legal_moves(self):\n moves = []\n if self.player_locations[self.whose_turn] is None:\n return self.get_blank_locations()\n matrix = [(1,0), (-1,0), (0,1), 
(0,-1), (1,1), (1,-1), (-1, 1), (-1,-1)]\n\n for dx, dy in matrix:\n x,y = self.player_locations[self.whose_turn]\n while x+dx <= xdim and x+dx >= 0 and y+dy <= ydim and y+dy >= 0:\n x = x+dx\n y = y+dx\n if self.board[x][y] : break\n moves.append((x,y))\n return moves", "def getMove(self):\n while True:\n try:\n init = tuple(int(str.strip()) for str in raw_input('Choose the initial position of your move: ').split(','))\n break\n except ValueError:\n print(\"Input is not integer.\")\n\n while (len(init) != 2) or (init[0] not in range(1, self.grid.width+1)) or (init[1] not in range(1, self.grid.height+1)):\n print 'Initial position is not valid.'\n init = tuple(int(str.strip()) for str in raw_input('Choose the initial position of your move: ').split(','))\n\n while True:\n try:\n dest = tuple(int(str.strip()) for str in raw_input('Choose the destination position of your move: ').split(','))\n break\n except ValueError:\n print(\"Input is not integer.\")\n\n while (len(dest) != 2) or (dest[0] not in range(1, self.grid.width+1)) or (dest[1] not in range(1, self.grid.height+1)):\n print 'Destination position is not valid.'\n dest = tuple(int(str.strip()) for str in raw_input('Choose the destination position of your move: ').split(','))\n\n return (init, dest)", "def legal_moves():\n\tlegal_moves = (\"r\", \"p\", \"s\")\n\treturn legal_moves", "def get_all_moves(self):\n # 2d matrix of true/false, true if something can be placed\n legal_move_board = []\n possible_move_list = []\n for row in range(self.size):\n move_row = []\n for col in range(self.size):\n empty = self.board[row][col].state == PegState.EMPTY\n move_row.append(empty)\n if empty:\n possible_move_list.append((row, col))\n legal_move_board.append(move_row)\n \n # every position where something can be placed (list of tuples) (Combined with above)\n \"\"\" possible_move_list = []\n for row in range(self.size):\n for col in range(self.size):\n if legal_move_board[row][col] == True:\n possible_move_list.append((row, col))\n \"\"\"\n return legal_move_board, possible_move_list", "def get_move_options(self, x, y):\r\n best = self._get_lowest_neighbor_value(x, y)\r\n moves = []\r\n for dx, dy in DijkstraMap.neighbors:\r\n tx, ty = x + dx, y + dy\r\n if self.point_in_map(tx, ty) and self.tiles[tx][ty] == best:\r\n moves.append( (dx, dy))\r\n return moves", "def moves(self):\n\n # define a full range, which we can compare against columns,\n # rows, or blocks. 
they're all the same when stored as sets.\n line = set(range(1, 10))\n moves = []\n\n # iterate every cell on the board\n for row in range(0, 9):\n for col in range(0, 9):\n\n # ignore this cell if it's already filled\n i = self._index(col, row)\n if self.data[i] is not None:\n continue\n\n # fetch the adjacent cells\n row_values = set(self._row(row))\n col_values = set(self._column(col))\n bck_values = set(self._block(col, row))\n\n # subtract the values present in the adjacent cells\n # (since this cell *can't* be of any of those values),\n # to leave the list of possibilities for this cell\n missing = line.difference(row_values, col_values, bck_values)\n\n # if there's only *one* possibility, we've found the\n # solution to this cell\n if len(missing) == 1:\n moves.append((col, row, missing.pop()))\n\n return moves", "def get_possible_moves(self):\n moves = []\n for i in range(1, self.current_total + 1):\n if i ** 2 <= self.current_total:\n moves.append(i ** 2)\n\n return moves", "def _get_movements_8n():\n s2 = math.sqrt(2)\n return [(1, 0, 1.0),\n (0, 1, 1.0),\n (-1, 0, 1.0),\n (0, -1, 1.0),\n (1, 1, s2),\n (-1, 1, s2),\n (-1, -1, s2),\n (1, -1, s2)]", "def get_valid_moves(self):\n if self.king:\n valid_moves = [[self.row + 1, self.col + 1],\n [self.row + 1, self.col - 1],\n [self.row - 1, self.col - 1],\n [self.row - 1, self.col + 1]]\n else:\n if self.player == 1:\n valid_moves = [[self.row + 1, self.col + 1],\n [self.row + 1, self.col - 1]]\n else:\n valid_moves = [[self.row - 1, self.col - 1],\n [self.row - 1, self.col + 1]]\n return valid_moves", "def _get_piece_moves(self, x, y):\n\n piece = self.get_piece(x, y)\n moves = []\n\n if not piece:\n return moves\n\n if piece.name == 'rook' or piece.name == 'queen':\n direcs = ['up', 'down', 'left', 'right']\n moves = [self._get_moves_indirection(x, y, direc) for direc in\n direcs]\n\n elif piece.name == 'bishop' or piece.name == 'queen':\n direcs = ['d1', 'd2', 'd3', 'd4']\n for direc in direcs:\n moves += self._get_moves_indirection(x, y, direc)\n\n elif piece.name == 'king':\n moves = [(x-1, y-1), (x-1, y), (x-1, y+1), (x, y-1),\n (x, y+1), (x+1, y-1), (x+1, y), (x+1, y+1)]\n\n elif piece.name == 'knight':\n moves = [(x-1, y-2), (x-2, y-1), (x-2, y+1), (x-1, y+2),\n (x+1, y+2), (x+2, y+1), (x+1, y-2), (x+2, y-1)]\n\n elif piece.name == 'pawn':\n if piece.color == ChessGame.BLACK:\n moves = [(x-1, y), (x-1, y-1), (x-1, y+1)]\n else:\n moves = [(x+1, y), (x+1, y-1), (x+1, y+1)]\n\n tmp = list(moves)\n for u, v in tmp:\n if v != y and not self.is_enemy(u, v, piece.color):\n moves.remove((u, v))\n\n if v == y and self.is_enemy(u, v, piece.color):\n moves.remove((u, v))\n\n mycolor = piece.color\n valid = set()\n for (u, v) in moves:\n if not self.in_bounds(u, v):\n continue\n\n if not self.get_piece(u, v): # board is blank\n valid.add((u, v))\n\n if self.is_enemy(u, v, mycolor):\n valid.add((u, v))\n\n return valid", "def available_moves(self):\n available_moves = []\n for i in range(self.quadrants_count):\n quadrant_positions = self.play_area[i].available_positions()\n for p in quadrant_positions:\n position = p + i * 9\n for j in range(self.quadrants_count):\n move1 = [str(position), str(j + 1), \"l\"]\n move2 = [str(position), str(j + 1), \"r\"]\n available_moves.append(\" \".join(move1))\n available_moves.append(\" \".join(move2))\n return available_moves", "def FindPossiblePositions(temporaryMaze, currentPosition):\n\tx = currentPosition[X]\n\ty = currentPosition[Y]\n\tpossiblePositions = [[None for a in range(4)] for b in 
range(4)] \n\n\t# look to the right\n\tif (y < len(temporaryMaze[0]) - 1):\n\t\tif temporaryMaze[x][y + 1] == FINISH or temporaryMaze[x][y + 1] >= PATH:\n\t\t\tpossiblePositions[RIGHT][X] = x\n\t\t\tpossiblePositions[RIGHT][Y] = y + 1\n\t\t\tpossiblePositions[RIGHT][PHEROMONE] = temporaryMaze[x][y + 1]\n\t\t\tpossiblePositions[RIGHT][DIRECTION] = RIGHT\n\n\t# look up\n\tif (x > 0):\n\t\tif temporaryMaze[x - 1][y] == FINISH or temporaryMaze[x - 1][y] >= PATH:\n\t\t\tpossiblePositions[UP][X] = x - 1\n\t\t\tpossiblePositions[UP][Y] = y\n\t\t\tpossiblePositions[UP][PHEROMONE] = temporaryMaze[x - 1][y]\n\t\t\tpossiblePositions[UP][DIRECTION] = UP\n\n\t# look to the left\n\tif (y > 0):\n\t\tif temporaryMaze[x][y - 1] == FINISH or temporaryMaze[x][y - 1] >= PATH:\n\t\t\tpossiblePositions[LEFT][X] = x\n\t\t\tpossiblePositions[LEFT][Y] = y - 1\n\t\t\tpossiblePositions[LEFT][PHEROMONE] = temporaryMaze[x][y - 1]\n\t\t\tpossiblePositions[LEFT][DIRECTION] = LEFT\n\n\t# look down\n\tif (x < len(temporaryMaze) - 1):\n\t\tif temporaryMaze[x + 1][y] == FINISH or temporaryMaze[x + 1][y] >= PATH:\n\t\t\tpossiblePositions[DOWN][X] = x + 1\n\t\t\tpossiblePositions[DOWN][Y] = y\n\t\t\tpossiblePositions[DOWN][PHEROMONE] = temporaryMaze[x + 1][y]\n\t\t\tpossiblePositions[DOWN][DIRECTION] = DOWN\n\n\treturn possiblePositions" ]
[ "0.69245857", "0.69245857", "0.69245857", "0.69245857", "0.69245857", "0.69245857", "0.69245857", "0.67829096", "0.65028155", "0.6499266", "0.64990675", "0.6449157", "0.64386046", "0.63928956", "0.6390314", "0.6385454", "0.6357725", "0.6330005", "0.63225853", "0.6311004", "0.630088", "0.62327945", "0.62113476", "0.6169312", "0.61321414", "0.6128571", "0.6110796", "0.6085114", "0.6059818", "0.605755" ]
0.70559174
0
Returns the moves for the bishop at the given location.
def get_bishop_moves(state, coord): # Movement Options # Diagonals # no piece = add move, enemy = add move & break loop, friendly = break loop bishop_moves = [] if state.active_color == cc.WHITE_ACTIVE: for vector in cc.BISHOP_VECTORS: bishop_moves.extend(get_direction_moves(state, cc.W_BISHOP, coord, vector)) elif state.active_color == cc.BLACK_ACTIVE: for vector in cc.BISHOP_VECTORS: bishop_moves.extend(get_direction_moves(state, cc.B_BISHOP, coord, vector)) else: raise Exception("GameState: Invalid Active Color") return bishop_moves
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def traverseBishop(self):\n\t\tmoves = np.empty(14, dtype=object)\n\t\tcnt = [0]\n\t\tPiece.traverse(self, cnt, moves, -1, -1)\n\t\tPiece.traverse(self, cnt, moves, -1, 1)\n\t\tPiece.traverse(self, cnt, moves, 1, -1)\n\t\tPiece.traverse(self, cnt, moves, 1, 1)\n\t\treturn moves[:cnt[0]]", "def get_bishop_moves(self, i, j, moves):\r\n directions = ((-1, 1), (1, -1), (1, 1), (-1, -1))\r\n if self.turn_white:\r\n oponent = 'b'\r\n else:\r\n oponent = 'w'\r\n for d in directions:\r\n for m in range(1, 8):\r\n cri = i + d[0] * m\r\n crj = j + d[1] * m\r\n if 0 <= cri <= 7 and 0 <= crj <= 7:\r\n # empty\r\n if self.board[cri][crj][0] == '-':\r\n # print(i, j, cri, crj)\r\n moves.append(Move((i, j), (cri, crj), self.board))\r\n\r\n # oponent\r\n elif self.board[cri][crj][0] == oponent:\r\n # print(i, j, cri, crj)\r\n moves.append(Move((i, j), (cri, crj), self.board))\r\n break\r\n # piesa mea\r\n else:\r\n break\r\n else:\r\n break", "def moves(self, board_state):\n # pos_moves = generate_moves(board_state) # Naive moves function here\n blacks = board_state.search_board('B')\n # Generate the possible moves required to kill the first black piece\n # on the board\n pos_moves = sorted_generate_moves_piece(board_state, blacks[0])\n return pos_moves", "def MoveBishop(ChessPiece, NewSquare, AllPieces, Board):\n\n\tfor Bishop in AllPieces[ChessPiece]:\n\t\tX_BishopSquare = int(Bishop['CurrentSquare'][1])-1 # Integer Coordinate\n\t\tY_BishopSquare = int(RowToNumber(Bishop['CurrentSquare'][0])) # Integer Coordinate\n\n\t\t# Diagonal Up Right..\n\t\tfor i in range(1, 8):\n\t\t\tif X_BishopSquare+i <= 7 and Y_BishopSquare+i <= 7:\n\t\t\t\tif NewSquare == (NumberToRow(str(Y_BishopSquare+i))+str(X_BishopSquare+i+1)):\n\t\t\t\t\tif Board[X_BishopSquare+i][Y_BishopSquare+i] is not ChessPiece[:5]:\n\t\t\t\t\t\tBoard[X_BishopSquare][Y_BishopSquare] = 'Free'\n\t\t\t\t\t\tBoard[X_BishopSquare+i][Y_BishopSquare+i] = ChessPiece[:5]\n\t\t\t\t\t\treturn Bishop['CurrentSquare']\n\t\t\telse:\n\t\t\t\tbreak\n\n\t\t# Diagonal Up Left..\n\t\tfor i in range(1, 8): \n\t\t\tif X_BishopSquare+i <= 7 and Y_BishopSquare-i >= 0:\n\t\t\t\tif NewSquare == (NumberToRow(str(Y_BishopSquare-i))+str(X_BishopSquare+i+1)):\n\t\t\t\t\tif Board[X_BishopSquare+i][Y_BishopSquare-i] is not ChessPiece[:5]:\n\t\t\t\t\t\tBoard[X_BishopSquare][Y_BishopSquare] = 'Free'\n\t\t\t\t\t\tBoard[X_BishopSquare+i][Y_BishopSquare-i] = ChessPiece[:5]\n\t\t\t\t\t\treturn Bishop['CurrentSquare']\n\t\t\telse:\n\t\t\t\tbreak\n\n\t\t# Diagonal Down Right..\n\t\tfor i in range(1, 8): \n\t\t\tif X_BishopSquare-i >= 0 and Y_BishopSquare+i <= 7:\n\t\t\t\tif NewSquare == (NumberToRow(str(Y_BishopSquare+i))+str(X_BishopSquare-i+1)):\n\t\t\t\t\tif Board[X_BishopSquare-i][Y_BishopSquare+i] is not ChessPiece[:5]:\n\t\t\t\t\t\tBoard[X_BishopSquare][Y_BishopSquare] = 'Free'\n\t\t\t\t\t\tBoard[X_BishopSquare-i][Y_BishopSquare+i] = ChessPiece[:5]\n\t\t\t\t\t\treturn Bishop['CurrentSquare']\n\t\t\telse:\n\t\t\t\tbreak\n\t\t\n\t\t# Diagonal Down Left..\n\t\tfor i in range(1, 8):\n\t\t\tif X_BishopSquare-i >= 0 and Y_BishopSquare-i >= 0:\n\t\t\t\tif NewSquare == (NumberToRow(str(Y_BishopSquare-i))+str(X_BishopSquare-i+1)):\n\t\t\t\t\tif Board[X_BishopSquare-i][Y_BishopSquare-i] is not ChessPiece[:5]:\n\t\t\t\t\t\tBoard[X_BishopSquare][Y_BishopSquare] = 'Free'\n\t\t\t\t\t\tBoard[X_BishopSquare-i][Y_BishopSquare-i] = ChessPiece[:5]\n\t\t\t\t\t\treturn Bishop['CurrentSquare']\n\t\t\telse:\n\t\t\t\tbreak\n\n\t# If Both Bishops Don't Match..\n\treturn None", "def 
command_moves(board, locations):\n possible_moves = []\n buffers = [(1,0), (0,1), (-1,0), (0,-1)]\n\n for piece in locations:\n piece_moves = []\n\n for move in buffers:\n\n poss_move = return_valid_move(board, locations, piece, move)\n\n if poss_move:\n piece_moves.append(poss_move)\n\n possible_moves.append(piece_moves)\n\n return possible_moves", "def findPlacesToMove():\n movesDestinations = [];\n \n curY = curBlank[0];\n curX = curBlank[1];\n\n if(curY-1 >= 1): #UP\n movesDestinations.append((curY-1, curX));\n if(curY+1 <= n): #DOWN\n movesDestinations.append((curY+1, curX));\n if(curX-1 >= 1): #LEFT\n movesDestinations.append((curY, curX-1));\n if(curX+1 <= n): #RIGHT\n movesDestinations.append((curY, curX+1));\n \n return movesDestinations;", "def get_goat_possible_moves(self) -> List:\n moves = []\n for pos in self.get_all_positions():\n if pos.is_goat():\n addr_from = pos.address\n for addr_to in pos.piece.get_valid_moves():\n moves.append((addr_from, addr_to))\n\n return moves", "def get_moves(self):", "def get_moves(self):\n moves = []\n i, j = self._get_coordinates(0) # blank space\n\n if i > 0:\n moves.append(Puzzle(self._swap(i, j, i - 1, j))) # move up\n\n if j < self.PUZZLE_NUM_COLUMNS - 1:\n moves.append(Puzzle(self._swap(i, j, i, j + 1))) # move right\n\n if j > 0:\n moves.append(Puzzle(self._swap(i, j, i, j - 1))) # move left\n\n if i < self.PUZZLE_NUM_ROWS - 1:\n moves.append(Puzzle(self._swap(i, j, i + 1, j))) # move down\n\n return moves", "def moves(self):\n\n # define a full range, which we can compare against columns,\n # rows, or blocks. they're all the same when stored as sets.\n line = set(range(1, 10))\n moves = []\n\n # iterate every cell on the board\n for row in range(0, 9):\n for col in range(0, 9):\n\n # ignore this cell if it's already filled\n i = self._index(col, row)\n if self.data[i] is not None:\n continue\n\n # fetch the adjacent cells\n row_values = set(self._row(row))\n col_values = set(self._column(col))\n bck_values = set(self._block(col, row))\n\n # subtract the values present in the adjacent cells\n # (since this cell *can't* be of any of those values),\n # to leave the list of possibilities for this cell\n missing = line.difference(row_values, col_values, bck_values)\n\n # if there's only *one* possibility, we've found the\n # solution to this cell\n if len(missing) == 1:\n moves.append((col, row, missing.pop()))\n\n return moves", "def moves(self):\n\n moves = list()\n\n for row in range(HEIGHT):\n for col in range(WIDTH):\n\n move = (row, col)\n\n if self.board[row][col] == 9:\n moves.append(move)\n\n if self.board[row][col] == 1 or self.board[row][col] == 2:\n\n move = (row - 1, col)\n\n if self.board[row - 1][col] == 1 or self.board[row - 1][col] == 2:\n\n pass\n\n else:\n\n moves.append(move)\n\n return moves", "def get_moves(self):\n grid = self.model.grid\n # List of agents we can't overlap with\n no_overlap = [\"wall\", \"human\", \"zombie\"]\n\n if self.agent_type == \"zombie\" or \\\n (\"AvoidingZombie\" not in self.states and os.environ[\"mode\"] == \"5\"):\n no_overlap.append(\"road\")\n\n # Always give the option to stay on your current location(stand still)\n all_cells = self.neighbors()\n free_cells = [self.pos]\n\n # Get rid of cells that we may not move to by iterating through all\n # cells next to the agent, and only adding non-occupied cells\n for cell in all_cells:\n cell_occupied = False\n x, y = cell.pos\n # If there are agents in the current cell, and we are not allowed\n # to overlap with any of those agents, the cell 
is occupied.\n # Only add cells which are not occupied.\n if not grid.is_cell_empty((x, y)):\n for agent in grid[x][y]:\n if agent.agent_type in no_overlap:\n cell_occupied = True\n break\n if not cell_occupied:\n free_cells.append((x, y))\n return free_cells", "def get_slide_moves(position, board):\n r,q = position\n blocks = [p for p in board if board[p] == \"B\"]\n ran = range(-4,5)\n return [p for p in get_adjacents(position) if p not in blocks]", "def find_moves(self):\n\n from itertools import product\n free_position = self.find_free()\n return [list(free_position+i) for i in [[0,1],[1,0],[-1,0],[0,-1]] if tuple(i+free_position) in product(range(self.size),repeat=2)]", "def possible_moves(self, pos: Point) -> List[Point]:\n # logger.debug(f\"inside possible_moves {pos}\")\n available_squares = []\n for direction in Direction.cardinal():\n # logger.debug(f\"direction = {direction}\")\n neighbor = pos + direction\n # logger.debug(f\"neighbor = {neighbor}\")\n if neighbor.x < 1 or self.width - 2 < neighbor.x or neighbor.y < 1 or self.height - 2 < neighbor.y:\n # logger.debug(f\"{neighbor} not in bounds\")\n continue\n if self.can_carve(pos, direction):\n # logger.debug(f\"can_carve returned True pos={pos}, direction={direction}\")\n available_squares.append(neighbor)\n # logger.debug(f\"available squares:\")\n # for square in available_squares:\n # logger.debug(f\"square={square}\")\n # logger.add(\"debug.log\")\n return available_squares", "def _get_piece_moves(self, x, y):\n\n piece = self.get_piece(x, y)\n moves = []\n\n if not piece:\n return moves\n\n if piece.name == 'rook' or piece.name == 'queen':\n direcs = ['up', 'down', 'left', 'right']\n moves = [self._get_moves_indirection(x, y, direc) for direc in\n direcs]\n\n elif piece.name == 'bishop' or piece.name == 'queen':\n direcs = ['d1', 'd2', 'd3', 'd4']\n for direc in direcs:\n moves += self._get_moves_indirection(x, y, direc)\n\n elif piece.name == 'king':\n moves = [(x-1, y-1), (x-1, y), (x-1, y+1), (x, y-1),\n (x, y+1), (x+1, y-1), (x+1, y), (x+1, y+1)]\n\n elif piece.name == 'knight':\n moves = [(x-1, y-2), (x-2, y-1), (x-2, y+1), (x-1, y+2),\n (x+1, y+2), (x+2, y+1), (x+1, y-2), (x+2, y-1)]\n\n elif piece.name == 'pawn':\n if piece.color == ChessGame.BLACK:\n moves = [(x-1, y), (x-1, y-1), (x-1, y+1)]\n else:\n moves = [(x+1, y), (x+1, y-1), (x+1, y+1)]\n\n tmp = list(moves)\n for u, v in tmp:\n if v != y and not self.is_enemy(u, v, piece.color):\n moves.remove((u, v))\n\n if v == y and self.is_enemy(u, v, piece.color):\n moves.remove((u, v))\n\n mycolor = piece.color\n valid = set()\n for (u, v) in moves:\n if not self.in_bounds(u, v):\n continue\n\n if not self.get_piece(u, v): # board is blank\n valid.add((u, v))\n\n if self.is_enemy(u, v, mycolor):\n valid.add((u, v))\n\n return valid", "def moves(self, teleport=False):\n recv = [(self._size[y][x] - self._used[y][x], x, y)\n for x in range(self.xsize) for y in range(self.ysize)]\n recv.sort(reverse=True)\n send = [(self._used[y][x], x, y)\n for x in range(self.xsize) for y in range(self.ysize)\n if self._used[y][x] > 0]\n send.sort()\n # print(\"recv: {}...\".format(str(recv[:5])))\n # print(\"send: {}...\".format(str(send[:5])))\n moves = []\n for avail, x1, y1 in recv:\n for used, x0, y0 in send:\n if avail < used:\n break\n if teleport or (x0 == x1 and abs(y0 - y1) == 1) or (\n y0 == y1 and abs(x0 - x1) == 1):\n self.apply((x0, y0), (x1, y1))\n moves.append((self.score(), self.key(), self.save(), list(self.history)))\n self.undo()\n return moves", "def 
get_square_moves(self, moves):\r\n return self.board.get_square_moves(moves)", "def moves(self) -> List[List[PushState]]:\n # seen_moves = set()\n possible_moves = [[], []]\n\n if state := self.prune_states(self.state): # Could return None\n size = len(self.prune_states(state))\n else:\n return possible_moves\n\n for count, each_slot in enumerate(state):\n # for count, each_slot in reversed(list(enumerate(state))):\n if each_slot == \"L\" and not possible_moves[0]:\n next_state = deepcopy(state)\n next_state = tuple(self.push(list(next_state), count))\n next_state = self.prune_states(next_state)\n\n # if next_state not in seen_moves:\n # seen_moves.add(next_state)\n possible_moves[0].append(next_state)\n\n elif each_slot == \"R\" and not possible_moves[1]:\n next_state = deepcopy(state)\n next_state = tuple(self.push(list(next_state), count))\n next_state = self.prune_states(next_state)\n\n # if next_state not in seen_moves:\n # seen_moves.add(next_state)\n possible_moves[1].append(next_state)\n\n if possible_moves[0] and possible_moves[1]:\n break\n\n return possible_moves", "def make_move(self, state):\r\n # intially set drop phase to true\r\n drop_phase = True\r\n move = [] # list to make moves with to return\r\n succ = self.succ(state) # get the successor of this state\r\n # intial postion of board to set up most advantagous spot if its empty\r\n if sum(x.count(self.my_piece) for x in self.board) == 0 and self.board[2][2] == ' ':\r\n move.insert(0, (2, 2))\r\n return move\r\n \r\n # check the number of 'r' and 'b' on board if theres 4 of each drop phase is false\r\n if sum(x.count('r') for x in self.board) == 4 and sum(x.count('b') for x in self.board) == 4:\r\n drop_phase = False\r\n\r\n # if not during drop phase use minimax to make next move from one postion to next\r\n if not drop_phase:\r\n move = []\r\n d = self.Max_value(state, 0)\r\n val = d['val']\r\n m = d['move']\r\n p = d['pos']\r\n f = d['from']\r\n s = sorted(succ, key=lambda e: e['f'])\r\n moveto = s[-1]\r\n move.insert(1, (moveto['from'][0], moveto['from'][1]))\r\n move.insert(0, (moveto['pos'][0], moveto['pos'][1]))\r\n return move # return the from, to move\r\n\r\n else: #else use minimax and to make move during drop phase selecting spot to place AI piece\r\n d = self.Max_value(state, 0)\r\n val = d['val']\r\n m = d['move']\r\n p = d['pos']\r\n hold = []\r\n move = []\r\n n = None\r\n hold = []\r\n for s in succ:\r\n p = s['pos'][0]\r\n p1 = s['pos'][1]\r\n if s['f'] == val and state[p][p1] == ' ':\r\n hold.append(s)\r\n if len(hold) == 1:\r\n row = hold[0]['pos'][0]\r\n col = hold[0]['pos'][1]\r\n else:\r\n f = sorted(hold, key=lambda e: e['pos'])\r\n row = f[0]['pos'][0]\r\n col = f[0]['pos'][1]\r\n\r\n move.insert(0, (row, col)) # return the move \r\n return move", "def move(self, location):\n disp_x = location[0] - self._x_coord\n disp_y = location[1] - self._y_coord\n board = self._board\n\n # Instantiate dictionary of displaced locations to value they will take\n mov_map = dict()\n for position in self._area:\n mov_map[(position[0] + disp_x, position[1] + disp_y)] = board[position[0]][position[1]]\n\n # Clear previous locations\n for position in self._area:\n board[position[0]][position[1]] = \" \"\n\n # Place stones to displaced location\n for position in self._area:\n board[position[0] + disp_x][position[1] + disp_y] = \\\n mov_map[(position[0] + disp_x, position[1] + disp_y)]\n\n # Return the new stone locations for processing\n return set(mov_map.keys())", "def get_all_moves(self):\n # 2d matrix of 
true/false, true if something can be placed\n legal_move_board = []\n possible_move_list = []\n for row in range(self.size):\n move_row = []\n for col in range(self.size):\n empty = self.board[row][col].state == PegState.EMPTY\n move_row.append(empty)\n if empty:\n possible_move_list.append((row, col))\n legal_move_board.append(move_row)\n \n # every position where something can be placed (list of tuples) (Combined with above)\n \"\"\" possible_move_list = []\n for row in range(self.size):\n for col in range(self.size):\n if legal_move_board[row][col] == True:\n possible_move_list.append((row, col))\n \"\"\"\n return legal_move_board, possible_move_list", "def move_away(state, location):\n\n return move_relative(state, location, False)", "def get_moves(self):\n return self.piece_behavior.get_moves(self.board, self.position)", "def get_pawn_moves(self, state):\n pawn_moves = []\n\n if self.color == cc.WHITE_ACTIVE:\n forward_1 = add_vectors(self.coord, cc.V_UP)\n forward_2 = add_vectors(self.coord, cc.V_UP_2)\n attacks = get_crawler_moves(self.coord, cc.W_PAWN_CAPTURE_VECTORS)\n starting_rank = cc.RANK_2\n promo_rank = cc.RANK_8\n promo_pieces = cc.WHITE_PROMO\n enemy_set = cc.BLACK_PIECES\n elif self.color == cc.BLACK_ACTIVE:\n forward_1 = add_vectors(self.coord, cc.V_DOWN)\n forward_2 = add_vectors(self.coord, cc.V_DOWN_2)\n attacks = get_crawler_moves(self.coord, cc.B_PAWN_CAPTURE_VECTORS)\n starting_rank = cc.RANK_7\n promo_rank = cc.RANK_1\n promo_pieces = cc.BLACK_PROMO\n enemy_set = cc.WHITE_PIECES\n else:\n raise Exception(\"get_pawn_moves: Invalid Piece Color\")\n\n if validate_move(forward_1) and state.board[forward_1] == cc.NO_PIECE:\n if forward_1[0] == promo_rank:\n for p in promo_pieces:\n pawn_moves.append(cc.Action(self.string, self.coord, forward_1, promo=p))\n else:\n pawn_moves.append(cc.Action(self.string, self.coord, forward_1))\n if self.coord[0] == starting_rank and validate_move(forward_2) and state.board[forward_2] == cc.NO_PIECE:\n pawn_moves.append(cc.Action(self.string, self.coord, forward_2, en_p=forward_1))\n\n for attack in attacks:\n if state.board[attack] in enemy_set:\n if attack[0] == promo_rank:\n for p in promo_pieces:\n pawn_moves.append(cc.Action(self.string, self.coord, attack, capture=True, promo=p))\n else:\n pawn_moves.append(cc.Action(self.string, self.coord, attack, capture=True))\n # Make sure Pawns can attack en_passant squares\n elif attack == state.en_passant:\n pawn_moves.append(cc.Action(self.string, self.coord, attack, capture=True))\n\n return pawn_moves", "def openMoves(self):\n arr = []\n for y in range(0,HEIGHT):\n for x in range(0,WIDTH):\n t = self.getPawn(x,y)\n if(t!=None):\n for z in range(-1,2):\n if(self.legalMove(t,z)):\n #move , #newState\n arr.append((t,z))\n return arr", "def get_move_options(self, x, y):\r\n best = self._get_lowest_neighbor_value(x, y)\r\n moves = []\r\n for dx, dy in DijkstraMap.neighbors:\r\n tx, ty = x + dx, y + dy\r\n if self.point_in_map(tx, ty) and self.tiles[tx][ty] == best:\r\n moves.append( (dx, dy))\r\n return moves", "def get_hoop_location(ball_loc_x):\n if ball_loc_x > 47:\n hoop_loc_x = 88.65\n hoop_loc_y = 25\n else:\n hoop_loc_x = 5.35\n hoop_loc_y = 25\n\n return [hoop_loc_x, hoop_loc_y]", "def get_possible_moves(self, board: np.ndarray):\n board_size = board.shape[0]\n moves = []\n if abs(self.value) == 1:\n if self.start_row <= 2:\n directions = [np.array((1, -1)), np.array((1, 1))]\n else:\n directions = [np.array((-1, 1)), np.array((-1, -1))]\n else:\n directions = [np.array((-1, 1)), 
np.array((1, 1)), np.array((-1, -1)), np.array((1, -1))]\n for direction in directions:\n within_board = True\n i = 1\n while within_board:\n coord = self.coord + direction * i\n within_board = _check_if_position_on_board(coord, board_size)\n # break if first step is already out of board\n if not within_board:\n break\n value_board = board[coord[0], coord[1]]\n # break if there is a stone of them same player in the way\n if value_board < 0 and self.value < 0 or value_board > 0 and self.value > 0:\n break\n # if there is no stone, than add this to move list.\n if value_board == 0:\n moves += [{\"old_coord\": self.coord, \"new_coord\": coord, \"jumped_stones\": [], \"jumped_values\": 0,\n \"move_coords\": [coord]}]\n # if there is a stone of the enemy\n if (value_board < 0 < self.value) or (self.value < 0 < value_board):\n # check if it can be jumped\n coord_jump = coord + direction\n move_coords = [coord_jump.copy()]\n within_board_after_jump = _check_if_position_on_board(coord_jump, board_size)\n # break if place behind stone is out of border\n if not within_board_after_jump:\n break\n value_board_jump = board[coord_jump[0], coord_jump[1]]\n jumped_stones = []\n # break if there is no free place\n if value_board_jump != 0:\n break\n jumped_stones += [coord]\n moves_tmp = self.jump_chain(directions, board, coord_jump, value_board, jumped_stones, move_coords)\n if len(moves_tmp) > 0:\n moves += moves_tmp\n else:\n moves += [{\"old_coord\": self.coord, \"new_coord\": coord_jump, \"jumped_stones\": jumped_stones,\n \"jumped_values\": abs(value_board), \"move_coords\": [coord_jump]}]\n i += 1\n # break if normal stone, because they can only move one field\n if abs(self.value) == 1:\n break\n return moves", "def get_moves_for_square(self, square):\n (x,y) = square\n\n # determine the color of the piece.\n color = self[x][y]\n\n # skip empty source squares.\n if color==0:\n return []\n\n # search all possible directions.\n moves = []\n for direction in self.__directions:\n move = self._discover_move(square, direction)\n if move:\n # print(square,move,direction)\n moves.append(move)\n\n # return the generated move list\n return moves" ]
[ "0.71127635", "0.6942288", "0.6672812", "0.6465321", "0.6382628", "0.6379456", "0.6155536", "0.6119805", "0.6104786", "0.6047381", "0.6000614", "0.59988225", "0.5961775", "0.5960223", "0.59416145", "0.594005", "0.59387296", "0.58761156", "0.58495075", "0.5841314", "0.58346194", "0.5827303", "0.58222353", "0.57757545", "0.5767465", "0.57385886", "0.573241", "0.5729616", "0.5719847", "0.5716471" ]
0.74600714
0
this method parses the file line.txt to create a list of StopOfGraph objects, which is much easier to use to create a graph
def read_file(self, path, route_id):\n    #uncoded_route_id = route_id.decode("utf-8")\n    route_id = str(route_id)\n    path += "/line_" + route_id + ".txt"\n    with io.open(path, encoding="utf-8") as f:\n        lines = f.readlines()\n    stop_of_graph_list = list()\n    # So here we're examining the lines of the file\n    for line in lines[1:]:\n        line = line.strip()\n        if line != '':\n            stop_of_graph_list.append(StopOfGraph.StopOfGraph(line))\n    # We mustn't forget to give our bus line a name (stripped of its trailing newline)\n    self.line_id = lines[0].strip()\n    return stop_of_graph_list
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse(self, f):\n lines = []\n for line in f:\n _line = line.split(\"//\")[0].strip()\n if _line.startswith(\"(\"): # is a label\n label_name = _line[1:-1]\n self.labels[label_name] = len(lines) # line number / address of label\n elif _line:\n lines.append(_line)\n # else: it's just a whitespace/comment line (ignore)\n return lines", "def line_parser(path):\n lines = []\n with open(path, 'r') as input:\n lines = [line.rstrip().split(',') for line in input]\n lines = [\n [[float(x1), float(y1)],\n [float(x2), float(y2)]] \n for x1, y1, x2, y2 in lines]\n return lines", "def file_parse():\n\n\tfilename = input(\"Enter the file path for your graph: \")\n\ttarget = open(filename, 'r')\n\n\ttarget_lines = [] \t# List of lines from target file\n\t\n\t# Grab the graph count and node/edge count for the first graph\n\ti = 0\n\tfor line in target:\n\t\tif i == 0:\n\t\t\tgraph_count = int(line)\n\t\telif i == 1:\n\t\t\tnode_count = int(line)\n\t\telif i == 2:\n\t\t\tedge_count = int(line)\n\t\telse:\t\n\t\t\ttarget_lines.append(line.strip('\\n'))\n\t\ti += 1\n\n\treturn graph_create(target_lines, graph_count, node_count, edge_count)", "def translate_graph(lines: List[str]) -> Graph:\n nodes = []\n edges = []\n is_node = True\n\n # first_value used to offset node/edge ids to start from 0\n first = True\n first_value = 0\n\n for line in lines:\n a = line.split(\",\")\n\n if \"edge\" in line:\n is_node = False\n elif is_node:\n if first:\n first = False\n first_value = int(a[0])\n\n # Create node with Node(id, label, size)\n pos_x = float(a[4])\n pos_y = float(a[5])\n n = Node(int(a[0]),\n str(a[1]).replace(\"\\\"\", \"\"),\n int(float(a[2])),\n kp=KeyPoint(pos_x, pos_y, float(a[2])),\n pos=(float(a[4]),float(a[5])))\n\n if (a[1] == \"\"):\n print(\"Node \" + a[0] + \" missing label.\")\n exit(1)\n nodes.append(n)\n else:\n n1 = None\n n2 = None\n for node in nodes:\n if node.node_id == int(a[0]):\n n1 = node\n for node in nodes:\n if node.node_id == int(a[1]):\n n2 = node\n e = Edge(n1, n2, distance(n1.pos,n2.pos))\n edges.append(e)\n #all_edges.append(e)\n #edges.append((int(a[0]) - first_value, int(a[1]) - first_value))\n\n\n return Graph(nodes, edges, 1)", "def _read_data(filename):\n file = open(filename, \"r\")\n timestamps = []\n edges = []\n for line in file:\n # source target weight timestamp\n if line.startswith(\"%\"):\n continue\n spl = line.split()\n if len(spl) == 4:\n # store that stuff in triples (source, target, weight, timestamp)\n edges.append((int(spl[0]), int(spl[1]), int(spl[2]), int(spl[3])))\n timestamps.append(int(spl[3]))\n return edges, sorted(timestamps)", "def read_from_file(self, filename):\n with open(filename, 'r') as f:\n for line in f.read().splitlines():\n name, neighbours, r_table = line.split('!')\n\n self.add_new(name)\n if neighbours:\n for neighbour in neighbours.split(';'):\n try:\n self.add_neighbours(name, neighbour)\n except Exception as e:\n\n pass\n if r_table:\n for network in r_table.split(';'):\n net_name, distance = network.split(':')\n\n distance = int(distance)\n self.add_network(name, net_name, distance)", "def ParseGraph(filename):\n vertices = []\n edges = set([])\n\n for l in open(filename):\n fields = [int(f) for f in l.split()]\n vertex = fields.pop(0)\n incident = [tuple(sorted([vertex, f])) for f in fields]\n vertices.append(vertex)\n edges.update(incident)\n\n return vertices, list(edges)", "def __buildByFile( self, file ):\r\n file = open( file, 'r' ) # abre arquivo em modo leitura\r\n # retorna uma lista com strings separadas para 
cada linha lida do arquivo:\r\n lines = [ line.rstrip( '\\n' ).split( ' ' ) for line in file.readlines() ]\r\n # apenas converte todos os numeros de coordenadas da lista para inteiros:\r\n lines = [ [ int(line[0]), int(line[1]), int(line[2]) ] for line in lines ]\r\n # cria e adiciona todos os vertices a lista:\r\n for line in lines:\r\n self.vertexes.append( Vertex ( line[0] ) )\r\n self.matrix[ line[0] ] = {}\r\n \r\n # calculando os custos e criando todas as arestas do grafo:\r\n v = [] # criando copia da lista de vertices:\r\n for vertex in self.vertexes: v.append( vertex )\r\n \r\n for vertexIni in self.vertexes:\r\n if vertexIni in v:\r\n v.remove( vertexIni )\r\n coordinates = ( lines[ vertexIni.label-1 ][1], lines[ vertexIni.label-1 ][2] )\r\n for vertexEnd in v:\r\n distance = math.sqrt( ( lines[ vertexIni.label-1 ][1] - lines[ vertexEnd.label-1 ][1] )**2 + ( lines[ vertexIni.label-1 ][2] - lines[ vertexEnd.label-1 ][2] )**2 )\r\n edge = Edge( vertexIni, vertexEnd, distance )\r\n self.edges.append( edge )\r\n vertexIni.adjacenses.append( edge )\r\n edge = Edge( vertexEnd, vertexIni, distance ) #add esse\r\n vertexEnd.adjacenses.append( edge )\r\n self.matrix[ vertexIni.label ][ vertexEnd.label ] = distance\r\n self.matrix[ vertexEnd.label ][ vertexIni.label ] = distance\r\n self.sortEdges( vertexIni.adjacenses )\r\n \r\n self.sortEdges( self.edges ) # ordenando todas as arestas do grafo\r\n print 'grafo clique com', len( self.vertexes ), 'cidades criado.'", "def _return_node_lists(self, my_breaklines):\n my_filelines = self.file_lines\n v_start, c_start = my_breaklines\n\n for i, line in enumerate(my_filelines):\n if v_start == i:\n vehicle_part = line.strip().split(' ')\n self.info['NUMBER'], self.info['CAPACITY'] = int(vehicle_part[0]), int(vehicle_part[-1])\n if c_start <= i:\n c_part = line.strip().split(' ')\n c_store = list()\n for j in c_part:\n try:\n c_store.append(int(j))\n\n except ValueError:\n continue\n if c_store != []:\n if c_store[4]> 130:\n self.customers.append(\n Customer(c_store[0], c_store[1], c_store[2], c_store[3], c_store[4], c_store[5], c_store[6], 0))\n else:\n self.customers.append(\n Customer(c_store[0], c_store[1], c_store[2], c_store[3], c_store[4], c_store[5], c_store[6], 1))", "def process_lines(self, lines):\n line_index = 0\n n_lines = len(lines)\n while line_index < n_lines:\n if lines[line_index].startswith(\"HIERARCHY\"):\n line_index = self._read_skeleton(lines, line_index, n_lines)\n if lines[line_index].startswith(\"MOTION\"):\n self._read_frametime(lines, line_index+2)\n line_index = self._read_frames(lines, line_index+3, n_lines)\n else:\n line_index += 1", "def graph_create(line_list, graph_count, node_count, edge_count):\n\n\tlines = line_list\n\tgraphs = []\n\tx = 0\n\n\t# For each graph\n\tfor i in range(graph_count):\n\t\tprev_node = lines[0].split()[0]\n\t\tedges = []\n\t\tgraph = []\n\t\tnodes = {}\n\t\tshortest_count = float('inf')\n\t\tlongest_count = -1\n\t\tpath_count = 0\n\n\t\t# For each node in the graph, store it and its connected nodes in 'nodes'\n\t\tfor j in range(x, edge_count):\n\t\t\tcurr_node = lines[j].split()[0]\n\n\t\t\tif curr_node == prev_node:\n\t\t\t\tedges.append(lines[j].split()[1])\n\n\t\t\telse:\n\t\t\t\tedges = []\n\t\t\t\tedges.append(lines[j].split()[1])\n\t\t\t\n\t\t\tprev_node = curr_node\n\n\t\t\t# Initialize nodes values for current node and insert all connect nodes 'edges'\n\t\t\tnodes[int(lines[j].split()[0])] = [shortest_count, longest_count, path_count], edges\n\t\t\n\t\t# If the last node in 
the graph is reached, move to the next graph\n\t\tif j == edge_count-1 and i < graph_count-1:\n\t\t\tnode_count = int(lines[j+1])\n\t\t\tx = edge_count + 2\n\t\t\tedge_count += 2 + int(lines[j+2])\n\n\t\tgraph.append(nodes)\n\t\tgraphs.append(graph)\n\n\treturn graphs", "def parse_file(file_path):\n stations = defaultdict(dict) # Spares me 2 lines inside that loop\n for line in open(file_path):\n if '->' in line:\n origin, destiny = map(str.strip, line.split('->'))\n elif '-' in line:\n stations[origin][destiny] = [int(i) for i in line.split('-')]\n return stations", "def read_graph():\n return nx.read_edgelist('edges_new.txt', delimiter='\\t')", "def parse_lines_in_file(path):\n signals = []\n with open(path) as fp:\n for line in fp:\n # removes all text comments starting with # character\n parts = line.partition('#')\n line = parts[0]\n\n # removes starting and ending whitespace\n line = line.strip()\n if len(line) > 0:\n signals.append(line)\n return signals", "def build_graph(filepath):\n graph = defaultdict(list)\n with open(filepath, 'r') as file:\n for edge in file:\n head, tail = edge.split()\n graph[head].append(tail)\n return graph", "def parse(self):\n\t\tfirst = None\n\t\tf = open(self.input_file)\n\t\tfor line in f.readlines():\n\t\t\tif line.startswith(\"#\"):\n\t\t\t\tcontinue\n\t\t\ttry:\n\t\t\t\tflow,t,sequence,size = line.split()\n\t\t\texcept:\n\t\t\t\tcontinue\n\t\t\t# append data to a list of tuples\n\t\t\tflow = int(flow)\n\t\t\tt = float(t)\n\t\t\tsequence = int(sequence)\n\t\t\tif size == \"x\":\n\t\t\t\tcontinue\n\t\t\tsize = int(size)\n\t\t\tif not size == 0:\n\t\t\t\tif flow == 1:\n\t\t\t\t\tself.data1.append((t,sequence,size))\n\t\t\t\telif flow == 2:\n\t\t\t\t\tself.data2.append((t,sequence,size))\n\t\t\t\telif flow == 3:\n\t\t\t\t\tself.data3.append((t, sequence, size))\n\t\t\t\telif flow == 4:\n\t\t\t\t\tself.data4.append((t, sequence, size))\n\t\t\t\telif flow == 5:\n\t\t\t\t\tself.data5.append((t, sequence, size))\n\t\t\t\telse:\n\t\t\t\t\tprint \"Erroneous data: \",flow, t, sequence, size\n\t\t\t# Keep track of the minimum and maximum time seen\n\t\t\tif not self.min_time or t < self.min_time:\n\t\t\t\tself.min_time = t\n\t\t\tif not self.max_time or t > self.max_time:\n\t\t\t\tself.max_time = t\n\n\t\t\t# print len(self.data1),len(self.data2),len(self.data3),len(self.data4),len(self.data5)", "def read_edges(f=sys.stdin):\n edges = []\n k = ['first', 'last', 'capacity', 'flow', 'used']\n lines = f.readlines()\n for line in lines:\n v = [int(s) for s in line.split(\" \")] + [0, False]\n edges.append(dict(zip(k,v)))\n \n\n return edges", "def read_file(path):\n\tG = nx.Graph()\n\n\twith open(path, 'r') as in_file:\n\t\tfor line in in_file:\n\t\t\tcontents = line.split(\" \")\n\t\t\tu = int(contents[0])\n\t\t\tv = int(contents[1])\n\t\t\tstreet_type = int(contents[2])\n\t\t\ttime = int(contents[3])\n\t\t\tlength = int(contents[4])\n\t\t\tcost = 1/float(length)\n\t\t\t\n\t\t\tG.add_node(u)\n\t\t\tG.add_node(v)\n\t\t\tif street_type is 1:\n\t\t\t\tG.add_edge(u, v, street_type=street_type, time=time, length=length, cost=cost)\n\t\t\telse:\n\t\t\t\tG.add_edge(u, v, street_type=street_type, time=time, length=length, cost=cost)\n\t\t\t\tG.add_edge(v, u, street_type=street_type, time=time, length=length, cost=cost)\n\n\treturn G", "def extract_edges(self,fn):\n new_edges = {}\n with open (fn,'r') as f:\n for line in f:\n new_edges = EdgeParser.line_to_edges(self,line,new_edges)\n return new_edges", "def load_linelist(filename):\n linelist = []\n infile = 
open(filename)\n for row in infile:\n row = row.strip()\n if len(row)==0 or row[0] in '#%!@':\n continue\n g = row.split()\n wl = float(g[0])\n if len(g)>1:\n species = g[1]\n else:\n species = ''\n linelist.append((wl, species))\n infile.close()\n return linelist", "def addEdge_file(self, path):\n with open(path, 'r') as File:\n for line in File.readlines():\n ints = list(map(int, line.strip().split())) \n u = ints[0]\n v = ints[1:]\n for i in v:\n self.addEdge(u, i)", "def get_nodes_and_weight_from_file(file, template_start, template_end):\n with open(file, \"rt\") as f:\n data = f.readlines()\n edge_list = [] # This list stores the tuples of valid start and end regions\n edge_duration_dict = {} # Per tuple as key, the dict stores durations per travel as value\n # The following for loop calculates the duration per travel and adds it to edge_duration_dict\n for line in data[1:]: # Starts from 1 since there is a header line\n split_line = line.split(\",\")\n # I found out the some lines have wrong hours figures, such as 25h, so I elimenate them\n try: # I found out the some lines have wrong hours figures, such as 25h, so I elimenate them\n start_time = datetime.datetime.strptime(split_line[1], template_start)\n end_time = datetime.datetime.strptime(split_line[3], template_end)\n travel_duration = end_time-start_time\n except:\n pass\n if split_line[0] in VALID_NODES and split_line[2] in VALID_NODES:\n nodes_frm_to = (split_line[0], split_line[2]) # Defining the start and end regions as the edge\n # If the edge not in edge_list yet, append to the list, and add its duration to duration dict values\n if nodes_frm_to not in edge_list:\n edge_list.append(nodes_frm_to)\n edge_duration_dict[nodes_frm_to] = [travel_duration]\n # If the edge already in edge_list yet, just add its duration to duration dict values\n else:\n edge_duration_dict[nodes_frm_to].append(travel_duration)\n # At this point we have pairs of regions with a list of durations per each\n edge_and_duration_dict = {} # This dictionary stores the mean duration per edge\n for edge, durations in edge_duration_dict.items():\n duration_array = np.array(durations) # Turning the duration list into an array for time saving\n edge_and_duration_dict[edge] = duration_array.mean()\n # At this point we have pairs of regions (from - to) with the mean travel duration for each pair\n # The rest of the code prepares the information to be aligned with Node and Graph definitions\n neighbors_by_region = {}\n start_regions = set([tup[0] for tup in edge_list]) # These are the 'from' regions of every edge\n # For each one of the 'from' regions, make neighbors dict(region and duration)\n # and stores in neighbors_by_region\n for region in start_regions:\n neighbors_dict = {}\n for edge, duration in edge_and_duration_dict.items():\n if edge[0] == region:\n neighbors_dict[edge[1]] = duration\n neighbors_by_region[region] = neighbors_dict\n\n return neighbors_by_region", "def read_graph(file_name):\r\n with open(file_name, 'r') as f:\r\n lines = f.readlines()\r\n first_line = lines[0].strip().split()\r\n no_vertices = int(first_line[0])\r\n new_graph = UndirectedGraph(no_vertices)\r\n for line in lines[1:]:\r\n if line == \"\":\r\n continue\r\n line = line.strip().split()\r\n _from, _to, _cost = int(line[0]), int(line[1]), int(line[2])\r\n new_graph.add_edge(_from, _to, _cost)\r\n return new_graph", "def _parseLine(self, line, delimiter = \":\"):\r\n\t\tsplt = line.split(delimiter)\r\n\t\tinVec = self._parseVec(splt[0])\r\n\t\toutVec = 
self._parseVec(splt[1])\r\n\t\tif (len(splt) == 2):\r\n\t\t\tlabel = \"\"\r\n\t\telse:\r\n\t\t\tlabel = splt[2]\r\n\t\tself.data.append({'in':inVec, 'out':outVec, 'label':label})", "def file_parser(file_name):\n h = 480\n w = 640\n out = []\n with open(file_name, 'r') as f:\n line_num = 1\n for line in f:\n if line_num < 17:\n # Read to where data starts\n line_num += 1\n continue\n elif line_num > 74:\n break\n # print(list(map(int, line.strip().split(\" \"))))\n vals = line.split()\n # print(list(\"\".join(line)))\n # print(line.split())\n assert(float(vals[2]) < 640)\n assert(float(vals[3]) < 480)\n point = [float(vals[2]) * w, float(vals[3]) * h]\n # print(point)\n out.append(point)\n line_num += 1\n\n out.append([0,0])\n out.append([w-1, 0])\n out.append([0, h-1])\n out.append([w-1, h-2])\n return out", "def read_data(input_file):\n\n def process_line(labels, words):\n l = ' '.join([label for label in labels if len(label) > 0])\n w = ' '.join([word for word in words if len(word) > 0])\n lines.append((l, w))\n words = []\n labels = []\n return words, labels, lines\n\n rf = open(input_file, 'r')\n lines = [];\n words = [];\n labels = []\n for line in rf:\n word = line.strip().split(' ')[0]\n label = line.strip().split(' ')[-1]\n # here we dont do \"DOCSTART\" check\n\n if len(line.strip()) == 0: # and words[-1] == '.'\n words, labels, lines = process_line(labels, words)\n words.append(word)\n labels.append(label)\n rf.close()\n return lines", "def _get_objects(self,label_fh):\n objects = []\n for line in label_fh.readlines():\n try:\n object = {}\n line = line.replace(u'\\ufeff', '')\n if line != '':\n x1, y1, x2, y2, x3, y3, x4, y4= [int(i) for i in line.split(',')[:-1]]\n p1 = (x1, y1)\n p2 = (x2, y2)\n p3 = (x3, y3)\n p4 = (x4, y4)\n object['polygon'] = [p1,p2,p3,p4]\n objects.append(object)\n except:\n pass\n return objects", "def readEdges(self, fileName, format):\n f = open(fileName)\n if format == 'simple':\n edgesRaw = f.read().split(\"\\n\")\n\n if edgesRaw[-1] == '': edgesRaw = edgesRaw[:-1]\n\n for edge in edgesRaw:\n [startVertex, endVertex] = edge.split(\"--\")\n newEdge = Edge(Vertex(int(startVertex)), Vertex(int(endVertex)))\n self.addEdge(newEdge)", "def parse(self):\n count = [] #count for trainset_size\n with open(self.file) as f:\n for line in f:\n data = line.split(\" \")[0]\n filename = data[:-1]\n id = data[-1:]\n if (filename not in count):\n count.append(filename)\n\n acid = \"\"\n structure = \"\"\n with open(self.directory+\"/\"+filename+\".dssp\") as dssp:\n for i in range(28): #skip lines we don't need\n next(dssp)\n for line in dssp:\n if (line[9] != \" \" and line[10] == \" \" and line[11] == id and line[13] not in (\"*\",\"!\",\"B\",\"Z\",\"X\")):\n #amino acid sequence\n if (line[13].islower()):\n acid += \"C\"\n else:\n acid += line[13]\n\n #sequence of the structure\n if (line[16] in (\"H\",\"G\",\"I\")):\n structure += \"H\"\n elif (line[16] in (\"E\",\"B\")):\n structure += \"E\"\n else:\n structure += \"C\"\n\n if (len(count) > self.trainset_size):\n self.testset.append((acid,structure))\n else:\n self.trainset.append((acid,structure))", "def getGraph(self, fin):\r\n tmpList = fin.readline().split()\r\n # Number of Cells to be partitioned\r\n self.cells = int(tmpList[0])\r\n # Number of Connections or Nets\r\n self.conns = int(tmpList[1])\r\n # Number of Circuit Rows\r\n self.rows = int(tmpList[2])\r\n # Number of Circuit Columns\r\n self.cols = int(tmpList[3])\r\n # Number of available sites in the Circuit\r\n self.sitesNum = self.rows*self.cols\r\n 
\r\n \r\n self.winX = self.cols/4\r\n self.winY = self.rows/4\r\n \r\n # Add nodes from 0 to number of Cells to graph structure and initialize net array and net cost \r\n self.G.add_nodes_from(range(0,self.cells))\r\n for node in self.G.nodes():\r\n self.G.node[node][\"nets\"]=[]\r\n self.G.node[node][\"locked\"]=False\r\n \r\n # For every Net, add edges between corresponding nodes\r\n for net in range(0,self.conns):\r\n tmpList = fin.readline().split()\r\n numNodes = int(tmpList[0])\r\n srcNode = int(tmpList[1])\r\n #self.G.node[srcNode][\"nets\"].append(srcNode)\r\n for conn in range(2,numNodes+1):\r\n self.G.add_edge(srcNode, int(tmpList[conn]))\r\n self.G.node[int(tmpList[conn])][\"nets\"].append(srcNode)" ]
[ "0.6563644", "0.6518964", "0.61344117", "0.6097468", "0.604421", "0.6015144", "0.59337026", "0.59075713", "0.58815616", "0.58800524", "0.5870289", "0.5836382", "0.5812531", "0.5802812", "0.5800502", "0.5797739", "0.57955956", "0.57904834", "0.57658803", "0.5762743", "0.5750281", "0.5741711", "0.57323265", "0.57323086", "0.5706909", "0.5704234", "0.56987405", "0.5698515", "0.56983835", "0.5697242" ]
0.6849259
0
this function finds the complete list of stops based on the list of main stops the bus is going through
def find_complete_stops_list(self, list_main_stops):\n    # list_main_stops contains the main stops\n    # list_of_lists contains the little stops which are not in the timetable\n    list_of_lists = list()\n    previous_stop = list_main_stops[0]\n    for current_stop in list_main_stops[1:]:\n        missing_stops = self.find_shortest_path(previous_stop, current_stop)\n        missing_stops.pop(0)\n        missing_stops.pop()\n        list_of_lists.append(missing_stops)\n        previous_stop = current_stop\n    # Now we fuse everything, the main stops and the little stops\n    complete_stops_list = [list_main_stops[0], ]\n    for i, current_main_stop in enumerate(list_main_stops[1:]):\n        complete_stops_list += list_of_lists[i]\n        complete_stops_list.append(current_main_stop)\n    return complete_stops_list
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def info_for_model(stop_list, stops, route):\n\n # Need to know where the bus number 1 and 2 are\n # This if and elif were put in due to an error where the bus list for bus 1 would come up empty, but not sure if necessary\n if len(stops[0]) == 0:\n bus_1 = stops[1][len(stops[1]) - 1]\n elif len(stops[0]) != 0:\n bus_1 = stops[0][0]\n bus_2 = stops[1][0]\n\n # Create empty lists to hold the information for each bus\n stops_bus_1 = []\n stops_bus_2 = []\n stops_bus_3 = []\n\n # Ste bus_number to 3, we will start filling the buses from the end, the last bus first\n bus_number = 3\n\n # Populate our lists\n for i in stops[len(stops) - 1]:\n # Get the times for the buses at the given stop\n first_3_buses = get_due_time(str(i), route)\n\n if len(first_3_buses) == 0:\n # print('Something went wrong!')\n continue\n # Add in the delay\n get_delay(first_3_buses)\n\n # Have to check if the bus it at the first stop, in which case, we just say 'Starting stop' for previous_stop\n if i == stop_list[0]:\n previous_stop = 'Starting stop'\n # Else, we get the previous stop\n else:\n previous_stop = stop_list[stop_list.index(i) - 1]\n\n # If the bus is the last one, we will only append to bus_number_3\n if bus_number == 3:\n # If we reach the stop where bus number 2 is, we must append this stop to both bus_number_3 and bus_number2 and\n # decrease the bus_number counter\n if i == bus_2:\n bus_number -= 1\n stops_bus_3.append({'stopid':i, 'delay':first_3_buses[1]['delay'], 'arrival_hour':first_3_buses[1]['arrivaldatetime'][11:13], 'datetime':first_3_buses[1]['arrivaldatetime'], 'previous_stop':previous_stop})\n stops_bus_2.append({'stopid':i, 'delay':first_3_buses[0]['delay'], 'arrival_hour':first_3_buses[0]['arrivaldatetime'][11:13], 'datetime':first_3_buses[0]['arrivaldatetime'], 'previous_stop':previous_stop})\n else:\n stops_bus_3.append({'stopid':i, 'delay':first_3_buses[0]['delay'], 'arrival_hour':first_3_buses[0]['arrivaldatetime'][11:13], 'datetime':first_3_buses[0]['arrivaldatetime'], 'previous_stop':previous_stop})\n\n # Now, we keep adding bus 2 and bus 3\n elif bus_number == 2:\n # If we reach the stop where bus number 1 is, we must append this stop to both bus_number_3 and bus_number2 and\n # bus_number1 and decrease the bus_number counter\n if i == bus_1:\n bus_number -= 1\n stops_bus_3.append({'stopid':i, 'delay':first_3_buses[2]['delay'], 'arrival_hour':first_3_buses[2]['arrivaldatetime'][11:13], 'datetime':first_3_buses[2]['arrivaldatetime'], 'previous_stop':previous_stop})\n stops_bus_2.append({'stopid':i, 'delay':first_3_buses[1]['delay'], 'arrival_hour':first_3_buses[1]['arrivaldatetime'][11:13], 'datetime':first_3_buses[1]['arrivaldatetime'], 'previous_stop':previous_stop})\n stops_bus_1.append({'stopid':i, 'delay':first_3_buses[0]['delay'], 'arrival_hour':first_3_buses[0]['arrivaldatetime'][11:13], 'datetime':first_3_buses[0]['arrivaldatetime'], 'previous_stop':previous_stop})\n else:\n stops_bus_3.append({'stopid':i, 'delay':first_3_buses[1]['delay'], 'arrival_hour':first_3_buses[1]['arrivaldatetime'][11:13], 'datetime':first_3_buses[1]['arrivaldatetime'], 'previous_stop':previous_stop})\n stops_bus_2.append({'stopid':i, 'delay':first_3_buses[0]['delay'], 'arrival_hour':first_3_buses[0]['arrivaldatetime'][11:13], 'datetime':first_3_buses[0]['arrivaldatetime'], 'previous_stop':previous_stop})\n\n # Here, we are now appending all the buses, until we finally reach the source stop\n elif bus_number == 1:\n stops_bus_3.append({'stopid':i, 'delay':first_3_buses[2]['delay'], 
'arrival_hour':first_3_buses[2]['arrivaldatetime'][11:13], 'datetime':first_3_buses[2]['arrivaldatetime'], 'previous_stop':previous_stop})\n stops_bus_2.append({'stopid':i, 'delay':first_3_buses[1]['delay'], 'arrival_hour':first_3_buses[1]['arrivaldatetime'][11:13], 'datetime':first_3_buses[1]['arrivaldatetime'], 'previous_stop':previous_stop})\n stops_bus_1.append({'stopid':i, 'delay':first_3_buses[0]['delay'], 'arrival_hour':first_3_buses[0]['arrivaldatetime'][11:13], 'datetime':first_3_buses[0]['arrivaldatetime'], 'previous_stop':previous_stop})\n joined = [stops_bus_1, stops_bus_2, stops_bus_3]\n return joined", "def get_all_stops(bus_position, stop_ids, at_stop_id):\n # Create empty list to hold the stops between where each bus is currently and the source stop\n list_of_stops = []\n\n # Populate list\n for i in bus_position:\n # Create list for each bus (bus 1, bus 2, bus 3)\n bus = []\n found = False\n for j in stop_ids:\n # Search for the stop where the bus is at and change found to true\n if i == j:\n found = True\n # If found, then add the stop\n if found:\n bus.append(j)\n # If we reach the source stop, break, don't add anymore\n if j == int(at_stop_id):\n break\n # Append that bus to the list_of_stops list\n list_of_stops.append(bus)\n\n list_of_stops = [list_of_stops[i] for i in [0,1,2]]\n\n # Return every stop between where the buses are the source stop\n return list_of_stops", "def gen_stops():\r\n stop_ = list(nasal_stop)\r\n stop_.extend(voiced_stop)\r\n stop_.extend(unvoiced_stop)\r\n return stop_", "def stops_on_routes():\n routes = ['15', '46A', '14', '41B', '39A', '65', '40D', '11', '31', '27', '67', '79', '42', '66A', '33B', '140', '44', '83A', '27B', '38', '16C', '747', '41C', '39', '25', '239', '43', '70', '13', '150', '145', '77A', '184', '84', '61', '83', '40', '66', '15A', '123', '17A', '16', '14C', '9', '4', '37', '32', '33', '49', '56A', '151', '25A', '45A', '54A', '47', '18', '7', '17', '102', '120', '65B', '41', '122', '29A', '76', '68', '59', '25B', '69', '27A', '66B', '38B', '7D', '75', '15B', '84A', '63', '84X', '33X', '68A', '1', '76A', '7B', '270', '236', '130', '238', '220', '44B', '40B', '26', '32B', '8', '41A', '53', '67X', '104', '32A', '79A', '114', '185', '66X', '31B', '32X', '51X', '51D', '41X', '142', '111', '69X', '27X', '116', '46E', '161', '118', '25X', '38A', '33A', '31A']\n routes_and_stops={}\n for route in routes:\n routes_and_stops[route]=[] #new array value for each route key\n reader = csv.reader(open(\"../Data/Sorted Data/stopped_bus_data.csv\"))\n for line in reader:\n try:\n current_route=extract_bus_route(line[3])\n if int(line[13]) not in routes_and_stops[current_route]:\n routes_and_stops[current_route].append(int(line[13]))\n except:\n continue\n return routes_and_stops", "def next_bus(stop_point):\n stop = str(stop_point)\n r = requests.get('https://api-radon.tfl.gov.uk/StopPoint/' + stop_point + '/Arrivals')\n print(r)\n json_result = r.json()\n all_stops = json_result\n my_buses = []\n for i in all_stops:\n i = '{}, {}-min'.format(str(i['destinationName']),str(round(i['timeToStation']/60)))\n my_buses.append(i)\n return my_buses", "def _build_stations(self, stop_list):\n # stations = [] TODO: What is this for\n dists = self._euclidian_distances(stop_list)\n stations = self._calculate_y_lines(dists)\n return stations", "def get_nearest_busstops(lat1, lon1, r):\n \n all_busstops = Busstop.objects.all()\n nearest_busstops = []\n for bs in all_busstops:\n lat2 = bs.lat\n lon2 = bs.lon\n # calculate distance beween two 
geo-location\n dist = math.acos(math.sin(math.pi * lat1/180) * math.sin(math.pi * lat2/180)\n + math.cos(math.pi * lat1/180) * math.cos(math.pi * lat2/180)\n * math.cos(math.pi * (lon1-lon2)/180)) * 180/math.pi * 60 * 1.1515 * 1.609344\n \n if dist <= r:\n \n bs_tuple = (bs.stop_tag, bs.title, bs.lat, bs.lon, bs.stopId) # Busstop detail \n s_tuple_list = [] \n \n # find route and direction details for bustop 'bs' with tag stop_tag \n for s in Stop.objects.all().filter(stop__stop_tag=bs.stop_tag):\n agency = s.direction.route.agency.agency_tag\n route_tag = s.direction.route.route_tag\n direction_tag = s.direction.direction_tag\n stop_tag = bs.stop_tag\n pred_string = ''\n \n s_tuple = (s.direction.title, s.direction.route.title,\n s.direction.route.agency.title, pred_string) # Stop direction/route/agency details\n s_tuple_list.append(s_tuple) \n\n busstop = (bs_tuple, s_tuple_list)\n nearest_busstops.append(busstop)\n \n return nearest_busstops", "def determine_next_stop(current_location, stops):\n possible_stop = {}\n for stop in stops:\n # calculates the distance between the current location and the next possible stop\n calculated_distance = csv_reader.distance_lookup(current_location, stop)\n\n # adds a key value pair of possible trip (a tuple) and the distance of that trip\n possible_stop[(current_location, stop)] = float(calculated_distance)\n return possible_stop", "def stops_on_routes_with_direction():\n routes_and_stops = {}\n routes = ['102y', '102z', '104y', '104z', '111y', '111z', '114y', '114z', '116y', '116z', '118y', '11y', '11z', '120y', '120z', '122y', '122z', '123y', '123z', '130y', '130z', '13y', '13z', '140y', '140z', '142y', '142z', '145y', '145z', '14Cy', '14Cz', '14y', '14z', '150y', '150z', '151y', '151z', '15Ay', '15Az', '15By', '15Bz', '15y', '15z', '161y', '161z', '16Cy', '16Cz', '16y', '16z', '17Ay', '17Az', '17y', '17z', '184y', '184z', '185y', '185z', '18y', '18z', '1y', '1z', '220y', '220z', '236y', '236z', '238y', '238z', '239y', '239z', '25Ay', '25Az', '25By', '25Bz', '25Xy', '25Xz', '25y', '25z', '26y', '26z', '270y', '270z', '27Ay', '27Az', '27By', '27Bz', '27Xy', '27Xz', '27y', '27z', '29Ay', '29Az', '31Ay', '31Az', '31By', '31Bz', '31y', '31z', '32Ay', '32Az', '32By', '32Bz', '32Xy', '32Xz', '32y', '32z', '33Ay', '33Az', '33By', '33Bz', '33Xy', '33Xz', '33y', '33z', '37y', '37z', '38Ay', '38Az', '38By', '38Bz', '38y', '38z', '39Ay', '39Az', '39y', '39z', '40By', '40Bz', '40Dy', '40Dz', '40y', '40z', '41Ay', '41By', '41Bz', '41Cy', '41Cz', '41Xy', '41Xz', '41y', '41z', '42y', '42z', '43y', '43z', '44By', '44Bz', '44y', '44z', '45Ay', '45Az', '46Ay', '46Az', '46Ey', '47y', '47z', '49y', '49z', '4y', '4z', '51Dy', '51Dz', '51Xy', '53By', '53Bz', '53y', '53z', '54Ay', '54Az', '56Ay', '56Az', '59y', '59z', '61y', '61z', '63y', '63z', '65By', '65Bz', '65y', '65z', '66Ay', '66Az', '66By', '66Bz', '66Xy', '66Xz', '66y', '66z', '67Xy', '67Xz', '67y', '67z', '68Ay', '68Az', '68y', '68z', '69Xy', '69Xz', '69y', '69z', '70y', '70z', '747y', '747z', '75y', '75z', '76Ay', '76Az', '76y', '76z', '77Ay', '77Az', '79Ay', '79Az', '79y', '79z', '7By', '7Bz', '7Dy', '7Dz', '7y', '7z', '83Ay', '83Az', '83y', '83z', '84Ay', '84Az', '84Xy', '84Xz', '84y', '84z', '8y', '8z', '9y', '9z']\n for route in routes:\n routes_and_stops[route] = [] # new array value for each route key\n reader = csv.reader(open(\"../Data/Sorted Data/stopped_bus_data.csv\"))\n for line in reader:\n try:\n current_route = extract_route_and_direction(line[3])\n if int(line[13]) not in 
routes_and_stops[current_route]:\n routes_and_stops[current_route].append(int(line[13]))\n except:\n continue\n return routes_and_stops", "def get_times( stop_id ):\n\tstop_id = \"BUS\" + stop_id.upper( ).replace( \"BUS\", \"\" )\n\tapi_response = fetch( \n\t\t'http://myride.gocitybus.com/widget/Default1.aspx?pt=30&code=%s' % \n\t\tstop_id )\n\n\tif ( api_response.status_code == 200 ):\n\t\txml_response = parseString( api_response.content )\n\t\tstop = xml_response.getElementsByTagName( 'Stop' )[ 0 ]\n\t\tcurrent_stop = { 'stop_name': stop.getAttribute( 'name' ), 'buses':[]}\n\t\tbuses = stop.getElementsByTagName( 'Bus' )\n\t\tfor bus in buses:\n\t\t\troute_name = text_content( bus.getElementsByTagName( 'RouteName' )[ 0 ])\n\t\t\ttime_left = text_content( bus.getElementsByTagName( 'TimeTillArrival' )[ 0 ])\n\t\t\tcurrent_stop[ 'buses' ].append({ 'stop_code' : stop_id,\n\t\t\t 'route_name' : route_name, \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 'time_till_arrival' : time_left })\n\t\treturn current_stop\n\treturn {}", "def _parse_departures(self, data, stop, servernow):\n servernow.replace(second=0, microsecond=0)\n results = []\n departures = data.findall('./itdDeparture')\n for departure in departures:\n # Get Line Information\n origin, destination, line, ridenum, ridedir, canceled = self._parse_mot(departure.find('./itdServingLine'))\n\n if departure.find('./genAttrList/genAttrElem[value=\"HIGHSPEEDTRAIN\"]') is not None:\n line.linetype = LineType('train.longdistance.highspeed')\n elif departure.find('./genAttrList/genAttrElem[value=\"LONG_DISTANCE_TRAINS\"]') is not None:\n line.linetype = LineType('train.longdistance')\n\n # if ridenum is None:\n # ridedata = departure.find('./itdServingTrip')\n # if ridedata is not None:\n # ridenum = ridedata.attrib.get('tripCode', None)\n # if ridenum is not None:\n # ridenum = ridenum.strip()\n\n # Build Ride Objekt with known stops\n ride = Ride(line, ridenum)\n ride.direction = ridedir\n ride.canceled = canceled\n\n train_line = line.linetype in self.train_station_lines\n\n # todo: take delay and add it to next stops\n mypoint = self._parse_trip_point(departure, train_line=train_line)\n\n before_delay = None\n if mypoint.arrival:\n before_delay = mypoint.arrival.delay\n after_delay = None\n if mypoint.departure:\n after_delay = mypoint.departure.delay\n\n delay = None\n if departure.find('./itdServingLine/itdNoTrain'):\n delay = departure.find('./itdServingLine/itdNoTrain').attrib.get('delay', None)\n if delay is not None:\n delay = timedelta(minutes=delay)\n\n if delay is not None:\n if (mypoint.arrival and servernow < mypoint.arrival.livetime) or (mypoint.departure and servernow < mypoint.departure.livetime):\n before_delay = delay\n else:\n after_delay = delay\n\n prevs = False\n for pointdata in departure.findall('./itdPrevStopSeq/itdPoint'):\n point = self._parse_trip_point(pointdata, train_line=train_line)\n if point is not None:\n if before_delay is not None:\n if point.arrival is not None and point.arrival.delay is None and point.arrival.time + before_delay >= servernow:\n point.arrival.delay = before_delay\n if point.departure is not None and point.departure.delay is None and point.departure.time + before_delay >= servernow:\n point.departure.delay = before_delay\n prevs = True\n ride.append(point)\n\n pointer = ride.append(mypoint)\n\n onwards = False\n for pointdata in departure.findall('./itdOnwardStopSeq/itdPoint'):\n point = self._parse_trip_point(pointdata, train_line=train_line)\n if point is not None:\n if after_delay is not 
None:\n if point.arrival is not None and point.arrival.delay is None and point.arrival.time + after_delay >= servernow:\n point.arrival.delay = after_delay\n if point.departure is not None and point.departure.delay is None and point.departure.time + after_delay >= servernow:\n point.departure.delay = after_delay\n onwards = True\n ride.append(point)\n\n if not prevs and not onwards:\n ride.prepend(None)\n if origin is not None:\n ride.prepend(TimeAndPlace(Platform(origin)))\n\n ride.append(None)\n if destination is not None:\n ride.append(TimeAndPlace(Platform(destination)))\n\n # Return RideSegment from the Station we depart from on\n results.append(ride[pointer:])\n return Ride.Results(results)", "def buildStopsDict(self):\n \n if len(self.nodesDict) == 0:\n raise Exception('Nodes dictionary is empty!')\n if len(self.linksDict) == 0:\n raise Exception('Links dictionary is empty!')\n \n self.stopsByRoute = dict()\n self.stopsByNode = dict()\n arcpy.env.workspace = PublicTransit.WORKING_GDB\n \n tempStops = \"temp_stops\"\n tempStopsSp = \"temp_stops_sp\"\n \n # Delete temp_stops and temp_stops_sp feature classes if they exist.\n if arcpy.Exists(tempStops):\n arcpy.Delete_management(tempStops)\n if arcpy.Exists(tempStopsSp):\n arcpy.Delete_management(tempStopsSp)\n arcpy.CopyFeatures_management(PublicTransit.RTD_PATH + PublicTransit.RTD_STOPS,\n tempStops)\n \n # Project temp_stops to CA state plane and add XY.\n install_dir = arcpy.GetInstallInfo()['InstallDir']\n out_coordinate_system = os.path.join(install_dir, PublicTransit.NAD_83_DIRECTORY)\n arcpy.Project_management(tempStops, tempStopsSp, out_coordinate_system,\n \"NAD_1983_To_WGS_1984_1\")\n arcpy.AddXY_management(tempStopsSp)\n \n # Create a search cursor to traverse all stops.\n stops = arcpy.SearchCursor(tempStopsSp, \"\", \"\",\n \"CPT_STOPPOINTID; SCH_STOPPOINTSEQNO; \" +\n \"SCH_ROUTEID; SCH_PATTERNID; ROUTE_PATTERN; \" +\n \"SourceOID; POINT_X; POINT_Y\",\n \"ROUTE_PATTERN A; SCH_STOPPOINTSEQNO A\")\n numStops = int(arcpy.GetCount_management(tempStopsSp).getOutput(0))\n print \"Found %d stops\" % numStops\n \n p = index.Property()\n p.overwrite = True\n self.spIndex = index.Index(PublicTransit.SPATIAL_INDEX_FILE,properties=p)\n \n # For each stop determine the nearest network node.\n scount = 0\n icount = 0\n for s in stops:\n # only create stops for routes which exist in RTD\n if not s.ROUTE_PATTERN in self.transitRoutes:\n continue\n scount += 1\n st = TransitStop(s.CPT_STOPPOINTID, s.SCH_ROUTEID, s.SCH_PATTERNID,\n s.ROUTE_PATTERN, s.SourceOID, s.SCH_STOPPOINTSEQNO)\n # If the stop's linkId is in the links dictionary use the link from\n # and to node (these should all be bus routes since MTC's route\n # traversal FC was created for buses only at this time).\n if s.SourceOID in self.linksDict:\n link = self.linksDict[s.SourceOID]\n # Determine which node is nearest and snap to it.\n if self.__getDistance(s.POINT_X,\n s.POINT_Y,\n link.fromNode.x,\n link.fromNode.y) <= \\\n self.__getDistance(s.POINT_X,\n s.POINT_Y,\n link.toNode.x,\n link.toNode.y):\n st.tanaNode = link.fromNode.nodeId\n else:\n st.tanaNode = link.toNode.nodeId\n st.inRegion = True\n \n # The stop's link is not in linksDict. These are either stops \n # outside the region or non-bus routes for which there are no\n # route traversal edges. 
Do a link lookup from the Roadways\n # feature class.\n else:\n arcpy.env.workspace = PublicTransit.RTD_PATH\n roadwaysSearch = arcpy.SearchCursor(PublicTransit.ROADWAYS_FC,\n \"LinkId = \" + str(s.SourceOID),\n \"\", \"\", \"F_JNCTID; T_JNCTID\", \"\")\n for r in roadwaysSearch:\n fromNode = self.__getIdHash(r.F_JNCTID)\n toNode = self.__getIdHash(r.T_JNCTID)\n if fromNode in self.nodesDict and toNode in self.nodesDict:\n if self.__getDistance(s.POINT_X,\n s.POINT_Y,\n self.nodesDict[fromNode].x,\n self.nodesDict[fromNode].y) <= \\\n self.__getDistance(s.POINT_X,\n s.POINT_Y,\n self.nodesDict[toNode].x,\n self.nodesDict[toNode].y):\n st.tanaNode = fromNode\n else:\n st.tanaNode = toNode\n st.inRegion = True\n else:\n st.inRegion = False\n \n # Add the stop to stopsByRoute and stopsByNode dictionaries\n if s.ROUTE_PATTERN in self.stopsByRoute:\n self.stopsByRoute[s.ROUTE_PATTERN].append(st)\n else:\n self.stopsByRoute[s.ROUTE_PATTERN] = [st]\n if (st.tanaNode in self.stopsByNode):\n self.stopsByNode[st.tanaNode].append(st)\n else:\n self.stopsByNode[st.tanaNode] = [st]\n # add the stop node to the spatial index\n if st.tanaNode in self.nodesDict:\n icount += 1\n self.spIndex.insert(st.stopPointId,\n (self.nodesDict[st.tanaNode].x,\n self.nodesDict[st.tanaNode].y,\n self.nodesDict[st.tanaNode].x,\n self.nodesDict[st.tanaNode].y))\n del stops", "def create_list_of_stops(packages):\n stop_list = []\n for package in packages:\n stop_list.append(package.address_id)\n\n # removes any duplicates in list\n stop_list = list(dict.fromkeys(stop_list))\n return stop_list", "def get_stop_list(route, direction):\n # Open connection to database and execute query\n if direction == 'inbound':\n direction = 1\n elif direction == 'outbound':\n direction = 0\n db = MySQLdb.connect(user='lucas', db='summerProdb', passwd='hello_world', host='csi6220-3-vm3.ucd.ie')\n cursor = db.cursor()\n query = \"SELECT 46_a_route.stop_id \" \\\n \"FROM 46_a_route \" \\\n \"WHERE direction =\" + str(direction) + \\\n \" ORDER BY stop_sequence;\"\n cursor.execute(query)\n rows = cursor.fetchall()\n\n # Close connection\n db.close()\n\n # Create and populate a list with all the bus stop ids retrieved from the database\n stop_list = []\n for i in rows:\n stop_list.append(i[0])\n return stop_list", "def get_closest_bus_stop(due_time, stop_src, stop_ids, route_id):\n\n # Get index of where the SRC stop is in the tupple to serve as the high-bound, and store that position in original. Also, store the original due time, as it will be needed\n high_bound = 0\n original = 0\n original_due_time = due_time\n for i in range(0, len(stop_ids)):\n if str(stop_ids[i]) == stop_src:\n high_bound = i\n original = i\n break\n\n # Innitialize pointer to be halfway between the lowbound (set to 0 index) and the highbound (the SRC stop).\n pointer = original//4\n low_bound = 0\n\n # Optimally we want to find the stop where our bus is just 1 minute away, for better accuracy. But sometimes that is not possible, so we will\n # need to look for a bus further away. 
This variable, arrival_within_minutes, starts with 1 minutes, and will be increased as necessary.\n arrival_within_minutes = 1\n\n # Search until we find where the bus is\n while True:\n last_due_time = 0\n # Search while our due time is not 'Due' or within the specified minutes\n while due_time != 'Due' or int(due_time) > arrival_within_minutes:\n # Once more, get the buses for the stop we are currently looking at\n first_3_buses = get_due_time(str(stop_ids[pointer]), route_id)\n\n # Get just the first bus, since we already have the 3 buses from our SRC stop (this one is just looking for where one of those 3 buses is)\n possible_stop = filter_buses(first_3_buses)\n # Store the new due time, from the bus stop our binary algorithm selected\n new_due_time_due = possible_stop['duetime']\n\n # If the new due_time is the same as the last_due_time it means the algorithm got stuck without finding a better value, and we need to break, and change our\n # arrival_within_minutes for a longer time\n if new_due_time_due == last_due_time:\n break\n\n # If we found a 'Due' or within the arrival_within_minutes, return that index. That is the index of the stop where our bus is at/close to.\n if possible_stop['duetime'] == 'Due' or int(possible_stop['duetime']) <= arrival_within_minutes:\n # ('Found the bus with', new_due_time_due, 'minutes due time.')\n # This for loop is to check if the previous bus stop(s) have the same due time, and find a closer more accurae stop\n # print('Original pointer:', pointer)\n for i in range(pointer - 1, 0, -1):\n if new_due_time_due == (filter_buses(get_due_time(str(stop_ids[i]), route_id))['duetime']):\n pointer = i\n # print('New pointer:', pointer)\n else:\n break\n # Return the pointer, the index of the stop\n return pointer\n else:\n # If the due time at the possible stop is less than the one at SRC, we're on the right path, and need to look for a stop farther from the SRC\n if int(possible_stop['duetime']) < int(due_time):\n # Store the new, better due time\n due_time = possible_stop['duetime']\n # Change the highbound to the pointer and reduce our pointer again to halfway between lowbound and highbound\n high_bound = pointer\n pointer -= ((high_bound - low_bound)//4)\n else:\n # If the due time at the possible stop is bigger than the one at SRC, we've gone too far, and need to look for a stop closer to the SRC\n # The lowbound becomes the pointer and we move the pointer, again, to halfway between the lowbound and the highbound\n low_bound = pointer\n pointer += ((high_bound - low_bound)//4)\n # If we found a better (shortter) due time, we store this one for the next iteration and keep looking for an even better one\n last_due_time = new_due_time_due\n\n # If the algorithm comes to this part, it means we didn't find a stop where our bus was due wihin 1 (or more) minutes. So we need to increase the\n # arrival_within minutes to keep searching.\n arrival_within_minutes += 1\n\n # Reset our lowbound, highbound and pointer to restart the search\n low_bound = 0\n high_bound = original\n pointer = original // 4\n\n # If we start looking for a stop, previous to the SRC, were our bus has MORE duetime, we've gonne too far. Possibly, there are two buses running very close to one another,\n # and they may be due to our SRC stop at the same time (seen before too many times with the 17). 
In this case, we need to increase the original bound to take the stop where\n # we found the previous bus.\n if arrival_within_minutes > int(original_due_time):\n high_bound += 1\n return high_bound\n\n # Just a token return\n return 0", "def getBusSchedule():\n isFirstWord = True\n inFile = fileinput.input(files=('busRoutes.txt'))\n strWord = \"\"\n index = 0\n dictStops = {}\n strStops = []\n\n for line in inFile:\n isFirstWord = True\n index = 0\n for strWord in line.split():\n #skip the Bus number\n if isFirstWord:\n isFirstWord = False\n continue\n\n #skip the colons\n if strWord == \":\":\n continue\n\n if inFile.isfirstline():\n lstTimes = []\n dictStops[strWord] = lstTimes;\n strStops.append(strWord);\n else:\n lstTimes = dictStops[strStops[index]]\n lstTimes.append(strWord)\n dictStops[strStops[index]] = lstTimes;\n index+=1\n #for word in line\n #for line in file\n return dictStops;", "def get_stops(agency, route_tag, direction_tag):\n \n xml_query_string = 'http://webservices.nextbus.com/service/publicXMLFeed?command=routeConfig&a=' \\\n + agency + '&r=' + route_tag\n xml_request = requests.get(xml_query_string)\n stops = []\n root = ET.fromstring(xml_request.text)\n for route in root:\n for child in route:\n if child.tag == 'direction':\n if child.attrib['tag'] == direction_tag:\n for stop in child:\n stops.append(stop.attrib['tag'])\n return stops", "def test_get_stops(self):\n obj = self.client.bus.routes.get(self.random_route.id)\n stops = obj.stops\n self.assertEqual(type(stops), type([]))\n [self.assertEqual(type(i), BusStop) for i in stops]\n self.assertEqual(stops[0].longitude, stops[0].x)\n self.assertEqual(stops[0].latitude, stops[0].y)\n stops[0].wkt\n stops[0].geojson\n stops[0].__repr__()\n stops[0].__str__()\n stops[0].__unicode__()", "def FindAllRoutesRec(ConnectionInfo, EndStation, RouteConditions, TimeTableList, TimeTableIndex, StationHourIndex, PathInfo=[]):\r\n PathInfo = PathInfo + [ConnectionInfo]\r\n\r\n if Cond.IfTestRouteSearch:\r\n \tStations = GetAllStationsOfRoute(PathInfo)\r\n \tprint \"\\nStations of Path (%s): ++++++++\" % len(Stations)\r\n \tprint Stations\r\n \tprint \"Route Information:\"\r\n \tprint PrettyStringRouteInfo(PathInfo)\r\n\r\n # check successful termination\r\n # if len(PathInfo) > 1 and ConnectionInfo[ConnInfoInd['station_to']] == EndStation: \r\n if CheckIfPathTerminatesSuccessfully(ConnectionInfo, PathInfo, RouteConditions, EndStation):\r\n \tif Cond.IfTestRouteSearch:\r\n \t\tprint \"End Station is reached!\"\t\r\n \treturn [PathInfo]\r\n\r\n # current (this iteration's) path length\r\n CurPathLen = len(PathInfo)\r\n\r\n # get next connections\r\n start_station = ConnectionInfo[ConnInfoInd['station_to']]\r\n departure_hour = ConnectionInfo[ConnInfoInd['arrival_hour']] \t\r\n departure_min = ConnectionInfo[ConnInfoInd['arrival_min']]\r\n\r\n # TEST BU2019\r\n if False:\r\n\t print 'ConnInfoInd: ' + str(ConnectionInfo)\r\n\t print 'start_station,departure_hour,departure_min: %s, %s, %s' % (start_station, departure_hour, departure_min)\r\n\t time.sleep(0.1)\r\n \r\n # mandatory conditions\r\n WaitLimit = RouteConditions[Cond.MaxWaitingTimeAtStation][0]\r\n \r\n # get next connections from the station\r\n ConnectionInfoList = GetListOfNextConnections(TimeTableList, TimeTableIndex, StationHourIndex, start_station, departure_hour, departure_min, WaitLimit)\r\n\r\n # insert on-foot connections (Zu Fuss, ZF) to nearby stations into ConnectionInfoList\r\n # cancel (Tunc 4/3/2019)\r\n if False:\r\n\t StationMeasurementTime = 
ReqStationMeasureTime\r\n\t \r\n\t if Cond.MaxNumberOfSubsequentStationPassagesOnFoot in RouteConditions \\\r\n\t \tand RouteConditions[Cond.MaxNumberOfSubsequentStationPassagesOnFoot][0] > 0:\r\n\r\n\t\t if RouteConditions.has_key(Cond.MeasureStations):\r\n\t\t \tStationMeasurementTime = RouteConditions[Cond.MeasureStations][1]\r\n\t\t Connections = GetOnFootStationChangeConnections(start_station, departure_hour, departure_min, StationMeasurementTime)\r\n\t\t \r\n\t\t if Connections:\t\t# i.e. if Connections is not None\r\n\t\t \t(OnFootConnections1, OnFootConnections2) = Connections \r\n\t\t \tConnectionInfoList = AddConnectionsToListAfterDepartureTime(ConnectionInfoList, OnFootConnections1)\r\n\t\t \tConnectionInfoList = AddConnectionsToListAfterDepartureTime(ConnectionInfoList, OnFootConnections2)\r\n\r\n if Cond.IfTestRouteSearch:\r\n\t\tprint \"Next connections:\"\r\n\t\tfor c in ConnectionInfoList:\r\n\t\t\tprint c\r\n\t\ttime.sleep(Cond.TestWaitingTime)\r\n\r\n if not ConnectionInfoList:\t\t# Endstation: Node w/o successor nodes\r\n \treturn []\r\n\r\n PathInfoList = []\r\n\r\n for ConnectionInfo in ConnectionInfoList:\r\n\t\tres = Cond.CheckIfConnectionShouldBeSelected(ConnectionInfo, PathInfo, EndStation, RouteConditions)\r\n\r\n\t\t# test\r\n\t\tif Cond.IfTestRouteSearch:\r\n\t\t\tif res == None or res == False:\r\n\t\t\t\tprint \"CheckIfConnectionShouldBeSelected: %s\" % res\r\n\r\n\t \tif res == None: return[] \r\n\t \tif res == False: continue\r\n\r\n\t \t# recursive call\r\n\t\textended_paths = FindAllRoutesRec(ConnectionInfo, EndStation, RouteConditions, \\\r\n\t\t\tTimeTableList, TimeTableIndex, StationHourIndex, PathInfo)\r\n\r\n\t\t# report status\r\n\t\tif Cond.ReportDuringRouteSearch in RouteConditions:\r\n\t\t\tTimeIntv = default_timer() - Cond.SearchStartTime\r\n\t\t\tRouteSearchReportingIntervalInSeconds = RouteConditions[Cond.ReportDuringRouteSearch][0]\r\n\t\t\tif TimeIntv > Cond.RouteSearchReportCounter * RouteSearchReportingIntervalInSeconds:\r\n\t\t\t\tCond.RouteSearchReportCounter += 1 \r\n\t\t\t\tprint \"%s seconds passed... 
\" % \"{:.2f}\".format(TimeIntv)\r\n\t\t\t\tprint \"%s routes found so far, that passed all connection selection criteria (before route selection)\" \\\r\n\t\t\t\t\t% Cond.RouteCountAfterConnectionSelection\t\r\n\t\t\t\tprint \"%s routes found so far, that passed all route selection criteria (before final route filtering)\" \\\r\n\t\t\t\t\t% Cond.RouteCountAfterRouteSelection\t\r\n\t\t\t\tprint \"----------------------\"\t\r\n\r\n\t\t# append to path list\r\n\t\tfor p in extended_paths:\r\n\t\t\t# no need to recheck route unless current connection is the last one \r\n\t\t\t# LastConnection = (ConnectionInfo == p[-1])\r\n\t\t\tLastConnection = (CurPathLen == len(p) -1 and ConnectionInfo == p[-1])\r\n\t\t\t\r\n\t\t\tif LastConnection:\r\n\r\n\t\t\t\tif Cond.CheckIfRouteShouldBeSelected(p, RouteConditions):\r\n\t\t\t\t\tPathInfoList.append(p)\r\n\t\t\t\t\tCond.SelectedRoutes.append(ApplyAllRouteInfoCorrections(p))\r\n\r\n\t\t\t\t\t# evaluate route\r\n\t\t\t\t\t# cancel for BU2019\r\n\r\n\t\t\t\t\tif Cond.IfTestRouteSearch:\r\n\t\t\t\t\t\tprint \"%s routes found so far, that passed all connection selection criteria (before route selection)\" \\\r\n\t\t\t\t\t\t\t% Cond.RouteCountAfterConnectionSelection\r\n\t\t\t\t\t\tprint \"%s routes found so far, that passed all route selection criteria (before final route filtering)\\n\" \\\r\n\t\t\t\t\t\t\t% Cond.RouteCountAfterRouteSelection\t\t\r\n\t\t\t\t\t\tprint \"----------------------\"\t\r\n\r\n\t\t\t\t\t# test\r\n\t\t\t\t\tIncrementDicValue(Cond.RouteCountPerRouteLength, CurPathLen)\r\n\t\t\t\t\t\r\n\t\t\telse:\r\n\t\t\t\t# not last connection, no need to recheck the route\r\n\t\t\t\t# PathInfoList.append(p)\r\n\t\t\t\t# IncrementDicValue(SelectedRoutesPerLevel, CurPathLen)\r\n\t\t\t\tpass\r\n \r\n return PathInfoList", "def bus_details_SD(adjacent_list):\n\n temp = 0\n for x in results:\n if temp != x.get('ServiceNo'):\n temp = x.get('ServiceNo')\n count = 0\n adja_bus_stop = my_dictionary()\n adjacent_list.add(temp, adja_bus_stop)\n adja_bus_stop.add(count, [x.get('BusStopCode'), x.get('Distance')])\n count += 1\n else:\n adja_bus_stop.add(count, [x.get('BusStopCode'), x.get('Distance')])\n count += 1\n return adjacent_list", "def get_stop_info(stops):\n\tapi_url = 'http://webservices.nextbus.com/service/publicXMLFeed?command=predictions&a=sf-muni&stopId='\n\t\"\"\"Stop_dict = {bus_name:'38',\n\t\t\t\t\tminutes: 7,\n\t\t\t\t\tstop_location: 'Geary & Leavenworth'}\"\"\"\n\tfor stop in stops:\n\t\turl = api_url + str(stop)\n\treturn url", "def create_route(stop_list):\n delivery_route = []\n stop_list = stop_list\n current_location = 0\n shortest_distance = sys.maxsize\n shortest_trip = None\n\n while len(stop_list) != 1:\n # calculate possible next trips from current location\n possible = determine_next_stop(current_location, stop_list)\n\n for key, value in possible.items():\n if value < shortest_distance:\n shortest_trip = key\n shortest_distance = value\n\n # adds the shortest next stop to delivery route\n delivery_route.append(shortest_trip[1])\n\n # makes the next shortest stop the current location\n current_location = shortest_trip[1]\n\n # removes current location from stop list\n stop_list.remove(shortest_trip[1])\n\n # resets shortest_distance variable\n shortest_distance = sys.maxsize\n\n # adds last stop to delivery route\n delivery_route.append(stop_list[0])\n\n return delivery_route", "def plan_stop(wps, idx, min_decel, max_decel, speed_limit):\n\n if idx < 0:\n return []\n\n wps = wps[0: idx+1]\n\n # Calculate the acceleration 
needed to stop the car at the last waypoint in wps\n path_length = distance(wps, 0, len(wps)-1)\n a = -wps[0].twist.twist.linear.x**2/(2*path_length) # From the kinematic equations\n\n ''' Constrain the acceleration to be within min_decel and max_decel (note, a, min_decel and\n max_decel are all supposed to be negative, being decelerations) '''\n decel = max(a, max_decel)\n decel = min(decel, min_decel)\n\n wps[idx].twist.twist.linear.x = 0\n current_speed = 0\n current_i = idx-1\n while current_i >= 0 and (current_i == 0 or current_speed < wps[current_i-1].twist.twist.linear.x):\n dist = distance(wps, current_i, current_i+1)\n current_speed = (current_speed**2 - 2*decel*dist)**.5\n if current_i >= 1:\n current_speed = min(current_speed, wps[current_i-1].twist.twist.linear.x)\n else:\n current_speed = min(current_speed, speed_limit)\n wps[current_i].twist.twist.linear.x = current_speed\n current_i -= 1\n\n return wps", "def find_stop(self, stop_name):\n stops_found = []\n stop_name = stop_name.replace('’', \"'\")\n if '\\n' in stop_name:\n raise StopNotFound\n for stop in self:\n if (stop.properties['Stop Name'].title() == stop_name.title()) or (stop_name.title() in stop.properties['Nicknames']) or (stop_name in stop.properties['Nicknames']):\n if stop.properties['Last Edit'] != int(self.now().strftime(\"%j\")):\n self.reset_all\n stops_found.append(stop)\n if len(stops_found) == 0:\n best_ratio = 0\n best_stop = None\n for stop in self:\n ratio = fuzz.partial_ratio(stop.properties['Stop Name'].title(), stop_name.title())\n if ratio > 80 and ratio > best_ratio:\n best_ratio = ratio\n best_stop = stop\n elif ratio == 100:\n raise StopNotFound()\n if best_stop is not None:\n best_stop._map = self\n return best_stop\n else:\n raise StopNotFound()\n elif len(stops_found) == 1:\n stops_found[0]._map = self\n return stops_found[0]\n else:\n temp_num = 1\n for stop in stops_found:\n if not(stop.properties['Nicknames']):\n stop.properties['Nicknames'].append('Temp' + str(temp_num))\n temp_num += 1\n raise MutlipleStopsFound(stops_found)", "def railway_stops(osm_path): \n return retrieve(osm_path,'points',['railway'],**{'railway':[\"='halt' or \",\"='subway_entrance' or \",\"='tram_stop'\"]})", "def stop_areas(self):\n seen_ids = set()\n for route in self.routes:\n for stop in route:\n st = stop.stoparea\n if st.id not in seen_ids:\n seen_ids.add(st.id)\n yield st", "def test_trips_starting_and_ending_with_c_by_3_stops(self):\n railroad = trains.Railroad()\n self.assertEqual(sorted(railroad.find_routes('C', 'C', 3)), sorted(['CDC', 'CEBC']))", "def test_krakow_single_stop(self):\n # read html data from given template (not from web, links change periodically)\n with open(os.path.join(BASE_DIR, 'collector', 'test_resources', 'mpk_krakow_parse_bus_stop_res.html'), 'r') as file:\n html = file.read()\n\n self.assertListEqual(mpk_krakow.parse_bus_stop(html), [\n ('212', 'http://rozklady.mpk.krakow.pl/?lang=PL&rozklad=20171023&linia=213__1__23'),\n ('213', 'http://rozklady.mpk.krakow.pl/?lang=PL&rozklad=20171023&linia=213__2__32'),\n ('213', 'http://rozklady.mpk.krakow.pl/?lang=PL&rozklad=20171023&linia=213__4__32'),\n ('213', 'http://rozklady.mpk.krakow.pl/?lang=PL&rozklad=20171023&linia=213__3__23')\n ])", "def find_routes(\r\n stops,\r\n measurement_units = \"\"\"Minutes\"\"\",\r\n analysis_region = None,\r\n reorder_stops_to_find_optimal_routes = False,\r\n preserve_terminal_stops = \"\"\"Preserve First\"\"\",\r\n return_to_start = False,\r\n use_time_windows = False,\r\n time_of_day = None,\r\n 
time_zone_for_time_of_day = \"\"\"Geographically Local\"\"\",\r\n uturn_at_junctions = \"\"\"Allowed Only at Intersections and Dead Ends\"\"\",\r\n point_barriers = None,\r\n line_barriers = None,\r\n polygon_barriers = None,\r\n use_hierarchy = True,\r\n restrictions = None,\r\n attribute_parameter_values = None,\r\n route_shape = \"\"\"True Shape\"\"\",\r\n route_line_simplification_tolerance = None,\r\n populate_route_edges = False,\r\n populate_directions = True,\r\n directions_language = \"\"\"en\"\"\",\r\n directions_distance_units = \"\"\"Miles\"\"\",\r\n directions_style_name = \"\"\"NA Desktop\"\"\",\r\n travel_mode = \"\"\"Custom\"\"\",\r\n impedance = \"\"\"Drive Time\"\"\",\r\n gis = None):\r\n kwargs = locals()\r\n\r\n if stops is None:\r\n stops = default_stops\r\n\r\n if point_barriers is None:\r\n point_barriers = default_point_barriers\r\n\r\n if line_barriers is None:\r\n line_barriers = default_line_barriers\r\n\r\n if polygon_barriers is None:\r\n polygon_barriers = default_polygon_barriers\r\n\r\n if restrictions is None:\r\n restrictions = default_restrictions\r\n\r\n if attribute_parameter_values is None:\r\n attribute_parameter_values = default_attributes\r\n\r\n if route_line_simplification_tolerance is None:\r\n route_line_simplification_tolerance = default_tolerance\r\n\r\n param_db = {\r\n \"stops\": (FeatureSet, \"Stops\"),\r\n \"measurement_units\": (str, \"Measurement_Units\"),\r\n \"analysis_region\": (str, \"Analysis_Region\"),\r\n \"reorder_stops_to_find_optimal_routes\": (bool, \"Reorder_Stops_to_Find_Optimal_Routes\"),\r\n \"preserve_terminal_stops\": (str, \"Preserve_Terminal_Stops\"),\r\n \"return_to_start\": (bool, \"Return_to_Start\"),\r\n \"use_time_windows\": (bool, \"Use_Time_Windows\"),\r\n \"time_of_day\": (datetime, \"Time_of_Day\"),\r\n \"time_zone_for_time_of_day\": (str, \"Time_Zone_for_Time_of_Day\"),\r\n \"uturn_at_junctions\": (str, \"UTurn_at_Junctions\"),\r\n \"point_barriers\": (FeatureSet, \"Point_Barriers\"),\r\n \"line_barriers\": (FeatureSet, \"Line_Barriers\"),\r\n \"polygon_barriers\": (FeatureSet, \"Polygon_Barriers\"),\r\n \"use_hierarchy\": (bool, \"Use_Hierarchy\"),\r\n \"restrictions\": (str, \"Restrictions\"),\r\n \"attribute_parameter_values\": (FeatureSet, \"Attribute_Parameter_Values\"),\r\n \"route_shape\": (str, \"Route_Shape\"),\r\n \"route_line_simplification_tolerance\": (LinearUnit, \"Route_Line_Simplification_Tolerance\"),\r\n \"populate_route_edges\": (bool, \"Populate_Route_Edges\"),\r\n \"populate_directions\": (bool, \"Populate_Directions\"),\r\n \"directions_language\": (str, \"Directions_Language\"),\r\n \"directions_distance_units\": (str, \"Directions_Distance_Units\"),\r\n \"directions_style_name\": (str, \"Directions_Style_Name\"),\r\n \"travel_mode\": (str, \"Travel_Mode\"),\r\n \"impedance\": (str, \"Impedance\"),\r\n \"solve_succeeded\": (bool, \"Solve Succeeded\"),\r\n \"output_routes\": (FeatureSet, \"Output Routes\"),\r\n \"output_route_edges\": (FeatureSet, \"Output Route Edges\"),\r\n \"output_directions\": (FeatureSet, \"Output Directions\"),\r\n \"output_stops\": (FeatureSet, \"Output Stops\"),\r\n }\r\n return_values = [\r\n {\"name\": \"solve_succeeded\", \"display_name\": \"Solve Succeeded\", \"type\": bool},\r\n {\"name\": \"output_routes\", \"display_name\": \"Output Routes\", \"type\": FeatureSet},\r\n {\"name\": \"output_route_edges\", \"display_name\": \"Output Route Edges\", \"type\": FeatureSet},\r\n {\"name\": \"output_directions\", \"display_name\": \"Output Directions\", \"type\": 
FeatureSet},\r\n {\"name\": \"output_stops\", \"display_name\": \"Output Stops\", \"type\": FeatureSet},\r\n ]\r\n\r\n if gis is None:\r\n gis = arcgis.env.active_gis\r\n\r\n url = gis.properties.helperServices.asyncRoute.url\r\n return _execute_gp_tool(gis, \"FindRoutes\", kwargs, param_db, return_values, _use_async, url)", "def _construct_stop_list(web_crawl):\n # TODO: enable deselection of default and site dependent lists\n stop_list = _get_default_stop_list()\n if 'stopWords' in web_crawl:\n new_list = [w.strip() for w in web_crawl['stopWords'].split(',')]\n for word in new_list:\n stop_list.append(word)\n return stop_list" ]
[ "0.7045815", "0.6997676", "0.6822802", "0.6821542", "0.66974765", "0.6560196", "0.6510856", "0.6391744", "0.63823456", "0.6375212", "0.6310557", "0.63099355", "0.62924623", "0.6262218", "0.6256029", "0.61674494", "0.61529994", "0.6052756", "0.6003775", "0.5965702", "0.59637594", "0.5963225", "0.5945357", "0.59405637", "0.5918012", "0.5915577", "0.58904296", "0.58864826", "0.58765686", "0.5861258" ]
0.7113685
0
Return the queryset filtered by the search value.
def get_queryset(self):
    qs = super().get_queryset()
    search_value = self.request.GET.get('search_box')
    if search_value is not None:
        qs = qs.search_by(search_value)
    return qs
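For context, a minimal sketch of the manager-level search_by() that the view above relies on; the Item model and its title/description fields are invented names for illustration, not part of this dataset row.

from django.db import models
from django.db.models import Q

class ItemQuerySet(models.QuerySet):
    def search_by(self, value):
        # Case-insensitive match against a couple of text fields.
        return self.filter(Q(title__icontains=value) | Q(description__icontains=value))

class Item(models.Model):
    title = models.CharField(max_length=200)
    description = models.TextField(blank=True)

    objects = ItemQuerySet.as_manager()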
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def queryset(self, request, queryset):\n # Compare the requested value (either '80s' or 'other')\n # to decide how to filter the queryset.\n\n if self.value() is None:\n return queryset.all()\n\n return queryset.filter(firm__pk=self.value())", "def get_queryset(self):\n qs = super(SearchForm, self).get_queryset()\n\n qs = self.pre_search(qs)\n\n # Ensure that the form is valid\n if not self.is_valid():\n return qs\n\n # Do Searching\n q = self.cleaned_data.get('q', None).strip()\n if q:\n args = []\n for field in self.SEARCH_FIELDS:\n if self.CASE_SENSITIVE:\n kwarg = {field + '__contains': q}\n else:\n kwarg = {field + '__icontains': q}\n args.append(Q(**kwarg))\n if len(args) > 1:\n qs = qs.filter(reduce(lambda x, y: x | y, args))\n elif len(args) == 1:\n qs = qs.filter(args[0])\n\n qs = self.post_search(qs)\n\n return qs", "def queryset(self, request, queryset):\n if self.value() is None:\n return queryset\n return queryset.filter(data__qg_location__0__country__icontains=self.value())", "def _filter(self, _model, **kwargs):\n return _model.objects.filter(**kwargs)", "def filter_queryset(self, queryset):\n for name, value in self.form.cleaned_data.items():\n queryset = self.filters[name].filter(queryset, value)\n # assert isinstance(queryset, models.QuerySet), \\\n # \"Expected '%s.%s' to return a QuerySet, but got a %s instead.\" \\\n # % (type(self).__name__, name, type(queryset).__name__)\n return queryset", "def apply_search(self, queryset):\n self.form = self.form_class(self.request.GET)\n\n if not self.form.is_valid():\n return queryset\n\n data = self.form.cleaned_data\n\n if data.get('upc'):\n # If there's an exact UPC match, it returns just the matched\n # product. Otherwise does a broader icontains search.\n qs_match = queryset.filter(upc=data['upc'])\n if qs_match.exists():\n queryset = qs_match\n else:\n queryset = queryset.filter(upc__icontains=data['upc'])\n\n if data.get('title'):\n queryset = queryset.filter(title__icontains=data['title'])\n\n if data.get('product_class'):\n queryset = queryset.filter(product_class=data['product_class'])\n\n return queryset", "def get_queryset(self):\n qs = CentroDeSalud.objects.all()\n \n if 'search' in self.request.GET:\n q = self.request.GET['search']\n qs = qs.filter(\n Q(nombre__icontains=q) |\n Q(codigo_hpgd__icontains=q)\n )\n\n return qs", "def get_query(self,q,request):\n kwargs = { \"%s__icontains\" % search_field : q }\n return model.objects.filter(**kwargs).order_by(search_field)", "def set_search_filter(\n query: BaseQuery,\n obj_model: Model,\n search_field: str = None,\n search_value=None,\n *args,\n **kwargs,\n) -> BaseQuery:\n if search_field is not None and search_value is not None:\n column = next(\n (c for c in inspect(obj_model).columns if c.name == search_field), None\n )\n if column is not None:\n query = query.filter(column.like(f\"%{search_value}%\"))\n\n return query", "def queryset(self, request, queryset):\n\n print(\"queryset: self.value() = {} with type = {}\".format(self.value(), type(self.value())))\n\n filtered_pub_ids = []\n\n for pub in Publication.objects.all():\n print(\"queryset: pub.book_title_short.lower() = {} self.value() = {}\".format(pub.book_title_short.lower(), self.value()))\n if self.value() is None:\n filtered_pub_ids.append(pub.id)\n elif pub.book_title_short != None and self.value().lower() in pub.book_title_short.lower():\n filtered_pub_ids.append(pub.id)\n\n return queryset.filter(id__in=filtered_pub_ids)", "def filter(self, **search_terms):\n conditions = \" AND \".join(\n 
[f\"{term} = :{term}\"\n for term, value in search_terms.items()\n if value is not None]\n ).strip()\n\n if conditions:\n conditions = f\"WHERE {conditions}\"\n\n instances = self.db.query(f\"\"\"\n SELECT * from {self.table}\n {conditions}\n \"\"\", **search_terms).all(as_dict=True)\n\n return [\n self.model(**instance)\n for instance in instances\n ]", "def get_queryset(self) -> Union[QuerySet, List[TodoListModel]]:\n queryset = super().get_queryset()\n\n if 'name_search' in self.kwargs:\n queryset = queryset.filter(name__icontains=self.kwargs['name_search'])\n\n return queryset", "def filter_queryset(self, request, queryset, view):\n # filter by tags if available.\n tags = request.query_params.get(\"tags\", None)\n\n if tags and isinstance(tags, six.string_types):\n tags = tags.split(\",\")\n return queryset.filter(tags__name__in=tags)\n\n return queryset", "def filter(self, *q, **kwargs):\n return self._filter_or_exclude(*q, **kwargs)", "def filter(cls, *args, **kwargs) -> models.QuerySet:\n return cls.objects.filter(*args, **kwargs)", "def queryset(self, request, queryset):\n\n return (\n queryset if self.value() is None\n else queryset.filter(instrument__id=self.value())\n )", "def get_queryset(self):\n q = self.get_search_query()\n if q:\n # Send signal to record the view of this product\n product_search.send(sender=self, query=q, user=self.request.user)\n \n return Item.browsable.filter(title__icontains=q)\n else:\n return Item.browsable.all()", "def queryset(self, request, queryset):\n # Compare the requested value (either '80s' or '90s')\n # to decide how to filter the queryset.\n if self.value():\n return queryset.filter(state_pol=self.value())", "def get_queryset(self):\n queryset = self.queryset.order_by('Name')\n query = self.request.QUERY_PARAMS.get('q', None)\n unique_name = self.request.QUERY_PARAMS.get('unique_name', False)\n if query is not None:\n queryset = queryset.filter(Name__istartswith=query)\n if unique_name:\n queryset = queryset.distinct('Name')\n return queryset", "def get_queryset(self):\n\n # Get the keyword URL parameter value. 
Return empty string if the keyword is empty.\n # Filter the queryset based on the value of keyword and the queryset object's title.\n keyword = self.request.query_params.get('keyword', '')\n queryset = self.queryset.filter(title__icontains=keyword)\n\n if isinstance(queryset, QuerySet):\n # Ensure queryset is re-evaluated on each request.\n queryset = queryset.all()\n\n #return queryset\n return queryset.order_by('-first_published_at')", "def filter_by(cls, **kwargs):\n return cls.query.filter_by(**kwargs)", "def filter_names(self, qs, name, value):\n return qs.filter(name__in=value)", "def search(self, q, *args, **kwargs):\n\t\treturn self.__model.objects.search(q, *args, **kwargs)", "def simple_search(self, pattern):\n query = Q()\n for ptn in pattern.split():\n for field in SEARCH_FIELDS:\n query |= Q(**{'%s__icontains' % field: ptn})\n return self.get_queryset().filter(query)", "def queryset(self, request, queryset):\r\n # Compare the requested value to decide how to filter the queryset.\r\n if self.value():\r\n return queryset.filter(parent_id=self.value())\r\n return queryset", "def filter_search(self, request, search, view):\n raise NotImplementedError(\".filter_search() must be overridden.\")", "def get_queryset(self):\n queryset = Article.objects.all()\n username = self.request.query_params.get('username', None)\n if username is not None:\n queryset = queryset.filter(author__username__iexact=username)\n tag = self.request.query_params.get('tag', None)\n if tag is not None:\n queryset = queryset.filter(tags__tag_name__iexact=tag)\n search = self.request.query_params.get('search', None)\n if search is not None:\n queryset = queryset.filter(\n Q(title__icontains=search) |\n Q(slug__icontains=search) |\n Q(description__icontains=search) |\n Q(body__contains=search)\n )\n\n return queryset", "def get_queryset(self):\n\n return person_search_qs(self.request)", "def filter_status(self, queryset, name, value):\n\n return queryset.filter(status=value)", "def pre_search(self, qs):\n return qs" ]
[ "0.715664", "0.71247804", "0.70761645", "0.703622", "0.7035118", "0.68929595", "0.6786135", "0.67200524", "0.669921", "0.6685121", "0.6676126", "0.6639637", "0.66092557", "0.6591136", "0.6586185", "0.6503779", "0.6502065", "0.64994085", "0.6464354", "0.6445284", "0.64293295", "0.6427525", "0.6421018", "0.64196926", "0.6406675", "0.6377694", "0.63711077", "0.6367643", "0.6323015", "0.6314485" ]
0.791173
0
Return only the objects available for the user.
def get_queryset(self):
    queryset = super().get_queryset()
    return queryset.available_for_user(self.request.user)
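Again purely for context: one plausible shape for the available_for_user() queryset method assumed above; the is_public and owner fields are hypothetical.

from django.db import models
from django.db.models import Q

class DocumentQuerySet(models.QuerySet):
    def available_for_user(self, user):
        # Superusers see everything; others see public objects plus their own.
        if user.is_superuser:
            return self
        return self.filter(Q(is_public=True) | Q(owner=user))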
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_in_users(self, obj):\n try:\n user = self.context.get('request').user\n except Exception:\n # raise serializers.ValidationError('Could not access request.user')\n return False\n if user in obj.users.all():\n return True\n else:\n return False", "def get_viewable(self, user):\n return True", "def granted_users(self):\n return [\n u\n for u in User.objects.filter(is_active=True)\n if ManagedObject.objects.filter(UserAccess.Q(u) & Q(id=self.id)).exists()\n ]", "def viewable_only(self, user):\n # TODO: integrate this into questions() probably as standard and have\n # TODO: a separate all_questions() method to return all, including\n # TODO: hidden and deleted questions.\n return self.filter(Q(user=user) |\n Q(is_hidden=False) | Q(is_deleted=False))", "def is_accessible_by(self, user):\n return (self.public or\n (user.is_authenticated and\n (user.is_staff or self.users.filter(pk=user.pk).exists())))", "def available(self, request):\n username = request.query_params['username']\n resp_data = {}\n if User.objects.filter(username=username).exists():\n resp_data['available'] = False\n else:\n resp_data['available'] = True\n return Response(resp_data, status=status.HTTP_200_OK)", "def user_objects(cls, user):\n return cls.objects.filter(UserAccess.Q(user))", "def get_is_por_holder(self, obj):\n user = self.context['request'].user\n if not user.is_authenticated:\n return False\n # pylint: disable=no-member\n profile = UserProfile.objects.get(user=user)\n if obj in profile.get_council_privileges():\n return True\n return False", "def can_be_accessed(self, user):\n if self.shared_with_everyone:\n return True\n\n if self.user == user or self.users_allowed.filter(pk=user.pk).exists():\n return True\n\n for group in self.groups_allowed.all():\n if user.groups.filter(pk=group.pk).exists():\n return True\n\n return False", "def has_access(self, user):\n if user.is_superuser:\n return True\n return self.user_objects(user).filter(id=self.id).exists()", "def is_visible_to(self, user):\n return True", "def get_allowed_users(user):\n try:\n up = user.get_profile()\n except AttributeError:\n return db.UserProfile.objects.none()\n\n if user.has_perm(\"vnswww.userprofile_use_any\"):\n return db.UserProfile.objects.all()\n elif user.has_perm(\"vnswww.userprofile_use_org\"):\n return db.UserProfile.objects.filter(org=up.org)\n else:\n return db.UserProfile.objects.filter(pk=up.id)", "def has_object_permission(self, request, view, user):\n return user == request.user or request.user.is_superuser", "def get_available_invitees(self):\n return User.objects.exclude(pk=self.request.user.pk)", "def query_filters_restricted (self) :\n user = self.user_restriction\n if user is not None :\n return Q.created_by == user", "def getInterestedUsers():", "def user_has_access(self, user):\n if not user: return False\n query = db.Query(TaskListMember)\n query.filter('task_list =', self)\n query.filter('user =', user)\n return query.get()", "def get_is_interested(self, obj):\n # pylint: disable=no-member\n user = self.context['request'].user\n if not user.is_authenticated:\n return None\n profile = UserProfile.objects.get(user=user)\n return profile in obj.interested_users.all()", "def can_retrieve(self, user):\n return user.has_perm('agenda.can_see')", "def users_with_role(self):\r\n return User.objects.none()", "def get_queryset(self, *args, **kwargs):\n qs = super().get_queryset(*args, **kwargs)\n user = self.request.user\n if not user.is_authenticated:\n return qs.none()\n if self.is_get and not 
user.has_perm(\"users.view_user\"):\n return qs.filter(pk=user.pk)\n if self.is_update and not user.has_perm(\"users.change_user\"):\n return qs.filter(pk=user.pk)\n return qs", "def can_be_viewed_by(self,user):\n return True", "def get_viewable(self, user):\n if user.get('role') in ('admin', 'manager', 'engineer'):\n return True\n return user['name'] == self.doc.get('customer')", "def can_view(self, user):\r\n return True", "def get_queryset(self):\n return get_user_model().objects.none()", "def filter_for_user(self, user):\n query = Q(visibility=Document.PUBLIC) | \\\n Q(visibility=Document.PRIVATE, created_by=user) | \\\n Q(visibility=Document.ORG_ONLY,\n organization__memberships__user=user)\n\n if not user.external:\n query = query | Q(visibility=Document.ORG_ONLY_NO_EXTERNAL,\n organization__memberships__user=user)\n\n return super(DocumentManager, self).get_query_set().filter(query) \\\n .distinct()", "def _has_data(cls):\n return User.objects.count() > 0", "def get_is_por_holder(self, obj):\n user = self.context['request'].user\n if not user.is_authenticated:\n return False\n # pylint: disable=no-member\n profile = UserProfile.objects.get(user=user)\n if obj in profile.get_club_privileges():\n return True\n return False", "def user_has_access(self, user):\n if self.visibility == self.PUBLIC:\n return True\n elif self.visibility == self.PRIVATE and self.created_by == user:\n return True\n elif self.visibility in (self.ORG_ONLY, self.ORG_ONLY_NO_EXTERNAL):\n if user.external and self.visibility == self.ORG_ONLY_NO_EXTERNAL:\n return False\n elif self.organization.memberships.filter(user=user).count() >= 1:\n return True\n return False", "def get_queryset(self):\n return super().get_queryset().filter(user=self.request.user)" ]
[ "0.7111527", "0.69771284", "0.6968059", "0.6935108", "0.68565166", "0.68484193", "0.68204546", "0.67346036", "0.6713047", "0.6699355", "0.66954744", "0.6626666", "0.66193235", "0.659172", "0.65581006", "0.65302825", "0.6526931", "0.65193975", "0.65113837", "0.64569193", "0.6455344", "0.6451436", "0.6437083", "0.64138037", "0.64111304", "0.6409702", "0.6373034", "0.6353526", "0.632512", "0.6321817" ]
0.72600186
0
Test that the Jordan-Wigner convention places the Z-string on lower indices.
def test_jw_convention(self):
    qubit_op = QubitOperator('Z0 X1')
    transformed_op = reverse_jordan_wigner(qubit_op)
    expected_op = FermionOperator('1^')
    expected_op += FermionOperator('1')
    self.assertTrue(transformed_op == expected_op)
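A hedged forward-direction check of the same convention, assuming OpenFermion's public jordan_wigner transform: mapping a_1^ + a_1 should reproduce the Z0 X1 operator above, with the Z-string landing on the lower qubit index.

from openfermion import FermionOperator, jordan_wigner

op = FermionOperator('1^') + FermionOperator('1')
# The Pauli Z sits on qubit 0, below the X on qubit 1.
print(jordan_wigner(op))  # 1.0 [Z0 X1]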
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_compress_offset_less_len1(self):\n text = 'ababab'\n actual = LZ77.compress(text)\n expected = bytearray([32]) + bytearray(b'ab') + bytearray([0, 18])\n self.assertEqual(actual, expected)", "def test_zernike_indexing(self):\n basis = ZernikePolynomial(L=8, M=4, spectral_indexing=\"ansi\")\n assert (basis.modes == [8, 4, 0]).all(axis=1).any()\n assert not (basis.modes == [8, 8, 0]).all(axis=1).any()\n\n basis = ZernikePolynomial(L=10, M=4, spectral_indexing=\"fringe\")\n assert (basis.modes == [10, 0, 0]).all(axis=1).any()\n assert not (basis.modes == [10, 2, 0]).all(axis=1).any()\n\n basis = FourierZernikeBasis(L=8, M=4, N=0, spectral_indexing=\"ansi\")\n assert (basis.modes == [8, 4, 0]).all(axis=1).any()\n assert not (basis.modes == [8, 8, 0]).all(axis=1).any()\n\n basis = FourierZernikeBasis(L=10, M=4, N=0, spectral_indexing=\"fringe\")\n assert (basis.modes == [10, 0, 0]).all(axis=1).any()\n assert not (basis.modes == [10, 2, 0]).all(axis=1).any()", "def test_compress_offset_less_len2(self):\n text = 'abcdabcdab'\n actual = LZ77.compress(text)\n expected = bytearray([8]) + bytearray(b'abcd') + bytearray([0, 52])\n self.assertEqual(actual, expected)", "def test_Z_start(self):\t\t\n self.assertAlmostEqual(attempt.Z[0], 40)", "def test_compress_2_idenctical_char(self):\n text = 'aa'\n actual = LZ77.compress(text)\n expected = bytearray([0]) + bytearray(b'aa')\n self.assertEqual(actual, expected)", "def test_lowerFirst(string, expected):\n assert lowerFirst(string) == expected", "def test_decompress_offset_less_len1(self):\n b_array = bytearray([32]) + bytearray(b'ab') + bytearray([0, 18])\n actual = LZ77.decompress(b_array)\n expected = 'ababab'\n self.assertEqual(actual, expected)", "def test_lorentz_width():\n v0 = 100\n I = 1\n w = 0.5\n\n assert lorentz(v0 - w/2, v0, I, w) == approx(0.5)\n assert lorentz(v0 + w/2, v0, I, w) == approx(0.5)", "def test_compress_4_idenctical_char(self):\n text = 'bbbb'\n actual = LZ77.compress(text)\n expected = bytearray([32]) + bytearray(b'bb') + bytearray([0, 16])\n self.assertEqual(actual, expected)", "def test_standardize_xyz_string(self):\n xyz = \"\"\"\n \n \n C -0.67567701 1.18507660 0.04672449\n H -0.25592948 1.62415961 0.92757746\n H -2.26870864 1.38030564 0.05865317\n O -0.36671999 -0.21081064 0.01630374\n H -0.73553821 -0.63718986 0.79332805\n C -0.08400571 1.86907236 -1.19973252\n \n H -0.50375517 1.42998100 -2.08057962\n H -0.31518819 2.91354759 -1.17697025\n H 0.97802159 1.73893214 -1.20769117\n O -3.69788377 1.55609096 0.07050345\n O -4.28667752 0.37487691 0.04916102\n H -4.01978712 -0.12970163 0.82103635\n \n \"\"\"\n expected_xyz = \"\"\"C -0.67567701 1.18507660 0.04672449\nH -0.25592948 1.62415961 0.92757746\nH -2.26870864 1.38030564 0.05865317\nO -0.36671999 -0.21081064 0.01630374\nH -0.73553821 -0.63718986 0.79332805\nC -0.08400571 1.86907236 -1.19973252\nH -0.50375517 1.42998100 -2.08057962\nH -0.31518819 2.91354759 -1.17697025\nH 0.97802159 1.73893214 -1.20769117\nO -3.69788377 1.55609096 0.07050345\nO -4.28667752 0.37487691 0.04916102\nH -4.01978712 -0.12970163 0.82103635\"\"\"\n new_xyz = converter.standardize_xyz_string(xyz)\n self.assertEqual(new_xyz, converter.standardize_xyz_string(expected_xyz))", "def test_styblinski_tang(self):\n fun = get_problem('styblinski_tang', dimension=2)\n self.assertAlmostEqual(fun(self.array4), -78.332, places=3)", "def test_first_last_middle_name(self):\n\t\tformatted_name = get_formatted_name('Wolfgang','mozart','amadues')\n\t\tself.assertEqual(formatted_name,'Wolfgang Amadues 
Mozart')", "def test_zenith_on_moon(moonsky):\n\n zenith_source = moonsky\n zenith_source.check()\n\n zenith_source_lmn = zenith_source.pos_lmn.squeeze()\n assert np.allclose(zenith_source_lmn, np.array([0, 0, 1]))", "def test_single_word_with_junk(self):\n score = location.match_weight('clevedon UK', ['clevedon'])\n\n self.assertEqual(score, 1000)", "def LOWER_START():\n return 7", "def UPPER_START():\n return 1", "def test_decompress_offset_less_len2(self):\n b_array = bytearray([8]) + bytearray(b'abcd') + bytearray([0, 52])\n actual = LZ77.decompress(b_array)\n expected = 'abcdabcdab'\n self.assertEqual(actual, expected)", "def test_coord_preceding_fs(self):", "def test_truncated_wls(self):\n order = ['e', 'b', 'c', 'd']\n reconstructed = wls(self.dists, order=order)\n self.assertEqual(set(reconstructed.getTipNames()), set(order))", "def test_compress_2(self):\n text = 'abcdefdeabc'\n actual = LZ77.compress(text)\n expected = bytearray([3]) + bytearray(b'abcdef')\\\n + bytearray([0, 32]) + bytearray([0, 113])\n self.assertEqual(actual, expected)", "def test_lorentz_broadening():\n v0 = 100\n I = 10\n w = 1.0\n\n assert lorentz(v0, v0, I, w) == approx(5)", "def test_downcase_region(self):\n before_b = \"\"\"\\\n Americans live in the most severe weather-prone country on Earth. Each year, Americans cope with an average of 10,000 thunderstorms, 2,500 floods, 1,000 tornadoes, as well as an average of 6 deadly hurricanes. Potentially deadly weather impacts every American. Communities can now rely on the National Weather Service’s StormReady program to help them guard against the ravages of Mother Nature.\n\n Some 90% of all presidentially declared disasters are weather related, leading to around 500 deaths per year and nearly $14 billion in damage. StormReady, a program started in 1999 in Tulsa, OK, helps arm America's communities with the communication and safety skills needed to save lives and property– before and during the event. StormReady helps community leaders and emergency managers strengthen local safety programs.\n\n StormReady communities are better prepared to save lives from the onslaught of severe weather through better planning, education, and awareness. No community is storm proof, but StormReady can help communities save lives. Does StormReady make a difference?\n \"\"\"\n after_b = \"\"\"\\\n Americans live in the most severe weather-prone country on Earth. Each year, Americans cope with an average of 10,000 thunderstorms, 2,500 floods, 1,000 tornadoes, as well as an average of 6 deadly hurricanes. Potentially deadly weather impacts every American. Communities can now rely on the National Weather Service’s StormReady program to help them guard against the ravages of Mother Nature.\n\n some 90% of all presidentially declared disasters are weather related, leading to around 500 deaths per year and nearly $14 billion in damage. stormready, a program started in 1999 in tulsa, ok, helps arm america's communities with the communication and safety skills needed to save lives and property– before and during the event. stormready helps community leaders and emergency managers strengthen local safety programs.\n\n StormReady communities are better prepared to save lives from the onslaught of severe weather through better planning, education, and awareness. No community is storm proof, but StormReady can help communities save lives. 
Does StormReady make a difference?\n \"\"\"\n self.run_test(\n before_b=before_b,\n after_b=after_b,\n before_sel=(\"3.0\", \"4.0\"),\n after_sel=(\"3.0\", \"4.0\"),\n command_name=\"downcase-region\",\n )", "def lower_case_really():", "def zernIndex(j):\r\n n = int((-1.+np.sqrt(8*(j-1)+1))/2.)\r\n p = (j-(n*(n+1))/2.)\r\n k = n%2\r\n m = int((p+k)/2.)*2 - k\r\n\r\n if m!=0:\r\n if j%2==0:\r\n s=1\r\n else:\r\n s=-1\r\n m *= s\r\n\r\n return [n, m]", "def find_za(lyb_z):\n return (lyb_z + 1)*(lyb_rest/lya_rest)-1", "def test_initials_regex_3():\n ...", "def test_ignore_case(ext):\n\n cases = [\n 'Summer',\n 'SUMMER',\n 'summer',\n 'SuMmEr',\n ]\n\n for semester in cases:\n\n row = ext('abc %s 2012 def' % semester)\n\n assert row.offset == 4\n assert row.semester == semester\n assert row.year == '2012'", "def test_zsh_sbd(zh_default_fixture, text, expected_sents):\n segments = zh_default_fixture.segment(text)\n segments = [s.strip() for s in segments]\n assert segments == expected_sents", "def test_az_za():\n Nside = 128\n obs = observatory.Observatory(latitude, longitude, fov=20, nside=Nside)\n center = [0, 0]\n lon, lat = [5, 0]\n ind0 = hp.ang2pix(Nside, lon, lat, lonlat=True)\n lon, lat = hp.pix2ang(Nside, ind0, lonlat=True)\n za, az, pix = obs.calc_azza(center, return_inds=True)\n ind = np.where(pix == ind0)\n # lon = longitude of the source, which is set to 5deg off zenith (hence, zenith angle)\n assert np.isclose(np.degrees(za[ind]), lon)\n assert np.isclose(np.degrees(az[ind]), 90.0)", "def testSliceLowerHalfWithNegativeIndex(self):\n inst = WireData(b'0123456789')\n self.assertEqual(inst[:-5], WireData(b'01234'))" ]
[ "0.6065288", "0.59248656", "0.5903232", "0.5901062", "0.5656905", "0.56236637", "0.55714595", "0.5567075", "0.5561326", "0.5499509", "0.547115", "0.5381495", "0.53753805", "0.53739816", "0.53625846", "0.53501177", "0.534338", "0.5318929", "0.53051966", "0.5287866", "0.5277039", "0.52716714", "0.52526784", "0.5239159", "0.5237849", "0.5237802", "0.5236331", "0.5225084", "0.5216504", "0.5215832" ]
0.6192689
0
Plot the difference between models. Uses plotly as the backend.
def plot_difference_plotly(mdiff, title="", annotation=None):
    import plotly.graph_objs as go
    import plotly.offline as py

    annotation_html = None
    if annotation is not None:
        annotation_html = [
            [
                "+++ {}<br>--- {}".format(", ".join(int_tokens), ", ".join(diff_tokens))
                for (int_tokens, diff_tokens) in row
            ]
            for row in annotation
        ]

    data = go.Heatmap(z=mdiff, colorscale='RdBu', text=annotation_html)
    layout = go.Layout(width=950, height=950, title=title, xaxis=dict(title="topic"), yaxis=dict(title="topic"))
    py.iplot(dict(data=[data], layout=layout))
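A small usage sketch: the random matrix stands in for the topic-difference matrix this helper normally receives (e.g. the mdiff returned by gensim's LdaModel.diff), purely so the call is self-contained. py.iplot renders inline in a notebook session.

import numpy as np

mdiff = np.random.rand(10, 10)  # stand-in difference matrix
plot_difference_plotly(mdiff, title="Topic difference between two models")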
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def show_derivative(self):\n for trace in self.plotWidget.plotDataItems:\n dt = float(trace.attrs['dt'])\n dtrace = np.diff(trace.data)\n x = pgplot.make_xvector(dtrace, dt)\n self.plotWidget.plot(x, dtrace, pen=pg.mkPen('r'))", "def plot_difference_plotly(mdiff, title=\"\", annotation=None):\n import plotly.graph_objs as go\n import plotly.offline as py\n\n annotation_html = None\n if annotation is not None:\n annotation_html = [\n [\n \"+++ {}<br>--- {}\".format(\", \".join(int_tokens), \", \".join(diff_tokens))\n for (int_tokens, diff_tokens) in row\n ]\n for row in annotation\n ]\n\n data = go.Heatmap(z=mdiff, colorscale='RdBu', text=annotation_html)\n layout = go.Layout(width=950, height=950, title=title, xaxis=dict(title=\"topic\"), yaxis=dict(title=\"topic\"))\n py.iplot(dict(data=[data], layout=layout))", "def make_diff(m1, m2):\n return graph_objs.Scatter(\n x=[0, 0],\n y=[m1, m2],\n text=['median 1: ' + '{:0.2f}'.format(m1),\n 'median 2: ' + '{:0.2f}'.format(m2)],\n mode='lines',\n line=graph_objs.Line(\n width=2,\n color='rgb(0,0,0)'\n ),\n hoverinfo='text'\n )", "def plot_diff(self):\n if not(self.is_attribute(\"time\") & self.is_attribute(\"intensity_up\") & \n self.is_attribute(\"intensity_up_sigma\") &\n self.is_attribute(\"intensity_down\") & \n self.is_attribute(\"intensity_down_sigma\") &\n self.is_attribute(\"intensity_up_total\") &\n self.is_attribute(\"intensity_down_total\")):\n return\n fig, ax = plt.subplots()\n ax.set_title(\"Polarized intensity: I_up - I_down\")\n ax.set_xlabel(\"Time (microseconds)\")\n ax.set_ylabel('Intensity')\n \n np_time = numpy.array(self.time, dtype=float)\n np_up = numpy.array(self.intensity_up, dtype=float)\n np_sup = numpy.array(self.intensity_up_sigma, dtype=float)\n np_up_mod = numpy.array(self.intensity_up_total, dtype=float)\n np_down = numpy.array(self.intensity_down, dtype=float)\n np_sdown = numpy.array(self.intensity_down_sigma, dtype=float)\n np_down_mod = numpy.array(self.intensity_down_total, dtype=float)\n np_diff = np_up - np_down\n np_diff_mod = np_up_mod - np_down_mod\n np_sdiff = numpy.sqrt(numpy.square(np_sup)+numpy.square(np_sdown))\n\n ax.plot([np_time.min(), np_time.max()], [0., 0.], \"b:\")\n ax.plot(np_time, np_diff_mod, \"k-\",\n label=\"model\")\n ax.errorbar(np_time, np_diff, yerr=np_sdiff, fmt=\"ko\", alpha=0.2,\n label=\"experiment\")\n\n y_min_d, y_max_d = ax.get_ylim()\n param = y_min_d-(np_diff-np_diff_mod).max()\n\n ax.plot([np_time.min(), np_time.max()], [param, param], \"k:\")\n ax.plot(np_time, np_diff-np_diff_mod+param, \"r-\", alpha=0.7,\n label=\"difference\")\n ax.legend(loc='upper right')\n fig.tight_layout()\n return (fig, ax)", "def plot_discrepancy(self, axes=None, **kwargs):\n return vis.plot_discrepancy(self.target_model,\n self.target_model.parameter_names,\n axes=axes,\n **kwargs)", "def diff_plot(targets, predictions, filename, remove_outliers=False):\n\tif remove_outliers:\n\t\tindices = np.where(np.logical_not(np.logical_or(np.abs(predictions) > 10 * np.abs(targets), np.abs(predictions) < np.abs(targets) / 10.0)))\n\t\ttargets = targets[indices]\n\t\tpredictions = predictions[indices]\n\n\tif targets.shape[0] != 0:\n\t\tfig, ax = plt.subplots()\n\t\tfig.suptitle(str(targets.shape[0]) + ' samples, R2: ' + str(r2(targets, predictions)), fontsize=12)\n\t\taxes = plt.gca()\n\t\taxes.set_ylim(np.min(predictions), np.max(predictions))\n\t\taxes.set_xlim(np.min(targets), np.max(targets))\n\t\tax.scatter(targets, predictions, edgecolors=(0, 0, 
0))\n\t\tax.set_xlabel('Targets')\n\t\tax.set_ylabel('Predictions')\n\t\tax.plot([targets.min(), targets.max()], [targets.min(), targets.max()], 'k--', lw=4)\n\t\tplt.savefig(filename)\n\t\tplt.close()", "def plot(self) -> None:\n cw_l2_data_list = list(); cw_linf_data_list = list()\n\n for model in self.model_list:\n cw_l2_data_list.append(joblib.load(model + \"/stat/mse-rmse-si-mae-cw_l2_1.pkl\"))\n\n cw_l2_attack = list(zip(self.model_list, cw_l2_data_list))\n\n for model in self.model_list:\n cw_linf_data_list.append(joblib.load(model + \"/stat/mse-rmse-si-mae-cw_inf_1.pkl\"))\n\n cw_linf_attack = list(zip(self.model_list, cw_linf_data_list))\n\n # RMSE v.s. MAE over change budget\n # There will be one graph for each manipulation\n # CW_L2 ATTACK\n for datum in cw_l2_attack:\n ran_color_list = self._random_color_picker(2)\n fig, axis_1 = plt.subplots()\n\n # Generate x_axis\n x_axis = list()\n for key in datum[1]:\n if float(key) not in x_axis:\n x_axis.append(float(key))\n\n x_axis.sort()\n\n # Sort data in datum[1]\n data_dict = self._sort_dict(x_axis, datum[1])\n\n # PLOT RMSE ON AXIS 1\n # Generate y_axis ticks for RMSE\n rmse_values = list()\n for key in data_dict:\n rmse_values.append(data_dict[key][\"rmse\"])\n\n # Generate 10 ticks for the y_axis\n y_axis_ticks = np.linspace(0.0, 0.6, num=10, endpoint=True)\n\n # Plot RMSE\n axis_1.plot(x_axis, rmse_values, color=ran_color_list[0], linestyle=\"solid\")\n axis_1.set_xlabel(\"Perturbation Budget\")\n axis_1.set_ylabel(\"Root Mean Squared Error (RMSE)\", color=ran_color_list[0])\n axis_1.set_yticks(y_axis_ticks)\n \n for tick_label, tick_line in zip(axis_1.get_yticklabels(), axis_1.get_yticklines()):\n tick_label.set_color(ran_color_list[0])\n tick_line.set_color(ran_color_list[0])\n\n # PLOT MAE ON AXIS 2\n axis_2 = axis_1.twinx()\n\n # Generate y-axis ticks for MAE\n mae_values = list()\n for key in data_dict:\n mae_values.append(data_dict[key][\"mae\"])\n\n\n # Plot MAE\n axis_2.plot(x_axis, mae_values, color=ran_color_list[1], linestyle=\"solid\")\n axis_2.set_ylabel(\"Mean Absolute Error (MAE)\", color=ran_color_list[1])\n axis_2.set_yticks(y_axis_ticks)\n \n for tick_label, tick_line in zip(axis_2.get_yticklabels(), axis_2.get_yticklines()):\n tick_label.set_color(ran_color_list[1])\n tick_line.set_color(ran_color_list[1])\n\n model_tag = datum[0].split(\"/\"); model_tag = model_tag[-1]\n plt.savefig(self.save_path + \"/{}_rmse-and-mae-as-perturbation-budget-increases-for-cw_l2-attack-on-model-{}.png\".format(self.plot_name, model_tag), \n bbox_inches=\"tight\")\n plt.close()\n\n # CW_Linf ATTACK\n for datum in cw_linf_attack:\n ran_color_list = self._random_color_picker(2)\n fig, axis_1 = plt.subplots()\n\n # Generate x_axis\n x_axis = list()\n for key in datum[1]:\n if float(key) not in x_axis:\n x_axis.append(float(key))\n\n x_axis.sort()\n\n # Sort data in datum[1]\n data_dict = self._sort_dict(x_axis, datum[1])\n\n # PLOT RMSE ON AXIS 1\n # Generate y_axis ticks for RMSE\n rmse_values = list()\n for key in data_dict:\n rmse_values.append(data_dict[key][\"rmse\"])\n\n # Plot RMSE\n axis_1.plot(x_axis, rmse_values, color=ran_color_list[0], linestyle=\"solid\")\n axis_1.set_xlabel(\"Perturbation Budget\")\n axis_1.set_ylabel(\"Root Mean Squared Error (RMSE)\", color=ran_color_list[0])\n axis_1.set_yticks(y_axis_ticks)\n\n for tick_label, tick_line in zip(axis_1.get_yticklabels(), axis_1.get_yticklines()):\n tick_label.set_color(ran_color_list[0])\n tick_line.set_color(ran_color_list[0])\n\n # PLOT MAE ON AXIS 2\n axis_2 = 
axis_1.twinx()\n\n # Generate y-axis ticks for MAE\n mae_values = list()\n for key in data_dict:\n mae_values.append(data_dict[key][\"mae\"])\n\n # Plot MAE\n axis_2.plot(x_axis, mae_values, color=ran_color_list[1], linestyle=\"solid\")\n axis_2.set_ylabel(\"Mean Absolute Error (MAE)\", color=ran_color_list[1])\n axis_2.set_yticks(y_axis_ticks)\n \n for tick_label, tick_line in zip(axis_2.get_yticklabels(), axis_2.get_yticklines()):\n tick_label.set_color(ran_color_list[1])\n tick_line.set_color(ran_color_list[1])\n \n model_tag = datum[0].split(\"/\"); model_tag = model_tag[-1]\n plt.savefig(self.save_path + \"/{}_rmse-and-mae-as-perturbation-budget-increases-for-cw_linf-attack-on-model-{}.png\".format(self.plot_name, model_tag),\n bbox_inches=\"tight\")\n plt.close()\n \"RMSE and MAE as Perturbation Budget increases for CW_Linf attack on model {}\".format(model_tag)\n \n # Scattter Index over the change budget\n # All the manipulations will be put on the same graph.\n # CW_L2 ATTACK\n plt.figure()\n plt.xlabel(\"Perturbation Budget\"); plt.ylabel(\"Scatter Index\")\n ran_color_list = self._random_color_picker(len(cw_l2_attack)); i = 0\n\n # Find maximum scatter index value\n scatter_values = list()\n for datum in cw_l2_attack:\n for key in datum[1]:\n scatter_values.append(datum[1][key][\"scatter_index\"])\n\n # Generate y_axis ticks; generate 10 ticks\n y_axis_ticks = np.linspace(0.0, float(Decimal(str(max(scatter_values))) + Decimal(\"0.1\")), num=10, endpoint=True)\n plt.yticks(y_axis_ticks)\n\n # Generate x_axis\n x_axis = list()\n for datum in cw_l2_attack:\n for key in datum[1]:\n if float(key) not in x_axis:\n x_axis.append(float(key))\n\n x_axis.sort()\n\n formal_names = FormalNameMap()\n for datum in cw_l2_attack:\n values = list()\n data_dict = self._sort_dict(x_axis, datum[1])\n for key in data_dict:\n values.append(data_dict[key][\"scatter_index\"])\n\n # Append values to the plot\n line_name = datum[0].split(\"/\"); line_name = line_name[-1]\n formal_name = formal_names.getformalname(line_name) if formal_names.hasname(line_name) else line_name\n if \"vanilla\" in line_name:\n plt.plot(x_axis, values, color=ran_color_list[i], linewidth=3, linestyle=self._random_linestyle(), label=formal_name)\n\n else:\n plt.plot(x_axis, values, color=ran_color_list[i], linestyle=self._random_linestyle(), label=formal_name)\n \n i += 1\n\n plt.legend()\n plt.savefig(self.save_path + \"/{}_scatter-index-as-perturbation-budget-increases-for-cw_l2-attack.png\".format(self.plot_name),\n bbox_inches=\"tight\")\n plt.close()\n\n # CW_Linf ATTACK\n plt.figure()\n plt.xlabel(\"Perturbation Budget\"); plt.ylabel(\"Scatter Index\")\n ran_color_list = self._random_color_picker(len(cw_linf_attack)); i = 0\n\n # Find maximum scatter index value\n scatter_values = list()\n for datum in cw_linf_attack:\n for key in datum[1]:\n scatter_values.append(datum[1][key][\"scatter_index\"])\n\n # Generate y_axis ticks; generate 10 ticks\n y_axis_ticks = np.linspace(0.0, float(Decimal(str(max(scatter_values))) + Decimal(\"0.1\")), num=10, endpoint=True)\n plt.yticks(y_axis_ticks)\n\n # Generate x_axis\n x_axis = list()\n for datum in cw_l2_attack:\n for key in datum[1]:\n if float(key) not in x_axis:\n x_axis.append(float(key))\n\n x_axis.sort()\n\n formal_names = FormalNameMap()\n for datum in cw_linf_attack:\n values = list()\n data_dict = self._sort_dict(x_axis, datum[1])\n for key in data_dict:\n values.append(data_dict[key][\"scatter_index\"])\n\n # Append values to the plot\n line_name = datum[0].split(\"/\"); 
line_name = line_name[-1]\n formal_name = formal_names.getformalname(line_name) if formal_names.hasname(line_name) else line_name\n if \"vanilla\" in line_name:\n plt.plot(x_axis, values, color=ran_color_list[i], linewidth=3, linestyle=self._random_linestyle(), label=formal_name)\n\n else: \n plt.plot(x_axis, values, color=ran_color_list[i], linestyle=self._random_linestyle(), label=formal_name)\n \n i += 1\n\n plt.legend()\n plt.savefig(self.save_path + \"/{}_scatter-index-as-perturbation-budget-increases-for-cw_linf-attack.png\".format(self.plot_name),\n bbox_inches=\"tight\")\n plt.close()", "def plotlyPlot():\n # ssr\n ssr = gradientDescent(X, y)[1]\n # number of iterations \n iterations = np.arange(0, len(ssr), 1)\n\n # use plotly to show graph\n fig = px.line(x=iterations, y=ssr, template=\"plotly_dark\", \n labels=dict(x=\"Iterations\", y=\"Sum of squared reiduals\"),\n title=\"Evolution of Sum of squared residuals by iteration\")\n \n # update xticks\n fig.update_layout(\n xaxis = dict(\n tickmode = \"linear\",\n tick0 = 0,\n dtick = 5\n )\n )\n # show plot\n plot(fig)", "def plot_dif_eq(self):\n try:\n self.canvas.get_tk_widget().pack_forget()\n self.toolbar.pack_forget()\n except AttributeError:\n pass\n\n f = Figure(figsize=(8, 8), dpi=100)\n p = f.add_subplot(111)\n\n p.plot(self.model.ex.x_coord_plot, self.model.ex.y_coord_plot, c = 'C6')\n p.scatter(self.model.ex.x_coord, self.model.ex.y_coord, c = 'C6')\n p.plot(self.model.eu.x_coord, self.model.eu.y_coord, marker='o')\n p.plot(self.model.ieu.x_coord, self.model.ieu.y_coord, marker='o')\n p.plot(self.model.rk.x_coord, self.model.rk.y_coord, marker='o')\n\n p.set_ylabel('y')\n p.set_xlabel('x')\n\n p.legend(['Exact', 'EU', \"IEU\", \"RK\"])\n p.set_title(\"Solutions\")\n if max(self.model.ex.y_coord_plot) >= 1e5 or max(self.model.eu.y_coord) >= 1e5 \\\n or max(self.model.ieu.y_coord) >= 1e5 or max(self.model.rk.y_coord) >= 1e5:\n p.set_ylim([-100, 100])\n\n if min(self.model.ex.y_coord_plot) <= -1e5 or min(self.model.eu.y_coord) <= -1e5 \\\n or min(self.model.ieu.y_coord) <= -1e5 or min(self.model.rk.y_coord) <= -1e5:\n p.set_ylim([-100, 100])\n\n self.canvas = FigureCanvasTkAgg(f, self.f_left)\n self.canvas.draw()\n self.canvas.get_tk_widget().pack(side=tk.LEFT, fill=tk.BOTH, expand=False)\n\n self.toolbar = NavigationToolbar2Tk(self.canvas, self.f_left)\n self.toolbar.update()\n\n self.canvas._tkcanvas.pack(side=tk.TOP, fill=tk.BOTH, expand=False)", "def display_comparison(self, X_val, y_val):\n import matplotlib.pyplot as plt\n x = []\n y = []\n for model_tuple in self.model_list:\n x.append(model_tuple[1])\n y.append(model_tuple[0].score(X_val, y_val))\n plt.scatter(x, y)\n plt.show()", "def plotDifference(path1, path2, architecture):\n network1 = loadNetwork(path1, architecture)\n network2 = loadNetwork(path2, architecture)\n network1_dict = processNetwork(network1)\n network2_dict = processNetwork(network2)\n difference_dict = {}\n\n for name, module in network1_dict.items():\n if network1_dict[name].shape == network2_dict[name].shape:\n difference_dict[name] = np.subtract(network2_dict[name], network1_dict[name])\n else:\n print(\"Input networks must be of the same architecture\")\n break\n\n plotNetwork(difference_dict, architecture)", "def plot_subtract_from_data_all(self):\n f, axes = plt.subplots(2, 3, figsize=(16, 8))\n\n self.subtract_from_data_plot(ax=axes[0, 0], text='Data')\n self.subtract_from_data_plot(ax=axes[0, 1], text='Data - Point Source', point_source_add=True)\n self.subtract_from_data_plot(ax=axes[0, 
2], text='Data - Lens Light', lens_light_add=True)\n self.subtract_from_data_plot(ax=axes[1, 0], text='Data - Source Light', source_add=True)\n self.subtract_from_data_plot(ax=axes[1, 1], text='Data - Source Light - Point Source', source_add=True,\n point_source_add=True)\n self.subtract_from_data_plot(ax=axes[1, 2], text='Data - Lens Light - Point Source', lens_light_add=True,\n point_source_add=True)\n f.tight_layout()\n f.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0., hspace=0.05)\n return f, axes", "def check_model_performances(X,Y, model,show=False):\n #model.fit(X, Y)\n predictions = model.predict(X)\n \n predictions = predictions#.reshape(-1,1)\n \n # ######## Computes MSE ####### \n MSE = mean_squared_error(Y, predictions)\n print(f'\\nMSE : {MSE}')\n \n # ######## Computes R2 ####### \n R2 = r2_score(Y, predictions)\n print(f'R2 : {R2}')\n \n # ######## Plot Model predictions vs. target ####### \n if show:\n fig = go.Figure()\n \n fig.add_trace(go.Scatter(y=Y,\n mode='lines',\n name='target'))\n fig.add_trace(go.Scatter(y=predictions\n ,\n mode='lines',\n name='predictions'))\n \n fig.show()", "def plot_graph(self, dataset):\n data = self.data\n diagrams = []\n\n for time_stamp, data_tag in dataset:\n data_x, data_y = [], []\n for item in data:\n data_x.append(item[time_stamp])\n data_y.append(item[data_tag])\n diagrams.append(Scatter(x=data_x, y=data_y, mode='markers'))\n\n layout = plotly.graph_objs.Layout(yaxis=dict(autorange='reversed'))\n data = Data(diagrams)\n fig = plotly.graph_objs.Figure(data=data, layout=layout)\n plotly.plotly.plot(fig, filename='exo-line')", "def plot(self):\n\t\tself.plotOfTF().plot()", "def plot_growth_model(\n ts: ArrayLike,\n ys: ArrayLike,\n model: Optional[BaseModel] = None,\n x_label: str = TIME_H,\n y_label: str = \"Growth\",\n x_lim: Tuple[float, float] = (0.0, 20.0),\n title: Optional[str] = None,\n ax: Optional[plt.Axes] = None,\n data_color: str = \"C1\",\n model_color: str = \"C2\",\n growth_period_color: str = \"C3\",\n maturation_offset: float = 0.0,\n annotation_spec: Optional[AnnotationSpec] = None,\n) -> Tuple[Optional[plt.Figure], plt.Axes]:\n fig, core_ax = core.get_figure_and_axes(ax)\n\n # Plot the data\n core_ax.scatter(ts, ys, c=data_color, label=\"Observed\")\n\n # Visualise the model\n if model is None:\n core_ax.text(\n 0.1, 0.9, \"Model Fitting Failed\", color=\"red\", fontweight=\"bold\", transform=core_ax.transAxes\n ) # pragma: no cover\n else:\n _visualise_model(\n ax=core_ax,\n model=model,\n ts=ts,\n model_color=model_color,\n growth_period_color=growth_period_color,\n maturation_offset=maturation_offset,\n )\n\n if annotation_spec is not None:\n annotation_spec.apply(core_ax, xlabel=x_label, ylabel=y_label, title=title)\n\n # Set the limits\n core_ax.set_xlim(x_lim)\n\n return fig, core_ax", "def diff_plot(df, **kwargs):\n # calculate difference between each column\n for comb in itertools.combinations(df.columns, 2):\n df[\"%s-%s\" % (comb[0], comb[1])] = df[comb[0]] - df[comb[1]]\n\n barcols = [x for x in df.columns if \"-\" in x]\n linecols = [x for x in df.columns if \"-\" not in x]\n\n fig = make_subplots(\n rows=2, cols=1, row_heights=[0.8, 0.2], shared_xaxes=True, vertical_spacing=0.02\n )\n for col in linecols:\n fig.add_trace(go.Scatter(x=df.index, y=df[col], name=col))\n\n for col in barcols:\n fig.add_trace(go.Bar(x=df.index, y=df[col], name=col), row=2, col=1)\n\n today = pd.Timestamp.today()\n vline = go.layout.Shape(\n type=\"line\",\n x0=today,\n x1=today,\n y0=df.min().min(), 
# Set y0 to the minimum value of y_data\n y1=df.max().max(), # Set y1 to the maximum value of y_data\n line=dict(color=\"grey\", width=1, dash=\"dash\"),\n )\n fig.update_layout(shapes=[vline])\n\n title = kwargs.get(\"title\", \"\")\n fig.update_layout(title_text=title, title_x=0.01, margin=preset_margins)\n return fig", "def plot(self):\n h = .02\n i=1\n bags_X = self.bags_X\n bags_y = self.bags_y\n fig1 = plt.figure(figsize=(45, 9))\n\n \n cm = plt.cm.RdBu\n cm_bright = ListedColormap(['#FF0000', '#0000FF'])\n \n for model in self.models:\n ax = plt.subplot(1, len(self.models) , i)\n X = pd.DataFrame(bags_X[i-1])\n y = pd.Series(bags_y[i-1])\n x_min, x_max = X[X.columns[0]].min() - .5, X[X.columns[0]].max() + .5\n y_min, y_max = X[X.columns[1]].min() - .5, X[X.columns[1]].max() + .5\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))\n Z = np.array(model.predict(pd.DataFrame(np.c_[xx.ravel(), yy.ravel()], columns=X.columns)))\n # print(Z[12])\n Z = Z.reshape(xx.shape)\n ax.contourf(xx, yy, Z, cmap=cm, alpha=.8)\n ax.scatter(X[X.columns[0]], X[X.columns[1]], c=y, cmap=cm_bright, edgecolors='k')\n # size=[1000*w for w in self.weights[i-1]]\n ax.set_xlim(xx.min(), xx.max())\n ax.set_ylim(yy.min(), yy.max())\n ax.set_xlabel(str(X.columns[0]))\n ax.set_ylabel(str(X.columns[1]))\n plt.title(\"Estimator \"+str(i))\n i+=1\n \n fig2 = plt.figure(figsize=(9,9))\n X = self.X\n y = self.y\n ax2 = plt.subplot(1,1,1)\n x_min, x_max = X[X.columns[0]].min() - .5, X[X.columns[0]].max() + .5\n y_min, y_max = X[X.columns[1]].min() - .5, X[X.columns[1]].max() + .5\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))\n Z = np.array(self.predict(pd.DataFrame(np.c_[xx.ravel(), yy.ravel()], columns=X.columns)))\n Z = Z.reshape(xx.shape)\n ax2.contourf(xx, yy, Z, cmap=cm, alpha=.8)\n # size=[1000*w for w in self.weights[i-2]]\n ax2.scatter(X[X.columns[0]], X[X.columns[1]], c=y, cmap=cm_bright, edgecolors='k')\n ax2.set_xlim(xx.min(), xx.max())\n ax2.set_ylim(yy.min(), yy.max())\n plt.title(\"Combined Decision Surface\")\n \n plt.tight_layout()\n plt.show()\n\n return [fig1,fig2]", "def visualisation(self):\n plt.plot(self.x, self.y, 'o', label = 'Example data')\n plt.plot(self.x, np.dot(self.w, self.X), label = 'Model')\n plt.xlim([-1,1])\n plt.ylim([-1,1])", "def vis_difference(self):\n print(self.init_vec)\n\n init = self.init_output.numpy()\n\n alphas = np.linspace(0, 1, 20)\n for i, alpha in enumerate(alphas):\n\n display.clear_output(wait=True)\n norm = [torch.linalg.norm(torch.tensor(\n self.init_vec + alpha*self.eigen[i]), axis=1).detach().numpy() for i in range(2)]\n\n diff = np.array([self.compute_difference(\n alpha, self.eigen[i]) for i in range(2)])\n\n fig = plt.figure(figsize=(14, 12), tight_layout=True)\n fig.suptitle(\"Latent direction variation\", fontsize=20)\n gs = gridspec.GridSpec(2, 2)\n\n ax_temp = plt.subplot(gs[0, :])\n ax_temp.scatter(\n init[:, 0], init[:, 1])\n ax_temp.set_title(\"Initial Dataset\")\n ax_temp.set_xlim(-1, 1)\n ax_temp.set_ylim(-1, 1)\n [s.set_visible(False) for s in ax_temp.spines.values()]\n\n for j in range(2):\n ax_temp = plt.subplot(gs[1, j])\n sc = ax_temp.quiver(\n init[:, 0], init[:, 1], diff[j, :, 0], diff[j, :, 1], norm[j])\n sc.set_clim(np.min(norm[j]), np.max(norm[j]))\n plt.colorbar(sc)\n ax_temp.set_title(\n \"Direction: {}, alpha: {}\".format(j+1, alpha))\n ax_temp.set_xlim(-1, 1)\n ax_temp.set_ylim(-1, 1)\n [s.set_visible(False) for s in ax_temp.spines.values()]\n\n 
plt.savefig(\"frames_dir/fig_{}\".format(i))\n plt.show()", "def plot(self):\n pass", "def plot_model(self):\n \n plt.figure(figsize=[10,5])\n \n plt.scatter(self.receivers['recxs'],self.receivers['reczs'],marker='v')\n if self.source['src_type']==4:\n from obspy.imaging.beachball import beach\n beach = beach(self.source['mt'], xy=(self.source['srcx'],self.source['srcz']), width=self.model_parameters['xmax']*0.05)\n ax = plt.gca()\n \n ax.add_collection(beach) \n ax.set_aspect(\"equal\")\n \n else:\n plt.scatter(self.source['srcx'],self.source['srcz'],marker='*',color='r',s=200)\n \n plt.axhline(y=0,c='0.5')\n plt.xlim(0,self.model_parameters['xmax'])\n plt.ylim(self.model_parameters['zmax'],-0.1*self.model_parameters['zmax'])\n \n plt.xlabel('Distance (km)')\n plt.ylabel('Depth (km)')\n plt.grid()\n plt.show()", "def plot_diff(self, ax, d0, d1):\n i1 = 0\n for i0 in range(d0.shape[0]):\n try:\n while d0[i0,0] > d1[i1,0]:\n print((d0[i0,0], d1[i1,0]))\n i1 += 1\n except IndexError:\n break\n if d0[i0,0] == d1[i1,0]:\n break\n assert d0[i0,0] == d1[i1,0]\n i0s = i0\n d0s = 0\n d1s = 0\n dt = []\n dd = []\n for i0 in range(i0s, d0.shape[0]):\n d0s += d0[i0,1]\n try:\n d1s += d1[i1,1]\n assert d0[i0,0] == d1[i1,0]\n except IndexError:\n pass\n dt.append(d0[i0,0])\n dd.append(d0s - d1s)\n i1 += 1\n while i1 < d1.shape[0]:\n d1s += d1[i1,1]\n dt.append(d0[i0,0])\n dd.append(d0s - d1s)\n i1 += 1\n dd = np.array(dd)\n dd -= dd[-1]\n i = 0\n if self.start is not None:\n while dt[i] < self.start:\n i += 1\n ax.plot_date(dt[i:], dd[i:], fmt='-', color='yellow')", "def _plot_graph(self) -> None:\n ghg_data, bird_data = self._datasets\n model = self._selection.get_model(ghg_data, bird_data)\n model.plot_data('Percent Change in Bird population (from 1970) vs '\n 'Amount of Greenhouse gas produced in a year',\n 'Amount of Greenhouse gas produced in a year (kt)',\n 'Percent Change in Bird population (from 1970)')", "def plot_difference(mdiff, title=\"\", annotation=None):\n annotation_html = None\n if annotation is not None:\n annotation_html = [[\"+++ {}<br>--- {}\".format(\", \".join(int_tokens),\n \", \".join(diff_tokens))\n for (int_tokens, diff_tokens) in row]\n for row in annotation]\n\n data = go.Heatmap(z=mdiff, colorscale='RdBu', text=annotation_html)\n layout = go.Layout(width=950, height=950, title=title,\n xaxis=dict(title=\"topic\"), yaxis=dict(title=\"topic\"))\n py.iplot(dict(data=[data], layout=layout))", "def make_timeplot(df_measure, df_prediction):\n # mode = 'confirmed'\n mode = 'active'\n df_measure_confirmed = df_measure[mode]\n colors = px.colors.qualitative.Dark24\n n_colors = len(colors)\n fig = go.Figure()\n for i, country in enumerate(df_measure_confirmed.columns):\n fig.add_trace(go.Scatter(x=df_measure_confirmed.index, \n y=df_measure_confirmed[country],\n name=country[1], mode='markers+lines',\n marker_color=colors[i%n_colors],\n line_color=colors[i%n_colors],\n visible=False))\n for i, country in enumerate(df_prediction.columns):\n fig.add_trace(go.Scatter(x=df_prediction.index, \n y=df_prediction[country],\n name='+' + country[1], mode='lines',\n line_dash='dash',\n line_color=colors[i%n_colors],\n showlegend=False,\n visible=False))\n\n last_day = df_measure_confirmed.index.max()\n day = pd.DateOffset(days=1)\n fig.update_layout(title='',\n xaxis=dict(rangeslider_visible=True,\n range=(last_day - 10 * day,\n last_day + 4 * day)))\n fig.update_layout(\n updatemenus=[\n dict(\n type = \"buttons\",\n direction = \"left\",\n buttons=list([\n dict(\n args=[{\"visible\": 
[False,]*len(df_measure_confirmed.columns)}],\n label=\"Reset\",\n method=\"update\",\n ),\n dict(\n args=[\"yaxis\", {'type':'log'}],\n label=\"log\",\n method=\"relayout\",\n ),\n dict(\n args=[\"yaxis\", {'type':'linear'}],\n label=\"lin\",\n method=\"relayout\",\n ),\n\n ]),\n pad={\"r\": 10, \"t\": 10, \"b\":5},\n showactive=True,\n x=0.05,\n xanchor=\"left\",\n y=1.35,\n yanchor=\"top\",\n font_color='black',\n ),\n ],\n height=.9*FIRST_LINE_HEIGHT,\n)\n\n return fig", "def plot_stats(x_axis, y_axis, df, highlight=[]):\n a, b = df[x_axis], df[y_axis]\n\n X_train, X_test, y_train, y_test = train_test_split(a, b, test_size=0.33, random_state=42)\n\n X_train = np.array(X_train).reshape(-1, 1)\n X_test = np.array(X_test).reshape(-1, 1)\n y_train = np.array(y_train).reshape(-1, 1)\n y_test = np.array(y_test).reshape(-1, 1)\n\n regr = linear_model.LinearRegression()\n\n regr.fit(X_train, y_train)\n\n df[y_axis + \" STD\"] = df[y_axis].apply(lambda a: round((a-df[y_axis].mean())/df[y_axis].std()))\n df[y_axis + \" rank\"] = df[y_axis].rank(ascending=False)\n df[x_axis + \" rank\"] = df[x_axis].rank(ascending=False)\n \n mapper = linear_cmap(field_name=y_axis + \" STD\", palette=brewer[\"RdBu\"][len(df[y_axis + \" STD\"].unique())], \n low=min(df[y_axis + \" STD\"].unique()), high=max(df[y_axis + \" STD\"].unique()))\n \n source = ColumnDataSource(df)\n source2 = ColumnDataSource(df[df[\"Player\"].isin(highlight)])\n \n p = figure(x_range=(df[x_axis].min() - df[x_axis].std(), df[x_axis].max() + df[x_axis].std()), \n y_range=(df[y_axis].min() - df[y_axis].std(), df[y_axis].max() + df[y_axis].std()))\n \n r1 = p.circle(x=x_axis, y=y_axis,\n source=source, size=10, color=mapper, line_color=\"black\", legend_group= y_axis + \" STD\")\n\n p.title.text = y_axis + \" vs. \" + x_axis\n p.title.align = \"center\"\n p.xaxis.axis_label = x_axis\n p.yaxis.axis_label = y_axis\n p.legend.location = 'top_left'\n p.legend.title = \"St. 
Dev's from Avg \" + y_axis\n p.background_fill_color = \"#dddddd\"\n p.background_fill_alpha = 0.1\n \n line_x = [df[x_axis].min().item() - df[x_axis].std().item(), df[x_axis].max().item() + df[x_axis].std().item()]\n line_y = [(line_x[0]*regr.coef_.item()) + regr.intercept_.item(), (line_x[1]*regr.coef_.item()) + regr.intercept_.item()]\n r2 = p.line(line_x, line_y, line_width=2, color=\"black\")\n\n p.add_tools(HoverTool(renderers=[r1], tooltips=[\n (\"Player\", \"@Player\"),\n (y_axis, \"@{\" + y_axis +\"}{0.000}\"),\n (y_axis + \" Rank\", \"#@{\" + y_axis + \" rank}\"),\n (x_axis, \"@{\" + x_axis +\"}{0}\"),\n (x_axis + \" Rank\", \"#@{\" + x_axis + \" rank}\")]))\n\n \n p.add_tools(HoverTool(renderers=[r2], \n tooltips=[(x_axis, \"$x{0000}\"),\n (\"Predicted \" + y_axis, \"$y\")]))\n \n labels = LabelSet(x=x_axis, \n y=y_axis, text=\"Player\", y_offset=8,\n text_font_size=\"11px\", text_color=\"#555555\",\n source=source2, text_align='center')\n \n p.add_layout(labels)\n\n st.bokeh_chart(p)", "def make_delta(m1, m2):\n return graph_objs.Scatter(\n x=[0],\n y=[(m1 + m2) / 2.0],\n text=['delta: ' + '{:0.2f}'.format(abs(m1 - m2))],\n mode='markers',\n marker=dict(symbol='square',\n color='rgb(255,255,255)'),\n hoverinfo='text'\n )", "def subtract(self):\n self.parent.copyCurrentWinState(self.pltw)\n self.pltw.blklst[self.blkno][self.ypos] = self.data[1] - self.data[2]\n self.pltw.updatePlot()\n self.pltw.dirty = True\n self.pltw.activecurv = self.cpos\n self.parent.updateUI()\n self.hide()", "def plot_convergence_trace_plotly(negll_trace_df, name, plot_title, plot_out=None):\n\n\n data = []\n for trace in name:\n for iteration in set(negll_trace_df['pass']):\n data.append(\n go.Scatter(\n x=negll_trace_df[negll_trace_df['pass'] == iteration]['step'].tolist(),\n y=negll_trace_df[negll_trace_df['pass'] == iteration][trace].tolist(),\n mode='lines',\n name=trace + ' pass ' + str(iteration),\n connectgaps=True,\n showlegend=True,\n line=dict(\n width=4\n )\n\n )\n )\n\n plot = {\n \"data\": data,\n \"layout\": go.Layout(\n title = plot_title,\n xaxis1 = dict(title=\"step\",\n exponentformat=\"e\",\n showexponent='All'),\n yaxis1 = dict(title=\"negative log likelihood\",\n exponentformat=\"e\",\n showexponent='All'\n ),\n font = dict(size=18),\n )\n }\n\n if plot_out is not None:\n plotly_plot(plot, filename=plot_out, auto_open=False)\n else:\n return plot" ]
[ "0.6336095", "0.62351954", "0.60471606", "0.6010149", "0.58498794", "0.5847658", "0.58359987", "0.5812058", "0.57991797", "0.5774976", "0.57747", "0.57411087", "0.5719466", "0.57165027", "0.57083726", "0.5705856", "0.5704397", "0.5674093", "0.56470126", "0.5645467", "0.5633578", "0.560922", "0.56069124", "0.5604811", "0.5567167", "0.55537355", "0.55465275", "0.5537142", "0.5512793", "0.54891175" ]
0.6257385
1
given a username, return some representation of that user. Generally, this will be an anchor ref of a mailto URL.
def user2Link(user):
    # could also look up mail addrs via a table lookup, etc
    # NOTE: the mail domain was obfuscated to "[email protected]" in the source text;
    # "example.com" below is only a stand-in for the original domain.
    return '<a href="mailto:%(user)s@example.com">%(user)s</a>' % {"user": user}
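A quick usage sketch (relying on the placeholder domain noted above):

```python
# Hypothetical call: render a commit author as a clickable mailto link.
html = user2Link("alice")
print(html)  # <a href="mailto:alice@example.com">alice</a>
```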
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def user_ref_to_username(value):\n # strip the '<@' and '>'\n user_id = reference_to_id(value.group())\n user_profile = get_user_profile(user_id)\n return '@' + user_profile['name'] or user_id", "def user(inp):\n user = inp.text.lower().replace(' ', '-')\n return 'http://www.wikidot.com/user:info/' + user", "def generate_user_link(user):\n return '[@{0}](https://github.com/{0})'.format(user)", "def username(self) -> str:", "def username(self) -> str:", "def get(self, username):\n return username", "def get_user_email(username):\r\n return '{0}@test.com'.format(username)", "def username(username: str, uname: Optional[str] = typer.Argument(None)):\n typer.echo(github.find_email_from_username(username))", "def user_link( username ):\n user = User.get_by_name(username)\n if user and user.last_seen:\n cls = \"\"\n delta = (datetime.now() - user.last_seen).seconds\n if delta < 300: # 5 minutes\n cls = 'class=recent'\n if delta < 60: # 1 minute\n cls = 'class=veryrecent'\n return '<a %s href=\"/user/%s\">%s</a>' % (cls, username, username)\n elif user:\n return '<a href=\"/user/%s\">%s</a>' % (username, username)\n else:\n return username", "def format_username(self, at_char, user):\r\n return u'<a href=\"http://{domain}/user/{user}\" data-user=\"{user}\">{char}{user}</a>'.format(\r\n **dict(domain=self.domain, user=user, char=at_char, text=user))\r\n\r\n #return u'<a href=\"http://%s/user/%s\" data-user=\"\">%s%s</a>' \\\r\n # % (self.domain, user, at_char, user)\r", "def username(self) -> undefined.UndefinedOr[str]:", "def get_user_info(self) -> str:\n return self._searcher.get_user_info()", "def get_user(self):\n return self.get('users/self')", "def get_name(self):\n return self.user.username if self.user.username else self.user.email", "def user_show(username):\n\n user = User.query.get_or_404(username)\n # TODO: grab messages for user inbox (to_user = user) and\n # user outbox (from_user = user)\n # order messages by most recent from the database\n\n return (jsonify(user=user.serialize()), 200)", "def get_user_info_by_name(self, username: str) -> dict:", "def getPublicUserInfo(self, username):\r\n pass", "def bb_user(hit):\n try:\n user = hit.group(1)\n U = User.objects.get(username=user)\n T = loader.get_template('webview/t/user.html')\n C = Context({'U' : U})\n return T.render(C)\n\n except:\n # This is normally thrown when the user is invalid. 
Return the original result,\n # Only we add an icon to indicate an invalid user.\n return '<img src=\"/static/user_error.png\" alt=\"user\" border=\"0\" />%s' % (user)", "def user(self, username='None'):\n\n layout = u\"\"\"\n user: {id}\n created: {created}\n karma: {karma}\n about: {about}\n \"\"\"\n userdata = requests.get(endpoints.USER.format(username)).json()\n return layout.format(**userdata) if userdata else 'user not found'", "def __str__(self):\n return \"{}\".format(self.user.username)", "def format_user_for_slack(user):\n if getattr(user, \"last_name\", None):\n return f\"<@{user.last_name}>\"\n return user.email", "def get_username(self):\r\n return self.username", "def __str__(self):\n return self.user.username", "def __str__(self):\n return self.user.username", "def __str__(self):\n return self.user.username", "def __str__(self):\n return self.user.username", "def __str__(self):\n return self.user.username", "async def get_user(self, ctx, username: str):\n message = \"\"\n if username is not None:\n api = self.authenticate()\n user = api.get_user(username)\n\n colour =\\\n ''.join([randchoice('0123456789ABCDEF')\n for x in range(6)])\n colour = int(colour, 16)\n url = \"https://twitter.com/\" + user.screen_name\n emb = discord.Embed(title=user.name,\n colour=discord.Colour(value=colour),\n url=url,\n description=user.description)\n emb.set_thumbnail(url=user.profile_image_url)\n emb.add_field(name=\"Followers\", value=user.followers_count)\n emb.add_field(name=\"Friends\", value=user.friends_count)\n if user.verified:\n emb.add_field(name=\"Verified\", value=\"Yes\")\n else:\n emb.add_field(name=\"Verified\", value=\"No\")\n footer = \"Created at \" + user.created_at.strftime(\"%Y-%m-%d %H:%M:%S\")\n emb.set_footer(text=footer)\n await self.bot.send_message(ctx.message.channel, embed=emb)\n else:\n message = \"Uh oh, an error occurred somewhere!\"\n await self.bot.say(message)", "def resolve_username(obj, _):\n return obj.username.decode()", "def get_user(self, username):\n return {}" ]
[ "0.7166675", "0.7055912", "0.70320725", "0.6992083", "0.6992083", "0.6817144", "0.6804579", "0.67751735", "0.67302954", "0.6635375", "0.6424747", "0.6411638", "0.6411059", "0.640774", "0.63793653", "0.63757735", "0.63612443", "0.63392115", "0.6334134", "0.6333766", "0.632456", "0.62684625", "0.6262185", "0.6262185", "0.6262185", "0.6262185", "0.6262185", "0.6242452", "0.62283975", "0.6227257" ]
0.74790764
0
Return the first dict item that matches from pairs of dicts and keys. Will throw a KeyError if missing. _dig_first((dict1, "key1"), (dict2, "key2"), ...)
def _dig_first(*pairs: Tuple[Mapping[str, Setting], str], ignore_empty: bool = False) -> Setting:
    if not pairs:
        raise ValueError("pairs cannot be empty")
    for dict_like, key in pairs:
        if key in dict_like:
            value = dict_like[key]
            if ignore_empty and value == "":
                continue
            return value
    last_key = pairs[-1][1]
    raise KeyError(last_key)
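A self-contained sketch of the first-match-wins lookup; the dicts and keys here are invented for illustration:

```python
env = {"CIBW_BUILD": "cp39-*"}               # highest-precedence source
file_cfg = {"build": "cp38-*", "skip": ""}   # config-file values
defaults = {"build": "*", "skip": "pp*"}     # fallback defaults

# The first pair whose dict contains its key wins, so the env value is returned.
assert _dig_first((env, "CIBW_BUILD"), (file_cfg, "build"), (defaults, "build")) == "cp39-*"

# With ignore_empty=True the empty config value is skipped and lookup falls
# through to the default; a miss everywhere would raise KeyError("skip").
assert _dig_first((file_cfg, "skip"), (defaults, "skip"), ignore_empty=True) == "pp*"
```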
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dict_get_first_of(d: dict, key, *opt_keys, return_key: bool = False, **kw):\n knf = KeyNotFound()\n k = key\n v = d.get(key, knf)\n n = len(opt_keys)\n i = 0\n while isinstance(v, KeyNotFound) and i < n:\n k = opt_keys[i]\n v = d.get(k, knf)\n i += 1\n\n if isinstance(v, KeyNotFound):\n if 'default' in kw:\n _def = dict_default(kw['default'])\n if return_key:\n return None, _def\n else:\n return _def\n else:\n raise KeyError('none of the provided keys found in the dict')\n if return_key:\n return k, v\n else:\n return v", "def dict_first(my_dict: Dict) -> Any:\n return list(my_dict.values())[0]", "def first(data, key):\n for i in data:\n if key(i):\n return i\n return None", "def get_from_first(key: Any, *getters: Sequence, default: Any = None) -> Any:\n for item in getters:\n if item and (\n isinstance(item, dict) and key in item\n or isinstance(key, numbers.Integral) and hasattr(item, \"__len__\") and 0 <= int(key) < len(item)\n ):\n return item[key]\n return default", "def get_first_key_from_dict(ad_dict):\n\n try:\n key = list(ad_dict.keys())[0]\n\n return key\n except IndexError as exc:\n print(\"Cannot find key \" + str(exc), file=sys.stderr)\n return None", "def search(d, key, default=None):\n stack = [iter(d.items())]\n while stack:\n for k, v in stack[-1]:\n if isinstance(v, dict):\n stack.append(iter(v.items()))\n break\n elif k == key:\n return v\n else:\n stack.pop()\n return default", "def dict_find_name(some_dict: Dict[str, Referent], path: List[str]) -> Result:\n if path:\n head, *tail = path\n try:\n return NameContainer.dict_find_name(\n cast(Dict[str, Referent], some_dict[head]),\n tail)\n except KeyError:\n NameContainer.logger.debug(f\"{head!r} not found in {some_dict.keys()}\")\n raise NameContainer.NotFound(path)\n else:\n return cast(Result, some_dict)", "def _try_get(nested_dict, dict_keys):\n try:\n for dict_key in dict_keys:\n nested_dict = nested_dict.__getitem__(dict_key)\n return nested_dict\n except:\n return ''", "def nested_get(dictionary: dict, keys: list):\n nested_dict = dictionary\n for key in keys[:-1]:\n nested_dict = nested_dict[key]\n return nested_dict.get(keys[-1])", "def _find_match(needle: dict, haystack: list, keys: list):\n for item in haystack:\n for key in keys:\n if item.get(key) != needle[key]:\n break\n else:\n return item\n return None", "def search_value(d, key, default=None):\n stack = [iter(d.items())]\n while stack:\n for k, v in stack[-1]:\n if isinstance(v, dict):\n stack.append(iter(v.items()))\n break\n elif k == key:\n return v\n else:\n stack.pop()\n return default", "def lookup(dic, key, *keys):\n if keys:\n return lookup(dic.get(key, {}), *keys)\n return dic.get(key)", "def lookup(dic, key, *keys):\r\n if keys:\r\n return lookup(dic.get(key, {}), *keys)\r\n return dic.get(key)", "def first(self, **opts):\n try:\n return next(self.find(**opts))\n except StopIteration:\n if 'default' in opts:\n return opts['default']\n else:\n raise KeyError(\"no matching objects\")", "def find(name, *dicts):\n for d in dicts:\n if type(d) == str:\n return d\n elif name in d and d[name] is not None:\n return d[name]\n\n return None", "def _get_by_path(dic, keys):\n assert len(keys) > 0, \"Path key can not be an empty list.\"\n\n d = dic\n for key in keys[:-1]:\n if isinstance(key, int) or key in d:\n d = d[key]\n else:\n return None\n if keys[-1] in d or (isinstance(d, list) and keys[-1] < len(d)):\n return d[keys[-1]]\n\n return None", "def get_by_list_of_keys(dictionary: Dict, key_path: List[Any]) -> Dict:\n if len(key_path) == 1:\n 
return dictionary[key_path[0]]\n else:\n return get_by_list_of_keys(dictionary[key_path[0]], key_path[1:])", "def find_first_node_from_here(start_node, key):\n try:\n return next(find_nodes_from_here(start_node, key))\n except StopIteration:\n return None", "def _search_list_of_dictionaries(key, value, list_of_dictionaries):\n\n for element in list_of_dictionaries:\n if element.get(key) == value:\n return element\n return None", "def get_element(d, path): # type: (Dict, Tuple) -> Any\n if len(path) == 0:\n raise ValueError('Path length cant be 0')\n elif len(path) == 1:\n return d.get(path[0])\n elif d.get(path[0]):\n return DictUtil.get_element(d[path[0]], path[1:])\n return None", "def try_get_key(data: Dict, keys: List) -> Tuple[bool, Optional[Any]]:\n ok, result = False, None\n try:\n result = reduce(operator.getitem, keys, data)\n ok = True\n except KeyError:\n pass\n\n return ok, result", "def _extract_first_from(name, sources):\n for i, source in enumerate(sources):\n if not source:\n continue\n if name in source:\n return (i, source[name])\n raise KeyError(name)", "def get_in(d, ks, default=None):\n *ks_, last = ks\n d_ = d\n\n for k in ks_:\n if type(d_) != dict or k not in d_:\n return default\n d_ = d_[k]\n\n if type(d_) == dict:\n return d_.get(last, default)\n\n return default", "def query_dict(dictionary: Mapping[Any, Any], keys: Sequence[Any]) -> Union[Any, None]:\n\n def extract(dictionary: Any, key: Any) -> Union[Any, None]:\n \"\"\"Get value associated with key, defaulting to None.\"\"\"\n if dictionary is None or not isinstance(dictionary, dict):\n return None\n return dictionary.get(key)\n\n return reduce(extract, keys, dictionary)", "def first(pair):\n\treturn pair[0]", "def do_get(d, *ks, **kwargs):\n try:\n res = reduce (lambda acc, k: acc[k], ks, d)\n except (KeyError, TypeError):\n if \"default\" in kwargs:\n return kwargs[\"default\"]\n else:\n t, v, tb = sys.exc_info()\n if t == KeyError:\n msg = \"nested keys {} not found in {}\".format(ks, d)\n else:\n msg = \"nesting of keys {} too is too deep for {}\".format(ks, d)\n raise KeyError, msg, tb\n else:\n return res", "def find_one_bykey(cls, keydict, defaultval = None):\n return cls.dbm().modelclass_find_one_bykey(cls, keydict, defaultval)", "def get_dict_by_key(pcb_data: List[Dict[str, Any]], key: str) -> Dict[str, Any]:\n for d in pcb_data:\n if isinstance(d, dict) and key in d.keys():\n return d\n return {}", "def traverse_dict(dic, entry_list):\n length = len(entry_list)\n if length > 0:\n element = entry_list[0]\n if isinstance(dic, dict) and length > 1 and element in dic:\n return traverse_dict(dic[element], entry_list[1:])\n elif isinstance(dic, dict) and length == 1 and element in dic:\n return dic[element]\n return None", "def dict_key_from_item(dictionary, value):\n\n # iterate dictionary entries\n # if value equals parameter value, return key\n\n for item in dictionary.items():\n if item[1] == value:\n return item[0]" ]
[ "0.7321539", "0.6594973", "0.6331456", "0.6306491", "0.617117", "0.6135461", "0.60428375", "0.6006988", "0.5986996", "0.5967618", "0.59050506", "0.5897276", "0.5889747", "0.581803", "0.58139414", "0.5798729", "0.5781374", "0.57479256", "0.57039523", "0.5695982", "0.56894183", "0.56863886", "0.563219", "0.5612497", "0.5598855", "0.557593", "0.5567536", "0.55442184", "0.55396193", "0.55199736" ]
0.69398
1
Returns True if an option with this name is allowed in the [tool.cibuildwheel.<platform>] section of a config file.
def _is_valid_platform_option(self, name: str) -> bool:
    disallowed_platform_options = self.disallow.get(self.platform, set())
    if name in disallowed_platform_options:
        return False
    allowed_option_names = self.default_options.keys() | self.default_platform_options.keys()
    return name in allowed_option_names
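A sketch of the check in isolation, passing a stand-in object as `self`; the disallow and default tables below are invented for illustration:

```python
from types import SimpleNamespace

reader = SimpleNamespace(
    platform="linux",
    disallow={"linux": {"before-all"}},                     # hypothetical disallow table
    default_options={"build": "*", "skip": ""},             # hypothetical global defaults
    default_platform_options={"repair-wheel-command": ""},  # hypothetical platform defaults
)

print(_is_valid_platform_option(reader, "build"))       # True: known option, not disallowed
print(_is_valid_platform_option(reader, "before-all"))  # False: disallowed on this platform
print(_is_valid_platform_option(reader, "colour"))      # False: unknown option name
```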
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def has_option(self, key):\n\n return key in self.__opt", "def is_Configure_allowed(self):\n handler = self.get_command_object(\"Configure\")\n return handler.check_allowed()", "def config_has_option(section, option):\n return __CONFIG.has_option(section, option)", "def has_option(self, name):\n return self.option_settings[name]", "def has_option(self, key):\n if self.integration is None:\n return False\n return self.integration.has_option(f'{self.get_config_name()}.{key}')", "def has_option(self, option):\n splitvals = option.split('/')\n section, key = \"/\".join(splitvals[:-1]), splitvals[-1]\n\n return RawConfigParser.has_option(self, section, key)", "def has_option(self, section, option):\n\n # Underscore-style is the recommended configuration style\n option = option.replace('-', '_')\n if ConfigParser.has_option(self, section, option):\n return True\n\n # Support dash-style option names (with deprecation warning).\n option_alias = option.replace('_', '-')\n if ConfigParser.has_option(self, section, option_alias):\n warn = 'Configuration [{s}] {o} (with dashes) should be avoided. Please use underscores: {u}.'.format(\n s=section, o=option_alias, u=option)\n warnings.warn(warn, DeprecationWarning)\n return True\n\n return False", "def has_option(self, option):\n\t\treturn self.config_parser.has_option(self.section_name, option)", "def has_option(self, section, option):\n raise NotImplementedError()", "def is_manually_set(option_name: str) -> bool:\n return get_where_defined(option_name) not in (\n ConfigOption.DEFAULT_DEFINITION,\n ConfigOption.STREAMLIT_DEFINITION,\n )", "def option_is_known(self, opt):\n return opt in self.results", "def check_config_mode(self):\n return False", "def toolHasOptions(*args, **kwargs)->bool:\n pass", "def _is_valid_global_option(self, name: str) -> bool:\n allowed_option_names = self.default_options.keys() | PLATFORMS | {\"overrides\"}\n\n return name in allowed_option_names", "def hasoption(self, option):\n return self.has_option(NOSECTION, option)", "def validate_configuration_file(self):\n\n with open(self.config_path, \"r+\") as f_config:\n return bool(re.search(get_configuration_file_re(),\n f_config.read()))", "def has_option(self, section, option):\n try:\n if option in self._dict[section]:\n return True\n return False\n except KeyError as e:\n raise NoSectionError(str(e)) from None", "def has_option(self, option, section = None):\n\n if section is None:\n section = self.default_section\n return self.cfg.has_option(section, option)", "def check_configuration(self):\n\n return bool(os.path.isfile(self.config_path) and\n self.validate_configuration_file())", "def isset(cls,name):\n inst = cls.inst()\n if name in inst.options and \\\n len(inst.options[name]) > 0:\n return True\n else:\n return False", "def is_condition(cfg):\n if SELECTIONS in list(cfg.keys()):\n return True\n else:\n return False", "def has_option(self, section, option):\n if not section or section == DEFAULTSECT:\n option = self.optionxform(option)\n return option in self._defaults\n elif section not in self._sections:\n return False\n else:\n option = self.optionxform(option)\n return (option in self._sections[section]\n or option in self._defaults)", "def _is_supplied_by_config(group: argparse._MutuallyExclusiveGroup, conf: Dict[str, Any]) -> bool:\n group_args = []\n for arg in group._group_actions:\n group_args.append(arg.dest)\n\n count = 0\n for val in group_args:\n if val in conf:\n count += 1\n return count == len(group_args) or count == 0", "def is_experiment(cfg):\n 
if CONDITIONS in list(cfg.keys()):\n return True\n else:\n return False", "def _is_opt(self, opt):\r\n\r\n # TODO: raise BadOptionError for unknown option\r\n if len(opt) < 2 or opt[0] != '-':\r\n return False\r\n if opt[1] != '-':\r\n return self._short_opt.get(opt[0:2]) is not None\r\n try:\r\n if \"=\" in opt:\r\n (opt, next_arg) = opt.split(\"=\", 1)\r\n if self._match_long_opt(opt):\r\n return True\r\n except:\r\n pass\r\n\r\n return False", "def has_option(self, method, option):\n\t\targs = self.__parse_docstring(getattr(self, method).__doc__)\n\t\tif \"arguments\" in args:\n\t\t\treturn any(option == label for label in args[\"arguments\"].keys())\n\t\treturn False", "def has_configuration_set():\r\n return getattr(settings, \"MICROSITE_CONFIGURATION\", False)", "def contains(self, option: str) -> bool:\n return self._get_index(option) is not None", "def checkconfig(self): \n validconfig = {\n 'loglevel': lambda s: s in self.loglevels,\n 'logfilelevel': lambda s: s in self.loglevels,\n 'nodes': lambda s: isinstance(s, list),\n 'pynodes': lambda s: isinstance(s, list)\n }\n alive = True\n for key in self.config: \n if (key in validconfig and \n not validconfig[key](self.config[key])):\n logging.critical(\"Invalid configuration option {}: {}\".format(\n key, self.config[key]))\n alive = False\n return alive", "def check_config(self):\n # Check if tool is at all included in workflow\n if \"external\" not in self.config[\"tools\"][\"dna\"]:\n return # External not run, don't check configuration # pragma: no cover" ]
[ "0.6914015", "0.6797586", "0.6748813", "0.66980386", "0.6659934", "0.65959734", "0.65254307", "0.64785737", "0.64204913", "0.63361555", "0.62722063", "0.6266686", "0.6265161", "0.6200039", "0.616609", "0.6163951", "0.6161151", "0.612342", "0.61111915", "0.6098968", "0.6005089", "0.59883255", "0.5956989", "0.5926144", "0.5910069", "0.5902216", "0.58576673", "0.5849768", "0.584092", "0.5808431" ]
0.7029227
0
Load a toml file, returns global and platform as separate dicts.
def _load_file(self, filename: Path) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    with filename.open("rb") as f:
        config = tomli.load(f)

    global_options = config.get("tool", {}).get("cibuildwheel", {})
    platform_options = global_options.get(self.platform, {})

    return global_options, platform_options
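For context, a sketch of the TOML shape this expects, parsed with the same `tomli` calls; the option values are invented:

```python
import tomli  # Python 3.11+ ships the same parser as tomllib

doc = """
[tool.cibuildwheel]
build = "cp39-*"

[tool.cibuildwheel.linux]
before-all = "yum install -y libffi-devel"
"""

config = tomli.loads(doc)
global_options = config.get("tool", {}).get("cibuildwheel", {})
platform_options = global_options.get("linux", {})  # platform table nests inside the global one
print(global_options["build"])         # cp39-*
print(platform_options["before-all"])  # yum install -y libffi-devel
```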
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _load_datas(self) -> tp.Dict[str, dict]:\n with open(self._file, \"r\") as stream:\n try:\n load: tp.Dict[str, dict] = yaml.safe_load(stream)\n logger.info(\"YAML imported\")\n return load\n except yaml.YAMLError as exc:\n logger.debug(\"YAML import error : %s\", exc)\n raise", "def load(filename):\n conf = CommonConfig.get()\n conf.update(toml.load(filename))\n return conf", "def _load_model_utilities(cls, model_path: Path) -> Dict[Text, Any]:\n tf_model_file = model_path / f\"{cls._metadata_filename()}.tf_model\"\n loaded_data = rasa.utils.io.pickle_load(\n model_path / f\"{cls._metadata_filename()}.data_example.pkl\"\n )\n label_data = rasa.utils.io.pickle_load(\n model_path / f\"{cls._metadata_filename()}.label_data.pkl\"\n )\n fake_features = rasa.utils.io.pickle_load(\n model_path / f\"{cls._metadata_filename()}.fake_features.pkl\"\n )\n label_data = RasaModelData(data=label_data)\n priority = rasa.utils.io.json_unpickle(\n model_path / f\"{cls._metadata_filename()}.priority.pkl\"\n )\n entity_tag_specs = rasa.shared.utils.io.read_json_file(\n model_path / f\"{cls._metadata_filename()}.entity_tag_specs.json\"\n )\n entity_tag_specs = [\n EntityTagSpec(\n tag_name=tag_spec[\"tag_name\"],\n ids_to_tags={\n int(key): value for key, value in tag_spec[\"ids_to_tags\"].items()\n },\n tags_to_ids={\n key: int(value) for key, value in tag_spec[\"tags_to_ids\"].items()\n },\n num_tags=tag_spec[\"num_tags\"],\n )\n for tag_spec in entity_tag_specs\n ]\n model_config = rasa.utils.io.pickle_load(\n model_path / f\"{cls._metadata_filename()}.meta.pkl\"\n )\n\n return {\n \"tf_model_file\": tf_model_file,\n \"loaded_data\": loaded_data,\n \"fake_features\": fake_features,\n \"label_data\": label_data,\n \"priority\": priority,\n \"entity_tag_specs\": entity_tag_specs,\n \"model_config\": model_config,\n }", "def load(filename):\n path = Path(__file__).parent / \"resources\" / filename\n with path.open() as file:\n return lkml.load(file)", "def read_data_from_file(self, local_lookml_project_path: str) -> dict:\n logger.info(\n \"Parsing data from local LookML file {}\".format(\n self.lookml_file_name_and_path\n )\n )\n with open(\n utils.assemble_path(\n local_lookml_project_path, self.lookml_file_name_and_path\n ),\n \"r\",\n ) as lookml_file:\n return lkml.load(lookml_file)", "def parse_toml_file(filepath: Union[str, Path]) -> Any:\n from tomlkit import loads\n\n with open(filepath, 'r') as fp:\n toml_dict = loads(fp.read())\n\n blocks = list()\n variables = list()\n values = list()\n comments = list()\n\n header = None\n date_time = None\n\n for key, item in toml_dict.items():\n if key in ['__header__', 'header']:\n header = item\n elif key in ['__datetime__', 'datetime']:\n date_time = item\n else:\n for var, val in item.items():\n if isinstance(val, str):\n if re.fullmatch(r'\\d\\d\\d:\\d\\d', val):\n val = val.split(':')\n val = datetime.timedelta(hours=int(val[0]), minutes=int(val[1]))\n variables.append(var)\n values.append(val)\n blocks.append(key)\n\n variable_comment = dict()\n for key in toml_dict.keys():\n lines = toml_dict[key].as_string().split('\\n')\n while '' in lines:\n lines.remove('')\n comment = list()\n for line in lines:\n if line.startswith('#'):\n comment.append(line[2:])\n else:\n variable_comment[line.split('=')[0].strip()] = '\\n'.join(comment)\n comment = list()\n\n for var in variables:\n if var in variable_comment:\n comments.append(variable_comment[var])\n else:\n comments.append('')\n\n block_names = list(toml_dict.keys())\n try:\n 
block_names.remove('__header__')\n except ValueError:\n pass\n try:\n block_names.remove('__datetime__')\n except ValueError:\n pass\n\n header = list()\n lines = toml_dict.as_string().split('\\n')\n for line in lines:\n if line.startswith('#'):\n header.append(line.strip().split('# ')[1])\n if line == '':\n break\n\n date_time = _get_datetime_from_header(header)\n\n return date_time, header, block_names, (variables, values, comments, blocks)", "def load_toml(content):\n from toml import loads\n return loads(content)", "def parse_config(path):\n with open(path, \"r\") as config_file:\n return toml.load(config_file)", "def read_config_file(filename):\n\n # Read the config file\n toml_data = open(filename).read()\n\n # Load the definitions in the config file\n data = toml.loads(toml_data)\n\n return data", "def load_tree_from_file(fname: str = 'tree') -> dict:\r\n fid = open(fname + \".pkl\", \"rb\")\r\n return pickle.load(fid)", "async def load(self, file: IO) -> dict:", "def createLsystemFromFile( filename ):\n\tfp = open(filename, \"r\")\n\tlines = fp.readlines()\n\tfp.close()\n\tlsys = init()\n\tfor line in lines:\n\t\twords = line.split()\n\t\tif words[0] == 'base':\n\t\t\tsetBase(lsys, words[1])\n\t\telif words[0] == 'rule':\n\t\t\taddRule(lsys, words[1:])\n\treturn lsys", "def _file_loader(self) -> dict:\n cfg = None\n try:\n with open(self._path) as file:\n cfg = json.loads(file.read())\n except FileNotFoundError as e:\n print(e)\n exit(1)\n return cfg", "def parse(self):\n\n if exists(self.filepath):\n content = open(self.filepath).read().decode(charset)\n else:\n content = \"\"\n\n try:\n config = toml.loads(content)\n except toml.TomlSyntaxError:\n raise ConfigSyntaxError\n\n return config", "def load_builtin_data(name):\n\t\n\tpath = Path(resource_filename('pyospray', f'data/{name}.txt'))\n\tret = {}\n\tvalues = None\n\twith path.open('r') as f:\n\t\tlines = (line.rstrip('\\n') for line in f)\n\t\tfor token, content in tokenize(lines):\n\t\t\tif token == 'key':\n\t\t\t\tvalues = []\n\t\t\t\tret[content] = values\n\t\t\t\n\t\t\telif token == 'values':\n\t\t\t\tvalues.extend(content)\n\t\t\t\n\t\t\telse:\n\t\t\t\traise NotImplementedError\n\t\n\treturn ret", "def load_global_variables(self):\n return read_and_combine_yamls_in_dir(self.__get_environments_root_dir())", "def load_openml_data():\n datasets = dict()\n files = os.listdir(_DATA_DIRECTORY.value)\n for file_name in files:\n with open(_DATA_DIRECTORY.value + file_name, \"r\") as ff:\n task = np.loadtxt(ff, delimiter=\",\", skiprows=1)\n np.random.shuffle(task)\n datasets[file_name] = [task]\n return datasets, files", "def create_dicts():\n load_data_for_dict('data/atis/train/seq.in', 'data/atis/voc/vocabulary.json')\n load_data_for_dict('data/atis/valid/seq.in', 'data/atis/voc/vocabulary.json')\n load_data_for_dict('data/atis/test/seq.in', 'data/atis/voc/vocabulary.json') \n load_data_for_dict('data/atis/train/seq.out', 'data/atis/voc/slot_vocabulary.json')", "def load(self, name):\n resolvedName = envString.resolve(name)\n configuration = CondorConfig()\n configuration.load(resolvedName)\n self.defaults = {}\n\n if configuration.platform.nodeSetRequired and self.opts.nodeSet is None:\n print(\"error: nodeset parameter required by this platform\")\n sys.exit(10)\n\n tempDefaultRoot = Template(configuration.platform.defaultRoot)\n self.defaults[\"DEFAULT_ROOT\"] = tempDefaultRoot.substitute(\n USER_NAME=self.commandLineDefaults[\"USER_NAME\"])\n\n tempLocalScratch = Template(configuration.platform.localScratch)\n 
self.defaults[\"LOCAL_SCRATCH\"] = \\\n tempLocalScratch.substitute(USER_NAME=self.commandLineDefaults[\"USER_NAME\"])\n self.defaults[\"IDS_PER_JOB\"] = configuration.platform.idsPerJob\n self.defaults[\"DATA_DIRECTORY\"] = envString.resolve(configuration.platform.dataDirectory)\n self.defaults[\"FILE_SYSTEM_DOMAIN\"] = configuration.platform.fileSystemDomain\n self.defaults[\"EUPS_PATH\"] = configuration.platform.eupsPath\n self.defaults[\"MANAGER_SOFTWARE_HOME\"] = configuration.platform.manager_software_home\n\n platform_dir = lsst.utils.getPackageDir(\"ctrl_platform_\"+self.opts.platform)\n self.defaults[\"PLATFORM_DIR\"] = platform_dir\n self.manager = configuration.platform.manager\n self.setup_using = configuration.platform.setup_using", "async def load(self) -> Dict[str, Dict]:\n raise NotImplementedError()", "def load_from_pickle(f):\n # Skip deprecated first line\n _ = f.readline();\n settings = arg.processCommandLineArguments(f.readline().strip().split(\" \"), None);\n \n try:\n savedVars = pickle.load(f);\n except IndexError:\n return False;\n \n f.close();\n \n return dict(savedVars), settings[0];", "def load_attributes():\n\n # <attribute_id> <attribute_name>\n attributes_file = open(PROJECT_ROOT +'/data/attributes.txt').readlines()\n attributes_file = [i.strip().split() for i in attributes_file]\n\n # <certainty_id> <certainty_name>\n certainties_file = open(PROJECT_ROOT +'/data/CUB_200_2011/attributes/certainties.txt').readlines()\n certainties_file = [i.strip().split() for i in certainties_file]\n\n # <image_id> <attribute_id> <is_present> <certainty_id> <time>\n labels_file = open(PROJECT_ROOT +'/data/CUB_200_2011/attributes/image_attribute_labels.txt').readlines()\n labels_file = [i.strip().split() for i in labels_file]\n\n attribute_ids = {}\n for i in attributes_file:\n attribute_ids[i[1]] = int(i[0])\n\n certainty_ids = {}\n for i in certainties_file:\n certainty_ids[i[1]] = int(i[0])\n\n label_ids = {}\n for i in labels_file:\n label_ids[(int(i[0]), int(i[1]))] = list(map(lambda x:int(float(x)), i[2:]))\n\n return attribute_ids, certainty_ids, labels_file, label_ids", "def load():\n with open(SAVE_FILE_NAME, 'r') as save_file:\n dikt = yaml.safe_load(save_file)\n if dikt is None:\n dikt = {}\n return dikt", "def load_toml(self, toml_str): # type: (str) -> None\n self._toml = tomlkit.loads(toml_str)\n self._load_dict(self._toml.value)", "def load_preprocess():\n with open('preprocess.p', mode='rb') as in_file:\n return pickle.load(in_file)", "def _load_data_yaml(self, pathname): \n pathname = self._yaml_extension(pathname)\n\n with open(pathname) as file:\n traj_data = yaml.load(file, Loader=yaml.FullLoader)\n \n return traj_data", "def _load(self):\n p = os.path.join(paths.setup_dir, 'system_health.yaml')\n if os.path.isfile(p):\n with open(p, 'r') as rfile:\n config = yaml.load(rfile)\n if config:\n self._values = config['values']\n self._conditionals = config['conditionals']\n\n general = config['general']\n self._limit = general['limit']", "def load_env(data_dir, model_dir):\n model_params = sketch_rnn_model.get_default_hparams()\n with tf.gfile.Open(os.path.join(model_dir, 'model_config.json'), 'r') as f:\n model_params.parse_json(f.read())\n return load_dataset(data_dir, model_params, inference_mode=True)", "def load_tle(filename : str):\n\n num_elements = 0\n first_line = False\n\n elements_by_id = {}\n\n with open(filename) as f:\n for i, line in enumerate(f.readlines()):\n toks = line.strip().split()\n if toks[1][-1] == 'U':\n if first_line:\n raise 
ValueError(\"Unexpected first line: {}\".format(i))\n num_elements += 1\n first_line = True\n # Skipping the first line data for now...\n else:\n if not first_line:\n raise ValueError(\"Missing element first line: {}\".format(i))\n first_line = False\n id = toks[1]\n inclination_deg = float(toks[2])\n raan_deg = float(toks[3])\n eccentricity = int(toks[4])\n argument_of_perigee_deg = float(toks[5])\n mean_anomaly_deg = float(toks[6])\n mean_motion = float(toks[7][:11])\n\n elements_by_id[id] = [inclination_deg, raan_deg, eccentricity, \n argument_of_perigee_deg, mean_anomaly_deg, mean_motion]\n print(\"Loaded {} elements.\".format(num_elements))\n return elements_by_id", "def deserialize(file):\n global root_dir\n global wells_list\n global tops_list\n global project_file\n\n f = open(file, 'rb')\n\n current_project = pickle.load(f)\n root_dir = current_project.root_dir\n wells_list = current_project.wells_list\n tops_list = current_project.tops_list\n project_file = current_project.project_file" ]
[ "0.5698216", "0.56965494", "0.56819564", "0.5641362", "0.55809826", "0.5508169", "0.54958004", "0.5484023", "0.5451482", "0.5424716", "0.53777593", "0.5351504", "0.5334559", "0.532896", "0.53215915", "0.5309104", "0.527987", "0.52624243", "0.5254605", "0.52134854", "0.52068585", "0.5206745", "0.5196991", "0.51661474", "0.51601887", "0.5142541", "0.51325655", "0.51312804", "0.51155573", "0.5112892" ]
0.6246138
0
Get and return the value for the named option from environment, configuration file, or the default. If env_plat is False, then don't accept platform versions of the environment variable. If this is an array it will be merged with "sep" before returning. If it is a table, it will be formatted with "table['item']" using {k} and {v} and merged with "table['sep']". Empty variables will not override if ignore_empty is True.
def get(
    self,
    name: str,
    *,
    env_plat: bool = True,
    sep: Optional[str] = None,
    table: Optional[TableFmt] = None,
    ignore_empty: bool = False,
) -> str:
    if name not in self.default_options and name not in self.default_platform_options:
        raise ConfigOptionError(f"{name} must be in cibuildwheel/resources/defaults.toml file")

    # Environment variable form
    envvar = f"CIBW_{name.upper().replace('-', '_')}"
    plat_envvar = f"{envvar}_{self.platform.upper()}"

    # later overrides take precedence over earlier ones, so reverse the list
    active_config_overrides = reversed(self.active_config_overrides)

    # get the option from the environment, then the config file, then finally the default.
    # platform-specific options are preferred, if they're allowed.
    result = _dig_first(
        (os.environ if env_plat else {}, plat_envvar),  # type: ignore[arg-type]
        (os.environ, envvar),
        *[(o.options, name) for o in active_config_overrides],
        (self.config_platform_options, name),
        (self.config_options, name),
        (self.default_platform_options, name),
        (self.default_options, name),
        ignore_empty=ignore_empty,
    )

    if isinstance(result, dict):
        if table is None:
            raise ConfigOptionError(f"{name} does not accept a table")
        return table["sep"].join(table["item"].format(k=k, v=v) for k, v in result.items())

    if isinstance(result, list):
        if sep is None:
            raise ConfigOptionError(f"{name} does not accept a list")
        return sep.join(result)

    if isinstance(result, int):
        return str(result)

    return result
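The table/list flattening at the end can be exercised on its own; `TableFmt` behaves like a dict with "item" and "sep" keys, as the code implies. A small sketch with invented values:

```python
# A table value as parsed from TOML, e.g. an environment mapping
result = {"CFLAGS": "-O2", "LDFLAGS": "-L/opt/lib"}
table = {"item": "{k}={v}", "sep": " "}

rendered = table["sep"].join(table["item"].format(k=k, v=v) for k, v in result.items())
print(rendered)  # CFLAGS=-O2 LDFLAGS=-L/opt/lib

# A list value would instead be joined with `sep`:
print(" && ".join(["make", "make install"]))  # make && make install
```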
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_value(self, section_name: str, attr_name: str, env_override: bool = False) -> typing.Optional[str]:\n config = ConfigParser(allow_no_value=True)\n config.read(self.connection_string)\n result = None\n\n if env_override is True:\n result = os.getenv(f'{section_name}_{attr_name}'.upper())\n\n if result is None:\n for section in config.sections():\n if section.lower().replace(' ', '_') == section_name.lower().replace(' ', '_'):\n for attr in config[section]:\n if attr.lower().replace(' ', '_') == attr_name.lower().replace(' ', '_'):\n result = config[section][attr]\n\n return result", "def get_value(self, section_name: str, attr_name: str, env_override: bool = False) -> typing.Optional[str]:\n key = f'{section_name}_{attr_name}'.upper()\n result: typing.Optional[str] = None\n if env_override:\n result = os.getenv(key)\n if result is None:\n response = self._vault_api.read_secret(path=section_name.upper(), mount_point=self.mount_point)\n result = response[\"data\"][\"data\"].get(attr_name.upper())\n return result", "def getenv(option, default=undefined, cast=undefined):\n\n # We can't avoid __contains__ because value may be empty.\n if option in os.environ:\n value = os.environ[option]\n else:\n if isinstance(default, Undefined):\n raise UndefinedValueError('{} not found. Declare it as envvar or define a default value.'.format(option))\n\n value = default\n\n if isinstance(cast, Undefined):\n return value\n\n if cast is bool:\n value = _cast_boolean(value)\n elif cast is list:\n value = [x for x in value.split(',') if x]\n else:\n value = cast(value)\n\n return value", "def getParameter(self, value):\n if value in self.commandLineDefaults:\n return self.commandLineDefaults[value]\n if value in self.defaults:\n return self.defaults[value]\n return None", "def get_value(self, section_name: str, attr_name: str, env_override: bool = False) -> typing.Optional[str]:\n pass", "def _coalesceOption(self, name, default = ''):\n return self.view.settings().get(name, self.options.get(name, default))", "def getValue(self, value=None):\n if self.data and self.source & COMMANDLINE:\n return self.data\n\n if self.environ and str(self.environ) in os.environ:\n self.source = ENVIRONMENT\n self.file = None\n return self.cast(os.environ[str(self.environ)])\n\n if self.data:\n return self.data\n\n if self.default:\n self.source = BUILTIN\n self.file = None\n return self.default\n\n self.source = CODE\n self.file = None\n\n if value is None:\n return []\n\n return value", "def env(*vars, **kwargs):\n for v in vars:\n value = os.environ.get(v, None)\n if value:\n return value\n return kwargs.get('default', '')", "def env(*vars, **kwargs):\n for v in vars:\n value = os.environ.get(v, None)\n if value:\n return value\n return kwargs.get('default', '')", "def env(*vars, **kwargs):\n for v in vars:\n value = os.environ.get(v)\n if value:\n return value\n return kwargs.get('default', '')", "def env(*vars, **kwargs):\r\n for v in vars:\r\n value = os.environ.get(v)\r\n if value:\r\n return value\r\n return kwargs.get('default', '')", "def get(self, key, default=None):\n value = os.environ.get(key)\n\n if value:\n self.logging.info(\"Got %s from environment.\" % key)\n self.logging.debug(value)\n return_val = value\n elif key in self._config.keys():\n self.logging.info(\"Got %s from config file.\" % key)\n self.logging.debug(value)\n return_val = self._config[key]\n else:\n return_val = default\n return return_val", "def get(self):\n self.value = os.getenv(self.name, self.default)\n return self.value", "def 
_repl(self, m):\n section, option = m.group(1, 2)\n if section == \"ENV\":\n return os.getenv(option, \"\")\n else:\n return self.cfg.get(section, option)", "def env(*_vars, **kwargs):\r\n for v in _vars:\r\n value = os.environ.get(v, None)\r\n if value:\r\n return value\r\n return kwargs.get('default', '')", "def env(*_vars, **kwargs):\n for v in _vars:\n value = os.environ.get(v, None)\n if value:\n return value\n return kwargs.get('default', '')", "def parse(env, platform_name=None):\n\n platform_name = platform_name or PLATFORM\n\n result = {}\n for variable, value in env.items():\n\n # Platform specific values\n if isinstance(value, dict):\n value = value.get(platform_name, \"\")\n\n if not value:\n continue\n\n # Allow to have lists as values in the tool data\n if isinstance(value, (list, tuple)):\n value = \";\".join(value)\n\n result[variable] = value\n\n return result", "def _get_field(self, section, field):\n if not self._configparser.has_option(section, field):\n return None\n return self._configparser.get(section, field).strip()", "def get_option(self, name: str, section: str = None) -> str:\n if self.default_vars and name in self.default_vars:\n return self.default_vars[name]\n else:\n ops = self.options\n if name in ops:\n return ops[name]\n else:\n raise ConfigurableError(f'No such option: {name}')", "def get(self):\n if not self.__name in g_platform_variables:\n raise RuntimeError(\"unknown platform variable '%s'\" % (self.__name))\n current_var = g_platform_variables[self.__name]\n combinations = get_platform_combinations()\n for ii in combinations:\n if ii in current_var:\n return current_var[ii]\n raise RuntimeError(\"current platform %s not supported for variable '%s'\" % (str(combinations), self.__name))", "def get_option(self, option, default=None):\n splitvals = option.split('/')\n section, key = \"/\".join(splitvals[:-1]), splitvals[-1]\n\n try:\n value = self.get(section, key)\n value = self._str_to_val(value)\n except ValueError, s:\n logger.warning(\"get failed for {}/{}: {}\".format(section,key,s))\n value = default\n except NoSectionError:\n value = default\n except NoOptionError:\n value = default\n\n return value", "def get_envlist(key, *default, **kwargs):\n separator = kwargs.get('separator', ' ')\n return get_env(key, *default, coerce=lambda x: x.split(separator))", "def _format_environment_value(value):\n value = str(value)\n if platform.system() == \"Windows\":\n # Split on semicolons first\n components = value.split(os.pathsep)\n\n # On each component, replace anything that looks like\n # a drive letter with a unix-like drive path.\n components = [re.sub(r\"^([A-Za-z]):\\\\\",\n r\"\\\\\\1\\\\\",\n c) for c in components]\n\n return \":\".join(components).replace(\"\\\\\", \"/\")\n\n return value", "def _default_getter(environ, metadata, prefix, name):\n ce = metadata[CNF_KEY]\n var = ce.name if ce.name is not None else \"_\".join((*prefix, name)).upper()\n log.debug(\"looking for env var '%s'.\", var)\n try:\n return environ[var]\n except KeyError:\n raise MissingEnvValueError(var) from None", "def retrieve_options(env):\n\n options = []\n if env.core != -1:\n options.extend([\"--core {}\".format(env.core)])\n if env.mtor != 4:\n options.extend([\"--mtor {}\".format(env.mtor)])\n if env.n != 1000:\n options.extend([\"--n {}\".format(env.n)])\n if env.forcefield != \"OPLS2005\":\n options.extend([\"--force {}\".format(env.forcefield)])\n if env.mae_lig:\n options.extend([\"--mae_charges\"])\n if env.gridres != 10:\n options.extend([\"--gridres 
{}\".format(env.gridres)])\n return \" \".join(options)", "def get(self, key, default=''):\n key = self.optionxform(key)\n cached = self._cache.get(key, _use_default)\n if cached is not _use_default:\n return cached\n name_str = self.name\n key_str = to_unicode(key)\n settings = ProductSetting.select(self.env,\n where={'product': self.product,\n 'section': name_str,\n 'option': key_str})\n if len(settings) > 0:\n value = settings[0].value\n else:\n for parent in self.config.parents:\n value = parent[self.name].get(key, _use_default)\n if value is not _use_default:\n break\n else:\n if default is not _use_default:\n option = Option.registry.get((self.name, key))\n value = option.default if option else _use_default\n else:\n value = _use_default\n if value is _use_default:\n return default\n if not value:\n value = u''\n elif isinstance(value, basestring):\n value = to_unicode(value)\n self._cache[key] = value\n return value", "def _do_get(self, name, group=None, namespace=None):\n if group is None and name in self._groups:\n return (self.GroupAttr(self, self._get_group(name)), None)\n\n info = self._get_opt_info(name, group)\n opt = info['opt']\n if 'location' in info:\n loc = info['location']\n else:\n loc = opt._set_location\n\n if isinstance(opt, SubCommandOpt):\n return (self.SubCommandAttr(self, group, opt.dest), None)\n\n if 'override' in info:\n return (self._substitute(info['override']), loc)\n\n def convert(value):\n return self._convert_value(\n self._substitute(value, group, namespace), opt)\n\n group_name = group.name if group else None\n key = (group_name, name)\n\n # If use_env is true, get a value from the environment but don't use\n # it yet. We will look at the command line first, below.\n env_val = (sources._NoValue, None)\n if self._use_env:\n env_val = self._env_driver.get(group_name, name, opt)\n\n if opt.mutable and namespace is None:\n namespace = self._mutable_ns\n if namespace is None:\n namespace = self._namespace\n if namespace is not None:\n try:\n alt_loc = None\n try:\n val, alt_loc = opt._get_from_namespace(namespace,\n group_name)\n # Try command line first\n if (val != sources._NoValue\n and alt_loc.location == Locations.command_line):\n return (convert(val), alt_loc)\n # Environment source second\n if env_val[0] != sources._NoValue:\n return (convert(env_val[0]), env_val[1])\n # Default file source third\n if val != sources._NoValue:\n return (convert(val), alt_loc)\n except KeyError: # nosec: Valid control flow instruction\n alt_loc = LocationInfo(\n Locations.environment,\n self._env_driver.get_name(group_name, name),\n )\n # If there was a KeyError looking at config files or\n # command line, retry the env_val.\n if env_val[0] != sources._NoValue:\n return (convert(env_val[0]), env_val[1])\n except ValueError as ve:\n message = \"Value for option %s from %s is not valid: %s\" % (\n opt.name, alt_loc, str(ve))\n # Preserve backwards compatibility for file-based value\n # errors.\n if alt_loc.location == Locations.user:\n raise ConfigFileValueError(message)\n raise ConfigSourceValueError(message)\n\n try:\n return self.__drivers_cache[key]\n except KeyError: # nosec: Valid control flow instruction\n pass\n\n for source in self._sources:\n val = source.get(group_name, name, opt)\n if val[0] != sources._NoValue:\n result = (convert(val[0]), val[1])\n self.__drivers_cache[key] = result\n return result\n\n if 'default' in info:\n return (self._substitute(info['default']), loc)\n\n if self._validate_default_values:\n if opt.default is not None:\n try:\n 
convert(opt.default)\n except ValueError as e:\n raise ConfigFileValueError(\n \"Default value for option %s is not valid: %s\"\n % (opt.name, str(e)))\n\n if opt.default is not None:\n return (convert(opt.default), loc)\n\n return (None, None)", "def getenv_string(setting, default=''):\n return os.environ.get(setting, default)", "def windows_get_env_value(var_name: str) -> str:\n if var_name in os.environ.keys():\n return os.environ[var_name]", "def __get_option(self, option):\n if option in Config.OPTIONS.keys():\n _default = Config.OPTIONS[option]\n elif option in Config.FILE_OPTIONS.keys():\n _default = Config.FILE_OPTIONS[option]\n elif option in Config.PATH_OPTIONS.keys():\n _default = Config.PATH_OPTIONS[option]\n else:\n _default = None # XXX ??\n \n _val = self.__get(option)\n\n if _val: \n return _val\n else:\n return _default" ]
[ "0.55509925", "0.5523774", "0.54046524", "0.5343538", "0.52567863", "0.52536374", "0.5241531", "0.522168", "0.522168", "0.52107966", "0.5202625", "0.5192449", "0.5190362", "0.5177892", "0.5089966", "0.5081989", "0.50755405", "0.50751054", "0.5050369", "0.50461185", "0.504509", "0.50341076", "0.5011676", "0.49969116", "0.49700162", "0.4969125", "0.49610656", "0.49514547", "0.49125555", "0.490985" ]
0.815719
0
Return rectangle representing the Done button
def get_done_rect(self):
    return pygame.Rect(self.button_data.done_x, self.button_data.y,
                       self.button_data.large_width, self.button_data.height)
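A minimal hit-testing sketch; the `button_data` fields are hypothetical values:

```python
import pygame
from types import SimpleNamespace

button_data = SimpleNamespace(done_x=400, y=520, large_width=160, height=48)
done_rect = pygame.Rect(button_data.done_x, button_data.y,
                        button_data.large_width, button_data.height)

if done_rect.collidepoint((450, 540)):  # e.g. pygame.mouse.get_pos()
    print("Done clicked")
```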
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_cancel_rect(self):\n return pygame.Rect(self.button_data.cancel_x, self.button_data.y, self.button_data.large_width, self.button_data.height)", "def get_pay_rect(self):\n return pygame.Rect(self.button_data.pay_x, self.button_data.y, self.button_data.large_width, self.button_data.height)", "def _get_button_region(self):\n # XXXX Only rectangulars for now\n if not self._sensitive:\n return None\n if not self._insidetemporal():\n return None\n rgn = Qd.NewRgn()\n if self._shape == 'rect':\n x0, y0 = self._convert_point(self._coordinates[0:2])\n x1, y1 = self._convert_point(self._coordinates[2:4])\n box = x0, y0, x1, y1\n Qd.RectRgn(rgn, box)\n elif self._shape == 'poly':\n Qd.OpenRgn()\n xl, yl = self._convert_point(self._coordinates[-2:])\n Qd.MoveTo(xl, yl)\n for i in range(0, len(self._coordinates), 2):\n x, y = self._convert_point(self._coordinates[i:i+2])\n Qd.LineTo(x, y)\n Qd.CloseRgn(rgn)\n elif self._shape == 'circle':\n print 'Circle not supported yet'\n elif self._shape == 'ellipse':\n # Note: rx/ry are width/height, not points\n x, y, rx, ry = self._dispobj._window._convert_coordinates(self._coordinates)\n Qd.OpenRgn()\n Qd.FrameOval((x-rx, y-ry, x+rx, y+ry))\n Qd.CloseRgn(rgn)\n else:\n print 'Invalid shape type', self._shape\n return rgn", "def fill_button_frame(dialbox):\n dialbox.button_done = tk.Button(dialbox.button_frame, text='Done', width=8, \\\n command=lambda: dialbox.gui.callbacks.done(dialbox))\n dialbox.button_done.grid(row=1, column=1)\n dialbox.button_cancel = tk.Button(dialbox.button_frame, text='Cancel', width=8, \\\n command=lambda: dialbox.gui.callbacks.cancel(dialbox))\n dialbox.button_cancel.grid(row=1, column=0)", "def buttonbox(self):\n self.ok_button = tk.Button(\n self, text=\"OK\", width=5, command=lambda: self.destroy()\n )\n self.ok_button.pack(pady=10)", "def button_box(self):\r\n\r\n below_hz_frame = tkinter.Frame(self)\r\n ok_button = ttk.Button(below_hz_frame, text=\"OK\",\r\n width=10, command=self.ok,\r\n default=tkinter.ACTIVE)\r\n ok_button.grid(row=0, column=0, padx=30, pady=10)\r\n cancel_button = ttk.Button(below_hz_frame, text=\"Cancel\", width=10,\r\n command=self.cancel)\r\n cancel_button.grid(row=0, column=1, padx=30, pady=10)\r\n\r\n # bind 'ok' method to the 'enter' button of the keyboard\r\n self.bind(\"<Return>\", self.ok)\r\n\r\n # bind 'cancel' method to the 'esc' button of the keyboard\r\n self.bind(\"<Escape>\", self.cancel)\r\n below_hz_frame.pack(fill=tkinter.X)", "def draw_but(self, window):\n # draws the rectangular button\n p1 = graphics.Point(self.cen_point_x - self.width / 2, \n self.cen_point_y - self.height / 2)\n p2 = graphics.Point(self.cen_point_x + self.width / 2, \n self.cen_point_y + self.height / 2)\n self.button = graphics.Rectangle(p1, p2)\n self.button.setOutline(\"Orange\")\n self.button.draw(window)\n \n # draws the text on the button\n self.text.draw(window)", "def DrawButton(self, dc, wnd, in_rect, button, orientation):\r\n\r\n bitmap_id, button_state = button.id, button.cur_state\r\n \r\n if bitmap_id == AUI_BUTTON_CLOSE:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_close_bmp\r\n else:\r\n bmp = self._active_close_bmp\r\n\r\n elif bitmap_id == AUI_BUTTON_LEFT:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_left_bmp\r\n else:\r\n bmp = self._active_left_bmp\r\n\r\n elif bitmap_id == AUI_BUTTON_RIGHT:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_right_bmp\r\n else:\r\n bmp = self._active_right_bmp\r\n\r\n elif 
bitmap_id == AUI_BUTTON_WINDOWLIST:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_windowlist_bmp\r\n else:\r\n bmp = self._active_windowlist_bmp\r\n\r\n else:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = button.dis_bitmap\r\n else:\r\n bmp = button.bitmap\r\n \r\n if not bmp.IsOk():\r\n return\r\n\r\n rect = wx.Rect(*in_rect)\r\n\r\n if orientation == wx.LEFT:\r\n \r\n rect.SetX(in_rect.x)\r\n rect.SetY(((in_rect.y + in_rect.height)/2) - (bmp.GetHeight()/2))\r\n rect.SetWidth(bmp.GetWidth())\r\n rect.SetHeight(bmp.GetHeight())\r\n \r\n else:\r\n \r\n rect = wx.Rect(in_rect.x + in_rect.width - bmp.GetWidth(),\r\n ((in_rect.y + in_rect.height)/2) - (bmp.GetHeight()/2),\r\n bmp.GetWidth(), bmp.GetHeight())\r\n\r\n self.DrawButtons(dc, rect, bmp, wx.WHITE, button_state)\r\n\r\n out_rect = wx.Rect(*rect)\r\n return out_rect", "def draw_button(self):\n # Draw the button's outline\n pg.draw.rect(self.screen, self.text_color, pg.Rect(self.rect.left - 1, self.rect.top - 1, self.rect.width + 2, self.rect.height + 2))\n\n # Draw the button\n pg.draw.rect(self.screen, self.button_color, self.rect)\n\n # Blit the button's text onto it\n self.screen.blit(self.txt_surface, self.txt_surface_rect)", "def paintButtons(self):\n\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(BUTTON_OK)\n buttonOK = guiobjects.OcempImageButtonTransparent(imgPath, self.buttonTooltips[\"ok\"], self.showTooltip, self.removeTooltip)\n buttonOK.topleft = [770, 30]\n buttonOK.connect_signal(ocempgui.widgets.Constants.SIG_CLICKED, self.changeConfiguration)\n self.window.add_child(buttonOK)\n\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(BUTTON_CANCEL)\n buttonCancel = guiobjects.OcempImageButtonTransparent(imgPath, self.buttonTooltips[\"cancel\"], self.showTooltip, self.removeTooltip)\n buttonCancel.topleft = [890, 30]\n buttonCancel.connect_signal(ocempgui.widgets.Constants.SIG_CLICKED, self.closeConfiguration)\n self.window.add_child(buttonCancel)", "def tab_only_rect(self):\n rect = self.rect()\n lB, rB = [c for c in self.children() if isinstance(c, QtWidgets.QToolButton)]\n side_button_width = lB.width()+rB.width()+15\n rect.adjust(0,0, -side_button_width, 0)\n return rect", "def IndentPressedBitmap(rect, button_state):\r\n\r\n if button_state == AUI_BUTTON_STATE_PRESSED:\r\n rect.x += 1\r\n rect.y += 1\r\n\r\n return rect", "def create_quit_button(self):\n quit_button = Button(self.littleFrame, text=\"Quitter\", font=(\"Arial\", 25), bg='white', relief='groove',\n fg='light blue',\n command=self.leave_page, width=8, activebackground='red',\n activeforeground='black')\n quit_button.grid(column=2, row=0)", "def get_amount_rect(self):\n return pygame.Rect(self.amount.x, self.amount.y, self.amount.width, self.button_data.height)", "def DrawButton(self, dc, wnd, in_rect, button, orientation):\r\n\r\n bitmap_id, button_state = button.id, button.cur_state\r\n \r\n if bitmap_id == AUI_BUTTON_CLOSE:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_close_bmp\r\n elif button_state & AUI_BUTTON_STATE_HOVER:\r\n bmp = self._hover_close_bmp\r\n elif button_state & AUI_BUTTON_STATE_PRESSED:\r\n bmp = self._pressed_close_bmp\r\n else:\r\n bmp = self._active_close_bmp\r\n\r\n elif bitmap_id == AUI_BUTTON_LEFT:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_left_bmp\r\n else:\r\n bmp = self._active_left_bmp\r\n\r\n elif bitmap_id == AUI_BUTTON_RIGHT:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = 
self._disabled_right_bmp\r\n else:\r\n bmp = self._active_right_bmp\r\n\r\n elif bitmap_id == AUI_BUTTON_WINDOWLIST:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_windowlist_bmp\r\n else:\r\n bmp = self._active_windowlist_bmp\r\n\r\n else:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = button.dis_bitmap\r\n else:\r\n bmp = button.bitmap\r\n \r\n if not bmp.IsOk():\r\n return\r\n\r\n rect = wx.Rect(*in_rect)\r\n\r\n if orientation == wx.LEFT:\r\n \r\n rect.SetX(in_rect.x)\r\n rect.SetY(((in_rect.y + in_rect.height)/2) - (bmp.GetHeight()/2))\r\n rect.SetWidth(bmp.GetWidth())\r\n rect.SetHeight(bmp.GetHeight())\r\n \r\n else:\r\n \r\n rect = wx.Rect(in_rect.x + in_rect.width - bmp.GetWidth(),\r\n ((in_rect.y + in_rect.height)/2) - (bmp.GetHeight()/2),\r\n bmp.GetWidth(), bmp.GetHeight())\r\n \r\n rect = IndentPressedBitmap(rect, button_state)\r\n dc.DrawBitmap(bmp, rect.x, rect.y, True)\r\n\r\n out_rect = rect\r\n\r\n if bitmap_id == AUI_BUTTON_RIGHT:\r\n self._buttonRect = wx.Rect(rect.x, rect.y, 30, rect.height)\r\n \r\n return out_rect", "def create_buttonbox(self, master_fr):\n \n ttk.Button(\n master_fr,\n text=\"Cancel\",\n command=self.dlg_pbCancel\n ).pack(side=tk.RIGHT)\n \n ttk.Button(\n master_fr,\n text=\"OK\",\n command=self.dlg_pbOK,\n default=tk.ACTIVE\n ).pack(side=tk.RIGHT)\n \n # Do Cancel if closed\n self.tkWindow.protocol(\"WM_DELETE_WINDOW\", self.dlg_pbCancel)", "def button(self):\r\n t = turtle.Turtle()\r\n t.hideturtle()\r\n t.speed(20)\r\n t.penup()\r\n t.color(\"black\")\r\n # Draws one of the squares behind the \"scoreboard\"\r\n t.goto(70, 41)\r\n t.pendown()\r\n t.begin_fill()\r\n for i in range(4):\r\n t.forward(100)\r\n t.left(90)\r\n t.end_fill()\r\n t.penup()\r\n t.goto(70, 139)\r\n # Draws one of the squares over a button up arrow\r\n t.color(\"#20b2aa\") # Turns the color to teal\r\n t.pendown()\r\n t.begin_fill()\r\n for y in range(4):\r\n t.forward(100)\r\n t.left(90)\r\n t.end_fill()\r\n t.penup()\r\n\r\n t.goto(190, 40)\r\n # Draws another one of the square around the enter button\r\n t.color(\"#20b2aa\") # Turns the color to teal\r\n t.pendown()\r\n t.begin_fill()\r\n for y in range(4):\r\n t.forward(100)\r\n t.left(90)\r\n t.end_fill()\r\n\r\n t.penup()\r\n t.goto(70, -59)\r\n t.color(\"#20b2aa\") # Turns the color to teal\r\n t.pendown()\r\n # Draws the box around the down button\r\n t.begin_fill()\r\n for y in range(4):\r\n t.forward(100)\r\n t.left(90)\r\n t.end_fill()\r\n # Draws the up arrow of the button\r\n t.penup()\r\n t.goto(70,143)\r\n t.pendown()\r\n t.color(\"#8b8378\") # Turns the color a light grey\r\n t.begin_fill()\r\n for y in range(3):\r\n t.pendown()\r\n t.forward(100)\r\n t.left(120)\r\n t.end_fill()\r\n # Draws the down arrow of the button\r\n t.penup()\r\n t.goto(70, 40)\r\n t.pendown()\r\n t.begin_fill()\r\n for y in range(3):\r\n t.forward(100)\r\n t.right(120)\r\n t.end_fill()\r\n # Draws scoreboard\r\n t.penup()\r\n t.goto(75, 136)\r\n t.color(\"white\")\r\n t.pendown()\r\n t.begin_fill()\r\n for y in range(4):\r\n t.forward(90)\r\n t.right(90)\r\n t.end_fill()\r\n t.color(\"black\")\r\n t.penup()\r\n t.goto(90,35)\r\n t.pendown()\r\n t.write(\"1\", font=(\"Arial\", 75, \"normal\") )\r\n t.color(\"#8b8378\") # Turns the color a light grey\r\n t.penup()\r\n # Draws the circle for the enter button and writes \"Enter\" on the button\r\n t.goto(240,50)\r\n t.begin_fill()\r\n t.circle(40)\r\n t.end_fill()\r\n t.penup()\r\n t.color(\"white\")\r\n t.goto(210,75)\r\n t.write(\"Enter\", font= 
(\"Arial\", 20, \"normal\"))\r\n t.color(\"white\")\r\n # Writes \"The Game of Nim\" at the bottom of the screen\r\n t.penup()\r\n t.goto(30, -140)\r\n t.pendown()\r\n t.write(\"The Game \", font=(\"Arial\", 40, \"normal\"))\r\n t.penup()\r\n t.goto(110, -185)\r\n t.write(\"of\", font = (\"Arial\", 40, \"normal\"))\r\n t.goto(70, -245)\r\n t.write(\"Nim\", font = (\"Arial\", 50, \"normal\"))", "def okButton(self):\n return self.__okButton", "def okButton(self):\n return self.__okButton", "def okButton(self):\n return self.__okButton", "def drawing_area_button_release_event(self, widget, event):\n if self.drawing_area_mode == \"GENERATE_CROP\":\n if event.button == 1 and self.pixbuf != None:\n self.button_pressed = False\n \n #current ending coordinates\n self.x_end = int(event.x)\n self.y_end = int(event.y)\n \n #draw the box\n self.draw_box(widget, self.x_begin, self.y_begin, \\\n self.x_end, self.y_end)\n \n if self.drawing_area_mode == \"GENERATE_CROP_TRANSITION\":\n self.drawing_area_mode = \"GENERATE_CROP\"", "def DrawButton(self, dc, wnd, item, rect):\r\n\r\n bmp_rect, text_rect = self.GetToolsPosition(dc, item, rect)\r\n \r\n if not item.GetState() & AUI_BUTTON_STATE_DISABLED:\r\n \r\n if item.GetState() & AUI_BUTTON_STATE_PRESSED:\r\n \r\n dc.SetPen(wx.Pen(self._highlight_colour))\r\n dc.SetBrush(wx.Brush(StepColour(self._highlight_colour, 150)))\r\n dc.DrawRectangleRect(rect)\r\n \r\n elif item.GetState() & AUI_BUTTON_STATE_HOVER or item.IsSticky():\r\n \r\n dc.SetPen(wx.Pen(self._highlight_colour))\r\n dc.SetBrush(wx.Brush(StepColour(self._highlight_colour, 170)))\r\n\r\n # draw an even lighter background for checked item hovers (since\r\n # the hover background is the same colour as the check background)\r\n if item.GetState() & AUI_BUTTON_STATE_CHECKED:\r\n dc.SetBrush(wx.Brush(StepColour(self._highlight_colour, 180)))\r\n\r\n dc.DrawRectangleRect(rect)\r\n \r\n elif item.GetState() & AUI_BUTTON_STATE_CHECKED:\r\n \r\n # it's important to put this code in an else statment after the\r\n # hover, otherwise hovers won't draw properly for checked items\r\n dc.SetPen(wx.Pen(self._highlight_colour))\r\n dc.SetBrush(wx.Brush(StepColour(self._highlight_colour, 170)))\r\n dc.DrawRectangleRect(rect)\r\n \r\n if item.GetState() & AUI_BUTTON_STATE_DISABLED:\r\n bmp = item.GetDisabledBitmap()\r\n else:\r\n bmp = item.GetBitmap()\r\n\r\n if bmp.IsOk():\r\n dc.DrawBitmap(bmp, bmp_rect.x, bmp_rect.y, True)\r\n\r\n # set the item's text colour based on if it is disabled\r\n dc.SetTextForeground(wx.BLACK)\r\n if item.GetState() & AUI_BUTTON_STATE_DISABLED:\r\n dc.SetTextForeground(DISABLED_TEXT_COLOUR)\r\n\r\n if self._agwFlags & AUI_TB_TEXT and item.GetLabel() != \"\":\r\n self.DrawLabel(dc, wnd, item, text_rect)", "def get_buttons():\r\n return start_buttons, end_buttons", "def create_quit_button(self):\n quit_button = Button(self.littleFrame, text=\"Quitter\", font=(\"Arial\", 25), bg='white', relief='groove',\n fg='lightblue',\n command=self.leave_page, width=8, activebackground='white',\n activeforeground='lightblue')\n quit_button.grid(column=2, row=0)", "def draw_button(self):\n self.screen.fill(self.button_color, self.rect)\n self.screen.blit(self.msg_img, self.msg_img_rect)", "def draw_button(self):\r\n self.surface.fill(self.button_color, self.rect)\r\n self.surface.blit(self.msg_image, self.msg_image_rect)", "def draw_button(self, button, relative_x, relative_y, menu_width, menu_height, is_selected):\n \n # adapted from 
http://arcade.academy/examples/gui_text_button.html#gui-text-button\n screen_button_center_x = (SCREEN_WIDTH - button.center_x - relative_x)\n screen_button_center_y = menu_height + (SCREEN_HEIGHT - button.center_y - relative_y)\n\n arcade.draw_rectangle_filled(\n screen_button_center_x, screen_button_center_y,\n\n button.width, button.height,\n COLOUR_MAP[button.face_color]\n )\n\n if is_selected:\n selected_x = screen_button_center_x - (button.width // 2) - 25\n selector_height = 10\n selector_width = 16\n arcade.draw_triangle_filled(\n selected_x, screen_button_center_y - selector_height,\n selected_x, screen_button_center_y + selector_height,\n selected_x + selector_width, screen_button_center_y,\n COLOUR_MAP[Colour.YELLOW.value]\n )\n\n if not button.pressed:\n color = COLOUR_MAP[button.shadow_color]\n else:\n color = COLOUR_MAP[button.highlight_color]\n\n # Bottom horizontal\n arcade.draw_line(screen_button_center_x - button.width / 2, screen_button_center_y - button.height / 2,\n screen_button_center_x + button.width / 2, screen_button_center_y - button.height / 2,\n color, button.button_height)\n\n # Right vertical\n arcade.draw_line(screen_button_center_x + button.width / 2, screen_button_center_y - button.height / 2,\n screen_button_center_x + button.width / 2, screen_button_center_y + button.height / 2,\n color, button.button_height)\n\n if not button.pressed:\n color = COLOUR_MAP[button.highlight_color]\n else:\n color = COLOUR_MAP[button.shadow_color]\n\n # Top horizontal\n arcade.draw_line(screen_button_center_x - button.width / 2, screen_button_center_y + button.height / 2,\n screen_button_center_x + button.width / 2, screen_button_center_y + button.height / 2,\n color, button.button_height)\n\n # Left vertical\n arcade.draw_line(screen_button_center_x - button.width / 2, screen_button_center_y - button.height / 2,\n screen_button_center_x - button.width / 2, screen_button_center_y + button.height / 2,\n color, button.button_height)\n\n x = screen_button_center_x\n y = screen_button_center_y\n if not button.pressed:\n x -= button.button_height\n y += button.button_height\n\n arcade.draw_text(button.text, x, y,\n arcade.color.BLACK, font_size=button.font_size,\n width=button.width, align=\"center\",\n anchor_x=\"center\", anchor_y=\"center\")", "def draw(self):\n if not self.pressed:\n #draw dialogue prompt\n arcade.draw_rectangle_filled(self.center_x, self.center_y, 20, 20, arcade.color.ALABAMA_CRIMSON)\n arcade.draw_text(\"!\", self.center_x, self.center_y, arcade.color.BLACK, anchor_x=\"center\", anchor_y=\"center\")\n else:\n #draw dialogue box\n arcade.draw_rectangle_filled(self.center_x, self.center_y, self.width, self.height, self.color)\n arcade.draw_text(self.text, self.center_x, self.center_y, arcade.color.BLACK, anchor_x=\"center\", anchor_y=\"center\")", "def DrawPaneButton(self, dc, window, button, button_state, _rect, pane): \r\n \r\n if not pane:\r\n return\r\n \r\n if button == AUI_BUTTON_CLOSE:\r\n if pane.state & optionActive:\r\n bmp = self._active_close_bitmap\r\n else:\r\n bmp = self._inactive_close_bitmap\r\n\r\n elif button == AUI_BUTTON_PIN:\r\n if pane.state & optionActive:\r\n bmp = self._active_pin_bitmap\r\n else:\r\n bmp = self._inactive_pin_bitmap\r\n\r\n elif button == AUI_BUTTON_MAXIMIZE_RESTORE:\r\n if pane.IsMaximized():\r\n if pane.state & optionActive:\r\n bmp = self._active_restore_bitmap\r\n else:\r\n bmp = self._inactive_restore_bitmap\r\n else:\r\n if pane.state & optionActive:\r\n bmp = self._active_maximize_bitmap\r\n else:\r\n bmp = 
self._inactive_maximize_bitmap\r\n\r\n elif button == AUI_BUTTON_MINIMIZE:\r\n if pane.state & optionActive:\r\n bmp = self._active_minimize_bitmap\r\n else:\r\n bmp = self._inactive_minimize_bitmap\r\n\r\n isVertical = pane.HasCaptionLeft()\r\n \r\n rect = wx.Rect(*_rect)\r\n\r\n if isVertical:\r\n old_x = rect.x\r\n rect.x = rect.x + (rect.width/2) - (bmp.GetWidth()/2)\r\n rect.width = old_x + rect.width - rect.x - 1\r\n else:\r\n old_y = rect.y\r\n rect.y = rect.y + (rect.height/2) - (bmp.GetHeight()/2)\r\n rect.height = old_y + rect.height - rect.y - 1\r\n\r\n if button_state == AUI_BUTTON_STATE_PRESSED:\r\n rect.x += 1\r\n rect.y += 1\r\n\r\n if button_state in [AUI_BUTTON_STATE_HOVER, AUI_BUTTON_STATE_PRESSED]:\r\n\r\n if pane.state & optionActive:\r\n\r\n dc.SetBrush(wx.Brush(StepColour(self._active_caption_colour, 120)))\r\n dc.SetPen(wx.Pen(StepColour(self._active_caption_colour, 70)))\r\n\r\n else:\r\n\r\n dc.SetBrush(wx.Brush(StepColour(self._inactive_caption_colour, 120)))\r\n dc.SetPen(wx.Pen(StepColour(self._inactive_caption_colour, 70)))\r\n\r\n if wx.Platform != \"__WXMAC__\":\r\n # draw the background behind the button\r\n dc.DrawRectangle(rect.x, rect.y, 15, 15)\r\n else:\r\n # Darker the bitmap a bit\r\n bmp = DarkenBitmap(bmp, self._active_caption_colour, StepColour(self._active_caption_colour, 110))\r\n\r\n if isVertical:\r\n bmp = wx.ImageFromBitmap(bmp).Rotate90(clockwise=False).ConvertToBitmap()\r\n \r\n # draw the button itself\r\n dc.DrawBitmap(bmp, rect.x, rect.y, True)", "def define_button(self):\n self.separator1 = pygame.Rect(\n 0,\n SCREEN_WIDTH,\n SCREEN_WIDTH,\n BIG_LINE_WIDTH,\n )\n self.separator2 = pygame.Rect(\n 0,\n SCREEN_WIDTH + BIG_LINE_WIDTH // 2,\n SCREEN_WIDTH,\n BIG_LINE_WIDTH,\n )\n\n self.button = pygame.Rect(\n SCREEN_WIDTH // 2 - BUTTON_WIDTH // 2,\n (SCREEN_HEIGHT + SCREEN_WIDTH) // 2 - BUTTON_HEIGHT // 2,\n BUTTON_WIDTH,\n BUTTON_HEIGHT,\n )" ]
[ "0.61522835", "0.5975798", "0.59413844", "0.5822093", "0.57887036", "0.5761346", "0.5751224", "0.5625157", "0.560466", "0.55835086", "0.55598813", "0.55518174", "0.55303395", "0.5497481", "0.549238", "0.5456099", "0.5433463", "0.5418277", "0.5418277", "0.5418277", "0.5418144", "0.53994155", "0.539576", "0.5391317", "0.5336784", "0.5317923", "0.52966434", "0.527505", "0.526837", "0.52554667" ]
0.81077373
0
Return rectangle representing the Pay button
def get_pay_rect(self):
    return pygame.Rect(self.button_data.pay_x, self.button_data.y, self.button_data.large_width, self.button_data.height)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_amount_rect(self):\n return pygame.Rect(self.amount.x, self.amount.y, self.amount.width, self.button_data.height)", "def _get_button_region(self):\n # XXXX Only rectangulars for now\n if not self._sensitive:\n return None\n if not self._insidetemporal():\n return None\n rgn = Qd.NewRgn()\n if self._shape == 'rect':\n x0, y0 = self._convert_point(self._coordinates[0:2])\n x1, y1 = self._convert_point(self._coordinates[2:4])\n box = x0, y0, x1, y1\n Qd.RectRgn(rgn, box)\n elif self._shape == 'poly':\n Qd.OpenRgn()\n xl, yl = self._convert_point(self._coordinates[-2:])\n Qd.MoveTo(xl, yl)\n for i in range(0, len(self._coordinates), 2):\n x, y = self._convert_point(self._coordinates[i:i+2])\n Qd.LineTo(x, y)\n Qd.CloseRgn(rgn)\n elif self._shape == 'circle':\n print 'Circle not supported yet'\n elif self._shape == 'ellipse':\n # Note: rx/ry are width/height, not points\n x, y, rx, ry = self._dispobj._window._convert_coordinates(self._coordinates)\n Qd.OpenRgn()\n Qd.FrameOval((x-rx, y-ry, x+rx, y+ry))\n Qd.CloseRgn(rgn)\n else:\n print 'Invalid shape type', self._shape\n return rgn", "def DrawButton(self, dc, wnd, in_rect, button, orientation):\r\n\r\n bitmap_id, button_state = button.id, button.cur_state\r\n \r\n if bitmap_id == AUI_BUTTON_CLOSE:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_close_bmp\r\n else:\r\n bmp = self._active_close_bmp\r\n\r\n elif bitmap_id == AUI_BUTTON_LEFT:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_left_bmp\r\n else:\r\n bmp = self._active_left_bmp\r\n\r\n elif bitmap_id == AUI_BUTTON_RIGHT:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_right_bmp\r\n else:\r\n bmp = self._active_right_bmp\r\n\r\n elif bitmap_id == AUI_BUTTON_WINDOWLIST:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_windowlist_bmp\r\n else:\r\n bmp = self._active_windowlist_bmp\r\n\r\n else:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = button.dis_bitmap\r\n else:\r\n bmp = button.bitmap\r\n \r\n if not bmp.IsOk():\r\n return\r\n\r\n rect = wx.Rect(*in_rect)\r\n\r\n if orientation == wx.LEFT:\r\n \r\n rect.SetX(in_rect.x)\r\n rect.SetY(((in_rect.y + in_rect.height)/2) - (bmp.GetHeight()/2))\r\n rect.SetWidth(bmp.GetWidth())\r\n rect.SetHeight(bmp.GetHeight())\r\n \r\n else:\r\n \r\n rect = wx.Rect(in_rect.x + in_rect.width - bmp.GetWidth(),\r\n ((in_rect.y + in_rect.height)/2) - (bmp.GetHeight()/2),\r\n bmp.GetWidth(), bmp.GetHeight())\r\n\r\n self.DrawButtons(dc, rect, bmp, wx.WHITE, button_state)\r\n\r\n out_rect = wx.Rect(*rect)\r\n return out_rect", "def get_price_rect(self, index):\n y_position = self.product_entries.top_y + (self.product_entries.row_h * index)\n return pygame.Rect(self.product_entries.price_x, y_position, self.product_entries.price_w, self.button_data.height)", "def DrawButton(self, dc, wnd, in_rect, button, orientation):\r\n\r\n bitmap_id, button_state = button.id, button.cur_state\r\n \r\n if bitmap_id == AUI_BUTTON_CLOSE:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_close_bmp\r\n elif button_state & AUI_BUTTON_STATE_HOVER:\r\n bmp = self._hover_close_bmp\r\n elif button_state & AUI_BUTTON_STATE_PRESSED:\r\n bmp = self._pressed_close_bmp\r\n else:\r\n bmp = self._active_close_bmp\r\n\r\n elif bitmap_id == AUI_BUTTON_LEFT:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_left_bmp\r\n else:\r\n bmp = self._active_left_bmp\r\n\r\n elif bitmap_id == AUI_BUTTON_RIGHT:\r\n if button_state 
& AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_right_bmp\r\n else:\r\n bmp = self._active_right_bmp\r\n\r\n elif bitmap_id == AUI_BUTTON_WINDOWLIST:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = self._disabled_windowlist_bmp\r\n else:\r\n bmp = self._active_windowlist_bmp\r\n\r\n else:\r\n if button_state & AUI_BUTTON_STATE_DISABLED:\r\n bmp = button.dis_bitmap\r\n else:\r\n bmp = button.bitmap\r\n \r\n if not bmp.IsOk():\r\n return\r\n\r\n rect = wx.Rect(*in_rect)\r\n\r\n if orientation == wx.LEFT:\r\n \r\n rect.SetX(in_rect.x)\r\n rect.SetY(((in_rect.y + in_rect.height)/2) - (bmp.GetHeight()/2))\r\n rect.SetWidth(bmp.GetWidth())\r\n rect.SetHeight(bmp.GetHeight())\r\n \r\n else:\r\n \r\n rect = wx.Rect(in_rect.x + in_rect.width - bmp.GetWidth(),\r\n ((in_rect.y + in_rect.height)/2) - (bmp.GetHeight()/2),\r\n bmp.GetWidth(), bmp.GetHeight())\r\n \r\n rect = IndentPressedBitmap(rect, button_state)\r\n dc.DrawBitmap(bmp, rect.x, rect.y, True)\r\n\r\n out_rect = rect\r\n\r\n if bitmap_id == AUI_BUTTON_RIGHT:\r\n self._buttonRect = wx.Rect(rect.x, rect.y, 30, rect.height)\r\n \r\n return out_rect", "def draw_but(self, window):\n # draws the rectangular button\n p1 = graphics.Point(self.cen_point_x - self.width / 2, \n self.cen_point_y - self.height / 2)\n p2 = graphics.Point(self.cen_point_x + self.width / 2, \n self.cen_point_y + self.height / 2)\n self.button = graphics.Rectangle(p1, p2)\n self.button.setOutline(\"Orange\")\n self.button.draw(window)\n \n # draws the text on the button\n self.text.draw(window)", "def get_cancel_rect(self):\n return pygame.Rect(self.button_data.cancel_x, self.button_data.y, self.button_data.large_width, self.button_data.height)", "def get_done_rect(self):\n return pygame.Rect(self.button_data.done_x, self.button_data.y, self.button_data.large_width, self.button_data.height)", "def rect(self):\n return Rect(self.x, 0, PipePair.WIDTH, PipePair.PIECE_HEIGHT)", "def drawRectangle(x, y, width, height):\n pen1.up()\n pen1.goto(x, y)\n pen1.down()\n pen1.fd(width)\n pen1.right(90)\n pen1.fd(height)\n pen1.right(90)\n pen1.fd(width)\n pen1.right(90)\n pen1.fd(height)", "def define_button(self):\n self.separator1 = pygame.Rect(\n 0,\n SCREEN_WIDTH,\n SCREEN_WIDTH,\n BIG_LINE_WIDTH,\n )\n self.separator2 = pygame.Rect(\n 0,\n SCREEN_WIDTH + BIG_LINE_WIDTH // 2,\n SCREEN_WIDTH,\n BIG_LINE_WIDTH,\n )\n\n self.button = pygame.Rect(\n SCREEN_WIDTH // 2 - BUTTON_WIDTH // 2,\n (SCREEN_HEIGHT + SCREEN_WIDTH) // 2 - BUTTON_HEIGHT // 2,\n BUTTON_WIDTH,\n BUTTON_HEIGHT,\n )", "def draw_button(self):\n # Draw the button's outline\n pg.draw.rect(self.screen, self.text_color, pg.Rect(self.rect.left - 1, self.rect.top - 1, self.rect.width + 2, self.rect.height + 2))\n\n # Draw the button\n pg.draw.rect(self.screen, self.button_color, self.rect)\n\n # Blit the button's text onto it\n self.screen.blit(self.txt_surface, self.txt_surface_rect)", "def IndentPressedBitmap(rect, button_state):\r\n\r\n if button_state == AUI_BUTTON_STATE_PRESSED:\r\n rect.x += 1\r\n rect.y += 1\r\n\r\n return rect", "def paint_rectangle_layer(working_canvas, name, activation,\n size, scaled_size, x_coordinate, config_window, canvas_colour_config):\n\n y_middle = 350\n y_start = y_middle - (scaled_size / 2)\n y_end = y_middle + (scaled_size / 2)\n Layer_rectangle = working_canvas.create_rectangle(x_coordinate, y_start, x_coordinate + 40, y_end,\n **canvas_colour_config[\"rectangle_layer\"])\n\n working_canvas.tag_bind(Layer_rectangle, \"<Button-1>\", config_window)\n 
working_canvas.move(Layer_rectangle, 0, 0)\n\n # Labels\n working_canvas.create_text(x_coordinate + 20, y_start - 25, **canvas_colour_config[\"text\"],\n text=\"{\" + name)\n working_canvas.create_text(x_coordinate + 20, y_start - 10, **canvas_colour_config[\"text\"],\n text=\" size: \" + str(size) + \"}\")\n # Arrows\n working_canvas.create_text(x_coordinate+72, y_middle-10, text=activation,\n **canvas_colour_config[\"text\"])\n working_canvas.create_line(x_coordinate+50, y_middle, x_coordinate+100, y_middle, arrow=LAST,\n **canvas_colour_config[\"text\"])\n\n return Layer_rectangle", "def getRect(self):\n return self.rect()", "def curves(self):\n return Rectangle(quantify=8,\n width=self.heights[child.index],\n length=self.width_car,\n position=translate(self.position,\n \"z\",\n -self.positions[0][child.index]),\n centered=False)", "def tab_only_rect(self):\n rect = self.rect()\n lB, rB = [c for c in self.children() if isinstance(c, QtWidgets.QToolButton)]\n side_button_width = lB.width()+rB.width()+15\n rect.adjust(0,0, -side_button_width, 0)\n return rect", "def DrawPaneButton(self, dc, window, button, button_state, _rect, pane): \r\n \r\n if not pane:\r\n return\r\n \r\n if button == AUI_BUTTON_CLOSE:\r\n if pane.state & optionActive:\r\n bmp = self._active_close_bitmap\r\n else:\r\n bmp = self._inactive_close_bitmap\r\n\r\n elif button == AUI_BUTTON_PIN:\r\n if pane.state & optionActive:\r\n bmp = self._active_pin_bitmap\r\n else:\r\n bmp = self._inactive_pin_bitmap\r\n\r\n elif button == AUI_BUTTON_MAXIMIZE_RESTORE:\r\n if pane.IsMaximized():\r\n if pane.state & optionActive:\r\n bmp = self._active_restore_bitmap\r\n else:\r\n bmp = self._inactive_restore_bitmap\r\n else:\r\n if pane.state & optionActive:\r\n bmp = self._active_maximize_bitmap\r\n else:\r\n bmp = self._inactive_maximize_bitmap\r\n\r\n elif button == AUI_BUTTON_MINIMIZE:\r\n if pane.state & optionActive:\r\n bmp = self._active_minimize_bitmap\r\n else:\r\n bmp = self._inactive_minimize_bitmap\r\n\r\n isVertical = pane.HasCaptionLeft()\r\n \r\n rect = wx.Rect(*_rect)\r\n\r\n if isVertical:\r\n old_x = rect.x\r\n rect.x = rect.x + (rect.width/2) - (bmp.GetWidth()/2)\r\n rect.width = old_x + rect.width - rect.x - 1\r\n else:\r\n old_y = rect.y\r\n rect.y = rect.y + (rect.height/2) - (bmp.GetHeight()/2)\r\n rect.height = old_y + rect.height - rect.y - 1\r\n\r\n if button_state == AUI_BUTTON_STATE_PRESSED:\r\n rect.x += 1\r\n rect.y += 1\r\n\r\n if button_state in [AUI_BUTTON_STATE_HOVER, AUI_BUTTON_STATE_PRESSED]:\r\n\r\n if pane.state & optionActive:\r\n\r\n dc.SetBrush(wx.Brush(StepColour(self._active_caption_colour, 120)))\r\n dc.SetPen(wx.Pen(StepColour(self._active_caption_colour, 70)))\r\n\r\n else:\r\n\r\n dc.SetBrush(wx.Brush(StepColour(self._inactive_caption_colour, 120)))\r\n dc.SetPen(wx.Pen(StepColour(self._inactive_caption_colour, 70)))\r\n\r\n if wx.Platform != \"__WXMAC__\":\r\n # draw the background behind the button\r\n dc.DrawRectangle(rect.x, rect.y, 15, 15)\r\n else:\r\n # Darker the bitmap a bit\r\n bmp = DarkenBitmap(bmp, self._active_caption_colour, StepColour(self._active_caption_colour, 110))\r\n\r\n if isVertical:\r\n bmp = wx.ImageFromBitmap(bmp).Rotate90(clockwise=False).ConvertToBitmap()\r\n \r\n # draw the button itself\r\n dc.DrawBitmap(bmp, rect.x, rect.y, True)", "def showrect(context):\r\n #context[\"products\"] = BlogPostForm()\r\n data=[{'time':'2003','price':'6800'},\r\n {'time':'2004','price':'7800'},\r\n {'time':'2005','price':'8800'},\r\n {'time':'2006','price':'9800'},\r\n 
{'time':'2007','price':'10800'},\r\n {'time':'2008','price':'12600'}\r\n ]\r\n price=[]\r\n for item in data: \r\n price.append(int(item['price']))\r\n maxprice=max(price)\r\n svgheight=200\r\n svgwidth=378\r\n time_relative=15\r\n price_relative=-5\r\n rectwidth=40\r\n x_relative=18 \r\n k=float(svgheight*0.9)/float(maxprice)\r\n rect_x_position=29\r\n time_x_position=34\r\n price_x_position=31\r\n for item in data:\r\n item['rectheight']=k*float(item['price'])\r\n item['rect_y_position']=svgheight-item['rectheight']\r\n item['time_y_position']=item['rect_y_position']+time_relative\r\n item['price_y_position']=item['rect_y_position']+price_relative\r\n item['rect_x_position']=rect_x_position\r\n item['time_x_position']=time_x_position\r\n item['price_x_position']=price_x_position\r\n rect_x_position+=rectwidth+x_relative\r\n time_x_position+=rectwidth+x_relative\r\n price_x_position+=rectwidth+x_relative\r\n result={}\r\n result['svgwidth']=svgwidth\r\n result['svgheight']=svgheight\r\n result['rectwidth']=rectwidth\r\n result['data']=data\r\n return result", "def draw_button(self):\n self.screen.fill(self.button_color, self.rect)\n self.screen.blit(self.msg_img, self.msg_img_rect)", "def rectangle(self, clear_screen=True, x1=10, y1=10, x2=80, y2=40, fill_color='black', outline_color='black'):\n\n if clear_screen:\n self.clear()\n\n return self.draw.rectangle((x1, y1, x2, y2), fill=fill_color, outline=outline_color)", "def afficher_rectangle(R):\n rectangle = R\n buttom_left = rectangle[0]\n S = rectangle[1]\n #print(\"rectangle = \", rectangle, \"et S = \", S)\n dx = abs(buttom_left[0] - S[0])\n dy = abs(buttom_left[1] - S[1])\n return([buttom_left[0], buttom_left[1], dx, dy])", "def draw_button(self):\r\n self.surface.fill(self.button_color, self.rect)\r\n self.surface.blit(self.msg_image, self.msg_image_rect)", "def getRect(self): # real signature unknown; restored from __doc__\r\n pass", "def DrawButton(self, dc, wnd, item, rect):\r\n\r\n bmp_rect, text_rect = self.GetToolsPosition(dc, item, rect)\r\n \r\n if not item.GetState() & AUI_BUTTON_STATE_DISABLED:\r\n \r\n if item.GetState() & AUI_BUTTON_STATE_PRESSED:\r\n \r\n dc.SetPen(wx.Pen(self._highlight_colour))\r\n dc.SetBrush(wx.Brush(StepColour(self._highlight_colour, 150)))\r\n dc.DrawRectangleRect(rect)\r\n \r\n elif item.GetState() & AUI_BUTTON_STATE_HOVER or item.IsSticky():\r\n \r\n dc.SetPen(wx.Pen(self._highlight_colour))\r\n dc.SetBrush(wx.Brush(StepColour(self._highlight_colour, 170)))\r\n\r\n # draw an even lighter background for checked item hovers (since\r\n # the hover background is the same colour as the check background)\r\n if item.GetState() & AUI_BUTTON_STATE_CHECKED:\r\n dc.SetBrush(wx.Brush(StepColour(self._highlight_colour, 180)))\r\n\r\n dc.DrawRectangleRect(rect)\r\n \r\n elif item.GetState() & AUI_BUTTON_STATE_CHECKED:\r\n \r\n # it's important to put this code in an else statment after the\r\n # hover, otherwise hovers won't draw properly for checked items\r\n dc.SetPen(wx.Pen(self._highlight_colour))\r\n dc.SetBrush(wx.Brush(StepColour(self._highlight_colour, 170)))\r\n dc.DrawRectangleRect(rect)\r\n \r\n if item.GetState() & AUI_BUTTON_STATE_DISABLED:\r\n bmp = item.GetDisabledBitmap()\r\n else:\r\n bmp = item.GetBitmap()\r\n\r\n if bmp.IsOk():\r\n dc.DrawBitmap(bmp, bmp_rect.x, bmp_rect.y, True)\r\n\r\n # set the item's text colour based on if it is disabled\r\n dc.SetTextForeground(wx.BLACK)\r\n if item.GetState() & AUI_BUTTON_STATE_DISABLED:\r\n dc.SetTextForeground(DISABLED_TEXT_COLOUR)\r\n\r\n if 
self._agwFlags & AUI_TB_TEXT and item.GetLabel() != \"\":\r\n self.DrawLabel(dc, wnd, item, text_rect)", "def create_barrier() -> Rect:\n return pygame.Rect(int(WINDOW_WIDTH / 2 - BARRIER_WIDTH / 2), 0, BARRIER_WIDTH, WINDOW_HEIGHT)", "def getRect(self):\n return self.rect", "def DrawPaneButton(self, dc, window, button, button_state, rect, pane): \r\n\r\n if self.usingTheme:\r\n\r\n hTheme = self.hTheme1 \r\n \r\n # Get the real button position (compensating for borders)\r\n drect = wx.Rect(rect.x, rect.y, self._button_size, self._button_size)\r\n \r\n # Draw the themed close button\r\n rc = RECT(0, 0, 0, 0)\r\n if pane.HasCaptionLeft():\r\n rc.top = rect.x + self._button_border_size\r\n rc.left = int(rect.y + 1.5*self._button_border_size)\r\n rc.right = rect.x + self._button_size + self._button_border_size\r\n rc.bottom = int(rect.y + self._button_size + 1.5*self._button_border_size)\r\n else:\r\n rc.top = rect.x - self._button_border_size\r\n rc.left = int(rect.y + 1.5*self._button_border_size)\r\n rc.right = rect.x + self._button_size- self._button_border_size\r\n rc.bottom = int(rect.y + self._button_size + 1.5*self._button_border_size)\r\n\r\n if button == AUI_BUTTON_CLOSE:\r\n btntype = 19\r\n \r\n elif button == AUI_BUTTON_PIN:\r\n btntype = 23\r\n\r\n elif button == AUI_BUTTON_MAXIMIZE_RESTORE:\r\n if not pane.IsMaximized():\r\n btntype = 17\r\n else:\r\n btntype = 21\r\n else:\r\n btntype = 15\r\n\r\n state = 4 # CBS_DISABLED\r\n \r\n if pane.state & optionActive:\r\n\r\n if button_state == AUI_BUTTON_STATE_NORMAL:\r\n state = 1 # CBS_NORMAL\r\n\r\n elif button_state == AUI_BUTTON_STATE_HOVER:\r\n state = 2 # CBS_HOT\r\n\r\n elif button_state == AUI_BUTTON_STATE_PRESSED:\r\n state = 3 # CBS_PUSHED\r\n\r\n else:\r\n raise Exception(\"ERROR: Unknown State.\")\r\n\r\n else: # inactive pane\r\n\r\n if button_state == AUI_BUTTON_STATE_NORMAL:\r\n state = 5 # CBS_NORMAL\r\n\r\n elif button_state == AUI_BUTTON_STATE_HOVER:\r\n state = 6 # CBS_HOT\r\n\r\n elif button_state == AUI_BUTTON_STATE_PRESSED:\r\n state = 7 # CBS_PUSHED\r\n\r\n else:\r\n raise Exception(\"ERROR: Unknown State.\")\r\n\r\n try:\r\n winxptheme.DrawThemeBackground(hTheme, dc.GetHDC(), btntype, state, (rc.top, rc.left, rc.right, rc.bottom), None)\r\n except TypeError:\r\n return\r\n\r\n else:\r\n\r\n # Fallback to default closebutton if themes are not enabled\r\n rect2 = wx.Rect(rect.x-4, rect.y+2, rect.width, rect.height)\r\n AuiDefaultDockArt.DrawPaneButton(self, dc, window, button, button_state, rect2, pane)", "def drawing_area_button_release_event(self, widget, event):\n if self.drawing_area_mode == \"GENERATE_CROP\":\n if event.button == 1 and self.pixbuf != None:\n self.button_pressed = False\n \n #current ending coordinates\n self.x_end = int(event.x)\n self.y_end = int(event.y)\n \n #draw the box\n self.draw_box(widget, self.x_begin, self.y_begin, \\\n self.x_end, self.y_end)\n \n if self.drawing_area_mode == \"GENERATE_CROP_TRANSITION\":\n self.drawing_area_mode = \"GENERATE_CROP\"", "def rounded_box(self, bt, br, bb, bl):\n tlrx, tlry = self.border_top_left_radius\n trrx, trry = self.border_top_right_radius\n brrx, brry = self.border_bottom_right_radius\n blrx, blry = self.border_bottom_left_radius\n\n tlrx = max(0, tlrx - bl)\n tlry = max(0, tlry - bt)\n trrx = max(0, trrx - br)\n trry = max(0, trry - bt)\n brrx = max(0, brrx - br)\n brry = max(0, brry - bb)\n blrx = max(0, blrx - bl)\n blry = max(0, blry - bb)\n\n x = self.border_box_x() + bl\n y = self.border_box_y() + bt\n width = self.border_width() - bl 
- br\n height = self.border_height() - bt - bb\n\n # Fix overlapping curves\n # See https://www.w3.org/TR/css-backgrounds-3/#corner-overlap\n ratio = min([1] + [\n extent / sum_radii\n for extent, sum_radii in (\n (width, tlrx + trrx),\n (width, blrx + brrx),\n (height, tlry + blry),\n (height, trry + brry),\n )\n if sum_radii > 0\n ])\n return (\n x, y, width, height,\n (tlrx * ratio, tlry * ratio),\n (trrx * ratio, trry * ratio),\n (brrx * ratio, brry * ratio),\n (blrx * ratio, blry * ratio))" ]
[ "0.703257", "0.6708636", "0.6072446", "0.6055187", "0.5909128", "0.58895695", "0.5883365", "0.5796211", "0.57874066", "0.5740925", "0.568164", "0.56528354", "0.56363374", "0.5625496", "0.5503014", "0.54720646", "0.54671466", "0.5442511", "0.54301876", "0.54194665", "0.5407715", "0.5399497", "0.53990436", "0.53705513", "0.5356852", "0.5346434", "0.5331941", "0.53230166", "0.5319035", "0.5305431" ]
0.83420265
0
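All of the rect getters in these records read their geometry from a shared button_data holder. A minimal sketch of how that might fit together; the dataclass, its field values, and the Buttons wrapper are assumptions inferred from the getters above, not taken from the source project:

import pygame
from dataclasses import dataclass

@dataclass
class ButtonData:
    # Hypothetical layout values; only the attribute names are attested
    # by the getters in the surrounding records.
    y: int = 400
    height: int = 48
    large_width: int = 120
    pay_x: int = 20
    cancel_x: int = 160

class Buttons:
    def __init__(self):
        self.button_data = ButtonData()

    def get_pay_rect(self):
        # Same shape as the document field above: position from pay_x/y,
        # size from the shared large_width/height.
        return pygame.Rect(self.button_data.pay_x, self.button_data.y,
                           self.button_data.large_width, self.button_data.height)

print(Buttons().get_pay_rect())  # <rect(20, 400, 120, 48)>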
Return rectangle representing the Cancel button
def get_cancel_rect(self):
    return pygame.Rect(self.button_data.cancel_x, self.button_data.y, self.button_data.large_width, self.button_data.height)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cancelButton(self):\n return self.__cancelButton", "def cancelButton(self):\n return self.__cancelButton", "def cancelButton(self):\n return self.__cancelButton", "def get_done_rect(self):\n return pygame.Rect(self.button_data.done_x, self.button_data.y, self.button_data.large_width, self.button_data.height)", "def cancelButton(self):\n \n self.answer=\"cancel\"\n self.top.destroy()", "def click_cancel(self):\n self.click_element(self.cancel_button_selector)", "def cancelButton(self):\n \n self.answer=-1\n self.top.destroy()", "def _cancel(self, __button):\r\n\r\n self.destroy()", "def get_pay_rect(self):\n return pygame.Rect(self.button_data.pay_x, self.button_data.y, self.button_data.large_width, self.button_data.height)", "def on_cancel(self, *args):\n self.response(Gtk.ResponseType.CANCEL)", "def buttonCancel_Clicked( self, event ):\n\t\tself.EndModal(wx.ID_CANCEL)", "def _cancel(self, __button=None):\r\n\r\n self.destroy()", "def OnButtonAboutCancelButton(self, event):\r\n\t\tself.Hide()", "def cancel(self): #$NON-NLS-1$\r", "def _get_button_region(self):\n # XXXX Only rectangulars for now\n if not self._sensitive:\n return None\n if not self._insidetemporal():\n return None\n rgn = Qd.NewRgn()\n if self._shape == 'rect':\n x0, y0 = self._convert_point(self._coordinates[0:2])\n x1, y1 = self._convert_point(self._coordinates[2:4])\n box = x0, y0, x1, y1\n Qd.RectRgn(rgn, box)\n elif self._shape == 'poly':\n Qd.OpenRgn()\n xl, yl = self._convert_point(self._coordinates[-2:])\n Qd.MoveTo(xl, yl)\n for i in range(0, len(self._coordinates), 2):\n x, y = self._convert_point(self._coordinates[i:i+2])\n Qd.LineTo(x, y)\n Qd.CloseRgn(rgn)\n elif self._shape == 'circle':\n print 'Circle not supported yet'\n elif self._shape == 'ellipse':\n # Note: rx/ry are width/height, not points\n x, y, rx, ry = self._dispobj._window._convert_coordinates(self._coordinates)\n Qd.OpenRgn()\n Qd.FrameOval((x-rx, y-ry, x+rx, y+ry))\n Qd.CloseRgn(rgn)\n else:\n print 'Invalid shape type', self._shape\n return rgn", "def _onButtonCancelClick(self, widget):\n self.delete()", "def messageCancel(self,message):\n if self.app.DEBUG:\n print 'Dialog: Parent: %s.messageCancel'%self.__class__\n return self.message(message,style=wx.YES_NO|wx.ICON_QUESTION | wx.CANCEL)", "def OnCancel(self, event):\n pass", "def OnCancel(self, event):\n pass", "def onCancelButtonClick(self, event):\n self.EndModal(wx.ID_CANCEL)\n event.Skip()", "def sgnCancel(self):\n\n self.uiCloseWindow()", "def get_remove_rect(self, index, width):\n y_position = self.product_entries.top_y + (self.product_entries.row_h * index)\n return pygame.Rect(self.product_entries.remove_x, y_position, width, self.button_data.height)", "def create_buttonbox(self, master_fr):\n \n ttk.Button(\n master_fr,\n text=\"Cancel\",\n command=self.dlg_pbCancel\n ).pack(side=tk.RIGHT)\n \n ttk.Button(\n master_fr,\n text=\"OK\",\n command=self.dlg_pbOK,\n default=tk.ACTIVE\n ).pack(side=tk.RIGHT)\n \n # Do Cancel if closed\n self.tkWindow.protocol(\"WM_DELETE_WINDOW\", self.dlg_pbCancel)", "def pressCancel(self):\n self.close()", "def onBtnCancelClicked(self):\n self.close()", "def ask_ok_cancel(message=\"\", title=None):\n return dialog(\"ask_ok_cancel\", message=message, title=title)", "def OnButtonOptionsCancelButton(self, event):\r\n\t\tself.Hide()", "def tab_only_rect(self):\n rect = self.rect()\n lB, rB = [c for c in self.children() if isinstance(c, QtWidgets.QToolButton)]\n side_button_width = lB.width()+rB.width()+15\n rect.adjust(0,0, -side_button_width, 0)\n 
return rect", "def action_cancel(self):\n ids = isinstance(self.ids, (int)) and [self.ids] or self.ids\n context = self._context or {}\n self.cancel_move()\n self.clear_wh_lines()\n return True", "def get_amount_rect(self):\n return pygame.Rect(self.amount.x, self.amount.y, self.amount.width, self.button_data.height)" ]
[ "0.70682716", "0.70682716", "0.70682716", "0.65052885", "0.6482575", "0.62415266", "0.62338424", "0.61535877", "0.61140287", "0.6066416", "0.6013982", "0.6001875", "0.5925659", "0.59174865", "0.58720326", "0.5870998", "0.5853521", "0.5830382", "0.5830382", "0.5786672", "0.57733697", "0.5747019", "0.5743469", "0.5688444", "0.56842464", "0.5648044", "0.5645389", "0.5557395", "0.5551577", "0.55472165" ]
0.8576048
0
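A typical consumer of get_pay_rect and get_cancel_rect is mouse hit-testing with Rect.collidepoint. A hedged sketch; the dispatch function and the action strings it returns are illustrative, not part of the source:

def hit_test(layout, mouse_pos):
    # layout is any object exposing the rect getters from these records;
    # mouse_pos would normally come from a pygame.MOUSEBUTTONDOWN event.pos.
    if layout.get_pay_rect().collidepoint(mouse_pos):
        return "pay"
    if layout.get_cancel_rect().collidepoint(mouse_pos):
        return "cancel"
    return None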
Return rectangle representing the top information bar
def get_top_bar_rect(self):
    return pygame.Rect(self.top_bar.x, self.top_bar.y, self.top_bar.width, self.button_data.height)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _rect_top(self):\n\treturn max(self.y, self.y + self.h)", "def DrawTop(screen, top_x, top_y, top_len, top_width):\n pygame.draw.rect(screen, (255,0,0),(top_x, top_y, top_len*2, top_width*2), 4)", "def draw_top(self):\n return group()", "def drawUI(self):\n cv2.rectangle(self.root, (0, self.height - 80), (self.width, self.height), (50, 50, 50), -1) # bar\n cv2.putText(self.root, 'Zavri s Q...', (20, self.height - 25), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 1, cv2.LINE_AA)", "def topBorderFor( player ):\n return centerTextAt( \"\", default_display_vars.borderChar_Top, getUserScreenWidth( player ) )", "def generate_topline(self) -> tuple:\n line_top = \"┌\" + \"┬\".join([\n \"─\" + \"─\" + \"─\" for _ in range(self.width)]) + \"┐\\n\"\n return (self.style, line_top)", "def getItemRect(self, item):\n r = Rect(1, 0, 1, 1)\n for p in self.menu.items:\n r.topLeft.x = r.bottomRight.x\n if p.name:\n r.bottomRight.x += (nameLength(p.name) + 2)\n if p is item:\n return r\n return r", "def show_top_status(self):\n self.header_win.clear()\n size = self.size()\n display = self.app.config[\"display\"]\n head_parts = []\n if display[\"show_app_name\"]:\n head_parts.append(\"Suplemon Editor v\"+self.app.version)\n if display[\"show_clock\"]:\n head_parts.append(curr_time())\n if display[\"show_file_list\"]:\n head_parts.append(self.file_list_str())\n\n # Add module statuses to the status bar\n for name in self.app.modules.modules.keys():\n module = self.app.modules.modules[name]\n if module.options[\"status\"] == \"top\":\n head_parts.append(module.get_status());\n\n head = \" - \".join(head_parts)\n head = head + ( \" \" * (self.screen.getmaxyx()[1]-len(head)-1) )\n if len(head) >= size[0]:\n head = head[:size[0]-1]\n self.header_win.addstr(0,0, head, curses.color_pair(0) | curses.A_REVERSE)\n self.header_win.refresh()", "def top(self):\n return self.points['topRight'].y", "def top(self):\n return self.points['topRight'].y", "def top(wid_in_cm, hei_times):\n inch = 2.54\n wid = wid_in_cm / inch\n hei = wid * hei_times\n points_in_inch = 72 # there are 72 points in a cm\n pt_size = 0.35146 # mm\n fontsize_9pt = 9 * pt_size * points_in_inch / (inch * 10)\n plt.rc('text', usetex=True)\n fig = plt.figure(1, figsize=(wid, hei))\n fig.subplots_adjust(0, 0, 1, 1)\n lw = 1\n ax = plt.axes()\n return ax, lw, fontsize_9pt", "def table_top_abs(self):\n table_height = np.array([0, 0, self.table_full_size[2]])\n return string_to_array(self.floor.get(\"pos\")) + table_height", "def displayhealthbar(point, HP, big):\n x1 = point[0] - 20*big\n y1 = point[1] - 40*(0.4*big)\n width1 = 10*big*4\n height1 = 30*(0.4*big)\n x2 = point[0] - 20 * big + 4\n y2 = point[1] - 40*(0.4*big) + 4\n width2 = HP * ((10*big*4)-8)\n height2 = 30*(0.4*big)-8\n pygame.draw.rect(win, (230, 230, 230), (x1, y1, width1, height1))\n pygame.draw.rect(win, (0, 255, 0), (x2, y2, width2, height2))", "def GetHiCorner(self):\n ...", "def _rect_bottom(self):\n\treturn min(self.y, self.y + self.h)", "def display(self):\n stroke(51)\n fill(self.couleur)\n rect(self.pos_x, 0, self.largeur, self.min_y)\n rect(self.pos_x, self.min_y + self.hauteur, self.largeur, util.SCREEN_Y-(self.min_y + self.hauteur))", "def rect(self):\n return Rect(self.x, 0, PipePair.WIDTH, PipePair.PIECE_HEIGHT)", "def draw_status_bar(self, screen: curses.window) -> None:\n sh, sw = screen.getmaxyx()\n cpu_percent = psutil.cpu_percent()\n status_bar_text = f\"Press 'q' to exit | STATUS BAR | CPU Usage: {cpu_percent}%\"\n whitespace_width = \" \" * (sw - 
len(status_bar_text) - 1)\n add_str_color(screen, sh - 1, 0, status_bar_text, ColorPair.black_on_white)\n add_str_color(screen, sh - 1, len(status_bar_text), whitespace_width, ColorPair.black_on_white)\n\n # Add space in corner using insch so it doesn't wrap and cause an error\n ins_ch_color(screen, sh - 1, sw - 1, ' ', ColorPair.black_on_white)", "def top_height_px(self):\n return self.top_pieces * PipePair.PIECE_HEIGHT", "def rect(value):\r\n return '({}, {}), {} x {}'.format(value.x(), value.y(), value.width(), value.height())", "def calc_size(self):\r\n self.posx = WIDTH_ORDERBOOK\r\n self.posy = HEIGHT_STATUS\r\n self.width = self.termwidth - WIDTH_ORDERBOOK\r\n self.height = self.termheight - HEIGHT_CON - HEIGHT_STATUS", "def screen_status(self):\n height, width = self.stdscr.getmaxyx()\n i_start = self.y_start * width + self.x_start\n\n return i_start, height, width", "def getRect(self): # real signature unknown; restored from __doc__\r\n pass", "def draw_head_box(data):\n crop = np.array(data['crop']);\n tl = data['tls'][data['i']];\n br = data['brs'][data['i']];\n cv2.rectangle(crop,tl,br,(255,255,0));\n return crop;", "def rect(self):\n return Rect(self.x, self.y, Bird.WIDTH, Bird.HEIGHT)", "def get_gauge_dimensions(self):\n pos_x, pos_y, dim_x, dim_y = self.status_bar.GetFieldRect(1)\n return (pos_x, pos_y), (dim_x, dim_y)", "def calc_size(self):\r\n self.height = self.termheight - HEIGHT_CON - HEIGHT_STATUS\r\n self.posy = HEIGHT_STATUS\r\n self.width = WIDTH_ORDERBOOK", "def show_bottom_status(self):\n editor = self.app.get_editor()\n size = self.size()\n cur = editor.cursor()\n data = \"@ \"+str(cur[0])+\",\"+str(cur[1])+\" \"+\\\n \"cur:\"+str(len(editor.cursors))+\" \"+\\\n \"buf:\"+str(len(editor.buffer))\n if self.app.config[\"display\"][\"show_last_key\"]:\n data += \" key:\"+str(self.app.last_input)\n #if self.app.config[\"display\"][\"show_term_size\"]:\n # data += \" [\"+str(size[0])+\"x\"+str(size[1])+\"]\"\n if self.app.config[\"app\"][\"debug\"]:\n data += \" cs:\"+str(editor.current_state)+\" hist:\"+str(len(editor.history)) # Undo / Redo debug\n #if editor.last_find:\n # find = editor.last_find\n # if len(find) > 10:find = find[:10]+\"...\"\n # data = \"find:'\"+find+\"' \" + data\n\n # Add module statuses to the status bar\n for name in self.app.modules.modules.keys():\n module = self.app.modules.modules[name]\n if module.options[\"status\"] == \"bottom\":\n data += \" \" + module.get_status();\n\n self.status_win.clear()\n status = self.app.get_status()\n extra = size[0] - len(status+data) - 1\n line = status+(\" \"*extra)+data\n\n if len(line) >= size[0]:\n line = line[:size[0]-1]\n\n self.status_win.addstr(0,0, line, curses.color_pair(0) | curses.A_REVERSE)\n self.status_win.refresh()", "def __indicatorGlobalRect(self):\n pos = self.__indicator.parentWidget().mapToGlobal(\n self.__indicator.geometry().topLeft())\n return QRect(pos.x(), pos.y(),\n self.__indicator.width(), self.__indicator.height())", "def get_top_cell(self):\n return self._top_cell" ]
[ "0.6610852", "0.6593702", "0.64745486", "0.6129006", "0.6054198", "0.59405684", "0.5939769", "0.5931584", "0.59063864", "0.59063864", "0.5879101", "0.5859516", "0.5808413", "0.5802447", "0.58016765", "0.5800475", "0.57628614", "0.5745136", "0.5725048", "0.57205886", "0.57190937", "0.57169294", "0.57159287", "0.5714668", "0.56950647", "0.56780696", "0.56658244", "0.56282663", "0.5620659", "0.5615376" ]
0.726646
0
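get_top_bar_rect takes its position and width from self.top_bar but reuses the shared button height. A sketch of painting status text into that bar; the colours, font choice, and the need for pygame.init() are assumptions:

import pygame

def draw_top_bar(surface, layout, text):
    # Requires pygame.init() (for pygame.font) and a display surface.
    rect = layout.get_top_bar_rect()
    pygame.draw.rect(surface, (40, 40, 40), rect)
    font = pygame.font.SysFont(None, max(rect.height - 8, 12))
    label = font.render(text, True, (255, 255, 255))
    surface.blit(label, label.get_rect(midleft=(rect.x + 8, rect.centery)))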
Return rectangle representing the total amount bar
def get_amount_rect(self):
    return pygame.Rect(self.amount.x, self.amount.y, self.amount.width, self.button_data.height)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bar_length(stage, fraction):\n x1, y1, x2, y2 = canvas.coords('bar_' + stage)\n canvas.coords(\n 'bar_' + stage,\n 20,\n y1,\n 20 + round(fraction * (width - 40)),\n y2,\n )", "def get_bars(self):\n return self.percent_2_bars(self.percent)", "def _draw(self):\n if self._total == 0:\n return\n self._clear()\n\n print('{0:-<{1}}{2:3d}% ({3:{5}d}/{4:{5}d}) ETA {6}'.format(\n \"=\" * round(self._bar_width * self._current / self._total),\n self._bar_width,\n round(math.floor(self._current / self._total * 100)),\n self._current,\n self._total,\n self._digits,\n self._remaining_time\n ), end='\\r')", "def draw_bar(t, height):\n t.begin_fill() # Added this line\n t.left(90)\n t.forward(height)\n # t.write(\" \"+ str(height))\n t.right(90)\n t.forward(10)\n t.right(90)\n t.forward(height)\n t.left(90)\n t.end_fill() # Added this line\n t.forward(10)", "def draw_bar(t, height,echelle = 1):\n t.begin_fill() # Added this line\n t.left(90)\n t.forward(height*echelle)\n t.write(\" \"+ str(float(height)))\n t.right(90)\n t.forward(25)\n t.right(90)\n t.forward(height*echelle)\n t.left(90)\n t.end_fill() # Added this line\n t.forward(3)", "def shape(self):\n if self.volumes:\n return 4 * self.bars_count + 1 + 1,\n else:\n return 3 * self.bars_count + 1 + 1,", "def rectArea(base, height):\n return base * height", "def rect_area(rect):\n return rect[2] * rect[3]", "def render_bar(panel, x, y, total_width, name, value,\n maximum, bar_color, back_color, foreground_color):\n bar_width = int(float(value) / maximum * total_width)\n\n libtcod.console_set_default_background(panel, back_color)\n libtcod.console_rect(panel, x, y, total_width, 1,\n False, libtcod.BKGND_SCREEN)\n\n libtcod.console_set_default_background(panel, bar_color)\n if bar_width > 0:\n libtcod.console_rect(panel, x, y, bar_width, 1,\n False, libtcod.BKGND_SCREEN)\n\n libtcod.console_set_default_foreground(panel, foreground_color)\n libtcod.console_print_ex(panel, int(x + total_width / 2), y,\n libtcod.BKGND_NONE, libtcod.CENTER,\n '{0}: {1}/{2}'.format(name, value, maximum))", "def expensesBar(self):\r\n return self._createTextProfile(self.expenses)", "def rectangle_area(base, height):\n return (base * height)", "def build_bar(self):\n diff = float(self.amount - self.min)\n percent_done = int(round((diff / float(self.span)) * 100.0))\n\n # figure the proper number of 'character' make up the bar\n all_full = self.width - 2\n num_hashes = int(round((percent_done * all_full) / 100))\n\n if self.mode == 'dynamic':\n # build a progress bar with self.char (to create a dynamic bar\n # where the percent string moves along with the bar progress.\n self.bar = self.char * num_hashes\n else:\n # build a progress bar with self.char and spaces (to create a\n # fixed bar (the percent string doesn't move)\n self.bar = self.char * num_hashes + ' ' * (all_full-num_hashes)\n\n percent_str = str(percent_done) + \"%\"\n self.bar = '[ ' + self.bar + ' ] ' + percent_str\n self.bar += \" <\" + str(self.amount) + \" / \" + str(self.max) + \">\"", "def total_area(self):\n return numpy.prod([r[1] - r[0] for r in self.range_])", "def area(self):\n\t\treturn self.height * self.height", "def area(self):\n return (self.baselength1 + self.baselength2)*self.height/2", "def Draw_Bar(self, num, barimage, pointimage):\r\n self.prop = num/self.range\r\n self.screen.blit(barimage, [self.pos[0], self.pos[1]])\r\n self.screen.blit(pointimage, [(self.pos[0] - int(10*self.ratiow))\r\n + int(self.prop*self.barsize),\r\n self.pos[1] - int(15*self.ratioh)])\r\n self.label = 
self.medfont.render(self.text+\": \"+str(num)+\"%\", 1, self.color)\r\n self.screen.blit(self.label, self.textpos)", "def _pbar(self, x, y):\n if self.show_pbar:\n return tqdm(range(x, y), ncols=100, leave=True)\n else:\n return range(x, y)", "def draw(self, screen):\n n_of_bars = len(self.data)\n graph_width = screen.get_width() - self.EDGE * 2\n total_SEPARATION = self.SEPARATION * (n_of_bars - 1)\n bar_width = (graph_width - total_SEPARATION) / n_of_bars\n MAX_VALUE = max(self.data) # Height of bars will be relative to the largest value\n\n x = self.EDGE\n for i in range(n_of_bars):\n # Create rect for bar\n height = (self.data[i] / MAX_VALUE) * (screen.get_height() - self.EDGE * 3)\n rect = pygame.Rect(int(x), 0, int(bar_width), height)\n rect.bottom = screen.get_height() - self.EDGE\n\n # Determine color based off value\n color_val = int(self.data[i] / MAX_VALUE * 100)\n color_r = 55 + color_val * 2\n color_b = 255 - color_val * 2\n pygame.draw.rect(screen, (color_r, 0, color_b), rect)\n x += bar_width + self.SEPARATION", "def _calculate_bar_width(self):\n (self._tw, self._th) = ProgressTracker._terminal_size()\n self._bar_width = self._tw - (2 * self._digits + 3 + 5 + 5 + len(self._remaining_time))", "def bar(length, progress):\n dots = floor(progress * length)\n spaces = length - dots\n\n percent = int(progress * 100)\n\n return f'{percent}% |' + dots * '*' + spaces * ' ' + '|'", "def total_area(self):\n return self._total_area", "def draw_bar_chart(t,h,w): # create function to draw chart\n if abs(h) < 100:\n tortoise.color(\"SeaGreen\",\"ForestGreen\") # set turtle color\n elif abs(h) >= 100 and abs(h) < 200:\n tortoise.color(\"orange\",\"gold\") # set turtle color\n else:\n tortoise.color(\"coral3\",\"IndianRed\") # set turtle color\n \n t.begin_fill() # begin drawing shapes\n t.left(90)\n t.forward(h) # draw bar height\n t.right(90)\n t.forward(20) # prepare for text\n if h >= 0:\n t.write(h) # write value\n else:\n t.penup()\n t.right(90)\n t.forward(15)\n t.write(h)\n t.forward(-15)\n t.left(90)\n t.pendown()\n t.forward(40) # bar width\n t.right(90)\n t.forward(h)\n t.left(90)\n t.penup()\n t.right(90)\n t.forward(15)\n t.left(90)\n t.forward(-50)\n t.write(w) # write word\n t.forward(50)\n t.left(90)\n t.forward(15)\n t.right(90)\n t.forward(20) # spacing\n t.pendown()\n t.end_fill() # stop drawing shapes", "def get_bar(self):\n return self.__bar", "def incomeBar(self):\r\n return self._createTextProfile(self.income)", "def bar_chart_score(self, grouped):\n picked_scenario = self.scenario_dict[\"%d\" % (self.scenario_num-1)]\n distinct_enum_X = self.data_dict[picked_scenario[\"X\"]]['distinct_enum']\n score = 0\n if distinct_enum_X == 1:\n score = 0\n elif distinct_enum_X >= 2 and distinct_enum_X <= 20:\n score = 3\n elif distinct_enum_X > 20:\n score = 40 / distinct_enum_X\n return score", "def rect(l, b):\n print(\"Area of rectangle is\", l * b)", "def get_total(self):\n total = super().get_total()\n\n if self.qty < 10:\n total += 3\n\n return total", "def baseline_total_organic_compound(self):\n return self.indoor_air_quality_baseline[1]", "def rectSize(rect):\n return np.array([rect[1] - rect[0], rect[3] - rect[2]])", "def rectangle_area(width : number, height : number) ->number:\n area = width*height\n #print(\"The area of rectangle is =\", area, \"sq. units\")\n return area" ]
[ "0.6459064", "0.64523906", "0.63721067", "0.6196911", "0.61125314", "0.61101776", "0.6100009", "0.6002434", "0.5970846", "0.595529", "0.5952238", "0.5951107", "0.5946098", "0.5916032", "0.58980125", "0.5888238", "0.5865675", "0.5840044", "0.580254", "0.5791656", "0.57573295", "0.5751365", "0.57482666", "0.5747676", "0.5736429", "0.5731306", "0.568579", "0.56662124", "0.56592345", "0.5658596" ]
0.6932268
0
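Note that get_amount_rect mixes two holders: x, y, and width come from self.amount while the height is reused from button_data, so the amount bar always matches the button row height. A standalone sketch with made-up values:

import pygame
from types import SimpleNamespace

amount = SimpleNamespace(x=20, y=60, width=280)  # hypothetical positions
button_data = SimpleNamespace(height=48)         # shared control height

amount_rect = pygame.Rect(amount.x, amount.y, amount.width, button_data.height)
print(amount_rect)  # <rect(20, 60, 280, 48)>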
Return rectangle representing the up scroll button
def get_up_scroll_rect(self):
    return pygame.Rect(self.scroll.x, self.scroll.up_y, self.scroll.width, self.scroll.height)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_down_scroll_rect(self):\n return pygame.Rect(self.scroll.x, self.scroll.dn_y, self.scroll.width, self.scroll.height)", "def scrollUp_y(self):\r\n if self.y_stack>0:\r\n self.y_stack-=1\r\n \r\n self.pixmap3=self.drawPixmap(\"xz\")\r\n self.lbl3.setPixmap(self.pixmap3)\r\n self.pixmap4= self.writeEdge(\"xz\")\r\n self.lbl4.setPixmap(self.pixmap4)\r\n self.y_stack_lbl.setText(str(self.y_stack+1) + '/' + str(self.img.shape[1]))", "def _rect_top(self):\n\treturn max(self.y, self.y + self.h)", "def up(self):\n if self.selected_offset > 0:\n self.selected_offset -= 1\n if self.selected_offset < self.top_offset:\n self.top_offset -= 1\n self.__update_display()\n self.__update_selection()", "def mouse_right_up(self):\n pass", "def scrollUp(self):\r\n if self.z_stack>0:\r\n self.z_stack-=1\r\n self.pixmap=self.drawPixmap(\"xy\")\r\n self.lbl.setPixmap(self.pixmap)\r\n self.pixmap2=self.writeEdge(\"xy\")\r\n self.lbl2.setPixmap(self.pixmap2)\r\n\r\n self.z_stack_lbl.setText(str(self.z_stack+1) + '/' + str(self.img.shape[0]))", "def get_top_bar_rect(self):\n return pygame.Rect(self.top_bar.x, self.top_bar.y, self.top_bar.width, self.button_data.height)", "def pos_up(self, y=1):\n\n self.y -= y\n return self.pos(self.x, self.y)", "def scrollUp_x(self):\r\n if self.x_stack>0:\r\n self.x_stack-=1\r\n \r\n self.pixmap5=self.drawPixmap(\"yz\")\r\n self.lbl5.setPixmap(self.pixmap5) \r\n self.pixmap6= self.writeEdge(\"yz\")\r\n self.lbl6.setPixmap(self.pixmap6)\r\n self.x_stack_lbl.setText(str(self.x_stack+1) + '/' + str(self.img.shape[2]))", "def rightButtonUp(self):\n\t\tautopy.mouse.toggle(False,autopy.mouse.RIGHT_BUTTON)", "def move_up(self):\n if self.pointer != 0:\n logging.debug(\"moved up\")\n self.pointer -= 1\n self.refresh()\n self.reset_scrolling()\n return True\n else:\n return False", "def rightUp(self):", "def OnRightUp(self, event):\r\n\r\n x, y = event.GetX(), event.GetY()\r\n wnd = self.TabHitTest(x, y)\r\n\r\n if wnd:\r\n e = AuiNotebookEvent(wxEVT_COMMAND_AUINOTEBOOK_TAB_RIGHT_UP, self.GetId())\r\n e.SetEventObject(self)\r\n e.SetSelection(self.GetIdxFromWindow(wnd))\r\n self.GetEventHandler().ProcessEvent(e)\r\n elif not self.ButtonHitTest(x, y):\r\n e = AuiNotebookEvent(wxEVT_COMMAND_AUINOTEBOOK_BG_RIGHT_UP, self.GetId())\r\n e.SetEventObject(self)\r\n self.GetEventHandler().ProcessEvent(e)", "def ev_mousebuttonup(self, event: MouseButtonUp) -> None:", "def page_up(self):\n self.set_initial_offset(self.initial_offset - self.my_surface.get_height())", "def _rect_bottom(self):\n\treturn min(self.y, self.y + self.h)", "def mouse_middle_up(self):\n pass", "def move_up(self, step: int = 1) -> None:\n if self.cursor_pos.x == 0:\n self.cursor_pos = Point(self.height - step, self.cursor_pos.y)\n else:\n self.cursor_pos = Point(self.cursor_pos.x-step, self.cursor_pos.y)", "def on_up_rcr_btn_clicked(self):\n self.status = 'up'\n height = self.height_set_rcr_lineEdit.text()\n vel = self.vel_set_rcr_lineEdit.text()\n height = float(height)\n vel = float(vel)\n self.control1.device.cmd_precise_move(height, vel)\n direction = \"UP\"\n # txt = \"given target: \" + str(height) + \", \"+ \"vel: \" + str(vel)\n self.set_rcr_txt(height, vel, direction)\n self.set_status_txt(self.status)\n # self.set_ab_height()", "def swipe_up(self):\n self.swipe_sub(SWIPE_MATRIX[0])", "def on_step_up_rcr_btn_clicked(self):\n self.status = 'up'\n status_txt = \"step \" + self.status\n self.set_status_txt(status_txt)\n self.control1.device.cmd_move(self.status)\n # self.set_ab_height()", "def 
get_done_rect(self):\n return pygame.Rect(self.button_data.done_x, self.button_data.y, self.button_data.large_width, self.button_data.height)", "def move_up(self):\r\n if self.rect.top > 0:\r\n self.rect.top -= self.speed", "def IndentPressedBitmap(rect, button_state):\r\n\r\n if button_state == AUI_BUTTON_STATE_PRESSED:\r\n rect.x += 1\r\n rect.y += 1\r\n\r\n return rect", "def move_up(self):\n if self.center.y < (self.screen_height - (self.height / 2)):\n self.center.y += 5", "def buttonDownScreenPos(self, btn=None):\n if btn is None:\n btn = self.button()\n return Point(self._buttonDownScreenPos[btn])", "def buttonDownPos(self, btn=None):\n if btn is None:\n btn = self.button()\n return Point(self.currentItem.mapFromScene(self._buttonDownScenePos[btn]))", "def up(self):\n self.move(0, 1)", "def up(self, *args):\n self.cur_win().up()", "def up_down(self, up):\n if up == 'u':\n up = 1\n elif up == 'n':\n up = 0\n elif up == 'd':\n up = -1\n else:\n raise ValueError(\"The heck you doing Servo?? u d or n ONLY\")\n self.h += up\n if self.get_pos() == blocks['wall']:\n self.h -= up" ]
[ "0.67192155", "0.6316986", "0.6173602", "0.6136128", "0.61354804", "0.609783", "0.6021287", "0.60154986", "0.5984156", "0.59422266", "0.59341276", "0.59182394", "0.59066033", "0.5906058", "0.5906037", "0.590299", "0.58598703", "0.58307093", "0.58299977", "0.58195907", "0.5783811", "0.57837594", "0.57773536", "0.5776181", "0.57704085", "0.57430583", "0.5742851", "0.5741632", "0.5739564", "0.57333153" ]
0.81178427
0
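The up and down scroll rects share x, width, and height from self.scroll; only the y origin (up_y versus dn_y) differs, which makes click dispatch a two-way comparison. A sketch with assumed coordinates:

import pygame
from types import SimpleNamespace

scroll = SimpleNamespace(x=340, up_y=100, dn_y=300, width=40, height=40)

up_rect = pygame.Rect(scroll.x, scroll.up_y, scroll.width, scroll.height)
dn_rect = pygame.Rect(scroll.x, scroll.dn_y, scroll.width, scroll.height)

def scroll_delta(mouse_pos):
    # Illustrative: scroll one row up or down depending on which arrow was hit.
    if up_rect.collidepoint(mouse_pos):
        return -1
    if dn_rect.collidepoint(mouse_pos):
        return 1
    return 0

print(scroll_delta((350, 110)))  # -1 (inside the up arrow)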
Return rectangle representing the down scroll button
def get_down_scroll_rect(self):
    return pygame.Rect(self.scroll.x, self.scroll.dn_y, self.scroll.width, self.scroll.height)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_up_scroll_rect(self):\n return pygame.Rect(self.scroll.x, self.scroll.up_y, self.scroll.width, self.scroll.height)", "def buttonDownPos(self, btn=None):\n if btn is None:\n btn = self.button()\n return Point(self.currentItem.mapFromScene(self._buttonDownScenePos[btn]))", "def isdown(self):\n return self._drawing", "def rightDown(self):", "def buttonDownScreenPos(self, btn=None):\n if btn is None:\n btn = self.button()\n return Point(self._buttonDownScreenPos[btn])", "def DrawOverflowButton(self, dc, wnd, rect, state):\r\n \r\n if state & AUI_BUTTON_STATE_HOVER or state & AUI_BUTTON_STATE_PRESSED:\r\n \r\n cli_rect = wnd.GetClientRect()\r\n light_gray_bg = StepColour(self._highlight_colour, 170)\r\n\r\n if self._agwFlags & AUI_TB_VERTICAL:\r\n \r\n dc.SetPen(wx.Pen(self._highlight_colour))\r\n dc.DrawLine(rect.x, rect.y, rect.x+rect.width, rect.y)\r\n dc.SetPen(wx.Pen(light_gray_bg))\r\n dc.SetBrush(wx.Brush(light_gray_bg))\r\n dc.DrawRectangle(rect.x, rect.y+1, rect.width, rect.height)\r\n \r\n else:\r\n \r\n dc.SetPen(wx.Pen(self._highlight_colour))\r\n dc.DrawLine(rect.x, rect.y, rect.x, rect.y+rect.height)\r\n dc.SetPen(wx.Pen(light_gray_bg))\r\n dc.SetBrush(wx.Brush(light_gray_bg))\r\n dc.DrawRectangle(rect.x+1, rect.y, rect.width, rect.height)\r\n \r\n x = rect.x + 1 + (rect.width-self._overflow_bmp.GetWidth())/2\r\n y = rect.y + 1 + (rect.height-self._overflow_bmp.GetHeight())/2\r\n dc.DrawBitmap(self._overflow_bmp, x, y, True)", "def scrollDown_y(self):\r\n if self.y_stack<self.img.shape[1]-1:\r\n self.y_stack+=1\r\n self.pixmap3=self.drawPixmap(\"xz\") \r\n self.lbl3.setPixmap(self.pixmap3)\r\n self.pixmap4= self.writeEdge(\"xz\")\r\n self.lbl4.setPixmap(self.pixmap4)\r\n self.y_stack_lbl.setText(str(self.y_stack+1) + '/' + str(self.img.shape[1]))", "def buttonDownScenePos(self, btn=None):\n if btn is None:\n btn = self.button()\n return Point(self._buttonDownScenePos[btn])", "def on_step_down_rcr_btn_clicked(self):\n self.status = 'down'\n status_txt = \"step \"+ self.status\n self.set_status_txt(status_txt)\n self.control1.device.cmd_move(self.status)\n # self.set_ab_height()", "def scrollDown(self):\r\n\r\n if self.z_stack<self.img.shape[0]-1:\r\n self.z_stack+=1\r\n \r\n #self.pixmap=QtGui.QPixmap.fromImage(ImageQt.ImageQt(misc.toimage(self.img[self.z_stack]))).scaled(500,500)\r\n self.pixmap= self.drawPixmap(\"xy\")\r\n self.lbl.setPixmap(self.pixmap)\r\n self.pixmap2= self.writeEdge(\"xy\")\r\n self.lbl2.setPixmap(self.pixmap2)\r\n self.z_stack_lbl.setText(str(self.z_stack+1) + '/' + str(self.img.shape[0]))", "def down():\n global y, canvas # y é modificado\n canvas.create_line(x, y, x, y + 10)\n y += 10", "def get_done_rect(self):\n return pygame.Rect(self.button_data.done_x, self.button_data.y, self.button_data.large_width, self.button_data.height)", "def move_scroll_bar_down(self):\n scroll = self.textBrowser.verticalScrollBar()\n scroll.setSliderPosition(scroll.maximum())", "def on_down_rcr_btn_clicked(self):\n self.status = 'down'\n height = self.height_set_rcr_lineEdit.text()\n vel = self.vel_set_rcr_lineEdit.text()\n # self.speed_rcr_hSlider.setValue(vel)\n # self.height_rcr_hSlider.setValue(height)\n height = -float(height)\n vel = float(vel)\n direction = \"DOWN\"\n self.control1.device.cmd_precise_move(height, vel)\n self.set_rcr_txt(height, vel, direction)\n self.set_status_txt(self.status)\n # self.set_ab_height()", "def move_down(self):\n if self.pointer < (len(self.contents)-1):\n logging.debug(\"moved down\")\n self.pointer += 1\n 
self.reset_scrolling()\n self.refresh()\n return True\n else:\n return False", "def tab_only_rect(self):\n rect = self.rect()\n lB, rB = [c for c in self.children() if isinstance(c, QtWidgets.QToolButton)]\n side_button_width = lB.width()+rB.width()+15\n rect.adjust(0,0, -side_button_width, 0)\n return rect", "def scrollUp_y(self):\r\n if self.y_stack>0:\r\n self.y_stack-=1\r\n \r\n self.pixmap3=self.drawPixmap(\"xz\")\r\n self.lbl3.setPixmap(self.pixmap3)\r\n self.pixmap4= self.writeEdge(\"xz\")\r\n self.lbl4.setPixmap(self.pixmap4)\r\n self.y_stack_lbl.setText(str(self.y_stack+1) + '/' + str(self.img.shape[1]))", "def swipe_down(self):\n self.swipe_sub(SWIPE_MATRIX[1])", "def get_pay_rect(self):\n return pygame.Rect(self.button_data.pay_x, self.button_data.y, self.button_data.large_width, self.button_data.height)", "def _rect_bottom(self):\n\treturn min(self.y, self.y + self.h)", "def pos_down(self, y=1):\n\n self.y += y\n return self.pos(self.x, self.y)", "def _rect_top(self):\n\treturn max(self.y, self.y + self.h)", "def page_down(self):\n self.set_initial_offset(self.initial_offset + self.my_surface.get_height())", "def GetDown(self, *args, **kwargs):\n pass", "def move_down(self):\r\n if self.rect.bottom < BG_HEIGHT - 60:\r\n self.rect.top += self.speed", "def move_down(self):\n if self.center.y > (self.height / 2):\n self.center.y -= 5", "def OnRightDown(self, event):\r\n \r\n cli_rect = wx.RectPS(wx.Point(0, 0), self.GetClientSize())\r\n\r\n if self._gripper_sizer_item:\r\n gripper_rect = self._gripper_sizer_item.GetRect()\r\n if gripper_rect.Contains(event.GetPosition()):\r\n return\r\n \r\n if self._overflow_sizer_item:\r\n \r\n dropdown_size = self._art.GetElementSize(AUI_TBART_OVERFLOW_SIZE)\r\n if dropdown_size > 0 and event.m_x > cli_rect.width - dropdown_size and \\\r\n event.m_y >= 0 and event.m_y < cli_rect.height and self._art:\r\n return\r\n \r\n self._action_pos = wx.Point(*event.GetPosition())\r\n self._action_item = self.FindToolForPosition(*event.GetPosition())\r\n\r\n if self._action_item:\r\n if self._action_item.state & AUI_BUTTON_STATE_DISABLED:\r\n \r\n self._action_pos = wx.Point(-1, -1)\r\n self._action_item = None\r\n return", "def move_down(self):\n self.y -= 1", "def OnRightUp(self, event):\r\n\r\n x, y = event.GetX(), event.GetY()\r\n wnd = self.TabHitTest(x, y)\r\n\r\n if wnd:\r\n e = AuiNotebookEvent(wxEVT_COMMAND_AUINOTEBOOK_TAB_RIGHT_UP, self.GetId())\r\n e.SetEventObject(self)\r\n e.SetSelection(self.GetIdxFromWindow(wnd))\r\n self.GetEventHandler().ProcessEvent(e)\r\n elif not self.ButtonHitTest(x, y):\r\n e = AuiNotebookEvent(wxEVT_COMMAND_AUINOTEBOOK_BG_RIGHT_UP, self.GetId())\r\n e.SetEventObject(self)\r\n self.GetEventHandler().ProcessEvent(e)", "def page_down(self):\n counter = self.get_entry_count_per_screen()\n while counter != 0 and self.pointer < (len(self.contents)-1):\n logging.debug(\"moved down\")\n self.pointer += 1\n counter -= 1\n self.refresh()\n self.reset_scrolling()\n return True" ]
[ "0.7188937", "0.6204293", "0.62015027", "0.61635894", "0.61521184", "0.6064291", "0.59770024", "0.5958718", "0.58810395", "0.5874888", "0.5863888", "0.5783592", "0.57748634", "0.577476", "0.57615274", "0.5755299", "0.5751036", "0.5721733", "0.57195115", "0.57188267", "0.57070404", "0.56930804", "0.5687035", "0.5679767", "0.56736076", "0.5668328", "0.5663592", "0.56618893", "0.5620715", "0.56056744" ]
0.8037389
0
Returns a rectangle representing the description display for a product
def get_description_rect(self, index):
    y_position = self.product_entries.top_y + (self.product_entries.row_h * index)
    return pygame.Rect(self.product_entries.desc_x, y_position, self.product_entries.desc_w, self.button_data.height)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_description():\n desc = dict()\n desc[\"cache\"] = 3600\n desc[\"data\"] = True\n desc[\n \"description\"\n ] = \"\"\"This plot is not meant for interactive use, but a backend for\n SPS plots.\n \"\"\"\n desc[\"arguments\"] = [\n dict(\n type=\"text\",\n name=\"pid\",\n default=\"202012300005-KDVN-WWUS83-SPSDVN\",\n label=\"IEM generated up to 35 char product identifier:\",\n ),\n dict(\n type=\"int\",\n default=0,\n name=\"segnum\",\n label=\"Product Segment Number (starts at 0):\",\n ),\n ]\n return desc", "def create_item_description(self, gameStateObj):\n surf = Engine.create_surface((98, 56 + 80), transparent=True)\n\n width, height = (96, 56) # ??\n item = gameStateObj.activeMenu.getSelection()\n \n real_surf = MenuFunctions.CreateBaseMenuSurf((width, height), 'BaseMenuBackgroundOpaque')\n BGSurf = Engine.create_surface((real_surf.get_width() + 2, real_surf.get_height() + 4), transparent=True, convert=True)\n BGSurf.blit(real_surf, (2, 4))\n BGSurf.blit(GC.IMAGESDICT['SmallGem'], (0, 0))\n # Now make translucent\n BGSurf = Image_Modification.flickerImageTranslucent(BGSurf, 10)\n\n if item.weapon and self.canWield(item):\n top = 4\n left = 2\n GC.FONT['text_white'].blit('Affin', BGSurf, (width//2 - GC.FONT['text_white'].size('Affin')[0] + left, 4 + top))\n GC.FONT['text_white'].blit('Atk', BGSurf, (5 + left, 20 + top))\n GC.FONT['text_white'].blit('AS', BGSurf, (width//2 + 5 + left, 20 + top))\n GC.FONT['text_white'].blit('Hit', BGSurf, (5 + left, 36 + top))\n GC.FONT['text_white'].blit('Avo', BGSurf, (width//2 + 5 + left, 36 + top))\n \n dam = str(self.damage(gameStateObj, item))\n acc = str(self.accuracy(gameStateObj, item))\n avo = str(self.avoid(gameStateObj, item))\n atkspd = str(self.attackspeed(item))\n AtkWidth = GC.FONT['text_blue'].size(dam)[0]\n HitWidth = GC.FONT['text_blue'].size(acc)[0]\n AvoidWidth = GC.FONT['text_blue'].size(avo)[0]\n ASWidth = GC.FONT['text_blue'].size(atkspd)[0] \n GC.FONT['text_blue'].blit(dam, BGSurf, (width//2 - 4 - AtkWidth + left, 20 + top))\n GC.FONT['text_blue'].blit(atkspd, BGSurf, (width - 8 - ASWidth + left, 20 + top))\n GC.FONT['text_blue'].blit(acc, BGSurf, (width//2 - 4 - HitWidth + left, 36 + top))\n GC.FONT['text_blue'].blit(avo, BGSurf, (width - 8 - AvoidWidth + left, 36 + top))\n\n item.drawType(BGSurf, width//2 + 8 + left, 3 + top)\n\n else: # assumes every non-weapon has a description\n if item.desc:\n words_in_item_desc = item.desc\n else:\n words_in_item_desc = \"Cannot wield.\"\n lines = TextChunk.line_wrap(TextChunk.line_chunk(words_in_item_desc), width - 8, GC.FONT['text_white'])\n\n for index, line in enumerate(lines):\n GC.FONT['text_white'].blit(line, BGSurf, (4 + 2, 4+index*16 + 4))\n\n surf.blit(BGSurf, (0, 76))\n\n if gameStateObj.cursor.position[0] > GC.TILEX//2 + gameStateObj.cameraOffset.get_x():\n rightflag = True\n else:\n rightflag = False\n\n if not self.generic_flag:\n BigPortraitSurf = self.bigportrait\n # If on the left, mirror the character portrait\n if not rightflag:\n BigPortraitSurf = Engine.flip_horiz(BigPortraitSurf)\n surf.blit(BigPortraitSurf, (2, 0))\n\n return surf", "def _build_itemized_description_table(products: typing.List[Product] = []):\n numrows = len(products)\n table_001 = FlexibleColumnWidthTable(number_of_rows=numrows, number_of_columns=3) \n table_001.add(\n TableCell(\n Paragraph(\"Ensemble Name\", font_color=X11Color(\"White\")),\n background_color=HexColor(\"0b3954\"),\n preferred_width=Decimal(256),\n )\n )\n table_001.add(\n TableCell(\n Paragraph(\"Classification\", 
font_color=X11Color(\"White\")),\n background_color=HexColor(\"0b3954\"),\n preferred_width=Decimal(128),\n )\n )\n table_001.add(\n TableCell(\n Paragraph(\"Fee\", font_color=X11Color(\"White\")),\n background_color=HexColor(\"0b3954\"),\n preferred_width=Decimal(64),\n )\n )\n \n return table_001", "def description():", "def get_description(self):", "def get_description():\n desc = {\"description\": __doc__, \"data\": True, \"cache\": 600}\n today = datetime.date.today()\n desc[\"arguments\"] = [\n dict(\n type=\"csector\",\n name=\"csector\",\n default=\"IA\",\n label=\"Select state/sector to plot\",\n ),\n dict(\n type=\"date\",\n name=\"sdate\",\n default=f\"{today.year}/01/01\",\n label=\"Start Date:\",\n min=\"2000/01/04\",\n max=today.strftime(\"%Y/%m/%d\"),\n ),\n dict(\n type=\"date\",\n name=\"edate\",\n default=today.strftime(\"%Y/%m/%d\"),\n label=\"End Date:\",\n min=\"2000/01/04\",\n max=today.strftime(\"%Y/%m/%d\"),\n ),\n dict(\n type=\"select\",\n name=\"d\",\n default=\"0\",\n options=PDICT,\n label=\"Select Drought Classification (at and above counted):\",\n ),\n dict(\n type=\"select\",\n name=\"w\",\n default=\"percent\",\n options=PDICT2,\n label=\"How to express time for plot:\",\n ),\n dict(type=\"cmap\", name=\"cmap\", default=\"plasma\", label=\"Color Ramp:\"),\n ]\n return desc", "def get_description(self):\n return DisplayText(self._description)", "def description(self) -> str:\n return \"put units of a given colour on the perimeter of the board.\"", "def __str__(self):\n return (\"[Rectangle] \" + \"(\" + str(self.id) +\n \") \" + str(self.x) + \"/\" + str(self.y) +\n \" - \" + str(self.width) + \"/\" +\n str(self.height))", "def draw(self, context):\n \n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n \n layout = self.layout\n self.draw_product_size(layout)\n box = layout.box()\n row = box.row()\n Left_Depth.draw_prompt(row) \n Right_Depth.draw_prompt(row)\n box = layout.box()\n row = box.row()\n Shelf_Quantity.draw_prompt(row)", "def __str__(self):\n return('[Rectangle] {}/{}'.format(self.__width, self.__height))", "def __str__(self):\n return str(\"[Rectangle] {}/{}\".format(self.__width, self.__height))", "def GetDescription(cls):\n return textwrap.dedent('''\n This trace step includes a diagram of the Ego long. 
acceleration in the report.\n ''').strip()", "def __str__(self):\n return f'{self.product.title} - Color {self.product.color}'", "def get_description(self):\n return \"-\".join(\n map(str, (self.release, self.chromosome, self.start, self.reference, self.alternative))\n )", "def __repr__(self):\n return self.description", "def description(self):", "def description(self) -> str:\r\n description = \"The player must aim to put the most possible units \" + \\\r\n \"of \" + colour_name(self.colour) + \" on the outer\" +\\\r\n \" perimeter.\"\r\n return description", "def __str__(self):\n return (\"[Rectangle] ({}) {}/{} - {}/{}\".format(self.id, self.x,\n self.y, self.width,\n self.height))", "def draw(self, context):\n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n Add_Backing = self.product.get_prompt(\"Add Backing\")\n Backing_Thickness = self.product.get_prompt(\"Backing Thickness\")\n Add_Top = self.product.get_prompt(\"Add Top KD\")\n Remove_Left_Side = self.product.get_prompt(\"Remove Left Side\")\n Remove_Right_Side = self.product.get_prompt(\"Remove Right Side\")\n Hide_Toe_Kick = self.product.get_prompt(\"Hide Toe Kick\")\n Door = self.product.get_prompt(\"Door\")\n Use_Left_Swing = self.product.get_prompt(\"Use Left Swing\")\n Pull_Location = self.product.get_prompt(\"Pull Location\")\n Force_Double_Doors = self.product.get_prompt(\"Force Double Doors\")\n Door_Pull_Height = self.product.get_prompt(\"Door Pull Height\")\n Door_Type = self.product.get_prompt(\"Door Type\")\n Angled_Shelves = self.product.get_prompt(\"Angled Shelves\")\n Open_Door = self.product.get_prompt(\"Open Door\")\n Base_Pull_Location = self.product.get_prompt(\"Base Pull Location\")\n Tall_Pull_Location = self.product.get_prompt(\"Tall Pull Location\")\n Upper_Pull_Location = self.product.get_prompt(\"Upper Pull Location\")\n \n layout = self.layout\n self.draw_product_size(layout) \n \n if Left_Depth:\n box = layout.box()\n row = box.row()\n Left_Depth.draw_prompt(row)\n \n if Right_Depth:\n Right_Depth.draw_prompt(row)\n \n if Shelf_Quantity:\n col = box.column(align=True)\n row = col.row()\n row.label(\"Qty:\")\n row.prop(self,\"shelf_quantity\",expand=True) \n col.separator()\n \n if Add_Backing:\n row = box.row()\n Add_Backing.draw_prompt(row)\n\n #if Backing_Thickness:\n # if Add_Backing.value() == True:\n # row = box.row()\n # Backing_Thickness.draw_prompt(row)\n \n \n if Add_Top:\n row = box.row()\n Add_Top.draw_prompt(row) \n\n if Remove_Left_Side:\n row = box.row()\n Remove_Left_Side.draw_prompt(row) \n \n if Remove_Right_Side:\n row = box.row()\n Remove_Right_Side.draw_prompt(row) \n \n \n # row = box.row()\n # Hide_Toe_Kick.draw_prompt(row)\n \n row = box.row()\n Door.draw_prompt(row)\n if Door.value():\n if Angled_Shelves and Door_Type:\n if Angled_Shelves.value() == False:\n row = box.row()\n row.prop(self,'Door_Type',text=\"Door Type\")\n #Door_Type.draw_prompt(row)\n row = box.row()\n #Pull_Location.draw_prompt(row)\n row.prop(self,'Pull_Location',text=\"Pull Location\")\n\n row = box.row()\n #Door_Pull_Height.draw_prompt(row)\n row.prop(self,'Pull_Type',text=\"Pull Type\")\n row = box.row()\n if self.Pull_Type == 'Base':\n Base_Pull_Location.draw_prompt(row)\n elif self.Pull_Type == 'Tall':\n Tall_Pull_Location.draw_prompt(row)\n else:\n Upper_Pull_Location.draw_prompt(row)\n\n if Open_Door: \n row = box.row()\n Open_Door.draw_prompt(row)\n\n if Angled_Shelves:\n if 
Angled_Shelves.value():\n row = box.row()\n Use_Left_Swing.draw_prompt(row)\n row = box.row() \n Force_Double_Doors.draw_prompt(row)", "def draw(self, context):\n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n Add_Backing = self.product.get_prompt(\"Add Backing\")\n Backing_Thickness = self.product.get_prompt(\"Backing Thickness\")\n Add_Top = self.product.get_prompt(\"Add Top KD\")\n Remove_Left_Side = self.product.get_prompt(\"Remove Left Side\")\n Remove_Right_Side = self.product.get_prompt(\"Remove Right Side\")\n Hide_Toe_Kick = self.product.get_prompt(\"Hide Toe Kick\")\n Door = self.product.get_prompt(\"Door\")\n Use_Left_Swing = self.product.get_prompt(\"Use Left Swing\")\n Pull_Location = self.product.get_prompt(\"Pull Location\")\n Force_Double_Doors = self.product.get_prompt(\"Force Double Doors\")\n Door_Pull_Height = self.product.get_prompt(\"Door Pull Height\")\n Door_Type = self.product.get_prompt(\"Door Type\")\n Angled_Shelves = self.product.get_prompt(\"Angled Shelves\")\n Open_Door = self.product.get_prompt(\"Open Door\")\n Base_Pull_Location = self.product.get_prompt(\"Base Pull Location\")\n Tall_Pull_Location = self.product.get_prompt(\"Tall Pull Location\")\n Upper_Pull_Location = self.product.get_prompt(\"Upper Pull Location\")\n \n layout = self.layout\n self.draw_product_size(layout) \n \n if Left_Depth:\n box = layout.box()\n row = box.row()\n Left_Depth.draw_prompt(row)\n \n if Right_Depth:\n Right_Depth.draw_prompt(row)\n \n if Shelf_Quantity:\n col = box.column(align=True)\n row = col.row()\n row.label(\"Qty:\")\n row.prop(self,\"shelf_quantity\",expand=True) \n col.separator()\n \n if Add_Backing:\n row = box.row()\n Add_Backing.draw_prompt(row)\n\n #if Backing_Thickness:\n # if Add_Backing.value() == True:\n # row = box.row()\n # Backing_Thickness.draw_prompt(row)\n \n \n if Add_Top:\n row = box.row()\n Add_Top.draw_prompt(row) \n\n if Remove_Left_Side:\n row = box.row()\n Remove_Left_Side.draw_prompt(row) \n \n if Remove_Right_Side:\n row = box.row()\n Remove_Right_Side.draw_prompt(row) \n \n \n # row = box.row()\n # Hide_Toe_Kick.draw_prompt(row)\n \n row = box.row()\n Door.draw_prompt(row)\n if Door.value():\n if Angled_Shelves and Door_Type:\n if Angled_Shelves.value() == False:\n row = box.row()\n row.prop(self,'Door_Type',text=\"Door Type\")\n #Door_Type.draw_prompt(row)\n row = box.row()\n #Pull_Location.draw_prompt(row)\n row.prop(self,'Pull_Location',text=\"Pull Location\")\n\n row = box.row()\n #Door_Pull_Height.draw_prompt(row)\n row.prop(self,'Pull_Type',text=\"Pull Type\")\n row = box.row()\n if self.Pull_Type == 'Base':\n Base_Pull_Location.draw_prompt(row)\n elif self.Pull_Type == 'Tall':\n Tall_Pull_Location.draw_prompt(row)\n else:\n Upper_Pull_Location.draw_prompt(row)\n\n if Open_Door: \n row = box.row()\n Open_Door.draw_prompt(row)\n\n if Angled_Shelves:\n if Angled_Shelves.value():\n row = box.row()\n Use_Left_Swing.draw_prompt(row)\n row = box.row() \n Force_Double_Doors.draw_prompt(row)", "def get_description(self):\n return COMPONENT_LIST[self.index][1]", "def get_description(self):\n pass", "def description(self):\n return self.visual_desc", "def __str__(self):\n return \"[Rectangle] {}/{}\".format(self.__width, self.__height)", "def __str__(self):\n return \"[Rectangle] {}/{}\".format(self.__width, self.__height)", "def description(self) -> str:\r\n descrip = 'The player must aim to put the most possible 
units of a ' \\\r\n 'given colour c on the outer perimeter of ' \\\r\n 'the board. The ' \\\r\n 'player’s score is the total number of unit cells ' \\\r\n 'of colour ' \\\r\n 'c that are on the perimeter. There is a ' \\\r\n 'premium on corner ' \\\r\n 'cells: they count twice towards the score. '\r\n return descrip", "def description() -> str:\n content = \"Demonstrates usage of blackbord remappings.\\n\"\n content += \"\\n\"\n content += \"Demonstration is via an exemplar behaviour making use of remappings..\\n\"\n\n if py_trees.console.has_colours:\n banner_line = console.green + \"*\" * 79 + \"\\n\" + console.reset\n s = banner_line\n s += console.bold_white + \"Blackboard\".center(79) + \"\\n\" + console.reset\n s += banner_line\n s += \"\\n\"\n s += content\n s += \"\\n\"\n s += banner_line\n else:\n s = content\n return s", "def draw(self, context):\n layout = self.layout\n self.draw_product_size(layout)\n \n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n \n box = layout.box()\n row = box.row()\n Shelf_Quantity.draw_prompt(row)", "def get_description(self):\n raise NotImplementedError" ]
[ "0.6610947", "0.6592959", "0.6475347", "0.6304326", "0.62589735", "0.6158773", "0.6146244", "0.61412126", "0.61374414", "0.61354303", "0.6127795", "0.6126339", "0.61020863", "0.60996103", "0.609043", "0.60874915", "0.6083631", "0.60741735", "0.6074143", "0.60647815", "0.60647815", "0.6052923", "0.6044837", "0.6043659", "0.60424435", "0.60424435", "0.6020132", "0.60080385", "0.6003939", "0.5977286" ]
0.74717224
0
Returns a rectangle representing the price display for a product
def get_price_rect(self, index):
    y_position = self.product_entries.top_y + (self.product_entries.row_h * index)
    return pygame.Rect(self.product_entries.price_x, y_position, self.product_entries.price_w, self.button_data.height)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def showrect(context):\r\n #context[\"products\"] = BlogPostForm()\r\n data=[{'time':'2003','price':'6800'},\r\n {'time':'2004','price':'7800'},\r\n {'time':'2005','price':'8800'},\r\n {'time':'2006','price':'9800'},\r\n {'time':'2007','price':'10800'},\r\n {'time':'2008','price':'12600'}\r\n ]\r\n price=[]\r\n for item in data: \r\n price.append(int(item['price']))\r\n maxprice=max(price)\r\n svgheight=200\r\n svgwidth=378\r\n time_relative=15\r\n price_relative=-5\r\n rectwidth=40\r\n x_relative=18 \r\n k=float(svgheight*0.9)/float(maxprice)\r\n rect_x_position=29\r\n time_x_position=34\r\n price_x_position=31\r\n for item in data:\r\n item['rectheight']=k*float(item['price'])\r\n item['rect_y_position']=svgheight-item['rectheight']\r\n item['time_y_position']=item['rect_y_position']+time_relative\r\n item['price_y_position']=item['rect_y_position']+price_relative\r\n item['rect_x_position']=rect_x_position\r\n item['time_x_position']=time_x_position\r\n item['price_x_position']=price_x_position\r\n rect_x_position+=rectwidth+x_relative\r\n time_x_position+=rectwidth+x_relative\r\n price_x_position+=rectwidth+x_relative\r\n result={}\r\n result['svgwidth']=svgwidth\r\n result['svgheight']=svgheight\r\n result['rectwidth']=rectwidth\r\n result['data']=data\r\n return result", "def draw(self, context):\n \n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n \n layout = self.layout\n self.draw_product_size(layout)\n box = layout.box()\n row = box.row()\n Left_Depth.draw_prompt(row) \n Right_Depth.draw_prompt(row)\n box = layout.box()\n row = box.row()\n Shelf_Quantity.draw_prompt(row)", "def test_visualize_price_breakdown(self):\n pass", "def draw(self, context):\n layout = self.layout\n self.draw_product_size(layout)\n \n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n \n box = layout.box()\n row = box.row()\n Shelf_Quantity.draw_prompt(row)", "def draw(self, context):\n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n Add_Backing = self.product.get_prompt(\"Add Backing\")\n Backing_Thickness = self.product.get_prompt(\"Backing Thickness\")\n Add_Top = self.product.get_prompt(\"Add Top KD\")\n Remove_Left_Side = self.product.get_prompt(\"Remove Left Side\")\n Remove_Right_Side = self.product.get_prompt(\"Remove Right Side\")\n Hide_Toe_Kick = self.product.get_prompt(\"Hide Toe Kick\")\n Door = self.product.get_prompt(\"Door\")\n Use_Left_Swing = self.product.get_prompt(\"Use Left Swing\")\n Pull_Location = self.product.get_prompt(\"Pull Location\")\n Force_Double_Doors = self.product.get_prompt(\"Force Double Doors\")\n Door_Pull_Height = self.product.get_prompt(\"Door Pull Height\")\n Door_Type = self.product.get_prompt(\"Door Type\")\n Angled_Shelves = self.product.get_prompt(\"Angled Shelves\")\n Open_Door = self.product.get_prompt(\"Open Door\")\n Base_Pull_Location = self.product.get_prompt(\"Base Pull Location\")\n Tall_Pull_Location = self.product.get_prompt(\"Tall Pull Location\")\n Upper_Pull_Location = self.product.get_prompt(\"Upper Pull Location\")\n \n layout = self.layout\n self.draw_product_size(layout) \n \n if Left_Depth:\n box = layout.box()\n row = box.row()\n Left_Depth.draw_prompt(row)\n \n if 
Right_Depth:\n Right_Depth.draw_prompt(row)\n \n if Shelf_Quantity:\n col = box.column(align=True)\n row = col.row()\n row.label(\"Qty:\")\n row.prop(self,\"shelf_quantity\",expand=True) \n col.separator()\n \n if Add_Backing:\n row = box.row()\n Add_Backing.draw_prompt(row)\n\n #if Backing_Thickness:\n # if Add_Backing.value() == True:\n # row = box.row()\n # Backing_Thickness.draw_prompt(row)\n \n \n if Add_Top:\n row = box.row()\n Add_Top.draw_prompt(row) \n\n if Remove_Left_Side:\n row = box.row()\n Remove_Left_Side.draw_prompt(row) \n \n if Remove_Right_Side:\n row = box.row()\n Remove_Right_Side.draw_prompt(row) \n \n \n # row = box.row()\n # Hide_Toe_Kick.draw_prompt(row)\n \n row = box.row()\n Door.draw_prompt(row)\n if Door.value():\n if Angled_Shelves and Door_Type:\n if Angled_Shelves.value() == False:\n row = box.row()\n row.prop(self,'Door_Type',text=\"Door Type\")\n #Door_Type.draw_prompt(row)\n row = box.row()\n #Pull_Location.draw_prompt(row)\n row.prop(self,'Pull_Location',text=\"Pull Location\")\n\n row = box.row()\n #Door_Pull_Height.draw_prompt(row)\n row.prop(self,'Pull_Type',text=\"Pull Type\")\n row = box.row()\n if self.Pull_Type == 'Base':\n Base_Pull_Location.draw_prompt(row)\n elif self.Pull_Type == 'Tall':\n Tall_Pull_Location.draw_prompt(row)\n else:\n Upper_Pull_Location.draw_prompt(row)\n\n if Open_Door: \n row = box.row()\n Open_Door.draw_prompt(row)\n\n if Angled_Shelves:\n if Angled_Shelves.value():\n row = box.row()\n Use_Left_Swing.draw_prompt(row)\n row = box.row() \n Force_Double_Doors.draw_prompt(row)", "def draw(self, context):\n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n Add_Backing = self.product.get_prompt(\"Add Backing\")\n Backing_Thickness = self.product.get_prompt(\"Backing Thickness\")\n Add_Top = self.product.get_prompt(\"Add Top KD\")\n Remove_Left_Side = self.product.get_prompt(\"Remove Left Side\")\n Remove_Right_Side = self.product.get_prompt(\"Remove Right Side\")\n Hide_Toe_Kick = self.product.get_prompt(\"Hide Toe Kick\")\n Door = self.product.get_prompt(\"Door\")\n Use_Left_Swing = self.product.get_prompt(\"Use Left Swing\")\n Pull_Location = self.product.get_prompt(\"Pull Location\")\n Force_Double_Doors = self.product.get_prompt(\"Force Double Doors\")\n Door_Pull_Height = self.product.get_prompt(\"Door Pull Height\")\n Door_Type = self.product.get_prompt(\"Door Type\")\n Angled_Shelves = self.product.get_prompt(\"Angled Shelves\")\n Open_Door = self.product.get_prompt(\"Open Door\")\n Base_Pull_Location = self.product.get_prompt(\"Base Pull Location\")\n Tall_Pull_Location = self.product.get_prompt(\"Tall Pull Location\")\n Upper_Pull_Location = self.product.get_prompt(\"Upper Pull Location\")\n \n layout = self.layout\n self.draw_product_size(layout) \n \n if Left_Depth:\n box = layout.box()\n row = box.row()\n Left_Depth.draw_prompt(row)\n \n if Right_Depth:\n Right_Depth.draw_prompt(row)\n \n if Shelf_Quantity:\n col = box.column(align=True)\n row = col.row()\n row.label(\"Qty:\")\n row.prop(self,\"shelf_quantity\",expand=True) \n col.separator()\n \n if Add_Backing:\n row = box.row()\n Add_Backing.draw_prompt(row)\n\n #if Backing_Thickness:\n # if Add_Backing.value() == True:\n # row = box.row()\n # Backing_Thickness.draw_prompt(row)\n \n \n if Add_Top:\n row = box.row()\n Add_Top.draw_prompt(row) \n\n if Remove_Left_Side:\n row = box.row()\n Remove_Left_Side.draw_prompt(row) \n \n if 
Remove_Right_Side:\n row = box.row()\n Remove_Right_Side.draw_prompt(row) \n \n \n # row = box.row()\n # Hide_Toe_Kick.draw_prompt(row)\n \n row = box.row()\n Door.draw_prompt(row)\n if Door.value():\n if Angled_Shelves and Door_Type:\n if Angled_Shelves.value() == False:\n row = box.row()\n row.prop(self,'Door_Type',text=\"Door Type\")\n #Door_Type.draw_prompt(row)\n row = box.row()\n #Pull_Location.draw_prompt(row)\n row.prop(self,'Pull_Location',text=\"Pull Location\")\n\n row = box.row()\n #Door_Pull_Height.draw_prompt(row)\n row.prop(self,'Pull_Type',text=\"Pull Type\")\n row = box.row()\n if self.Pull_Type == 'Base':\n Base_Pull_Location.draw_prompt(row)\n elif self.Pull_Type == 'Tall':\n Tall_Pull_Location.draw_prompt(row)\n else:\n Upper_Pull_Location.draw_prompt(row)\n\n if Open_Door: \n row = box.row()\n Open_Door.draw_prompt(row)\n\n if Angled_Shelves:\n if Angled_Shelves.value():\n row = box.row()\n Use_Left_Swing.draw_prompt(row)\n row = box.row() \n Force_Double_Doors.draw_prompt(row)", "def price_to_screen(self, price):\r\n relative_from_bottom = \\\r\n float(price - self.pmin) / float(self.pmax - self.pmin)\r\n screen_from_bottom = relative_from_bottom * self.height\r\n return int(self.height - screen_from_bottom)", "def getPrice(self):\n headers = {\n \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.113 Safari/537.36\"}\n response = requests.get(self.__product_URL, headers=headers)\n #print(response.status_code)\n soup = BeautifulSoup(response.content, \"html.parser\")\n file = open(\"testproduct.html\", \"wb\")\n file.write(soup.prettify(\"utf-8\"))\n file.close()\n title = soup.find(\"span\", attrs={\"id\": \"productTitle\", \"class\": \"a-size-large\"}).string.strip()\n self.__product_title = title\n temp = soup.find_all(\"a\", attrs={\"class\": \"a-accordion-row a-declarative accordion-header\"})[1]\n price = temp.find(\"span\", attrs={\"class\": \"a-color-price\"}).text.strip()\n lst = list(price)\n lst.remove(\",\")\n price = int(float(\"\".join(lst)))\n self.__product_price = price\n #print(self.__product_price)", "def curves(self):\n return Rectangle(quantify=8,\n width=self.heights[child.index],\n length=self.width_car,\n position=translate(self.position,\n \"z\",\n -self.positions[0][child.index]),\n centered=False)", "def view_product(cls, product_id):\n product = Product.get_by_id(product_id)\n print(f'Product ID: {product.product_id}')\n print(f'Product Name: {product.product_name}')\n print(f'Quantity: {product.product_quantity}')\n print(f'Price: ${product.product_price / 100:.2f}\\n')", "def print_products_line(self, products: list):\n\n range_param = 1\n\n products = deepcopy(products)\n\n self.adapter_for_terminal(products)\n\n for i, product in enumerate(products, start=1):\n range_param = i\n\n _str = str(i) + \") \"\n\n if product.get('product_name'):\n _str += product.get('product_name')\n if product.get('generic_name'):\n _str += \" - \" + product.get('generic_name')\n if product.get('brands_tags'):\n _str += \" - marques : (\" + product.get('brands_tags') + \")\"\n\n cprint(_str, 'blue')\n\n return range_param", "def get_description_rect(self, index):\n y_position = self.product_entries.top_y + (self.product_entries.row_h * index)\n return pygame.Rect(self.product_entries.desc_x, y_position, self.product_entries.desc_w, self.button_data.height)", "def get_product_area(self, obj):\n\t\treturn obj.product.product", "def paint(self):\r\n\r\n def paint_row(pos, price, vol, ownvol, color, 
changevol):\r\n \"\"\"paint a row in the orderbook (bid or ask)\"\"\"\r\n if changevol > 0:\r\n col2 = col_bid + curses.A_BOLD\r\n elif changevol < 0:\r\n col2 = col_ask + curses.A_BOLD\r\n else:\r\n col2 = col_vol\r\n self.addstr(pos, 0, book.gox.quote2str(price), color)\r\n self.addstr(pos, 12, book.gox.base2str(vol), col2)\r\n if ownvol:\r\n self.addstr(pos, 28, book.gox.base2str(ownvol), col_own)\r\n\r\n self.win.bkgd(\" \", COLOR_PAIR[\"book_text\"])\r\n self.win.erase()\r\n\r\n gox = self.gox\r\n book = gox.orderbook\r\n\r\n mid = self.height / 2\r\n col_bid = COLOR_PAIR[\"book_bid\"]\r\n col_ask = COLOR_PAIR[\"book_ask\"]\r\n col_vol = COLOR_PAIR[\"book_vol\"]\r\n col_own = COLOR_PAIR[\"book_own\"]\r\n\r\n sum_total = gox.config.get_bool(\"goxtool\", \"orderbook_sum_total\")\r\n group = gox.config.get_float(\"goxtool\", \"orderbook_group\")\r\n group = gox.quote2int(group)\r\n if group == 0:\r\n group = 1\r\n\r\n #\r\n #\r\n # paint the asks (first we put them into bins[] then we paint them)\r\n #\r\n if len(book.asks):\r\n i = 0\r\n bins = []\r\n pos = mid - 1\r\n vol = 0\r\n prev_vol = 0\r\n\r\n # no grouping, bins can be created in one simple and fast loop\r\n if group == 1:\r\n cnt = len(book.asks)\r\n while pos >= 0 and i < cnt:\r\n level = book.asks[i]\r\n price = level.price\r\n if sum_total:\r\n vol += level.volume\r\n else:\r\n vol = level.volume\r\n ownvol = level.own_volume\r\n bins.append([pos, price, vol, ownvol, 0])\r\n pos -= 1\r\n i += 1\r\n\r\n # with gouping its a bit more complicated\r\n else:\r\n # first bin is exact lowest ask price\r\n price = book.asks[0].price\r\n vol = book.asks[0].volume\r\n bins.append([pos, price, vol, 0, 0])\r\n prev_vol = vol\r\n pos -= 1\r\n\r\n # now all following bins\r\n bin_price = int(math.ceil(float(price) / group) * group)\r\n if bin_price == price:\r\n # first level was exact bin price already, skip to next bin\r\n bin_price += group\r\n while pos >= 0 and bin_price < book.asks[-1].price + group:\r\n vol, _vol_quote = book.get_total_up_to(bin_price, True) ## 01 freeze\r\n if vol > prev_vol:\r\n # append only non-empty bins\r\n if sum_total:\r\n bins.append([pos, bin_price, vol, 0, 0])\r\n else:\r\n bins.append([pos, bin_price, vol - prev_vol, 0, 0])\r\n prev_vol = vol\r\n pos -= 1\r\n bin_price += group\r\n\r\n # now add the own volumes to their bins\r\n for order in book.owns:\r\n if order.typ == \"ask\" and order.price > 0:\r\n order_bin_price = int(math.ceil(float(order.price) / group) * group)\r\n for abin in bins:\r\n if abin[1] == order.price:\r\n abin[3] += order.volume\r\n break\r\n if abin[1] == order_bin_price:\r\n abin[3] += order.volume\r\n break\r\n\r\n # mark the level where change took place (optional)\r\n if gox.config.get_bool(\"goxtool\", \"highlight_changes\"):\r\n if book.last_change_type == \"ask\":\r\n change_bin_price = int(math.ceil(float(book.last_change_price) / group) * group)\r\n for abin in bins:\r\n if abin[1] == book.last_change_price:\r\n abin[4] = book.last_change_volume\r\n break\r\n if abin[1] == change_bin_price:\r\n abin[4] = book.last_change_volume\r\n break\r\n\r\n # now finally paint the asks\r\n for pos, price, vol, ownvol, changevol in bins:\r\n paint_row(pos, price, vol, ownvol, col_ask, changevol)\r\n\r\n #\r\n #\r\n # paint the bids (first we put them into bins[] then we paint them)\r\n #\r\n if len(book.bids):\r\n i = 0\r\n bins = []\r\n pos = mid + 1\r\n vol = 0\r\n prev_vol = 0\r\n\r\n # no grouping, bins can be created in one simple and fast loop\r\n if group == 1:\r\n cnt = 
len(book.bids)\r\n while pos < self.height and i < cnt:\r\n level = book.bids[i]\r\n price = level.price\r\n if sum_total:\r\n vol += level.volume\r\n else:\r\n vol = level.volume\r\n ownvol = level.own_volume\r\n bins.append([pos, price, vol, ownvol, 0])\r\n prev_vol = vol\r\n pos += 1\r\n i += 1\r\n\r\n # with gouping its a bit more complicated\r\n else:\r\n # first bin is exact lowest ask price\r\n price = book.bids[0].price\r\n vol = book.bids[0].volume\r\n bins.append([pos, price, vol, 0, 0])\r\n prev_vol = vol\r\n pos += 1\r\n\r\n # now all following bins\r\n bin_price = int(math.floor(float(price) / group) * group)\r\n if bin_price == price:\r\n # first level was exact bin price already, skip to next bin\r\n bin_price -= group\r\n while pos < self.height and bin_price >= 0:\r\n vol, _vol_quote = book.get_total_up_to(bin_price, False)\r\n if vol > prev_vol:\r\n # append only non-empty bins\r\n if sum_total:\r\n bins.append([pos, bin_price, vol, 0, 0])\r\n else:\r\n bins.append([pos, bin_price, vol - prev_vol, 0, 0])\r\n prev_vol = vol\r\n pos += 1\r\n bin_price -= group\r\n\r\n # now add the own volumes to their bins\r\n for order in book.owns:\r\n if order.typ == \"bid\" and order.price > 0:\r\n order_bin_price = int(math.floor(float(order.price) / group) * group)\r\n for abin in bins:\r\n if abin[1] == order.price:\r\n abin[3] += order.volume\r\n break\r\n if abin[1] == order_bin_price:\r\n abin[3] += order.volume\r\n break\r\n\r\n # mark the level where change took place (optional)\r\n if gox.config.get_bool(\"goxtool\", \"highlight_changes\"):\r\n if book.last_change_type == \"bid\":\r\n change_bin_price = int(math.floor(float(book.last_change_price) / group) * group)\r\n for abin in bins:\r\n if abin[1] == book.last_change_price:\r\n abin[4] = book.last_change_volume\r\n break\r\n if abin[1] == change_bin_price:\r\n abin[4] = book.last_change_volume\r\n break\r\n\r\n # now finally paint the bids\r\n for pos, price, vol, ownvol, changevol in bins:\r\n paint_row(pos, price, vol, ownvol, col_bid, changevol)\r\n\r\n # update the xterm title bar\r\n if self.gox.config.get_bool(\"goxtool\", \"set_xterm_title\"):\r\n last_candle = self.gox.history.last_candle()\r\n if last_candle:\r\n title = self.gox.quote2str(last_candle.cls).strip()\r\n title += \" - goxtool -\"\r\n title += \" bid:\" + self.gox.quote2str(book.bid).strip()\r\n title += \" ask:\" + self.gox.quote2str(book.ask).strip()\r\n\r\n term = os.environ[\"TERM\"]\r\n # the following is incomplete but better safe than sorry\r\n # if you know more terminals then please provide a patch\r\n if \"xterm\" in term or \"rxvt\" in term:\r\n sys_out.write(\"\\x1b]0;%s\\x07\" % title)\r\n sys_out.flush()", "def get_price():\n \n #Teacher's code. 
Could not get it working.\n #price = db(db.product.name == productName).select(db.product.price)[0].price\n \n \n return (200)", "def paint_depth(pos, price, vol, own, col_price, change):\r\n if change > 0:\r\n col = col_bid + curses.A_BOLD\r\n elif change < 0:\r\n col = col_ask + curses.A_BOLD\r\n else:\r\n col = col_bar\r\n pricestr = FORMAT_STRING % self.gox.quote2float(price)\r\n self.addstr(pos, 0, pricestr, col_price)\r\n length = int(vol * mult_x)\r\n # pylint: disable=E1101\r\n self.win.hline(pos, BAR_LEFT_EDGE, curses.ACS_CKBOARD, length, col)\r\n if own:\r\n self.addstr(pos, length + BAR_LEFT_EDGE, \"o\", col_own)", "def showOrderPrice(self, order):\n \n order.CalculateOrderPrice()\n label3 = Label(self, text=\"$:\"+str(order.GetTotalPrice), font=LARGE_FONT)\n label3.grid(row=2, column=5)", "def plotprice(self):\n plt.figure()\n plt.hist( self.pricetree[-1,:] )\n plt.title(\"price Distribution\") \n plt.show()", "def print_product(product):\r\n try:\r\n print(\"\\n \\\r\n Name : {} \\n \\\r\n Categories : {} \\n \\\r\n Nutri-score : {} \\n \\\r\n Stores : {} \\n \\\r\n URL : {}\".format(product.name, product.category, product.nutri_score, product.stores, product.url))\r\n except TypeError:\r\n print(\"Désolé, il n'y a pas de substitut pour ce product...\")", "def paint_row(pos, price, vol, ownvol, color, changevol):\r\n if changevol > 0:\r\n col2 = col_bid + curses.A_BOLD\r\n elif changevol < 0:\r\n col2 = col_ask + curses.A_BOLD\r\n else:\r\n col2 = col_vol\r\n self.addstr(pos, 0, book.gox.quote2str(price), color)\r\n self.addstr(pos, 12, book.gox.base2str(vol), col2)\r\n if ownvol:\r\n self.addstr(pos, 28, book.gox.base2str(ownvol), col_own)", "def getProductPrice(productID):\n return \"http://api.tcgplayer.com/pricing/product/\" + str(productID)", "def display_price(self):\n return '$ '+str(self.price)", "def get_price(self, request, pk):\n return Response('20$')", "def get_price(self, request, pk):\n return Response('20$')", "def test_visualize_recipe_price_breakdown_by_id(self):\n pass", "def get_price(self):\r\n return self.price", "def get_product_price(self):\n\n price = \"0.0000\"\n\n try:\n price = self.trees.get_element_by_id(\"priceblock_ourprice\").text\n except:\n try:\n price = self.trees.get_element_by_id(\n \"price_inside_buybox\").text\n except:\n try:\n price = self.trees.get_element_by_id(\n \"priceblock_dealprice\").text\n except:\n try:\n price = self.trees.xpath(\n \"//span[@class='a-color-price']/text()\")[0]\n except:\n try:\n price = self.trees.xpath(\n \"//span[@class='a-size-base a-color-price']/text()\")[0]\n except:\n pass\n\n non_decimal = re.compile(r'[^\\d.]+')\n price = non_decimal.sub('', price)\n\n return round(float(price[0:5]), 2)", "def get_price():\n return uniform(1.0, 350.0)", "def __str__(self):\n return f'{self.product.title} - Color {self.product.color}'", "def __str__(self):\n return('[Rectangle] {}/{}'.format(self.__width, self.__height))" ]
[ "0.66148126", "0.6425636", "0.6408066", "0.6388764", "0.63350195", "0.63350195", "0.5850726", "0.58149785", "0.58066714", "0.56577665", "0.564943", "0.5617745", "0.56055164", "0.5587808", "0.55845076", "0.5541297", "0.5503129", "0.54867107", "0.5484257", "0.5461903", "0.54541177", "0.5450827", "0.54352885", "0.54352885", "0.53876615", "0.5374863", "0.53437954", "0.5337462", "0.53335196", "0.5319487" ]
0.7220666
0
Returns a rectangle representing the remove button for a product
def get_remove_rect(self, index, width):
    y_position = self.product_entries.top_y + (self.product_entries.row_h * index)
    return pygame.Rect(self.product_entries.remove_x, y_position, width, self.button_data.height)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_price_rect(self, index):\n y_position = self.product_entries.top_y + (self.product_entries.row_h * index)\n return pygame.Rect(self.product_entries.price_x, y_position, self.product_entries.price_w, self.button_data.height)", "def _remove_button_width(self):\n removew = self.width - self.layout.product_entries.remove_x - Widths.BORDER\n\n if self._test_display_down_button() or self._test_display_up_button():\n removew -= (self.layout.scroll.width + Widths.BORDER)\n\n return removew", "def get_cancel_rect(self):\n return pygame.Rect(self.button_data.cancel_x, self.button_data.y, self.button_data.large_width, self.button_data.height)", "def get_description_rect(self, index):\n y_position = self.product_entries.top_y + (self.product_entries.row_h * index)\n return pygame.Rect(self.product_entries.desc_x, y_position, self.product_entries.desc_w, self.button_data.height)", "def get_pay_rect(self):\n return pygame.Rect(self.button_data.pay_x, self.button_data.y, self.button_data.large_width, self.button_data.height)", "def removeProduct(self, *args):\n return _libsbml.Reaction_removeProduct(self, *args)", "def remove_player_button():\r\n global state\r\n if not state == \"remove\":\r\n state = \"remove\"\r\n sub_title[\"text\"] = \"REMOVE PLAYER\"\r\n sub_title.pack()\r\n\r\n clear_frames()\r\n\r\n \"\"\"Creating a new frame for showing the widgets\"\"\"\r\n frame = Frame(window)\r\n frame[\"padx\"] = 30\r\n frame[\"pady\"] = 30\r\n createdFrames.append(frame)\r\n frame.pack()\r\n\r\n \"\"\"Creating the widgets\"\"\"\r\n name_lb = Label(frame, text=\"Name of the player\")\r\n name_lb.pack()\r\n name_entry = Entry(frame)\r\n name_entry.focus()\r\n name_entry.pack()\r\n feedback_label = Label(frame)\r\n\r\n \"\"\"Creating the confirm button\"\"\"\r\n confirm_button = Button(frame, text=\"Confirm\")\r\n confirm_button[\"command\"] = partial(remove_player, name_entry, feedback_label)\r\n confirm_button.pack()", "def remove_selection(self, coord):\n button = self.grid[coord]\n button['bg'] = default_color\n button['activebackground'] = '#38dcf5'", "def remove_button(self):\n self.scene.remove_child(self.toggle_button_el)", "def RemoveShape(self, *args):\n return _XCAFDoc.XCAFDoc_ShapeTool_RemoveShape(self, *args)", "def _remove_texture(self):\n # Retrieve the item that was selected\n key = self._listbox.get(ACTIVE)\n # Post a delete notice to the manager\n self._remove(key)", "def buttonRemove_Clicked( self, event ):\n\t\tindex = -1\n\t\tindex = self.listCtrlTreasure.GetNextItem(index, wx.LIST_NEXT_ALL, wx.LIST_STATE_SELECTED)\n\t\tif index != -1:\n\t\t\tself.getTreasure(index, True)\n\t\t\tself.refreshTreasureList()", "def remove_item_checkout(self):\n self._remove_btn.click()", "def RemoveComponent(self, *args):\n return _XCAFDoc.XCAFDoc_ShapeTool_RemoveComponent(self, *args)", "def get_amount_rect(self):\n return pygame.Rect(self.amount.x, self.amount.y, self.amount.width, self.button_data.height)", "def test_delete_custom_button(self):\n pass", "def draw(self, context):\n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n Add_Backing = self.product.get_prompt(\"Add Backing\")\n Backing_Thickness = self.product.get_prompt(\"Backing Thickness\")\n Add_Top = self.product.get_prompt(\"Add Top KD\")\n Remove_Left_Side = self.product.get_prompt(\"Remove Left Side\")\n Remove_Right_Side = self.product.get_prompt(\"Remove Right Side\")\n Hide_Toe_Kick = 
self.product.get_prompt(\"Hide Toe Kick\")\n Door = self.product.get_prompt(\"Door\")\n Use_Left_Swing = self.product.get_prompt(\"Use Left Swing\")\n Pull_Location = self.product.get_prompt(\"Pull Location\")\n Force_Double_Doors = self.product.get_prompt(\"Force Double Doors\")\n Door_Pull_Height = self.product.get_prompt(\"Door Pull Height\")\n Door_Type = self.product.get_prompt(\"Door Type\")\n Angled_Shelves = self.product.get_prompt(\"Angled Shelves\")\n Open_Door = self.product.get_prompt(\"Open Door\")\n Base_Pull_Location = self.product.get_prompt(\"Base Pull Location\")\n Tall_Pull_Location = self.product.get_prompt(\"Tall Pull Location\")\n Upper_Pull_Location = self.product.get_prompt(\"Upper Pull Location\")\n \n layout = self.layout\n self.draw_product_size(layout) \n \n if Left_Depth:\n box = layout.box()\n row = box.row()\n Left_Depth.draw_prompt(row)\n \n if Right_Depth:\n Right_Depth.draw_prompt(row)\n \n if Shelf_Quantity:\n col = box.column(align=True)\n row = col.row()\n row.label(\"Qty:\")\n row.prop(self,\"shelf_quantity\",expand=True) \n col.separator()\n \n if Add_Backing:\n row = box.row()\n Add_Backing.draw_prompt(row)\n\n #if Backing_Thickness:\n # if Add_Backing.value() == True:\n # row = box.row()\n # Backing_Thickness.draw_prompt(row)\n \n \n if Add_Top:\n row = box.row()\n Add_Top.draw_prompt(row) \n\n if Remove_Left_Side:\n row = box.row()\n Remove_Left_Side.draw_prompt(row) \n \n if Remove_Right_Side:\n row = box.row()\n Remove_Right_Side.draw_prompt(row) \n \n \n # row = box.row()\n # Hide_Toe_Kick.draw_prompt(row)\n \n row = box.row()\n Door.draw_prompt(row)\n if Door.value():\n if Angled_Shelves and Door_Type:\n if Angled_Shelves.value() == False:\n row = box.row()\n row.prop(self,'Door_Type',text=\"Door Type\")\n #Door_Type.draw_prompt(row)\n row = box.row()\n #Pull_Location.draw_prompt(row)\n row.prop(self,'Pull_Location',text=\"Pull Location\")\n\n row = box.row()\n #Door_Pull_Height.draw_prompt(row)\n row.prop(self,'Pull_Type',text=\"Pull Type\")\n row = box.row()\n if self.Pull_Type == 'Base':\n Base_Pull_Location.draw_prompt(row)\n elif self.Pull_Type == 'Tall':\n Tall_Pull_Location.draw_prompt(row)\n else:\n Upper_Pull_Location.draw_prompt(row)\n\n if Open_Door: \n row = box.row()\n Open_Door.draw_prompt(row)\n\n if Angled_Shelves:\n if Angled_Shelves.value():\n row = box.row()\n Use_Left_Swing.draw_prompt(row)\n row = box.row() \n Force_Double_Doors.draw_prompt(row)", "def draw(self, context):\n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n Add_Backing = self.product.get_prompt(\"Add Backing\")\n Backing_Thickness = self.product.get_prompt(\"Backing Thickness\")\n Add_Top = self.product.get_prompt(\"Add Top KD\")\n Remove_Left_Side = self.product.get_prompt(\"Remove Left Side\")\n Remove_Right_Side = self.product.get_prompt(\"Remove Right Side\")\n Hide_Toe_Kick = self.product.get_prompt(\"Hide Toe Kick\")\n Door = self.product.get_prompt(\"Door\")\n Use_Left_Swing = self.product.get_prompt(\"Use Left Swing\")\n Pull_Location = self.product.get_prompt(\"Pull Location\")\n Force_Double_Doors = self.product.get_prompt(\"Force Double Doors\")\n Door_Pull_Height = self.product.get_prompt(\"Door Pull Height\")\n Door_Type = self.product.get_prompt(\"Door Type\")\n Angled_Shelves = self.product.get_prompt(\"Angled Shelves\")\n Open_Door = self.product.get_prompt(\"Open Door\")\n Base_Pull_Location = 
self.product.get_prompt(\"Base Pull Location\")\n Tall_Pull_Location = self.product.get_prompt(\"Tall Pull Location\")\n Upper_Pull_Location = self.product.get_prompt(\"Upper Pull Location\")\n \n layout = self.layout\n self.draw_product_size(layout) \n \n if Left_Depth:\n box = layout.box()\n row = box.row()\n Left_Depth.draw_prompt(row)\n \n if Right_Depth:\n Right_Depth.draw_prompt(row)\n \n if Shelf_Quantity:\n col = box.column(align=True)\n row = col.row()\n row.label(\"Qty:\")\n row.prop(self,\"shelf_quantity\",expand=True) \n col.separator()\n \n if Add_Backing:\n row = box.row()\n Add_Backing.draw_prompt(row)\n\n #if Backing_Thickness:\n # if Add_Backing.value() == True:\n # row = box.row()\n # Backing_Thickness.draw_prompt(row)\n \n \n if Add_Top:\n row = box.row()\n Add_Top.draw_prompt(row) \n\n if Remove_Left_Side:\n row = box.row()\n Remove_Left_Side.draw_prompt(row) \n \n if Remove_Right_Side:\n row = box.row()\n Remove_Right_Side.draw_prompt(row) \n \n \n # row = box.row()\n # Hide_Toe_Kick.draw_prompt(row)\n \n row = box.row()\n Door.draw_prompt(row)\n if Door.value():\n if Angled_Shelves and Door_Type:\n if Angled_Shelves.value() == False:\n row = box.row()\n row.prop(self,'Door_Type',text=\"Door Type\")\n #Door_Type.draw_prompt(row)\n row = box.row()\n #Pull_Location.draw_prompt(row)\n row.prop(self,'Pull_Location',text=\"Pull Location\")\n\n row = box.row()\n #Door_Pull_Height.draw_prompt(row)\n row.prop(self,'Pull_Type',text=\"Pull Type\")\n row = box.row()\n if self.Pull_Type == 'Base':\n Base_Pull_Location.draw_prompt(row)\n elif self.Pull_Type == 'Tall':\n Tall_Pull_Location.draw_prompt(row)\n else:\n Upper_Pull_Location.draw_prompt(row)\n\n if Open_Door: \n row = box.row()\n Open_Door.draw_prompt(row)\n\n if Angled_Shelves:\n if Angled_Shelves.value():\n row = box.row()\n Use_Left_Swing.draw_prompt(row)\n row = box.row() \n Force_Double_Doors.draw_prompt(row)", "def _onRemove(self, event):\n index = self.colorlist.GetSelection()\n del self.graphColors[index]\n self._tupleListToStrings()\n if len(self.graphColors) > 0:\n self.colorlist.SetSelection(0)\n self._updateButtons(None)", "def draw_push_button(self, text, event_name, num_items = 1, item = 0):\n width = self.XCOLUMNSKIP//num_items\n self.guiElements[event_name] = Draw.PushButton(\n text,\n self.event_id(event_name),\n self.xPos + item*width, self.yPos, width, self.YLINESKIP)\n if item + 1 == num_items:\n self.yPos -= self.YLINESKIP", "def test_remove_item_test_remove_single_item():\n sc.menu = sc.default_menu\n # sc.current.add_item('Coffee', 1)\n sc.current.remove_item('Coffee', 1)\n assert sc.current.receipt == {'Coffee': 4, 'Tea': 1, 'subtotal': 6.36}", "def delete_button(self):\n self.pop_up_del = Toplevel(master)\n self.pop_up_del.geometry(\"500x50\")\n\n self.del_label = Label(self.pop_up_del, text=\"Are you sure you want to delete this item?\", font=\"roboto 12\")\n self.del_label.place(relx=0.5, rely=0.01, anchor=\"n\")\n\n self.del_button = Button(self.pop_up_del, text=\"DELETE\", command=self.delete_item)\n self.del_button.place(relx=0.4, rely=0.5, anchor=\"n\")\n\n self.keep_button = Button(self.pop_up_del, text=\"CANCEL\", command=self.close_1)\n self.keep_button.place(relx=0.6, rely=0.5, anchor=\"n\")", "def _removeClicked(self):\n isosurface = self.isosurface()\n if isosurface is not None:\n volume = isosurface.parent()\n if volume is not None:\n volume.removeIsosurface(isosurface)", "def add_del_from_pl_btn(self):\n self.delete_pl = QPushButton(\"Delete from playlist\")\n 
self.delete_pl.clicked.connect(self.delete_pl_btn_push)\n self.hbtnbox.addWidget(self.delete_pl)", "def test_remove_item_test_proper_input():\n sc.menu = sc.default_menu\n sc.current.add_item('Coffee', 1)\n assert sc.current.receipt == {'Coffee': 5, 'Tea': 1, 'subtotal': 7.95}", "def get_object_id(self, pos):\n object_id = ScreenGUI.get_object_id(self, pos)\n\n if object_id == -1:\n # Try searching remove buttons\n if self.product_displays.collide_on_remove(pos) is not None:\n object_id = self.ids.REMOVE\n \n return object_id", "def define_button(self):\n self.separator1 = pygame.Rect(\n 0,\n SCREEN_WIDTH,\n SCREEN_WIDTH,\n BIG_LINE_WIDTH,\n )\n self.separator2 = pygame.Rect(\n 0,\n SCREEN_WIDTH + BIG_LINE_WIDTH // 2,\n SCREEN_WIDTH,\n BIG_LINE_WIDTH,\n )\n\n self.button = pygame.Rect(\n SCREEN_WIDTH // 2 - BUTTON_WIDTH // 2,\n (SCREEN_HEIGHT + SCREEN_WIDTH) // 2 - BUTTON_HEIGHT // 2,\n BUTTON_WIDTH,\n BUTTON_HEIGHT,\n )", "def test_product_remove(self):\n\n flag = \"user\"\n api = \"product.product.remove\"\n current_page = 1\n search_info = json.dumps({\n 'id': 12,\n })\n print('start------------------------>remove')\n result = self.access_api(flag = flag, api = api, current_page = current_page, product_info = search_info)", "def remove(self, *args):\n return _libsbml.ListOfCompartmentGlyphs_remove(self, *args)", "def _get_button_region(self):\n # XXXX Only rectangulars for now\n if not self._sensitive:\n return None\n if not self._insidetemporal():\n return None\n rgn = Qd.NewRgn()\n if self._shape == 'rect':\n x0, y0 = self._convert_point(self._coordinates[0:2])\n x1, y1 = self._convert_point(self._coordinates[2:4])\n box = x0, y0, x1, y1\n Qd.RectRgn(rgn, box)\n elif self._shape == 'poly':\n Qd.OpenRgn()\n xl, yl = self._convert_point(self._coordinates[-2:])\n Qd.MoveTo(xl, yl)\n for i in range(0, len(self._coordinates), 2):\n x, y = self._convert_point(self._coordinates[i:i+2])\n Qd.LineTo(x, y)\n Qd.CloseRgn(rgn)\n elif self._shape == 'circle':\n print 'Circle not supported yet'\n elif self._shape == 'ellipse':\n # Note: rx/ry are width/height, not points\n x, y, rx, ry = self._dispobj._window._convert_coordinates(self._coordinates)\n Qd.OpenRgn()\n Qd.FrameOval((x-rx, y-ry, x+rx, y+ry))\n Qd.CloseRgn(rgn)\n else:\n print 'Invalid shape type', self._shape\n return rgn" ]
[ "0.6206437", "0.60439533", "0.59111506", "0.57449245", "0.5735818", "0.5700898", "0.55577743", "0.55432266", "0.5529115", "0.55252004", "0.5517157", "0.5499525", "0.5495325", "0.54349494", "0.5414864", "0.5401785", "0.54001504", "0.54001504", "0.5365234", "0.53408545", "0.53233725", "0.52796286", "0.5275348", "0.52685815", "0.52526337", "0.52490306", "0.5230605", "0.5230159", "0.522085", "0.5199978" ]
0.7680152
0
Update the total spend value
def update_total(self):
    self.objects[self.ids.AMOUNT].setText("Total Spend: \xA3%.2f" % (self.owner.total_price() / 100))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_total(self):\n # the 'or 0' sets order_total as 0 instead of None,\n # preventing an error when calculating delivery_costs\n self.order_total = self.lineitems.aggregate(\n Sum('lineitem_total'))['lineitem_total__sum'] or 0\n if self.order_total < settings.FREE_DELIVERY_THRESHOLD:\n sdp = settings.STANDARD_DELIVERY_PERCENTAGE\n self.delivery_cost = self.order_total * sdp / 100\n else:\n self.delivery_cost = 0\n self.grand_total = self.order_total + self.delivery_cost\n self.save()", "def set_total(self):\n\n self.total = 0\n for item in self.items.all():\n self.total += item.price\n self.save()", "def action_update_total(self):\n for order in self:\n amount_untaxed = 0.0\n for line in order.order_line_ids:\n amount_untaxed += line.price_subtotal\n order.price_subtotal = amount_untaxed", "def update_total_price():\n tk_total_price.set('Total: {0:>6}'.format(str(total_price)))\n print(total_price)", "def inc_total(self, dif):\n if not(is_number_correct(dif)):\n raise ValueError(\"Incorrect total value!\")\n self.total += int(dif)\n self.budget_holder[datetime.datetime.now()] = self.total", "def update_amounts(self, save=True):\n self.amount_donated = self.get_amount_total(\n [StatusDefinition.SUCCESS, StatusDefinition.PENDING,\n StatusDefinition.PLEDGED])\n self.amount_needed = self.amount_asked - self.amount_donated\n\n if self.amount_needed < 0:\n # Should never be less than zero\n self.amount_needed = 0\n\n if save:\n self.save()", "def _balance_update(self):\n return_rate = self.df.loc[self.currentStep, \"return_Close\"]\n self.buy_amount += return_rate * self.buy_amount\n self.sell_amount -= return_rate * self.sell_amount", "def update_total(self):\r\n likes = 0.00\r\n\r\n if self.get_hamburger.get():\r\n likes += 1.35\r\n\r\n if self.get_cheeseburger.get():\r\n likes += 1.59\r\n\r\n if self.get_bacon.get():\r\n likes += 1.79\r\n\r\n if self.get_drink.get():\r\n likes += 0.49\r\n \r\n self.result_txt.delete(0.0, END)\r\n self.result_txt.insert(0.0, likes)", "def _update_profit(self, cost: float):\n\n if cost > 0:\n self._total_debit += cost\n else:\n self._total_credit += -(cost)", "def newTotalUpdate(self, errorMessage=None):\n if errorMessage:\n self.newTotalLabel.setText(errorMessage)\n return\n self.newTotal = self.total\n if self.percentage or self.amount:\n if self.amount and self.amount > 0:\n self.newTotal = self.newTotal - self.amount\n\n if self.percentage and self.percentage > 0:\n self.newTotal = self.newTotal - (self.newTotal *\n (self.percentage / 100))\n elif self.code != \"\":\n pass\n else:\n self.newTotal = self.total\n\n self.newTotal = round(self.newTotal, 2)\n\n self.newTotalLabel.setText(\"$\" + str(self.newTotal))", "def update_totals(self, commit=False):\n sub_total = 0.0\n tax = 0.0\n for item in self.invoice_items:\n sub_total += float(item.total if item.total else 0)\n tax += float(item.tax if item.tax else 0)\n\n self.tax = tax\n self.sub_total = sub_total\n self.total = float(self.tax) + float(self.sub_total)\n\n if commit:\n db.session.add(self)\n db.session.commit()\n return True", "def _compute_amount(self):\n for line in self:\n line.update({\n 'price_subtotal': line.price_unit * line.quantity,\n })", "def add_amount_spent(self, amount_spent: float) -> None:\n self.current_balance -= amount_spent", "def save(self, *args, **kwargs):\n self.total = self.quantity * self.price\n super(DeliveryDetail, self).save(*args, **kwargs)", "def deposit(self, amount, budget):\r\n if budget != \"Total Balance\":\r\n assert budget in self.budgets, 
\"Specified budget doesn't exist\"\r\n self.budgets[budget] += float(amount)\r\n self.balance += float(amount)", "def compute_total_paid(self):\n total = 0.0\n for line in self.loan_ids:\n if line.pay:\n total += line.amount\n self.total_paid = total", "def withdrawMoney(self, withdraw_amount):\r\n self.balance_amt = self.balance_amt - withdraw_amount", "def total(self, desired_period: int = 12):\n self._trigger_gather()\n result = Decimal(0)\n for item in self.elements:\n result += item.income.amount(desired_period)\n return(Decimal(result))", "def total(self, desired_period: int = 12):\n self._trigger_gather()\n result = Decimal(0)\n for item in self.elements:\n result += item.income.amount(desired_period)\n return(Decimal(result))", "def set_total(self, valeurs):\r\n \r\n self._total = valeurs", "def pay(self, amt: float):\n self._money += amt", "def total(self, total):\n\n self._total = total", "def total(self, total):\n\n self._total = total", "def total(self, total):\n\n self._total = total", "def total(self, total):\n\n self._total = total", "def total(self, total):\n\n self._total = total", "def total(self, total):\n\n self._total = total", "def total(self, total):\n\n self._total = total", "def total(self, total):\n\n self._total = total", "def _onchange_price_subtotal(self):\n self.price = self.price_subtotal" ]
[ "0.72733474", "0.7256667", "0.70726025", "0.70486337", "0.70019245", "0.682015", "0.67827046", "0.67650783", "0.6759054", "0.6743769", "0.66739947", "0.6569658", "0.65474904", "0.64809436", "0.646712", "0.64240855", "0.6417558", "0.6404048", "0.6404048", "0.63580096", "0.63532424", "0.6346972", "0.6346972", "0.6346972", "0.6346972", "0.6346972", "0.6346972", "0.6346972", "0.6346972", "0.631742" ]
0.81530696
0
The width of the remove button depends on whether the up/dn buttons are displayed
def _remove_button_width(self):
    removew = self.width - self.layout.product_entries.remove_x - Widths.BORDER
    if self._test_display_down_button() or self._test_display_up_button():
        removew -= (self.layout.scroll.width + Widths.BORDER)
    return removew
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def enable_remove(self):\n n_t = self.tree_ctrl.GetCount()\n n_t_t = self.tree_ctrl_theory.GetCount()\n if n_t + n_t_t <= 0:\n self.bt_remove.Disable()\n else:\n self.bt_remove.Enable()", "def on_width(self, instance_toolbar, width: float) -> None:\n\n if self.mode == \"center\":\n self.action_button.x = width / 2 - self.action_button.width / 2\n else:\n self.action_button.x = width - self.action_button.width * 2\n\n # The user reduces the width of the window.\n if (\n self.get_window_width_resizing_direction() == \"left\"\n and self.use_overflow\n and self.ids.label_title.is_shortened\n ):\n if not self.overflow_action_button_is_added():\n self.add_overflow_button()\n self.add_action_button_to_overflow()\n # The user increases the width of the window.\n if (\n self.get_window_width_resizing_direction() == \"right\"\n and self.use_overflow\n and not self.ids.label_title.is_shortened\n and self.overflow_cls.items\n ):\n self.return_action_button_to_toolbar()", "def _adjust_widgets(self):\n\t\teditEnabled = self.directoryList.get_sensitive()\n\t\tselection = self.directoryList.get_selection()\n\t\tmodel, selected = selection.get_selected()\n\t\tnRows = self.directoryModel.get_row_count()\n\t\thasSelection = selected is not None\n\t\tcanMoveUp = False\n\t\tcanMoveDown = False\n\t\tif hasSelection:\n\t\t\tpath = model.get_path(selected)\n\t\t\tiRow = path[0]\n\t\t\tcanMoveUp = iRow > 0\n\t\t\tcanMoveDown = iRow < (nRows -1)\n\t\tself.buttonNew.set_sensitive(editEnabled)\n\t\tself.buttonEdit.set_sensitive(hasSelection and editEnabled)\n\t\tself.buttonUp.set_sensitive(canMoveUp and editEnabled)\n\t\tself.buttonDown.set_sensitive(canMoveDown and editEnabled)\n\t\tself.buttonRemove.set_sensitive(hasSelection and editEnabled)\n\n\t\tself.logView.set_sensitive(not editEnabled)\n\t\tself.buttonImport.set_sensitive(bool(nRows))\n\t\tif self.buttonImport.get_active() and not bool(nRows):\n\t\t\tself.buttonImport.set_active(False)", "def add_del_from_pl_btn(self):\n self.delete_pl = QPushButton(\"Delete from playlist\")\n self.delete_pl.clicked.connect(self.delete_pl_btn_push)\n self.hbtnbox.addWidget(self.delete_pl)", "def makeButtons(self):\n self.but_run = QtWidgets.QPushButton('Run') \n self.but_status = QtWidgets.QPushButton('Status') \n self.but_brow = QtWidgets.QPushButton('View') \n self.but_remove = QtWidgets.QPushButton('Remove files') \n\n self.hboxB = QtWidgets.QHBoxLayout()\n self.hboxB.addWidget(self.but_run)\n self.hboxB.addWidget(self.but_status)\n self.hboxB.addWidget(self.but_brow)\n self.hboxB.addStretch(1) \n self.hboxB.addWidget(self.but_remove)\n\n self.but_run.clicked.connect(self.onRun)\n self.but_status.clicked.connect(self.onStatus)\n self.but_brow.clicked.connect(self.onBrow)\n self.but_remove.clicked.connect(self.onRemove)", "def remove_player_button():\r\n global state\r\n if not state == \"remove\":\r\n state = \"remove\"\r\n sub_title[\"text\"] = \"REMOVE PLAYER\"\r\n sub_title.pack()\r\n\r\n clear_frames()\r\n\r\n \"\"\"Creating a new frame for showing the widgets\"\"\"\r\n frame = Frame(window)\r\n frame[\"padx\"] = 30\r\n frame[\"pady\"] = 30\r\n createdFrames.append(frame)\r\n frame.pack()\r\n\r\n \"\"\"Creating the widgets\"\"\"\r\n name_lb = Label(frame, text=\"Name of the player\")\r\n name_lb.pack()\r\n name_entry = Entry(frame)\r\n name_entry.focus()\r\n name_entry.pack()\r\n feedback_label = Label(frame)\r\n\r\n \"\"\"Creating the confirm button\"\"\"\r\n confirm_button = Button(frame, text=\"Confirm\")\r\n confirm_button[\"command\"] = partial(remove_player, 
name_entry, feedback_label)\r\n confirm_button.pack()", "def createButton(self, button):\n pushButton = QtGui.QPushButton(button)\n pushButton.clicked.connect(self.reclassify)\n pushButton.toggled.connect(self.acquire)\n if self.size == 0:\n pushButton.setMinimumSize(100, 25)\n pushButton.setStyleSheet('font-size:12px')\n elif self.size == 1: \n pushButton.setMinimumSize(100, 40)\n pushButton.setStyleSheet('font-size:20px')\n elif self.size == 2: \n pushButton.setMinimumSize(100, 80)\n pushButton.setStyleSheet('font-size:30px')\n self.buttons.append(pushButton)\n return pushButton", "def get_remove_rect(self, index, width):\n y_position = self.product_entries.top_y + (self.product_entries.row_h * index)\n return pygame.Rect(self.product_entries.remove_x, y_position, width, self.button_data.height)", "def _initResizeButtons(self):\n self._addBasesButton = SVGButton(\":/pathtools/add-bases\", self)\n self._addBasesButton.clicked.connect(self._addBasesClicked)\n self._addBasesButton.hide()\n self._removeBasesButton = SVGButton(\":/pathtools/remove-bases\", self)\n self._removeBasesButton.clicked.connect(self._removeBasesClicked)\n self._removeBasesButton.hide()", "def _extra_width(self) -> int:\n width = 0\n if self.box and self.show_edge:\n width += 2\n if self.box:\n width += len(self.columns) - 1\n return width", "def resizeToView(self): # The Tool Options widget is a nightmare to resize :)\r\n view = self.activeView()\r\n\r\n if view and self.widget.isVisible():\r\n # We start with the tool options sizeHint as a goal size and then\r\n # shrink it down if necessary to fit inside the view.\r\n containerSize = self.widget.widget().sizeHint()\r\n\r\n # I don't like all these magic numbers (And repeteition) but I honestly don't know what they\r\n # correspond to either. Margins, I suppose, but then why is one of the numbers 14\r\n # when the margins are all 4?\r\n\r\n if view.height() < containerSize.height() + self.btnHide.height() + 14:\r\n containerSize.setHeight(view.height() - self.btnHide.height() - 14)\r\n\r\n if view.width() < containerSize.width() + 8:\r\n containerSize.setWidth(view.width() - 8)\r\n \r\n self.container.setFixedSize(containerSize)\r\n\r\n # Once the tool options container is an appropriate size, resize the\r\n # Pad widget to it's appropriate sizes\r\n padSize = self.sizeHint()\r\n if view.height() < padSize.height():\r\n padSize.setHeight(view.height())\r\n\r\n if view.width() < padSize.width():\r\n padSize.setWidth(view.width())\r\n \r\n self.setFixedSize(padSize)\r\n\r\n elif not self.widget.isVisible():\r\n # Resize the widget to the size of the button + some extra height for the hidden widget I guess?\r\n\r\n # I just don't know what these numbers are, or why I can't use the \r\n # button's own sizeHint. 
The result also varies if something else\r\n # about the layout varies.\r\n self.setFixedSize(23, 54)", "def clearButtons(self):\n for ch in self.cboxes:\n ch.hide()\n for tbx in self.tboxes:\n tbx.hide()\n for btnum in reversed(range(self.flowLayout.layout.count())):\n item = self.flowLayout.layout.itemAt(btnum)\n if item is not None:\n self.flowLayout.layout.removeItem(item)\n r, c = self.flowLayout.items[item.widget()]\n del self.flowLayout.items[item.widget()]\n del self.flowLayout.rows[r][c]\n item.widget().hide()\n self.flowLayout.update()", "def rightButtonUp(self):\n\t\tautopy.mouse.toggle(False,autopy.mouse.RIGHT_BUTTON)", "def AdjustFontSize(self):\r\n self.sh.Columns(\"A\").Delete()", "def delete_button(self):\n self.pop_up_del = Toplevel(master)\n self.pop_up_del.geometry(\"500x50\")\n\n self.del_label = Label(self.pop_up_del, text=\"Are you sure you want to delete this item?\", font=\"roboto 12\")\n self.del_label.place(relx=0.5, rely=0.01, anchor=\"n\")\n\n self.del_button = Button(self.pop_up_del, text=\"DELETE\", command=self.delete_item)\n self.del_button.place(relx=0.4, rely=0.5, anchor=\"n\")\n\n self.keep_button = Button(self.pop_up_del, text=\"CANCEL\", command=self.close_1)\n self.keep_button.place(relx=0.6, rely=0.5, anchor=\"n\")", "def _test_display_up_button(self):\n return (self.product_displays.top_index > 0)", "def keyPressEvent(self, event):\n if (event.key() == QtCore.Qt.Key_Backspace or event.key() == QtCore.Qt.Key_Delete):\n self.deleteBtns()", "def addToolBarButtons(self):", "def CountButtons(self):\r\n\r\n n = 0\r\n \r\n if self.HasCaption() or self.HasCaptionLeft():\r\n if isinstance(wx.GetTopLevelParent(self.window), AuiFloatingFrame):\r\n return 1\r\n \r\n if self.HasCloseButton():\r\n n += 1\r\n if self.HasMaximizeButton():\r\n n += 1\r\n if self.HasMinimizeButton():\r\n n += 1\r\n if self.HasPinButton():\r\n n += 1\r\n\r\n return n", "def remove_button(self):\n self.scene.remove_child(self.toggle_button_el)", "def width(self) -> int:\n self.tk_ref.update()\n return self.tk_ref.winfo_width()", "def leftButtonUp(self):\n\t\tautopy.mouse.toggle(False,autopy.mouse.LEFT_BUTTON)", "def min_width(self):\n ...", "def update_buttons(self):\n # Enable the Add/Remove/Up/Down measurements buttons if a Survey is loaded\n enable = self.mgr.obj is not None\n self.addButton.setEnabled(enable)\n self.removeButton.setEnabled(enable)\n self.upButton.setEnabled(enable)\n self.downButton.setEnabled(enable)\n \n # Enable the Add/Remove condition buttons if a Measurement is selected\n #enable = len(list(self.mgr.obj.measurements)) > 0\n enable = self.measurementTableWidget.rowCount() > 0\n self.addConditionButton.setEnabled(enable)\n self.removeConditionButton.setEnabled(enable)", "def OnSize(self, event):\r\n \r\n x, y = self.GetClientSize()\r\n realize = False\r\n\r\n if x > y:\r\n self.SetOrientation(wx.HORIZONTAL)\r\n else:\r\n self.SetOrientation(wx.VERTICAL)\r\n\r\n if (x >= y and self._absolute_min_size.x > x) or (y > x and self._absolute_min_size.y > y):\r\n \r\n # hide all flexible items\r\n for item in self._items:\r\n if item.sizer_item and item.proportion > 0 and item.sizer_item.IsShown():\r\n item.sizer_item.Show(False)\r\n item.sizer_item.SetProportion(0)\r\n\r\n if self._originalStyle & AUI_TB_OVERFLOW:\r\n if not self.GetOverflowVisible():\r\n self.SetOverflowVisible(True)\r\n realize = True\r\n \r\n else:\r\n\r\n if self._originalStyle & AUI_TB_OVERFLOW and not self._custom_overflow_append and \\\r\n not self._custom_overflow_prepend:\r\n if 
self.GetOverflowVisible():\r\n self.SetOverflowVisible(False)\r\n realize = True\r\n\r\n # show all flexible items\r\n for item in self._items:\r\n if item.sizer_item and item.proportion > 0 and not item.sizer_item.IsShown():\r\n item.sizer_item.Show(True)\r\n item.sizer_item.SetProportion(item.proportion)\r\n \r\n self._sizer.SetDimension(0, 0, x, y)\r\n\r\n if realize:\r\n self.Realize()\r\n else:\r\n self.Refresh(False)\r\n \r\n self.Update()", "def remove_overflow_button(self) -> None:\n\n if self.overflow_action_button_is_added():\n action_overflow_button = self.ids.right_actions.children[0]\n self.ids.right_actions.remove_widget(action_overflow_button)\n self._overflow_menu_items = []", "def misclist_resize(self, event):\n\n if not self.MiscList.GetColumnCount():\n return\n\n column_width = self.MiscList.GetClientSize()[0] - 4\n self.MiscList.SetColumnWidth(0, 200)\n self.MiscList.SetColumnWidth(1, column_width - 200)", "def add_button(self):\n if len(self.datamodels) == 0:#In this case the button is deleted completely as there is no reference to it\n self.addDataDisplays = QtGui.QPushButton(self) #Draw (+) button to add data displays\n self.addDataDisplays.setText(\"+\")\n self.addDataDisplays.clicked.connect(self.add_data_display)\n self.verticalLayout.removeWidget(self.addDataDisplays)\n self.verticalLayout.addWidget(self.addDataDisplays)", "def tab_only_rect(self):\n rect = self.rect()\n lB, rB = [c for c in self.children() if isinstance(c, QtWidgets.QToolButton)]\n side_button_width = lB.width()+rB.width()+15\n rect.adjust(0,0, -side_button_width, 0)\n return rect", "def test_delete_custom_button(self):\n pass" ]
[ "0.5788891", "0.57431185", "0.56472933", "0.56221867", "0.55743104", "0.5545128", "0.5536082", "0.549052", "0.54834133", "0.53839856", "0.5378169", "0.53288573", "0.5291649", "0.5290926", "0.5288311", "0.52758807", "0.5266669", "0.5258391", "0.5257666", "0.5237212", "0.52364486", "0.52286154", "0.5213549", "0.52102154", "0.51907223", "0.51856226", "0.518278", "0.5149566", "0.51344657", "0.5122753" ]
0.83062744
0
Returns TRUE if the up button should be displayed
def _test_display_up_button(self):
    return (self.product_displays.top_index > 0)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def isUp ( self ) :\n return not self.isDown()", "def move_up(event: EventType, widget: WidgetType) -> bool:\n return event.key == KEY_MOVE_UP", "def is_pressed(self) -> bool:\n return True", "def rightButtonUp(self):\n\t\tautopy.mouse.toggle(False,autopy.mouse.RIGHT_BUTTON)", "def check_up(prev_button, cur_button):\r\n\r\n if button_lst[cur_button][1] == button_lst[prev_button][1] and \\\r\n button_lst[cur_button][0] == button_lst[prev_button][0] - 1:\r\n return True\r\n return False", "def is_pressed(self) -> bool:", "def leftButtonUp(self):\n\t\tautopy.mouse.toggle(False,autopy.mouse.LEFT_BUTTON)", "def was_pressed(self) -> bool:\n return True", "def was_pressed(self) -> bool:", "def GetToggle(self):\n\n return not self.up", "def is_up(self):\n \n return self.is_level('up')", "def pressed(self) -> bool:\n return self.type == \"JOYBUTTONDOWN\"", "def go(self):\n return 'go' in self.buttons_pressed", "def handle_mouseup(self, button, name):\r\n x = widget.Widget.handle_mouseup(self, button, name)\r\n if not self.mouse_on_me():\r\n return False\r\n if not self.get_visible():\r\n return False\r\n for i in self.widgets:\r\n if i.get_visible():\r\n if i.handle_mouseup(button, name):\r\n return True\r\n return x", "def _test_display_down_button(self):\n return (self.product_displays.top_index + self.limits.screen_products) < len(self.product_displays)", "def ev_mousebuttonup(self, event: MouseButtonUp) -> None:", "def is_button_pressed():\n return btn_pin.value() == 0", "def isButtonReleased() -> bool:\n pass", "def is_down(self):\n return True", "def is_down(self):\n return True", "def get_pressed(self):\n\n self.update()\n\n if self.pressed:\n self.pressed = False\n return True\n\n return False", "def handle_mouseup(self, button, name):\r\n if self.get_visible():\r\n for i in self.widgets:\r\n if i.get_visible():\r\n if i.handle_mouseup(button, name):\r\n return True\r\n return False", "def isButtonPressed() -> bool:\n pass", "def is_up(self):\n return True", "def is_up(self):\n return True", "def joy_up(event: EventType, widget: WidgetType) -> bool:\n return event.value == JOY_UP", "def up(self, *args):\n self.cur_win().up()", "def button_a(self) -> bool:\n return bool(self.pressed & 0x2)", "def ispressed(self,key):\n \n if (self.buttons & b0uttondict.get(key,0)): return True\n else: return False", "def button_b(self) -> bool:\n return bool(self.pressed & 0x4)" ]
[ "0.7570521", "0.72280705", "0.7115068", "0.71126646", "0.7043401", "0.70227927", "0.700404", "0.6908078", "0.6845273", "0.6813771", "0.6766018", "0.67114484", "0.6708478", "0.6696725", "0.66916925", "0.66751057", "0.66570747", "0.66532713", "0.66235787", "0.66235787", "0.65951204", "0.6579123", "0.6561839", "0.6507606", "0.6507606", "0.64899546", "0.647934", "0.6474064", "0.64399046", "0.6436949" ]
0.7942494
0
Reset the products list to nothing
def clear_products(self):
    self.product_displays.clear()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def empty(self):\n self.items = []\n self.totalWeight = 0", "def reset(self):\n self.elements = [0] * len(self)", "def reset(self):\n self._data = []", "def clear(self):\n self._items = []", "def clear(self):\n self.__list = []", "def __init__(self): \n self.products_list = []", "def reset(self):\n # type: () -> None\n self._all_scalar_oids = []\n self._use_scalar_oids_cache = False", "def reset(self, ):\n self.clear()\n self.create_all_menus()", "def reset(self):\n self.ship_list = self.backup_list", "def clearList(self):\r\n self.addons.clear()", "def _reset(self) -> None:\n self.images = []\n self.activations = []\n self.labels = []\n self.preds = []\n self.n_found = 0", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(self) -> None:", "def reset(cls):\n cls.POS = 0\n cls.ITEMS = MOCKED_SEARCH_ITEMS.copy()", "def Reset(self):\n self._results = []", "def reset(self):", "def reset(self):", "def reset(self):", "def reset(self):", "def clearOrderList(self):\r\n\t\tself.orders = []", "def reset(self):\n self.temp_data.clear()", "def reset(self):\n self._setupObjects()", "def reset(self):\n self._pkgs.clear()\n self._catalogs.clear()\n self._categories.clear()\n self._command_to_category.clear()\n self._version = None", "def reset(self):\n pass", "def reset(self):\n pass", "def reset(self):\n pass", "def reset(self):\n pass", "def reset(self):\n pass", "def reset(self):\n pass" ]
[ "0.6782221", "0.67685705", "0.6734079", "0.6694723", "0.6681738", "0.66100025", "0.66018546", "0.65946543", "0.65911126", "0.65647316", "0.6526661", "0.6521588", "0.6521588", "0.6521588", "0.65129584", "0.6501035", "0.64841735", "0.64841735", "0.64841735", "0.64841735", "0.648406", "0.64793926", "0.64613837", "0.645524", "0.64478195", "0.64478195", "0.64478195", "0.64478195", "0.64478195", "0.64478195" ]
0.82027036
0
Scan down the product list and set the visible state of the product objects
def _set_show_hide_products(self):
    visible_count = 0
    for (counter, product) in enumerate(self.product_displays):
        if (counter < self.product_displays.top_index):
            # Hide all the products above the list product top
            product.set_visible(False)
        elif visible_count < self.limits.screen_products:
            # Show screen products based on their quantity
            product.visible = True
            visible_count += 1
        else:
            # Hide products below list bottom
            product.set_visible(False)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _draw_products(self, window):\n \n # Iterate over all products in list\n index = 0\n for product in self.product_displays:\n if product.visible:\n product.draw(self.layout, index, self._remove_button_width(), window)\n index += 1", "def get_visible_products(self):\r\n all_products = billing.loading.get_products(hidden=True)\r\n public_products = billing.loading.get_products()\r\n subscribed_product_types = ProductType.objects \\\r\n .filter(subscriptions__billing_account=self) \\\r\n .distinct()\r\n subscribed_products = set(pt.get_product_class() for pt in subscribed_product_types)\r\n visible_products = set(public_products).union(subscribed_products)\r\n return [p for p in all_products if p in visible_products]", "def openproducts(self):\n\n print \"Open products\"\n self.combo_product_list.setEnabled(True)\n frame=self.combo_area_list.currentText()\n self.combo_product_list.clear()\n self.combo_dataset_list.clear()\n self.combo_variable_list.clear()\n print str(frame)\n list_glo=[]\n if str(frame) == \"GLOBAL\":\n for key in self.dict_prod.keys():\n if str(frame) in key :\n list_glo.append(str(key))\n ind=0\n #print \"Frame %s \" %(frame)\n for key in self.dict_prod.keys():\n if str(frame) == \"BAL\":\n frame1=\"_BAL_\"\n frame2=\"-BAL-\"\n if frame1 in key or frame2 in key :\n self.combo_product_list.addItem(str(key))\n elif str(frame) == \"NWS\":\n frame1=\"NORTHWESTSHELF_\"\n frame2=\"NWS\"\n if frame1 in key or frame2 in key :\n self.combo_product_list.addItem(str(key))\n elif str(frame) == \"GLOBAL\":\n if str(frame) in key :\n if ind == 0 :\n self.combo_product_list.addItem(list_glo[5])\n elif ind == 5 : \n self.combo_product_list.addItem(list_glo[0])\n else : \n self.combo_product_list.addItem(list_glo[ind])\n ind+=1\n else :\n if str(frame) in key :\n self.combo_product_list.addItem(str(key))\n self.combo_dataset_list.setEnabled(True)", "def update_visibility(self, state):\n # The problem is that the following loop triggers __on_item_changed() which would cause the\n # data container to update its visibility in each iteration. It is better to do this once at the\n # end of this function. 
That's why the following two lines:\n update_data_container_visibility = self.__update_data_container_visibility # save the current state\n self.__update_data_container_visibility = False\n \n # Update all QList items but not the data container\n for item in self.__ordered_items:\n if item.is_hidden:\n continue\n \n if state == 1: item.set_checked()\n elif state == 0: item.set_unchecked()\n elif state == -1: item.toggle_check_state()\n\n # Now, update the data container visibility\n self.__update_data_container_visibility = update_data_container_visibility\n self.__data_container.update_visibility()", "def updateControl(self, event):\r\n print(\"updating...\")\r\n product_dict = [\r\n {\"title\":\"Core Python Programming\", \"author\":\"Wesley Chun\",\r\n \"isbn\":\"0132269937\", \"mfg\":\"Prentice Hall\"},\r\n {\"title\":\"Python Programming for the Absolute Beginner\",\r\n \"author\":\"Michael Dawson\", \"isbn\":\"1598631128\",\r\n \"mfg\":\"Course Technology\"},\r\n {\"title\":\"Learning Python\", \"author\":\"Mark Lutz\",\r\n \"isbn\":\"0596513984\", \"mfg\":\"O'Reilly\"}\r\n ]\r\n data = self.products + product_dict\r\n self.dataOlv.SetObjects(data)", "def products(self, products):\n\n self._products = products", "def products(self, products):\n\n self._products = products", "def toggle_active(self):\n res = super().toggle_active()\n Product = self.env['lunch.product'].with_context(active_test=False)\n all_products = Product.search([('supplier_id', 'in', self.ids)])\n all_products._sync_active_from_related()\n return res", "def set_visible(self, visible):\n self._visible = visible\n for artist in self.artists:\n artist.set_visible(visible)", "def display_product(self, list_products):\n for element in list_products[self.first_number:self.first_number +\n config.NUMBER_PRODUCT_DISPLAY]:\n print(str(list_products.index(element) + 1)\n + \" - \" + element[1] + \" - \" + element[4].upper()\n + \" - \" + element[2] + \" - \" + element[3])", "def set_visible(self, value):\n for artist in self.artists:\n artist.set_visible(value)", "def get_product_list_grid(self):\n product_list = WebDriverWait(self.driver, self.search_module_wait_time).until(EC.visibility_of_element_located(self.PRODUCT_LIST_GRID))\n return product_list", "def set_product(self, product):\n self.single_selection_from_static_kendo_dropdown(self.product_kendo_dropdown_locator, product)", "def emulate_off_api_manager_products(cls):\n cls.products = OFF_API_FILTERED_PRODUCTS", "def show_available_products(): # {{{\n products_available = {}\n try:\n with MONGO:\n product_collection = MONGO.connection.assignment_07[\"product\"].find(\n )\n\n for product in product_collection:\n if int(product[\"quantity_available\"]) > 0:\n products_available[product[\"product_id\"]] = {\n \"description\": product[\"description\"],\n \"product_type\": product[\"product_type\"],\n \"quantity_available\": product[\"quantity_available\"],\n }\n except TypeError as excep:\n LOGGER.warning(\"Error looking up available products\")\n LOGGER.warning(excep)\n else:\n if not products_available:\n LOGGER.info('No products found')\n else:\n LOGGER.info(\"Available products retrieved successfully.\")\n return products_available # }}}", "def search_product(self):\n cat = []\n product = open_products()\n radio = self.radiobutton_check()\n search = self.lineEdit_search.text()\n _translate = QtCore.QCoreApplication.translate\n __sortingEnabled = self.tableWidget.isSortingEnabled()\n self.tableWidget.setSortingEnabled(False)\n o=0\n if 
len(self.lineEdit_search.text()) == 0:\n self.show_product()\n else:\n for r in range(0, len(product)):\n if search.upper() in str(product[r][radio]).upper():\n cat.append(product[r])\n for i in range(0, len(cat)):\n for c in range(0, 5):\n item = self.tableWidget.item(i, c)\n item.setText(_translate(\"MainWindow\", str(cat[i][c])))\n o+=1\n else:\n for c in range(0, 5):\n item = self.tableWidget.item(r, c)\n item.setText(_translate(\"MainWindow\", \"\"))\n if o == 0:\n self.frame_3.show()\n self.label_16.setText('PRODUCT NOT FOUND!')", "def get_all_products(self):\n\t\tpass", "def show_available_products():\n LOGGER.debug('Listing all available products.')\n available_products = {}\n with MongoDBConnection() as mongo:\n database = mongo.connection.hp_norton\n for product in database.products.find(\n {'quantity_available': {'$gt': 0}}):\n available_products[product['product_id']] = {\n 'description': product['description'],\n 'product_type': product['product_type'],\n 'quantity_available': product['quantity_available']}\n return available_products", "def open_products(self, cursor, user, product_ids, context):\n ir_model_data = self.pool.get('ir.model.data')\n\n tree_res = ir_model_data.get_object_reference(\n cursor, user, 'product', 'product_product_tree_view'\n )\n tree_id = tree_res and tree_res[1] or False\n\n return {\n 'name': _('Products that have been exported to Magento'),\n 'view_type': 'form',\n 'view_mode': 'tree,form',\n 'res_model': 'product.product',\n 'views': [(tree_id, 'tree')],\n 'context': context,\n 'type': 'ir.actions.act_window',\n 'domain': [('id', 'in', product_ids)]\n }", "def _product_available(self, cr, uid, ids, field_names=None, arg=False, context=None):\n if not field_names:\n field_names = []\n if context is None:\n context = {}\n res = {}\n \n for product_loc in self.browse(cr, uid, ids):\n c = context.copy()\n c.update({ 'states': ('done',), 'what': ('in', 'out'), 'location': product_loc.location_id.id})\n stock = self.pool.get('product.product').get_product_available(cr, uid, [product_loc.product_id.id], context=c)\n res[product_loc.id] = stock.get(product_loc.product_id.id, 0.0)\n return res", "def show_available_products(*args):\n logger.info(f\"Preparing dict of available prodcuts...\")\n available_products = {}\n\n with MONGO:\n mdb = eval(Settings.connect_string)\n products = mdb[\"product\"]\n for doc in products.find():\n del doc[\"_id\"]\n if int(doc[\"quantity_available\"]) > 0:\n product_id = doc[\"product_id\"]\n del doc[\"product_id\"]\n available_products[product_id] = doc\n\n return available_products", "def write(self, vals):\n if 'active' in vals.keys():\n shopify_product_product_obj = self.env['shopify.product.product.ept']\n for product in self:\n shopify_product = shopify_product_product_obj.search(\n [('product_id', '=', product.id)])\n if vals.get('active'):\n shopify_product = shopify_product_product_obj.search(\n [('product_id', '=', product.id), ('active', '=', False)])\n shopify_product.write({'active': vals.get('active')})\n res = super(ProductProduct, self).write(vals)\n return res", "def visible(self, show):", "def update_product(self):\n if len(self.lineEdit_name.text()) != 0 and len(self.lineEdit_desc.text()) != 0 and len(\n self.lineEdit_cost.text()) != 0 and len(self.lineEdit_cat.text()) != 0:\n item = ['name', 'description', 'cost', 'categories']\n id = self.lineEdit_id.text()\n list = self.product_list()\n for n in range(0, len(list)):\n try:\n update_product(item[n], list[n], id)\n self.frame_3.show()\n 
self.label_16.setText('UPDATE PRODUCT SUCESSFULLY!')\n except:\n self.frame_3.show()\n self.label_16.setText('ERROR UPDATE PRODUCT!')\n else:\n self.frame_3.show()\n self.label_16.setText('THERE CAN BE NO BLANCK FIELDS!')", "def products(self):\r\n return self._products", "def view_products():\n min_id = (Product.select().order_by(Product.product_id.asc()).get()).product_id\n max_id = (Product.select().order_by(Product.product_id.desc()).get()).product_id\n print(f\"\\nPlease select id between {min_id} & {max_id}\")\n id = int(input(\"Select product id: \"))\n while id not in range(min_id, max_id+1):\n print(\"Your selection must be between {} and {}\".format(min_id, max_id))\n id = int(input(\"Select product id: \"))\n print(f\"\"\"\\n-Product: {Product.get_by_id(id).product_name}\n-Quantity: {Product.get_by_id(id).product_quantity}\n-Price: {Product.get_by_id(id).product_price} cents\n-Date updated: {Product.get_by_id(id).date_updated}\\n\"\"\")\n input(\"\\nPress ENTER to continue\")\n clear()", "def __iter__(self):\n return self._products.__iter__()", "def __init__(self): \n self.products_list = []", "def test_products(self, flag_is_active):\n flag_is_active.return_value = True\n\n # Create some products\n for i in range(3):\n product(save=True)\n\n # GET the home page and verify the content\n r = self.client.get(reverse('products'), follow=True)\n eq_(200, r.status_code)\n doc = pq(r.content)\n eq_(3, len(doc('#products-and-services li')))", "def __iter__(self):\n product_ids = self.wishlist.keys()\n products = Product.objects.filter(id__in=product_ids)\n for product in products:\n self.wishlist[str(product.id)]['product'] = product\n\n for item in self.wishlist.values():\n yield item" ]
[ "0.63815486", "0.610703", "0.58968854", "0.58574975", "0.5814728", "0.5778437", "0.5778437", "0.57648104", "0.5710969", "0.5639458", "0.56277835", "0.562654", "0.55940884", "0.5580036", "0.5571813", "0.5565017", "0.5561356", "0.5540053", "0.5487899", "0.5480094", "0.5470553", "0.5468826", "0.54531276", "0.54358906", "0.54184914", "0.5414597", "0.54125154", "0.54057485", "0.5379109", "0.53748184" ]
0.7236825
0
Draw all visible product objects on the window
def _draw_products(self, window):
    # Iterate over all products in list
    index = 0
    for product in self.product_displays:
        if product.visible:
            product.draw(self.layout, index, self._remove_button_width(), window)
            index += 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw(self, window):\n window.fill(Colours.BG)\n\n self._set_show_hide_products()\n self._draw_products(window)\n self._draw_static_objects(window)\n\n pygame.display.flip()", "def draw(self):\n for obj in self.objects:\n obj.draw()", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self): \n [component.draw() for component in self.components]", "def draw(self):\n\n for item in self.vis:\n item.undraw()\n self.render()\n for item in self.vis:\n item.draw(self.win)\n self.drawn = True", "def drawall(self):\r\n for x in self.objectlist:\r\n if x.model:\r\n x.model.draw()", "def visualise(self, obj):\n self.clear()\n self.draw(obj)\n self.show()", "def draw_all_objects():\n\tglobal fuel_available\n\n\tbackground_module.draw_bg(win)\n\tbackground_module.draw_snow(win)\n\tobstacles_module.draw_obstacles(win)\n\tcoins_module.draw_coins(win)\n\tforeground_module.draw_fg(win)\n\n\tfor spark_object in effects_module.Coin_spark_effects.coin_effects_list:\n\t\tspark_object.draw(win)\n\tfor hit_effect_object in effects_module.Hit_effects.hit_effects_list:\n\t\thit_effect_object.draw(win)\n\n\tif num_of_lives == 0:\n\t\tplayer_module.player.y += 1\n\t\tplayer_module.propeller.draw(win)\n\t\tplayer_module.player.draw(win)\n\telif won_bool:\n\t\tplayer_module.draw_player(win, True)\n\telse:\n\t\tplayer_module.draw_player(win)\n\t\t\n\tbird_module.draw_bird(win)\n\tdynamic_obstacle_giftbox.draw_gift(win)\n\tdynamic_obstacle_olaf.draw_olaf(win)\n\tdynamic_obstacle_santa.draw_santa(win)\n\tdisplay_module.display_lives(win, num_of_lives)\n\tdisplay_module.draw_minimap(win,frame_count)\n\n\tif start_fuel:\n\t\tfuel_available -= 1\n\tfuel_available = display_module.fuel_bar.draw_fuel_bar(win, fuel_available, start_fuel)\n\n\tdisplay_module.draw_fuel(win)\n\tcursor.draw(win)", "def _draw_static_objects(self, window):\n \n self.update_total()\n\n ## Draw border\n for draw_object in self.border.get_border().values():\n draw_object.draw(window)\n\n # Draw the fixed objects\n static_objs = [\n self.objects[self.ids.TOPBAR],\n self.objects[self.ids.PAY],\n self.objects[self.ids.CANCEL],\n self.objects[self.ids.DONE],\n self.objects[self.ids.AMOUNT],\n self.objects[self.ids.UP],\n self.objects[self.ids.DOWN]\n ]\n\n # Decide which objects should be shown\n if self.owner.user is not None:\n self.objects[self.ids.PAY].visible = self.owner.user.credit_allowed()\n self.objects[self.ids.DONE].visible = self.owner.user.has_added_credit() or (len(self.product_displays) > 0)\n \n self.objects[self.ids.UP].visible = self._test_display_up_button()\n self.objects[self.ids.DOWN].visible = self._test_display_down_button()\n\n for static_obj in static_objs:\n static_obj.draw(window)\n\n if self.banner is not None:\n self.banner.draw(window)", "def on_draw( self ):\r\n self.clear()\r\n self.setup_3D()\r\n print \"DEBUG:\" , \"There are\" , len( self.renderlist ) , \"items in 'self.renderlist'\"\r\n for obj in self.renderlist:\r\n obj.draw()", "def drawObjects(self, objects):\n\t\tfor o in objects:\n\t\t\tif o.visible:\n\t\t\t\tif o.rtc:\t# only horisontal 
center is supported currently, and only absolute values\n\t\t\t\t\thcenter = int((self.width - 1) / 2)\n\t\t\t\t\tposX = hcenter + o.x\n\t\t\t\t\tposY = o.y\n\t\t\t\telse:\n\t\t\t\t\tposX = int((o.x * self.width / 100) if type(o.x) == float else o.x)\n\t\t\t\t\tposY = int((o.y * self.height / 100) - 1 if type(o.y) == float else o.y)\n\t\t\t\t# frame\n\t\t\t\tif o.frame:\n\t\t\t\t\tfor nr, item in enumerate(o.frame):\n\t\t\t\t\t\tself.wts(posY + nr, posX + 1, item[0], item[1])\n\t\t\t\t# text\n\t\t\t\tfor nr, item in enumerate(o.content):\n\t\t\t\t\tfor coord in self.verticalBoundaries:\t# Check if a line is crossed\n\t\t\t\t\t\tif coord > posY and coord < posY + len(item[0]):\n\t\t\t\t\t\t\tif len(self.menus) == 1:\n\t\t\t\t\t\t\t\titem[0] = item[0][:coord - posY - 2] + '..'\n\t\t\t\t\ttry:\n\t\t\t\t\t\tself.wts(posY + nr + 1, posX + 2, item[0], item[1])\n\t\t\t\t\texcept:\n\t\t\t\t\t\tself.exit('Error occured in drawObjects, while drawing : OBJECT= \"' + str(o.content) + '\" ITEM= \"' + str(item)) + '\"'\n\t\treturn True", "def show(self,canvas): \n for piece in self.bluh:\n piece.render(canvas)\n\n #create vertical and horizontal bold outline\n for i in range(len(self.board)+1):\n x0=300+self.piecesize*i\n y0=100\n x1=300+self.piecesize*i\n y1=900\n canvas.create_line(x0,y0,x1,y1,width=5,fill=self.mode.color1)\n for a in range(len(self.board)+1):\n for i in range(len(self.board)+1):\n x2=300\n y2=100+self.piecesize*i\n x3=1100\n y3=100+self.piecesize*i\n canvas.create_line(x2,y2,x3,y3,width=5,fill=self.mode.color1)\n for piece in self.bluh:\n if piece.isselected==True:\n piece.dropShadow(canvas)\n piece.render(canvas)\n #print(piece.__repr__())", "def draw(self): \n pygame.event.clear()\n self.window = ocempgui.widgets.Box(GG.utils.SCREEN_SZ[0], GG.utils.SCREEN_SZ[1])\n self.paintScreen()\n self.paintAvatar()\n self.paintTags()\n self.paintCustomizeZone()\n self.paintButtons()\n self.window.zOrder = 90000\n self.window.depth = 2\n return self.window", "def draw_objects(self, view_manager):\n raise NotImplementedError(\"draw_objects can not be called directly from recoBase3D\")", "def drawAll(self):\r\n for x in range(len(self.model)):\r\n self.model[x].draw()", "def _drawFrame(self):\n\n self._clearScreen()\n \n for object in Object.Objects:\n self._drawObject(object)\n\n for entity in Entity.Entities:\n self._drawObject(entity)\n\n self._drawObject(Game.Player)", "def on_draw(self):\n # draw everything", "def draw(self, **kwargs):\n for o in sorted(self._drawables, key=default_itemgetter(\"z\", default=0)):\n o.draw(**kwargs)", "def draw(self, context):\n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n Add_Backing = self.product.get_prompt(\"Add Backing\")\n Backing_Thickness = self.product.get_prompt(\"Backing Thickness\")\n Add_Top = self.product.get_prompt(\"Add Top KD\")\n Remove_Left_Side = self.product.get_prompt(\"Remove Left Side\")\n Remove_Right_Side = self.product.get_prompt(\"Remove Right Side\")\n Hide_Toe_Kick = self.product.get_prompt(\"Hide Toe Kick\")\n Door = self.product.get_prompt(\"Door\")\n Use_Left_Swing = self.product.get_prompt(\"Use Left Swing\")\n Pull_Location = self.product.get_prompt(\"Pull Location\")\n Force_Double_Doors = self.product.get_prompt(\"Force Double Doors\")\n Door_Pull_Height = self.product.get_prompt(\"Door Pull Height\")\n Door_Type = self.product.get_prompt(\"Door Type\")\n Angled_Shelves = 
self.product.get_prompt(\"Angled Shelves\")\n Open_Door = self.product.get_prompt(\"Open Door\")\n Base_Pull_Location = self.product.get_prompt(\"Base Pull Location\")\n Tall_Pull_Location = self.product.get_prompt(\"Tall Pull Location\")\n Upper_Pull_Location = self.product.get_prompt(\"Upper Pull Location\")\n \n layout = self.layout\n self.draw_product_size(layout) \n \n if Left_Depth:\n box = layout.box()\n row = box.row()\n Left_Depth.draw_prompt(row)\n \n if Right_Depth:\n Right_Depth.draw_prompt(row)\n \n if Shelf_Quantity:\n col = box.column(align=True)\n row = col.row()\n row.label(\"Qty:\")\n row.prop(self,\"shelf_quantity\",expand=True) \n col.separator()\n \n if Add_Backing:\n row = box.row()\n Add_Backing.draw_prompt(row)\n\n #if Backing_Thickness:\n # if Add_Backing.value() == True:\n # row = box.row()\n # Backing_Thickness.draw_prompt(row)\n \n \n if Add_Top:\n row = box.row()\n Add_Top.draw_prompt(row) \n\n if Remove_Left_Side:\n row = box.row()\n Remove_Left_Side.draw_prompt(row) \n \n if Remove_Right_Side:\n row = box.row()\n Remove_Right_Side.draw_prompt(row) \n \n \n # row = box.row()\n # Hide_Toe_Kick.draw_prompt(row)\n \n row = box.row()\n Door.draw_prompt(row)\n if Door.value():\n if Angled_Shelves and Door_Type:\n if Angled_Shelves.value() == False:\n row = box.row()\n row.prop(self,'Door_Type',text=\"Door Type\")\n #Door_Type.draw_prompt(row)\n row = box.row()\n #Pull_Location.draw_prompt(row)\n row.prop(self,'Pull_Location',text=\"Pull Location\")\n\n row = box.row()\n #Door_Pull_Height.draw_prompt(row)\n row.prop(self,'Pull_Type',text=\"Pull Type\")\n row = box.row()\n if self.Pull_Type == 'Base':\n Base_Pull_Location.draw_prompt(row)\n elif self.Pull_Type == 'Tall':\n Tall_Pull_Location.draw_prompt(row)\n else:\n Upper_Pull_Location.draw_prompt(row)\n\n if Open_Door: \n row = box.row()\n Open_Door.draw_prompt(row)\n\n if Angled_Shelves:\n if Angled_Shelves.value():\n row = box.row()\n Use_Left_Swing.draw_prompt(row)\n row = box.row() \n Force_Double_Doors.draw_prompt(row)", "def draw(self, context):\n Left_Depth = self.product.get_prompt(\"Left Depth\")\n Right_Depth = self.product.get_prompt(\"Right Depth\")\n Shelf_Quantity = self.product.get_prompt(\"Shelf Quantity\")\n Add_Backing = self.product.get_prompt(\"Add Backing\")\n Backing_Thickness = self.product.get_prompt(\"Backing Thickness\")\n Add_Top = self.product.get_prompt(\"Add Top KD\")\n Remove_Left_Side = self.product.get_prompt(\"Remove Left Side\")\n Remove_Right_Side = self.product.get_prompt(\"Remove Right Side\")\n Hide_Toe_Kick = self.product.get_prompt(\"Hide Toe Kick\")\n Door = self.product.get_prompt(\"Door\")\n Use_Left_Swing = self.product.get_prompt(\"Use Left Swing\")\n Pull_Location = self.product.get_prompt(\"Pull Location\")\n Force_Double_Doors = self.product.get_prompt(\"Force Double Doors\")\n Door_Pull_Height = self.product.get_prompt(\"Door Pull Height\")\n Door_Type = self.product.get_prompt(\"Door Type\")\n Angled_Shelves = self.product.get_prompt(\"Angled Shelves\")\n Open_Door = self.product.get_prompt(\"Open Door\")\n Base_Pull_Location = self.product.get_prompt(\"Base Pull Location\")\n Tall_Pull_Location = self.product.get_prompt(\"Tall Pull Location\")\n Upper_Pull_Location = self.product.get_prompt(\"Upper Pull Location\")\n \n layout = self.layout\n self.draw_product_size(layout) \n \n if Left_Depth:\n box = layout.box()\n row = box.row()\n Left_Depth.draw_prompt(row)\n \n if Right_Depth:\n Right_Depth.draw_prompt(row)\n \n if Shelf_Quantity:\n col = 
box.column(align=True)\n row = col.row()\n row.label(\"Qty:\")\n row.prop(self,\"shelf_quantity\",expand=True) \n col.separator()\n \n if Add_Backing:\n row = box.row()\n Add_Backing.draw_prompt(row)\n\n #if Backing_Thickness:\n # if Add_Backing.value() == True:\n # row = box.row()\n # Backing_Thickness.draw_prompt(row)\n \n \n if Add_Top:\n row = box.row()\n Add_Top.draw_prompt(row) \n\n if Remove_Left_Side:\n row = box.row()\n Remove_Left_Side.draw_prompt(row) \n \n if Remove_Right_Side:\n row = box.row()\n Remove_Right_Side.draw_prompt(row) \n \n \n # row = box.row()\n # Hide_Toe_Kick.draw_prompt(row)\n \n row = box.row()\n Door.draw_prompt(row)\n if Door.value():\n if Angled_Shelves and Door_Type:\n if Angled_Shelves.value() == False:\n row = box.row()\n row.prop(self,'Door_Type',text=\"Door Type\")\n #Door_Type.draw_prompt(row)\n row = box.row()\n #Pull_Location.draw_prompt(row)\n row.prop(self,'Pull_Location',text=\"Pull Location\")\n\n row = box.row()\n #Door_Pull_Height.draw_prompt(row)\n row.prop(self,'Pull_Type',text=\"Pull Type\")\n row = box.row()\n if self.Pull_Type == 'Base':\n Base_Pull_Location.draw_prompt(row)\n elif self.Pull_Type == 'Tall':\n Tall_Pull_Location.draw_prompt(row)\n else:\n Upper_Pull_Location.draw_prompt(row)\n\n if Open_Door: \n row = box.row()\n Open_Door.draw_prompt(row)\n\n if Angled_Shelves:\n if Angled_Shelves.value():\n row = box.row()\n Use_Left_Swing.draw_prompt(row)\n row = box.row() \n Force_Double_Doors.draw_prompt(row)", "def draw(self):\n #for (x, y) in self.coords:\n # pyxel.rect(\n # (x + self.x) * 4,\n # (y + self.y) * 4,\n # (x + self.x) * 4 + 3,\n # (y + self.y) * 4 + 3,\n # self.color)", "def draw(self, surface):\n for box in self.checkboxes:\n box.draw(surface)", "def show_buttons(self):\n for button in self.buttons:\n x = button.starting_x\n y = button.starting_y\n self.screen.fill(button.color, ((x, y), (button.width, button.height)))", "def show_grid(self):\n for ax in (self.time_velocity, self.time_power, self.power_velocity):\n ax.grid(True)" ]
[ "0.7319963", "0.71650875", "0.68041396", "0.68041396", "0.68041396", "0.68041396", "0.68041396", "0.68041396", "0.68041396", "0.68041396", "0.67512876", "0.6708001", "0.66253144", "0.6591921", "0.65150344", "0.64464444", "0.6332542", "0.63218254", "0.6319037", "0.62770903", "0.6274782", "0.6273543", "0.6253388", "0.6233249", "0.6229954", "0.6229954", "0.6199511", "0.6191206", "0.6188452", "0.61561257" ]
0.8326296
0
Draw all the static objects on the window
def _draw_static_objects(self, window):
    self.update_total()

    ## Draw border
    for draw_object in self.border.get_border().values():
        draw_object.draw(window)

    # Draw the fixed objects
    static_objs = [
        self.objects[self.ids.TOPBAR],
        self.objects[self.ids.PAY],
        self.objects[self.ids.CANCEL],
        self.objects[self.ids.DONE],
        self.objects[self.ids.AMOUNT],
        self.objects[self.ids.UP],
        self.objects[self.ids.DOWN]
    ]

    # Decide which objects should be shown
    if self.owner.user is not None:
        self.objects[self.ids.PAY].visible = self.owner.user.credit_allowed()
        self.objects[self.ids.DONE].visible = self.owner.user.has_added_credit() or (len(self.product_displays) > 0)

    self.objects[self.ids.UP].visible = self._test_display_up_button()
    self.objects[self.ids.DOWN].visible = self._test_display_down_button()

    for static_obj in static_objs:
        static_obj.draw(window)

    if self.banner is not None:
        self.banner.draw(window)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw(self): \n pygame.event.clear()\n self.window = ocempgui.widgets.Box(GG.utils.SCREEN_SZ[0], GG.utils.SCREEN_SZ[1])\n self.paintScreen()\n self.paintAvatar()\n self.paintTags()\n self.paintCustomizeZone()\n self.paintButtons()\n self.window.zOrder = 90000\n self.window.depth = 2\n return self.window", "def draw(self, window):\n \n window.fill(Colours.BG)\n\n for draw_obj in self.objects.values():\n draw_obj.draw(window)\n\n #Ensure text is drawn on top\n self.objects['introtext'].draw(window)\n self.objects['introtext2'].draw(window)\n self.objects['introtext3'].draw(window)\n \n if self.banner is not None:\n self.banner.draw(window)\n \n pygame.display.flip()", "def draw(self):\n for obj in self.objects:\n obj.draw()", "def draw(self, window):\n window.fill(Colours.BG)\n\n self._set_show_hide_products()\n self._draw_products(window)\n self._draw_static_objects(window)\n\n pygame.display.flip()", "def draw_game():\n # Fill window with background color\n RENDER_WINDOW.fill(BACKGROUNDCOLOR)\n\n # Draw Game Title\n draw_title()\n\n # Draw Puzzle\n draw_puzzle()\n \n # Draw buttons to GUI \n draw_buttons()\n\n # Draw Text\n draw_text() \n\n # Draw random toggle\n draw_rand_toggle()", "def draw_objects():\n\n # Disable the turtle animation, and erase the scren.\n turtle.tracer(False)\n turtle.hideturtle()\n turtle.clear()\n\n # Draw all the parts of the scene.\n draw_ball()\n draw_target()\n draw_bounds()\n draw_pins()\n\n show_status()\n\n # Now show the screen, after everything has been drawn\n turtle.tracer(True)", "def draw_all_objects():\n\tglobal fuel_available\n\n\tbackground_module.draw_bg(win)\n\tbackground_module.draw_snow(win)\n\tobstacles_module.draw_obstacles(win)\n\tcoins_module.draw_coins(win)\n\tforeground_module.draw_fg(win)\n\n\tfor spark_object in effects_module.Coin_spark_effects.coin_effects_list:\n\t\tspark_object.draw(win)\n\tfor hit_effect_object in effects_module.Hit_effects.hit_effects_list:\n\t\thit_effect_object.draw(win)\n\n\tif num_of_lives == 0:\n\t\tplayer_module.player.y += 1\n\t\tplayer_module.propeller.draw(win)\n\t\tplayer_module.player.draw(win)\n\telif won_bool:\n\t\tplayer_module.draw_player(win, True)\n\telse:\n\t\tplayer_module.draw_player(win)\n\t\t\n\tbird_module.draw_bird(win)\n\tdynamic_obstacle_giftbox.draw_gift(win)\n\tdynamic_obstacle_olaf.draw_olaf(win)\n\tdynamic_obstacle_santa.draw_santa(win)\n\tdisplay_module.display_lives(win, num_of_lives)\n\tdisplay_module.draw_minimap(win,frame_count)\n\n\tif start_fuel:\n\t\tfuel_available -= 1\n\tfuel_available = display_module.fuel_bar.draw_fuel_bar(win, fuel_available, start_fuel)\n\n\tdisplay_module.draw_fuel(win)\n\tcursor.draw(win)", "def draw():", "def visualise(self, obj):\n self.clear()\n self.draw(obj)\n self.show()", "def draw(self, screen):", "def draw(self):\n self.screen.fill(BACKGROUND_COLOR)\n self.cannon.draw(self.screen)\n self.objects.draw(self.screen)", "def draw(self):\n\n for item in self.vis:\n item.undraw()\n self.render()\n for item in self.vis:\n item.draw(self.win)\n self.drawn = True", "def on_draw( self ):\r\n self.clear()\r\n self.setup_3D()\r\n print \"DEBUG:\" , \"There are\" , len( self.renderlist ) , \"items in 'self.renderlist'\"\r\n for obj in self.renderlist:\r\n obj.draw()", "def draw(self):\n self.screen.fill(WHITE)\n self.color_invalid()\n self.draw_selected()\n self.shade_locked_cells()\n self.draw_grid()\n self.draw_buttons()\n self.draw_numbers()", "def on_draw(self):\n # draw everything", "def draw(self):", "def __draw_game(self) -> None:\n 
self.__draw_window()\n self.pipes.draw(self.win)\n self.player.draw(self.win)\n pygame.display.update()", "def _drawFrame(self):\n\n self._clearScreen()\n \n for object in Object.Objects:\n self._drawObject(object)\n\n for entity in Entity.Entities:\n self._drawObject(entity)\n\n self._drawObject(Game.Player)", "def draw(self, win):\n self.rect.draw(win)\n self.text.draw(win)", "def draw_app(self):\n \n # Start iterations\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n pygame.quit()\n sys.exit()\n\n self.display.fill([255, 255, 255])\n self.grid.draw(self.display)\n pygame.display.update()", "def draw(self):\n self.screen.fill((0,51,102))\n # get the new drawables\n self.drawables = (self.game_model.get_background_drawables()\n + self.game_model.get_plane_drawables()\n + self.game_model.get_bullet_drawables()\n + self.game_model.get_enemy_drawables())\n for d in self.drawables:\n rect = d.get_rect()\n surf = d.get_surface()\n surf.set_colorkey((255,255,255))\n self.screen.blit(surf, rect)", "def draw_window_pane():\n houseturtle.begin_fill()\n for y in range(4):\n houseturtle.pendown()\n houseturtle.forward(35)\n houseturtle.left(90)\n houseturtle.penup()\n houseturtle.end_fill()", "def renderall(self):\n\n if not self.isinitialized:\n return\n # clear display\n self.screen.fill(BGCOLOR)\n # draw the board\n self.drawBoard()\n # flip the display to show whatever we drew\n pygame.display.flip()", "def draw(self):\n self.draw_occupied_cells()\n self.draw_open_cells()\n self.draw_edges()\n plt.xlabel(\"Red\")\n plt.ylabel(\"Black\")\n plt.title('Hex')\n self.camera.snap()", "def paint(self):\n self.paint_snake()\n self.paint_apple()\n root.mainloop()", "def draw(self):\n #for (x, y) in self.coords:\n # pyxel.rect(\n # (x + self.x) * 4,\n # (y + self.y) * 4,\n # (x + self.x) * 4 + 3,\n # (y + self.y) * 4 + 3,\n # self.color)", "def draw(self):\n self.win.fill(BLACK)\n title1 = self.messenger.text_format(\"La casa\", self.font, 58, WHITE)\n title2 = self.messenger.text_format(\"de\", self.font, 48, WHITE)\n title3 = self.messenger.text_format(\"Marcelo\", self.font, 58, WHITE)\n\n title_rect1 = title1.get_rect()\n title_rect2 = title2.get_rect()\n title_rect3 = title3.get_rect()\n\n self.win.blit(title1, (WIDTH / 3.4 - (title_rect1[2] / 2), 90))\n pygame.draw.rect(self.win, RED, (238, 92, 45, 45))\n self.win.blit(title2, (WIDTH / 2 - (title_rect2[2] / 2), 95))\n self.win.blit(title3, (WIDTH / 1.4 - (title_rect3[2] / 2), 90))\n self.draw_menu(WHITE, WHITE)", "def draw_obstacles(self):\n for obstacle in self.obstacles:\n obstacle.draw(self.window, Colors.BLACK.value)", "def draw(self):\n pass", "def draw(self):\n pass" ]
[ "0.73159957", "0.7200247", "0.7053506", "0.70143944", "0.69643205", "0.6928434", "0.6911108", "0.683837", "0.6818077", "0.67656994", "0.67541057", "0.6746994", "0.6725112", "0.67019", "0.66835344", "0.6659916", "0.66531104", "0.66452307", "0.66397053", "0.66258734", "0.661867", "0.6610415", "0.66100633", "0.6588956", "0.65885186", "0.65792245", "0.65357196", "0.6521772", "0.65195656", "0.65195656" ]
0.78646946
0
Creates correlated ability parameters based on an input correlation matrix. This is a helper function for use in synthesizing multidimensional data assuming multivariate normal distribution
def create_correlated_abilities(correlation_matrix, n_participants, seed=None): rng = np.random.default_rng(seed) lower = np.linalg.cholesky(correlation_matrix) return lower @ rng.standard_normal((correlation_matrix.shape[0], n_participants))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def correlate_columns(matrix):\n return np.dot(matrix.T, matrix) / (la.norm(matrix) ** 2)", "def correl_vars(ds,cutoff=0.65, is_cor_mat_return=True):\n cor_mat = ds.corr() # correl matrix\n \n var1 = []; var2 = []\n for i in range(len(cor_mat.columns)):\n for j in range(len(cor_mat.index)):\n if (abs(cor_mat.iloc[i,j]) > cutoff) & (i>j):\n var1.append(cor_mat.columns[i]); var2.append(cor_mat.index[j])\n \n high_cor_var = list(zip(var1,var2)) # correls vars list\n \n # Getting VIF's\n inv_corr_mat = np.linalg.inv(corr_mat)\n vif = pd.DataFrame(np.diag(inv_corr_mat), index=df.columns).reset_index().rename(columns={'index':'Parameter',0:'VIF'}).sort_values(by = ['VIF'],ascending=False, ignore_index=True)\n \n # Other way by using statsmodels package : added intercept using add_constant as statmodels doesn't include it by default\n# from statsmodels.stats.outliers_influence import variance_inflation_factor\n# from statsmodels.tools.tools import add_constant\n# vif = pd.DataFrame([variance_inflation_factor(add_constant(ds).values, i) for i in range(add_constant(ds).shape[1])], \\\n# index=add_constant(ds).columns, columns=['VIF']).reset_index().rename(columns={'index':'Parameter'}).drop(index=0).sort_values(by = ['VIF'],ascending=False, ignore_index=True)\n \n if is_cor_mat_return :\n correl_dict = {'correl_matrix':cor_mat, 'Correl_vars' : high_cor_var, 'vif':vif}\n return correl_dict\n else :\n correl_dict = {'Correl_vars' : high_cor_var, 'vif':vif}\n return correl_dict", "def create_correlationMatrix(obj):\n pass", "def correlation_matrix(data):\n\n _, num_columns = shape(data)\n\n def matrix_entry(i, j):\n return correlation(get_column(data, i), get_column(data, j))\n\n return make_matrix(num_columns, num_columns, matrix_entry)", "def correlation_matrix(data):\n\n _, num_columns = shape(data)\n\n def matrix_entry(i, j):\n return correlation(get_column(data, i), get_column(data, j))\n\n return make_matrix(num_columns, num_columns, matrix_entry)", "def run_r_corr(df, corr_type='spearman', p_correction='BH'):\r\n num_cols = len(df.columns.values)\r\n r_dataframe = pandas2ri.py2ri(df)\r\n r_as = r['as.matrix']\r\n rcorr = r['rcorr'] \r\n r_p_adjust = r['p.adjust']\r\n result = rcorr(r_as(r_dataframe), type=corr_type)\r\n rho = result[0]\r\n n = result[1]\r\n p = result[2]\r\n \r\n if p_correction is not None:\r\n p = r_p_adjust(p, p_correction)\r\n r_corrs = pandas2ri.ri2py(rho)\r\n r_p_vals = pandas2ri.ri2py(p)\r\n r_counts = pandas2ri.ri2py(n)\r\n r_p_vals = np.reshape(r_p_vals, (num_cols,num_cols))\r\n return r_corrs, r_counts, r_p_vals", "def determine_correlation(var1,var2):\n v1 = np.array(var1)\n v2 = np.array(var2)\n mat = np.c_[(v1,v2)]# np.vstack((v1,v2)) #\n corr = np.corrcoef(mat.T)\n return corr[0][1]", "def _compute_correlation(self, metric_array, corr_type):\n transposed_channels = np.transpose(metric_array)\n df = pd.DataFrame(transposed_channels)\n corr_matrix = df.corr().values\n upper_diag_idxs = np.triu_indices(self.n_channels, 1)\n upper_diag_elems = corr_matrix[upper_diag_idxs]\n\n correlation_dict = {}\n for i, j, corr in zip(*upper_diag_idxs, upper_diag_elems):\n corr_id = 'ch_' + corr_type + '_' + str(i) + '_' + str(j)\n correlation_dict.update(\n {corr_id: corr}\n )\n return correlation_dict", "def _listcorr(a):\n corrs = np.zeros((a[0].shape[1], len(a), len(a)))\n for i in range(len(a)):\n for j in range(len(a)):\n if j > i:\n corrs[:, i, j] = [np.nan_to_num(np.corrcoef(ai, aj)[0, 1])\n for (ai, aj) in zip(a[i].T, a[j].T)]\n return corrs", "def 
linearize_spatial_corr(Mcorr):\n assert type(Mcorr) is numpy.ndarray, 'bad type for matrix argument'\n assert Mcorr.shape == (199,199), 'invalid shape for autocorrelation matrix'\n \n # Scan the correlogram and compute the radius from the midpoint\n n = numpy.zeros(101, 'h')\n c = numpy.zeros(101, 'd')\n mid_x, mid_y = 99.5, 99.5\n for i in xrange(199):\n for j in xrange(199):\n d = numpy.sqrt((mid_y - i)**2 + (mid_x - j)**2)\n if d > 100:\n d = 100\n n[int(d)] += 1\n c[int(d)] += Mcorr[i, j]\n c /= n # get the sample means\n \n # Create the return array: reflect 0->Max correlations to -Max->0 \n Lcorr = numpy.zeros((2,201), 'd')\n Lcorr[0] = numpy.arange(-100, 101)\n Lcorr[1] = numpy.r_[c[::-1], c[1:]]\n \n return Lcorr", "def correlation_matrix(self, attr_list, method=\"pearson\"):\n corr_matrix = self.data.corr(method=method)\n corr_dict = {}\n for attr in attr_list:\n corr_dict[attr] = list(corr_matrix[attr][attr_list].values)\n return json.dumps(corr_dict)", "def correlation_matrix(data):\r\n\r\n _, num_columns = shape(data)\r\n\r\n def matrix_entry(i, j):\r\n return correlation(get_column(data, i), get_column(data, j))\r\n\r\n return make_matrix(num_columns, num_columns, matrix_entry)", "def FormCorrelationMatrix(mat):\n nVars = len(mat[0])\n N = len(mat)\n \n res = numpy.zeros((nVars,nVars),'d')\n for i in range(nVars):\n x = mat[:,i]\n sumX = sum(x)\n sumX2 = sum(x*x)\n for j in range(i,nVars):\n y = mat[:,j]\n sumY = sum(y)\n sumY2 = sum(y*y)\n numerator = N*sum(x*y) - sumX*sumY\n denom = numpy.sqrt((N*sumX2-sumX**2)*(N*sumY2-sumY**2))\n if denom != 0.0:\n res[i,j] = numerator/denom\n res[j,i] = numerator/denom\n else:\n res[i,j] = 0\n res[j,i] = 0\n return res", "def generate_normal_correlated_data(\n N,\n r=0.5,\n mu_a=0,\n mu_b=0,\n sig_a=1,\n sig_b=1,\n lim_a=None,\n lim_b=None,\n tolerance=0.05,\n seed=None,\n verbose=False,\n):\n np.random.seed(seed)\n\n # Construct mean vector\n mu = np.array([mu_a, mu_b])\n\n # Construct covariance matrix\n sig = np.array([[sig_a ** 2, r * sig_a * sig_b], [r * sig_a * sig_b, sig_b ** 2]])\n\n # Simulate data\n delta = np.inf # initialize deviance to target correlation\n\n i = 1 # count tries\n\n while delta > tolerance:\n\n # Draw data from multivariate normal\n data = np.random.multivariate_normal(mean=mu, cov=sig, size=N)\n\n # Truncate data to limits\n if lim_a is not None:\n data[:, 0] = np.clip(data[:, 0], *lim_a)\n if lim_b is not None:\n data[:, 1] = np.clip(data[:, 1], *lim_b)\n\n # Compute generated correlation and deviance\n r_gen, p = pearsonr(data[:, 0], data[:, 1])\n delta = np.abs(r - r_gen)\n\n i += 1\n\n if verbose:\n print(f\"Pearson r = {r_gen:.2f}, p = {p:.4f}\")\n print(f\" (It took {i} tries to achieve the desired tolerance.)\")\n\n return data", "def get_correlation(spreadsheet_mat, phenotype_response, run_parameters):\n correlation_array = np.zeros(spreadsheet_mat.shape[0])\n if 'correlation_measure' in run_parameters:\n if run_parameters['correlation_measure'] == 'pearson':\n\n spreadsheet_mat = spreadsheet_mat - spreadsheet_mat.mean(axis=1).reshape((-1, 1))\n phenotype_response = phenotype_response - phenotype_response.mean()\n spreadsheet_mat_var = np.std(spreadsheet_mat, axis=1)\n phenotype_response_var = np.std(phenotype_response)\n numerator = spreadsheet_mat.dot(phenotype_response)\n denominator = spreadsheet_mat_var * phenotype_response_var * spreadsheet_mat.shape[1]\n with np.errstate(divide='ignore', invalid='ignore'):\n correlation_array = np.true_divide(numerator, denominator)\n 
correlation_array[denominator==0] = 0\n\n return correlation_array\n\n if run_parameters['correlation_measure'] == 't_test':\n \n a = spreadsheet_mat[:, phenotype_response!=0]\n b = spreadsheet_mat[:, phenotype_response==0]\n d = np.mean(a, axis=1) - np.mean(b, axis=1)\n denom = np.sqrt(np.var(a, axis=1, ddof=1)/a.shape[1] + np.var(b, axis=1, ddof=1)/b.shape[1])\n with np.errstate(divide='ignore', invalid='ignore'):\n correlation_array = np.divide(d, denom)\n correlation_array[np.isnan(denom)] = 0\n correlation_array = np.abs(correlation_array)\n\n return correlation_array\n\n return correlation_array", "def correlation_matrix(self, layout={}, **kwargs):\n df = self._data.corr()\n kwargs.update({\n 'zmin': -1, 'zmax': 1,\n 'colors': 'rdbu', 'ncolors': 9,\n 'xgap': 3, 'ygap': 3, 'dtick': 1,\n 'colorbar': {'x': 1 - 0.22},\n })\n\n layout = recursive_update(\n layout, updater={\n 'xaxis': {'showgrid': False, 'zeroline': False},\n 'yaxis': {'showgrid': False, 'zeroline': False},\n })\n\n # square for 1920x1080 screens in awating for better plotly option\n layout = recursive_update(\n layout, updater={\n 'yaxis': {'domain': [0, 1]},\n 'xaxis': {'domain': [0.28215, 1 - 0.28215]},\n })\n\n return df.iplot.heatmap(layout=layout, **kwargs)", "def FormCorrelationMatrix(mat):\n nVars = len(mat[0])\n N = len(mat)\n\n res = numpy.zeros((nVars, nVars), 'd')\n for i in range(nVars):\n x = mat[:, i]\n sumX = sum(x)\n sumX2 = sum(x * x)\n for j in range(i, nVars):\n y = mat[:, j]\n sumY = sum(y)\n sumY2 = sum(y * y)\n numerator = N * sum(x * y) - sumX * sumY\n denom = numpy.sqrt((N * sumX2 - sumX**2) * (N * sumY2 - sumY**2))\n if denom != 0.0:\n res[i, j] = numerator / denom\n res[j, i] = numerator / denom\n else:\n res[i, j] = 0\n res[j, i] = 0\n return res", "def _corr(args, config):\n info('running on correlation function')\n config['gene1'], config['gene2'] = args.gene1, args.gene2\n config['adj_gene'] = args.adjust\n if config['gene1'] and config['gene2']:\n exp_mat = _read_in(config)\n config['exp_mat'] = exp_mat\n else:\n info('gene1 or gene2 error')\n sys.exit(1)\n corr_gene._corr_tumor(config)", "def compute_correlations(struc_df, option, gamma, alpha):\n n_states = len(np.unique(struc_df.objnum))\n nodes = network.temp_node_info()\n adjacency = network.adjacency_mat(nodes)\n L = compute_limit_matrix(0.5, adjacency, n_states)\n L_vector = L.flatten()\n M = learn_sr(struc_df, gamma, alpha)\n M = M[2, 6]\n M_vector = M.flatten()\n\n if option == \"norm\":\n print(\"Norm of L - M: \")\n print(la.norm(L_vector - M_vector, np.inf))\n\n if option == \"correlation\":\n print(\"Correlation of L, M: \")\n print(np.dot(L_vector, M_vector) /\n (la.norm(L_vector) * la.norm(M_vector)))", "def correlation(self,M,operator,site_i,site_j):\n minsite = min(site_i,site_j)\n maxsite = max(site_i,site_j)\n u = np.array([[1]])\n for i in range(0,minsite):\n M[i] = np.tensordot(u, M[i],axes=(-1,1)).transpose(1,0,2)\n l,u = self.left_cannonical(M[i])\n M[i] = l\n M[minsite] = np.tensordot(u, M[minsite]).transpose(1,0,2)\n MP = np.tensordot(M[minsite],operator,axes=(0,0))\n MPI = np.tensordot(MP, np.conj(M[minsite]),axes=(-1,0))\n MPI = MPI.transpose([0,2,1,3])\n for i in range(minsite+1,maxsite):\n MI = np.tensordot(MPI, M[i],axes=(2,1))\n MPI = np.tensordot(MI, np.conj(M[i]), axes=([3,2],[0,1]))\n\n MP = np.tensordot(M[maxsite],operator,axes=(0,0))\n MPJ = np.tensordot(MP, np.conj(M[maxsite]),axes=(-1,0))\n MPJ = MPJ.transpose([0,2,1,3])\n\n product = np.tensordot(MPI,MPJ, axes=([2,3,0,1]))\n correlation = 
np.trace(product)\n\n return correlation", "def correlate_rows(matrix):\n return np.dot(matrix, matrix.T) / (la.norm(matrix) ** 2)", "def correlation(G, variables = [], conditionants = []):\n \n cov = covariance(G, variables = variables, \n conditionants = conditionants)\n k = cov.shape[0]\n sds = sp.Matrix([1/sp.sqrt(cov[i, i]) for i \n in range(0, k)]*k).reshape(k, k)\n \n cor = cov.multiply_elementwise(sds).multiply_elementwise(sds.T)\n return cor.applyfunc(sp.simplify)", "def cat_correl_matrix(df, col_list = None):\n\n if (col_list == None ):\n #select features for which correlations needs to be calculated\n cat_col = df.select_dtypes(['category']).columns\n\n else : \n cat_col = col_list\n\n if (len(cat_col) == 0) : \n return (print('* Categoical columns are not present in input dataset.'+ str('\\n')+ \n '* Please change datatypes to categorical for required features'))\n else :\n\n correl_mat =pd.DataFrame(data='',index=cat_col,columns=cat_col)\n #calculating correlation matrix\n for i in range(len(cat_col)):\n for j in range(i):\n confusion_matrix = pd.crosstab(df[cat_col[i]], df[cat_col[j]]).as_matrix()\n correl_mat.iloc[i,j]= round(100*cramers_v1(confusion_matrix),2)\n #Output \n print(\"Correlation Matrix of categorical variables are:-\")\n return correl_mat", "def _calculate_cc(self, array, corr_range, tau_max, lag_mode):\n\n # lag_mode dict\n mode = self.lag_modi[lag_mode]\n only_tri = int(self.only_tri)\n\n if lag_mode == 'all':\n corrmat = numpy.zeros((2*tau_max + 1, self.N, self.N),\n dtype='float32')\n elif lag_mode == 'sum':\n corrmat = numpy.zeros((2, self.N, self.N), dtype='float32')\n elif lag_mode == 'max':\n corrmat = numpy.zeros((2, self.N, self.N), dtype='float32')\n\n # loop over all node pairs, NOT symmetric due to time shifts!\n for i in range(self.N-only_tri):\n for j in range((i+1)*only_tri, self.N):\n\n if mode == 2:\n maxcross = 0.0\n argmax = 0\n\n # loop over taus INCLUDING the last tau value\n for t in range(2*tau_max+1):\n\n # here the actual cross correlation is calculated\n crossij = (array[tau_max, i, :] * array[t, j, :]).mean()\n\n # fill in values in matrix depending on lag_mode\n if mode == 0:\n corrmat[t, i, j] = crossij\n\n elif mode == 1:\n if t <= tau_max:\n corrmat[1, i, j] += numpy.abs(crossij)\n if t >= tau_max:\n corrmat[0, i, j] += numpy.abs(crossij)\n\n elif mode == 2:\n # calculate max and argmax by comparing to previous\n # value and storing max\n if numpy.abs(crossij) > maxcross:\n maxcross = numpy.abs(crossij)\n argmax = t\n\n if mode == 2:\n corrmat[0, i, j] = maxcross\n corrmat[1, i, j] = argmax - tau_max\n\n if self.only_tri:\n if lag_mode == 'all':\n corrmat = corrmat + corrmat.transpose(0, 2, 1)[::-1]\n elif lag_mode == 'sum':\n corrmat[0] += corrmat[1].transpose()\n corrmat[1] = corrmat[0].transpose()\n elif lag_mode == 'max':\n corrmat[0] += corrmat[0].transpose()\n corrmat[1] -= corrmat[1].transpose()\n\n return corrmat", "def correlate(z_matrix, c_method):\n # Use the selected correlation method\n if c_method.upper() == \"PEARSON\":\n return correlation.pearson(z_matrix)\n elif c_method.upper() == \"ABSPEARSON\":\n return correlation.abspearson(z_matrix)\n elif c_method.upper() == \"DCOR\":\n return correlation.dcor(z_matrix)\n else:\n raise ValueError(\"Unknown correlation method ({})\".format(c_method))", "def get_correlation(self,\n incl_chromosomes,\n fasta_file=None,\n annotation_files=None,\n weights_eval=False,\n *args,\n **kwargs):\n if self.generator_train.__class__.__name__ == 'MultiGenerator':\n assert fasta_file 
and annotation_files,\\\n \"\"\" To evaluate a MultiGenerator model, the fasta file and the\n annotation file need to be passed as inputs.\"\"\"\n command_dict = self.generator_train.command_dict[0]\n\n if 'keras_dna.sequence.SeqIntervalDl' in command_dict.get_details():\n one_hot_encoding = True\n else:\n one_hot_encoding = False\n batch_size = self.generator_train.command_dict[-1].as_input()['batch_size']\n output_shape = self.generator_train.command_dict[-1].as_input()['output_shape']\n else:\n command_dict = self.generator_train.command_dict\n one_hot_encoding = command_dict.as_input()['one_hot_encoding']\n batch_size = command_dict.as_input()['batch_size']\n output_shape = command_dict.as_input()['output_shape']\n\n assert 'keras_dna.sequence.ContinuousDataset' in command_dict.get_details(),\\\n \"\"\"Correlation score is only available for continuous dataset\"\"\"\n \n dico = command_dict.get_details()['keras_dna.sequence.ContinuousDataset']\n if dico['nb_annotation_type']:\n nb_annotation = dico['nb_annotation_type']\n else:\n nb_annotation = 1\n \n if annotation_files:\n if isinstance(dico['annotation_files'], list):\n assert len(annotation_files) == len(dico['annotation_files']),\\\n \"\"\"annotation_files must be a list with the name number of\n entries as annotation_files in the generator, complete with\n zeros if needed\"\"\"\n else:\n assert len(annotation_files) == 1,\\\n \"\"\"annotation_files must be a list with the same number of\n entries as annotation_files in the generator, complete with\n zeros if needed\"\"\"\n indexes = np.where(np.array(annotation_files) != '0')[0]\n \n else:\n if isinstance(dico['annotation_files'], list):\n indexes = range(len(dico['annotation_files']))\n else:\n indexes = [0]\n \n if isinstance(dico['annotation_files'], list):\n nb_types = len(dico['annotation_files']) // nb_annotation\n else:\n nb_types = 1\n\n eval_dict = deepcopy(command_dict.as_input())\n\n if fasta_file:\n eval_dict['fasta_file'] = fasta_file\n\n if annotation_files:\n annotation_files = np.array(annotation_files)\n annotation_files[annotation_files == '0'] = annotation_files[indexes[0]]\n eval_dict['annotation_files'] = list(annotation_files)\n\n eval_dict['incl_chromosomes'] = incl_chromosomes\n eval_dict['batch_size'] = batch_size\n eval_dict['one_hot_encoding'] = one_hot_encoding\n eval_dict['output_shape'] = output_shape\n eval_dict['overlapping'] = False\n\n if not weights_eval:\n eval_dict['weighting_mode'] = None\n\n generator_eval = Generator(**eval_dict)\n\n metrics = [partial(correlate,\n cell_idx=int(idx / nb_annotation),\n idx=int(idx % nb_annotation),\n nb_types=int(nb_types),\n nb_annotation=int(nb_annotation)) for idx in indexes]\n \n for idx, metric in zip(indexes, metrics):\n metric.__name__ = 'correlate_{}_{}'.format(int(idx / nb_annotation),\n int(idx % nb_annotation))\n\n model = clone_model(self.model)\n for i, layer in enumerate(self.model.layers):\n model.layers[i].set_weights(layer.get_weights())\n\n model.compile(optimizer=self.model.optimizer,\n loss=self.model.loss,\n metrics=metrics)\n evaluations = model.evaluate_generator(generator=generator_eval(),\n steps=len(generator_eval),\n *args,\n **kwargs)\n \n return {'correlate_{}_{}'.format(int(idx / nb_annotation),\\\n idx % nb_annotation) : evaluations[idx + 1] for idx in indexes}", "def correlate_assignments(formula, marginals, samples):\n\n # Localize the formula's assignment variable table.\n\n asgvar = formula.assignment_variables\n\n # Iterate over all pairs of signatures\n\n for sig1, sig2 
in itertools.combinations(marginals.keys(), 2):\n\n # If these are a geminal pair, ignore.\n\n if sig1.is_geminal(sig2):\n continue\n\n # If either of these are nailed, ignore.\n\n if len(marginals[sig1]) == 1 or len(marginals[sig2]) == 1:\n continue\n\n # Iterate over all pairs of assignments between these two.\n\n for m1_id, m2_id in itertools.product(marginals[sig1].keys(),\n marginals[sig2].keys()):\n\n # If these are the same methyl, ignore.\n\n if m1_id == m2_id:\n continue\n\n # Get the product of the marginal probability of sig1 -> m1 and\n # sig2 -> m2.\n\n independent_prob = marginals[sig1][m1_id] * marginals[sig2][m2_id]\n\n # Count the fraction of samples in which these assignments co-occur\n\n real_prob = 0\n for sample in samples:\n\n if sample[sig1].seqid == m1_id and sample[sig2].seqid == m2_id:\n real_prob += 1/len(samples)\n\n # Get the methyls with the given sequence IDs.\n\n m1s = [m for m in asgvar[sig1].keys() if m.seqid == m1_id]\n\n m2s = [m for m in asgvar[sig2].keys() if m.seqid == m2_id]\n\n if real_prob > 10*independent_prob:\n\n for m1 in m1s:\n clause = [-1*asgvar[sig1][m1]]\n for m2 in m2s:\n clause.append(asgvar[sig2][m2])\n formula.add_clause(clause)\n\n for m2 in m2s:\n clause = [-1*asgvar[sig2][m2]]\n for m1 in m1s:\n clause.append(asgvar[sig1][m1])\n formula.add_clause(clause)\n\n elif real_prob*10 < independent_prob:\n for m1, m2 in itertools.product(m1s, m2s):\n formula.add_clause([-asgvar[sig1][m1], -asgvar[sig2][m2]])", "def auto_correlation(sequence):\n if isinstance(sequence, basestring):\n sequence = map(int, sequence)\n seq = np.array(sequence, dtype=np.int)\n dseq = np.column_stack((seq[1:], seq[:-1]))\n slope, intercept, r, ttp, see = linregress(seq[1:], seq[:-1])\n cc = np.corrcoef(dseq, rowvar=0)[0][1]\n return {'slope': slope, 'intercept': intercept, 'r-squared': r ** 2,\n 'p': ttp, 'see': see, 'auto_correlation': cc}", "def correlation(data):\n return corrcoef(np.transpose(np.reshape(data, ((data.shape[0] * data.shape[1]), data.shape[2]))))", "def Nmatrix(init_par, alpha, delta, obs, sigma_obs, ccoef, N):\n\tparallax, v, sigma_v = init_par[:-4], init_par[-4:-1], init_par[-1] \n\tplx_obs, mualpha_obs, mudelta_obs = obs[:, 0], obs[:, 1], obs[:, 2]\n\n\tp, q, r = normalTriad(alpha, delta)\n\tmualpha_mod = np.dot(np.transpose(p),v)*parallax/_A\n\tmudelta_mod = np.dot(np.transpose(q),v)*parallax/_A\n\t\n\tplx_mod, mualpha_mod, mudelta_mod = parallax, mualpha_mod, mudelta_mod\n\tsigma_plx, sigma_mualpha, sigma_mudelta = np.transpose(sigma_obs)\n\ta,like, expo, detD = np.ones(N),np.ones(N),np.ones(N), np.ones(N) \n\tC = np.zeros((3,3,N),dtype=np.float64)\n\tC[0,0,:],C[1,1,:],C[2,2,:] = sigma_plx**2.,sigma_mualpha**2., sigma_mudelta**2.\n\tcorr_coefficient_plx_mualpha, corr_coefficient_plx_mudelta, corr_coefficient_mualpha_mudelta = np.zeros(N), np.zeros(N), np.zeros(N)\n\tcorr_coefficient_plx_mualpha[:], corr_coefficient_plx_mudelta[:], corr_coefficient_mualpha_mudelta[:] = ccoef[:, 0], ccoef[:, 1], ccoef[:, 2] \n\t\n\tC[0,1,:], C[0,2,:] = corr_coefficient_plx_mualpha*sigma_plx*sigma_mualpha, corr_coefficient_plx_mudelta*sigma_plx*sigma_mudelta\n\tC[1,0,:], C[1,2,:] = corr_coefficient_plx_mualpha*sigma_plx*sigma_mualpha, corr_coefficient_mualpha_mudelta*sigma_mualpha*sigma_mudelta\n\tC[2,0,:], C[2,1,:] = corr_coefficient_plx_mudelta*sigma_plx*sigma_mudelta, corr_coefficient_mualpha_mudelta*sigma_mualpha*sigma_mudelta\n\tE = np.zeros((3,3,N),dtype=np.float64)\n\tE[1,1,:],E[2,2,:] = (sigma_v**2.)*(parallax/_A)**2., 
(sigma_v**2.)*(parallax/_A)**2.\n\tD,invD = np.zeros((3,3,N),dtype=np.float64),np.zeros((3,3,N),dtype=np.float64)\n\tD = np.add(E,C)\n\tfor i in range(N):\n\t\tdetD[i] = matrix_det(D[:,:,i]) \n\t\tinvD[:,:,i] = matrix_inv(D[:,:,i])\n\t\t\n\ta_c = np.ones((3,N))\n\ta_c = [plx_obs - plx_mod, mualpha_obs - mualpha_mod, mudelta_obs-mudelta_mod]\n\t\n\t\n\n\t\n\tcprime_pi, cprime_vx, cprime_vy, cprime_vz, = np.ones((3,N)), np.ones((3,N)), \\\n\t\t\t\t\t\t\tnp.ones((3,N)), np.ones((3,N)), \n\tcprime_pi[0,:] = 1.\n\tcprime_pi[1,:] = np.dot(np.transpose(p),v)/_A\n\tcprime_pi[2,:] = np.dot(np.transpose(q),v)/_A\n\t\n\tcprime_vx[0,:] = 0.\n\tcprime_vx[1,:] = -np.sin(alpha)*plx_mod/_A \n\tcprime_vx[2,:] = -np.sin(delta)*np.cos(alpha)*plx_mod/_A\n\n\t\n\tcprime_vy[0,:] = 0.\n\tcprime_vy[1,:] = np.cos(alpha)*plx_mod/_A \n\tcprime_vy[2,:] = -np.sin(delta)*np.sin(alpha)*plx_mod/_A\n\n\tcprime_vz[0,:] = 0.\n\tcprime_vz[1,:] = 0. \n\tcprime_vz[2,:] = np.cos(delta)*plx_mod/_A\n\n\tdlnd_dpi, dlnd_dsigmav = np.zeros(N), np.zeros(N)\n\tde_dpi, de_dsigmav = np.zeros(N), np.zeros(N)\n\t\n\n\t### See formula A.5 \n\tde_dpi[:] = ((sigma_v/_A)**2.)*2.*plx_mod[:]\n\tde_dsigmav[:] = ((plx_mod[:]/_A)**2.)*2.*sigma_v\n\t\n\tdlnd_dpi[:] = (invD[1,1,:] + invD[2,2,:])*de_dpi[:] \n\tdlnd_dsigmav[:] = (invD[1,1,:] + invD[2,2,:])*de_dsigmav[:]\n\t\n\t\n\t\n\t### See formula A.7\n\thess = np.zeros((N+4, N+4))\n\n\thess_diag_pi, hess_diag_pi_1, hess_diag_pi_2 = np.zeros(N), np.zeros(N), np.zeros(N)\n\thess_diag_pi_1[:] = invD[0, 0, :]*cprime_pi[0, :]*cprime_pi[0, :] + invD[0, 1, :]*cprime_pi[0, :]*cprime_pi[1, :] + invD[0, 2, :]*cprime_pi[0, :]*cprime_pi[2, :] + \\\n\t\t\t invD[1, 0, :]*cprime_pi[1, :]*cprime_pi[0, :] + invD[1, 1, :]*cprime_pi[1, :]*cprime_pi[1, :] + invD[1, 2, :]*cprime_pi[1, :]*cprime_pi[2, :] + \\\n\t\t \t invD[2, 0, :]*cprime_pi[2, :]*cprime_pi[0, :] + invD[2, 1, :]*cprime_pi[2, :]*cprime_pi[1, :] + invD[2, 2, :]*cprime_pi[2, :]*cprime_pi[2, :]\t\n\t\n\t\n\t#hess_diag_pi_2[:] = np.sum(0.5*(invD[1, 1, :]**2. + 2.*invD[1, 2, :]**2. + invD[2, 2, :]**2.)*de_dpi[:]*de_dpi[:]) ### Check if it's with or without sum: without!\n\t# So correct formula is below.\n\thess_diag_pi_2[:] = (0.5*(invD[1, 1, :]**2. + 2.*invD[1, 2, :]**2. 
+ invD[2, 2, :]**2.)*de_dpi[:]*de_dpi[:])\n\thess_diag_pi[:] = hess_diag_pi_1[:] + hess_diag_pi_2[:]\t\n\n\t\n\thess_diag_vx, hess_diag_vy, hess_diag_vz, hess_diag_sigmav = np.zeros(N), np.zeros(N), np.zeros(N), np.zeros(N)\n\thess_pi_vx, hess_pi_vy, hess_pi_vz, hess_pi_sigmav = np.zeros(N), np.zeros(N), np.zeros(N), np.zeros(N)\n\thess_diag_vxi, hess_diag_vyi, hess_diag_vzi = np.zeros(N), np.zeros(N), np.zeros(N)\n\t\n\thess_diag_vxi[:] = invD[0, 0, :]*cprime_vx[0, :]*cprime_vx[0, :] + invD[0, 1, :]*cprime_vx[0, :]*cprime_vx[1, :] + invD[0, 2, :]*cprime_vx[0, :]*cprime_vx[2, :] + \\\n\t\t\t invD[1, 0, :]*cprime_vx[1, :]*cprime_vx[0, :] + invD[1, 1, :]*cprime_vx[1, :]*cprime_vx[1, :] + invD[1, 2, :]*cprime_vx[1, :]*cprime_vx[2, :] + \\\n\t\t\t invD[2, 0, :]*cprime_vx[2, :]*cprime_vx[0, :] + invD[2, 1, :]*cprime_vx[2, :]*cprime_vx[1, :] + invD[2, 2, :]*cprime_vx[2, :]*cprime_vx[2, :] \t\t\n\t\n\thess_diag_vyi[:] = invD[0, 0, :]*cprime_vy[0, :]*cprime_vy[0, :] + invD[0, 1, :]*cprime_vy[0, :]*cprime_vy[1, :] + invD[0, 2, :]*cprime_vy[0, :]*cprime_vy[2, :] +\\\n\t\t\t invD[1, 0, :]*cprime_vy[1, :]*cprime_vy[0, :] + invD[1, 1, :]*cprime_vy[1, :]*cprime_vy[1, :] + invD[1, 2, :]*cprime_vy[1, :]*cprime_vy[2, :] +\\\n\t\t\t invD[2, 0, :]*cprime_vy[2, :]*cprime_vy[0, :] + invD[2, 1, :]*cprime_vy[2, :]*cprime_vy[1, :] + invD[2, 2, :]*cprime_vy[2, :]*cprime_vy[2, :] \t\n\n\n\thess_diag_vzi[:] = invD[0, 0, :]*cprime_vz[0, :]*cprime_vz[0, :] + invD[0, 1, :]*cprime_vz[0, :]*cprime_vz[1, :] + invD[0, 2, :]*cprime_vz[0, :]*cprime_vz[2, :] +\\\n\t\t\t invD[1, 0, :]*cprime_vz[1, :]*cprime_vz[0, :] + invD[1, 1, :]*cprime_vz[1, :]*cprime_vz[1, :] + invD[1, 2, :]*cprime_vz[1, :]*cprime_vz[2, :] +\\\n\t\t\t invD[2, 0, :]*cprime_vz[2, :]*cprime_vz[0, :] + invD[2, 1, :]*cprime_vz[2, :]*cprime_vz[1, :] + invD[2, 2, :]*cprime_vz[2, :]*cprime_vz[2, :] \t\t\n\t\n\n\thess_pi_vx[:] = invD[0, 0, :]*cprime_pi[0,:]*cprime_vx[0, :] + invD[0, 1, :]*cprime_pi[0,:]*cprime_vx[1, :] + invD[0, 2, :]*cprime_pi[0,:]*cprime_vx[2, :] +\\\n\t\t\tinvD[1, 0, :]*cprime_pi[1,:]*cprime_vx[0, :] + invD[1, 1, :]*cprime_pi[1,:]*cprime_vx[1, :] + invD[1, 2, :]*cprime_pi[1,:]*cprime_vx[2, :] +\\\n\t\t\tinvD[2, 0, :]*cprime_pi[2,:]*cprime_vx[0, :] + invD[2, 1, :]*cprime_pi[2,:]*cprime_vx[1, :] + invD[2, 2, :]*cprime_pi[2,:]*cprime_vx[2, :] \n\n\thess_pi_vy[:] = invD[0, 0, :]*cprime_pi[0,:]*cprime_vy[0, :] + invD[0, 1, :]*cprime_pi[0,:]*cprime_vy[1, :] + invD[0, 2, :]*cprime_pi[0,:]*cprime_vy[2, :] +\\\n\t\t\tinvD[1, 0, :]*cprime_pi[1,:]*cprime_vy[0, :] + invD[1, 1, :]*cprime_pi[1,:]*cprime_vy[1, :] + invD[1, 2, :]*cprime_pi[1,:]*cprime_vy[2, :] +\\\n\t\t\tinvD[2, 0, :]*cprime_pi[2,:]*cprime_vy[0, :] + invD[2, 1, :]*cprime_pi[2,:]*cprime_vy[1, :] + invD[2, 2, :]*cprime_pi[2,:]*cprime_vy[2, :] \n\n\thess_pi_vz[:] = invD[0, 0, :]*cprime_pi[0,:]*cprime_vz[0, :] + invD[0, 1, :]*cprime_pi[0,:]*cprime_vz[1, :] + invD[0, 2, :]*cprime_pi[0,:]*cprime_vz[2, :] +\\\n\t\t\tinvD[1, 0, :]*cprime_pi[1,:]*cprime_vz[0, :] + invD[1, 1, :]*cprime_pi[1,:]*cprime_vz[1, :] + invD[1, 2, :]*cprime_pi[1,:]*cprime_vz[2, :] +\\\n\t\t\tinvD[2, 0, :]*cprime_pi[2,:]*cprime_vz[0, :] + invD[2, 1, :]*cprime_pi[2,:]*cprime_vz[1, :] + invD[2, 2, :]*cprime_pi[2,:]*cprime_vz[2, :] \n\n\t\t\t\t\t\t\n\thess_diag_vx = np.sum(hess_diag_vxi)\n\thess_diag_vy = np.sum(hess_diag_vyi)\n\thess_diag_vz = np.sum(hess_diag_vzi)\t\n\t\n\thess_diag_sigmav = np.sum(0.5*(invD[1, 1, :]**2. + 2.*invD[1, 2, :]**2. 
+ invD[2, 2, :]**2.)*de_dsigmav[:]*de_dsigmav[:])\n\thess_pi_sigmav[:] = 0.5*(invD[1, 1, :]**2. + 2.*invD[1, 2, :]**2. + invD[2, 2, :]**2.)*de_dpi[:]*de_dsigmav[:] \n\n\thess_diag = np.concatenate((hess_diag_pi, np.array([hess_diag_vx, hess_diag_vy, hess_diag_vz, hess_diag_sigmav])))\n\t\n\tfor i in range(N+4):\n\t\thess[i, i] = hess_diag[i]\n\t\t\n\t\n\tfor j in range(N):\n\t\t\thess[j, -4] = hess_pi_vx[j]\n\t\t\thess[j, -3] = hess_pi_vy[j]\n\t\t\thess[j, -2] = hess_pi_vz[j]\n\t\t\thess[j, -1] = hess_pi_sigmav[j]\n\t\t\thess[-4, j] = hess_pi_vx[j]\n\t\t\thess[-3, j] = hess_pi_vy[j] \n\t\t\thess[-2, j] = hess_pi_vz[j]\n\t\t\thess[-1, j] = hess_pi_sigmav[j]\n\t\t\t\n\n\t\n\t\n\tpart_12, part_13, part_23 = np.zeros(N),np.zeros(N),np.zeros(N)\n\tfor ia in range(3):\n\t\tfor ib in range(3):\n\t\t\tpart_12[:] += invD[ia, ib, :]*cprime_vx[ia, :]*cprime_vy[ib, :] \n\t\t\tpart_13[:] += invD[ia, ib, :]*cprime_vx[ia, :]*cprime_vz[ib, :] \n\t\t\tpart_23[:] += invD[ia, ib, :]*cprime_vy[ia, :]*cprime_vz[ib, :] \t\t\t\t\n\n\thess[-4, -3] = np.sum(part_12)\n\thess[-3, -4] = hess[-4, -3]\n\t\n\thess[-4, -2] = np.sum(part_13)\n\thess[-2, -4] = hess[-4, -2]\n\n\thess[-3, -2] = np.sum(part_23)\n\thess[-2, -3] = hess[-3, -2]\n\n\t#### I am returning here the matrix Njk, which is defined as -E(H),\n\t#### where H is the hessian of the likelihood: therefore to obtain the real hessian, one\n\t#### should multiply this by '-1' (see function below.)\n\treturn hess ### See eq. 18" ]
[ "0.58267885", "0.57044107", "0.56744105", "0.5665763", "0.5665763", "0.5660631", "0.5652028", "0.5646071", "0.5644198", "0.560361", "0.5593867", "0.55926245", "0.55782807", "0.55476695", "0.55410445", "0.55390596", "0.5536928", "0.5492058", "0.54820085", "0.54800165", "0.54698306", "0.5461566", "0.53901035", "0.5383367", "0.5378238", "0.53729475", "0.5357149", "0.53521264", "0.5345549", "0.53443855" ]
0.69881344
0
Creates dichotomous unidimensional synthetic IRT data. Creates synthetic IRT data to test parameter estimation functions. Only for use with dichotomous outputs. Assumes the model P(theta) = 1.0 / (1 + exp(-discrimination * (theta - difficulty)))
def create_synthetic_irt_dichotomous(difficulty, discrimination, thetas,
                                     guessing=0, seed=None):
    rng = np.random.default_rng(seed)

    if np.ndim(guessing) < 1:
        guessing = np.full_like(difficulty, guessing)

    continuous_output = irt_evaluation(difficulty, discrimination, thetas)

    # Add guessing parameters
    continuous_output *= (1.0 - guessing[:, None])
    continuous_output += guessing[:, None]

    # convert to binary based on probability
    random_compare = rng.uniform(size=continuous_output.shape)

    return (random_compare <= continuous_output).astype('int')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_synthetic_mirt_dichotomous(difficulty, discrimination, thetas,\n seed=None):\n rng = np.random.default_rng(seed)\n\n # If the input is just a vector of discriminations\n if (np.ndim(discrimination) == 1) or (discrimination.shape[0] == 1):\n discrimination = np.vstack((discrimination,) * difficulty.shape[0])\n\n # Inline computation of the logistic kernel\n kernel_terms = discrimination @ thetas\n kernel_terms += difficulty[:, None]\n continuous_output = expit(kernel_terms)\n\n # convert to binary based on probability\n random_compare = rng.uniform(size=continuous_output.shape)\n\n return (random_compare <= continuous_output).astype('int')", "def create_synthetic_irt_polytomous(difficulty, discrimination, thetas,\n model='grm', seed=None):\n difficulty = np.atleast_2d(difficulty)\n n_items, n_levels = difficulty.shape\n\n if n_levels == 1:\n raise AssertionError(\"Polytomous items must have more than 1 threshold\")\n\n rng = np.random.default_rng(seed)\n\n # Check for single input of discrimination\n if np.atleast_1d(discrimination).size == 1:\n discrimination = np.full((n_items,), discrimination)\n\n theta_length = thetas.shape[-1]\n\n # Get the model to use, will throw error if not supported\n probability_func = {'grm': _graded_func,\n 'grm_md': _graded_func_md,\n 'pcm': _credit_func,\n 'gum': _unfold_func}[model.lower()]\n\n # Check difficulty parameters for validity\n clip_high = _check_difficulty_parameters(difficulty, model.lower())\n\n # Initialize output for memory concerns\n level_scratch = np.zeros((n_levels + 2, theta_length))\n output = np.zeros((n_items, theta_length), dtype='int')\n\n # Loop over items and compute probability estimates\n # for each of the levels and assign level based on\n # those probabilities\n for item_ndx in range(n_items):\n # Obtain the probabilities for the data (in-place)\n probability_func(difficulty[item_ndx], discrimination[item_ndx],\n thetas, level_scratch[1:, :])\n\n # Get the thresholds of the levels\n np.cumsum(level_scratch[1:, :], axis=0, out=level_scratch[1:, :])\n level_scratch[0] = rng.uniform(size=theta_length)\n\n # Discritize the outputs based on the thresholds\n output[item_ndx] = np.apply_along_axis(\n _my_digitize, axis=0, arr=level_scratch)\n\n # Add 1 to return [1, n_levels]\n output += 1\n np.clip(output, 1, clip_high, out=output)\n return output", "def generate_synthetic_data(args):\n number_training_obeservations = args.ntr\n number_testing_obeservations = args.nte\n number_dimensions = args.nd\n mu = args.mu\n feature_model = args.feature_model\n outcome_model = args.outcome_model\n sigma_outcome = args.sigma_outcome\n number_environments = args.ne\n \n T_train = generate_T(number_training_obeservations)\n T_test = generate_T(number_testing_obeservations)\n\n X_train, X_test = generate_x(number_dimensions, T_train, T_test, mu, feature_model)\n \n train_potential_outcome, test_potential_outcome = generate_outcomes(outcome_model, feature_model, X_train, X_test, sigma_outcome)\n\n train_po_control = train_potential_outcome[:,0].reshape(number_training_obeservations,1)\n train_po_treatment = train_potential_outcome[:,1].reshape(number_training_obeservations,1)\n\n y_train = np.multiply(T_train , train_po_treatment) + np.multiply(1-T_train , train_po_control)\n\n return X_train, T_train, y_train, X_test, T_test, test_potential_outcome", "def create_dataset(self, x, t, n_samples, noise, random=True, normalize=True, return_idx=False, random_state=42):\n assert ((x.shape[1] == 1) & (t.shape[1] == 1)), 'x and t should have 
shape (n_samples x 1)'\n u = self.generate_solution(x, t)\n\n X = np.concatenate([t, x], axis=1)\n if random_state is None:\n y = u + noise * np.std(u, axis=0) * np.random.normal(size=u.shape)\n else:\n y = u + noise * np.std(u, axis=0) * np.random.RandomState(seed=random_state).normal(size=u.shape)\n \n\n # creating random idx for samples\n N = y.shape[0] if n_samples == 0 else n_samples\n\n if random is True:\n if random_state is None:\n rand_idx = np.random.permutation(y.shape[0])[:N]\n else:\n rand_idx = np.random.RandomState(seed=random_state).permutation(y.shape[0])[:N]\n else:\n rand_idx = np.arange(y.shape[0])[:N]\n \n # Normalizing\n if normalize:\n if (self.scaling_factor is None):\n self.scaling_factor = (-(np.max(X, axis=0) + np.min(X, axis=0))/2, (np.max(X, axis=0) - np.min(X, axis=0))/2) # only calculate the first time\n X = (X + self.scaling_factor[0]) / self.scaling_factor[1] \n\n # Building dataset\n X_train = torch.tensor(X[rand_idx, :], dtype=torch.float32)\n y_train = torch.tensor(y[rand_idx, :], dtype=torch.float32)\n\n if return_idx is False:\n return X_train, y_train\n else:\n return X_train, y_train, rand_idx", "def make_dataset():\n\n\tnumberOfTrials = dataset_params.num_of_samples\n\tnumberOfTrials_train = int(numberOfTrials*0.8)\n\tnumberOfTrials_test = int(numberOfTrials*0.2)\n\n\tprint(\"==================================================\")\n\tprint(\"1. Generating Train images ......\")\n\tprint(\"\\nTrain image per variation\", numberOfTrials_train)\n\tmakeDataset(numberOfTrials_train, \"train\")\n\n\tprint(\"==================================================\")\n\tprint(\"2. Generating Test images ......\")\n\tprint(\"\\nTest image per variation\", numberOfTrials_test)\n\tmakeDataset(numberOfTrials_test, \"test\")\n\n\tprint(\"==================================================\")\n\tprint(\"Done!!!\")", "def build_toy_dataset(N):\n pi = np.array([0.4, 0.6])\n mus = [[1, 1], [-1, -1]]\n stds = [[0.1, 0.1], [0.1, 0.1]]\n x = np.zeros((N, 2), dtype=np.float32)\n\n for n in range(N):\n k = np.argmax(np.random.multinomial(1, pi))\n x[n, :] = np.random.multivariate_normal(mus[k], np.diag(stds[k]))\n\n return x", "def generate_data(self, model_params, my_N):\n assert \"pies\" in model_params or \"pi\" in model_params\n W = model_params[\"W\"]\n\n D, H_gen = W.shape\n pies = model_params[\"pies\"] if \"pies\" in model_params else model_params[\"pi\"]\n # np.random.seed(0)\n p = np.random.random(size=(my_N, H_gen)) # Create latent vector\n s = p <= pies # Translate into boolean latent vector\n my_data = self.generate_from_hidden(model_params, {\"s\": s})\n\n return my_data", "def createDataset(n, d=100, d_inf=5, is_classification=True, no_signal=False):\n \n # making random inputs, outputs\n X = np.random.normal(0, 1, (3*n, d))\n y = np.random.normal(0, 1, (3*n, 1))\n \n # thresholding y values for classification\n if is_classification:\n y = 2.0*((y>0) - 0.5)\n \n # making the first d_inf dimensions informative\n\n if is_classification:\n X[:,:d_inf] += y*np.random.normal(1.0, 1.5, X[:,:d_inf].shape)\n else:\n snr = 0.05\n X[:,:d_inf] += snr*y\n X = zscore(X, axis=0)\n \n # if you dont want useful signal, randomize the labels\n if no_signal:\n np.random.shuffle(y)\n\n # Divide into train/test/holdout pairs\n outputs = [[X[i::3, :], y[i::3, 0]] for i in range(3)] \n \n return outputs", "def create_dataset(self, x, t, n_samples, noise, random=True, return_idx=False, random_state=42):\n assert ((x.shape[1] == 2) & (t.shape[1] == 1)), 'x and t should have shape 
(n_samples x 1)'\n u = self.generate_solution(x, t)\n\n X = np.concatenate([t, x], axis=1)\n y = u + noise * np.std(u, axis=0) * np.random.normal(size=u.shape)\n\n # creating random idx for samples\n N = y.shape[0] if n_samples == 0 else n_samples\n\n if random is True:\n rand_idx = np.random.RandomState(seed=random_state).permutation(y.shape[0])[:N] # so we can get similar splits for different noise levels\n else:\n rand_idx = np.arange(y.shape[0])[:N]\n\n # Building dataset\n X_train = torch.tensor(X[rand_idx, :], requires_grad=True, dtype=torch.float32)\n y_train = torch.tensor(y[rand_idx, :], requires_grad=True, dtype=torch.float32)\n \n if return_idx is False:\n return X_train, y_train\n else:\n return X_train, y_train, rand_idx", "def generate_data(self, model_params, my_N):\n H, D = self.H, self.D\n pies = model_params['pi']\n\n p = np.random.random(size=(my_N, H)) # Create latent vector\n s = p < pies # Translate into boolean latent vector\n\n return self.generate_from_hidden(model_params, {'s': s})", "def gen_data(N, D):\n X = np.hstack((np.ones((N, 1)), np.random.randn(N,D)))\n w_orig = np.array([[1], [1], [2]])\n y = 1. * ((X @ w_orig + 0.1 * np.random.rand(N, 1)) > 0) * 2 - 1\n return X, y", "def generate_synth_data(n):", "def makePLDS(T, x_0, f, g, Dx, Dy):\n\tX = np.zeros((T, Dx))\n\tY = np.zeros((T, Dy))\n\n\tX[0] = x_0\n\tY[0] = g.sample(x_0)\n\tfor t in range(1,T):\n\t\tX[t] = f.sample(X[t-1])\n\t\tY[t] = g.sample(X[t])\n\treturn X, Y", "def dataConversion(data, I, attr, Na, noise, alpha, R=True):\n n = data.shape[0]\n m = data.shape[1]\n\n data_dict = {}\n structure = []\n # Transform the data to a dictionary and get the data structure\n for j in range(m):\n temp_data = data[:, j].reshape(-1, 1)\n enc = preprocessing.OneHotEncoder(categories='auto')\n enc.fit(temp_data)\n data_dict[j] = enc.transform(temp_data).toarray()\n structure.append(len(set(data[:, j])))\n\n x = np.rint(I * n).astype(int)\n # The indices of the records to which noise will be added\n row_inds = np.random.choice(range(n), x, replace=False)\n # For each corrupted observation:\n for i in row_inds:\n if R:\n attr_inds = np.random.choice(m, size=Na, replace=False)\n else:\n # Specify the attributes to which noise will be added\n attr_inds = attr\n # For each specified attribute\n for j in attr_inds:\n # Divide 'noise' over all the columns belonging to that attribute\n noise_individuals = np.random.dirichlet(np.ones(structure[j] - 1) * alpha, size=1) * noise\n t = 0\n for k in range(structure[j]):\n if data_dict[j][i, k] == 0 and t < noise_individuals.shape[1]:\n data_dict[j][i, k] = noise_individuals[0][t]\n t += 1\n elif data_dict[j][i, k] == 1:\n data_dict[j][i, k] -= noise\n new_data = np.concatenate([data_dict[k] for k in data_dict.keys()], axis=1)\n\n # One-hot encoding GT:\n enc = OneHotEncoder(categories='auto')\n enc.fit(data)\n GT = enc.transform(data).toarray()\n\n return GT, new_data, structure, row_inds", "def create_data_generators(shuffle=True, novelty_type='normal', item_to_include='None',\n scale_level=1):\n\n total_noi_i = 10 # Number of processed images from one environemnt i\n noe = 1 # Numer of environments\n n_p = 32 # Patch size, patch --> n_p x n_p\n\n novelty = novelty_type\n datasets = []\n\n for i in range(noe):\n\n # Load only images of the environment which includes images of the stated novel item.\n if item_to_include is not None and novelty == 'novel_item':\n dataset_env_i = PolycraftDatasetWithSpecificItem(\n nov_type=novelty, noi=total_noi_i, env_idx=i, p_size=n_p, 
scale_factor=scale_level,\n item_name=item_to_include)\n datasets.append(dataset_env_i)\n # We only process the one environment with the item (maybe change this\n # if we have more than one environement per novel_item!?)\n break\n\n # No specific item given which should be included.\n else:\n dataset_env_i = PolycraftDatasetNoSpecificItem(\n nov_type=novelty, noi=total_noi_i, env_idx=i, p_size=n_p, scale_factor=scale_level)\n datasets.append(dataset_env_i)\n\n final_dataset = ConcatDataset(datasets)\n\n total_noi = len(final_dataset) # Total number of processed images from all datasets\n\n if(total_noi < 7):\n print('Number of samples too small for splitting dataset in training-/valid-/test set.')\n\n train_noi = int(0.7 * total_noi) # Number of images used for training (70 %)\n valid_noi = int(0.15 * total_noi) # Number of images used for validation (15 %)\n test_noi = total_noi - train_noi - valid_noi # Number of images used for testing (15 %)\n train_dataset, valid_dataset, test_dataset = torch.utils.data.random_split(\n final_dataset, [train_noi, valid_noi, test_noi])\n\n train_loader = DataLoader(train_dataset, batch_size=1, shuffle=True)\n valid_loader = DataLoader(valid_dataset, batch_size=1, shuffle=True)\n test_loader = DataLoader(test_dataset, batch_size=1, shuffle=True)\n\n return train_loader, valid_loader, test_loader", "def get_t_d(conf, r_inputs, d_data):\n # to crop out unstable part for temporal discriminator, details in TecoGAN supplemental paper\n crop_size_dt = int(conf.train.crop_size * 4 * conf.gan.crop_dt)\n offset_dt = (conf.train.crop_size * 4 - crop_size_dt) // 2\n crop_size_dt = conf.train.crop_size * 4 - offset_dt*2\n paddings = (0, 0, offset_dt, offset_dt, offset_dt, offset_dt, 0, 0)\n\n with nn.parameter_scope(\"discriminator\"):\n real_warp = warp_by_flow(d_data.t_targets, d_data.t_vel)\n real_warp = space_to_depth_disc(real_warp, d_data.t_batch)\n\n # equivalent to tf.image.crop_to_bounding_box\n real_warp = real_warp[:, offset_dt:offset_dt +\n crop_size_dt, offset_dt:offset_dt+crop_size_dt, :]\n real_warp = F.pad(real_warp, paddings)\n before_warp = space_to_depth_disc(d_data.t_targets, d_data.t_batch)\n t_input = space_to_depth_disc(\n r_inputs[:, :d_data.t_size, :, :, :], d_data.t_batch)\n # resizing using bilinear interpolation\n input_hi = F.interpolate(t_input, scale=(4, 4), mode='linear',\n channel_last=True)\n real_warp = F.concatenate(before_warp, real_warp, input_hi)\n\n tdiscrim_real_output, real_layers = discriminator(real_warp)\n\n fake_warp = warp_by_flow(d_data.t_gen_output, d_data.t_vel)\n fake_warp = space_to_depth_disc(fake_warp, d_data.t_batch)\n fake_warp = fake_warp[:, offset_dt:offset_dt +\n crop_size_dt, offset_dt:offset_dt+crop_size_dt, :]\n fake_warp = F.pad(fake_warp, paddings)\n before_warp = space_to_depth_disc(\n d_data.t_gen_output, d_data.t_batch, inplace=False)\n fake_warp = F.concatenate(before_warp, fake_warp, input_hi)\n tdiscrim_fake_output, fake_layers = discriminator(fake_warp)\n\n temporal_disc = collections.namedtuple('temporal_disc', 'tdiscrim_real_output,'\n 'real_layers, tdiscrim_fake_output, fake_layers')\n return temporal_disc(\n tdiscrim_real_output=tdiscrim_real_output,\n real_layers=real_layers,\n tdiscrim_fake_output=tdiscrim_fake_output,\n fake_layers=fake_layers\n )", "def test_ds_1d(i, num_bins):\n np.random.seed(2191+i)\n simulated_dataset = simulate_direction(num_bins, ntaxa=47, nsamples=int(360/num_bins), Sigma_trace=1)\n X, K, sigma, mu = simulated_dataset\n y = np.zeros((X.shape[0]*X.shape[1], 
np.shape(X)[2])) #reformat data for model\n for i in range(len(X)):\n for j in range(len(X[0])):\n y[X.shape[1]*i+j] = X[i,j]\n no_struc = 1\n one_dim = fitModel_1d_util(y)\n for i in range(2):\n print([one_d_AIC(one_dim[1][i], y) for i in range(len(one_dim[1]))])\n #for i in range(2):\n # print([one_d_AWE(one_dim[1][i], y) for i in range(len(one_dim[1]))])\n #print(\"silhouette\")\n #for i in range(len(one_dim[1])):\n # mixing, sigma, delta, Q, Q_edge, edge_mean, mu, likelihoods, iterations = one_dim[1][i]\n # print(silhouette(mixing, sigma, mu, y))\n two_dim = fitModel_2d_util(y)\n for i in range(2):\n print([one_d_AIC(one_dim[1][i], y) for i in range(len(one_dim[1]))])\n print([AIC(two_dim[1][i], y) for i in range(len(two_dim[1]))])\n #one_dim_scores = one_dim[0] #Scores start at 2 bins\n #two_dim_scores = two_dim[0]\n selection = 1 #if selection is negative just assume i'm referring to the 2d case\n return simulated_dataset, one_dim, two_dim, selection", "def generateModelData(params, standoffDistance, nSamples):\n initialEnergy, eLoss, e2, e3, sigma = params\n \n data_x=np.random.uniform(low=0.0, high=distance_cellLength, size=nSamples)\n meanEnergy = initialEnergy + eLoss*data_x + \\\n e2*np.power(data_x,2) + e3 * np.power(data_x,3)\n data_ed= np.random.normal(loc=meanEnergy, scale=sigma)\n data_en = getDDneutronEnergy(data_ed)\n \n neutronDistance = standoffDistance + (distance_cellLength - data_x) + \\\n distance_zeroDegLength/2\n neutronTOF = getTOF(mass_neutron, data_en, neutronDistance)\n effectiveDenergy = (initialEnergy + data_ed)/2\n deuteronTOF = getTOF( mass_deuteron, effectiveDenergy, data_x )\n data_tof = neutronTOF + deuteronTOF\n \n data = np.column_stack((data_x,data_ed,data_en,data_tof))\n return data", "def generate_synthetic_dataset(args):\n logger = logging.getLogger(\"GACM\")\n logger.info('Checking the data files...')\n for data_path in args.train_dirs + args.dev_dirs + args.test_dirs:\n assert os.path.exists(data_path), '{} file does not exist.'.format(data_path)\n assert len(args.test_dirs) > 0, 'No test files are provided.'\n dataset = Dataset(args, train_dirs=args.train_dirs, dev_dirs=args.dev_dirs, test_dirs=args.test_dirs)\n logger.info('Initialize the model...')\n model = Agent(args, len(dataset.qid_query), len(dataset.uid_url), len(dataset.vid_vtype))\n logger.info('model.global_step: {}'.format(model.global_step))\n assert args.load_model > -1\n logger.info('Restoring the model...')\n model.load_model(model_dir=args.load_dir, model_prefix=args.algo, global_step=args.load_model, load_optimizer=False)\n\n synthetic_types = ['deterministic', 'stochastic']\n shuffle_splits = [None, [1, 11], [1, 6, 11]]\n amplifications = [1, 7]\n for synthetic_type in synthetic_types:\n for shuffle_split in shuffle_splits:\n for amplification in amplifications:\n #synthetic_type = 'deterministic'\n #shuffle_split = None\n #amplification = 1\n file_path = os.path.join(args.load_dir, '..', 'synthetic')\n model.generate_synthetic_dataset('test', dataset, file_path, \n 'synthetic_{}_{}_{}.txt'.format(synthetic_type[0].upper(), str(shuffle_split), amplification), \n synthetic_type=synthetic_type, shuffle_split=shuffle_split, amplification=amplification)\n # exit()\n logger.info('Done with click sequence generation.')", "def main() -> None:\n seed = 0\n samples = 100_000\n\n np.random.seed(seed)\n\n s = np.random.binomial(1, 0.6, samples)\n\n x_1f = np.random.normal(0, 0.5, samples)\n x_1 = x_1f\n x_2 = np.random.normal(-1, 3, samples)\n x_2f = x_2\n\n y_1 = np.random.binomial(1, 
sigmoid(x_1))\n y_1f = np.random.binomial(1, sigmoid(x_1f))\n y_2 = np.random.binomial(1, sigmoid(x_2))\n y_2f = y_2\n p = x_1 + x_2\n pf = x_1f + x_2f\n y_3 = np.random.binomial(1, sigmoid(p))\n y_3f = np.random.binomial(1, sigmoid(pf))\n\n noise_1 = np.random.normal(0, 4, samples)\n noise_2 = np.random.normal(3, 7, samples)\n\n print(\n s.mean(),\n x_1.mean(),\n x_1f.mean(),\n x_2.mean(),\n x_2f.mean(),\n y_1.mean(),\n y_1f.mean(),\n y_2.mean(),\n y_2f.mean(),\n y_3.mean(),\n y_3f.mean(),\n )\n\n df = pd.DataFrame(\n data={\n \"x1\": x_1,\n \"x1f\": x_1f,\n \"x2\": x_2,\n \"x2f\": x_2f,\n \"s\": s,\n \"y1\": y_1,\n \"y1f\": y_1f,\n \"y2\": y_2,\n \"y2f\": y_2f,\n \"y3\": y_3,\n \"y3f\": y_3f,\n \"n1\": noise_1,\n \"n2\": noise_2,\n }\n )\n\n # Shuffle the data,\n df = df.sample(frac=1.0, random_state=seed).reset_index(drop=True)\n\n # Save the CSV\n df.to_csv(str(Path(__file__).parent / \"synthetic_scenario_1.csv\"), index=False)", "def generate_model (d):\n return np.random.rand (d+1, 1)", "def make_test_data(self):\r\n\r\n \r\n\r\n print (\"Creating Test Sample:\")\r\n\r\n print (' Period, rate, reps, phases: ', self.period, self.framerate, self.nrepetitions, self.nPhases)\r\n\r\n nframes = int(self.period * self.framerate * self.nrepetitions)\r\n\r\n print (' nframes: ', nframes)\r\n\r\n if self.bkgdNoise > 0.:\r\n\r\n d = np.random.normal(size=(nframes,self.imageSize[0],self.imageSize[1]),\r\n\r\n loc=self.bkgdIntensity, scale=self.bkgdNoise).astype('float32')\r\n\r\n else:\r\n\r\n d = self.bkgdIntensity*np.ones((nframes,self.imageSize[0],self.imageSize[1])).astype('float32')\r\n\r\n \r\n\r\n ds = d.shape\r\n\r\n print (' data shape: ', ds)\r\n\r\n dx = int(ds[2]/4)\r\n\r\n xc = int(ds[2]/2)\r\n\r\n xo = [xc-dx, xc+dx]\r\n\r\n ywidth = int(ds[2]/(self.nPhases+2))\r\n\r\n framedelay = 4\r\n\r\n\r\n\r\n if not self.mode:\r\n\r\n self.phasex = []\r\n\r\n self.phasey = []\r\n\r\n for i in range(0,self.nPhases):\r\n\r\n dy = int((i+1)*ds[2]/(self.nPhases+2)) # each phase is assigned to a region\r\n\r\n self.resp = np.zeros((nframes,))\r\n\r\n self.resp = np.cos(\r\n\r\n np.linspace(0, 2.0*np.pi*nframes/(self.period*self.framerate), nframes-framedelay)+i*np.pi/8 - np.pi/2.0)\r\n\r\n self.resp = np.concatenate((np.zeros(framedelay), self.resp))\r\n\r\n d[:, xo[0]:xo[1], dy:dy+ywidth ] += self.resp[:, np.newaxis, np.newaxis]\r\n\r\n self.phasey.append( (2+(dy+int(ds[2]/self.nPhases))/2))\r\n\r\n self.phasex.append((6+int(ds[1]/2)/2)) # make the signal equivalent of digitized one (baseline 3000, signal at 1e-4 of baseline)\r\n\r\n else:\r\n\r\n self.nPhases = 4\r\n\r\n self.spotsize = 16\r\n\r\n nrpts = 20\r\n\r\n nsites = 4\r\n\r\n one_rep = int(self.period*self.framerate)\r\n\r\n isi = int(self.period*self.framerate/self.nPhases)\r\n\r\n print('period, isi: ', self.period, isi)\r\n\r\n r = np.arange(0, nrpts, 1.)\r\n\r\n alpha = 4.\r\n\r\n A = r/alpha *np.exp(-(r-alpha)/alpha) # scaled alpha function\r\n\r\n self.spot= self.gauss_spot(self.spotsize, 3.) 
# the 2d spot\r\n\r\n sigsize = np.random.normal(size=self.nPhases, loc=self.signal_size, scale=self.signal_size*2)\r\n\r\n sigsize = [np.abs(s) for s in sigsize] # restrict to positive amplitudes\r\n\r\n print ('sigsize: ', sigsize)\r\n\r\n for j in range(self.nrepetitions):\r\n\r\n for i in range(self.nPhases):\r\n\r\n self.resp = np.zeros((nrpts, self.spot.shape[0], self.spot.shape[1]))\r\n\r\n for k in range(nrpts):\r\n\r\n self.resp[k,:,:] += sigsize[i]*A[k] * self.spot # make response an alpha time course of gaussian spot\r\n\r\n start = j*one_rep + i*isi + framedelay\r\n\r\n stop = start + nrpts\r\n\r\n dy = int((i+1)*ds[2]/(self.nPhases+2)) # location for phase\r\n\r\n #dy = dy + 2*z\r\n\r\n# print ('start, stop: ', start, stop)\r\n\r\n for z in range(nsites):\r\n\r\n #self.resp = np.concatenate((np.zeros(framedelay), self.resp))\r\n\r\n xp = xo[0] + i*10 - 10*z\r\n\r\n yp = dy - i*10 + 10*z\r\n\r\n d[start:stop, xp:xp+self.spotsize, yp:yp+self.spotsize ] += self.resp\r\n\r\n self.imageData = d # reduce to a 16-bit map to match camera data type\r\n\r\n self.nFrames = self.imageData.shape[0]\r\n\r\n self.times = np.arange(0, nframes/self.framerate, 1.0/self.framerate)\r\n\r\n print( \" Test Image Created\")\r\n\r\n # imv = pg.ImageView()\r\n\r\n # imv.show()\r\n\r\n # imv.setImage(self.imageData)\r\n\r\n\r\n\r\n if self.layout is not None:\r\n\r\n self.layout.addWidget(imv, 0, 0)\r\n\r\n\r\n\r\n avgImage = np.mean(self.imageData, axis=0)\r\n\r\n ima = pg.ImageView()\r\n\r\n ima.setImage(avgImage)\r\n\r\n self.layout.addWidget(ima, 0, 1)\r\n\r\n self.adjust_image_data()\r\n\r\n self.avgimg = np.mean(self.imageData, axis=0) # get mean image for reference later: average across all time\r\n\r\n print (' Test file, original Image Info: ')\r\n\r\n self.print_image_info()\r\n\r\n self.rebin_image()\r\n\r\n #self.clean_windowerrors()\r\n\r\n # pg.image(self.imageData)\r\n\r\n # pg.show()\r\n\r\n # mpl.figure(1)\r\n\r\n # mpl.show()\r\n\r\n if not self.mode: # FFT analysis\r\n\r\n self.analysis_fourier_map(target=1, mode=0)\r\n\r\n self.plot_maps(mode=2, gfilter=self.gfilter)\r\n\r\n else:\r\n\r\n self.analysis_dFF_map()\r\n\r\n mpl.show()", "def makeDataset(numberOfTrials, data_type):\n\n\tdata_folder = data_type + \"_images\"\n\tlabel_file = os.path.join(dataset_params.data_path, data_type + \"_lables.csv\")\n\n\tutils.create_directory(dataset_params.data_path)\n\tutils.create_directory(os.path.join(dataset_params.data_path, data_folder))\n\n\tallowedRadius = utils.defineShapePerimeter()\n\tcolorsRGB = utils.defineColorValues()\n\tshapeDict = utils.defineShapeSides()\n\tpadding = dataset_params.padding\n\n\tnum = 0\n\toutput_images = [[\"figNum\", \"shape\", \"color\", \"size\", \"background\", \"quadrant\", \"radius\"]]\n\tfor c in dataset_params.colors: # for all 7 foreground colors \n\t\tfor q in dataset_params.quadrants: # for all 4 quadratns \n\t\t\tfor s in dataset_params.shapes: # for all 5 shapes\n\t\t\t\tfor k in dataset_params.sizes: # for all 3 sizes\n\t\t\t\t\tfor b in dataset_params.backgrounds: # for all 3 background colors\n\t\t\t\t\t\tfor i in range(numberOfTrials):\n\t\t\t\t\t\t\tfileName = os.path.join(dataset_params.data_path, data_folder, str(num) + \".png\")\n\t\t\t\t\t\t\tpresentQuadrant = dataset_params.quadrants[q]\n\t\t\t\t\t\t\tradius = random.randint(allowedRadius[s][k][0],allowedRadius[s][k][1])\n\n\t\t\t\t\t\t\tif(presentQuadrant == 3):\n\t\t\t\t\t\t\t\txMin = 128 + padding\n\t\t\t\t\t\t\t\txMax = 255 - radius\n\t\t\t\t\t\t\t\tyMin = 128 + 
padding\n\t\t\t\t\t\t\t\tyMax = 255 - radius\n\n\t\t\t\t\t\t\telif(presentQuadrant == 2):\n\t\t\t\t\t\t\t\txMin = 0 + radius\n\t\t\t\t\t\t\t\txMax = 128 - padding\n\t\t\t\t\t\t\t\tyMin = 128 + padding\n\t\t\t\t\t\t\t\tyMax = 255 - radius\n\n\t\t\t\t\t\t\telif(presentQuadrant == 1):\n\t\t\t\t\t\t\t\txMin = 0 + radius\n\t\t\t\t\t\t\t\txMax = 128 - padding\n\t\t\t\t\t\t\t\tyMin = 0 + radius\n\t\t\t\t\t\t\t\tyMax = 128 - padding\n\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\txMin = 128 + padding\n\t\t\t\t\t\t\t\txMax = 255 - radius\n\t\t\t\t\t\t\t\tyMin = 0 + radius\n\t\t\t\t\t\t\t\tyMax = 128 - padding\n\n\t\t\t\t\t\t\txCenter = random.randint(xMin, xMax)\n\t\t\t\t\t\t\tyCenter = random.randint(yMin, yMax)\n\t\t\t\t\t\t\tcenter = [xCenter, yCenter]\n\n\t\t\t\t\t\t\tif(s == \"circle\"):\n\t\t\t\t\t\t\t\toutput_images.append([num, \"circle\", c, k, b, presentQuadrant, radius])\n\t\t\t\t\t\t\t\timg = makeCircle(c, radius, center, b, colorsRGB)\n\t\t\t\t\t\t\t\timg = img[:,:,::-1]\n\t\t\t\t\t\t\t\tcv2.imwrite(fileName, img)\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tn = shapeDict[s]\n\t\t\t\t\t\t\t\timg = makePolygon(center, n, radius, b, c, colorsRGB)\n\t\t\t\t\t\t\t\timg = img[:,:,::-1]\n\t\t\t\t\t\t\t\tcv2.imwrite(fileName, img)\n\t\t\t\t\t\t\t\toutput_images.append([num, s, c, k, b, presentQuadrant, radius])\n\t\t\t\t\t\t\tnum += 1\n\t\n\tprint(\"Number of image generated\", num)\n\n\tprint(\"Saving \" + data_type + \" data meta information to CSV ......\")\n\tdf = pd.DataFrame(output_images[1:], columns=output_images[0])\n\tdf.to_csv(label_file, index=False)\n\tprint(\"Saved \" + data_type + \" data meta information: \" + data_folder)\n\t\n\n\tprint(\"Saving \" + data_type + \" images data to npz(numpy) compressed file ......\")\n\tmake_npz_file(data_type)\n\tprint(\"Saved \" + data_type + \" images data to npz(numpy) compressed file!\")\n\t\n\treturn None", "def generate_t(d):\n typ= np.random.randint(3)\n if typ == 0:\n t= np.random.normal(size=d)\n elif typ == 1:\n t= np.random.rand(d)\n elif typ == 2:\n t= np.hstack([np.random.normal(size=int(d/2)), np.random.normal(loc=2, size=(d - int(d/2)))])\n \n t= (t - np.min(t))/(np.max(t) - np.min(t))\n \n exponent= np.random.randint(1, 4)\n \n if np.random.randint(2) == 0:\n t= t**exponent\n else:\n t= t**(1.0/exponent)\n \n t= (t - np.min(t))/(np.max(t) - np.min(t))\n \n return np.round(t, decimals=3)", "def prepare_data(dataset, train_ratio=0.8, input_dim=None, seed=10):\n # Retrieve main path of project\n dirname = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))\n\n # Download and store dataset at chosen location\n if dataset == 'Cora' or dataset == 'PubMed' or dataset == 'Citeseer':\n path = os.path.join(dirname, 'data')\n data = Planetoid(path, name=dataset, split='full')[0]\n data.name = dataset\n data.num_classes = (max(data.y)+1).item()\n # data.train_mask, data.val_mask, data.test_mask = split_function(data.y.numpy())\n # data = Planetoid(path, name=dataset, split='public', transform=T.NormalizeFeatures(), num_train_per_class=20, num_val=500, num_test=1000)\n\n elif dataset == 'Amazon':\n path = os.path.join(dirname, 'data', 'Amazon')\n data = Amazon(path, 'photo')[0]\n data.name = dataset\n data.num_classes = (max(data.y)+1).item()\n data.train_mask, data.val_mask, data.test_mask = split_function(\n data.y.numpy(), seed=seed)\n # Amazon: 4896 train, 1224 val, 1530 test\n \n elif dataset in ['syn1', 'syn2', 'syn4', 'syn5']: \n data = synthetic_data(\n dataset, dirname, train_ratio, input_dim)\n \n elif dataset == 'syn6':\n data = 
gc_data(dataset, dirname, train_ratio)\n\n elif dataset == 'Mutagenicity':\n data = gc_data(dataset, dirname, train_ratio)\n\n return data", "def template_dataset(self):\n exp_dict = {\n 'experiment_name': 'ALLEN_all_neurons',\n 'only_process_n': None, # Set to None to process all\n 'randomize_selection': True,\n 'reference_image_key': {'proc_stimuli': 'image'},\n 'reference_label_key': {'neural_trace_trimmed': 'label'},\n 'rf_query': [{\n 'rf_coordinate_range': { # Get all cells\n 'x_min': 40,\n 'x_max': 70,\n 'y_min': 20,\n 'y_max': 50,\n },\n 'cre_line': 'Cux2',\n 'structure': 'VISp'}\n ],\n 'cross_ref': 'rf_coordinate_range_and_stimuli',\n 'store_means': [\n 'image',\n 'label'\n ],\n 'cc_repo_vars': {\n 'output_size': [2, 1], # target variable -- neural activity,\n 'model_im_size': [152, 304, 1],\n 'loss_function': 'pearson',\n 'score_metric': 'pearson',\n 'preprocess': 'resize'\n },\n # 'deconv_method': 'elephant'\n }\n exp_dict = self.add_globals(exp_dict)\n return exp_dict", "def make_dataset_from_trajectories(closed_loop_dynamics, initial_states, dt, state_norm, length, n_reps, noise_std):\n \n data = {\"envs\":initial_states, \"trajectories\":[]}\n data['meta_info'] = {'L':length, 'T': length * dt, 'obs_noise_std':noise_std}\n trajs = generate_trajectories_from_inits(closed_loop_dynamics, initial_states, dt, n_reps, length, noise_std)\n if state_norm is not None:\n Tx = np.diag(state_norm)\n else:\n Tx = np.eye(len(initial_states[0]))\n state = np.moveaxis(trajs, 1, 3)\n state = np.dot(state, Tx)\n trajs = np.moveaxis(state, 3, 1)\n data[\"trajectories\"] = trajs\n return data", "def makeD2hhAsymm(name,\n config,\n KPIDK_string,\n PiPIDK_string,\n Mass_low_string,\n Mass_high_string,\n CombPIDK_string,\n DecayDescriptor,\n inputSel,\n useTOS,\n Hlt1TOS,\n Hlt2TOS\n ) :\n\n def makeTISTOS( name, _input, _hlttos ) :\n from Configurables import TisTosParticleTagger\n _tisTosFilter = TisTosParticleTagger( name + \"Tagger\" )\n _tisTosFilter.TisTosSpecs = _hlttos\n return Selection( name\n , Algorithm = _tisTosFilter\n , RequiredSelections = [ _input ]\n ) \n\n _Kcuts1 = \"~ISMUON & (PT > %(DaugPtMin)s* MeV) & (MIPCHI2DV(PRIMARY) > %(DaugIPChi2)s)\" % locals()['config']\n _KcutsPIDK = KPIDK_string % locals()['config']\n _Kcuts2 = \" & (ISLONG) & (P > %(DaugP)s* MeV) & (TRCHI2DOF < %(DaugTrkChi2)s)\" % locals()['config']\n _Kcuts = _Kcuts1 + _KcutsPIDK + _Kcuts2\n _Picuts1 = \"~ISMUON & (PT > %(DaugPtMin)s* MeV) & (MIPCHI2DV(PRIMARY) > %(DaugIPChi2)s)\" % locals()['config']\n _PicutsPIDK = PiPIDK_string % locals()['config']\n _Picuts2 = \" & (ISLONG) & (P > %(DaugP)s* MeV) & (TRCHI2DOF < %(DaugTrkChi2)s)\" % locals()['config']\n _Picuts = _Picuts1 + _PicutsPIDK + _Picuts2\n _dauCuts = { 'K+': _Kcuts, 'pi+': _Picuts }\n\n _massLow = Mass_low_string % locals()['config']\n _massHigh = Mass_high_string % locals()['config']\n _combCuts1 = \"(APT > %(D0Pt)s* MeV)\" \\\n \"& (AHASCHILD( PT > %(DaugPtMax)s* MeV ) )\" \\\n \"& (ADOCA(1,2)< %(D0DOCA)s* mm)\" \\\n \"& (AP > %(D0P)s* MeV)\" % locals()['config']\n _combCutsPIDK = CombPIDK_string % locals()['config']\n _combCuts = _combCuts1 + _combCutsPIDK + _massLow + _massHigh\n\n _motherCuts = \"(VFASPF(VCHI2PDOF) < %(D0VtxChi2Ndof)s)\" \\\n \"& (BPVVDCHI2 > %(D0FDChi2)s)\" \\\n \"& (BPVLTIME() > %(D0Tau)s)\" \\\n \"& (BPVDIRA > %(D0BPVDira)s)\" % locals()['config']\n\n _D0 = CombineParticles( DecayDescriptor = DecayDescriptor,\n MotherCut = _motherCuts,\n CombinationCut = _combCuts,\n DaughtersCuts = _dauCuts)\n\n _sel = Selection ( 
name+'Sel',\n Algorithm = _D0,\n RequiredSelections = inputSel )\n\n if not useTOS:\n return _sel\n\n _selD2hhHlt1TOS = makeTISTOS( name + \"D2hhHlt1TOS\"\n , _sel\n , Hlt1TOS\n )\n _selD2hhHlt2TOS = makeTISTOS( name + \"D2hhHlt2TOS\"\n , _selD2hhHlt1TOS\n , Hlt2TOS\n )\n \n return _selD2hhHlt2TOS", "def create_data(self):\n\n print (f'Using {self.n_s} simulations for the training data to estimate cov')\n print (f'Using {self.n_p} simulations for the upper/lower training data')\n print (f'Number of splits, to increase number simulations: {self.n_train}')\n print (f'Adding noise to the derivative: {np.invert(self.noiseless_deriv)}')\n\n # Number of upper and lower simulations\n n_p = int(self.n_s * self.derivative_fraction)\n\n # set a seed to surpress the sample variance (EVEN FOR CENTRAL SIMULATIONS)\n seed = np.random.randint(1e6) \n # We should double-check to see if the sample variance if being surpressed\n\n # Perturb lower \n np.random.seed(seed)\n t_m = self.generate_data(np.array([self.theta_fid for i in \n range(self.n_train * self.n_p)])\n ,train = -self.delta_theta, flatten = self.flatten\n ,noiseless_deriv = self.noiseless_deriv) \n # Perturb higher \n np.random.seed(seed)\n t_p = self.generate_data(np.array([theta_fid for i in \n range(self.n_train * self.n_p)])\n ,train = self.delta_theta, flatten = self.flatten\n , noiseless_deriv = self.noiseless_deriv)\n\n # Central\n np.random.seed(seed)\n t = self.generate_data(np.array([self.theta_fid for i in \n range(self.n_train * self.n_s)])\n ,train = None, flatten = self.flatten)\n\n\n # derivative data\n t_d = (t_p - t_m) / (2. * self.delta_theta)\n\n # Save in a dict that the network takes\n data = {\"data\": t, \"data_d\": t_d}\n # for plotting purposes we save the upper/lower separately as well\n data[\"x_m\"], data[\"x_p\"] = t_m, t_p \n\n\n # Repeat the same story to generate test data\n print ('\\n')\n print (f'Using {self.n_s} simulations for the test data to estimate cov')\n print (f'Using {self.n_p_val} simulations for the upper/lower test data')\n print (f'Number of splits, to increase number simulations: {self.n_train_val}')\n print (f'Adding noise to the derivative: {np.invert(self.noiseless_deriv)}')\n print ('\\n')\n\n seed = np.random.randint(1e6)\n # Perturb lower \n np.random.seed(seed)\n tt_m = self.generate_data(np.array([self.theta_fid for i in \n range(self.n_train * self.n_p)])\n , train = -self.delta_theta, flatten = self.flatten\n , noiseless_deriv = self.noiseless_deriv)\n # Perturb higher \n np.random.seed(seed)\n tt_p = self.generate_data(np.array([self.theta_fid for i in \n range(self.n_train * self.n_p)])\n , train = self.delta_theta, flatten = self.flatten\n , noiseless_deriv = self.noiseless_deriv)\n # Central sim\n np.random.seed(seed)\n tt = self.generate_data(np.array([self.theta_fid for i in \n range(self.n_train * self.n_s)])\n , train = None, flatten = self.flatten)\n \n # np.random.seed()\n \n # derivative data\n tt_d = (tt_p - tt_m) / (2. 
* self.delta_theta)\n\n data[\"validation_data\"] = tt \n data[\"validation_data_d\"] = tt_d\n\n # for plotting purposes we save the upper/lower separately\n data[\"x_m_test\"], data[\"x_p_test\"] = tt_m, tt_p \n\n return data", "def create_experiment_dataset(df_degrees, params):\n \n print ('[*] Creating Experiment Dataset')\n if params.strategy == 'random':\n degree_thr = np.array([float(x)/100.0 for x in np.arange(10, 101, params.step_size)])\n else: \n degree_thr = np.array([float(x)/100.0 for x in np.logspace(-2, 2, params.step_size, dtype=float)])\n\n n_users_thr, n_items_thr = [], []\n user_degree_thr, item_degree_thr = [], []\n n_users_max = len(df_degrees['from'].unique())\n n_items_max = len(df_degrees['to'].unique())\n \n for threshold in degree_thr: \n if params.strategy == 'upper_degree_centrality':\n df_temp = df_degrees[df_degrees['degree_centrality_user'] > threshold]\n elif params.strategy == 'lower_degree_centrality':\n df_temp = df_degrees[df_degrees['degree_centrality_user'] < threshold] \n elif params.strategy == 'random':\n df_temp = utils.filter_dataset(df_degrees, degree_user=threshold, degree_item=1, strategy='random')\n\n n_users = len(df_temp['from'].unique()) \n n_users_thr.append(n_users)\n user_degree_thr.append(threshold)\n if n_users == 0 and params.strategy == 'upper_degree_centrality':\n break\n if n_users == n_users_max and params.strategy == 'lower_degree_centrality':\n break \n \n for threshold in degree_thr: \n if params.strategy == 'upper_degree_centrality':\n df_temp = df_degrees[df_degrees['degree_centrality_item'] > threshold]\n elif params.strategy == 'lower_degree_centrality':\n df_temp = df_degrees[df_degrees['degree_centrality_item'] < threshold] \n elif params.strategy == 'random':\n df_temp = utils.filter_dataset(df_degrees, degree_user=1, degree_item=threshold, strategy='random')\n\n n_items = len(df_temp['to'].unique()) \n n_items_thr.append(n_items)\n item_degree_thr.append(threshold)\n if n_items == 0 and params.strategy == 'upper_degree_centrality':\n break\n if n_items == n_items_max and params.strategy == 'lower_degree_centrality':\n break \n\n arr = []\n for u, u_thr in enumerate(user_degree_thr):\n for i, i_thr in enumerate(item_degree_thr):\n arr.append([u_thr, n_users_thr[u], i_thr, n_items_thr[i]])\n df_exp = pd.DataFrame(arr, columns=['degree_centrality_users', 'n_users', 'degree_centrality_items', 'n_items'])\n cond1 = df_exp['n_users'] > 5\n cond2 = df_exp['n_items'] > 5\n df_exp = df_exp[cond1 & cond2]\n df_exp.to_csv(os.path.join('.', 'Variables', 'df_exp.csv'), sep=';', index=None)\n print ('[+] Experiment Dataset Created')\n\n return df_exp" ]
[ "0.72807664", "0.64221025", "0.5949613", "0.5845599", "0.58318764", "0.57768965", "0.5752076", "0.5745334", "0.5730884", "0.5682785", "0.56709635", "0.5626789", "0.5555776", "0.55365014", "0.5451153", "0.5448884", "0.5415505", "0.539355", "0.5340234", "0.53331894", "0.53308266", "0.5323411", "0.5319963", "0.5285057", "0.5276496", "0.52740616", "0.52600193", "0.5251707", "0.52238786", "0.5220519" ]
0.6925462
1
Creates dichotomous multidimensional synthetic IRT data. Assumes the model P(theta) = 1.0 / (1 + exp(-1 * (dot(discrimination, theta) + difficulty)))
def create_synthetic_mirt_dichotomous(difficulty, discrimination, thetas, seed=None): rng = np.random.default_rng(seed) # If the input is just a vector of discriminations if (np.ndim(discrimination) == 1) or (discrimination.shape[0] == 1): discrimination = np.vstack((discrimination,) * difficulty.shape[0]) # Inline computation of the logistic kernel kernel_terms = discrimination @ thetas kernel_terms += difficulty[:, None] continuous_output = expit(kernel_terms) # convert to binary based on probability random_compare = rng.uniform(size=continuous_output.shape) return (random_compare <= continuous_output).astype('int')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_synthetic_irt_dichotomous(difficulty, discrimination, thetas,\n guessing=0, seed=None):\n rng = np.random.default_rng(seed)\n\n if np.ndim(guessing) < 1:\n guessing = np.full_like(difficulty, guessing)\n\n continuous_output = irt_evaluation(difficulty, discrimination, thetas)\n\n # Add guessing parameters\n continuous_output *= (1.0 - guessing[:, None])\n continuous_output += guessing[:, None]\n\n # convert to binary based on probability\n random_compare = rng.uniform(size=continuous_output.shape)\n\n return (random_compare <= continuous_output).astype('int')", "def create_synthetic_irt_polytomous(difficulty, discrimination, thetas,\n model='grm', seed=None):\n difficulty = np.atleast_2d(difficulty)\n n_items, n_levels = difficulty.shape\n\n if n_levels == 1:\n raise AssertionError(\"Polytomous items must have more than 1 threshold\")\n\n rng = np.random.default_rng(seed)\n\n # Check for single input of discrimination\n if np.atleast_1d(discrimination).size == 1:\n discrimination = np.full((n_items,), discrimination)\n\n theta_length = thetas.shape[-1]\n\n # Get the model to use, will throw error if not supported\n probability_func = {'grm': _graded_func,\n 'grm_md': _graded_func_md,\n 'pcm': _credit_func,\n 'gum': _unfold_func}[model.lower()]\n\n # Check difficulty parameters for validity\n clip_high = _check_difficulty_parameters(difficulty, model.lower())\n\n # Initialize output for memory concerns\n level_scratch = np.zeros((n_levels + 2, theta_length))\n output = np.zeros((n_items, theta_length), dtype='int')\n\n # Loop over items and compute probability estimates\n # for each of the levels and assign level based on\n # those probabilities\n for item_ndx in range(n_items):\n # Obtain the probabilities for the data (in-place)\n probability_func(difficulty[item_ndx], discrimination[item_ndx],\n thetas, level_scratch[1:, :])\n\n # Get the thresholds of the levels\n np.cumsum(level_scratch[1:, :], axis=0, out=level_scratch[1:, :])\n level_scratch[0] = rng.uniform(size=theta_length)\n\n # Discritize the outputs based on the thresholds\n output[item_ndx] = np.apply_along_axis(\n _my_digitize, axis=0, arr=level_scratch)\n\n # Add 1 to return [1, n_levels]\n output += 1\n np.clip(output, 1, clip_high, out=output)\n return output", "def build_toy_dataset(N):\n pi = np.array([0.4, 0.6])\n mus = [[1, 1], [-1, -1]]\n stds = [[0.1, 0.1], [0.1, 0.1]]\n x = np.zeros((N, 2), dtype=np.float32)\n\n for n in range(N):\n k = np.argmax(np.random.multinomial(1, pi))\n x[n, :] = np.random.multivariate_normal(mus[k], np.diag(stds[k]))\n\n return x", "def gen_data(N, D):\n X = np.hstack((np.ones((N, 1)), np.random.randn(N,D)))\n w_orig = np.array([[1], [1], [2]])\n y = 1. 
* ((X @ w_orig + 0.1 * np.random.rand(N, 1)) > 0) * 2 - 1\n return X, y", "def generate_model (d):\n return np.random.rand (d+1, 1)", "def discrete_multidim_model():\n idata = from_dict(\n {\"x\": np.random.randint(10, size=(2, 50, 3)), \"y\": np.random.randint(10, size=(2, 50))},\n dims={\"x\": [\"school\"]},\n )\n return idata", "def generate_data(self, model_params, my_N):\n assert \"pies\" in model_params or \"pi\" in model_params\n W = model_params[\"W\"]\n\n D, H_gen = W.shape\n pies = model_params[\"pies\"] if \"pies\" in model_params else model_params[\"pi\"]\n # np.random.seed(0)\n p = np.random.random(size=(my_N, H_gen)) # Create latent vector\n s = p <= pies # Translate into boolean latent vector\n my_data = self.generate_from_hidden(model_params, {\"s\": s})\n\n return my_data", "def makePLDS(T, x_0, f, g, Dx, Dy):\n\tX = np.zeros((T, Dx))\n\tY = np.zeros((T, Dy))\n\n\tX[0] = x_0\n\tY[0] = g.sample(x_0)\n\tfor t in range(1,T):\n\t\tX[t] = f.sample(X[t-1])\n\t\tY[t] = g.sample(X[t])\n\treturn X, Y", "def get_t_d(conf, r_inputs, d_data):\n # to crop out unstable part for temporal discriminator, details in TecoGAN supplemental paper\n crop_size_dt = int(conf.train.crop_size * 4 * conf.gan.crop_dt)\n offset_dt = (conf.train.crop_size * 4 - crop_size_dt) // 2\n crop_size_dt = conf.train.crop_size * 4 - offset_dt*2\n paddings = (0, 0, offset_dt, offset_dt, offset_dt, offset_dt, 0, 0)\n\n with nn.parameter_scope(\"discriminator\"):\n real_warp = warp_by_flow(d_data.t_targets, d_data.t_vel)\n real_warp = space_to_depth_disc(real_warp, d_data.t_batch)\n\n # equivalent to tf.image.crop_to_bounding_box\n real_warp = real_warp[:, offset_dt:offset_dt +\n crop_size_dt, offset_dt:offset_dt+crop_size_dt, :]\n real_warp = F.pad(real_warp, paddings)\n before_warp = space_to_depth_disc(d_data.t_targets, d_data.t_batch)\n t_input = space_to_depth_disc(\n r_inputs[:, :d_data.t_size, :, :, :], d_data.t_batch)\n # resizing using bilinear interpolation\n input_hi = F.interpolate(t_input, scale=(4, 4), mode='linear',\n channel_last=True)\n real_warp = F.concatenate(before_warp, real_warp, input_hi)\n\n tdiscrim_real_output, real_layers = discriminator(real_warp)\n\n fake_warp = warp_by_flow(d_data.t_gen_output, d_data.t_vel)\n fake_warp = space_to_depth_disc(fake_warp, d_data.t_batch)\n fake_warp = fake_warp[:, offset_dt:offset_dt +\n crop_size_dt, offset_dt:offset_dt+crop_size_dt, :]\n fake_warp = F.pad(fake_warp, paddings)\n before_warp = space_to_depth_disc(\n d_data.t_gen_output, d_data.t_batch, inplace=False)\n fake_warp = F.concatenate(before_warp, fake_warp, input_hi)\n tdiscrim_fake_output, fake_layers = discriminator(fake_warp)\n\n temporal_disc = collections.namedtuple('temporal_disc', 'tdiscrim_real_output,'\n 'real_layers, tdiscrim_fake_output, fake_layers')\n return temporal_disc(\n tdiscrim_real_output=tdiscrim_real_output,\n real_layers=real_layers,\n tdiscrim_fake_output=tdiscrim_fake_output,\n fake_layers=fake_layers\n )", "def make_dataset():\n\n\tnumberOfTrials = dataset_params.num_of_samples\n\tnumberOfTrials_train = int(numberOfTrials*0.8)\n\tnumberOfTrials_test = int(numberOfTrials*0.2)\n\n\tprint(\"==================================================\")\n\tprint(\"1. Generating Train images ......\")\n\tprint(\"\\nTrain image per variation\", numberOfTrials_train)\n\tmakeDataset(numberOfTrials_train, \"train\")\n\n\tprint(\"==================================================\")\n\tprint(\"2. 
Generating Test images ......\")\n\tprint(\"\\nTest image per variation\", numberOfTrials_test)\n\tmakeDataset(numberOfTrials_test, \"test\")\n\n\tprint(\"==================================================\")\n\tprint(\"Done!!!\")", "def generate_synthetic_data(args):\n number_training_obeservations = args.ntr\n number_testing_obeservations = args.nte\n number_dimensions = args.nd\n mu = args.mu\n feature_model = args.feature_model\n outcome_model = args.outcome_model\n sigma_outcome = args.sigma_outcome\n number_environments = args.ne\n \n T_train = generate_T(number_training_obeservations)\n T_test = generate_T(number_testing_obeservations)\n\n X_train, X_test = generate_x(number_dimensions, T_train, T_test, mu, feature_model)\n \n train_potential_outcome, test_potential_outcome = generate_outcomes(outcome_model, feature_model, X_train, X_test, sigma_outcome)\n\n train_po_control = train_potential_outcome[:,0].reshape(number_training_obeservations,1)\n train_po_treatment = train_potential_outcome[:,1].reshape(number_training_obeservations,1)\n\n y_train = np.multiply(T_train , train_po_treatment) + np.multiply(1-T_train , train_po_control)\n\n return X_train, T_train, y_train, X_test, T_test, test_potential_outcome", "def create_dataset(self, x, t, n_samples, noise, random=True, normalize=True, return_idx=False, random_state=42):\n assert ((x.shape[1] == 1) & (t.shape[1] == 1)), 'x and t should have shape (n_samples x 1)'\n u = self.generate_solution(x, t)\n\n X = np.concatenate([t, x], axis=1)\n if random_state is None:\n y = u + noise * np.std(u, axis=0) * np.random.normal(size=u.shape)\n else:\n y = u + noise * np.std(u, axis=0) * np.random.RandomState(seed=random_state).normal(size=u.shape)\n \n\n # creating random idx for samples\n N = y.shape[0] if n_samples == 0 else n_samples\n\n if random is True:\n if random_state is None:\n rand_idx = np.random.permutation(y.shape[0])[:N]\n else:\n rand_idx = np.random.RandomState(seed=random_state).permutation(y.shape[0])[:N]\n else:\n rand_idx = np.arange(y.shape[0])[:N]\n \n # Normalizing\n if normalize:\n if (self.scaling_factor is None):\n self.scaling_factor = (-(np.max(X, axis=0) + np.min(X, axis=0))/2, (np.max(X, axis=0) - np.min(X, axis=0))/2) # only calculate the first time\n X = (X + self.scaling_factor[0]) / self.scaling_factor[1] \n\n # Building dataset\n X_train = torch.tensor(X[rand_idx, :], dtype=torch.float32)\n y_train = torch.tensor(y[rand_idx, :], dtype=torch.float32)\n\n if return_idx is False:\n return X_train, y_train\n else:\n return X_train, y_train, rand_idx", "def makeDataset(numberOfTrials, data_type):\n\n\tdata_folder = data_type + \"_images\"\n\tlabel_file = os.path.join(dataset_params.data_path, data_type + \"_lables.csv\")\n\n\tutils.create_directory(dataset_params.data_path)\n\tutils.create_directory(os.path.join(dataset_params.data_path, data_folder))\n\n\tallowedRadius = utils.defineShapePerimeter()\n\tcolorsRGB = utils.defineColorValues()\n\tshapeDict = utils.defineShapeSides()\n\tpadding = dataset_params.padding\n\n\tnum = 0\n\toutput_images = [[\"figNum\", \"shape\", \"color\", \"size\", \"background\", \"quadrant\", \"radius\"]]\n\tfor c in dataset_params.colors: # for all 7 foreground colors \n\t\tfor q in dataset_params.quadrants: # for all 4 quadratns \n\t\t\tfor s in dataset_params.shapes: # for all 5 shapes\n\t\t\t\tfor k in dataset_params.sizes: # for all 3 sizes\n\t\t\t\t\tfor b in dataset_params.backgrounds: # for all 3 background colors\n\t\t\t\t\t\tfor i in 
range(numberOfTrials):\n\t\t\t\t\t\t\tfileName = os.path.join(dataset_params.data_path, data_folder, str(num) + \".png\")\n\t\t\t\t\t\t\tpresentQuadrant = dataset_params.quadrants[q]\n\t\t\t\t\t\t\tradius = random.randint(allowedRadius[s][k][0],allowedRadius[s][k][1])\n\n\t\t\t\t\t\t\tif(presentQuadrant == 3):\n\t\t\t\t\t\t\t\txMin = 128 + padding\n\t\t\t\t\t\t\t\txMax = 255 - radius\n\t\t\t\t\t\t\t\tyMin = 128 + padding\n\t\t\t\t\t\t\t\tyMax = 255 - radius\n\n\t\t\t\t\t\t\telif(presentQuadrant == 2):\n\t\t\t\t\t\t\t\txMin = 0 + radius\n\t\t\t\t\t\t\t\txMax = 128 - padding\n\t\t\t\t\t\t\t\tyMin = 128 + padding\n\t\t\t\t\t\t\t\tyMax = 255 - radius\n\n\t\t\t\t\t\t\telif(presentQuadrant == 1):\n\t\t\t\t\t\t\t\txMin = 0 + radius\n\t\t\t\t\t\t\t\txMax = 128 - padding\n\t\t\t\t\t\t\t\tyMin = 0 + radius\n\t\t\t\t\t\t\t\tyMax = 128 - padding\n\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\txMin = 128 + padding\n\t\t\t\t\t\t\t\txMax = 255 - radius\n\t\t\t\t\t\t\t\tyMin = 0 + radius\n\t\t\t\t\t\t\t\tyMax = 128 - padding\n\n\t\t\t\t\t\t\txCenter = random.randint(xMin, xMax)\n\t\t\t\t\t\t\tyCenter = random.randint(yMin, yMax)\n\t\t\t\t\t\t\tcenter = [xCenter, yCenter]\n\n\t\t\t\t\t\t\tif(s == \"circle\"):\n\t\t\t\t\t\t\t\toutput_images.append([num, \"circle\", c, k, b, presentQuadrant, radius])\n\t\t\t\t\t\t\t\timg = makeCircle(c, radius, center, b, colorsRGB)\n\t\t\t\t\t\t\t\timg = img[:,:,::-1]\n\t\t\t\t\t\t\t\tcv2.imwrite(fileName, img)\n\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\tn = shapeDict[s]\n\t\t\t\t\t\t\t\timg = makePolygon(center, n, radius, b, c, colorsRGB)\n\t\t\t\t\t\t\t\timg = img[:,:,::-1]\n\t\t\t\t\t\t\t\tcv2.imwrite(fileName, img)\n\t\t\t\t\t\t\t\toutput_images.append([num, s, c, k, b, presentQuadrant, radius])\n\t\t\t\t\t\t\tnum += 1\n\t\n\tprint(\"Number of image generated\", num)\n\n\tprint(\"Saving \" + data_type + \" data meta information to CSV ......\")\n\tdf = pd.DataFrame(output_images[1:], columns=output_images[0])\n\tdf.to_csv(label_file, index=False)\n\tprint(\"Saved \" + data_type + \" data meta information: \" + data_folder)\n\t\n\n\tprint(\"Saving \" + data_type + \" images data to npz(numpy) compressed file ......\")\n\tmake_npz_file(data_type)\n\tprint(\"Saved \" + data_type + \" images data to npz(numpy) compressed file!\")\n\t\n\treturn None", "def generate_synth_data(n):", "def generate_data(self, model_params, my_N):\n H, D = self.H, self.D\n pies = model_params['pi']\n\n p = np.random.random(size=(my_N, H)) # Create latent vector\n s = p < pies # Translate into boolean latent vector\n\n return self.generate_from_hidden(model_params, {'s': s})", "def createDataset(n, d=100, d_inf=5, is_classification=True, no_signal=False):\n \n # making random inputs, outputs\n X = np.random.normal(0, 1, (3*n, d))\n y = np.random.normal(0, 1, (3*n, 1))\n \n # thresholding y values for classification\n if is_classification:\n y = 2.0*((y>0) - 0.5)\n \n # making the first d_inf dimensions informative\n\n if is_classification:\n X[:,:d_inf] += y*np.random.normal(1.0, 1.5, X[:,:d_inf].shape)\n else:\n snr = 0.05\n X[:,:d_inf] += snr*y\n X = zscore(X, axis=0)\n \n # if you dont want useful signal, randomize the labels\n if no_signal:\n np.random.shuffle(y)\n\n # Divide into train/test/holdout pairs\n outputs = [[X[i::3, :], y[i::3, 0]] for i in range(3)] \n \n return outputs", "def create_dataset(self, x, t, n_samples, noise, random=True, return_idx=False, random_state=42):\n assert ((x.shape[1] == 2) & (t.shape[1] == 1)), 'x and t should have shape (n_samples x 1)'\n u = self.generate_solution(x, 
t)\n\n X = np.concatenate([t, x], axis=1)\n y = u + noise * np.std(u, axis=0) * np.random.normal(size=u.shape)\n\n # creating random idx for samples\n N = y.shape[0] if n_samples == 0 else n_samples\n\n if random is True:\n rand_idx = np.random.RandomState(seed=random_state).permutation(y.shape[0])[:N] # so we can get similar splits for different noise levels\n else:\n rand_idx = np.arange(y.shape[0])[:N]\n\n # Building dataset\n X_train = torch.tensor(X[rand_idx, :], requires_grad=True, dtype=torch.float32)\n y_train = torch.tensor(y[rand_idx, :], requires_grad=True, dtype=torch.float32)\n \n if return_idx is False:\n return X_train, y_train\n else:\n return X_train, y_train, rand_idx", "def generate_T(self):\n T = np.empty([self.lmbd, self.d * 2])\n # loop for sampling with replacement\n for i in range(self.lmbd):\n random_id = np.random.randint(low=0, high=self.mi-1)\n T[i, :] = self.P[random_id, :]\n\n return T", "def get_d_data(conf, flow_hr, gen_outputs, r_targets, rnn_length):\n # 3 frames are used as one entry, the last input images%3 frames are abandoned\n t_size = int(3 * (rnn_length // 3))\n t_gen_output = F.reshape(gen_outputs[:, :t_size, :, :, :], (conf.train.batch_size *\n t_size, conf.train.crop_size*4, conf.train.crop_size*4, 3),\n inplace=False)\n t_targets = F.reshape(r_targets[:, :t_size, :, :, :], (conf.train.batch_size *\n t_size, conf.train.crop_size*4, conf.train.crop_size*4, 3),\n inplace=False)\n t_batch = conf.train.batch_size * t_size // 3\n t_inputs_v_pre_batch = F.identity(\n flow_hr[:, 0:t_size:3, :, :, :]) # forward motion reused,\n t_inputs_v_batch = nn.Variable(t_inputs_v_pre_batch.shape)\n # no motion for middle frames\n t_inputs_v_batch.data.zero()\n t_inputs_v_nxt_batch = F.identity(\n flow_hr[:, -2:-1-t_size:-3, :, :, :]) # backward motion\n\n t_vel = F.stack(\n *[t_inputs_v_pre_batch, t_inputs_v_batch, t_inputs_v_nxt_batch], axis=2)\n # batch, t_size/3, 3, FLAGS.crop_size*4, FLAGS.crop_size*4, 2\n t_vel = F.reshape(t_vel, (conf.train.batch_size*t_size,\n conf.train.crop_size*4, conf.train.crop_size*4, 2), inplace=False)\n # Stop gradient to fnet from discriminator, details in TecoGAN supplemental paper\n t_vel.need_grad = False\n\n disc_data = collections.namedtuple('disc_data',\n 't_vel, t_gen_output, t_batch, t_targets, t_size')\n return disc_data(\n t_vel=t_vel,\n t_gen_output=t_gen_output,\n t_batch=t_batch,\n t_targets=t_targets,\n t_size=t_size\n )", "def TNG_net(self): \n \n import h5py as h5\n filename = localpath+'input/yields/TNG/SNII.hdf5'\n # Read H5 file\n f = h5.File(filename, \"r\")\n \n # Define element indexing\t\t\t\n indexing = {}\n indexing['H'] = 'Hydrogen'\n indexing['He'] = 'Helium'\n indexing['C'] = 'Carbon'\n indexing['N']= 'Nitrogen'\n indexing['O'] = 'Oxygen'\n indexing['Ne'] = 'Neon'\n indexing['Mg'] = 'Magnesium'\n indexing['Si'] = 'Silicon'\n indexing['S'] = 'Sulphur' # Not used by TNG simulation\n indexing['Ca'] = 'Calcium' # Not used by TNG simulation\n indexing['Fe'] = 'Iron'\n \n self.elements = list(indexing.keys())\n \n self.table = {}\n \n # Define masses / metallicities\n self.metallicities = list(f['Metallicities'].value)\n self.masses = f['Masses'].value\n\n \n for z_index,z in enumerate(self.metallicities):\n \n yield_subtable = {}\n \n z_name = f['Yield_names'].value[z_index].decode('utf-8')\n z_data = f['Yields/'+z_name+'/Yield']\n \n ejecta_mass = f['Yields/'+z_name+'/Ejected_mass'].value\n \n yield_subtable['Mass'] = self.masses\n remnants = self.masses-ejecta_mass\n yield_subtable['mass_in_remnants'] = 
np.divide(remnants,self.masses)\n for el in list(indexing.keys()):\n yield_subtable[el] = np.zeros(len(self.masses))\n \n summed_yields = np.zeros(len(self.masses))\n \n for m_index,mass in enumerate(self.masses):\n for el_index,el in enumerate(self.elements):\n el_yield_fraction = z_data[el_index][m_index]/mass #(mass-remnants[m_index]) # Find fraction of ejecta per element\n yield_subtable[el][m_index] = el_yield_fraction\t\t\t\t\t\n summed_yields[m_index]+=el_yield_fraction # Compute total yield\n \n yield_subtable['unprocessed_mass_in_winds'] = 1.-summed_yields-yield_subtable['mass_in_remnants']\n \n # Restructure table\n all_keys = ['Mass','mass_in_remnants','unprocessed_mass_in_winds']+self.elements\n \n list_of_arrays = [yield_subtable[key] for key in all_keys]\n restructure_subtable = np.core.records.fromarrays(list_of_arrays,names=all_keys)\n \n self.table[z] = restructure_subtable", "def dataConversion(data, I, attr, Na, noise, alpha, R=True):\n n = data.shape[0]\n m = data.shape[1]\n\n data_dict = {}\n structure = []\n # Transform the data to a dictionary and get the data structure\n for j in range(m):\n temp_data = data[:, j].reshape(-1, 1)\n enc = preprocessing.OneHotEncoder(categories='auto')\n enc.fit(temp_data)\n data_dict[j] = enc.transform(temp_data).toarray()\n structure.append(len(set(data[:, j])))\n\n x = np.rint(I * n).astype(int)\n # The indices of the records to which noise will be added\n row_inds = np.random.choice(range(n), x, replace=False)\n # For each corrupted observation:\n for i in row_inds:\n if R:\n attr_inds = np.random.choice(m, size=Na, replace=False)\n else:\n # Specify the attributes to which noise will be added\n attr_inds = attr\n # For each specified attribute\n for j in attr_inds:\n # Divide 'noise' over all the columns belonging to that attribute\n noise_individuals = np.random.dirichlet(np.ones(structure[j] - 1) * alpha, size=1) * noise\n t = 0\n for k in range(structure[j]):\n if data_dict[j][i, k] == 0 and t < noise_individuals.shape[1]:\n data_dict[j][i, k] = noise_individuals[0][t]\n t += 1\n elif data_dict[j][i, k] == 1:\n data_dict[j][i, k] -= noise\n new_data = np.concatenate([data_dict[k] for k in data_dict.keys()], axis=1)\n\n # One-hot encoding GT:\n enc = OneHotEncoder(categories='auto')\n enc.fit(data)\n GT = enc.transform(data).toarray()\n\n return GT, new_data, structure, row_inds", "def gen_mixture():\n npr.seed(0)\n num_exp = int(1e4)\n x_dim = 2\n z_dim = 2\n mu1 = [5, 5,]\n mu2 = [-5, -5]\n theta = np.array([[2,1],[-1,-2]])\n sigma = 0.1\n u = npr.uniform((num_exp,))\n z = np.zeros((num_exp, z_dim))\n cov = np.zeros((z_dim, z_dim))\n np.fill_diagonal(cov, 1)\n sz = int(num_exp/2)\n z[:sz, ]= npr.multivariate_normal(mu1, cov,sz)\n z[sz:, ] = npr.multivariate_normal(mu2,cov,sz)\n mu_x = [email protected]()\n\n x = np.zeros((num_exp, x_dim))\n for i in range(num_exp):\n x[i,:] = npr.multivariate_normal(mu_x[:,i], sigma*cov)\n print(x.shape)\n np.save('data/syn_mixture.npy', x)", "def generate_mog_dataset():\n\n n_per_class = 100\n dim = 2\n n_gaussians = 4\n mus = [(0, 1), (-1, 0), (0, -1), (1, 0)]\n mus = [torch.tensor(m) for m in mus]\n var = 0.05\n\n inputs, labels = [], []\n\n for id in range(n_gaussians):\n # Generate input data by mu + x @ sqrt(cov)\n cov = np.sqrt(var) * torch.eye(dim)\n mu = mus[id]\n inputs.append(mu + torch.randn(n_per_class, dim) @ cov)\n\n # Labels\n labels.append(torch.tensor(n_per_class * [1.0 if id < 2 else 0.0]))\n\n return torch.cat(inputs, dim=0), torch.cat(labels, dim=0)", "def 
create_dataset(img_rows=128, img_cols=128):\n print('Creating original dataset from the raw data')\n # first, get the patients directory names located in the data/ directory. These names (e.g. 'patient0001') will\n # be used for indexing (also avoid hidden files & folders)\n patients = [name for name in os.listdir(os.path.join(os.curdir, 'data/')) if not name.startswith('.')]\n\n # We sort this list to get the patients id in increasing order\n patients.sort(key=lambda s: s[-3:]) # sort according to last 3 characters\n\n # create an empty numpy.ndarray which will contain the images (resized to (img_rows, img_cols))\n images = np.ndarray((2 * len(patients), img_rows, img_cols), dtype=np.uint8) # 2 images per patient\n masks = np.ndarray((2 * len(patients), img_rows, img_cols), dtype=np.uint8) # 2 masks per patient\n\n # we now go through each patient's directory :\n idx = 0\n for patient in patients:\n\n for phase in ['ED', 'ES']:\n\n # read image & mask\n img, _, _, _ = load_mhd_data('data/{pa}/{pa}_4CH_{ph}.mhd'.format(pa=patient, ph=phase))\n mask, _, _, _ = load_mhd_data('data/{pa}/{pa}_4CH_{ph}_gt.mhd'.format(pa=patient, ph=phase))\n\n # resize the img & the mask to (img_rows, img_cols) to keep the network input manageable\n img = resize(img, (img_cols, img_rows), mode='reflect', preserve_range=True)\n mask = resize(mask, (img_cols, img_rows), mode='reflect', preserve_range=True)\n\n # now, save the resized image to the images np.ndarray\n images[idx] = img\n\n # save the corresponding mask to masks np.ndarray (at the same index)\n masks[idx] = mask\n\n idx += 1\n\n print('Created 2 np.ndarrays containing images & masks.')\n\n # Create directory to store files.\n directory = os.path.join(os.getcwd(), 'output/processed_data/')\n if not os.path.exists(directory):\n os.makedirs(directory)\n\n # save all ndarrays to a .npy files (for faster loading later)\n np.save('output/processed_data/images.npy', images)\n np.save('output/processed_data/masks.npy', masks)\n print('Saving to .npy files done: see files\\noutput/processed_data/images.npy & \\noutput/processed_data/masks.npy.')", "def create_datastructures_for_target_mtz(experiments, mtz_file):\n m = mtz.object(mtz_file)\n ind = m.extract_miller_indices()\n cols = m.columns()\n col_dict = {c.label(): c for c in cols}\n r_t = flex.reflection_table()\n if \"I\" in col_dict: # nice and simple\n r_t[\"miller_index\"] = ind\n r_t[\"intensity\"] = col_dict[\"I\"].extract_values().as_double()\n r_t[\"variance\"] = flex.pow2(col_dict[\"SIGI\"].extract_values().as_double())\n elif \"IMEAN\" in col_dict: # nice and simple\n r_t[\"miller_index\"] = ind\n r_t[\"intensity\"] = col_dict[\"IMEAN\"].extract_values().as_double()\n r_t[\"variance\"] = flex.pow2(col_dict[\"SIGIMEAN\"].extract_values().as_double())\n elif \"I(+)\" in col_dict: # need to combine I+ and I- together into target Ih\n if col_dict[\"I(+)\"].n_valid_values() == 0: # use I(-)\n r_t[\"miller_index\"] = ind\n r_t[\"intensity\"] = col_dict[\"I(-)\"].extract_values().as_double()\n r_t[\"variance\"] = flex.pow2(\n col_dict[\"SIGI(-)\"].extract_values().as_double()\n )\n elif col_dict[\"I(-)\"].n_valid_values() == 0: # use I(+)\n r_t[\"miller_index\"] = ind\n r_t[\"intensity\"] = col_dict[\"I(+)\"].extract_values().as_double()\n r_t[\"variance\"] = flex.pow2(\n col_dict[\"SIGI(+)\"].extract_values().as_double()\n )\n else: # Combine both - add together then use Ih table to calculate I and sigma\n r_tplus = flex.reflection_table()\n r_tminus = flex.reflection_table()\n 
r_tplus[\"miller_index\"] = ind\n r_tplus[\"intensity\"] = col_dict[\"I(+)\"].extract_values().as_double()\n r_tplus[\"variance\"] = flex.pow2(\n col_dict[\"SIGI(+)\"].extract_values().as_double()\n )\n r_tminus[\"miller_index\"] = ind\n r_tminus[\"intensity\"] = col_dict[\"I(-)\"].extract_values().as_double()\n r_tminus[\"variance\"] = flex.pow2(\n col_dict[\"SIGI(-)\"].extract_values().as_double()\n )\n r_tplus.extend(r_tminus)\n r_tplus.set_flags(\n flex.bool(r_tplus.size(), False), r_tplus.flags.bad_for_scaling\n )\n r_tplus = r_tplus.select(r_tplus[\"variance\"] != 0.0)\n Ih_table = create_Ih_table(\n [experiments[0]], [r_tplus], anomalous=True\n ).blocked_data_list[0]\n r_t[\"intensity\"] = Ih_table.Ih_values\n inv_var = Ih_table.sum_in_groups(Ih_table.weights, output=\"per_refl\")\n r_t[\"variance\"] = 1.0 / inv_var\n r_t[\"miller_index\"] = Ih_table.miller_index\n else:\n raise KeyError(\"Unable to find intensities (tried I, IMEAN, I(+)/I(-))\")\n logger.info(f\"Extracted {r_t.size()} intensities from target mtz\")\n r_t = r_t.select(r_t[\"variance\"] > 0.0)\n if r_t.size() == 0:\n raise ValueError(\"No reflections with positive sigma remain after filtering\")\n r_t[\"d\"] = (\n miller.set(\n crystal_symmetry=crystal.symmetry(\n space_group=m.space_group(), unit_cell=m.crystals()[0].unit_cell()\n ),\n indices=r_t[\"miller_index\"],\n )\n .d_spacings()\n .data()\n )\n r_t.set_flags(flex.bool(r_t.size(), True), r_t.flags.integrated)\n\n exp = Experiment()\n exp.crystal = deepcopy(experiments[0].crystal)\n exp.identifier = ersatz_uuid4()\n r_t.experiment_identifiers()[len(experiments)] = exp.identifier\n r_t[\"id\"] = flex.int(r_t.size(), len(experiments))\n\n # create a new KB scaling model for the target and set as scaled to fix scale\n # for targeted scaling.\n params = Mock()\n params.KB.decay_correction.return_value = False\n exp.scaling_model = KBScalingModel.from_data(params, [], [])\n exp.scaling_model.set_scaling_model_as_scaled() # Set as scaled to fix scale.\n return exp, r_t", "def makeD2hhAsymm(name,\n config,\n KPIDK_string,\n PiPIDK_string,\n Mass_low_string,\n Mass_high_string,\n CombPIDK_string,\n DecayDescriptor,\n inputSel,\n useTOS,\n Hlt1TOS,\n Hlt2TOS\n ) :\n\n def makeTISTOS( name, _input, _hlttos ) :\n from Configurables import TisTosParticleTagger\n _tisTosFilter = TisTosParticleTagger( name + \"Tagger\" )\n _tisTosFilter.TisTosSpecs = _hlttos\n return Selection( name\n , Algorithm = _tisTosFilter\n , RequiredSelections = [ _input ]\n ) \n\n _Kcuts1 = \"~ISMUON & (PT > %(DaugPtMin)s* MeV) & (MIPCHI2DV(PRIMARY) > %(DaugIPChi2)s)\" % locals()['config']\n _KcutsPIDK = KPIDK_string % locals()['config']\n _Kcuts2 = \" & (ISLONG) & (P > %(DaugP)s* MeV) & (TRCHI2DOF < %(DaugTrkChi2)s)\" % locals()['config']\n _Kcuts = _Kcuts1 + _KcutsPIDK + _Kcuts2\n _Picuts1 = \"~ISMUON & (PT > %(DaugPtMin)s* MeV) & (MIPCHI2DV(PRIMARY) > %(DaugIPChi2)s)\" % locals()['config']\n _PicutsPIDK = PiPIDK_string % locals()['config']\n _Picuts2 = \" & (ISLONG) & (P > %(DaugP)s* MeV) & (TRCHI2DOF < %(DaugTrkChi2)s)\" % locals()['config']\n _Picuts = _Picuts1 + _PicutsPIDK + _Picuts2\n _dauCuts = { 'K+': _Kcuts, 'pi+': _Picuts }\n\n _massLow = Mass_low_string % locals()['config']\n _massHigh = Mass_high_string % locals()['config']\n _combCuts1 = \"(APT > %(D0Pt)s* MeV)\" \\\n \"& (AHASCHILD( PT > %(DaugPtMax)s* MeV ) )\" \\\n \"& (ADOCA(1,2)< %(D0DOCA)s* mm)\" \\\n \"& (AP > %(D0P)s* MeV)\" % locals()['config']\n _combCutsPIDK = CombPIDK_string % locals()['config']\n _combCuts = _combCuts1 + 
_combCutsPIDK + _massLow + _massHigh\n\n _motherCuts = \"(VFASPF(VCHI2PDOF) < %(D0VtxChi2Ndof)s)\" \\\n \"& (BPVVDCHI2 > %(D0FDChi2)s)\" \\\n \"& (BPVLTIME() > %(D0Tau)s)\" \\\n \"& (BPVDIRA > %(D0BPVDira)s)\" % locals()['config']\n\n _D0 = CombineParticles( DecayDescriptor = DecayDescriptor,\n MotherCut = _motherCuts,\n CombinationCut = _combCuts,\n DaughtersCuts = _dauCuts)\n\n _sel = Selection ( name+'Sel',\n Algorithm = _D0,\n RequiredSelections = inputSel )\n\n if not useTOS:\n return _sel\n\n _selD2hhHlt1TOS = makeTISTOS( name + \"D2hhHlt1TOS\"\n , _sel\n , Hlt1TOS\n )\n _selD2hhHlt2TOS = makeTISTOS( name + \"D2hhHlt2TOS\"\n , _selD2hhHlt1TOS\n , Hlt2TOS\n )\n \n return _selD2hhHlt2TOS", "def template_dataset(self):\n exp_dict = {\n 'experiment_name': 'ALLEN_all_neurons',\n 'only_process_n': None, # Set to None to process all\n 'randomize_selection': True,\n 'reference_image_key': {'proc_stimuli': 'image'},\n 'reference_label_key': {'neural_trace_trimmed': 'label'},\n 'rf_query': [{\n 'rf_coordinate_range': { # Get all cells\n 'x_min': 40,\n 'x_max': 70,\n 'y_min': 20,\n 'y_max': 50,\n },\n 'cre_line': 'Cux2',\n 'structure': 'VISp'}\n ],\n 'cross_ref': 'rf_coordinate_range_and_stimuli',\n 'store_means': [\n 'image',\n 'label'\n ],\n 'cc_repo_vars': {\n 'output_size': [2, 1], # target variable -- neural activity,\n 'model_im_size': [152, 304, 1],\n 'loss_function': 'pearson',\n 'score_metric': 'pearson',\n 'preprocess': 'resize'\n },\n # 'deconv_method': 'elephant'\n }\n exp_dict = self.add_globals(exp_dict)\n return exp_dict", "def generate_t(d):\n typ= np.random.randint(3)\n if typ == 0:\n t= np.random.normal(size=d)\n elif typ == 1:\n t= np.random.rand(d)\n elif typ == 2:\n t= np.hstack([np.random.normal(size=int(d/2)), np.random.normal(loc=2, size=(d - int(d/2)))])\n \n t= (t - np.min(t))/(np.max(t) - np.min(t))\n \n exponent= np.random.randint(1, 4)\n \n if np.random.randint(2) == 0:\n t= t**exponent\n else:\n t= t**(1.0/exponent)\n \n t= (t - np.min(t))/(np.max(t) - np.min(t))\n \n return np.round(t, decimals=3)", "def generate_data(example_size):\n\n\n # A placeholder for data.\n x = np.zeros((example_size, 2))\n\n # Randomly selects one of two classes for each example.\n class_id = np.random.randint(2, size=example_size)\n\n # Generates data for the class 0.\n class_0_idx = np.where(class_id == 0)\n mean0 = [0.0, 1.0]\n cov0 = [[0.4, 0.0], [0.0, 1.0]]\n x[class_0_idx[0], :] = np.random.multivariate_normal(\n mean0, cov0, class_0_idx[0].shape[0])\n\n # Generates data for the class 0.\n class_1_idx = np.where(class_id == 1)\n mean1 = [1.0, 2.0]\n cov1 = [[1.0, 0.0], [0.0, 0.4]]\n x[class_1_idx[0], :] = np.random.multivariate_normal(\n mean1, cov1, class_1_idx[0].shape[0])\n\n return (x, class_id)", "def simulate(self):\r\n \r\n # Worker ID\r\n self.RawData[:, 0] = np.arange(start=1, stop=self.NumInd+1, step=1, dtype=np.int8)\r\n \r\n # IID draws for observables from standard normal distribution\r\n self.RawData[:, 1] = np.random.randn(self.NumInd) # Draw for X\r\n self.RawData[:, 2] = np.random.randn(self.NumInd) # Draw for Z\r\n \r\n # IID draws for unobservables from normal distribution with variance 0.04\r\n self.RawData[:, 3] = 0.2 * np.random.randn(self.NumInd) # Draw for epsilon\r\n self.RawData[:, 4] = 0.2 * np.random.randn(self.NumInd) # Draw for u\r\n self.RawData[:, 5] = 0.2 * np.random.randn(self.NumInd) # Draw for xi\r\n \r\n # DGP for log W := eta * X + Z + u\r\n self.RawData[:, 6] = self.η * self.RawData[:, 1] + self.RawData[:, 2] + self.RawData[:, 4]\r\n \r\n # 
DGP for log R := delta_0 + log W + delta * Z + xi\r\n self.RawData[:, 7] = self.δ0 + self.RawData[:, 6] + self.δ * self.RawData[:, 2] + self.RawData[:, 5]\r\n \r\n # DGP for log beta := nu * X + a * xi + eps\r\n self.RawData[:, 8] = self.ν * self.RawData[:, 1] + self.RawData[:, 3] + self.a * self.RawData[:, 5]\r\n \r\n # Determination of labor force participation (LFP)\r\n self.RawData[:, 9] = (np.log(self.ρ) + self.RawData[:, 6] - self.RawData[:, 7] >= 0)\r\n \r\n # Computing optimal number of hours worked\r\n self.RawData[:, 10] = np.power(np.divide(self.ρ * np.exp(self.RawData[:, 6]),\r\n np.exp(self.RawData[:, 8])), \r\n np.divide(1., self.γ)) * self.RawData[:, 9] \r\n \r\n # Hiding wage details for non-participants in the labor market\r\n self.RawData[:, 6] = self.RawData[:, 6] * self.RawData[:, 9] \r\n \r\n \r\n # Converstion to Pandas Dataframe\r\n panel_from_sim = pd.DataFrame(data=self.RawData, columns=self.DataColumns)\r\n\r\n # Establishing data types for each column of the Pandas DataFrame.\r\n panel_from_sim = panel_from_sim.astype({'ID': np.uint, 'X': np.float, 'Z': np.float,\r\n 'ϵ': np.float, 'u': np.float, 'ξ': np.float, \r\n 'W': np.float, 'R': np.float, 'β': np.float,\r\n 'LFP': np.uint, 'H': np.float})\r\n \r\n # panel_from_sim[['W', 'R', 'β']].applymap(np.exp)\r\n \r\n # Store RawData as a Pandas dataframe\r\n self.RawData = panel_from_sim.copy()" ]
[ "0.6372233", "0.6330223", "0.599442", "0.56800026", "0.5632845", "0.5620684", "0.5599266", "0.5580485", "0.55694973", "0.55557424", "0.55496943", "0.55215365", "0.5489964", "0.54840696", "0.54775316", "0.5472303", "0.5399753", "0.53778225", "0.53512293", "0.5343214", "0.5336143", "0.53075904", "0.5296409", "0.5296231", "0.5232811", "0.5205358", "0.5188737", "0.51723236", "0.51578724", "0.51529044" ]
0.725456
0
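A minimal usage sketch for the create_synthetic_mirt_dichotomous snippet in the row above. It assumes numpy and scipy.special.expit are in scope, as that snippet implies; the item count, loadings, and seed below are illustrative choices, not values from the source.

import numpy as np
from scipy.special import expit  # logistic kernel used by the generator

rng = np.random.default_rng(0)
difficulty = np.linspace(-1.5, 1.5, 10)          # one offset per item (10 items)
discrimination = rng.uniform(0.5, 2.0, (10, 2))  # item-by-dimension loadings
thetas = rng.standard_normal((2, 500))           # 2 latent traits x 500 subjects

responses = create_synthetic_mirt_dichotomous(difficulty, discrimination,
                                              thetas, seed=42)
print(responses.shape)  # (10, 500): one 0/1 response per item and subject

Each entry is 1 exactly when a uniform draw falls at or below expit(discrimination @ thetas + difficulty), which matches the logistic model stated in the query.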
Creates polytomous unidimensional synthetic IRT data, with responses taking a specified number of ordered levels in [1, levels]
def create_synthetic_irt_polytomous(difficulty, discrimination, thetas, model='grm', seed=None): difficulty = np.atleast_2d(difficulty) n_items, n_levels = difficulty.shape if n_levels == 1: raise AssertionError("Polytomous items must have more than 1 threshold") rng = np.random.default_rng(seed) # Check for single input of discrimination if np.atleast_1d(discrimination).size == 1: discrimination = np.full((n_items,), discrimination) theta_length = thetas.shape[-1] # Get the model to use, will throw error if not supported probability_func = {'grm': _graded_func, 'grm_md': _graded_func_md, 'pcm': _credit_func, 'gum': _unfold_func}[model.lower()] # Check difficulty parameters for validity clip_high = _check_difficulty_parameters(difficulty, model.lower()) # Initialize output for memory concerns level_scratch = np.zeros((n_levels + 2, theta_length)) output = np.zeros((n_items, theta_length), dtype='int') # Loop over items and compute probability estimates # for each of the levels and assign level based on # those probabilities for item_ndx in range(n_items): # Obtain the probabilities for the data (in-place) probability_func(difficulty[item_ndx], discrimination[item_ndx], thetas, level_scratch[1:, :]) # Get the thresholds of the levels np.cumsum(level_scratch[1:, :], axis=0, out=level_scratch[1:, :]) level_scratch[0] = rng.uniform(size=theta_length) # Discritize the outputs based on the thresholds output[item_ndx] = np.apply_along_axis( _my_digitize, axis=0, arr=level_scratch) # Add 1 to return [1, n_levels] output += 1 np.clip(output, 1, clip_high, out=output) return output
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fullfact(levels):\n n = len(levels) # number of factors\n nb_lines = np.prod(levels) # number of trial conditions\n H = np.zeros((nb_lines, n))\n \n level_repeat = 1\n range_repeat = np.prod(levels)\n for i in range(n):\n range_repeat //= levels[i]\n lvl = []\n for j in range(levels[i]):\n lvl += [j]*level_repeat\n rng = lvl*range_repeat\n level_repeat *= levels[i]\n H[:, i] = rng\n \n return H", "def gen_level(\n root_path,\n floor_file = 'floor.lines',\n walls_file = 'walls.lines',\n windows_file = 'windows.lines',\n doors_file = 'doors.lines',\n handrails_file = 'handrails.lines',\n stairs_file = 'stairs.lines'):\n \n def internal(\n floor_thickness = .3,\n walls_thickness = .2, \n walls_height = 3, \n windows_fn = default_window(),\n doors_fn = default_door(),\n doors_height = 2.3,\n handrails_height = 1.2,\n handrails_thickness = .1):\n \n floor = gen_floor(root_path + floor_file, floor_thickness)\n \n walls = w8.gen_walls(\n root_path + walls_file, \n walls_thickness, \n walls_height + floor_thickness, \n external = False)\n \n windows = gen_windows(\n root_path + windows_file, \n windows_fn,\n walls_height + floor_thickness)\n \n doors, doorways = gen_doors(\n root_path + doors_file, \n doors_fn, \n doors_height)\n \n handrails = w8.gen_walls(\n root_path + handrails_file, \n handrails_thickness, \n handrails_height + floor_thickness,\n external = False)\n \n stair_foot = gen_stairs_foot(root_path + stairs_file)\n walls = DIFFERENCE([walls, T(3)(floor_thickness)(doorways)])\n \n return walls, windows, doors, handrails, floor, stair_foot\n \n return internal", "def build_poly(x, degree):\n \"\"\"\n Assemble the 3 label vectors with the original ordering \n Inputs:\n - x (ndarray) : binary prediction for set 1\n - degree (int) : binary prediction for set 2 \n Outputs: \n - p (ndarray) : predicted labels for test set ( with the original ordering)\n \"\"\"\n # forming a matrix containing the data points\n terms = np.hstack([np.ones([x.shape[0],1]),np.tile(x,(1,degree))])\n index = np.arange(degree)+1\n \n # forming a matrix contnaining the exponents\n exponents = np.multiply(np.ones((1, x.shape[1])), index[:, np.newaxis])\n exponents = exponents.reshape([1, x.shape[1]*degree])\n exponents = np.multiply(exponents, np.ones([x.shape[0], 1]))\n exponents = np.hstack([np.ones( (x.shape[0], 1) ),exponents])\n \n # using the exponent matrix as the element-wise exponents of the terms in the terms matrix\n p=np.power(terms,exponents)\n return p", "def get_poly_waypoints(t,n):\n waypoints_t = np.linspace(0, t, n)\n k1 = 0.1\n k2 = 0.1\n k3 = 1.0\n x = (k1*waypoints_t)#**2\n y = (k2*waypoints_t)#**3\n z = k3*waypoints_t\n #x = k1*np.ones_like(waypoints_t)\n #y = k2*np.ones_like(waypoints_t)\n #z = waypoints_t\n\n\n return np.stack((x, y, z), axis=-1)", "def to_std_levels(ident, filename, levels=None, return_table=False, **kwargs):\n import numpy as np\n from . import support as sp\n from . 
import std_plevels\n from .interp import dataframe\n\n if levels is None:\n levels = std_plevels\n\n kwargs = sp.kw_handle(kwargs, mname=ident, adddate=True)\n sp.message(ident, levels, **kwargs)\n\n # READ ASCII\n sp.message(\"Reading ascii data into dataframes\", **kwargs)\n if kwargs.get('uadb', False):\n data, station = uadb_ascii_to_dataframe(filename, **kwargs) # Dataframe\n else:\n data, station = ascii_to_dataframe(filename, **kwargs) # DataFrame\n\n #\n # interpolation to standard pressure levels\n #\n # Todo Convert pressure to gph\n # Todo Convert gph to pressure\n pindex = np.isfinite(data['pres']) # because of geopotential height in early days\n sp.message(\"Missing pressure values\", (~pindex).sum(), **kwargs)\n sp.message(\"Interpolating to standard pressure levels\", **kwargs)\n data = dataframe(data[pindex], 'pres', levels=levels, **kwargs)\n sp.message(\"Converting to xarray\", **kwargs)\n data = data.to_xarray()\n sp.message(\"Adding Metadata\", **kwargs)\n for ivar in list(data.data_vars):\n if ivar in _metadata.keys():\n if 'dpd' in ivar:\n if 'dewp' not in data.data_vars:\n attrs = _metadata[ivar]\n # attrs.update({'esat': 'foeewmo', 'rounded': 1})\n data[ivar].attrs.update(attrs)\n else:\n data[ivar].attrs.update(_metadata[ivar])\n\n if kwargs.get('uadb', False):\n data.attrs.update({'ident': ident, 'source': 'NCAR RSA', 'dataset': 'UADB, ds370.1', 'processed': 'UNIVIE, IMG',\n 'interpolated': 'to pres levs (#%d)' % len(levels)})\n else:\n data.attrs.update({'ident': ident, 'source': 'NOAA NCDC', 'dataset': 'IGRAv2', 'processed': 'UNIVIE, IMG',\n 'interpolated': 'to pres levs (#%d)' % len(levels)})\n\n sp.message(\"Converting temperature and humidity\", **kwargs)\n data['temp'] += 273.2 # Kelvin\n data['rhumi'] /= 100. # ratio\n\n # if as_table:\n # station = station.to_xarray().reindex(date=data.date)\n #\n # else:\n if station.index.duplicated().any():\n station = station.reset_index().drop_duplicates('date', keep='last').set_index('date')\n\n station = station.reindex(np.unique(data.date.values)) # same dates as data\n station = station.fillna(method='ffill') # fill Missing information with last known\n station = station.to_xarray()\n station.attrs.update(data.attrs.copy())\n del station.attrs['interpolated']\n\n sp.message(\"Collecting Station information\", **kwargs)\n for ivar in _metadata.keys():\n if ivar in station.data_vars:\n station[ivar].attrs.update(_metadata[ivar])\n\n # for ivar, idata in station.data_vars.items():\n # data[ivar] = idata\n\n if return_table:\n return data, station\n #\n # Convert to 2d Array\n #\n data = dataframe_to_array(data, dim='date', plev='pres', levels=levels)\n return data, station", "def build_poly(x, degree):\n tx = np.zeros((x.shape[0], x.shape[1]*(degree+1)))\n \n for j in range(degree+1):\n tx[:,x.shape[1]*j:x.shape[1]*(j+1)] = np.power(x,j)\n \n return tx", "def triangular_numbers(N_levels):\n zernike_rows = list(np.arange(1, N_levels + 1))\n triangular = {}\n for i, zernike_per_row in enumerate(zernike_rows):\n total = np.sum(zernike_rows[:i+1])\n triangular[zernike_per_row] = total\n\n return triangular", "def __init__(self, estimator, levels):\n if isinstance(levels, int):\n levels = np.arange(levels)\n\n self.est = estimator\n self.levels = levels\n self.tmpshape = None", "def render_pyramid(pyr, levels):\n positionLst = []\n finalLst = []\n if levels > len(pyr):\n print(\"error. 
number of levels to display is more than max_levels\")\n width = 0\n\n for i in range(levels):\n # streching each layer\n pyr[i] = strech_helper(pyr[i])\n width += pyr[i].shape[1]\n positionLst.append((pyr[i].shape[0], pyr[i].shape[1]))\n\n for i in range(levels):\n zeros = np.zeros(shape=(pyr[0].shape[0], pyr[i].shape[1]))\n zeros[:positionLst[i][0], :positionLst[i][1]] = pyr[i]\n finalLst.append(zeros)\n res = np.concatenate(finalLst, axis=1)\n return res", "def _generate_poly_array(self, nchan, coeff=[]):\n if nchan < 0:\n raise ValueError, \"nchan should be >=0\"\n if len(coeff)==0:\n if nchan ==0: return []\n else: raise ValueError, \"No valid coefficient given.\"\n polyarr = numpy.zeros(nchan)\n for iorder in range(len(coeff)):\n polyarr += coeff[iorder]*numpy.array(xrange(nchan))**iorder\n return polyarr", "def generate_lorenz_data(\r\n ics, t, n_points, normalization=None, sigma=10, beta=8 / 3, rho=28\r\n):\r\n n_ics = ics.shape[0]\r\n n_steps = t.size\r\n\r\n d = 3\r\n z = np.zeros((n_ics, n_steps, d))\r\n for i in range(n_ics):\r\n z[i] = simulate_lorenz(ics[i], t, sigma=sigma, beta=beta, rho=rho)\r\n\r\n if normalization is not None:\r\n z *= normalization\r\n\r\n np.savetxt(\r\n \"output/lorenz%s.txt\" % linear,\r\n np.reshape(z, (len(z[:, 0, 0]) * len(z[0, :, 0]), len(z[0, 0, :]))),\r\n delimiter=\",\",\r\n )\r\n\r\n n = n_points\r\n L = 1\r\n y_spatial = np.linspace(-L, L, n)\r\n modes = np.zeros((2 * d, n))\r\n for i in range(2 * d):\r\n test = legendre(i)\r\n modes[i] = legendre(i)(y_spatial)\r\n\r\n x1 = np.zeros((n_ics, n_steps, n))\r\n x2 = np.zeros((n_ics, n_steps, n))\r\n x3 = np.zeros((n_ics, n_steps, n))\r\n x4 = np.zeros((n_ics, n_steps, n))\r\n x5 = np.zeros((n_ics, n_steps, n))\r\n x6 = np.zeros((n_ics, n_steps, n))\r\n\r\n x = np.zeros((n_ics, n_steps, n))\r\n for i in range(n_ics):\r\n for j in range(n_steps):\r\n x1[i, j] = modes[0] * z[i, j, 0]\r\n x2[i, j] = modes[1] * z[i, j, 1]\r\n x3[i, j] = modes[2] * z[i, j, 2]\r\n x4[i, j] = modes[3] * z[i, j, 0] ** 3\r\n x5[i, j] = modes[4] * z[i, j, 1] ** 3\r\n x6[i, j] = modes[5] * z[i, j, 2] ** 3\r\n\r\n if linear:\r\n x[i, j] = x1[i, j] + x2[i, j] + x3[i, j]\r\n else:\r\n x[i, j] = (\r\n x1[i, j] + x2[i, j] + x3[i, j] + x4[i, j] + x5[i, j] + x6[i, j]\r\n )\r\n\r\n return x # data\r", "def __create_sample_data__(npts = 20):\n\t#data function\n\tdef wavy(x, y):\n\t\treturn np.sin(0.2*np.pi*x)*np.cos(0.4*np.pi*y)\n\t\n\t#make grid\n\txs = np.linspace(0, 2*20, 2*npts + 1)\n\tys = np.linspace(0, 20, npts + 1)\n\t(xgrid, ygrid) = np.meshgrid(xs, ys)\n\tzgrid = wavy(xgrid, ygrid)\n\t\n\treturn (xgrid, ygrid, zgrid)", "def toyData(w,sigma,N): \n #Degree of polynomial \n degree=w.size; \n \n #generate x values \n x=np.linspace(0, 1,N);\n \n poly=preprocessing.PolynomialFeatures(degree-1,include_bias=True)\n \n PHI=poly.fit_transform(x.reshape(N,1)) \n \n y=np.dot(PHI,w);\n \n target=y+np.random.normal(0, sigma, N);\n \n Out=[x,y,PHI, target]\n\n return Out", "def levels_to_data(levels):\n\n b4_conv_fact = [1, 4, 16, 64]\n levels = levels.reshape(levels.size / 4, 4)\n data = np.array(np.dot(levels, b4_conv_fact), dtype = np.uint8)\n\n return data", "def polyencode(ctx, inputfiles, output, poly_order, reflect):\n with rio.open(inputfiles[0]) as src:\n metaprof = src.profile.copy()\n\n metaprof.update(dtype=np.float32, count=(poly_order + 1))\n\n data = read_all(inputfiles, reflect=reflect)\n\n out = poly_multid(data, poly_order).astype(np.float32)\n\n with rio.open(output, \"w\", **metaprof) as dst:\n for i in 
range(poly_order + 1):\n dst.write(out[:, :, i], i + 1)", "def _set_level_ops(self, input_t, level, layers=None):\n tensor, output_t = input_t, []\n if layers:\n for layer_group in layers:\n for layer in layer_group:\n tensor = layer(tensor)\n output_t.append(tensor)\n else:\n layers = [[], []]\n enc_spec = list(map(deepcopy, self.m_cfg['new_model']['enc']))\n dec_spec = list(map(deepcopy, self.m_cfg['new_model']['dec']))\n for cont, spec in enumerate([enc_spec, dec_spec]):\n for layer_cfg in spec:\n layer_cfg['name'] = str(level) + '/' + layer_cfg['name']\n layer = KLayers(layer_cfg.pop('type')).value(**layer_cfg)\n tensor = layer(tensor)\n layers[cont] += [layer]\n output_t.append(tensor)\n\n latent_t = output_t[0]\n output_t = output_t[1]\n return latent_t, output_t, layers", "def dwt_levels(filtered_image, levels, quantization_Array):\n assert len(levels) <= 4\n for level in levels:\n filtered_image[level[0]] = dwt(\n filtered_image[level[0]], quantization_Array)\n try:\n # continue recursively\n dwt_levels(filtered_image[level[0]],\n level[1], quantization_Array)\n except IndexError:\n # happens when level has one element\n # we are done, no recursive lists left\n continue", "def build_poly(tx, degree) :\n shape = tx.shape\n poly = np.zeros((shape[0], shape[1] * degree))\n poly[:,:shape[1]] = tx\n for deg in range(2, degree + 1) :\n for j in range(0, shape[1]) :\n poly[:, shape[1] * (deg - 1) + j] = tx[:,j] ** deg\n return poly", "def grid_levels(self, nlevels):\n for j in np.arange(nlevels):\n if j in self.gridded['levels'].keys():\n continue\n self.gridded['levels'][j] = self.grid_param(self.levels[j],\n self.method)\n self.jmax = max(self.gridded['levels'].keys())\n if self.verbose:\n print('Gridded the first %d energy levels.' % (self.jmax))\n print('Use self.grid_levels() to read in more.\\n')\n return", "def generate(pts):\n cmds.polyCreateFacet(name=\"shirt\", p=points)\n cmds.polyTriangulate()\n cmds.polySubdivideFacet(dv=SUBDIVISIONS)\n cmds.polyTriangulate()", "def construct_poly(data, power):\n return np.power(data, power)", "def grid_levels(self, nlevels):\n for j in np.arange(nlevels):\n if j in self.gridded['levels'].keys():\n continue\n self.gridded['levels'][j] = self.grid_param(self.levels[j])\n self.jmax = max(self.gridded['levels'].keys())\n if self.verbose:\n print('Gridded the first %d energy levels.' 
% (self.jmax))\n print('Use self.grid_levels() to read in more.\\n')\n return", "def poly_level(f):\n if poly_univariate_p(f):\n return 1\n else:\n return 1 + poly_level(poly_LC(f))", "def generatePolygons():", "def buildTree(data, level):\n \n node = maxIG(data)\n subsets = splitBy(data, node[0])\n header = [\"Outlook\", \"Temp\", \"Humidity\", \"Wind\", \"Play\"]\n \n if node[1] == 0:\n print(\"\\t\" * level, level, getColumn(data, node[0])[0], \":\", getColumn(data, -1)[0]) \n elif level < 4:\n print(\"\\t\" * level, level, getColumn(data, level - 1)[0], \"->\", header[node[0]]) \n rec = [buildTree(subset, level + 1) for subset in subsets]\n else:\n print(\"\\t\" * level, level, getColumn(data, level - 1)[0], \":\", getColumn(data, -1))", "def render_pyramid(pyr, levels):\n if len(pyr) < levels:\n levels = len(pyr)\n colRes = 0\n for i in range(levels):\n colRes += pyr[i].shape[1]\n rowRes = pyr[0].shape[0]\n resIm = np.zeros((rowRes,colRes),dtype=np.float32)\n curCol, curRow = 0,0\n for i in range(levels):\n minIm , maxIm = np.min(pyr[i]), np.max(pyr[i])\n pyr[i] = (pyr[i] - minIm) / (maxIm - minIm)\n resIm[curRow : pyr[i].shape[0],curCol:pyr[i].shape[1] + curCol] = pyr[i]\n curCol += pyr[i].shape[1]\n\n return resIm.astype(np.float32)", "def make_steer_frs(dims, numlevels, numorientations, bandwidth):\n \n result = []\n bands=[]\n p = numorientations-1\n const = math.sqrt(float(math.pow(2,(2*p))*math.pow(math.factorial(p),2)) / float(math.factorial(2*p)*(p+1)))\n f1 = freqspace(dims[0])\n f2 = freqspace(dims[1])\n wx, wy = np.meshgrid(f1, f2)\n size = wx.shape\n r = np.sqrt(wx**2 + wy**2)\n theta = np.arctan2(wy, wx) \n \n bands = np.full((numlevels, numorientations, dims[0], dims[1]), const*1j)\n for level in range(numlevels):\n for orientation in range(numorientations):\n theta_offset = orientation * np.pi / numorientations\n ctrfreq = pi / math.pow(2, (level+1)*bandwidth)\n band = np.cos(theta - theta_offset)**p * log_raised_cos(r, ctrfreq, bandwidth)\n bands[level,orientation,:,:] *= band\n \n hi = log_raised_coshi(r, pi / math.pow(2, bandwidth), bandwidth)\n\n lo = log_raised_coslo(r, pi / math.pow(2, bandwidth * numlevels), bandwidth)\n \n result.append(hi)\n result.append(bands)\n result.append(lo)\n return result", "def generate_3d_plots_invue(prefs, data, dir_path, filename, intp_pts, polyh_pts, offset):\n \n # Validating existance of all columns\n for col in prefs:\n if col not in data['map'][0]:\n raise ValueError, 'Column given \"%s\" does not exits in mapping \\\n file' % col\n \n # Split matrix by labelname, groups & give colors\n groups_and_colors=iter_color_groups(data['map'],prefs)\n groups_and_colors=list(groups_and_colors)\n \n smp_lbl, smp_lbl_grp, polypts = make_3d_plots_invue(data, groups_and_colors, \\\n intp_pts, polyh_pts, offset)\n \n # Looping to binning result to write full and binned files\n for lbl in smp_lbl:\n for grp in smp_lbl_grp[lbl]:\n # writting individual files\n ind_path = \"%s/%s_%s_%s.txt\" % (dir_path, filename, lbl, grp)\n smp = smp_lbl_grp[lbl][grp]\n outfile = open(ind_path, 'w')\n outfile.write(format_coords(smp['headrs'], smp['coords'], [], [], False))\n outfile.close()\n # writing full file\n full_path = \"%s/%s_%s.txt\" % (dir_path, filename, lbl)\n outfile = open(full_path, 'w')\n outfile.write (format_coords(smp_lbl[lbl]['headrs'], smp_lbl[lbl]['coords'], \\\n [], [], False))\n outfile.close()\n \n # Writing tetraVertices.txt\n ind_path = \"%s/tetraVertices.txt\" % (dir_path)\n outfile = open(ind_path, 'w')\n 
outfile.write('\\n'.join(['\\t'.join(map(str, row)) for row in polypts]))\n outfile.write('\\n')\n outfile.close()", "def build_poly(x, degree): \n # ***************************************************\n # COPY YOUR CODE FROM EX03 HERE\n # polynomial basis function: TODO\n # this function should return the matrix formed\n # by applying the polynomial basis to the input data\n # ***************************************************\n raise NotImplementedError", "def generate_cuts(depths, side=SIDE_LENGTH):\n for num, den in depths:\n ad = num * side / den\n poly = Polygon([(0, 0), (side, 0), (side, ad), (0, ad)])\n yield poly" ]
[ "0.55394226", "0.5507189", "0.5322405", "0.5317046", "0.5283658", "0.52765316", "0.52758163", "0.526203", "0.5245328", "0.51992226", "0.5163439", "0.5163245", "0.51610357", "0.51115924", "0.5107499", "0.50974375", "0.5083864", "0.50748754", "0.50553256", "0.5054883", "0.5045434", "0.50263304", "0.50242305", "0.501079", "0.50101215", "0.499002", "0.49843866", "0.4937789", "0.49357253", "0.4914645" ]
0.6119926
0
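A minimal usage sketch for the create_synthetic_irt_polytomous snippet in the row above, under the graded response model. The helpers it dispatches to (_graded_func and friends) are referenced but not shown in this row, so this assumes they behave as the snippet implies; the thresholds, discrimination, and sample size are illustrative.

import numpy as np

rng = np.random.default_rng(1)
# 5 items with 4 ordered thresholds each -> 5 response levels per item
difficulty = np.sort(rng.uniform(-2.0, 2.0, (5, 4)), axis=1)  # GRM wants ordered thresholds
discrimination = 1.2                 # a scalar is broadcast to every item
thetas = rng.standard_normal(1000)   # unidimensional abilities, one per subject

levels = create_synthetic_irt_polytomous(difficulty, discrimination, thetas,
                                         model='grm', seed=7)
print(levels.shape)                  # (5, 1000); responses fall in [1, number of levels]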
Returns the list of overhead satellite objects within the given search window (a lat/lng point and radius)
def get_overhead_satellites_dicts(lat, lng, radius): overhead_satellites_dict = [] for sat in satellites: try: sat_dict = sat.serialize_if_is_in_radius(lat, lng, radius) except Exception: continue if sat_dict: overhead_satellites_dict.append(sat_dict) return overhead_satellites_dict
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getweigths():\n ls = []\n for i_lay in range(1, len(layers)):\n ls.append(layers[i_lay][\"weigths\"])\n return ls", "def get_stamp_windows(self):\n early_window = self.get_earliest_stamp_window()\n late_window = self.get_latest_stamp_window()\n window_delta = late_window[0] - early_window[0]\n ms_per_metric_window = 86400000\n # Given 12 hour windows, get the number of iterations between them\n iters = window_delta / ms_per_metric_window\n windows = []\n for i in range(0, iters):\n windows.append([early_window[0]+i*ms_per_metric_window, early_window[1]+i*ms_per_metric_window])\n return windows", "def get_Delta_weigths():\n ls = []\n for i_lay in range(1, len(layers)):\n ls.append(layers[i_lay][\"Delta_w\"])\n return ls", "def get_visible_objects(lat, lon):\n visible = []\n observer = ephem.Observer()\n observer.lat = str(lat)\n observer.lon = str(lon)\n for object_class in AstroObject.INTERESTING_OBJECTS:\n obj = object_class()\n obj.compute(observer)\n if obj.alt >= MIN_ALT:\n visible.append(AstroObject(obj, observer))\n return visible", "def get_train_windows(self, scene: Scene) -> List[Box]:\n\n def filter_windows(windows):\n if scene.aoi_polygons:\n windows = Box.filter_by_aoi(windows, scene.aoi_polygons)\n return windows\n\n raster_source = scene.raster_source\n extent = raster_source.get_extent()\n label_store = scene.ground_truth_label_source\n chip_size = self.config.chip_size\n\n chip_options = self.config.chip_options\n\n if chip_options.window_method == 'random_sample':\n return get_random_sample_train_windows(\n label_store, chip_size, self.config.class_map, extent,\n chip_options, filter_windows)\n elif chip_options.window_method == 'sliding':\n stride = chip_options.stride\n if stride is None:\n stride = chip_size / 2\n\n return list(\n filter_windows((extent.get_windows(chip_size, stride))))", "def _getBrailleRegionsForWindow(self, obj):\n\n self._debugGenerator(\"_getBrailleRegionsForWindow\", obj)\n\n return self._getDefaultBrailleRegions(obj)", "def surfaces(self):\n surfaces = []\n for i in range(1000):\n surface = self.surfaceInfo(i)\n if surface is not None:\n surfaces.append(surface)\n else:\n break\n\n return surfaces", "def get_all_windows(self):\n success, result = self.manager.c.eval(\n textwrap.dedent(\n \"\"\"\n [win.wid for win in self.core.mapped_windows]\n \"\"\"\n )\n )\n assert success\n return eval(result)", "def get_train_windows(self, scene: Scene) -> List[Box]:\n raise NotImplementedError()", "def getItems(self):\n return self.getCutPlanes() + self.getIsosurfaces()", "def get_train_windows(self, scene):\n pass", "def get_power_windows(self):\n return WindowSet([window for window in self.windows\n if window.source_type in {'wind', 'solar'} and\n not window.is_deterministic])", "def get_absolute_window_words(self, pano_windows, window):\n words = []\n im, scale_w, scale_h, window_w, window_h = self.__resize(window.im)\n with torch.no_grad():\n # char_bboxes, char_scores, word_instances = ...\n _, _, word_instances = self.charnet(im, scale_w, scale_h, window_w, window_h)\n\n for word in word_instances:\n # To combat google's watermark of street-view messing with the words\n if word.text == 'GOOGLE':\n continue\n old_word_bbox = word.word_bbox.copy()\n # update absolute position\n word.word_bbox[::2] = [x_coord + window.pos_x for x_coord in word.word_bbox[::2]]\n word.word_bbox[1::2] = [y_coord + window.pos_y for y_coord in word.word_bbox[1::2]]\n word_abs = word\n # open a new window for near-border words\n if 
self.__word_is_near_border(old_word_bbox, 50, window_w, window_h):\n zoom_w = pano_windows.get_window_at_pos(word.word_bbox[0], word.word_bbox[1], 50)\n z_im, z_scale_w, z_scale_h, z_window_w, z_window_h = self.__resize(zoom_w.im)\n with torch.no_grad():\n _, _, z_word_instances = self.charnet(z_im, z_scale_w, z_scale_h,\n z_window_w, z_window_h)\n\n for z_word in z_word_instances: # Swap only the word that intersects\n z_word.word_bbox[::2] = [x_coord + zoom_w.pos_x for\n x_coord in z_word.word_bbox[::2]]\n z_word.word_bbox[1::2] = [y_coord + zoom_w.pos_y for\n y_coord in z_word.word_bbox[1::2]]\n if self._do_words_intersect(word, z_word):\n word_abs = z_word # save only the new word from the window\n break\n\n words.append(word_abs)\n return words", "def get_aux_windows(self):\n return self.aux_windows.keys()", "def list_windows(cls):\n\n\t\tif cls._interface:\n\t\t\treturn tuple(cls(window) for window in cls._interface.list_windows())\n\t\telse:\n\t\t\traise NotImplementedError('Unsupported platform')", "def extract_statistics_from_windows(data: Data_dict_type) -> Data_dict_type:\n for key, item in data.items():\n values, sample_rate = item\n functionals = []\n for window_idx in range(values.shape[0]):\n window_functionals = extract_statistics_from_2d_window(values[window_idx])\n functionals.append(window_functionals[np.newaxis, ...])\n functionals = np.concatenate(functionals, axis=0)\n # squeeze last dimension\n if functionals.shape[-1] == 1:\n functionals = functionals.reshape(functionals.shape[:-1])\n data[key] = (functionals, sample_rate)\n return data", "def wires(self):\n top_exp = TopologyUtils.TopologyExplorer(self.topods_shape(), ignore_orientation=False)\n return map(Wire, top_exp.wires())", "def exposuretimes(self):\n all = self.allexposuretimes\n return [all[layer-1] for layer in self.__layers]", "def n_closest_waters(coordinates, atom, oxygens, n):\n\n waters = []\n for i in range(n):\n index = find_closest_atom(atom, oxygens)\n closest_oxygen = oxygens[index]\n if closest_oxygen in coordinates:\n oxygen_index = coordinates.index(closest_oxygen)\n OT = coordinates[oxygen_index]\n HT1 = coordinates[oxygen_index+1]\n HT2 = coordinates[oxygen_index+2]\n water = [OT, HT1, HT2]\n waters.append(water)\n oxygens = remove_atom(oxygens, index)\n return waters", "def info_towers_get():\n session = info_map.Session()\n\n q = session.query(info_map.Tower)\n q_towers = q.all()\n towers = []\n for q_tower in q_towers:\n tower = TowerInfo(\n type=q_tower.type,\n fuel_bay=q_tower.fuel_bay,\n stront_bay=q_tower.stront_bay,\n name=q_tower.name,\n storage_mult=q_tower.storage_mult,\n cpu=q_tower.cpu,\n powergrid=q_tower.powergrid,\n fuel_usage=q_tower.fuel_usage,\n stront_usage=q_tower.stront_usage,\n fuel_type=q_tower.fuel_type)\n\n towers.append(tower)\n\n return towers, 200", "def extract_windows(\n spectrograms: numba.typed.List[np.ndarray], wlength: int\n) -> Tuple[numba.typed.List[np.ndarray], List[int]]:\n windows = numba.typed.List()\n n_windows = []\n for spec in spectrograms:\n for i, w in enumerate(window(spec, wlength)):\n windows.append(w)\n n_windows.append(i + 1)\n return windows, n_windows", "def GetAllLinesOfPathWithinTimeWindow(PathInfo, TWindow):\r\n\tLineSet = set()\r\n\tfor ConnectionInfo in PathInfo:\r\n\t\tLineID = ConnectionInfo[ConnInfoInd['line_id']]\r\n\t\tDepartureTime = 60* ConnectionInfo[ConnInfoInd['departure_hour']] + ConnectionInfo[ConnInfoInd['departure_min']]\r\n\t\t\r\n\t\tif DepartureTime >= TWindow[0] and DepartureTime <= 
TWindow[1]:\r\n\t\t\tLineSet.add(LineID)\r\n\t\r\n\treturn LineSet", "def get_trajectories(self):\n use_timesteps = self.config[\"batch_update\"] == \"timesteps\"\n trajectories = []\n timesteps_total = 0\n i = 0\n while (use_timesteps and timesteps_total < self.config[\"timesteps_per_batch\"]) or (not(use_timesteps) and i < self.config[\"trajectories_per_batch\"]):\n i += 1\n trajectory = self.get_trajectory()\n trajectories.append(trajectory)\n timesteps_total += len(trajectory[\"reward\"])\n return trajectories", "def get_layers(self):\n layers = []\n\n for s in self.surfaces:\n n = self.miller_to_direction(s)\n r = np.dot(self.get_positions() - self.center, n).max()\n d = self.get_layer_distance(s, 2)\n l = 2 * np.round(r / d).astype(int)\n\n ls = np.arange(l-1,l+2)\n ds = np.array([self.get_layer_distance(s, i) for i in ls])\n\n mask = (np.abs(ds - r) < 1e-10)\n\n layers.append(ls[mask][0])\n\n return np.array(layers, int)", "def get_list_of_tracers_for_wsp(self):\n sacc_file = self.io.get_sacc_file()\n tracers = sacc_file.get_tracer_combinations()\n\n fnames = []\n tracers_out = []\n for i, trs1 in enumerate(tracers):\n s1, s2 = self.get_tracer_comb_spin(trs1)\n mn1, mn2 = [self.mask_names[tri] for tri in trs1]\n\n for trs2 in tracers[i:]:\n s3, s4 = self.get_tracer_comb_spin(trs2)\n mn3, mn4 = [self.mask_names[tri] for tri in trs2]\n\n fname1 = f\"w{s1}{s2}__{mn1}__{mn2}.fits\"\n fname2 = f\"w{s3}{s4}__{mn3}__{mn4}.fits\"\n\n if (fname1 in fnames) or (fname2 in fnames):\n continue\n\n fnames.append(fname1)\n fnames.append(fname2)\n\n tracers_out.append((trs1, trs2))\n\n return tracers_out", "def refresh_windows(self):\n for window_class, windows in self.extra_windows.items():\n obj = 'lord' if window_class is Nobleman else 'location'\n func = eval(f'self.manager.get_{obj}_of_id')\n for id, window in windows.items():\n instance = func(id)\n self.destroy_children_widgets(window)\n self.generate_window_content(instance, window)\n self.update_widgets_values()", "def get_image_corners (wcs):\n return list(wcs.calc_footprint())", "def sample_times():\n\tthe_times = []\n\tday = config.window_start_date\n\twhile day <= config.window_end_date:\n\t\t# times from start of window on day to end of window \n\t\ttime = config.tz.localize( datetime.combine( \n\t\t\tday, config.window_start_time \n\t\t) )\n\t\tend_time = config.tz.localize( datetime.combine( \n\t\t\tday, config.window_end_time \n\t\t) )\n\t\twhile time < end_time: # While still in the time window\n\t\t\tthe_times.append( time )\n\t\t\ttime += timedelta(minutes=1)\n\t\tday += timedelta(days=1)\n\treturn the_times", "def thermostats(self):\n\n return self._thermostats", "def get_tecogan_monitors(monitor):\n monitor_vgg_loss = MonitorSeries(\n 'vgg loss', monitor, interval=20)\n monitor_pp_loss = MonitorSeries(\n 'ping pong', monitor, interval=20)\n monitor_sum_layer_loss = MonitorSeries(\n 'd layer loss', monitor, interval=20)\n monitor_adv_loss = MonitorSeries(\n 'adversarial loss', monitor, interval=20)\n monitor_disc_loss = MonitorSeries(\n 'discriminator loss', monitor, interval=20)\n monitor_tb = MonitorSeries(\n 'tb', monitor, interval=20)\n Monitor_tecogan = collections.namedtuple('Monitor_tecogan',\n ['monitor_vgg_loss', 'monitor_pp_loss', 'monitor_sum_layer_loss',\n 'monitor_adv_loss', 'monitor_disc_loss', 'monitor_tb'])\n return Monitor_tecogan(monitor_vgg_loss, monitor_pp_loss, monitor_sum_layer_loss, monitor_adv_loss, monitor_disc_loss, monitor_tb)" ]
[ "0.5603363", "0.55234873", "0.5378708", "0.5343043", "0.53272885", "0.52247155", "0.5222112", "0.5221188", "0.5184873", "0.51602006", "0.515069", "0.51086634", "0.5056692", "0.5036433", "0.5034132", "0.502715", "0.49924698", "0.4959075", "0.4943926", "0.49367803", "0.49186295", "0.48809975", "0.4875927", "0.48426056", "0.483682", "0.48321125", "0.48147154", "0.48129869", "0.48110077", "0.48061284" ]
0.61297077
0
Returns satellites of a certain constellation
def get_constellation(startswith): constellation = [] for sat in satellites: if sat.name.startswith(startswith): serialized = sat.serialize() if serialized: constellation.append(serialized) return constellation
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def stations():\n\n return station_list", "def get_all_stations(session: Session) -> List[Row]:\n return session.query(PlanningWeatherStation.station_code).all()", "def getStations(self) :\n return self._stations", "def get_stations(self):\n return self.__request('stations')['stations']", "def get_stations(base_url, hts, mtype):\n stns1 = ws.site_list(base_url, hts, location='LatLong') # There's a problem with Hilltop that requires running the site list without a measurement first...\n stns1 = ws.site_list(base_url, hts, location='LatLong', measurement=mtype)\n stns2 = stns1[(stns1.lat > -47.5) & (stns1.lat < -34) & (stns1.lon > 166) & (stns1.lon < 179)].dropna().copy()\n stns2.rename(columns={'SiteName': 'ref'}, inplace=True)\n\n return stns2", "def station_list() -> List[Dict]:\n return STATIONS", "def stations(self):\n stations = []\n f = self._fetch(Citibike.STATION_URL)\n data = json.load(f)\n if 'stationBeanList' not in data or len(data['stationBeanList']) == 0:\n raise BadResponse('Station Fetch Failed', data)\n for station in data['stationBeanList']:\n stations.append(Station._from_json(station))\n logging.debug(\"Retrieved %d stations\" % len(stations))\n return stations", "async def _get_stations_remote() -> List[WeatherStation]:\n LOGGER.info('Using WFWX to retrieve station list')\n async with ClientSession() as session:\n # Get the authentication header\n header = await _get_auth_header(session)\n stations = []\n # Iterate through \"raw\" station data.\n async for raw_station in _fetch_raw_stations(session, header, BuildQueryAllStations()):\n # If the station is valid, add it to our list of stations.\n if _is_station_valid(raw_station):\n LOGGER.info('Processing raw_station %d',\n int(raw_station['stationCode']))\n stations.append(_parse_station(raw_station))\n LOGGER.debug('total stations: %d', len(stations))\n return stations", "def sentientPlanets():\n planet_list = []\n url = 'https://swapi-api.hbtn.io/api/species'\n while url is not None:\n data = requests.get(url).json()\n for species in data['results']:\n if ((species['designation'] == 'sentient'\n or species['designation'] == 'reptilian')):\n if species['homeworld'] is not None:\n hw = requests.get(species['homeworld']).json()\n planet_list.append(hw['name'])\n url = data['next']\n return planet_list", "async def get_stations() -> List[WeatherStation]:\n # Check if we're really using the api, or loading from pre-generated files.\n use_wfwx = config.get('USE_WFWX') == 'True'\n if use_wfwx:\n return await _get_stations_remote()\n return _get_stations_local()", "def icos_stations(*args):\n\n if len(args) != 4:\n filterstr = \" \"\n else:\n filterstr = \"\"\"\n filter(\n ?lat >= %s && ?lat <= %s &&\n ?lon >= %s && ?lon <= %s).\"\"\" % (args)\n\n\n query = \"\"\"\n PREFIX cpst: <http://meta.icos-cp.eu/ontologies/stationentry/>\n SELECT\n (IF(bound(?lat), str(?lat), \"?\") AS ?latstr)\n (IF(bound(?lon), str(?lon), \"?\") AS ?lonstr)\n (REPLACE(str(?class),\"http://meta.icos-cp.eu/ontologies/stationentry/\", \"\") AS ?themeShort)\n (str(?country) AS ?Country)\n (str(?sName) AS ?Short_name)\n (str(?lName) AS ?Long_name)\n (GROUP_CONCAT(?piLname; separator=\";\") AS ?PI_names)\n (str(?siteType) AS ?Site_type)\n FROM <http://meta.icos-cp.eu/resources/stationentry/>\n WHERE {\n ?s cpst:hasCountry ?country .\n ?s cpst:hasShortName ?sName .\n ?s cpst:hasLongName ?lName .\n ?s cpst:hasSiteType ?siteType .\n ?s cpst:hasPi ?pi .\n ?pi cpst:hasLastName ?piLname .\n ?s a ?class .\n OPTIONAL{?s cpst:hasLat ?lat } .\n OPTIONAL{?s 
cpst:hasLon ?lon } .\n OPTIONAL{?s cpst:hasSpatialReference ?spatRef } .\n OPTIONAL{?pi cpst:hasFirstName ?piFname } .\n %s\n }\n GROUP BY ?lat ?lon ?class ?country ?sName ?lName ?siteType\n ORDER BY ?themeShort ?sName\n \"\"\" %filterstr\n\n return query", "def constellations(self):\n # Create the constellation group.\n constellation_g = self.make_element(self.centered, 'g', (\n 'stroke-width', '.5'), ('stroke-dasharray', '3,3'), (\n 'clip-path', 'url(#innerClipPath)'))\n padding = 2\n for name, hips in constellation.constellations:\n for hip1, hip2 in hips:\n star1, star2 = self.hip_stars[hip1], self.hip_stars[hip2]\n if self.visible(hip1) or self.visible(hip2):\n r1, r2 = self.starsize(hip1), self.starsize(hip2)\n x1, y1 = self.radec2xy(star1[2], star1[3])\n x2, y2 = self.radec2xy(star2[2], star2[3])\n dx, dy = x2-x1, y2-y1\n dd = math.sqrt(dx*dx+dy*dy)\n if dd: dx, dy = dx/dd, dy/dd\n x1 += dx*(r1+padding)\n y1 += dy*(r1+padding)\n x2 -= dx*(r2+padding)\n y2 -= dy*(r2+padding)\n line = self.make_element(constellation_g, 'line', (\n 'x1',x1), ('y1',y1), ('x2',x2), ('y2',y2))", "async def get_stations_by_codes(station_codes: List[int]) -> List[WeatherStation]:\n use_wfwx = config.get('USE_WFWX') == 'True'\n if use_wfwx:\n return await _get_stations_by_codes_remote(station_codes)\n return _get_stations_by_codes_local(station_codes)", "def constellation(self):\n return self._constellation", "def train_stations(self) -> List[str]:\n return sorted([train_info['HE'] for train_info in train_api.stations_info.values()])", "def station_from_lat_lon(lat, lon, stations, n_nearest=3):\n lat, lon = float(lat), float(lon)\n distances = [(distance(lat, lon, st['lat'], st['lon']), st)\n for st in stations\n if (st['is_renting'] and st['is_installed'])]\n distances = sorted(distances)\n return [pair[1] for pair in distances[:n_nearest]]", "def get_stations():\n response = requests.get('https://api.hh.ru/metro/160')\n todos = json.loads(response.text)\n colors = {'CD0505': 'red'}\n all_stations_one_line = []\n\n for i in todos['lines']:\n all_stations_one_line = []\n\n for j in i['stations']:\n one_station = station.station()\n one_station.set_name(j['name'])\n one_station.set_color(colors.get(i['hex_color']))\n one_station.set_lat(j['lat'])\n one_station.set_lng(j['lng'])\n all_stations_one_line.append(one_station)\n return all_stations_one_line", "def station_list(request):\n center=request.session.__getitem__('center')\n ctrs = connection.Station.find({'cn': center.__unicode__()})\n return render(request, 'list_station.html',\n {'ctrs': ctrs}, content_type=\"text/html\")", "def get_station_boroughs(self):\\", "def list_stations(intent, session):\n stations = location.get_stations(config.bikes_api)\n street_name = intent['slots']['street_name']['value']\n possible = location.matching_station_list(stations,\n street_name,\n exact=True)\n street_name = street_name.capitalize()\n\n if len(possible) == 0:\n return reply.build(\"I didn't find any stations on %s.\" % street_name,\n is_end=True)\n elif len(possible) == 1:\n sta_name = location.text_to_speech(possible[0]['name'])\n return reply.build(\"There's only one: the %s \"\n \"station.\" % sta_name,\n card_title=(\"%s Stations on %s\" %\n (config.network_name, street_name)),\n card_text=(\"One station on %s: %s\" %\n (street_name, possible[0]['name'])),\n is_end=True)\n else:\n last_name = location.text_to_speech(possible[-1]['name'])\n speech = \"There are %d stations on %s: \" % (len(possible),\n street_name)\n speech += (', 
'.join([location.text_to_speech(p['name'])\n for p in possible[:-1]]) +\n ', and %s' % last_name)\n card_text = (\"The following %d stations are on %s:\\n%s\" %\n (len(possible), street_name,\n '\\n'.join(p['name'] for p in possible)))\n return reply.build(speech,\n card_title=(\"%s Stations on %s\" %\n (config.network_name, street_name)),\n card_text=card_text,\n is_end=True)", "def get_ships(self, time):\n\n # Format URL\n basin_dict = {\n 'north_atlantic':'northatlantic',\n 'east_pacific':'northeastpacific',\n 'west_pacific':'northwestpacific',\n 'north_indian':'northindian'\n }\n basin_name = basin_dict.get(self.basin,'southernhemisphere')\n url = f'http://hurricanes.ral.ucar.edu/realtime/plots/{basin_name}/{self.year}/{self.id.lower()}/stext/'\n url += f'{time.strftime(\"%y%m%d%H\")}{self.id[:-4]}{self.id[-2:]}_ships.txt'\n\n # Fetch SHIPS content\n try:\n content = read_url(url, split=False, subsplit=False)\n if len(content) < 10:\n warnings.warn('Improper SHIPS entry for this time. Returning a value of None.')\n return None\n except:\n raise ValueError('SHIPS data is unavailable for the requested storm or time.')\n\n return Ships(content)", "def get_all_stations(engine): \n # Query db\n sql = (\"SELECT DISTINCT a.station_id, \"\n \" a.station_code, \"\n \" a.station_name, \"\n \" c.station_type, \"\n \" d.latitude, \"\n \" d.longitude \"\n \"FROM nivadatabase.projects_stations a, \"\n \" nivadatabase.stations b, \"\n \" nivadatabase.station_types c, \"\n \" niva_geometry.sample_points d \"\n \"WHERE a.station_id = b.station_id \"\n \"AND b.station_type_id = c.station_type_id \"\n \"AND b.geom_ref_id = d.sample_point_id \"\n \"ORDER BY a.station_id\")\n df = pd.read_sql(sql, engine)\n\n return df", "def _get_goes_sat_num(start, end):\n goes_operational = {\n 2: TimeRange('1980-01-04', '1983-05-01'),\n 5: TimeRange('1983-05-02', '1984-08-01'),\n 6: TimeRange('1983-06-01', '1994-08-19'),\n 7: TimeRange('1994-01-01', '1996-08-14'),\n 8: TimeRange('1996-03-21', '2003-06-19'),\n 9: TimeRange('1997-01-01', '1998-09-09'),\n 10: TimeRange('1998-07-10', '2009-12-02'),\n 11: TimeRange('2006-06-20', '2008-02-16'),\n 12: TimeRange('2002-12-13', '2007-05-09'),\n 13: TimeRange('2006-08-01', '2006-08-01'),\n 14: TimeRange('2009-12-02', '2010-11-05'),\n 15: TimeRange('2010-09-01', Time.now()),\n }\n\n sat_list = []\n for sat_num in goes_operational:\n if (goes_operational[sat_num].start <= start <= goes_operational[sat_num].end and\n goes_operational[sat_num].start <= end <= goes_operational[sat_num].end):\n # if true then the satellite with sat_num is available\n sat_list.append(sat_num)\n\n if not sat_list:\n # if no satellites were found then raise an exception\n raise Exception('No operational GOES satellites within time range')\n else:\n return sat_list", "def get_seis(self, seis_name, well_name, radius=0):\n radius = int(radius)\n if well_name in self.inl_crl.keys():\n loc = self.inl_crl[well_name]\n if radius == 0:\n data = self.seismics[seis_name].cdp(loc)\n return [loc], [data]\n else:\n inlines, crlines = self._get_traces(seis_name, radius, loc)\n loc = list()\n data = list()\n for inl, crl in product(inlines, crlines):\n loc.append((inl, crl))\n data.append(self.seismics[seis_name].cdp((inl, crl)))\n return loc, data\n else:\n print(\"Well not found!\")\n return []", "def get_sandwichers(self):\n # pairs = [(square - 1, square + 1), (square - game[\"row_width\"], square + game[\"row_width\"])]\n results = []\n neighbors = self.get_neighbors()\n pairs = ((neighbors[0], neighbors[2]), 
(neighbors[1], neighbors[3]))\n for pair in pairs:\n if self.owner != pair[0].owner and pair[0].occupied and pair[0].owner == pair[1].owner \\\n and (not self.game.rules[\"trapping\"] or True not in [pair[0].trapped, pair[1].trapped]):\n results.extend([*pair])\n return results", "def get_station_entrances(self):\n station_entrances = []\n for wrapper in self.soup.find_all(\"div\", {\"class\": \"stop-wrapper\"}):\n text = wrapper.find(\"span\").text\n if text == '' or text is None:\n entrance = ''\n else:\n entrance = text.split(',')[0].lstrip().rstrip()\n station_entrances.append(entrance)\n return np.array(station_entrances).T", "def getSymmetricIdenticalsSixth(self, locClass=None):\n if not locClass:\n locClass = HexLocation\n if self.ring == 1:\n # nothing symmetric in the center.\n return []\n numInRing = (self.ring - 1) * 6\n locs = []\n pos = self.pos\n for others_ in range(5):\n pos += self.ring - 1\n if pos > numInRing:\n pos -= numInRing\n locs.append(locClass(self.ring, pos, self.axial))\n return locs", "def _get_ogd_stations():\n return {r[\"Station\"] for r in ZamgData.current_observations()}", "def all_stations(self, provider: ID) -> List[StationInfo]:\n srv_key = self.__stations_key(provider=provider)\n value = self.get(name=srv_key)\n if value is None:\n return []\n js = utf8_decode(data=value)\n array = json_decode(string=js)\n return StationInfo.convert(array=array)", "def sentientPlanets():\n\n url = \"https://swapi-api.hbtn.io/api/species/\"\n planets = []\n while url is not None:\n r = requests.get(url)\n results = r.json()[\"results\"]\n for specie in results:\n if (specie[\"designation\"] == \"sentient\" or\n specie[\"classification\"] == \"sentient\"):\n\n planet_url = specie[\"homeworld\"]\n if planet_url is not None:\n p = requests.get(planet_url).json()\n planets.append(p[\"name\"])\n url = r.json()[\"next\"]\n return planets" ]
[ "0.635593", "0.61479795", "0.6025066", "0.5984918", "0.57712275", "0.5671119", "0.5607439", "0.5596467", "0.55761504", "0.5566055", "0.55554676", "0.5551738", "0.55312026", "0.55017", "0.5489086", "0.545151", "0.54460144", "0.5422619", "0.5419575", "0.5411057", "0.5371812", "0.53491837", "0.5342885", "0.53340346", "0.53081584", "0.52983063", "0.5295759", "0.528647", "0.52846617", "0.5257321" ]
0.7290711
0
Return the TLE from sat_id
def get_tle(sat_id): for sat in satellites: if sat.name == sat_id: return sat.tle return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_tier(pkmn_id):\n if pkmn_id in tiers.TIERS[\"0\"]:\n return 0\n elif pkmn_id in tiers.TIERS[\"1\"]:\n return 1\n elif pkmn_id in tiers.TIERS[\"2\"]:\n return 2\n elif pkmn_id in tiers.TIERS[\"3\"]:\n return 3\n else:\n return 4", "def _get_tier_color(pkmn_id):\n if pkmn_id in tiers.TIERS[\"0\"]:\n return 0x000000\n elif pkmn_id in tiers.TIERS[\"1\"]:\n return 0xB80800\n elif pkmn_id in tiers.TIERS[\"2\"]:\n return 0x0009C4\n elif pkmn_id in tiers.TIERS[\"3\"]:\n return 0xF7AB09\n else:\n return 0x9909F7", "def get_spacecraft_tle(spacecraft_id):\n sc = segment_models.Spacecraft.objects.get(identifier=spacecraft_id)\n return tle_serializers.TleSerializer.serialize_tle(sc)", "def _get_slt_entry(self, entry):\n # Slt ptr is the second word in the SLT region (first is fingerprint)\n # Note how we deliberately don't use any debug information here (e.g.\n # finding the location of $_audio_slt_table).\n if Arch.addr_per_word == 4:\n sltptr_addr = Arch.pRegions['SLT'][0] + Arch.addr_per_word\n slt_entry_addr = self.get_data_pm(sltptr_addr) # index 0 of slt\n else:\n sltptr_addr = Arch.dRegions['SLT'][0] + Arch.addr_per_word\n slt_entry_addr = self.get_data(sltptr_addr) # index 0 of slt\n # Run through the slt looking for the entry we want, if we can't find\n # the one we're looking for maybe it's not in the list\n while entry > self.get_data(slt_entry_addr):\n slt_entry_addr += 2 * Arch.addr_per_word\n\n if entry == self.get_data(slt_entry_addr):\n return self.get_data(slt_entry_addr + Arch.addr_per_word)\n\n return None", "def get_slt(self):\n try:\n slt_ptr = self.chipdata.get_var_strict('$_audio_slt_table').address\n except ct.DebugInfoNoVariable:\n try:\n # old implementation\n slt_ptr = self.chipdata.get_var_strict(\n '$_audio_slut_table'\n ).address\n except ct.DebugInfoNoVariable:\n return None\n\n # If we read 20 words without stopping, something went wrong!\n failsafe = slt_ptr + 20 * Arch.addr_per_word\n slt_table = []\n\n while slt_ptr < failsafe:\n slt_key = self.debuginfo.get_dm_const(slt_ptr, 0)\n slt_value = self.debuginfo.get_dm_const(\n slt_ptr + Arch.addr_per_word,\n 0\n )\n slt_table.append((slt_key, slt_value))\n slt_ptr += 2 * Arch.addr_per_word\n\n if slt_key == 0:\n break\n\n return tuple(slt_table)", "def get(self, id_code):\r\n return self.fetch('getLegislators', id=id_code)['legislator']", "def get_number_toroll(self, state):\n if CardEnum.TrainStation in state.Deck.keys() and state.Deck[CardEnum.TrainStation] > 0:\n return 2\n\n return 1", "def ksat_func(ThS, Th33, lbd):\n\n # assert ThS > Th33, \"sat <= fc IN ksat_func\"\n ksat = 1930 * (ThS - Th33) ** (3 - lbd)\n return ksat", "def ttd_l_func(self):\n i2 = self.inl[1].to_flow()\n o1 = self.outl[0].to_flow()\n return (self.ttd_l.val - T_mix_ph(o1, T0=self.outl[0].T.val_SI) +\n T_mix_ph(i2, T0=self.inl[1].T.val_SI))", "def sat_step(tests):\n prop = ('\\\\program -> '\n 'compute_program(program:[26][lginsn], '\n 'CRC32_oracle, [CRC32_to_state(test) | test <- ? 
])')\n res = crc32.sat(prop, tests, prover=cryptol.Provers.ABC)\n if res.has_assignment():\n return res.get_assignment()[0]\n else:\n return None", "def teid(self):\n return self._teid", "def tlwh_nei(self):\n if self.cur_neighbor is None:\n return None\n else:\n box = self.cur_neighbor['tlbr'] # [neighbor_k, 4]\n if box is not None:\n if isinstance(box, torch.Tensor):\n box = box.to(torch.device('cpu')).numpy()\n box[:, 2:4] = box[:, 2:4] - box[:, 0:2] + 1\n return box", "def get_traffic_light_state(self, landmark_id):\n if landmark_id not in self._tls:\n return None\n return self._tls[landmark_id].state", "def coupling_id(self, leg):\n for i, x in enumerate(self.coupling):\n for j, (y, _) in enumerate(x):\n if y == leg:\n return i, j\n return None", "def diffuse_transmittance(TL = TL_default):\n return ((-21.657) + (41.752 * (TL)) + (0.51905 * (TL) * (TL)))", "def get_station_configuration_l(station_id, station_configuration):\n \n si = np.string_(station_id) \n \n if b':' in si:\n #si = bytes( (si.split(':')[1] , 'utf-8') )\n station_id_secondary = b'0-20000-0-' +si # remove the prefix to the station id \n else:\n for iid in b'0',b'1':\n \n station_id_primary = b'0-2000'+iid+b'-0-' +si # remove the prefix to the station id \n matching_primary_ind = np.where(station_configuration['primary_id'] == station_id_primary)[0]\n stat_conf_retrieved = station_configuration.loc[matching_primary_ind]\n if len(stat_conf_retrieved) > 0:\n return stat_conf_retrieved\n else:\n station_id_secondary=station_id_primary\n\n secondary = station_configuration['secondary_id'] \n loc=0\n for s in secondary:\n s = np.bytes_(str(s))\n try: \n if b'[' in s:\n st = s.replace(b'[',b'').replace(b']',b'')\n stl=st.split(b',')\n for st in stl:\n if si==st:\n #return loc # OLD wrong \n return station_configuration.loc[loc] # new version, might not work ???\n else:\n if si==s: \n return station_configuration.loc[loc] # new version, might not work ???\n except MemoryError:\n return 0\n loc=loc+1\n return 0", "def get_leadtime(self, state, sku):\n try:\n rank = self.ranking[sku][state]\n if rank == 'TOP 10':\n return 0\n elif rank == 'MIDDLE 50':\n return 1\n else:\n return 2\n except:\n return 2", "def get_tithi(jd, ayanamsha_id=swe.SIDM_LAHIRI):\n\n return get_angam(jd, TITHI, ayanamsha_id=ayanamsha_id)", "def station_by_id(self, id):\n for station in self.stations:\n if id == station.stationid:\n return station\n\n print(str.format(\"Could not find station with '{0}'\",str(id)))", "def get_station(station_id):\n return STATIONS.station_details_for(station_id)", "def transits(self, sat):\n return predict.transits(sat.tle, self.predict_qth)", "def get_lower_long_from_trace_id(trace_id):\n lower_bytes = trace_id[16:]\n lower_long = int(lower_bytes, 16)\n\n return lower_long", "def TL_from_T(T):\n return -20*np.log10(np.abs(T))", "def LT(f):\n return dmp_ground_LT(f.rep, f.lev, f.dom)", "def get_thermostat_target_temp(self) -> int:\n hex_temp = hexlify(self.message[139:140]).decode()\n return int(hex_temp, 16)", "def get_lz_by_id(self, lz_id):\n return self._lz_service.get_lz_by_id(lz_id)", "def get_tile_radec(tileid):\n tiles = io.load_tiles()\n if tileid in tiles['TILEID']:\n i = np.where(tiles['TILEID'] == tileid)[0][0]\n return tiles[i]['RA'], tiles[i]['DEC']\n else:\n return (0.0, 0.0)", "def get_forecast_variable(gt_id):\n if gt_id.endswith(\"tmp2m\"):\n return \"tmp2m\"\n if gt_id.endswith(\"precip\"):\n return \"prate\"\n raise ValueError(\"Unrecognized gt_id \"+gt_id)", "def read_tsl2561(self):\n try:\n # Create 
the I2C bus\n i2c = busio.I2C(board.SCL, board.SDA)\n # Create the TSL2561 instance, passing in the I2C bus\n tsl = adafruit_tsl2561.TSL2561(i2c)\n # Print chip info\n print(\"Chip ID = {}\".format(tsl.chip_id))\n print(\"Enabled = {}\".format(tsl.enabled))\n print(\"Gain = {}\".format(tsl.gain))\n print(\"Integration time = {}\".format(tsl.integration_time))\n print(\"Configuring TSL2561...\")\n print(\"Configuring TSL2561...\")\n # Enable the light sensor\n tsl.enabled = True\n time.sleep(1)\n # Set gain 0=1x, 1=16x\n tsl.gain = 0\n # Set integration time (0=13.7ms, 1=101ms, 2=402ms, or 3=manual)\n tsl.integration_time = 1\n # print(\"Getting readings...\")\n print(\"Getting readings....\")\n # Get raw (luminosity) readings individually\n broadband = tsl.broadband\n infrared = tsl.infrared\n # Get raw (luminosity) readings using tuple unpacking\n # broadband, infrared = tsl.luminosity\n # Get computed lux value (tsl.lux can return None or a float)\n lux = tsl.lux\n # Print results\n # print(\"Enabled = {}\".format(tsl.enabled))\n print(\"Enabled = {}\".format(tsl.enabled))\n # print(\"Gain = {}\".format(tsl.gain))\n print(\"Gain = {}\".format(tsl.gain))\n # print(\"Integration time = {}\".format(tsl.integration_time))\n print(\"Integration time = {}\".format(tsl.integration_time))\n # print(\"Broadband = {}\".format(broadband))\n print(\"Broadband = {}\".format(broadband))\n # print(\"Infrared = {}\".format(infrared))\n print(\"Infrared = {}\".format(infrared))\n # if lux is not None:\n # print(\"Lux = {}\".format(lux))\n # else:\n # print(\"Lux value is None. Possible \\\n # sensor underrange or overrange.\")\n # Disble the light sensor (to save power)\n tsl.enabled = False\n print('read light data: ')\n print(lux)\n print(infrared)\n print(broadband)\n return lux, infrared, broadband\n except BaseException as e:\n print('An exception occurred: {}'.format(e))", "def choose_ltv(self, label):\n tids = self.node_tids[label]\n vals = self.node_vals[label]\n losses = [self.tid_losses_dct[tid] for tid in tids]\n\n # -- try to return the value corresponding to one of the\n # trials that was previously chosen\n tid_set = set(tids)\n for tid in self.best_tids:\n if tid in tid_set:\n idx = tids.index(tid)\n rval = losses[idx], tid, vals[idx]\n break\n else:\n # -- choose a new best idx\n ltvs = sorted(zip(losses, tids, vals))\n best_idx = int(self.rng.geometric(1.0 / self.avg_best_idx)) - 1\n best_idx = min(best_idx, len(ltvs) - 1)\n assert best_idx >= 0\n best_loss, best_tid, best_val = ltvs[best_idx]\n self.best_tids.append(best_tid)\n rval = best_loss, best_tid, best_val\n return rval" ]
[ "0.54744244", "0.5327852", "0.5293958", "0.5189246", "0.5185192", "0.5171703", "0.51700526", "0.51252013", "0.5116307", "0.5108249", "0.5074463", "0.49921268", "0.4957534", "0.49423563", "0.49208283", "0.49129754", "0.48870516", "0.48595324", "0.4837113", "0.4836963", "0.48334643", "0.48199585", "0.48152825", "0.48046002", "0.47905585", "0.476998", "0.47656834", "0.47629288", "0.47486934", "0.47478086" ]
0.72863823
0
Populates the satellites array from TLEs
def populate_satellites_array(): total_tles = 0 tles = storage.get_tles_from_cache() metadata = storage.get_metadata() last_updated.append(metadata.get('last_updated')) if len(last_updated) > 1: del last_updated[0] if not tles: print('Fetching from spacetrack') cron_refresh_spacetrack_cache() tles = storage.get_tles_from_cache() for tle in tles: total_tles += 1 s = Satellite(tle) if s.is_valid(): satellites.append(s) print('Loaded {} of {} satellites'.format(len(satellites), total_tles))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def stations():\n\n return station_list", "def __init__(\n self,\n num_satellites,\n fix_type):\n self.num_satellites = num_satellites\n self.fix_type = fix_type", "def get_stations():\n response = requests.get('https://api.hh.ru/metro/160')\n todos = json.loads(response.text)\n colors = {'CD0505': 'red'}\n all_stations_one_line = []\n\n for i in todos['lines']:\n all_stations_one_line = []\n\n for j in i['stations']:\n one_station = station.station()\n one_station.set_name(j['name'])\n one_station.set_color(colors.get(i['hex_color']))\n one_station.set_lat(j['lat'])\n one_station.set_lng(j['lng'])\n all_stations_one_line.append(one_station)\n return all_stations_one_line", "def get_sunspot_data(yy, time1):\n master = []\n num_of_ss = np.max(yy.flatten()) # get number of different SS's\n centroids = []\n sizes = []\n numbers = []\n\n for i in np.arange(1, num_of_ss + 1): # for each SS:\n temp_sunspot = SunSpot(1, 1, 1)\n copy_yy = np.array(yy, copy = True)\n copy_yy[copy_yy != i] = 0 # get only points == i\n copy_yy[copy_yy == i] = 1\n\n indices_x, indices_y = np.where(yy == i)\n\n max_lat = np.max(indices_x)\n min_lat = np.min(indices_x)\n mean_lat = max_lat - (max_lat - min_lat)/2\n \n max_lon = np.max(indices_y)\n min_lon = np.min(indices_y)\n mean_lon = max_lon - (max_lon - min_lon)/2\n \n temp_sunspot.mask = copy_yy\n temp_sunspot.centroid = [mean_lon, mean_lat]\n temp_sunspot.size = len(indices_x)\n temp_sunspot.number = i\n temp_sunspot.x_points = indices_x\n temp_sunspot.y_points = indices_y\n temp_sunspot.timestamp = time1\n temp_sunspot.min_x = min_lon\n temp_sunspot.max_x = max_lon\n temp_sunspot.min_y = min_lat\n temp_sunspot.max_y = max_lat\n\n master.append(temp_sunspot)\n\n return num_of_ss, master", "def _build_stations(self, stop_list):\n # stations = [] TODO: What is this for\n dists = self._euclidian_distances(stop_list)\n stations = self._calculate_y_lines(dists)\n return stations", "def _sort_by_satellite(self) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:\n time = []\n satellite = []\n system = []\n for sat in sorted(self.dset.unique(\"satellite\"), reverse=True):\n idx = self.dset.filter(satellite=sat)\n time.extend(self.dset.time.gps.datetime[idx])\n satellite.extend(self.dset.satellite[idx])\n system.extend(self.dset.system[idx])\n \n return np.array([time]), np.array([satellite]), np.array([system])", "def sat(ds, dates):\n\tsat = ds.satellites.values.astype(float)\n\tDA=xr.DataArray(sat,\n\t\tdims = ['time'], \n\t\tcoords = {'time': dates[\"CFTime\"]},\n\t\tattrs = ({\n\t\t\t'_FillValue':-1, #9.96921e+36\n\t\t\t# 'units' :\"1\",\n\t\t\t'standard_name':\"satellites\",\n\t\t\t'long_name':\"satellites\",\n\t\t\t# 'scale': 1,\n\t\t\t# 'valid_range': [bn.nanmin(Val), np.ceil(np.nanmax(Val))]\n\t\t\t}),\n\t)\n\n\t# DA.longitude.attrs['units'] = 'degrees_east'\n\t# DA.latitude.attrs['units'] = 'degrees_north'\n\tDA.time.attrs[\"calendar\"] = dates[\"calendar\"]\n\tDA.time.attrs[\"units\"] = dates[\"units\"]\n\t\n\tencoding = ({'shuffle':True, \n\t\t# 'chunksizes':[1, ensinfo.lats.shape[0], 100],\n\t\t'zlib':True,\n\t\t'complevel':5})\n\t\n\treturn DA, encoding", "def _create_site_list(self):\n\n connector = Connectors(baseLocation = \"../data/sqlite3/\", dbName = \"radars.sqlite\",\n isInMem = False, isAutoSave = False)\n #command = '{:s}and tval>=? and tval<=? ORDER BY tval ASC'.format(command)\n #connector.cursor.execute(command, (self.rad_id, self.stm, self.etm))\n #rows = connector.cursor.fetchall()\n command = \"SELECT tval FROM hdw WHERE id=? 
\"\n command = '{:s}and tval>=? ORDER BY tval ASC'.format(command)\n connector.cursor.execute(command, (self.rad_id, self.stm))\n tvals_stm = connector.cursor.fetchall()\n tvals_stm = [x[0] for x in tvals_stm]\n\n command = \"SELECT tval FROM hdw WHERE id=? \"\n command = '{:s}and tval>=? ORDER BY tval ASC'.format(command)\n connector.cursor.execute(command, (self.rad_id, self.etm))\n tval_etm = connector.cursor.fetchone()[0]\n indx_etm = tvals_stm.index(tval_etm)\n\n # select the tvals of interest\n tvals = tvals_stm[:indx_etm+1]\n\n site_list = []\n for tval in tvals:\n site_list.append(site(code=self.rad, dt=tval))\n return site_list", "def prep_stations(url):\n stations = []\n _stations = requests.get(url).json()\n\n for _station in _stations['stationBeanList']:\n if _station['statusKey'] == 1:\n stations.append([_station['stationName'], _station['id'],\n _station['availableDocks'], _station['totalDocks'],\n _station['latitude'], _station['longitude']])\n\n return stations", "def importSites(self,sites_list):\n \"\"\" Append these sites objects to a sample \"\"\" \n self.sites = []\n for s in sites_list:\n mySite = Site(s)\n self._addSite(mySite)", "def main():\n populate_satellites_array()\n latitude = float(os.environ['LATITUDE'])\n longitude = float(os.environ['LONGITUDE'])\n radius = int(os.environ['RADIUS'])\n timeout = 1\n previous_satellites = []\n while True:\n if (last_updated[0] + 86400) < int(time.time()):\n print('Expired data, updating from spacetrack')\n cron_refresh_spacetrack_cache()\n populate_satellites_array()\n print('Checking {}, {}'.format(latitude, longitude))\n currently_overhead = get_overhead_satellites_dicts(latitude, longitude, radius)\n for sat in currently_overhead:\n if not sat['name'] in previous_satellites:\n announce_satellite(sat)\n previous_satellites = [x['name'] for x in currently_overhead]\n time.sleep(timeout)", "def get_stations(base_url, hts, mtype):\n stns1 = ws.site_list(base_url, hts, location='LatLong') # There's a problem with Hilltop that requires running the site list without a measurement first...\n stns1 = ws.site_list(base_url, hts, location='LatLong', measurement=mtype)\n stns2 = stns1[(stns1.lat > -47.5) & (stns1.lat < -34) & (stns1.lon > 166) & (stns1.lon < 179)].dropna().copy()\n stns2.rename(columns={'SiteName': 'ref'}, inplace=True)\n\n return stns2", "def propagate(satellite):", "def do_setup(self, ants): \n log_filter = LogFilter()\n getLogger().addFilter(log_filter)\n\n self.hills = []\n self.directions = []\n\n self.seen = [] #areas that have been seen, use this to avoid repetition\n self.unseen = []\n self.stepped_on = []\n\n self.intent = {}\n self.lc = {} #center of mass for a location\n self.i = {} #number of iterations for an ant\n\n for row in range(ants.rows):\n for col in range(ants.cols):\n self.unseen.append((row, col))\n self.intent[(row,col)] = Intent.GATHER\n\n self.lc[(row,col)] = (-1.0,-1.0) #set up center of mass\n self.i[(row,col)] = -1", "def build_sites_list():\n ns_getsites_filter = '''\n <nc:filter type=\"xpath\"\n xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\"\n xmlns:rm=\"http://cisco.com/ns/yang/Cisco-IOS-XE-route-map\"\n select=\"/native/route-map[substring(name, 1, 3)='To_']/name\"\n />\n '''\n m = manager.connect( host='10.112.83.100',\n port=830,\n username='cisco',\n password='cisco',\n hostkey_verify=False)\n answer = m.get_config(source='running', filter=ns_getsites_filter).data_xml\n c = xmltodict.parse (answer)\n # build the list\n liste_sites = [ r['name'][3:] for r in 
c['data']['native']['route-map'] ]\n return liste_sites", "def _get_setup_from_noiseSFTs(self):\n SFTConstraint = self._get_sft_constraints_from_tstart_duration()\n noise_multi_sft_catalog = lalpulsar.GetMultiSFTCatalogView(\n lalpulsar.SFTdataFind(self.noiseSFTs, SFTConstraint)\n )\n if noise_multi_sft_catalog.length == 0:\n raise IOError(\"Got empty SFT catalog.\")\n\n # Information to be extracted from the SFTs themselves\n IFOs = []\n tstart = []\n tend = []\n Tsft = []\n self.sftfilenames = [] # This refers to the MFD output!\n\n for ifo_catalog in noise_multi_sft_catalog.data:\n ifo_name = lalpulsar.ListIFOsInCatalog(ifo_catalog).data[0]\n\n time_stamps = lalpulsar.TimestampsFromSFTCatalog(ifo_catalog)\n this_Tsft = int(round(1.0 / ifo_catalog.data[0].header.deltaF))\n this_start_time = time_stamps.data[0].gpsSeconds\n this_end_time = time_stamps.data[-1].gpsSeconds + this_Tsft\n\n self.sftfilenames.append(\n utils.get_official_sft_filename(\n ifo_name,\n time_stamps.length, # ifo_catalog.length fails for NB case\n this_Tsft,\n this_start_time,\n this_end_time - this_start_time,\n self.label,\n )\n )\n\n IFOs.append(ifo_name)\n tstart.append(this_start_time)\n tend.append(this_end_time)\n Tsft.append(this_Tsft)\n\n # Get the \"overall\" values of the search\n Tsft = np.unique(Tsft)\n if len(Tsft) != 1:\n raise ValueError(f\"SFTs contain different basetimes: {Tsft}\")\n if Tsft[0] != self.Tsft:\n logger.warning(\n f\"Overwriting self.Tsft={self.Tsft}\"\n f\" with value {Tsft[0]} read from noiseSFTs.\"\n )\n self.Tsft = Tsft[0]\n self.tstart = min(tstart)\n self.duration = max(tend) - self.tstart\n self.detectors = \",\".join(IFOs)", "def __init__(self):\n self.networks = [\n ipaddress.ip_network(address)\n for address in self.addresses\n ]", "def __init__(self, trip_update, stops, position_in_list):\n self.trip_update = trip_update\n self.stops = stops\n self.routeID = str(self.trip_update.trip.route_id)\n # A minor quirk in the MTA's data is fixed here. 
S trains were listed as GS for some reason\n if self.routeID == \"GS\":\n self.routeID = \"S\"\n self.index = position_in_list", "def _setData(self):\n\n if not self.stationId:\n return\n \"\"\" \n # get the ressource url and adjust lat and lon from data portal\n query = sparqls.stationResource(self.stationId)\n key, val = RunSparql(query, 'array').run()\n if val: \n self.url = val[0][0]\n self.lat = float(val[0][2])\n self.lon = float(val[0][3])\n \"\"\"\n\n # it is possible, that a station id has multiple URI\n # ask for all URI\n query = sparqls.stationData(self.uri, 'all')\n data = RunSparql(query, 'pandas').run()\n\n if not data.empty:\n self._data = data\n else:\n self._data = 'no data available'\n\n # check if data is available and extract the 'unique' data products\n if isinstance(self._data, pd.DataFrame):\n p = self._data['specLabel'].unique()\n self._products = pd.DataFrame(p)\n\n # replace samplingheight=None with empty string\n self._data.samplingheight.replace(to_replace=[None], value=\"\", inplace=True)\n else:\n self._products = 'no data available'", "def gps_data():\n gpsd = gps(mode=WATCH_ENABLE|WATCH_NEWSTYLE)\n lat = 0\n lon = 0\n satellites = 0\n tpv = False\n sky = False\n while True:\n nx = gpsd.next()\n if nx['class'] == 'TPV':\n lat = getattr(nx, 'lat', 0)\n lon = getattr(nx, 'lon', 0)\n tpv = True\n elif nx['class'] == 'SKY':\n satellites = len(nx['satellites'])\n sky = True\n if sky and tpv:\n return satellites, lat, lon", "def collect_stations(self):\n # First, iterate provinces and build url's\n site = urllib.request.urlopen(self.base_url)\n\n # Check that the site is still valid or operating by collecting a list of provinces\n print(\"Collecting provinces\")\n provinces = [s[9:11] for s in re.findall('<a href=\"../\">../</a>', site.read())]\n\n # Iterate provinces and collect list of available times\n print(\"Collecting time periods and station ID's\")\n self.stations = defaultdict(dict)\n for prov in provinces:\n site = urllib.request.urlopen(self.build_url(prov))\n expression = '<a href=\"[hd][a-zA-Z]*/\">[hd][a-zA-Z]*/</a>'\n times = [s.split('>')[1].split('<')[0].replace('/', '') for s in re.findall(expression, site.read())]\n\n # Iterate times and collect the station ID's\n for time in times:\n site = urllib.request.urlopen(self.build_url(prov, time))\n expression = '<a href=\"{0}_[a-zA-Z0-9]*_{1}_hydrometric.csv\">{0}_[a-zA-Z0-9]*_{1}_hydrometric.csv</a>'\n expression = expression.format(prov.upper(), time.lower())\n stations = [s.split('_')[1] for s in re.findall(expression, site.read())]\n self.stations[prov][time] = stations", "def initCitys(self):\n self.cities = []\n for vertex in self.metaGraph:\n self.cities.append(vertex)", "def populate_stops(self):\n stops = self.load_csv('stops.txt')\n stops = self.process_stops(stops)\n\n connection = db.connect()\n for stop in stops:\n try:\n connection.execute(schema.stops.insert(), stop)\n except DataError:\n print \"Missing data for stop: %s\" % (stop)", "def __init__(self):\n self._distance_data = []\n self._location_data = []\n self._package_data = []", "async def _get_stations_remote() -> List[WeatherStation]:\n LOGGER.info('Using WFWX to retrieve station list')\n async with ClientSession() as session:\n # Get the authentication header\n header = await _get_auth_header(session)\n stations = []\n # Iterate through \"raw\" station data.\n async for raw_station in _fetch_raw_stations(session, header, BuildQueryAllStations()):\n # If the station is valid, add it to our list of stations.\n if 
_is_station_valid(raw_station):\n LOGGER.info('Processing raw_station %d',\n int(raw_station['stationCode']))\n stations.append(_parse_station(raw_station))\n LOGGER.debug('total stations: %d', len(stations))\n return stations", "def setup(env, channel, interT, station, mean):\r\n network = Network(env, channel)\r\n system = System(env, network, station, mean)\r\n transmitList = []\r\n \r\n while True:\r\n system.stations", "def populate_features(self):\n # AssetFeatureValue types\n satellite_feature_value = AssetFeatureValue.Standard.FUND_TYPE_SATELLITE.get_object()\n core_feature_value = AssetFeatureValue.Standard.FUND_TYPE_CORE.get_object()\n\n logger.info('Populating features for ticker %s' % self)\n r_feat = self.get_region_feature_value()\n ac_feat = self.get_asset_class_feature_value()\n curr_feat = self.get_currency_feature_value()\n at_feat = self.get_asset_type_feature_value()\n self.features.clear()\n self.features.add(r_feat, ac_feat, curr_feat, at_feat)\n if self.ethical:\n self.features.add(AssetFeatureValue.Standard.SRI_OTHER.get_object())\n self.features.add(core_feature_value if self.etf else satellite_feature_value)", "def eventlist():\n\n infile = conf[\"run_path_derived\"] + 'LOCALIZED.txt'\n\n data = np.genfromtxt(infile, skip_header=1) \n\n mlt = cx.MAGtoMLT(data[:, 5], data[:, 0:5])\n\n # Swap mlat and mlon colums so in expected order (lat then long)\n data[:, [6,5]] = data[:, [5,6]]\n \n data = np.hstack((data, np.reshape(mlt, (mlt.shape[0], 1))))\n \n return data", "def _update_ips(self):\n self.ip_others = []\n ips = self.mesh.ipaddr()\n self.rloc16 = self.mesh.rloc()\n for line in ips:\n if line.startswith('fd'):\n # Mesh-Local unicast IPv6\n try:\n addr = int(line.split(':')[-1], 16)\n except Exception:\n continue\n if addr == self.rloc16:\n # found RLOC\n # RLOC IPv6 has x:x:x:x:0:ff:fe00:RLOC16\n self.rloc = line\n elif ':0:ff:fe00:' not in line:\n # found Mesh-Local EID\n self.ip_eid = line\n elif line.startswith('fe80'):\n # Link-Local\n self.ip_link = line\n else:\n self.ip_others.append(line)", "def reload(self):\n self.known_stations = {}\n self.read_noaa_stations()\n self.read_table_stations()\n self.last_reload_check_time = datetime.datetime.utcnow()\n LOGGER.info('Have %s known stations', len(self.known_stations.keys()))" ]
[ "0.588268", "0.56820977", "0.5487302", "0.53803194", "0.53755254", "0.5344415", "0.52817106", "0.52787256", "0.5267636", "0.52112997", "0.5177532", "0.5159447", "0.51321965", "0.5129857", "0.50877666", "0.50871104", "0.50531954", "0.50431925", "0.50205386", "0.50128496", "0.5012243", "0.5010765", "0.5007167", "0.4981402", "0.49738172", "0.49290463", "0.49010813", "0.48997772", "0.48982337", "0.48902798" ]
0.79974955
0
Refreshes the cache from spacetrack daily
def cron_refresh_spacetrack_cache(): s = SpaceTrackApi() updated_tles_str = s.get_all_tles() storage.save_tle_cache(updated_tles_str) last_updated[0] = int(time.time()) metadata = { 'last_updated': last_updated[0], } storage.save_metadata(metadata)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _refresh_cache(self, data_dict):\r\n pass", "def refresh(self):\n\n self._refreshed_on = time.time() * 1000", "async def _timein_refresh(self):\n\t\t\n\t\tawait self.refresh_cache()", "def update_cache(self, rfid):\n\n values = {'id' : rfid}\n data = urllib.parse.urlencode(values)\n data = data.encode('utf-8')\n req = urllib.request.Request(self.weekly_url, data)\n\n t1 = perf_counter()\n\n try:\n resp = urllib.request.urlopen(req, timeout=self.request_timeout)\n except URLError:\n print(\"Weekly TODO: log that the connection was rejected...\")\n return\n\n except timeout as err:\n print(\"Timeout (weekly): \", err)\n return\n\n text = resp.read()\n \n #save the file\n base = os.path.dirname(os.path.abspath(__file__))\n db_path = \"{}/db/{}.json\".format(base, rfid)\n \n try:\n db_file = open(db_path, 'wb')\n except:\n print(\"error opening db file: \", db_path)\n return\n \n db_file.write(text)\n db_file.close()\n\n t2 = perf_counter()\n print(\"cache updated in {} seconds\".format(t2-t1))", "async def afterHoursAutoPurge(self, ctx: Context):", "def update_data(self):\n data, meta_data = ts.get_daily(symbol=self.stock_ticker, outputsize='full')\n self.data = data\n self.meta_data = meta_data", "def refreshTable(self):\n ds = []\n for id in self.protocol.getRefreshIDs():\n node = Node(id)\n nearest = self.protocol.router.findNeighbors(node, self.alpha)\n spider = NodeSpiderCrawl(self.protocol, node, nearest)\n ds.append(spider.find())\n\n def republishKeys(_):\n ds = []\n # Republish keys older than one hour\n for key, value in self.storage.iteritemsOlderThan(3600):\n ds.append(self.set(key, value))\n return defer.gatherResults(ds)\n\n d = defer.gatherResults(ds)\n d.addCallback(republishKeys)\n d.addErrback(self.onError)\n return d", "def clear_cache():\n sudo('service varnish restart')", "def reload_cache(self):\n self.data = self.read_data_cache()", "async def refresh_cache(request: Request) -> Response:\n await request.state.canvas.sync_cache(request.state.db_conn, skip_check=True)\n\n return Response(status_code=204)", "async def periodic_refresh(self):\n started_cycle = time()\n while True:\n now = time()\n if (now - started_cycle) < CYCLE_DELAY:\n await asyncio.sleep(CYCLE_DELAY - (now - started_cycle) + 1)\n started_cycle = time()\n await self.refresh_cache()\n # Never exits.", "def refresh():\r\n DB.drop_all()\r\n DB.create_all()\r\n # TODO Get data from OpenAQ, make Record objects with it, and add to db\r\n for i in time_x_values():\r\n DB.session.add(Record(datetime=i[0], value=i[1]))\r\n DB.session.commit()\r\n return 'Data refreshed!'", "def _purge():\r\n _cache.clear()", "def refresh_cached_account(username, registry):\n settings = registry.settings\n cache_ttl = int(settings.get(\"account_cache_ttl_seconds\", 30))\n cache_key = get_account_cache_key(username, registry)\n cache = registry.cache\n cache_result = cache.expire(cache_key, cache_ttl)\n return cache_result", "def refresh(self, key: str):\n self._registry[key] = time.time()\n # Also update the whole thing\n return self.get_registry()", "def flush_caches(self):\n spotify.Error.maybe_raise(\n lib.sp_session_flush_caches(self._sp_session))", "def refresh():\n DB.drop_all()\n DB.create_all()\n df_meas = open_api.measurements(city='Los Angeles', parameter='pm25', df=True)\n df_meas['date.utc'] = df_meas['date.utc'].astype(str)\n create_DB_records(df_meas)\n DB.session.commit()\n message = 'Data refreshed on: ' + str(datetime.datetime.now())\n over9s = Record.query.filter(Record.value > 9)\n recs = 
Record.query.filter(Record.id < 20)\n over5s = Record.query.filter(Record.value > 5)\n return render_template('base.html', message=message, over9s=over9s, over5s=over5s, recs=recs)", "def reset_file_index_cache() -> None:\n fileindex_cache_five_minutes.invalidate()", "def clear(self, cacheDir):", "def refresh_cache_file(form, model, is_created):\n common.save_serialized_file()\n app.global_content = common.load_cached()", "def cache_expiration(self):\n\n\t\t# Iterate through servers\n\t\tfor serv in self.servers:\n\t\t\tserv.cache.hash_table.clear() # Erase the cache\n\t\t\tserv.cache.cur_size = 0 # Resets the number of items in the cache to 0", "def flush_cache(self):\n if self.cache_modified:\n self.cache_manager.write(self.cache_file, self.cache)", "def flush():\n for k in cache._thecache.keys():\n del cache._thecache[k]", "def refreshStories(self):\n\t\tself.stories = self.h.getLatestStories(self.newestOrTop, self.alreadyReadList)\n\t\tself.lastRefreshed = time.localtime()\n\t\tif self.hnUserName != \"\":\n\t\t\tprint \"Getting \" + self.hnUserName + \"'s karma from HN...\"\n\t\t\tuser = HackerNewsUser(self.hnUserName)\n\t\t\tif self.karma != user.karma and self.karma != -1000:\n\t\t\t\tkarmaChange = 1\n\t\t\tself.karma = user.karma", "def clear_cache():\n # TODO\n pass", "def refresh_cache(url):\n openid_config = OpenIDConnectConfiguration.load_config(url)\n jwks_uri = openid_config['jwks_uri']\n keys = OpenIDConnectConfiguration.load_jwks(jwks_uri)\n \n OpenIDConnectConfiguration.signing_keys[url] = keys", "def flush_local_cache(self):\n self._local_cache = {}", "def update_caches():\n a = get_expired_acache()\n c = get_expired_cnamecache()\n ns = get_expired_nscache()\n for keys, cache in [a, c]:\n for key in keys:\n print \"deleting {0}\".format(key)\n del cache[key]\n ns_keys, cache = ns\n for key, dn in ns_keys:\n print \"deleting {0}{1}\".format(key, dn)\n del nscache[key][dn]", "def clear_cache(self):\n pass", "def refresh_all(self) -> None:\n self._update_thread.force_refresh_folder(self.feed_cache)" ]
[ "0.6387744", "0.62355447", "0.6228594", "0.61083984", "0.6058347", "0.60180247", "0.60163856", "0.59902626", "0.5981703", "0.5968459", "0.59570175", "0.5755967", "0.5755143", "0.5724268", "0.568075", "0.56786877", "0.5677507", "0.56684744", "0.5660523", "0.5646071", "0.5645575", "0.564389", "0.5643494", "0.56381106", "0.5635962", "0.5623816", "0.56121504", "0.56009156", "0.5585608", "0.5551312" ]
0.82856715
0
Sends the new products to the site
def enviar_novos(): template = 'produtos/form-enviar-produtos.html' form = EnviarProdutosNovosForm() imagens = read_images().keys() task = current_app.enviar_novos_task clear_task = request.args.get('clear_task', None) if clear_task and clear_task == 'yes': if task and task.state == 'SUCCESS': current_app.enviar_novos_task = task = None return redirect(url_for('produtos.enviar_novos')) produtos = None if not task: produtos = buscar_produtos_novos() if not produtos and not task: warning('Não existem produtos para serem enviados para o site') categorias = buscar_categorias_produtos() if not categorias: warning('Não existem categorias do site cadastradas') if form.validate_on_submit() and produtos and categorias: produtos_task = {} for p in produtos: secao = int(request.form.get(f'secao-{p.idsubproduto}', 0)) grupo = int(request.form.get(f'grupo-{p.idsubproduto}', 0)) subgrupo = int(request.form.get(f'subgrupo-{p.idsubproduto}', 0)) if secao and grupo and subgrupo: categorias = [secao, grupo, subgrupo] produtos_task[p.idsubproduto] = categorias task = enviar_novos_task.apply_async(args=(produtos_task,)) current_app.enviar_novos_task = task success(f'Tarefa iniciada com sucesso') return redirect(url_for('produtos.enviar_novos')) result = { 'title': 'Produtos', 'subtitle': 'Enviar Novos', 'form': form, 'imagens': imagens, 'produtos': produtos, 'categorias': categorias, 'task': task } return render_template(template, **result)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def excavar(self, sentido_giro, rpm, minutos):\n pass", "def ejecutarproceso(self):\n self.generarConsultasLibres()\n self.aplicarConsultasLibres()\n self.generarCombinaciones()\n self.generarConsultasConexion()\n self.archivoSalida()", "def atualiza_base():\n template = 'produtos/form-atualiza-base.html'\n form = EnviarProdutosNovosForm()\n\n if form.validate_on_submit():\n produtos = productList()\n atualizar_base(produtos)\n\n success('Base atualizada com sucesso')\n return redirect(url_for('produtos.atualiza_base'))\n\n result = {\n 'title': 'Atualizar Estoque Produtos',\n 'form': form,\n 'produtos': None\n }\n return render_template(template, **result)", "def dispone_producto_espacio_sistema(self, sistema):\r\n camiones_comparables_a_cargar = 0\r\n camiones_comparables_a_descargar = 0\r\n\r\n if self.tipo == \"Carga\":\r\n camiones_comparables_a_cargar += \\\r\n sum(1 for c in sistema.camiones_en_sistema\r\n if c.carga == self.carga and c.tipo == \"Carga\" and not c.manipulado.triggered)\r\n camiones_comparables_a_descargar += \\\r\n sum(1 for c in sistema.camiones_en_sistema\r\n if c.carga == self.carga and c.tipo == \"Descarga\" and not c.manipulado.triggered)\r\n if not self.manipulado.triggered and self in sistema.camiones_en_sistema:\r\n camiones_comparables_a_cargar -= 1\r\n else:\r\n camiones_comparables_a_cargar += \\\r\n sum(1 for c in sistema.camiones_en_sistema\r\n if self.medios_almacenamiento_destino(sistema) == c.medios_almacenamiento_destino(sistema) and\r\n c.tipo == \"Carga\" and not c.manipulado.triggered)\r\n camiones_comparables_a_descargar += \\\r\n sum(1 for c in sistema.camiones_en_sistema\r\n if self.medios_almacenamiento_destino(sistema) == c.medios_almacenamiento_destino(sistema) and\r\n c.tipo == \"Descarga\" and not c.manipulado.triggered)\r\n if not self.manipulado.triggered and self in sistema.camiones_en_sistema:\r\n camiones_comparables_a_descargar -= 1\r\n\r\n producto_espacio_disponible = 0\r\n\r\n for almacen in self.medios_almacenamiento_destino(sistema):\r\n\r\n if self.tipo == \"Carga\":\r\n producto_espacio_disponible += almacen.niveles[self.carga]\r\n else:\r\n producto_espacio_disponible += almacen.espacio\r\n\r\n if self.tipo == \"Carga\":\r\n\r\n if producto_espacio_disponible + 28 * (camiones_comparables_a_descargar - camiones_comparables_a_cargar) \\\r\n >= 28:\r\n return True\r\n else:\r\n return False\r\n else:\r\n if self.carga == \"Fierro\":\r\n return True\r\n elif producto_espacio_disponible + 28 * (camiones_comparables_a_cargar - camiones_comparables_a_descargar) \\\r\n >= 28:\r\n return True\r\n else:\r\n return False", "def production():\n env.config_file = 'config_production.py'\n env.hosts = ['[email protected]']\n env.host_type = 'production'\n env.user = 'ombu'\n env.host_webserver_user = 'nginx'\n env.host_site_path = '/home/ombu/webapps/ombuweb'", "def production():\n env.run = run\n env.cd = cd\n env.deployment = 'remote'", "def modificarProductosVenta(self, listaProductos):\n\t#modificar -- [idventa, codProducto, numeroitem, valorUnitario, cantidad, IVA]\n\tmodificar = listaProductos[0]\n\t#eliminar -- [idventa, idProducto, numeroitem]\n\teliminar = listaProductos[1]\n\t# primero se modifican los productos\n\tfor i in modificar:\n\t try:\n self.conexion.ejecutarSQL(\"\"\"update ProductosXVentas\n set valor_unitario=%s,\n cantidad=%s,\n IVA=%s,\n valor_total=%s*%s\n where id_Venta=%s\n and codigo_Producto = '%s'\n and numero_item=%s\"\"\"%(i[3], i[4], i[5], i[4], i[3], i[0], i[1], i[2]))\n\t except Exception, 
e:\n\t\tprint \"modificarProductosVenta excepcion: \", e\n\t\tself.conexion.rollback()\n\t\treturn False\n\t# Ahora se eliminan los Productos\n\tfor i in eliminar:\n\t try:\n\t\tself.conexion.ejecutarSQL(\"delete from ProductosXVentas where id_Venta=%s and codigo_Producto = '%s' and numero_item=%s\"%(i[0],i[1],i[2]))\n\t except Exception, e:\n\t\tprint \"modificarProductosVenta --> Eliminar excepcion: \", e\n\t\tself.conexion.rollback()\n\t\treturn False\n\t \n\t#Se actualiza el kardex para las ventas modificadas como para las ventas eliminadas.\n\t\n\t#Para las ventas modificadas\n\t# PENDIENTE REVISAR SI HUBO CAMBIO DE VALOR UNITARIO\n\ttry:\n\t #modificar -- [idventa,idProducto,numeroitem,cantidad,cantidad_anterior]\n\t for i in modificar:\n\t\tcantActual = float(i[3])\n\t\tcantAnterior = float(i[4])\n\t\tif cantActual == cantAnterior: #S'olo se actualiza el kardex si hubo cambio\n\t\t continue\n\t\telse:\n\t\t codigoProducto = i[1]\n\t\t info = self.getKardexProducto(codigoProducto)\n\t\t #info --> saldo_valor, saldo_cantidad, costo_unitario, valor_total, detalle\n\t\t info = info[-1]\n\t\t if cantActual < cantAnterior: # Si hizo una devoluci'on en la venta (el cliente me devolvi'o cant productos)\n\t\t\tcant = cantAnterior - cantActual\n\t\t\tdetalle = 'Compra'\n\t\t\tvalorUnitario = float(info[2])\n\t\t\tvalorTotal = valorUnitario*cant\n\t\t\tsaldoCantidad = float(info[1])+cant\n\t\t\tsaldoValor = float(info[0])+valorTotal\n\t\t else:# Si hizo una adici'on a la venta (le tengo que vender cant productos al cliente)\n\t\t\tcant = cantActual - cantAnterior\n\t\t\tdetalle = 'Venta'\n\t\t\tvalorUnitario = float(info[2])\n\t\t\tvalorTotal = valorUnitario*cant\n\t\t\tsaldoCantidad = float(info[1])-cant\n\t\t\tsaldoValor = float(info[0])-valorTotal\n\t\t costoUnitario = saldoValor/float(saldoCantidad)\n\t\t self.conexion.ejecutarSQL(\"\"\"\n\t\t insert into Kardex (codigo_Producto, fecha, hora, detalle,cantidad, valor_total,saldo_cantidad, \n\t\t saldo_valor, valor_unitario,costo_unitario) values \n\t\t ('%s',DATE('now','localtime'),TIME('now','localtime'),'%s',%s,%s,%s,%s,%s,%s )\"\"\"%(codigoProducto,\n\t\t detalle,cant,valorTotal,saldoCantidad,\n\t\t saldoValor,valorUnitario,costoUnitario))\n\t self.conexion.commit()\n\t return True\n\texcept:\n\t print \"modificarProductosVenta --> Actualizar kardex (ventas modificadas) excepcion: \", e\n\t self.conexion.rollback()\n\t return False", "def deploy():", "def production():\n puts(green('>>> Running on Production!'))\n env.hosts = ['web1.precog.com']\n puts(green('Servers: %s' % \", \".join(env.hosts)))", "def setup_prod():\n setup_general()", "def production():\n env.settings = 'production'\n env.hosts = [\n os.environ['ONELINER_PROD_HOST'],\n ]\n\n env.roledefs = {\n 'app': [\n os.environ['ONELINER_PROD_HOST'],\n ],\n 'admin': [\n os.environ['ONELINER_PROD_HOST'],\n ]\n }\n\n env.user = 'newsapps'\n env.site_domain = 'oneliner.inn.org'\n\n env.db_root_user = os.environ['MYSQL_PRODUCTION_ROOT_USER']\n env.db_root_pass = os.environ['MYSQL_PRODUCTION_ROOT_PASSWORD']\n env.db_type = 'mysql'\n env.db_host = 'localhost'\n env.database_password = os.environ['ONELINER_PROD_DB_PASSWORD']\n\n env.django_settings_module = '%(project_name)s.production_settings' % env", "def atualiza_estoque():\n\n template = 'produtos/form-atualiza-estoque.html'\n form = EnviarProdutosNovosForm()\n config = ConfigMagento.by_id(1)\n\n task = current_app.atualiza_estoque_task\n clear_task = request.args.get('clear_task', None)\n if clear_task and clear_task == 'yes':\n 
if task and task.state == 'SUCCESS':\n current_app.atualiza_estoque_task = task = None\n return redirect(url_for('produtos.atualiza_estoque'))\n\n produtos = None\n if not task:\n produtos = buscar_estoque_produtos(dthr_sincr=config.dtsincr_estoque)\n\n if form.validate_on_submit() and produtos:\n task = atualiza_estoque_task.apply_async()\n current_app.atualiza_estoque_task = task\n\n success(f'Tarefa iniciada com sucesso')\n return redirect(url_for('produtos.atualiza_estoque'))\n\n result = {\n 'title': 'Produtos',\n 'subtitle': 'Atualizar Estoque',\n 'form': form,\n 'produtos': produtos,\n 'task': task\n }\n return render_template(template, **result)", "def _production():\n env.environment = 'production'\n env.server_name = 'project-production.dimagi.com'\n env.hosts = [settings.PRODUCTION_HOST]", "def production():\n env.settings = 'production'\n env.hosts = ['db.tribapps.com'] \n env.user = 'newsapps'\n env.s3_bucket = 'media.apps.chicagotribune.com'", "def qa():\n env.config_file = 'config_production.py'\n env.hosts = ['[email protected]:34165']\n env.host_type = 'qa'\n env.user = 'ombu'\n env.host_webserver_user = 'www-data'\n env.host_site_path = '/mnt/main/qa/qa2/public'", "def espera_transbordo(self, sistema, tespera):\r\n # Inicia espera de camion para realizar transbordo\r\n try:\r\n\r\n print \"%s inicia espera Transbordo - Hora: %d\" % (self, sistema.now)\r\n yield sistema.timeout(tespera)\r\n\r\n # Consluida la espera verifica si hay producto o espacio disponible en almacenes.\r\n prod_o_esp_disp = self.dispone_producto_espacio_medios_almacenamiento(sistema)\r\n\r\n # Si producto o esapacio, termina su espera para realizar una carga o descarga\r\n if prod_o_esp_disp:\r\n\r\n print str(self) + \" termino espera - Hora: \" + str(sistema.now)\r\n\r\n # De lo contrario continua esperando mientras no haya producto o espacio en almacenes\r\n else:\r\n\r\n print str(self) + \" continua espera Producto/Espacio no disponible - Hora:\" + str(sistema.now)\r\n while not prod_o_esp_disp:\r\n yield sistema.timeout(1)\r\n prod_o_esp_disp = self.dispone_producto_espacio_medios_almacenamiento(sistema)\r\n\r\n sistema.exit({\"Resultado\": \"Termino espera\", \"Interrupcion\": None})\r\n\r\n # En caso que un camion llega, interrumpe la espera para proceder con el transbordo\r\n except simpy.Interrupt as interrupcion:\r\n\r\n print str(self) + \" espera interrumpida por \" + str(interrupcion.cause) + \" - Hora: \" + str(\r\n sistema.now)\r\n self.transbordo = \"Si\"\r\n sistema.exit({\"Resultado\": \"Espera interrumpida\", \"Interrupcion\": interrupcion.cause})", "def registra_ped(self):\n if len(self.anotados) > 0:\n # randomiza um tempo que ele leva para ir ate a copa\n tempo = random.randint(1, 3)\n time.sleep(tempo)\n logging.info(\" \".join([\"garcom\",\n str(self.nome),\n \"registrou os pedidos dos clientes\",\n str([i.nome for i in self.anotados])]))", "def export_inventory(self, websites):\n for website in websites:\n website.export_inventory_to_magento()", "def prod():\n # Varnish proxies.\n # env.roledefs['varnish_servers'] = ['varnish1.example.org', 'varnish2.example.org']\n # The Django app servers.\n env.roledefs['webapp_servers'] = ['djangopatterns.com']\n # Static media servers\n # env.roledefs['media_servers'] = ['djangopatterns.com']\n # Postgres servers.\n env.roledefs['db_servers'] = ['djangopatterns.com']\n\n # Combine all of the roles into the env.hosts list.\n env.hosts = [host[0] for host in env.roledefs.values()]", "def ingresarVenta(self, total, tipoPago, idCliente, 
usuarioColaborador, listaProductos):\n self._lockIngresarVenta.acquire()\n try:\n # agrupar totales vendidos de cada cod_barras\n numVendidosXcod = dict()\n # calcular subtotal, totalIVA basado en IVA y valorTotal de cada producto en lista. Subtotal=total-totalIVA\n totalIVA = 0\n for (cod_barras, desc, cantidad, valorUnitario, IVA, valorTotal) in listaProductos:\n totalIVA += float(valorTotal) * float(IVA) / 100\n if numVendidosXcod.has_key(cod_barras):\n numVendidosXcod[cod_barras] += float(cantidad)\n else:\n numVendidosXcod[cod_barras] = float(cantidad)\n # almacena subtotal en variable\n subtotal = total - totalIVA \n # revisar si existen las cantidades disponibles de cada codigo\n for cod, cant in numVendidosXcod.iteritems():\n cantidadDisponible = self.getInfoProducto(cod)[0][2]\n if cant > cantidadDisponible:\n return (False,\"Cantidades insuficientes del producto %s. Cantidad disponible: %s y Cantidad en Venta: %s\"%(cod,cantidadDisponible,cant),-1)\n # obtener id_TipoPago dependiendo del tipo de pago\n id_TipoPago = self.getIdTipoPago(tipoPago)\n # obtener estado de la venta dependiendo del tipo de pago\n estado = self.estadoVentaDadoTipoPago(id_TipoPago)\n # ingresar la venta en Ventas \n if estado == __PAGADA__:\n self.conexion.ejecutarSQL(\"insert into Ventas (fecha,hora,subtotal,totalIVA,total,estado,usuario_Colaborador,id_Cliente,id_TipoPago,fechaPagoTotal,horaPagoTotal) values (DATE('now','localtime'),TIME('now','localtime'),%s,%s,%s,'%s','%s','%s',%s,DATE('now','localtime'),TIME('now','localtime'))\"%(0,0,0,estado,usuarioColaborador,idCliente,id_TipoPago))\n else:\n self.conexion.ejecutarSQL(\"insert into Ventas (fecha,hora,subtotal,totalIVA,total,estado,usuario_Colaborador,id_Cliente,id_TipoPago) values (DATE('now','localtime'),TIME('now','localtime'),%s,%s,%s,'%s','%s','%s',%s)\"%(0,0,0,estado,usuarioColaborador,idCliente,id_TipoPago))\n # obtener id de última venta\n id_Venta = self.getIdUltimaVenta()\n # ingresar todos los productos de listaProductos en ProductosXVentas\n numItem = 1\n for (cod_barras, desc, cantidad, valorUnitario, IVA, valorTotal) in listaProductos:\n self.conexion.ejecutarSQL(\"insert into ProductosXVentas (numero_item,codigo_producto,id_venta,usuario_Colaborador,cantidad,valor_unitario,IVA,valor_total) values (%s,'%s',%s,'%s',%s,%s,%s,%s)\"%(numItem,cod_barras,id_Venta,usuarioColaborador,cantidad,valorUnitario,IVA,valorTotal))\n\n\t\t#Por cada producto vendido, se debe actualizar la tabla kardex.\n\t\tcantidadKardex = cantidad\n\t\tidProductoKardex = cod_barras\n\t\tsaldos = self.conexion.ejecutarSQL(\"\"\"select saldo_cantidad, saldo_valor, costo_unitario from kardex\n where codigo_Producto='%s'\n order by fecha and hora\"\"\"%(idProductoKardex))\n\t\tif len(saldos) == 0:\n\t\t v_unitarioKardex = valorUnitario\n\t\t valor_TotalKardex = float(cantidadKardex)*float(v_unitarioKardex)\n\t\t saldo_cantidadKardex = cantidadKardex\n\t\t saldo_valorKardex = valor_TotalKardex\n\t\telse:\n\t\t v_unitarioKardex = saldos[len(saldos)-1][2]\n\t\t valor_TotalKardex = float(cantidadKardex)*float(v_unitarioKardex)\n\t\t saldo_cantidadKardex = saldos[len(saldos)-1][0]-float(cantidadKardex)\n\t\t saldo_valorKardex = saldos[len(saldos)-1][1]-valor_TotalKardex\n\t\tif float(saldo_cantidadKardex) != 0:\n costo_unitarioKardex = saldo_valorKardex/float(saldo_cantidadKardex)\n else:\n costo_unitarioKardex = saldo_valorKardex\n\t\ttry:\n\t\t self.conexion.ejecutarSQL(\"\"\"insert into Kardex (codigo_Producto, fecha, hora, detalle,cantidad, valor_total,saldo_cantidad, 
saldo_valor, valor_unitario,costo_unitario)\n values ('%s',DATE('now','localtime'),TIME('now','localtime'),'Venta',%s,%s,%s,%s,%s,%s )\"\"\"\n %(idProductoKardex, cantidadKardex, valor_TotalKardex, saldo_cantidadKardex,\n saldo_valorKardex, v_unitarioKardex, costo_unitarioKardex))\n\t\texcept Exception, e:\n\t\t print \"Kardex Venta: \", e\n\t\t self.conexion.rollback()\n\t\t return (False,str(e),-1)\n\t \n numItem += 1 \n # comprometer\n\t self.conexion.commit()\n return (True,\"\",id_Venta)\n except Exception, e:\n print \"ingresarVenta excepcion: \", e\n self.conexion.rollback()\n return (False,str(e),-1)\n finally:\n self._lockIngresarVenta.release()", "def inativar():\n\n template = 'produtos/form-inativar-produtos.html'\n\n task = current_app.inativar_task\n clear_task = request.args.get('clear_task', None)\n if clear_task and clear_task == 'yes':\n if task and task.state == 'SUCCESS':\n current_app.inativar_task = task = None\n return redirect(url_for('produtos.inativar'))\n\n produtos = None\n if not task:\n produtos = buscar_produtos_inativos()\n\n form = EnviarProdutosNovosForm()\n\n if form.validate_on_submit() and produtos:\n task = inativar_task.apply_async()\n current_app.inativar_task = task\n\n success(f'Tarefa iniciada com sucesso')\n return redirect(url_for('produtos.inativar'))\n\n result = {\n 'title': 'Produtos',\n 'subtitle': 'Inativar',\n 'form': form,\n 'produtos': produtos,\n 'task': task\n }\n return render_template(template, **result)", "def sync_products_with_gmc(self) :\n products = self.env['product.product'].search([('sync_with_mc','=',True), ('website_published','=',True), ('google_product_brand_id','!=',False), ('google_merchant_center_id','!=',False),('google_mcid','=',False)])\n _logger.info('Total products to be synced------ %s', len(products))\n self.sync_product_with_gmc(products)", "def deploy(site):\n\n # Stop the program if the folder isn't initialized yet.\n Vagrant.stop_if_not_init()\n\n # Stop the program if the site is NOT herokufied.\n Heroku.stop_if_not_herokufied(site)\n\n # Now, run the \"deployheroku\" script on the VM.\n # That will deploy the site for you.\n Vagrant.run_script_on_vm(\"deployheroku\", site)", "def actualizar_puntaje(self):\r\n pass", "def full_deploy():\n refresh_cts()\n push_mockups()\n deploy()", "def insert_products(self):\n logic = ProductLogic()\n \n try:\n # We create the list of product objects\n products = self.objects_factory.create_product_object_list()\n products = set(products)\n\n for product in products:\n logic.insert(product)\n except:\n print('Il y a eu un problème lors de la récupération des données, veuillez rééssayer')", "def actualizar(self):\n if self.obraSocialSeleccionada!=None:\n self.cargar_productos(self.obraSocialSeleccionada)\n else:\n self.cargarProductosSinObra()", "def qa():\n env.hosts = ['[email protected]']\n env.directory = '/var/www/swordpushweb'", "def main():\n django.setup()\n devices = NetworkDevice.objects.all()\n creds = Credentials.objects.all()\n\n std_creds = creds[0]\n arista_creds = creds[1]\n \n for a_device in devices:\n if 'pynet-sw' in a_device.device_name:\n a_device.credentials = arista_creds\n else:\n a_device.credentials = std_creds\n a_device.save()\n for a_device in devices:\n print a_device, a_device.credentials", "def comenzar_nuevo_juego():\n escena_uno.cargarEscena1(screen, display_width, display_height)#Se pone a correr la escena\n #escena_uno.cargarEscena2(screen, display_width, display_height)" ]
[ "0.582344", "0.5736055", "0.5695689", "0.55553174", "0.54479647", "0.5444111", "0.5412343", "0.538099", "0.5352548", "0.53012", "0.5286463", "0.5281915", "0.5279147", "0.52585804", "0.52194196", "0.5212064", "0.5198973", "0.51872057", "0.5181836", "0.5137674", "0.51251155", "0.5100996", "0.50910276", "0.5088732", "0.50824326", "0.507138", "0.50288445", "0.5012015", "0.50094074", "0.49956682" ]
0.65614074
0
Updates the database with products that have already been sent
def atualiza_base(): template = 'produtos/form-atualiza-base.html' form = EnviarProdutosNovosForm() if form.validate_on_submit(): produtos = productList() atualizar_base(produtos) success('Base atualizada com sucesso') return redirect(url_for('produtos.atualiza_base')) result = { 'title': 'Atualizar Estoque Produtos', 'form': form, 'produtos': None } return render_template(template, **result)
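The route above delegates the actual work to atualizar_base(produtos), whose body is not shown in this record. A minimal sketch of what such an upsert helper could look like, assuming Flask-SQLAlchemy and a Produto model keyed by idsubproduto — the module path, model, and field names are all assumptions for illustration, not part of the record:

from app.models import db, Produto  # assumed module layout, not shown in the record

def atualizar_base(produtos):
    # Hypothetical upsert: insert products missing from the local table,
    # otherwise refresh their stock from the fetched product list.
    for p in produtos:
        existente = Produto.query.get(p['idsubproduto'])  # assumed primary key
        if existente is None:
            db.session.add(Produto(**p))                  # new product: insert
        else:
            existente.estoque = p.get('estoque', existente.estoque)  # update stock
    db.session.commit()                                   # one transaction for the batch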
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ejecutarproceso(self):\n self.generarConsultasLibres()\n self.aplicarConsultasLibres()\n self.generarCombinaciones()\n self.generarConsultasConexion()\n self.archivoSalida()", "def actualizar(self):\n if self.obraSocialSeleccionada!=None:\n self.cargar_productos(self.obraSocialSeleccionada)\n else:\n self.cargarProductosSinObra()", "def setup(self):\n print(\"INIT DATA\")\n\n self.nutella = Product.objects.create(name=\"nutella\", nutriscore=\"e\")", "def fill_data_product(self):\n self.product.fill_data_product(self.list_products, self.mycursor, self.my_database)", "def create_prod_db():\n _create_database(is_production=True)", "def cargarProductosSinObra(self):\n\n self.limpiarTabla(self.tableProductos)\n\n ##Cnsulta para obtener todos los productos del sistema, con su correspondiente\n ##codigo de barra, monodroga, descuento, importe\n query=self.sesion.query(ProductoModel.codigo_barra,ProductoModel.id_medicamento,ProductoModel.id_presentacion,MonodrogaModel.nombre,ProductoModel.importe).\\\n join(MedicamentoModel).filter(ProductoModel.id_medicamento==MedicamentoModel.nombre_comercial).\\\n join(MonodrogaModel).filter(MedicamentoModel.id_monodroga==MonodrogaModel.nombre).\\\n filter(ProductoModel.baja==False).order_by(ProductoModel.codigo_barra)\n\n ##Se cargan los datos obtenidos en la tabla de Producto\n for n, obj in enumerate(query):\n self.tableProductos.insertRow(n)\n self.tableProductos.setItem(n, 0, QtGui.QTableWidgetItem(str(obj[0])))\n self.tableProductos.setItem(n, 1, QtGui.QTableWidgetItem(str(obj[1])))\n self.tableProductos.setItem(n, 2, QtGui.QTableWidgetItem(str(obj[2])))\n self.tableProductos.setItem(n, 3, QtGui.QTableWidgetItem(str(obj[3])))\n self.tableProductos.setItem(n, 4, QtGui.QTableWidgetItem(str(0)))\n self.tableProductos.setItem(n, 5, QtGui.QTableWidgetItem(str(obj[4])))\n\n ##Se carga la cantidad de cada producto en la tabla\n for row,producto in enumerate(ProductoModel.buscarTodos(ProductoModel.codigo_barra,self.sesion)):\n self.tableProductos.setItem(row,6,QtGui.QTableWidgetItem(str(producto.getCantidad(self.sesion))))", "def update_data():\n etf_prices = get_prices(start=START_DATE, end=END_DATE)\n etf_returns = compute_returns(etf_prices)\n merged_etf_data = etf_prices.merge(etf_returns, right_index=True, left_index=True)\n indicators = compute_indicators(merged_etf_data) # this uses the \"ta\" lib, but it does not need\n # to be imported\n merged_etf_data = merged_etf_data.merge(indicators, right_index=True, left_index=True)\n vix_data = get_vix()\n data = merged_etf_data.merge(vix_data, right_index=True, left_index=True)\n data.to_csv('Data/database.csv')\n return", "def limpiarVentana(self):\n\n self.productosAgregados=0\n self.lotesVentas={}\n self.facturaCobrada=False\n self.obraSocialSeleccionada=None\n self.formapago = None\n self.factura = None\n self.data = {}\n self.detallesTabla = {}\n self.lineObra.clear()\n self.lineObra.setEnabled(True)\n self.lineCuit.clear()\n self.lineCuit.setEnabled(True)\n self.tableObra.setVisible(False)\n self.rbtnObra.setChecked(False)\n self.limpiarTabla(self.tableProductos)\n self.limpiarTabla(self.tableFactura)\n self.cargarProductosSinObra()", "def modificarProductosVenta(self, listaProductos):\n\t#modificar -- [idventa, codProducto, numeroitem, valorUnitario, cantidad, IVA]\n\tmodificar = listaProductos[0]\n\t#eliminar -- [idventa, idProducto, numeroitem]\n\teliminar = listaProductos[1]\n\t# primero se modifican los productos\n\tfor i in modificar:\n\t try:\n self.conexion.ejecutarSQL(\"\"\"update 
ProductosXVentas\n set valor_unitario=%s,\n cantidad=%s,\n IVA=%s,\n valor_total=%s*%s\n where id_Venta=%s\n and codigo_Producto = '%s'\n and numero_item=%s\"\"\"%(i[3], i[4], i[5], i[4], i[3], i[0], i[1], i[2]))\n\t except Exception, e:\n\t\tprint \"modificarProductosVenta excepcion: \", e\n\t\tself.conexion.rollback()\n\t\treturn False\n\t# Ahora se eliminan los Productos\n\tfor i in eliminar:\n\t try:\n\t\tself.conexion.ejecutarSQL(\"delete from ProductosXVentas where id_Venta=%s and codigo_Producto = '%s' and numero_item=%s\"%(i[0],i[1],i[2]))\n\t except Exception, e:\n\t\tprint \"modificarProductosVenta --> Eliminar excepcion: \", e\n\t\tself.conexion.rollback()\n\t\treturn False\n\t \n\t#Se actualiza el kardex para las ventas modificadas como para las ventas eliminadas.\n\t\n\t#Para las ventas modificadas\n\t# PENDIENTE REVISAR SI HUBO CAMBIO DE VALOR UNITARIO\n\ttry:\n\t #modificar -- [idventa,idProducto,numeroitem,cantidad,cantidad_anterior]\n\t for i in modificar:\n\t\tcantActual = float(i[3])\n\t\tcantAnterior = float(i[4])\n\t\tif cantActual == cantAnterior: #S'olo se actualiza el kardex si hubo cambio\n\t\t continue\n\t\telse:\n\t\t codigoProducto = i[1]\n\t\t info = self.getKardexProducto(codigoProducto)\n\t\t #info --> saldo_valor, saldo_cantidad, costo_unitario, valor_total, detalle\n\t\t info = info[-1]\n\t\t if cantActual < cantAnterior: # Si hizo una devoluci'on en la venta (el cliente me devolvi'o cant productos)\n\t\t\tcant = cantAnterior - cantActual\n\t\t\tdetalle = 'Compra'\n\t\t\tvalorUnitario = float(info[2])\n\t\t\tvalorTotal = valorUnitario*cant\n\t\t\tsaldoCantidad = float(info[1])+cant\n\t\t\tsaldoValor = float(info[0])+valorTotal\n\t\t else:# Si hizo una adici'on a la venta (le tengo que vender cant productos al cliente)\n\t\t\tcant = cantActual - cantAnterior\n\t\t\tdetalle = 'Venta'\n\t\t\tvalorUnitario = float(info[2])\n\t\t\tvalorTotal = valorUnitario*cant\n\t\t\tsaldoCantidad = float(info[1])-cant\n\t\t\tsaldoValor = float(info[0])-valorTotal\n\t\t costoUnitario = saldoValor/float(saldoCantidad)\n\t\t self.conexion.ejecutarSQL(\"\"\"\n\t\t insert into Kardex (codigo_Producto, fecha, hora, detalle,cantidad, valor_total,saldo_cantidad, \n\t\t saldo_valor, valor_unitario,costo_unitario) values \n\t\t ('%s',DATE('now','localtime'),TIME('now','localtime'),'%s',%s,%s,%s,%s,%s,%s )\"\"\"%(codigoProducto,\n\t\t detalle,cant,valorTotal,saldoCantidad,\n\t\t saldoValor,valorUnitario,costoUnitario))\n\t self.conexion.commit()\n\t return True\n\texcept:\n\t print \"modificarProductosVenta --> Actualizar kardex (ventas modificadas) excepcion: \", e\n\t self.conexion.rollback()\n\t return False", "def insert_in_tendencias():\n\n ultimos = persistence.traer_ultimos_precios_doge()\n average_doge = sum(ultimos) / len(ultimos)\n min_price_doge = min(ultimos)\n max_price_doge = max(ultimos)\n openprice = persistence.traer_masviejo_precio_doge()\n closeprice = persistence.traer_ultimo_precio_doge()\n persistence.insert_new_tendencia(\"DOGE\", str(average_doge), str(min_price_doge), str(max_price_doge), str(openprice), str(closeprice))\n\n ultimos = persistence.traer_ultimos_precios_btc()\n average_btc = sum(ultimos) / len(ultimos)\n min_price_btc = min(ultimos)\n max_price_btc = max(ultimos)\n openprice = persistence.traer_masviejo_precio_btc()\n closeprice = persistence.traer_ultimo_precio_btc()\n persistence.insert_new_tendencia(\"BTC\", str(average_btc), str(min_price_btc), str(max_price_btc), str(openprice), str(closeprice))\n\n ultimos = 
persistence.traer_ultimos_precios_eth()\n average_eth = sum(ultimos) / len(ultimos)\n min_price_eth = min(ultimos)\n max_price_eth = max(ultimos)\n openprice = persistence.traer_masviejo_precio_eth()\n closeprice = persistence.traer_ultimo_precio_eth()\n persistence.insert_new_tendencia(\"ETH\", str(average_eth), str(min_price_eth), str(max_price_eth), str(openprice), str(closeprice))", "def populate_database(self):\n self.insert_products()\n self.insert_categories()\n self.insert_products_categories()\n self.insert_stores()\n self.insert_products_stores()", "def run_data_prep(context = ''):\n run_data_prep_confirmtion(context)\n run_data_prep_settlement(context)", "def sync_prod_db(env=None, reset_db=False, haus_vars={}):\n print green('sync/migrate DB')\n if reset_db:\n # uncomment below and replace DATABSE_URL with the prod database url\n # note that this is destructive of the PROD DB\n #local('heroku pg:reset DATABASE_URL') #add \"--confirm haus\" to remove required input\n pass\n local('heroku run ./manage.py migrate -a {}'.format(APP_INFO[env][\"heroku_app_name\"]))", "def migrar(self):\r\n start = time.time()\r\n if not os.path.exists('output'):\r\n os.makedirs('output')\r\n # seleciona as tabelas\r\n res = self.cur_origem.execute(\r\n \"select rdb$relation_name from rdb$relations where rdb$view_blr is null and (rdb$system_flag is null or rdb$system_flag = 0) %s;\" % self.filtro)\r\n\r\n # para cada tabela\r\n for row, in res.fetchall():\r\n row = row.strip()\r\n\r\n # conta os registros\r\n countsql = self.cur_origem.execute(\r\n \"select count(*) as total from %s \" % row)\r\n count, = countsql.fetchall()[0]\r\n start_time = time.time()\r\n start_datetime = datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\r\n print(\"MIGRANDO: %s\\n NRO REGISTROS: %s registros\\n INICIO: %s\" % (\r\n row, count, start_datetime))\r\n\r\n # gera o create table e trunca a tabela ( se ja existir )\r\n create, tipos = self.ddl_table(row)\r\n self.cur_destino.execute(create)\r\n self.cur_destino.execute(\"TRUNCATE TABLE %s\" % row)\r\n\r\n # busca os dados\r\n self.cur_origem.execute(\"select * from %s \" % (row))\r\n\r\n # grava os dados no TXT\r\n with open(\"output/%s.txt\" % row, \"wb\") as f:\r\n writer = csv.writer(f, delimiter='|')\r\n writer.writerows(self.cur_origem.fetchall())\r\n\r\n # le o arquivo gravado e copia para o banco destino\r\n with open(\"output/%s.txt\" % row, \"r\") as f:\r\n try:\r\n self.cur_destino.copy_expert(\r\n \"\"\"COPY %s FROM STDIN WITH QUOTE '\"' DELIMITER '|' NULL '' CSV \"\"\" % row, f)\r\n except Exception as e:\r\n self.erros.append([\"%s\" % row, e])\r\n end_time = time.time()\r\n end_datetime = datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\r\n print(\" FIM: %s\\n TEMPO: %ss\\n TABELA COM ERRO %s\" %\r\n (end_datetime, round(end_time-start_time, 0), e))\r\n else:\r\n end_time = time.time()\r\n end_datetime = datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\r\n print(\" FIM: %s\\n TEMPO: %ss\\n OK\" %\r\n (end_datetime, round(end_time-start_time, 0)))\r\n end = time.time()\r\n print(\"TEMPO GASTO: %s s\" % (end-start))", "def update_database():\n\n # We obtain the data from the official database\n df = getData.extractData()\n\n # We save the dataframe for later use in the API\n auxiliary.saveToCsv(df, 'app/resources')", "def bd_iniciar():\n db = bd_conecta()\n with app.open_resource('schema.sql', mode='r') as f:\n db.cursor().executescript(f.read())\n db.commit()", "def ingresarVenta(self, total, tipoPago, idCliente, usuarioColaborador, 
listaProductos):\n self._lockIngresarVenta.acquire()\n try:\n # agrupar totales vendidos de cada cod_barras\n numVendidosXcod = dict()\n # calcular subtotal, totalIVA basado en IVA y valorTotal de cada producto en lista. Subtotal=total-totalIVA\n totalIVA = 0\n for (cod_barras, desc, cantidad, valorUnitario, IVA, valorTotal) in listaProductos:\n totalIVA += float(valorTotal) * float(IVA) / 100\n if numVendidosXcod.has_key(cod_barras):\n numVendidosXcod[cod_barras] += float(cantidad)\n else:\n numVendidosXcod[cod_barras] = float(cantidad)\n # almacena subtotal en variable\n subtotal = total - totalIVA \n # revisar si existen las cantidades disponibles de cada codigo\n for cod, cant in numVendidosXcod.iteritems():\n cantidadDisponible = self.getInfoProducto(cod)[0][2]\n if cant > cantidadDisponible:\n return (False,\"Cantidades insuficientes del producto %s. Cantidad disponible: %s y Cantidad en Venta: %s\"%(cod,cantidadDisponible,cant),-1)\n # obtener id_TipoPago dependiendo del tipo de pago\n id_TipoPago = self.getIdTipoPago(tipoPago)\n # obtener estado de la venta dependiendo del tipo de pago\n estado = self.estadoVentaDadoTipoPago(id_TipoPago)\n # ingresar la venta en Ventas \n if estado == __PAGADA__:\n self.conexion.ejecutarSQL(\"insert into Ventas (fecha,hora,subtotal,totalIVA,total,estado,usuario_Colaborador,id_Cliente,id_TipoPago,fechaPagoTotal,horaPagoTotal) values (DATE('now','localtime'),TIME('now','localtime'),%s,%s,%s,'%s','%s','%s',%s,DATE('now','localtime'),TIME('now','localtime'))\"%(0,0,0,estado,usuarioColaborador,idCliente,id_TipoPago))\n else:\n self.conexion.ejecutarSQL(\"insert into Ventas (fecha,hora,subtotal,totalIVA,total,estado,usuario_Colaborador,id_Cliente,id_TipoPago) values (DATE('now','localtime'),TIME('now','localtime'),%s,%s,%s,'%s','%s','%s',%s)\"%(0,0,0,estado,usuarioColaborador,idCliente,id_TipoPago))\n # obtener id de última venta\n id_Venta = self.getIdUltimaVenta()\n # ingresar todos los productos de listaProductos en ProductosXVentas\n numItem = 1\n for (cod_barras, desc, cantidad, valorUnitario, IVA, valorTotal) in listaProductos:\n self.conexion.ejecutarSQL(\"insert into ProductosXVentas (numero_item,codigo_producto,id_venta,usuario_Colaborador,cantidad,valor_unitario,IVA,valor_total) values (%s,'%s',%s,'%s',%s,%s,%s,%s)\"%(numItem,cod_barras,id_Venta,usuarioColaborador,cantidad,valorUnitario,IVA,valorTotal))\n\n\t\t#Por cada producto vendido, se debe actualizar la tabla kardex.\n\t\tcantidadKardex = cantidad\n\t\tidProductoKardex = cod_barras\n\t\tsaldos = self.conexion.ejecutarSQL(\"\"\"select saldo_cantidad, saldo_valor, costo_unitario from kardex\n where codigo_Producto='%s'\n order by fecha and hora\"\"\"%(idProductoKardex))\n\t\tif len(saldos) == 0:\n\t\t v_unitarioKardex = valorUnitario\n\t\t valor_TotalKardex = float(cantidadKardex)*float(v_unitarioKardex)\n\t\t saldo_cantidadKardex = cantidadKardex\n\t\t saldo_valorKardex = valor_TotalKardex\n\t\telse:\n\t\t v_unitarioKardex = saldos[len(saldos)-1][2]\n\t\t valor_TotalKardex = float(cantidadKardex)*float(v_unitarioKardex)\n\t\t saldo_cantidadKardex = saldos[len(saldos)-1][0]-float(cantidadKardex)\n\t\t saldo_valorKardex = saldos[len(saldos)-1][1]-valor_TotalKardex\n\t\tif float(saldo_cantidadKardex) != 0:\n costo_unitarioKardex = saldo_valorKardex/float(saldo_cantidadKardex)\n else:\n costo_unitarioKardex = saldo_valorKardex\n\t\ttry:\n\t\t self.conexion.ejecutarSQL(\"\"\"insert into Kardex (codigo_Producto, fecha, hora, detalle,cantidad, valor_total,saldo_cantidad, saldo_valor, 
valor_unitario,costo_unitario)\n values ('%s',DATE('now','localtime'),TIME('now','localtime'),'Venta',%s,%s,%s,%s,%s,%s )\"\"\"\n %(idProductoKardex, cantidadKardex, valor_TotalKardex, saldo_cantidadKardex,\n saldo_valorKardex, v_unitarioKardex, costo_unitarioKardex))\n\t\texcept Exception, e:\n\t\t print \"Kardex Venta: \", e\n\t\t self.conexion.rollback()\n\t\t return (False,str(e),-1)\n\t \n numItem += 1 \n # comprometer\n\t self.conexion.commit()\n return (True,\"\",id_Venta)\n except Exception, e:\n print \"ingresarVenta excepcion: \", e\n self.conexion.rollback()\n return (False,str(e),-1)\n finally:\n self._lockIngresarVenta.release()", "def db_update(start_date=None, end_date=None, sandbox=False):\n \n # Make dates if necessary\n date_inter = datetime.date.today()\n if start_date == None:\n if date_inter.month == 1:\n start_date = str(date_inter.replace(year = date_inter.year - 1, month = 12, day = 1)) + 'T00:00:00+00:00'\n else:\n start_date = str(date_inter.replace(month = date_inter.month - 1, day = 1)) + 'T00:00:00+00:00'\n if end_date == None:\n end_date = str(date_inter.replace(day = 1) - datetime.timedelta(days = 1)) + 'T23:59:59+00:00'\n\n # Get Token\n headers_token = {\n 'Authorization': 'Basic ' + api_key,\n 'Content-Type': 'application/x-www-form-urlencoded'\n }\n url_token = 'https://digital.iservices.rte-france.com/token/oauth/'\n response_token = requests.post(url_token, headers=headers_token)\n json_response_token = json.loads(response_token.text)\n token = json_response_token['access_token']\n\n # Make the RTE API request\n headers = {\n 'Authorization': 'Bearer ' + token\n }\n url_sandbox = 'https://digital.iservices.rte-france.com/open_api/actual_generation/v1/sandbox/actual_generations_per_production_type'\n url = 'https://digital.iservices.rte-france.com/open_api/actual_generation/v1/actual_generations_per_production_type?start_date=' + start_date + '&end_date=' + end_date\n\n if sandbox:\n response = requests.get(url_sandbox, headers=headers)\n else:\n response = requests.get(url, headers=headers)\n\n print(response)\n\n # print(response.text)\n return response.text", "def migrar(self):\r\n start = time.time()\r\n if not os.path.exists('output'):\r\n os.makedirs('output')\r\n # seleciona as tabelas\r\n res = self.cur_origem.execute(\r\n \"SELECT table_name FROM dba_tables WHERE owner = '%s' %s order by table_name\" % (self.schema_origem, self.filtro))\r\n\r\n # para cada tabela\r\n for row, in res.fetchall():\r\n row = row.strip()\r\n # conta os registros\r\n countsql = self.cur_origem.execute(\r\n \"select count(*) as total from %s \" % row)\r\n count, = countsql.fetchall()[0]\r\n start_time = time.time()\r\n start_datetime = datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\r\n print(\"MIGRANDO: %s\\n NRO REGISTROS: %s registros\\n INICIO: %s\" % (\r\n row, count, start_datetime))\r\n\r\n # gera o create table e trunca a tabela ( se ja existir )\r\n create, tipos = self.ddl_table(row)\r\n self.cur_destino.execute(create)\r\n self.cur_destino.execute(\"TRUNCATE TABLE %s\" % row)\r\n\r\n # gera as colunas\r\n cols = \"\"\r\n # tratamento exclusivo SONNER\r\n for id, [col, tipo] in tipos.iteritems():\r\n if col == \"SENHA\" or (col == \"DADOS\" and row == \"CADARQUIVODIGITAL\"):\r\n cols += \"NULL AS %s,\" % col\r\n else:\r\n cols += \"%s,\" % col\r\n # padrao seria:\r\n # for id, [col, tipo] in tipos.iteritems():\r\n # cols += \"%s,\" % col\r\n # print \"select %s from %s \" % (cols[:-1], row)\r\n\r\n # busca os dados\r\n self.cur_origem.execute(\"select 
%s from %s \" % (cols[:-1], row))\r\n\r\n # grava os dados no TXT\r\n with open(\"output/%s.txt\" % row, \"wb\") as f:\r\n w = csv.writer(\r\n f, delimiter='|', quotechar='\"')\r\n try:\r\n\r\n # execao SONNER (dados TEXT tipo cblob necessita de conversao anterior por conta de encoding)\r\n if row == \"CADPESSOACONTSOC\":\r\n for cada in self.cur_origem.fetchall():\r\n cada = list(cada)\r\n if cada[5] is not None:\r\n cada[5] = cada[5].read().encode(\"latin-1\")\r\n w.writerow(cada)\r\n\r\n else:\r\n w.writerows(self.cur_origem.fetchall())\r\n except Exception as e:\r\n self.erros.append([\"%s\" % row, e])\r\n end_time = time.time()\r\n end_datetime = datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\r\n print(\"\\tFIM: %s\\tTEMPO: %ss\\tTABELA COM ERRO %s\" %\r\n (end_datetime, round(end_time-start_time, 0), e))\r\n\r\n # le o arquivo gravado e copia para o banco destino\r\n with open(\"output/%s.txt\" % row, \"r\") as f:\r\n try:\r\n self.cur_destino.copy_expert(\r\n \"\"\"COPY %s FROM STDIN WITH QUOTE '\"' DELIMITER '|' NULL '' CSV \"\"\" % row, f)\r\n except Exception as e:\r\n self.erros.append([\"%s\" % row, e])\r\n end_time = time.time()\r\n end_datetime = datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\r\n print(\" FIM: %s\\n TEMPO: %ss\\n TABELA COM ERRO %s\" %\r\n (end_datetime, round(end_time-start_time, 0), e))\r\n else:\r\n end_time = time.time()\r\n end_datetime = datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\r\n print(\" FIM: %s\\n TEMPO: %ss\\n OK\" %\r\n (end_datetime, round(end_time-start_time, 0)))\r\n end = time.time()\r\n print(\"TEMPO GASTO: %s s\" % (end-start))", "def create_base_testes(dbsession):\n # print('Iniciando criação da base #############################')\n # print('dbsession', dbsession)\n base1 = BaseOrigem('alimentos_e_esportes')\n dbsession.add(base1)\n dbsession.commit()\n risco1 = PadraoRisco('perigo', base1)\n dbsession.add(risco1)\n dbsession.commit()\n param1 = ParametroRisco('alimento', 'teste1', risco1)\n param2 = ParametroRisco('esporte', 'teste2', risco1)\n dbsession.add(param1)\n dbsession.add(param2)\n dbsession.commit()\n valor1 = ValorParametro('bacon', Filtro.igual, param1)\n valor2 = ValorParametro('base jump', Filtro.igual, param2)\n dbsession.add(valor1)\n dbsession.add(valor2)\n dbsession.commit()\n visao1 = Visao('viagens', base1.id)\n dbsession.add(visao1)\n dbsession.commit()\n tabela1 = Tabela('viagens', 'viagem', '', 0, visao1.id)\n tabela2 = Tabela('alimentos', 'alimento', 'viagem', tabela1.id, visao1.id)\n tabela3 = Tabela('esportes', 'esporte', 'viagem', tabela1.id, visao1.id)\n dbsession.add(tabela1)\n dbsession.add(tabela2)\n dbsession.add(tabela3)\n dbsession.commit()\n # print('Base criada! 
########################')", "def get_data(self):\n products_list = []\n for category in CATEGORIES:\n json_data = self.url_to_json(category)\n pages_nb = self.retrieve_cat_pages_nb(json_data)\n for page in range(pages_nb):\n page_json_data = self.page_to_json(category, page+1)\n products = page_json_data[\"products\"]\n for p in products:\n params = {\n 'brands': \"\",\n 'product_name_fr': \"\",\n 'nutrition_grades': \"\",\n 'stores': \"\",\n 'url': \"\",\n 'categories': \"\"\n }\n for key in params:\n try:\n params[key] = p[key]\n except KeyError:\n continue\n if params['product_name_fr'] != \"\" and params['nutrition_grades'] != \"\" and params['url'] != \"\" and params['categories'] != \"\":\n product = Product(brand=params['brands'],\n name=params['product_name_fr'],\n nutrition_grade=params['nutrition_grades'],\n stores=params['stores'], url=params['url'],\n category=params['categories'])\n products_list.append(product)\n try:\n self.manager.save_all(self.clean_data(products_list))\n print(f\"\\n La base de données |{DB_NAME}| a été peuplée \\n\")\n except:\n print(\"\\n Une erreur s'est produite lors \"\n \"du peuplement de la base de données \\n\")", "def actualizar_puntaje(self):\r\n pass", "def setup_app(command, conf, vars):\n load_environment(conf.global_conf, conf.local_conf)\n # Load the models\n engine = config['pylons.app_globals'].sa_engine\n print \"Droping tables\"\n model.metadata.drop_all(engine)\n print \"Finish..\\nCreating tables\"\n model.metadata.create_all(bind=engine)\n\n manager = model.Usuario()\n manager.user_name = u'admin'\n manager.nombre = u'Administrador'\n manager.email_address = u'[email protected]'\n manager.password = u'admin'\n\n model.DBSession.add(manager)\n ##usuarios\n #usr, usr2, usr3 = cargar_usuarios()\n #Roles\n model.DBSession.flush()\n group, group2, group3 = cargar_roles(manager)\n model.DBSession.flush()\n #estados\n inicial, desarrollo, cancelado, pausado, finalizado = cargar_estados()\n #Permisos\n model.DBSession.flush()\n cargar_permisos(group, group2, group3)\n #Proyectos\n model.DBSession.flush()\n #proyecto,proyecto2,proyecto3 = cargar_proyectos(usr,usr2)\n #fases\n #model.DBSession.flush()\n #fase1, fase2,fase3,fase4 = cargar_fases()\n #tipo de atributo\n #model.DBSession.flush()\n #tipo1, tipo2, tipo3 = cargar_tipo_atributo()\n #tipo de item\n #model.DBSession.flush()\n #tipodeitem1,tipodeitem2,tipodeitem3,tipodeitem4 = cargar_tipo_item(\n # fase1, fase2,fase3,\n # fase4)\n #estados de item\n model.DBSession.flush()\n estadoItem1,estadoItem2,estadoItem3, estadoItem4 = cargar_estados_item()\n #items\n #cargar_items()\n model.DBSession.flush()\n #tipo de relaciones\n cargar_tipos_relaciones()\n model.DBSession.flush()\n #Permisos del rol de proyecto\n #cargar_permisos_proyecto(group21,group22,group23,group31)\n #estado linea base\n model.DBSession.flush()\n cargar_estado_lineabase()\n model.DBSession.flush()\n transaction.commit()\n print \"Successfully setup\"", "def connect_db_and_load_data(cls):\n db.connect()\n db.create_tables([Product], safe=True)\n load_data(transform_data('./inventory.csv'))", "def write_base_values(dt, session):\n\n for user in get_all_users(session):\n base_values = define_base_values(user.inhabitants, dt)\n\n # Create PerCapitaConsumption instance\n session.add(PerCapitaConsumption(dt, user.meter_id, base_values['consumption'],\n base_values['consumption_cumulated'],\n base_values['inhabitants'],\n base_values['per_capita_consumption'],\n base_values['per_capita_consumption_cumulated'],\n 
base_values['days'],\n base_values['moving_average'],\n base_values['moving_average_annualized']))\n\n session.commit()", "def main():\n config = configparser.ConfigParser()\n config.read('dwh.cfg')\n\n conn = psycopg2.connect(\"host={} dbname={} user={} password={} port={}\".format(*config['STAGE'].values()))\n cur = conn.cursor()\n \n #remove the existing tables\n drop_tables(cur, conn)\n \n #replace the tables with new ones\n create_tables(cur, conn)\n \n #add missing postcode value into table\n default_missing_values(cur, conn)\n \n conn.close()", "def enviar_novos():\n\n template = 'produtos/form-enviar-produtos.html'\n form = EnviarProdutosNovosForm()\n imagens = read_images().keys()\n\n task = current_app.enviar_novos_task\n clear_task = request.args.get('clear_task', None)\n if clear_task and clear_task == 'yes':\n if task and task.state == 'SUCCESS':\n current_app.enviar_novos_task = task = None\n return redirect(url_for('produtos.enviar_novos'))\n\n produtos = None\n if not task:\n produtos = buscar_produtos_novos()\n\n if not produtos and not task:\n warning('Não existem produtos para serem enviados para o site')\n\n categorias = buscar_categorias_produtos()\n if not categorias:\n warning('Não existem categorias do site cadastradas')\n\n if form.validate_on_submit() and produtos and categorias:\n produtos_task = {}\n\n for p in produtos:\n secao = int(request.form.get(f'secao-{p.idsubproduto}', 0))\n grupo = int(request.form.get(f'grupo-{p.idsubproduto}', 0))\n subgrupo = int(request.form.get(f'subgrupo-{p.idsubproduto}', 0))\n\n if secao and grupo and subgrupo:\n categorias = [secao, grupo, subgrupo]\n produtos_task[p.idsubproduto] = categorias\n\n task = enviar_novos_task.apply_async(args=(produtos_task,))\n current_app.enviar_novos_task = task\n\n success(f'Tarefa iniciada com sucesso')\n return redirect(url_for('produtos.enviar_novos'))\n\n result = {\n 'title': 'Produtos',\n 'subtitle': 'Enviar Novos',\n 'form': form,\n 'imagens': imagens,\n 'produtos': produtos,\n 'categorias': categorias,\n 'task': task\n }\n return render_template(template, **result)", "def atualizar(self, **kwargs):\n p_id_aluno = kwargs.get('id_aluno')\n p_nome = kwargs.get('nome')\n p_cpf = kwargs.get('cpf')\n p_data_nasc = kwargs.get('data_nasc')\n p_telefone = kwargs.get('telefone')\n\n sql = f\"\"\"\n UPDATE public.Alunos\n SET nome_aluno = '{p_nome}',\n cpf_aluno = '{p_cpf}',\n data_nasc_aluno = '{p_data_nasc}',\n telefone_aluno = '{p_telefone}'\n WHERE id_aluno = '{p_id_aluno}';\n \"\"\"\n\n conn = None\n updated_rows = 0\n\n try: \n params = config(filename=\".\\database.ini\")\n conn = psycopg2.connect(**params)\n\n cur = conn.cursor()\n\n cur.execute(sql)\n\n deleted_rows = cur.rowcount\n\n conn.commit()\n\n cur.close()\n return deleted_rows\n except(Exception, psycopg2.DatabaseError) as error:\n print(error)\n finally:\n if conn is not None:\n conn.close()", "def setup_prod():\n setup_general()", "def saveProgrammingValuesToDatabase(self):\n #print(\"save programming values\")\n #print(\"username: \", self.user.username)\n #self.user.data.printData()\n self.dbManager.setUserProgramData(self.user.username, self.user.data)" ]
[ "0.5921855", "0.5573658", "0.5570171", "0.5525268", "0.54616237", "0.54220945", "0.54185116", "0.53700054", "0.53599256", "0.5307463", "0.5303823", "0.52824324", "0.5279865", "0.5278441", "0.5268684", "0.52643067", "0.52426183", "0.5233388", "0.5212365", "0.5201543", "0.52003336", "0.51713276", "0.51557416", "0.5132978", "0.51238227", "0.511822", "0.5114239", "0.5105096", "0.5078419", "0.5077888" ]
0.65626353
0
Generates the penalty matrix associated with the B-spline basis `bspline` and the variation `variation`. If the chosen variation needs some parameters to be defined, they must be included in the kwargs. Returns np.ndarray of shape (`bspline.n_int` + `bspline.int_forw` + `bspline.int_back` + `bspline.deg`, `bspline.n_int` + `bspline.int_forw` + `bspline.int_back` + `bspline.deg`) The penalty matrix. Raises ValueError Variation type must be one of ("diff",)
def get_penalty_matrix(self, **kwargs) -> np.ndarray: if self.variation not in ("diff",): raise ValueError("Penalty matrix type not valid.") if self.variation == "diff": self.matrixD = self.get_diff_matrix(**kwargs) return self.matrixD.T @ self.matrixD
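The "diff" variation implemented above is the standard P-spline penalty: P = D_k^T D_k, where D_k is the k-th order finite-difference operator acting on the B-spline coefficients. A minimal self-contained sketch assuming only NumPy (the function name and the ord_d parameter are illustrative, not the record's API):

import numpy as np

def diff_penalty_matrix(n_basis, ord_d=2):
    # n_basis corresponds to n_int + int_forw + int_back + deg above;
    # ord_d is the order of the penalized finite differences.
    D = np.diff(np.eye(n_basis), n=ord_d, axis=0)  # shape (n_basis - ord_d, n_basis)
    return D.T @ D                                 # (n_basis, n_basis), symmetric PSD

# Example: 8 basis functions, second-order differences
P = diff_penalty_matrix(8)
assert P.shape == (8, 8)
assert np.allclose(P, P.T)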
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bspline_array(self, nspline=13, log=False, get_matrix=True):\n if get_matrix:\n bspl = utils.bspline_templates(wave=self.spec_wobs*1.e4,\n degree=3,\n df=nspline,\n log=log,\n get_matrix=get_matrix\n )\n bspl = bspl.T\n else:\n bspl = utils.bspline_templates(wave=self.spec_wobs*1.e4,\n degree=3,\n df=nspline,\n log=log,\n get_matrix=get_matrix\n )\n \n return bspl", "def loopy_belief_propagation(tests, groups,\n base_infection_rate,\n sensitivity, specificity,\n min_iterations, max_iterations,\n atol):\n n_groups, n_patients = groups.shape\n if np.size(groups) == 0:\n if np.size(base_infection_rate) == 1: # only one rate\n marginal = base_infection_rate * np.ones(n_patients)\n return marginal, 0\n elif np.size(base_infection_rate) == n_patients:\n return base_infection_rate, 0\n else:\n raise ValueError(\"Improper size for vector of base infection rates\")\n\n mu = -jax.scipy.special.logit(base_infection_rate)\n\n groups_size = np.sum(groups, axis=1)\n sensitivity = utils.select_from_sizes(sensitivity, groups_size)\n specificity = utils.select_from_sizes(specificity, groups_size)\n gamma0 = np.log(sensitivity + specificity - 1) - np.log(1 - sensitivity)\n gamma1 = np.log(sensitivity + specificity - 1) - np.log(sensitivity)\n gamma = tests * gamma1 + (1 - tests) * gamma0\n test_sign = 1 - 2 * tests[:, np.newaxis]\n\n # Initialization\n alphabeta = np.zeros((2, n_groups, n_patients))\n alpha_beta_iteration = [alphabeta, 0]\n\n # return marginal from alphabeta\n def marginal_from_alphabeta(alphabeta):\n beta_bar = np.sum(alphabeta[1, :, :], axis=0)\n return jax.scipy.special.expit(-beta_bar - mu)\n\n # lbp loop\n def lbp_loop(_, alphabeta):\n alpha = alphabeta[0, :, :]\n beta = alphabeta[1, :, :]\n\n # update alpha\n beta_bar = np.sum(beta, axis=0)\n alpha = jax.nn.log_sigmoid(beta_bar - beta + mu)\n alpha *= groups\n\n # update beta\n alpha_bar = np.sum(alpha, axis=1, keepdims=True)\n beta = np.log1p(test_sign *\n np.exp(-alpha + alpha_bar + gamma[:, np.newaxis]))\n beta *= groups\n return np.stack((alpha, beta), axis=0)\n\n def cond_fun(alpha_beta_iteration):\n alphabeta, iteration = alpha_beta_iteration\n marginal = marginal_from_alphabeta(alphabeta)\n marginal_plus_one_iteration = marginal_from_alphabeta(\n lbp_loop(0, alphabeta))\n converged = np.allclose(marginal, marginal_plus_one_iteration, atol=atol)\n return (not converged) and (iteration < max_iterations)\n\n def body_fun(alpha_beta_iteration):\n alphabeta, iteration = alpha_beta_iteration\n alphabeta = jax.lax.fori_loop(0, min_iterations, lbp_loop, alphabeta)\n iteration += min_iterations\n return [alphabeta, iteration]\n\n # Run LBP while loop\n while cond_fun(alpha_beta_iteration):\n alpha_beta_iteration = body_fun(alpha_beta_iteration)\n\n alphabeta, _ = alpha_beta_iteration\n\n # Compute two consecutive marginals\n marginal = marginal_from_alphabeta(alphabeta)\n marginal_plus_one_iteration = marginal_from_alphabeta(lbp_loop(0, alphabeta))\n\n return marginal, np.amax(np.abs(marginal - marginal_plus_one_iteration))", "def weights_midas_beta(\n th:np.array, bt:np.array,\n Spc:Dict) -> np.array:\n\n dict_list = create_time_dicts(Spc)\n\n if Spc['TwoParam']:\n th1=th[0:Spc['nbvar']]\n th2=th[Spc['nbvar']:2*Spc['nbvar']]\n else:\n th2=th\n\n for time_period in dict_list:\n for i in time_period['range']:\n if Spc['TwoParam']:\n if Spc['almon']:\n W0=np.exp(th1[i]*time_period['k'] + th2[i]*np.square(time_period['k'])) \\\n / np.sum(np.exp(th1[i]*time_period['k'] + th2[i]*np.square(time_period['k'])))\n elif Spc['betaFc']:\n 
W0=np.exp(th1[i]*time_period['k'] + th2[i]*np.square(time_period['k'])) \\\n / np.sum(np.exp(th1[i]*time_period['k'] + th2[i]*np.square(time_period['k'])))\n elif Spc['Averaging']:\n W0=time_period['one']/time_period['kk']\n elif Spc['betaFc']:\n W0=np.power(th2[i]*(1-time_period['w']),(th2[i]-1)) \\\n / sum(np.power(th2[i]*(1-time_period['w']),(th2[i]-1)))\n elif Spc['betaFc_special']:\n W0=th2[i]*time_period['w']*np.power((1-time_period['w']),(th2[i]-1))\\\n / sum(th2[i]*time_period['w']*np.power((1-time_period['w']),(th2[i]-1)))\n if i==0:\n W = W0*bt[i]\n ww = W0\n else:\n W = np.r_[W,W0*bt[i]]\n ww = np.r_[ww,W0]\n\n return W.T", "def beta_gen_slope(p):\n cardi = 0.005\n return np.array( [0]*int(p-int(cardi*p)) + list(np.arange(1, int(cardi*p)+1, 1)) )", "def run_kb_variation(self, ctx, params):\n # ctx is the context object\n # return variables are: output\n #BEGIN run_kb_variation\n\n print (params)\n \n #Download reads as fastq and assembly as fasta\n futils = file_utils(self.callback_url, self.ws_url)\n \n \n reads = futils.fetch_reads_from_reference(params['fastq_ref'])\n assembly = futils.fetch_fasta_from_genome_or_assembly (params['genome_or_assembly_ref'])\n\n print (reads)\n print (assembly)\n \n #Get file names of reads and assembly\n name_map = futils.get_object_names([params['fastq_ref'], \n params['genome_or_assembly_ref']\n ])\n\n genome_or_assembly_name = name_map[params['genome_or_assembly_ref']]\n reads_name = name_map[params['fastq_ref']]\n\n print (reads)\n print (assembly)\n print (name_map)\n\n \n #Run Snippy analysis\n snippy = SnippyUtils(self.scratch)\n snippy_vcf_file_path = snippy.run_snippy_command_paired_end(assembly, reads, reads_name, genome_or_assembly_name)\n\n \n\n #Save variation object in workspace using VariationUtil API\n\n vu = VariationUtil(self.callback_url)\n\n VariationUtilParams = {\n 'workspace_name': params['workspace_name'],\n 'genome_or_assembly_ref': params['genome_or_assembly_ref'],\n 'vcf_staging_file_path': snippy_vcf_file_path,\n 'sample_attribute_ref': params['sample_attribute_ref'],\n 'variation_object_name': params['variation_object_name']\n }\n\n vu.save_variation_from_vcf(VariationUtilParams)\n\n\n #TODO: Remove hard coded stuff from here\n template_dir = \"/kb/module/lib/kb_variation/Utils/report_template\"\n #data_dir = \"/kb/module/lib/kb_variation/Utils/report/data\"\n\n\n report_dir = self.scratch + \"/report_dir\"\n\n\n\n destination_report = shutil.copytree(template_dir, report_dir)\n #destination_report_data = shutil.copytree(data_dir, report_data_dir)\n\n \n\n assembly_file_path = assembly['path']\n ig = igvutils (report_dir)\n filenames = ig.prepare_data_igv(assembly_file_path, snippy_vcf_file_path)\n\n\n #Create html report\n name_dict = dict()\n name_dict['GENOME'] = \"data/\" + filenames['assembly_name']\n name_dict['GENOME_NAME'] = filenames['assembly_name']\n name_dict['SNP_FILE'] = \"data/\" + filenames['vcf_gz']\n name_dict['SNP_INDEX_FILE'] = \"data/\" + filenames['vcf_gz_index']\n name_dict['SNP_NAME'] = reads_name #TODO: Fix this for population\n\n hu = htmlreportutils(self.callback_url, params['workspace_name'], report_dir)\n\n output = hu.create_html_report(report_dir, name_dict)\n\n \n ##report = KBaseReport(self.callback_url)\n #report_info = report.create({'report': {'objects_created':[],\n # 'text_message': \"xmx\"},\n # 'workspace_name': params['workspace_name']})\n #output = {\n # 'report_name': report_info['name'],\n # 'report_ref': report_info['ref'],\n #}\n #END run_kb_variation\n\n # At some point 
might do deeper type checking...\n if not isinstance(output, dict):\n raise ValueError('Method run_kb_variation return value ' +\n 'output is not type dict as required.')\n # return the results\n return [output]", "def method_2d(knots,y_n,num):\n cv_iter = 10 # number of iteration for cross-validation \n GSV = np.zeros((cv_iter,cv_iter))\n# tr = np.zeros((cv_iter,cv_iter))\n# fun =np.zeros((cv_iter,cv_iter))\n lam_x = np.linspace(0,0.2,cv_iter)\n lam_y = np.linspace(0,0.2,cv_iter)\n num_knots = len(knots)\n linear_knots = knots[1:num_knots-1]\n num_knots = num_knots-4\n znam = np.zeros((num_knots))\n basis = np.zeros((num,num_knots))\n basis_1 = np.zeros((num,num_knots))\n basis_deriative = np.zeros((num,num_knots))\n basis_deriative_1 = np.zeros((num,num_knots))\n S = np.zeros((num_knots,num_knots,num))\n vs = BsplineVectorSpace(2, knots)\n vs_1 = BsplineVectorSpace(1, linear_knots)\n I_i = np.eye(num_knots)\n for i in xrange(0,num_knots):\n basis[:,i] = vs.basis_der(i,0)(np.linspace(0,1,num))\n basis_deriative[:,i] = vs.basis_der(i,1)(np.linspace(0,1,num))/num\n basis_1[:,i] = vs_1.basis_der(i,0)(np.linspace(0,1,num))\n basis_deriative_1[:,i] = vs_1.basis_der(i,1)(np.linspace(0,1,num))/num\n B = abs(basis_deriative-basis_1)\n S = np.zeros((num_knots,num_knots,num))\n k = np.zeros((num_knots,num_knots,num))\n for i in xrange(num_knots):\n for j in xrange(num_knots):\n S[i,j,:] = B[:,i]*B[:,j]\n k[i,j,:] =basis_deriative_1[:,i] * basis_deriative_1[:,j]\n S_int = np.zeros((num_knots,num_knots))\n k_int = np.zeros((num_knots,num_knots))\n for i in xrange(num_knots):\n for j in xrange(num_knots):\n S_int[i,j] = integrate.trapz(S[i,j,:])\n k_int[i,j] = integrate.trapz(k[i,j,:])\n basis_product = np.kron(basis,basis)\n S_x = np.kron(S_int,I_i)\n S_y = np.kron(I_i,S_int)\n K_x = np.kron(k_int,I_i)\n K_y = np.kron(I_i,k_int)\n for i in xrange(cv_iter):\n for j in xrange(cv_iter):\n influence_matrix = np.dot(np.dot(basis_product,(np.linalg.inv(np.dot(np.transpose(\n basis_product),basis_product)+lam_x[i]*S_x+lam_y[j]*S_y+lam_x[i]* K_x+lam_y[j]*K_y))),np.transpose(basis_product))\n for k in xrange(num_knots):\n znam[k] =(1-influence_matrix[k,k])**2\n tr = np.sum(znam)\n fun = np.sum((y_n-np.dot(influence_matrix,y_n))**2)\n GSV[i,j] =fun/(num*tr)\n print i,j\n a,b = np.unravel_index(GSV.argmin(), GSV.shape)\n# a = np.argmin(np.argmin(GSV,axis = 0))\n# b = np.argmin(np.argmin(GSV,axis = 1))\n lamb_x = lam_x[a]\n lamb_y = lam_y[b]\n print lamb_x,lamb_y\n model_fit = np.dot(np.dot(np.dot(basis_product,(np.linalg.inv(np.dot(np.transpose(\n basis_product),basis_product)+lamb_x*S_x+lamb_y*S_y+lamb_x* K_x+lamb_y*K_y))),np.transpose(basis_product)),y_n)\n return model_fit,GSV", "def PGD(Params, relaxationVars, fixedBs, fixedTs, data):\n Tol = Params[\"tol\"]\n TolCD = Params[\"tolCD\"]\n Lambda0 = Params[\"Lambda\"]\n Lambda1 = Params[\"alpha\"] * Lambda0\n M = Params[\"M\"]\n y = data.ycentered # data.y - data.ybar\n\n Bindices = relaxationVars.BActive.copy() # list\n Tindices = relaxationVars.TActive.copy() # list of tuples (i,j)\n currentB, currentT = relaxationVars.initialSol.ToArray(Bindices, Tindices)\n fixedB = fixedBs.copy() # Dict. key = index, value = 0 or 1 (no index if not fixed)\n fixedT = fixedTs.copy() # Dict. 
key = (i,j), value = 0 or 1 (no index if not fixed)\n DualInitial = relaxationVars.useDual\n\n # Store the index mappings\n Bmap = {} # Bmap[i] = index of i in currentB or XB\n for i in range(len(Bindices)):\n Bmap[Bindices[i]] = i\n\n Tmap = {} # Tmap[(i,j)] = index of interaction in XT and currentT\n for i in range(len(Tindices)):\n c1, c2 = Tindices[i]\n Tmap[(c1, c2)] = i\n Tmap[(c2, c1)] = i\n\n # Next: Some sanity checks (those can be removed if we're carful about the\n # inputs)\n\n # Make sure if B_i is fixed to 0 then all T_{ij}'s (in Tindices) are also\n # fixed to zero\n for i, val in fixedB.items():\n if val == 0:\n for l, j in Tmap:\n if l < j and (l == i or j == i):\n fixedT[(l, j)] = 0\n\n # Make sure if T_{ij} is fixed to 1 then both B_i and B_j are fixed to 1\n for key, val in fixedT.items():\n if val == 1:\n i, j = key\n fixedB[i] = 1\n fixedB[j] = 1\n\n # Delete from Bindices and Tindices all the indices s.t. z_i = 0 / z_{ij}\n # = 0\n Bzeros = []\n for i, val in fixedB.items():\n if val == 0:\n Bzeros.append(Bmap[i])\n for i in sorted(Bzeros, reverse=True):\n del Bindices[i]\n currentB = np.delete(currentB, Bzeros)\n\n Tzeros = []\n for key, val in fixedT.items():\n if val == 0:\n Tzeros.append(Tmap[key])\n for i in sorted(Tzeros, reverse=True):\n del Tindices[i]\n currentT = np.delete(currentT, Tzeros)\n\n # Update the index mappings\n Bmap = {} # Bmap[i] = index of i in currentB or XB\n for i in range(len(Bindices)):\n Bmap[Bindices[i]] = i\n\n Tmap = {} # Tmap[(i,j)] = index of interaction in XT and currentT\n for i in range(len(Tindices)):\n c1, c2 = Tindices[i]\n Tmap[(c1, c2)] = i\n Tmap[(c2, c1)] = i\n\n # End of sanity checks\n\n # Retrive the matrices of the optimization variables\n # Later: We can store the centered columns (but this will require twice\n # the memory)\n XB, XT = data.Retrieve(Bindices, Tindices)\n XBMean = XB.mean(axis=0)\n XB = XB - XBMean\n XTMean = XT.mean(axis=0)\n XT = XT - XTMean\n\n Bfree = [i for i in Bindices if i not in fixedB]\n Tfree = [(i, j) for i, j in Tmap if i < j and (i, j) not in fixedT]\n TfreeIndices = [Tmap[(i, j)]\n for i, j in Tmap if i < j and (i, j) not in fixedT]\n lenFixedB = len(Bindices) - len(Bfree)\n lenFixedT = len([key for key in fixedT if fixedT[key] == 1])\n\n # (Dual) Block CD Variables\n u = defaultdict(float)\n w = defaultdict(dict)\n if not DualInitial:\n for i in Bindices:\n u[i] = 0\n for pair in Tmap:\n i, j = pair\n w[i][j] = 0\n else:\n for i in Bindices:\n if i in relaxationVars.u and i not in fixedB:\n u[i] = relaxationVars.u[i]\n else:\n u[i] = 0\n for i, j in Tmap:\n if j in relaxationVars.w[i] and (min(i, j), max(\n i, j)) not in fixedT and i not in fixedB and j not in fixedB:\n w[i][j] = relaxationVars.w[i][j]\n else:\n # Important: we need w[i][j] = 0 if T_{ij} if fixed (this is\n # due to the thresholding function)\n w[i][j] = 0\n\n sortedIndices = {i: sorted(w[i]) for i in w}\n sortedIndices = defaultdict(list, sortedIndices)\n\n # Prepare all the fixed matrices/vectors required for grad evaluation\n # later.\n XBty = np.dot(XB.T, y)\n XBtXB = np.dot(XB.T, XB)\n XTty = np.dot(XT.T, y)\n XTtXT = np.dot(XT.T, XT)\n XBtXT = np.dot(XB.T, XT)\n\n # Compute the lipschitz constant of the grad.\n Xfull = np.hstack((XB, XT))\n if Xfull.shape[1] != 0:\n eigvals, v = np.linalg.eig(np.dot(Xfull.T, Xfull))\n L = np.max(np.real(eigvals))\n else:\n L = 1 # any value here should suffice - it's not used.\n\n # Compute the lipschitz constants for BCD.\n LCD = {}\n for i in Bindices:\n LCD[i] = (len(w[i]) + 
1) * ((Lambda0**2) / (L * M**2))\n\n # Define the thresholding constants\n frac = Lambda0 / (M * L)\n Mpfrac = M + frac\n frac1 = Lambda1 / (M * L)\n Mpfrac1 = M + frac1\n fracsqL = frac * frac * L\n LambdaovM = Lambda0 / M\n Lambda1ovM = Lambda1 / M\n Lambda1ovLambda0 = Lambda1 / Lambda0\n\n start = time.time()\n\n oldObj = math.inf\n for it in range(5000):\n grad_B = - XBty + np.dot(XBtXB, currentB) + np.dot(XBtXT, currentT)\n grad_T = - XTty + np.dot(XTtXT, currentT) + np.dot(XBtXT.T, currentB)\n Bstar = currentB - grad_B / L\n Tstar = currentT - grad_T / L\n # Iterate over the blocks, running dual BCD.\n # We employ dual warm starts by using the same (u,w) across the PGD updates.\n CDPrevObj = -math.inf\n LCDCurrent = copy(LCD)\n useZeroSuffCondition = True\n if useZeroSuffCondition:\n # Perform proximal screening below.\n zeroGroups = set()\n for i in Bfree:\n zeroSufficient = False\n cumsum = 0\n for j in w[i]:\n thrshld = max(\n (abs(Tstar[Tmap[(i, j)]]) / frac - Lambda1ovLambda0), 0)\n # Do feature level screening below.\n if thrshld == 0:\n # The initialization below ensures that \\theta_{ij} is\n # never updated by BCA.\n w[i][j] = 0\n w[j][i] = 0\n else:\n cumsum += thrshld\n\n if cumsum <= 1 - abs(Bstar[Bmap[i]]) / frac:\n zeroSufficient = True\n if zeroSufficient:\n u[i] = Bstar[Bmap[i]] / frac\n for j in w[i]:\n if abs(Tstar[Tmap[(i, j)]]) > frac1:\n w[i][j] = Tstar[Tmap[(\n i, j)]] / frac - Lambda1ovLambda0 * np.sign(Tstar[Tmap[(i, j)]])\n else:\n w[i][j] = 0\n w[j][i] = 0\n # Not nec. but can improve speed.\n LCDCurrent[j] -= (Lambda0**2) / (L * M**2)\n zeroGroups.add(i)\n\n BfreeMinusZeroGroups = [i for i in Bfree if i not in zeroGroups]\n CDObjConst = 0\n '''\n for i in zeroGroups:\n CDObjConst += q(u[i], Bstar[Bmap[i]], M, Lambda0, L,frac)\n for j in w[i]:\n if i < j:\n # T(wij, wji, thetaij, M, Lambda0, L, frac, frac1, Mpfrac1, LambdaovM, Lambda1ovM)\n CDObjConst += T(w[i][j], w[j][i], Tstar[Tmap[(i,j)]], M, Lambda0, L,frac, frac1, Mpfrac1, LambdaovM, Lambda1ovM)\n '''\n ####\n else:\n zeroGroups = set()\n CDObjConst = 0\n BfreeMinusZeroGroups = Bfree\n # To Turn the part above off, comment it out and set the following:\n # zeroGroups = set()\n # CDObjConst = 0\n # BfreeMinusZeroGroups = Bfree\n\n for innerit in range(10000):\n # for i in Bfree:\n for i in BfreeMinusZeroGroups:\n # First, Calculate utilde and wtilde for ith block\n utilde = u[i] + delq(u[i],\n Bstar[Bmap[i]],\n M,\n Lambda0,\n L,\n frac,\n Mpfrac,\n fracsqL,\n LambdaovM) / LCDCurrent[i]\n\n #wtilde = {}\n # for j in w[i]:\n # if B_j is fixed to 1, then we already set w[j][i] = 0\n # wtilde[j] = w[i][j] + delT(w[i][j], w[j][i], Tstar[Tmap[(i,j)]], M, Lambda0, L,frac, Mpfrac, fracsqL, LambdaovM)/LCD[i]\n sortedIndicesi = sortedIndices[i]\n # delT(wij, wji, thetaij, M, Lambda0, L, frac, frac1, Mpfrac1, LambdaovM)\n wtilde = [w[i][j] + delT(w[i][j],\n w[j][i],\n Tstar[Tmap[(i,\n j)]],\n M,\n Lambda0,\n L,\n frac,\n frac1,\n Mpfrac1,\n LambdaovM) / LCDCurrent[i] for j in sortedIndicesi]\n\n x = np.empty(shape=len(wtilde) + 1)\n # Solve the l1 projection problem.\n x[0] = utilde\n x[1:] = np.array(wtilde)\n projection = project(x)\n # Update the solution.\n u[i] = projection[0]\n # for j in range(len(w[i])):\n # w[i][sortedIndicesi[j]] = projection[j+1] ## +1 since u[i] is\n # first\n for counter, j in enumerate(sortedIndicesi):\n w[i][j] = projection[counter + 1]\n # Calculate the current objective\n CDObj = CDObjConst # 0\n for i in BfreeMinusZeroGroups: # Bfree:\n CDObj += q(u[i], Bstar[Bmap[i]], M, 
Lambda0, L, frac)\n for j in w[i]:\n if i < j:\n # T(wij, wji, thetaij, M, Lambda0, L, frac, frac1, Mpfrac1, LambdaovM, Lambda1ovM)\n CDObj += T(w[i][j], w[j][i], Tstar[Tmap[(i, j)]], M,\n Lambda0, L, frac, frac1, Mpfrac1, LambdaovM, Lambda1ovM)\n #Params[\"print\"](\"Inner obj: \", CDObj)\n if terminate(CDPrevObj, CDObj, TolCD):\n break\n CDPrevObj = CDObj\n\n # Get back the primal solution.\n for i in range(len(Bindices)):\n # if Bindices[i] is fixed to 1, then u[Bindices[i]] = 0 and the\n # update below will lead to currentB[i] = Bstar[i] (or +- M)\n if Bindices[i] not in zeroGroups:\n # assuming Bindices is sorted\n currentB[i] = dualtoprimalu(\n u[Bindices[i]], Bstar[i], M, Lambda0, L, frac)\n else:\n currentB[i] = 0\n\n for i, j in Tmap:\n # if i or j is fixed, the corresponding w[i][j] will be zero, which\n # leads to the correct update.\n if i < j:\n if (i, j) in Tfree:\n # dualtoprimalw(wij, wji, thetaij, M, Lambda0, L, frac, frac1, Mpfrac1)\n if i in zeroGroups or j in zeroGroups:\n currentT[Tmap[(i, j)]] = 0\n else:\n currentT[Tmap[(i, j)]] = dualtoprimalw(\n w[i][j], w[j][i], Tstar[Tmap[(i, j)]], M, Lambda0, L, frac, frac1, Mpfrac1)\n else: # careful, this is the case when no thresholding should be applied\n coefficient = Tstar[Tmap[(i, j)]]\n if np.abs(coefficient) <= M:\n currentT[Tmap[(i, j)]] = coefficient\n else:\n currentT[Tmap[(i, j)]] = M * np.sign(coefficient)\n\n r = y - np.dot(XB, currentB) - np.dot(XT, currentT)\n\n maxterm = 0\n for i in range(len(currentB)):\n if Bindices[i] not in fixedB:\n maxtemp = np.abs(currentB[i])\n for j in w[Bindices[i]]:\n maxtemp = max(maxtemp, np.abs(\n currentT[Tmap[(Bindices[i], j)]]))\n maxterm += maxtemp\n l1norm = np.sum(np.abs(currentT[TfreeIndices]))\n # IMPORTANT: Avoid using lenFixed and lenFixedT here.....!!!!!! ####\n currentobjective = 0.5 * np.dot(r, r) + Lambda0 * (\n lenFixedB + lenFixedT) + (Lambda0 / M) * maxterm + (Lambda1 / M) * l1norm\n\n if currentobjective > oldObj:\n Params[\"print\"](\"Objective Increased!!!\")\n\n if terminate(oldObj, currentobjective, Tol):\n break\n\n oldObj = currentobjective\n Params[\"print\"](\"Iteration :\", it, \". 
Objective: \", currentobjective)\n\n end = time.time()\n Params[\"print\"](\"Time: \", end - start, \" seconds.\")\n\n # Check if any small values should be zero.\n # Start with more aggressive checks first.\n Trunc = False\n for epsilon in [0.01, 1e-3, 1e-4, 1e-5, 1e-6]:\n currentBtrunc = np.copy(currentB)\n currentTtrunc = np.copy(currentT)\n currentBSetToZero = np.nonzero(np.abs(currentB) < epsilon)[0]\n currentBtrunc[currentBSetToZero] = 0\n currentBSetToZeroPSet = set(currentBSetToZero)\n for (i, j) in Tmap:\n if Bmap[i] in currentBSetToZeroPSet or Bmap[j] in currentBSetToZeroPSet:\n currentTtrunc[Tmap[(i, j)]] = 0\n\n currentTtrunc[np.abs(currentT) < epsilon] = 0\n rtrunc = y - np.dot(XB, currentBtrunc) - np.dot(XT, currentTtrunc)\n maxterm = 0\n for i in range(len(currentBtrunc)):\n if Bindices[i] not in fixedB:\n maxtemp = np.abs(currentBtrunc[i])\n for j in w[Bindices[i]]:\n maxtemp = max(maxtemp, np.abs(\n currentTtrunc[Tmap[(Bindices[i], j)]]))\n maxterm += maxtemp\n l1norm = np.sum(np.abs(currentTtrunc[TfreeIndices]))\n objectivetrunc = 0.5 * np.dot(rtrunc, rtrunc) + Lambda0 * (\n lenFixedB + lenFixedT) + (Lambda0 / M) * maxterm + (Lambda1 / M) * l1norm\n\n Params[\"print\"](\n \"eps: \",\n epsilon,\n \" objectivetrunc: \",\n objectivetrunc,\n \" currentobjective: \",\n currentobjective)\n # 1.01 might be beneficial in some extreme cases where supp becomes\n # very large (but might also cause descent problems)\n if objectivetrunc <= currentobjective:\n '''\n currentB = currentBtrunc\n currentT = currentTtrunc\n r = rtrunc\n currentobjective = objectivetrunc\n '''\n Params[\"print\"](\"###CHANGE###\", \"eps: \", epsilon)\n Params[\"print\"](\"Final Objective :\", objectivetrunc)\n Trunc = True\n break\n\n integral = True\n\n for i in Bfree:\n zi = np.abs(currentB[Bmap[i]]) / M\n if zi > 0 and zi < 0.999:\n integral = False\n\n for i in TfreeIndices:\n zi = np.abs(currentT[i]) / M\n if zi > 0 and zi < 0.999:\n integral = False\n\n Bnnz = {key: currentB[Bmap[key]]\n for key in Bmap if currentB[Bmap[key]] != 0}\n Tnnz = {(i, j): currentT[Tmap[(i, j)]]\n for i, j in Tmap if i < j and currentT[Tmap[(i, j)]] != 0}\n intercept = data.ybar - np.dot(XBMean, currentB) - np.dot(XTMean, currentT)\n sol = Solution(Bnnz, Tnnz, intercept)\n\n if Trunc:\n BnnzTrunc = {key: currentBtrunc[Bmap[key]]\n for key in Bmap if currentBtrunc[Bmap[key]] != 0}\n TnnzTrunc = {(i, j): currentTtrunc[Tmap[(\n i, j)]] for i, j in Tmap if i < j and currentTtrunc[Tmap[(i, j)]] != 0}\n interceptTrunc = data.ybar - \\\n np.dot(XBMean, currentBtrunc) - np.dot(XTMean, currentTtrunc)\n solTrunc = Solution(BnnzTrunc, TnnzTrunc, interceptTrunc)\n else:\n BnnzTrunc = Bnnz\n TnnzTrunc = Tnnz\n interceptTrunc = intercept\n solTrunc = sol\n\n return (sol, solTrunc, currentobjective, integral, r, u, w)", "def cspline_params(self):\n b = np.zeros(self.n)\n c = np.zeros(self.n-1)\n d = np.zeros(self.n-1)\n B = np.zeros(self.n)\n Q = np.ones(self.n-1)\n D = 2 * np.ones(self.n)\n dx = np.zeros(self.n-1)\n p = np.zeros(self.n-1)\n\n # Calculate x-interval and slope\n for j in range(self.n-1):\n dx[j] = self.x[j+1] - self.x[j]\n p[j] = (self.y[j+1] - self.y[j]) / dx[j]\n\n # Fill B\n B[0] = 3 * p[0]\n for i in range(self.n-2):\n B[i+1] = 3 * (p[i] + p[i+1] * dx[i] / dx[i+1])\n B[-1] = 3 * p[-2]\n \n # Fill D\n for i in range(self.n-2):\n D[i+1] = 2 * dx[i] / dx[i+1] + 2\n\n # Fill Q\n for i in range(self.n-2):\n Q[i+1] = dx[i] / dx[i+1]\n\n # Gauss elimination\n for i in range(1, self.n):\n D[i] = D[i] - Q[i-1] / D[i-1]\n B[i] = 
B[i] - B[i-1] / D[i-1]\n\n # Back-substitution\n b[-1] = B[-1] / D[-1]\n list = range(self.n-1)\n for i in list[::-1]:\n b[i] = (B[i] - Q[i] * b[i+1]) / D[i]\n\n # Calculate c and d\n for i in range(self.n-1):\n c[i] = (3 * p[i] - 2 * b[i] - b[i+1]) / dx[i]\n d[i] = (b[i] + b[i+1] - 2 * p[i]) / dx[i]\n c[-1] = -3 * d[-1] * dx[-1]\n\n return b, c, d", "def test_repeated_control_point_bspline_third_derivative() -> None:\n # a b-spline with a repeated control point and length=4.0\n control_points = np.array(\n [\n (0.0, 0.0, 0.0),\n (0.0, 0.0, 0.0),\n (2.0, 0.0, 0.0),\n (2.0, 1.0, 0.0),\n (2.0, 1.0, 1.0),\n ]\n )\n curve = BSplineCurve(control_points)\n nominal_third_derivatives = np.array(\n # computed using Mathematica's BSplineFunction over\n # np.linspace(0.0, 4.0, num=17) (inputs scaled by 1/3)\n [\n [0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0],\n [12.0, -12.0, 6.0],\n [12.0, -12.0, 6.0],\n [12.0, -12.0, 6.0],\n [12.0, -12.0, 6.0],\n [12.0, -12.0, 6.0],\n [12.0, -12.0, 6.0],\n [12.0, -12.0, 6.0],\n [12.0, -12.0, 6.0],\n [12.0, -12.0, 6.0],\n [12.0, -12.0, 6.0],\n [12.0, -12.0, 6.0],\n [12.0, -12.0, 6.0],\n [12.0, -12.0, 6.0],\n ]\n )\n for index, parameter in np.ndenumerate(np.linspace(0.0, 4.0, num=17)):\n np.testing.assert_allclose(\n curve.third_derivative_at(parameter),\n nominal_third_derivatives[index],\n err_msg=(\n \"Fails to find the third derivative along a b-spline \"\n \"with a repeated control point.\"\n ),\n )", "def facility_weight(runs, weight, hbv_pars, regions_perturbed):\n \n from numpy import zeros\n \n if runs==1:\n fac_cov_weighted=zeros(len(hbv_pars))\n com_cov_weighted=zeros(len(hbv_pars))\n \n for idx,(bd,facility) in enumerate(zip(hbv_pars[\"birth_dose\"], hbv_pars[\"Facility\"])):\n if bd <= 0.5:\n fac_cov_weighted[idx]=bd+((bd*(weight-1)/(weight+1))*(2*(1-facility)))\n com_cov_weighted[idx]=bd-((bd*(weight-1)/(weight+1))*(2*facility))\n elif bd >0.5:\n fac_cov_weighted[idx]=bd+(((1-bd)*(weight-1)/(weight+1))*(2*(1-facility)))\n com_cov_weighted[idx]=bd-(((1-bd)*(weight-1)/(weight+1))*(2*facility))\n \n elif runs >1:\n fac_cov_weighted=zeros((len(hbv_pars),runs,1))\n com_cov_weighted=zeros((len(hbv_pars),runs,1))\n \n birth_dose=regions_perturbed[:,1,:].reshape((len(hbv_pars),runs,1))\n facility=regions_perturbed[:,4,:].reshape((len(hbv_pars),runs,1))\n \n for idx,i in enumerate(birth_dose[:,:,:]):\n for idy,bd in enumerate(i):\n if bd <= 0.5:\n fac_cov_weighted[idx,idy,0]=bd+((bd*(weight-1)/(weight+1))*(2*(1-facility[idx,idy,0])))\n com_cov_weighted[idx,idy,0]=bd-((bd*(weight-1)/(weight+1))*(2*facility[idx,idy,0]))\n elif bd > 0.5:\n fac_cov_weighted[idx,idy,0]=bd+(((1-bd)*(weight-1)/(weight+1))*(2*(1-facility[idx,idy,0])))\n com_cov_weighted[idx,idy,0]=bd-(((1-bd)*(weight-1)/(weight+1))*(2*facility[idx,idy,0]))\n\n return fac_cov_weighted, com_cov_weighted", "def gen_params(no_cultures):\n # Plate level\n kn = 0.1 # Nutrient diffusion\n ks = 0.1 # Signal diffusion\n b = 0.05 # Signal on cells effect constant\n a = 0.05 # Signal secretion constant\n # Culture level\n # Growth rate constant\n r_mean = 1.0\n r_var = 1.0\n r_params = [max(0.0, gauss(r_mean, r_var)) for i in range(no_cultures)]\n params = np.array([kn, ks, b, a] + r_params)\n return params", "def update_parameters_with_adam(parameters, grads, v, s, t, lambd, learning_rate, mini_batch_size, beta1, beta2, epsilon):\n\n L = len(parameters) // 2 \n v_corrected = {}\n s_corrected = {}\n\n for l in range(L):\n v[\"dW\" + str(l + 1)] = beta1 * v[\"dW\" + str(l + 1)] + (1 - 
beta1) * grads['dW' + str(l + 1)]\n v[\"db\" + str(l + 1)] = beta1 * v[\"db\" + str(l + 1)] + (1 - beta1) * grads['db' + str(l + 1)]\n v_corrected[\"dW\" + str(l + 1)] = v[\"dW\" + str(l + 1)] / (1 - np.power(beta1, t))\n v_corrected[\"db\" + str(l + 1)] = v[\"db\" + str(l + 1)] / (1 - np.power(beta1, t))\n\n s[\"dW\" + str(l + 1)] = beta2 * s[\"dW\" + str(l + 1)] + (1 - beta2) * np.power(grads['dW' + str(l + 1)], 2)\n s[\"db\" + str(l + 1)] = beta2 * s[\"db\" + str(l + 1)] + (1 - beta2) * np.power(grads['db' + str(l + 1)], 2)\n s_corrected[\"dW\" + str(l + 1)] = s[\"dW\" + str(l + 1)] / (1 - np.power(beta2, t))\n s_corrected[\"db\" + str(l + 1)] = s[\"db\" + str(l + 1)] / (1 - np.power(beta2, t))\n parameters[\"W\" + str(l + 1)] = (1-learning_rate*(lambd/mini_batch_size))*parameters[\"W\" + str(l + 1)] \n parameters[\"W\" + str(l + 1)] = parameters[\"W\" + str(l + 1)] - learning_rate * v_corrected[\"dW\" + str(l + 1)] / np.sqrt(s_corrected[\"dW\" + str(l + 1)] + epsilon)\n parameters[\"b\" + str(l + 1)] = parameters[\"b\" + str(l + 1)] - learning_rate * v_corrected[\"db\" + str(l + 1)] / np.sqrt(s_corrected[\"db\" + str(l + 1)] + epsilon)\n\n\n return parameters, v, s", "def test_repeated_control_point_bspline_second_derivative() -> None:\n # a b-spline with a repeated control point and length=4.0\n control_points = np.array(\n [\n (0.0, 0.0, 0.0),\n (0.0, 0.0, 0.0),\n (2.0, 0.0, 0.0),\n (2.0, 1.0, 0.0),\n (2.0, 1.0, 1.0),\n ]\n )\n curve = BSplineCurve(control_points)\n nominal_second_derivatives = np.array(\n # computed using Mathematica's BSplineFunction over\n # np.linspace(0.0, 3.0, num=13) (inputs scaled by 1/3)\n [\n [0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0],\n [0.0, 0.0, 0.0],\n [-12.0, 6.0, 0.0],\n [-11.0, 5.0, 0.5],\n [-10.0, 4.0, 1.0],\n [-9.0, 3.0, 1.5],\n [-8.0, 2.0, 2.0],\n [-7.0, 1.0, 2.5],\n [-6.0, 0.0, 3.0],\n [-5.0, -1.0, 3.5],\n [-4.0, -2.0, 4.0],\n [-3.0, -3.0, 4.5],\n [-2.0, -4.0, 5.0],\n [-1.0, -5.0, 5.5],\n [0.0, -6.0, 6.0],\n ]\n )\n for index, parameter in np.ndenumerate(np.linspace(0.0, 4.0, num=17)):\n np.testing.assert_allclose(\n curve.second_derivative_at(parameter),\n nominal_second_derivatives[index],\n err_msg=(\n \"Fails to find the second_derivative along a b-spline \"\n \"with a repeated control point.\"\n ),\n )", "def mmd_gradient_penalty(self, x, x_gen, s_x, s_gen, batch_size, mode='fixed_g'):\r\n with tf.name_scope('gradient_penalty'):\r\n uni = tf.random_uniform(\r\n shape=[batch_size, 1, 1, 1], # [batch_size, channels, height, width]\r\n minval=0.0, maxval=1.0,\r\n name='uniform')\r\n x_hat = tf.identity(\r\n tf.add(tf.multiply(x, uni), tf.multiply(x_gen, tf.subtract(1.0, uni))),\r\n name='x_hat')\r\n s_x_hat = self.Dis(x_hat, is_training=False)\r\n # get witness function w.r.t. 
x, x_gen\r\n dist_zx = get_squared_dist(s_x_hat['x'], s_x, mode='xy', name='dist_zx', do_summary=self.do_summary)\r\n dist_zy = get_squared_dist(s_x_hat['x'], s_gen, mode='xy', name='dist_zy', do_summary=self.do_summary)\r\n if mode == 'fixed_g_gp':\r\n witness = witness_mix_g(\r\n dist_zx, dist_zy, sigma=[1.0, np.sqrt(2.0), 2.0, np.sqrt(8.0), 4.0],\r\n name='witness', do_summary=self.do_summary)\r\n elif mode == 'fixed_t_gp':\r\n witness = witness_mix_t(\r\n dist_zx, dist_zy, alpha=[0.25, 0.5, 0.9, 2.0, 25.0], beta=2.0,\r\n name='witness', do_summary=self.do_summary)\r\n elif mode in {'rep_gp', 'rmb_gp'}:\r\n witness = witness_g(\r\n dist_zx, dist_zy, sigma=1.0, name='witness', do_summary=self.do_summary)\r\n else:\r\n raise NotImplementedError('gradient penalty: {} not implemented'.format(mode))\r\n g_x_hat = tf.reshape(\r\n tf.gradients(witness, x_hat, name='gradient_x_hat')[0],\r\n [batch_size, -1])\r\n loss_grad_norm = tf.reduce_mean(\r\n tf.square(tf.norm(g_x_hat, ord=2, axis=1) - 1))\r\n return loss_grad_norm", "def _getBivecSpline(self):\n self._brep = {}\n\n for k in self.signals.keys():\n print('Reading signal ' + self.signals[k]['string'])\n tim = self._readsignal(self.signals[k]['string']).getDimensionAt(1).data()\n if tim[0]==0:\n tim = self._readsignal(r'dim_of('+self.signals[k]['string']+',1)').data()\n _idx = np.argmin(tim-self.t < 0)\n tim = tim[_idx]\n data = self._readsignal(self.signals[k]['string']).data()[self.trialindx, _idx, :]\n rhop = self._readsignal(self.signals[k]['string']).getDimensionAt(0).data()\n if rhop[1]==1:\n rhop = self._readsignal(r'dim_of('+self.signals[k]['string']+',0)').data()\n dummy = interpolate.interp1d(rhop, data, fill_value='extrapolate')\n self._brep[k] = dict([('spline', dummy)])", "def generate_data(params, sigma):\n rng = random.PRNGKey(0)\n k = len(params) // 2\n a_array = params[:k]\n b_array = params[k:]\n n = 20 * k\n xs = sample_our_uniform(n, 1, rng).reshape((n,))\n ys = onp.zeros(n)\n all_indices = set(onp.arange(n))\n for i in range(k):\n i_idxs = onp.random.choice(list(all_indices), 20, replace=False)\n all_indices = set(all_indices) - set(i_idxs)\n ys[i_idxs] = xs[i_idxs] * a_array[i] + b_array[i] + onp.random.normal(0, sigma, size=(20,))\n return xs, ys", "def test_multi_segment_non_axis_aligned_bspline_second_derivative() -> None:\n # a length=1.0 3-segment b-spline that isn't axis aligned. 
Each\n # segment is length=1/3.\n control_points = np.array(\n [\n (0.0, 0.0, 0.0),\n (-1 / 12 * (2 + 2 ** 0.5), 1 / 12 * (2 - 2 ** 0.5), 1 / 6),\n (-1.0 / 3.0, 1.0 / 3.0, 0.0),\n (-1.0 / 6.0, 1.0 / 2.0, 1.0 / (3.0 * (2 ** 0.5))),\n ]\n )\n curve = BSplineCurve(control_points)\n nominal_second_derivatives = np.array(\n # computed using Mathematica's BSplineFunction over\n # np.linspace(0.0, 3.0, num=13) (inputs scaled by 1/3)\n [\n [1.41421, 1.41421, -2.0],\n [1.4041, 1.23744, -1.63215],\n [1.39399, 1.06066, -1.2643],\n [1.38388, 0.883883, -0.896447],\n [1.37377, 0.707107, -0.528595],\n [1.36366, 0.53033, -0.160744],\n [1.35355, 0.353553, 0.207107],\n [1.34344, 0.176777, 0.574958],\n [1.33333, 4.44089 * 10 ** -16, 0.942809],\n [1.32322, -0.176777, 1.31066],\n [1.31311, -0.353553, 1.67851],\n [1.303, -0.53033, 2.04636],\n [1.29289, -0.707107, 2.41421],\n ]\n )\n for index, parameter in np.ndenumerate(np.linspace(0.0, 3.0, num=13)):\n np.testing.assert_allclose(\n curve.second_derivative_at(parameter),\n nominal_second_derivatives[index],\n err_msg=(\n \"Fails to find the second_derivative along a \"\n \"multi-displacement non-axis-aligned b-spline.\"\n ),\n )", "def create_synthetic_irt_polytomous(difficulty, discrimination, thetas,\n model='grm', seed=None):\n difficulty = np.atleast_2d(difficulty)\n n_items, n_levels = difficulty.shape\n\n if n_levels == 1:\n raise AssertionError(\"Polytomous items must have more than 1 threshold\")\n\n rng = np.random.default_rng(seed)\n\n # Check for single input of discrimination\n if np.atleast_1d(discrimination).size == 1:\n discrimination = np.full((n_items,), discrimination)\n\n theta_length = thetas.shape[-1]\n\n # Get the model to use, will throw error if not supported\n probability_func = {'grm': _graded_func,\n 'grm_md': _graded_func_md,\n 'pcm': _credit_func,\n 'gum': _unfold_func}[model.lower()]\n\n # Check difficulty parameters for validity\n clip_high = _check_difficulty_parameters(difficulty, model.lower())\n\n # Initialize output for memory concerns\n level_scratch = np.zeros((n_levels + 2, theta_length))\n output = np.zeros((n_items, theta_length), dtype='int')\n\n # Loop over items and compute probability estimates\n # for each of the levels and assign level based on\n # those probabilities\n for item_ndx in range(n_items):\n # Obtain the probabilities for the data (in-place)\n probability_func(difficulty[item_ndx], discrimination[item_ndx],\n thetas, level_scratch[1:, :])\n\n # Get the thresholds of the levels\n np.cumsum(level_scratch[1:, :], axis=0, out=level_scratch[1:, :])\n level_scratch[0] = rng.uniform(size=theta_length)\n\n # Discritize the outputs based on the thresholds\n output[item_ndx] = np.apply_along_axis(\n _my_digitize, axis=0, arr=level_scratch)\n\n # Add 1 to return [1, n_levels]\n output += 1\n np.clip(output, 1, clip_high, out=output)\n return output", "def geometric_brownian_motion_jump_diffusion_levels(params: 'ModelParameters') -> 'np.array':\n return convert_to_prices(params, geometric_brownian_motion_jump_diffusion_log_returns(params))", "def ib_params(key, num, dim, mean_std=0.3, var_mean=0.2, var_std=0.025):\n keys = random.split(key, 2)\n mean_params = mean_std * random.normal(keys[0], shape=(num, dim))\n logvar_params = np.log(var_mean * np.ones((num, dim))\n + var_std * random.normal(keys[1], shape=(num, dim)))\n return {'mean' : mean_params, 'logvar' : logvar_params}", "def update_generate_params(self,inps,trgs,preds):\n batch_size = np.shape(trgs)[0]\n\n self.delta_weight_h_to_v = self.learning_rate / 
batch_size * np.transpose(trgs) @ (inps - preds)\n self.delta_bias_v = self.learning_rate * np.mean(inps - preds)\n \n self.weight_h_to_v += self.delta_weight_h_to_v\n self.bias_v += self.delta_bias_v \n \n return", "def test_two_axis_bspline_third_derivative() -> None:\n # a length=1.0 curve in 2D\n control_points = np.array(\n [(0.0, 0.0, 0.0), (2.0 ** -0.5, 2.0 ** -0.5, 0.0)]\n )\n curve = BSplineCurve(control_points)\n for parameter in np.linspace(0.0, 1.0, num=5):\n third_derivative = parameter * control_points[1]\n np.testing.assert_allclose(\n curve.third_derivative_at(parameter),\n [2.0 ** -0.5, 2.0 ** -0.5, 0.0],\n err_msg=(\n f\"Fails to say that a bspline defined by \"\n f\"{control_points} is equal to {third_derivative} at \"\n f\"parameter {parameter}.\"\n ),\n )", "def _create_choice_rewards(states, is_inadmissible, optim_paras):\n n_states = states.shape[0]\n n_choices = len(optim_paras[\"choices\"])\n\n wages = np.ones((n_states, n_choices))\n nonpecs = np.zeros((n_states, n_choices))\n\n for i, choice in enumerate(optim_paras[\"choices\"]):\n if f\"wage_{choice}\" in optim_paras:\n wage_columns = optim_paras[f\"wage_{choice}\"].index\n log_wage = np.dot(\n states[wage_columns].to_numpy(dtype=COVARIATES_DOT_PRODUCT_DTYPE),\n optim_paras[f\"wage_{choice}\"].to_numpy(),\n )\n wages[:, i] = np.exp(log_wage)\n\n if f\"nonpec_{choice}\" in optim_paras:\n nonpec_columns = optim_paras[f\"nonpec_{choice}\"].index\n nonpecs[:, i] = np.dot(\n states[nonpec_columns].to_numpy(dtype=COVARIATES_DOT_PRODUCT_DTYPE),\n optim_paras[f\"nonpec_{choice}\"].to_numpy(),\n )\n\n # For inadmissible choices apply a penalty to the non-pecuniary rewards.\n penalty = optim_paras[\"inadmissibility_penalty\"]\n penalty = INADMISSIBILITY_PENALTY if penalty is None else penalty\n nonpecs[is_inadmissible] += penalty\n\n return wages, nonpecs", "def log_prob_with_policy_and_sigma(self, rng, params, x, policy, sigmas,\n train, context=None):\n d = np.prod(x.shape[1:])\n batch_size = x.shape[0]\n\n # Expand the dimensions of sigma if only a single order is given.\n if len(sigmas.shape) == 1:\n sigmas = jnp.repeat(sigmas[None], repeats=batch_size, axis=0)\n assert sigmas.shape == (batch_size, self.num_steps), (\n f'{sigmas.shape} does not match')\n\n rng_t, rng_drop = jax.random.split(rng, 2)\n\n # Sample t from policy.\n left_t, right_t, weight_policy = self.sample_policy_t(\n rng_t, batch_size, policy)\n num_tokens_in_parallel = right_t - left_t\n\n prev_selection, current_selection = ardm_utils.get_selections_for_sigma_and_range(\n sigmas, left_t, right_t, self.config.mask_shape)\n\n corrupted = self.corrupt(x, prev_selection)\n\n net_out = self.apply_fn(\n {'params': params}, corrupted, left_t, prev_selection, train,\n rngs={'dropout': rng_drop} if train else None, context=context)\n\n log_px_sigma_geq_t = self.logprob_fn(x, net_out)\n\n current_selection = current_selection.reshape(log_px_sigma_geq_t.shape)\n log_px_sigma_geq_t = current_selection * log_px_sigma_geq_t\n log_px_sigma_geq_t = util_fns.sum_except_batch(log_px_sigma_geq_t)\n\n # Reweigh for expectation over policy.\n log_prob = log_px_sigma_geq_t / num_tokens_in_parallel * weight_policy\n log_prob = log_prob / d / np.log(2)\n\n return log_prob", "def weights_treatment_parameters(init_dict, GRID):\n GRID = np.linspace(0.01, 0.99, num=99, endpoint=True)\n\n coeffs_untreated = init_dict[\"UNTREATED\"][\"params\"]\n coeffs_treated = init_dict[\"TREATED\"][\"params\"]\n cov = construct_covariance_matrix(init_dict)\n x = 
simulate_covariates(init_dict)\n\n # We take the specified distribution for the cost shifters from the paper.\n cost_mean, cost_sd = -0.0026, np.sqrt(0.270)\n v_mean, v_sd = 0.00, np.sqrt(cov[2, 2])\n\n eval_points = norm.ppf(GRID, loc=v_mean, scale=v_sd)\n\n ate_weights = np.tile(1.0, 99)\n tut_weights = norm.cdf(eval_points, loc=cost_mean, scale=cost_sd)\n\n tt_weights = 1 - tut_weights\n\n def tut_integrand(point):\n eval_point = norm.ppf(point, loc=v_mean, scale=v_sd)\n return norm.cdf(eval_point, loc=cost_mean, scale=cost_sd)\n\n def tt_integrand(point):\n eval_point = norm.ppf(point, loc=v_mean, scale=v_sd)\n return norm.cdf(eval_point, loc=cost_mean, scale=cost_sd)\n\n # Scaling so that the weights integrate to one.\n tut_scaling = quad(tut_integrand, 0.01, 0.99)[0]\n tut_weights /= tut_scaling\n\n tt_scaling = quad(tt_integrand, 0.01, 0.99)[0]\n tt_weights /= tt_scaling\n\n mte = mte_information(coeffs_treated, coeffs_untreated, cov, GRID, x, init_dict)\n\n return ate_weights, tt_weights, tut_weights, mte", "def test_multi_segment_non_axis_aligned_bspline_first_derivative() -> None:\n # a length=1.0 3-segment b-spline that isn't axis aligned. Each\n # segment is length=1/3.\n control_points = np.array(\n [\n (0.0, 0.0, 0.0),\n (-1 / 12 * (2 + 2 ** 0.5), 1 / 12 * (2 - 2 ** 0.5), 1 / 6),\n (-1.0 / 3.0, 1.0 / 3.0, 0.0),\n (-1.0 / 6.0, 1.0 / 2.0, 1.0 / (3.0 * (2 ** 0.5))),\n ]\n )\n curve = BSplineCurve(control_points)\n nominal_first_derivatives = np.array(\n # computed using Mathematica's BSplineFunction over\n # np.linspace(0.0, 3.0, num=13) (inputs scaled by 1/3)\n [\n [-0.853553, 0.146447, 0.5],\n [-0.736124, 0.256932, 0.34866],\n [-0.619536, 0.352686, 0.227975],\n [-0.503791, 0.433709, 0.137944],\n [-0.388889, 0.5, 0.0785674],\n [-0.274829, 0.55156, 0.0498449],\n [-0.161612, 0.588388, 0.0517767],\n [-0.0492368, 0.610485, 0.0843627],\n [0.0622956, 0.617851, 0.147603],\n [0.172985, 0.610485, 0.241498],\n [0.282833, 0.588388, 0.366046],\n [0.391838, 0.55156, 0.521249],\n [0.5, 0.5, 0.707107],\n ]\n )\n for index, parameter in np.ndenumerate(np.linspace(0.0, 3.0, num=13)):\n np.testing.assert_allclose(\n curve.first_derivative_at(parameter),\n nominal_first_derivatives[index],\n err_msg=(\n \"Fails to find the first_derivative along a \"\n \"multi-displacement non-axis-aligned b-spline.\"\n ),\n )", "def biharmonic_spline(xs, ys, zs, XI, YI, metric='euclidean',\n tension=0, regular=False, eps=1e-7, scale=0.02):\n #-- remove singleton dimensions\n xs = np.squeeze(xs)\n ys = np.squeeze(ys)\n zs = np.squeeze(zs)\n XI = np.squeeze(XI)\n YI = np.squeeze(YI)\n #-- size of new matrix\n if (np.ndim(XI) == 1):\n nx = len(XI)\n else:\n nx,ny = np.shape(XI)\n\n #-- Check to make sure sizes of input arguments are correct and consistent\n if (len(zs) != len(xs)) | (len(zs) != len(ys)):\n raise Exception('Length of X, Y, and Z must be equal')\n if (np.shape(XI) != np.shape(YI)):\n raise Exception('Size of XI and YI must be equal')\n if (tension < 0) or (tension >= 1):\n raise ValueError('TENSION must be greater than 0 and less than 1')\n\n #-- Compute GG matrix for GG*m = d inversion problem\n npts = len(zs)\n GG = np.zeros((npts,npts))\n #-- Computation of distance Matrix (data to data)\n if (metric == 'brute'):\n #-- use linear algebra to compute euclidean distances\n Rd = distance_matrix(\n np.array([xs, ys]),\n np.array([xs, ys])\n )\n else:\n #-- use scipy spatial distance routines\n Rd = scipy.spatial.distance.cdist(\n np.array([xs, ys]).T,\n np.array([xs, ys]).T,\n metric=metric)\n #-- 
Calculate length scale for regularized case (Mitasova and Mitas)\n length_scale = np.sqrt((XI.max() - XI.min())**2 + (YI.max() - YI.min())**2)\n #-- calculate Green's function for valid points (with or without tension)\n ii,jj = np.nonzero(Rd >= eps)\n if (tension == 0):\n GG[ii,jj] = (Rd[ii,jj]**2) * (np.log(Rd[ii,jj]) - 1.0)\n elif regular:\n GG[ii,jj] = regular_spline2D(Rd[ii,jj], tension, scale*length_scale)\n else:\n GG[ii,jj] = green_spline2D(Rd[ii,jj], tension)\n #-- detrend dataset\n z0,r0,p = detrend2D(xs,ys,zs)\n #-- Compute model m for detrended data\n m = np.linalg.lstsq(GG,z0,rcond=-1)[0]\n\n #-- Computation of distance Matrix (data to mesh points)\n if (metric == 'brute'):\n #-- use linear algebra to compute euclidean distances\n Re = distance_matrix(\n np.array([XI.flatten(),YI.flatten()]),\n np.array([xs,ys])\n )\n else:\n #-- use scipy spatial distance routines\n Re = scipy.spatial.distance.cdist(\n np.array([XI.flatten(),YI.flatten()]).T,\n np.array([xs, ys]).T,\n metric=metric)\n gg = np.zeros_like(Re)\n #-- calculate Green's function for valid points (with or without tension)\n ii,jj = np.nonzero(Re >= eps)\n if (tension == 0):\n gg[ii,jj] = (Re[ii,jj]**2) * (np.log(Re[ii,jj]) - 1.0)\n elif regular:\n gg[ii,jj] = regular_spline2D(Re[ii,jj], tension, scale*length_scale)\n else:\n gg[ii,jj] = green_spline2D(Re[ii,jj], tension)\n\n #-- Find 2D interpolated surface through irregular/regular X, Y grid points\n if (np.ndim(XI) == 1):\n ZI = np.squeeze(np.dot(gg,m))\n else:\n ZI = np.zeros((nx,ny))\n ZI[:,:] = np.dot(gg,m).reshape(nx,ny)\n #-- return output matrix after retrending\n return (ZI + r0[2]) + (XI-r0[0])*p[0] + (YI-r0[1])*p[1]", "def test_non_unit_length_bspline_second_derivative() -> None:\n # a multi-displacement b-spline with length=4.0\n control_points = np.array(\n [\n (0.0, 0.0, 0.0),\n (2.0, 0.0, 0.0),\n (2.0, 1.0, 0.0),\n (2.0, 1.0, 1.0),\n ]\n )\n curve = BSplineCurve(control_points)\n nominal_second_derivatives = np.array(\n # computed using Mathematica's BSplineFunction over\n # np.linspace(0.0, 3.0, num=13) (inputs scaled by 1/3)\n [\n [-12.0, 6.0, 0.0],\n [-11.0, 5.0, 0.5],\n [-10.0, 4.0, 1.0],\n [-9.0, 3.0, 1.5],\n [-8.0, 2.0, 2.0],\n [-7.0, 1.0, 2.5],\n [-6.0, 0.0, 3.0],\n [-5.0, -1.0, 3.5],\n [-4.0, -2.0, 4.0],\n [-3.0, -3.0, 4.5],\n [-2.0, -4.0, 5.0],\n [-1.0, -5.0, 5.5],\n [0.0, -6.0, 6.0],\n ]\n )\n for index, parameter in np.ndenumerate(np.linspace(0.0, 3.0, num=13)):\n np.testing.assert_allclose(\n curve.second_derivative_at(parameter),\n nominal_second_derivatives[index],\n err_msg=(\n \"Fails to find the second_derivative along a \"\n \"multi-displacement b-spline with non-unit length.\"\n ),\n )", "def test_three_axis_bspline_second_derivative() -> None:\n # a length=1.0 curve in 3D\n control_points = np.array(\n [(0.0, 0.0, 0.0), (3.0 ** -0.5, 3.0 ** -0.5, 3.0 ** -0.5)]\n )\n curve = BSplineCurve(control_points)\n for parameter in np.linspace(0.0, 1.0, num=5):\n second_derivative = tuple(parameter * (3.0 ** -0.5) for _ in range(3))\n np.testing.assert_allclose(\n curve.second_derivative_at(parameter),\n [3.0 ** -0.5, 3.0 ** -0.5, 3.0 ** -0.5],\n err_msg=(\n f\"Fails to say that a bspline defined by \"\n f\"{control_points} is equal to {second_derivative} at \"\n f\"parameter {parameter}.\"\n ),\n )", "def hendrix_perishable_substitution_two_product_waste_conscious_S_policy(\n policy_params: chex.Array,\n obs: chex.Array,\n rng: chex.PRNGKey,\n max_useful_life: int,\n mean_demand_a: float,\n mean_demand_b: float,\n) -> chex.Array:\n # 
policy_params = [[S_a], [S_b]]\n S_a = policy_params[0, 0]\n S_b = policy_params[1, 0]\n\n total_stock_a = jnp.sum(obs[0:max_useful_life])\n total_stock_b = jnp.sum(obs[max_useful_life : 2 * max_useful_life])\n\n stock_expiring_next_period_a = obs[max_useful_life - 1]\n stock_expiring_next_period_b = obs[-1]\n\n order_a = base_waste_conscious_S_policy(\n S_a, total_stock_a, stock_expiring_next_period_a, mean_demand_a\n )\n order_b = base_waste_conscious_S_policy(\n S_b, total_stock_b, stock_expiring_next_period_b, mean_demand_b\n )\n return jnp.array([order_a, order_b])" ]
[ "0.44813386", "0.44633934", "0.4418267", "0.43117476", "0.4301847", "0.42865905", "0.42623934", "0.42274022", "0.42186657", "0.42178455", "0.42045256", "0.41992435", "0.4193804", "0.41628328", "0.41511914", "0.41399053", "0.41374248", "0.40915483", "0.40820405", "0.40713498", "0.40697476", "0.40509725", "0.40413305", "0.40298986", "0.40222308", "0.40166724", "0.40021884", "0.4001753", "0.39987385", "0.39974082" ]
0.541615
0
Create new navigation property to connectors for print.
def create_connectors(
        self,
        body,  # type: "models.MicrosoftGraphPrintConnector"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.MicrosoftGraphPrintConnector"
        cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphPrintConnector"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.create_connectors.metadata['url']  # type: ignore

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(body, 'MicrosoftGraphPrintConnector')
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('MicrosoftGraphPrintConnector', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_connectors(\n self,\n print_connector_id, # type: str\n select=None, # type: Optional[List[Union[str, \"models.Enum28\"]]]\n expand=None, # type: Optional[List[str]]\n **kwargs # type: Any\n ):\n # type: (...) -> \"models.MicrosoftGraphPrintConnector\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrintConnector\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.get_connectors.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printConnector-id': self._serialize.url(\"print_connector_id\", print_connector_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrintConnector', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def to_blueprint(self):\n return BlueprintGraph(super().to_phenotype(BlueprintNode))", "def rdf_update_connections(rdf, prop, obj, subj, owl):\n conname = prop.split('#')[-1]\n print(\"createcon \"+str(obj)+ \" \" + str(subj))\n obj.relationships.create(conname, subj)\n for i in rdf.objects(subject=prop, predicate=RDFS.subPropertyOf):\n print(i)\n rdf_update_connections(rdf, i, obj, subj, owl)\n for i in rdf.objects(subject=prop, predicate=owl.inverseOf):\n conname = i.split('#')[-1]\n subj.relationships.create(conname, obj)", "def relationships(self):", "def _print_properties(self):\n return NotImplemented", "def print_pointers(self):\n\n ### FILL IN ###", "def _add_relationships(self, element: Element) -> None:\n elements: Set[str] = {v.id for v in self.element_views}\n\n for relationship in element.get_efferent_relationships():\n if relationship.destination.id in elements:\n self._relationship_views.add(\n RelationshipView(relationship=relationship)\n )\n\n for relationship in element.get_afferent_relationships():\n if relationship.source.id in elements:\n self._relationship_views.add(\n RelationshipView(relationship=relationship)\n )", "def test_route_pprint_property_method() -> None:\n route = Route(schema=RouteSchema(SpecificLocation()))\n assert route.pprint == \"🛣️ (Route)\"", "def with_property(self, schema):\n self.properties = self.properties if self.properties != None else []\n self.properties.append(schema)\n return self", "def generate(self):\n self.graph_repl = self.master.graph_repl", "def _add_relations(self):\n relations = {\n 'keystone:shared-db': 'percona-cluster:shared-db',\n 
'swift-proxy:identity-service': 'keystone:identity-service',\n 'swift-storage:swift-storage': 'swift-proxy:swift-storage',\n 'glance:identity-service': 'keystone:identity-service',\n 'glance:shared-db': 'percona-cluster:shared-db',\n 'glance:object-store': 'swift-proxy:object-store'\n }\n super(SwiftProxyBasicDeployment, self)._add_relations(relations)", "def fixRelations (self):\n\t\tnodes = self.getFieldElements (\"relation\")\n\t\tif not nodes: return\n\t\t\n\t\tprint \"\\n%s\" % self.getId()\n\t\tfor r in nodes:\n\t\t\tvalue = XmlUtils.getText(r)\n\t\t\tif not value: return\n\t\t\tXmlUtils.setText (r,\"\")\n\t\t\tif value.startswith (\"http://\"):\n\t\t\t\tr.setAttribute (\"type\", \"Has part\")\n\t\t\t\tr.setAttribute (\"url\", value)\n\t\t\telse:\n\t\t\t\tr.setAttribute (\"type\", \"Is related\")\n\t\t\t\tr.setAttribute (\"title\", value)\n\t\t\tprint r.toxml()\n\t\tif 0:\n\t\t\tself.write()\n\t\t\tprint \"wrote record\"", "def printSetup(self):\n if not self.rank:\n logging.info('Setting up printing options')\n\n freq, args = self.pargs['print'][0], self.pargs['print'][1:]\n\n self.lmp.command('thermo_style custom' + (' {}' * len(args)).format(*args))\n self.lmp.command('thermo {}'.format(freq))\n self.lmp.command('thermo_modify norm no lost ignore')", "def __str__(self):\n if self.connected_to.get_size() != 0:\n s = ''\n for connection in self.connected_to:\n s += f\"{self.name} -> {connection.get_name()} \"\n return s\n return self.name", "def setup_schemaProperties(self):\n\n propname = 'xml_schema'\n curr = getattr(self, propname, '')\n try:\n self._delProperty(propname)\n except ValueError:\n pass\n try:\n delattr(self, propname)\n except:\n pass\n setattr(self, propname, curr)\n\n properties = list(self._properties)\n properties.append({'id': propname,\n 'type': 'selection',\n 'select_variable': 'get_schemaCandidates',\n 'mode': 'w'})\n\n self._properties = tuple(properties)", "def _setup_printer(self):\n for key, application in self.config['applications'].items():\n if application['type'] == 'printer':\n self.guest.shellExec(\n 'rundll32 printui.dll,PrintUIEntry /if /b IPPTool-Printer /m \"Generic / Text Only\" /r \"{}\"'.format(\n application['hostname']))\n time.sleep(3)\n self.guest.shellExec('rundll32 printui.dll,PrintUIEntry /y /n IPPTool-Printer')\n time.sleep(3)\n self.guest.shellExec(\n 'REG ADD \"HKCU\\\\Software\\\\Microsoft\\\\Windows NT\\\\CurrentVersion\\\\Windows\" /t REG_DWORD /v LegacyDefaultPrinterMode /d 1 /f')\n time.sleep(5)\n self._logger.info('[~] Created new printer %s.', key)", "def list_connectors(\n self,\n orderby=None, # type: Optional[List[Union[str, \"models.Get5ItemsItem\"]]]\n select=None, # type: Optional[List[Union[str, \"models.Get6ItemsItem\"]]]\n expand=None, # type: Optional[List[str]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> Iterable[\"models.CollectionOfPrintConnector\"]\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.CollectionOfPrintConnector\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n def prepare_request(next_link=None):\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n if not next_link:\n # Construct URL\n url = self.list_connectors.metadata['url'] # type: ignore\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if self._config.top is not None:\n query_parameters['$top'] = self._serialize.query(\"self._config.top\", self._config.top, 'int', minimum=0)\n if self._config.skip is not None:\n query_parameters['$skip'] = self._serialize.query(\"self._config.skip\", self._config.skip, 'int', minimum=0)\n if self._config.search is not None:\n query_parameters['$search'] = self._serialize.query(\"self._config.search\", self._config.search, 'str')\n if self._config.filter is not None:\n query_parameters['$filter'] = self._serialize.query(\"self._config.filter\", self._config.filter, 'str')\n if self._config.count is not None:\n query_parameters['$count'] = self._serialize.query(\"self._config.count\", self._config.count, 'bool')\n if orderby is not None:\n query_parameters['$orderby'] = self._serialize.query(\"orderby\", orderby, '[str]', div=',')\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n request = self._client.get(url, query_parameters, header_parameters)\n else:\n url = next_link\n query_parameters = {} # type: Dict[str, Any]\n request = self._client.get(url, query_parameters, header_parameters)\n return request\n\n def extract_data(pipeline_response):\n deserialized = self._deserialize('CollectionOfPrintConnector', pipeline_response)\n list_of_elem = deserialized.value\n if cls:\n list_of_elem = cls(list_of_elem)\n return deserialized.odata_next_link or None, iter(list_of_elem)\n\n def get_next(next_link=None):\n request = prepare_request(next_link)\n\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n error = self._deserialize(models.OdataError, response)\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n return pipeline_response\n\n return ItemPaged(\n get_next, extract_data\n )", "def use_pypprint_for_implicit_print(self) -> None:\n if self.implicit_print is not None:\n self.implicit_print.func.id = \"pypprint\" # type: ignore\n # Make sure we import it later\n self.undefined.add(\"pypprint\")", "def __str__(self):\n stringRepresentation = []\n for node in self.getNodes():\n stringRepresentation.append(\"->\".join(\n (str(node), str(self.graph[node]))))\n\n return str(stringRepresentation)", "def _enableDebugPrint(self):\n self._dbPrint = Printer(debugPrint=True)", "def prepareConnectors(self):\n # Kinematic Connectors require creating node sets\n # These are created and added to the node set collection prior to writing\n\n numConnectors = 1\n\n for connector in self.connectors:\n # Node 
are created and are an attribute of a Connector\n self._nodeSets.append(connector.nodeset)\n\n numConnectors += 1", "def printProperties(self):\n print \"Properties of: {0}\".format(self.getName())\n for prop in self._getPropName():\n if isinstance(prop, types.IntType):\n print \"\\t{0} => {1}\".format(prop, self.getPropertie(prop))\n else:\n print \"\\t'{0}' => {1}\".format(prop, self.getPropertie(prop))", "def printDatabase():\n with driver.session() as s:\n personNodes = s.read_transaction(findAllPerson)\n houseNodes = s.read_transaction(findAllHome)\n locationNodes = s.read_transaction(findAllLocation)\n vaccineNodes = s.read_transaction(findAllVaccine)\n testNodes = s.read_transaction(findAllTest)\n liveRelationships = s.read_transaction(findAllLiveRelationships)\n visitRelationships = s.read_transaction(findAllVisitRelationships)\n appContactRelationships = s.read_transaction(findAllAppContactRelationships)\n getRelationships = s.read_transaction(findAllGetVaccineRelationships)\n makeRelationships = s.read_transaction(findAllMakeTestRelationships)\n infectRelationships = s.read_transaction(findAllInfectedRelationships)\n\n # Initialize the network attribute\n ps.PlotDBStructure.__init__()\n\n # Add nodes\n ps.PlotDBStructure.addStructure(personNodes)\n ps.PlotDBStructure.addStructure(houseNodes)\n ps.PlotDBStructure.addStructure(locationNodes)\n ps.PlotDBStructure.addStructure(vaccineNodes)\n ps.PlotDBStructure.addStructure(testNodes)\n\n # Add relationships\n ps.PlotDBStructure.addStructure(liveRelationships)\n ps.PlotDBStructure.addStructure(visitRelationships)\n ps.PlotDBStructure.addStructure(appContactRelationships)\n ps.PlotDBStructure.addStructure(makeRelationships)\n ps.PlotDBStructure.addStructure(getRelationships)\n ps.PlotDBStructure.addStructure(infectRelationships)\n\n # Show the graph structure\n ps.PlotDBStructure.showGraph()\n return", "def travel_print(self):\n if self.is_empty():\n print(\"Linked list's length is 0\")\n else:\n node = self.head\n print(\"head -->\", node.data, end=' ')\n while node.next:\n node = node.next\n print(\"-->\", node.data, end=' ')\n print(\" \")", "def print(self) -> None:\n\n print('')\n print(f\"{self.get_name()}, {self.get_description()}\")\n print('-------------')\n for child in self._children:\n child.print()", "def print_model_graph(self, name=None, agent=([], [], [])):\n dot = pygraphviz.AGraph(directed=\"True\")\n for outp in list(self.outputs.keys()):\n dot.add_node(outp, pos=(outp[1:] + \",10\"), color=\"red\", label=outp + \", \" + str(self.outputs[outp].taking.size) + \"-\" + self.outputs[outp].taking.type)\n for inp in list(self.inputs.keys()):\n dot.add_node(inp, pos=(inp[1:] + \",0\"), color=\"blue\", label=inp + \", \" + str(self.inputs[inp].producing.size) + \"-\" + self.inputs[inp].producing.type)\n for comp in list(self.networks.keys()):\n dot.add_node(comp, label=comp + \"-\" + str(type(self.networks[comp].descriptor).__name__)[:-14] + \":\" + str(self.networks[comp].taking.size) + \"-\" + str(self.networks[comp].producing.size))\n\n for c in self.connections:\n con = self.connections[c]\n if self.conn_in_agent(con, agent[0]):\n dot.add_edge(con.input, con.output, label=str(con.name) + \": \" + str(con.info.size) + \" \" + self.comp_by_ind(con.input).producing.type, color=\"blue\")\n elif self.conn_in_agent(con, agent[1]):\n dot.add_edge(con.input, con.output, label=str(con.name) + \": \" + str(con.info.size) + \" \" + self.comp_by_ind(con.input).producing.type, color=\"red\")\n elif self.conn_in_agent(con, 
agent[2]):\n dot.add_edge(con.input, con.output, label=str(con.name) + \": \" + str(con.info.size) + \" \" + self.comp_by_ind(con.input).producing.type, color=\"green\")\n else:\n dot.add_edge(con.input, con.output, label=str(con.name) + \": \" + str(con.info.size) + \" \" + self.comp_by_ind(con.input).producing.type, color=\"black\")\n dot.layout('dot')\n if not name:\n name = str(hash(self))\n dot.draw(name + '.pdf')", "def print_all(self):\n if not request:\n raise exceptions.Warning(_(''), _(''))\n session_id = request.session.sid\n config = self.env['ir.config_parameter']\n addons_url = config.get_param('addons_path')\n phantomjs_path = config.get_param('phantomjs_path')\n phantomjs_path = 'phantomjs' if not phantomjs_path else phantomjs_path\n print_url = self.env.context.get('protocol_url', False)\n if print_url:\n print_urls = [print_url]\n else:\n print_urls = self._get_print_urls()\n if not print_urls:\n return\n phantom = [\n phantomjs_path,\n addons_url +\n '/quality_protocol_report/static/src/js/phantom_url_to_pdf.js',\n session_id, \"/tmp\"] + print_urls\n process = subprocess.Popen(phantom)\n process.communicate()\n filenames = []\n for url in print_urls:\n fname = url.replace('/', '').replace(':', '')\n weight_pos = fname.find('?weight=')\n if weight_pos > -1:\n fname = fname[weight_pos+8:weight_pos+10] + '-' + fname[:weight_pos]\n filenames.append('/tmp/' + fname + '.pdf')\n filepath = self._merge_pdf(sorted(filenames))\n fildecode = open(filepath, 'r')\n encode_data = fildecode.read()\n fildecode.close()\n active_model = self.env.context.get('active_model', False)\n active_id = self.env.context.get('active_id', False)\n ungrouped_also = self.env.context.get('print_ungrouped_also', False)\n if active_model and active_id and not ungrouped_also:\n active_name = self.env[active_model].browse([active_id]).name\n else:\n dt = fields.Datetime.context_timestamp(self, datetime.now())\n active_name = dt.strftime('%d-%m-%Y_%Hh%M')\n filename = 'protocolo.pdf' if print_url else \\\n 'protocolos_' + str(active_name).lower() + '.pdf'\n attachment_data = {\n 'name': filename,\n 'datas_fname': filename,\n 'datas': base64.b64encode(encode_data),\n 'res_model': active_model,\n 'res_id': 0 if print_url else self.env.context.get('active_id', False),\n }\n self.env['ir.attachment'].search(\n [('name', '=', attachment_data['name']),\n ('res_id', '=', attachment_data['res_id']),\n ('res_model', '=', attachment_data['res_model'])]).unlink()\n attachment = self.env['ir.attachment'].create(attachment_data)\n\n filenames.append(filepath)\n for my_file in filenames:\n os.remove(my_file)\n\n if print_url:\n return {\n 'type': 'ir.actions.act_url',\n 'url': '/web/binary/saveas?model=ir.attachment&field=datas' +\n '&filename_field=name&id=%s' % (attachment.id),\n 'target': 'self',\n }\n else:\n return {'type': 'ir.actions.act_window_close'}", "def links_summary(self, is_print=True):\n if not self.nodes:\n self.get_nodes()\n if not self.links:\n self.get_links()\n\n _links_summary = []\n for _l in self.links:\n if not _l.nodes:\n continue\n _side_a = _l.nodes[0]\n _side_b = _l.nodes[1]\n _node_a = [x for x in self.nodes if x.node_id == _side_a[\"node_id\"]][0]\n _port_a = [\n x[\"name\"]\n for x in _node_a.ports\n if x[\"port_number\"] == _side_a[\"port_number\"]\n and x[\"adapter_number\"] == _side_a[\"adapter_number\"]\n ][0]\n _node_b = [x for x in self.nodes if x.node_id == _side_b[\"node_id\"]][0]\n _port_b = [\n x[\"name\"]\n for x in _node_b.ports\n if x[\"port_number\"] == 
_side_b[\"port_number\"]\n and x[\"adapter_number\"] == _side_b[\"adapter_number\"]\n ][0]\n endpoint_a = f\"{_node_a.name}: {_port_a}\"\n endpoint_b = f\"{_node_b.name}: {_port_b}\"\n if is_print:\n print(f\"{endpoint_a} ---- {endpoint_b}\")\n _links_summary.append((_node_a.name, _port_a, _node_b.name, _port_b))\n\n return _links_summary if not is_print else None", "def __str__(self):\n return \"->\".join([str(n.data) for n in self.as_list()])", "def _export_reference_representations(self):\n\n self.logger.msg1(\"Saving reference representations\")\n general_refset, _ = get_refsets(self.dbpath)\n general_refset.save(self.rootpath+\"-references\", \"phenotype\")" ]
[ "0.5191692", "0.498154", "0.47918597", "0.47604012", "0.46645373", "0.46233404", "0.45859265", "0.45806047", "0.4573805", "0.45636976", "0.45020878", "0.44743195", "0.44632915", "0.44620514", "0.4429466", "0.43730986", "0.4357729", "0.43509617", "0.43456122", "0.4344421", "0.43430746", "0.43164235", "0.43111658", "0.43109316", "0.43088403", "0.4302378", "0.42939028", "0.42852357", "0.4277324", "0.4267314" ]
0.53582543
0
Get connectors from print.
def get_connectors(
        self,
        print_connector_id,  # type: str
        select=None,  # type: Optional[List[Union[str, "models.Enum28"]]]
        expand=None,  # type: Optional[List[str]]
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.MicrosoftGraphPrintConnector"
        cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphPrintConnector"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        accept = "application/json"

        # Construct URL
        url = self.get_connectors.metadata['url']  # type: ignore
        path_format_arguments = {
            'printConnector-id': self._serialize.url("print_connector_id", print_connector_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        if select is not None:
            query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(models.OdataError, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('MicrosoftGraphPrintConnector', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_connectors_list(self):\n pass", "def list_connectors(\n self,\n orderby=None, # type: Optional[List[Union[str, \"models.Get5ItemsItem\"]]]\n select=None, # type: Optional[List[Union[str, \"models.Get6ItemsItem\"]]]\n expand=None, # type: Optional[List[str]]\n **kwargs # type: Any\n ):\n # type: (...) -> Iterable[\"models.CollectionOfPrintConnector\"]\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.CollectionOfPrintConnector\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n def prepare_request(next_link=None):\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n if not next_link:\n # Construct URL\n url = self.list_connectors.metadata['url'] # type: ignore\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if self._config.top is not None:\n query_parameters['$top'] = self._serialize.query(\"self._config.top\", self._config.top, 'int', minimum=0)\n if self._config.skip is not None:\n query_parameters['$skip'] = self._serialize.query(\"self._config.skip\", self._config.skip, 'int', minimum=0)\n if self._config.search is not None:\n query_parameters['$search'] = self._serialize.query(\"self._config.search\", self._config.search, 'str')\n if self._config.filter is not None:\n query_parameters['$filter'] = self._serialize.query(\"self._config.filter\", self._config.filter, 'str')\n if self._config.count is not None:\n query_parameters['$count'] = self._serialize.query(\"self._config.count\", self._config.count, 'bool')\n if orderby is not None:\n query_parameters['$orderby'] = self._serialize.query(\"orderby\", orderby, '[str]', div=',')\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n request = self._client.get(url, query_parameters, header_parameters)\n else:\n url = next_link\n query_parameters = {} # type: Dict[str, Any]\n request = self._client.get(url, query_parameters, header_parameters)\n return request\n\n def extract_data(pipeline_response):\n deserialized = self._deserialize('CollectionOfPrintConnector', pipeline_response)\n list_of_elem = deserialized.value\n if cls:\n list_of_elem = cls(list_of_elem)\n return deserialized.odata_next_link or None, iter(list_of_elem)\n\n def get_next(next_link=None):\n request = prepare_request(next_link)\n\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n error = self._deserialize(models.OdataError, response)\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n return pipeline_response\n\n return ItemPaged(\n get_next, extract_data\n )", "def list_connections(self, show_passthrough=True):\n return self._exprmapper.list_connections(show_passthrough)", "def listPrinters(self):\n raise NotImplementedError(\"listPrinters not implemented\")", "def print_connections(self):\n print(\"[Printing Connections]\")\n for key in self.connections.keys():\n print(f\"{key}:\\n\\t{self.connections[key]}\")", "def get_connector_references(context):\n\n 
catalog = plone.api.portal.get_tool('portal_catalog')\n query = dict(portal_type='xmldirector.connector')\n items = list()\n for brain in catalog(**query):\n items.append(SimpleTerm(brain.UID, brain.UID, brain.Title))\n items.sort(key=operator.attrgetter(\"title\"))\n return SimpleVocabulary(items)", "def list_cups_printer( ):\n\t\n\tprint( 'List CUPS printer' )\n\tprint( '-----------------' )\n\tmedium = PrinterCupsAdapter()\n\t\n\tmedium.open() # Open the media for transmission\n\ttry:\n\t\tmedium.dump_printers()\n\tfinally:\n\t\tmedium.close()\n\t\n\tdel( medium )", "def getConnectionList(self):\n return []", "def ssh_list_connections(cls):\n for name in cls._ssh_connections.keys():\n print (name)", "def print_all(self):\r\n for e in self.channels:\r\n e.print()", "def list_connections(self):\n return self.network.list_connections()", "def print_all(self):\n if not request:\n raise exceptions.Warning(_(''), _(''))\n session_id = request.session.sid\n config = self.env['ir.config_parameter']\n addons_url = config.get_param('addons_path')\n phantomjs_path = config.get_param('phantomjs_path')\n phantomjs_path = 'phantomjs' if not phantomjs_path else phantomjs_path\n print_url = self.env.context.get('protocol_url', False)\n if print_url:\n print_urls = [print_url]\n else:\n print_urls = self._get_print_urls()\n if not print_urls:\n return\n phantom = [\n phantomjs_path,\n addons_url +\n '/quality_protocol_report/static/src/js/phantom_url_to_pdf.js',\n session_id, \"/tmp\"] + print_urls\n process = subprocess.Popen(phantom)\n process.communicate()\n filenames = []\n for url in print_urls:\n fname = url.replace('/', '').replace(':', '')\n weight_pos = fname.find('?weight=')\n if weight_pos > -1:\n fname = fname[weight_pos+8:weight_pos+10] + '-' + fname[:weight_pos]\n filenames.append('/tmp/' + fname + '.pdf')\n filepath = self._merge_pdf(sorted(filenames))\n fildecode = open(filepath, 'r')\n encode_data = fildecode.read()\n fildecode.close()\n active_model = self.env.context.get('active_model', False)\n active_id = self.env.context.get('active_id', False)\n ungrouped_also = self.env.context.get('print_ungrouped_also', False)\n if active_model and active_id and not ungrouped_also:\n active_name = self.env[active_model].browse([active_id]).name\n else:\n dt = fields.Datetime.context_timestamp(self, datetime.now())\n active_name = dt.strftime('%d-%m-%Y_%Hh%M')\n filename = 'protocolo.pdf' if print_url else \\\n 'protocolos_' + str(active_name).lower() + '.pdf'\n attachment_data = {\n 'name': filename,\n 'datas_fname': filename,\n 'datas': base64.b64encode(encode_data),\n 'res_model': active_model,\n 'res_id': 0 if print_url else self.env.context.get('active_id', False),\n }\n self.env['ir.attachment'].search(\n [('name', '=', attachment_data['name']),\n ('res_id', '=', attachment_data['res_id']),\n ('res_model', '=', attachment_data['res_model'])]).unlink()\n attachment = self.env['ir.attachment'].create(attachment_data)\n\n filenames.append(filepath)\n for my_file in filenames:\n os.remove(my_file)\n\n if print_url:\n return {\n 'type': 'ir.actions.act_url',\n 'url': '/web/binary/saveas?model=ir.attachment&field=datas' +\n '&filename_field=name&id=%s' % (attachment.id),\n 'target': 'self',\n }\n else:\n return {'type': 'ir.actions.act_window_close'}", "def printings(self):\n return self.cdb.card_name_to_printings[self.name]", "def list_connections(self, show_passthrough=True):\n excludes = set([name for name, data in self._exprgraph.nodes(data=True)\n if data['expr'].refs_parent()])\n if 
show_passthrough:\n return [(u, v) for u, v in self._exprgraph.edges() if not (u in excludes or v in excludes)]\n else:\n return [(u, v) for u, v in self._exprgraph.edges()\n if '.' in u and '.' in v and not (u in excludes or v in excludes)]", "def get_outgoing_connections(self, comp):\n return self.connections.get(comp.id, [])", "def get_connections(self, name):\n cls, pending, connected = self._proxies[name]\n return list(connected)", "def list(self):\n\t\tif self.client is None:\n\t\t\traise UsageError(\"Not connected!\")\n\t\treturn self.client.list_conns()", "def list(self):\n return self.rpc.call(MsfRpcMethod.ConsoleList)['consoles']", "def findDevices(self):\n devs = []\n for name in self.serialLinks:\n port = self.serialLinks[name]\n if name not in self.client.servers:\n continue\n server = self.client[name]\n ports = yield server.list_serial_ports()\n print ports\n if port not in ports:\n continue\n devName = '%s - %s' % (name, port)\n devs += [(devName, (server, port))]\n returnValue(devs)", "def print(self):\r\n for e in self.channels:\r\n print(e)", "def get_connections(self):\n return self.connections", "def get_connections(self):\n return self.connections", "def protocols(self):\n return list(self.query(Protocol))", "def links_summary(self, is_print=True):\n if not self.nodes:\n self.get_nodes()\n if not self.links:\n self.get_links()\n\n _links_summary = []\n for _l in self.links:\n if not _l.nodes:\n continue\n _side_a = _l.nodes[0]\n _side_b = _l.nodes[1]\n _node_a = [x for x in self.nodes if x.node_id == _side_a[\"node_id\"]][0]\n _port_a = [\n x[\"name\"]\n for x in _node_a.ports\n if x[\"port_number\"] == _side_a[\"port_number\"]\n and x[\"adapter_number\"] == _side_a[\"adapter_number\"]\n ][0]\n _node_b = [x for x in self.nodes if x.node_id == _side_b[\"node_id\"]][0]\n _port_b = [\n x[\"name\"]\n for x in _node_b.ports\n if x[\"port_number\"] == _side_b[\"port_number\"]\n and x[\"adapter_number\"] == _side_b[\"adapter_number\"]\n ][0]\n endpoint_a = f\"{_node_a.name}: {_port_a}\"\n endpoint_b = f\"{_node_b.name}: {_port_b}\"\n if is_print:\n print(f\"{endpoint_a} ---- {endpoint_b}\")\n _links_summary.append((_node_a.name, _port_a, _node_b.name, _port_b))\n\n return _links_summary if not is_print else None", "def listConnectedBoards(dap_class=DAPAccess):\n all_mbeds = MbedBoard.getAllConnectedBoards(dap_class, close=True,\n blocking=False)\n index = 0\n if len(all_mbeds) > 0:\n for mbed in all_mbeds:\n print(\"%d => %s boardId => %s\" % (index, mbed.getInfo().encode('ascii', 'ignore'), mbed.unique_id))\n index += 1\n else:\n print(\"No available boards are connected\")", "def initConnectors(self):\n \n def handlerFunc(message, responseFunc):\n for h in self._handlers:\n h.handleMessage(message, responseFunc)\n \n getLogger(__name__).debug('Initializing %d connectors...' % len(self._connectors))\n for c in self._connectors:\n c.initialize(handlerFunc)", "def print_pointers(self):\n\n ### FILL IN ###", "def outputConnections(self, cls=None):\n scene = self.scene()\n if ( not scene ):\n return []\n \n if ( not cls ):\n cls = XNodeConnection\n \n output = []\n for item in scene.items():\n if ( not isinstance(item, cls) ):\n continue\n \n if ( item.outputNode() == self ):\n output.append(item)\n \n return output", "def get_components_drawables(self):\n # print self.component_list\n print len(self.component_list)\n for c in self.component_list:\n return c.get_drawables()", "def get_queryset(self):\n self.object = self.get_object()\n return self.object.desk.board_set.all()" ]
[ "0.6209401", "0.58279985", "0.5698457", "0.55695814", "0.55657", "0.5535963", "0.55350536", "0.55105424", "0.5420437", "0.5364777", "0.53070045", "0.5268804", "0.5262208", "0.5258166", "0.52504754", "0.5227817", "0.52085537", "0.5199218", "0.51769245", "0.5168415", "0.5168355", "0.5168355", "0.51411617", "0.514025", "0.5099957", "0.50939715", "0.50826126", "0.5042709", "0.5005877", "0.49868584" ]
0.596397
1
Update the navigation property connectors in print. Update the navigation property connectors in print.
def update_connectors( self, print_connector_id, # type: str body, # type: "models.MicrosoftGraphPrintConnector" **kwargs # type: Any ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update_connectors.metadata['url'] # type: ignore path_format_arguments = { 'printConnector-id': self._serialize.url("print_connector_id", print_connector_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'MicrosoftGraphPrintConnector') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rdf_update_connections(rdf, prop, obj, subj, owl):\n conname = prop.split('#')[-1]\n print(\"createcon \"+str(obj)+ \" \" + str(subj))\n obj.relationships.create(conname, subj)\n for i in rdf.objects(subject=prop, predicate=RDFS.subPropertyOf):\n print(i)\n rdf_update_connections(rdf, i, obj, subj, owl)\n for i in rdf.objects(subject=prop, predicate=owl.inverseOf):\n conname = i.split('#')[-1]\n subj.relationships.create(conname, obj)", "def sync_attributes(self, sync_imported=False):\n for cls in itertools.chain(self.classes(), self.object_properties(),\n self.individuals()):\n if not cls.label:\n try:\n cls.label.append(cls.__name__)\n except:\n cls.label.append(cls._name)\n if not cls.comment and cls.__doc__:\n cls.comment.append(inspect.cleandoc(cls.__doc__))\n if sync_imported:\n for onto in self.imported_ontologies:\n onto.sync_attributes()", "def set_property_setters_for_actions_and_links(self, meta):\n\t\tfor doctype, fieldname, field_map in (\n\t\t\t(\"DocType Link\", \"links\", doctype_link_properties),\n\t\t\t(\"DocType Action\", \"actions\", doctype_action_properties),\n\t\t\t(\"DocType State\", \"states\", doctype_state_properties),\n\t\t):\n\t\t\thas_custom = False\n\t\t\titems = []\n\t\t\tfor i, d in enumerate(self.get(fieldname) or []):\n\t\t\t\td.idx = i\n\t\t\t\tif frappe.db.exists(doctype, d.name) and not d.custom:\n\t\t\t\t\t# check property and apply property setter\n\t\t\t\t\toriginal = frappe.get_doc(doctype, d.name)\n\t\t\t\t\tfor prop, prop_type in field_map.items():\n\t\t\t\t\t\tif d.get(prop) != original.get(prop):\n\t\t\t\t\t\t\tself.make_property_setter(prop, d.get(prop), prop_type, apply_on=doctype, row_name=d.name)\n\t\t\t\t\titems.append(d.name)\n\t\t\t\telse:\n\t\t\t\t\t# custom - just insert/update\n\t\t\t\t\td.parent = self.doc_type\n\t\t\t\t\td.custom = 1\n\t\t\t\t\td.save(ignore_permissions=True)\n\t\t\t\t\thas_custom = True\n\t\t\t\t\titems.append(d.name)\n\n\t\t\tself.update_order_property_setter(has_custom, fieldname)\n\t\t\tself.clear_removed_items(doctype, items)", "def update_associations(self):\n for dt_format, old_value, new_value in self.own_list:\n DescriptorFormatTypeManager.own(dt_format, self.entity, old_value, new_value)", "def updateScptRefs(self):\n for scpt in self.refs_scpt.keys():\n self.refs_scpt[scpt] = scpt.getRef()\n self.scptRefs = set(self.refs_scpt.values())", "def refresh_info(self):\n if not self.is_setup_connected() and not self.target_namespace:\n return\n self.get_association_info()\n self.create_techanim_connections()", "def fixRelations (self):\n\t\tnodes = self.getFieldElements (\"relation\")\n\t\tif not nodes: return\n\t\t\n\t\tprint \"\\n%s\" % self.getId()\n\t\tfor r in nodes:\n\t\t\tvalue = XmlUtils.getText(r)\n\t\t\tif not value: return\n\t\t\tXmlUtils.setText (r,\"\")\n\t\t\tif value.startswith (\"http://\"):\n\t\t\t\tr.setAttribute (\"type\", \"Has part\")\n\t\t\t\tr.setAttribute (\"url\", value)\n\t\t\telse:\n\t\t\t\tr.setAttribute (\"type\", \"Is related\")\n\t\t\t\tr.setAttribute (\"title\", value)\n\t\t\tprint r.toxml()\n\t\tif 0:\n\t\t\tself.write()\n\t\t\tprint \"wrote record\"", "def _update_tarsqidoc(self, cp):\n self.tarsqidoc.remove_tlinks()\n for n1, rest in cp.graph.edges.items():\n for n2, edge in cp.graph.edges[n1].items():\n if edge.constraint is not None:\n if edge.constraint.has_simple_relation():\n self._add_constraint_to_tarsqidoc(edge)", "def printGraph(self):\n print \"-----\"\n for feature in self.features:\n feature.printFeature()\n for constraint in self.constraints:\n 
constraint.printConstraint()\n print \"-----\"", "def relationships(self):", "def print_pointers(self):\n\n ### FILL IN ###", "def update_vizualization_layout(self, new):\n self.stages[\"Connectome\"].define_inspect_outputs()\n self.stages[\"Connectome\"].config.subject = self.subject", "def updateProperties(self):\n self.manage_changeProperties(title = self.getTitle(),\n description = self.getDescription(),\n basepath = self.getPath())", "def _update_connections(self, oldVar, newVar):\n vars = [v for v in self.model.get_all_variables() if v.get_source_variable(True) is oldVar]\n # Remove old connections, including interfaces and types so creating the new connection works\n for v in vars:\n self.remove_connections(v)\n self.del_attr(v, u'public_interface')\n self.del_attr(v, u'private_interface')\n v.clear_dependency_info()\n # Create new connections\n for v in vars:\n self.connect_variables(newVar, v)", "def update_view(self):\n for row in self.view.obj_list:\n for obj in row:\n obj._update(self.model)", "def get_connectors(\n self,\n print_connector_id, # type: str\n select=None, # type: Optional[List[Union[str, \"models.Enum28\"]]]\n expand=None, # type: Optional[List[str]]\n **kwargs # type: Any\n ):\n # type: (...) -> \"models.MicrosoftGraphPrintConnector\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrintConnector\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.get_connectors.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printConnector-id': self._serialize.url(\"print_connector_id\", print_connector_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrintConnector', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def print_connections(self):\n print(\"[Printing Connections]\")\n for key in self.connections.keys():\n print(f\"{key}:\\n\\t{self.connections[key]}\")", "def _add_relationships(self, element: Element) -> None:\n elements: Set[str] = {v.id for v in self.element_views}\n\n for relationship in element.get_efferent_relationships():\n if relationship.destination.id in elements:\n self._relationship_views.add(\n RelationshipView(relationship=relationship)\n )\n\n for relationship in element.get_afferent_relationships():\n if relationship.source.id in elements:\n self._relationship_views.add(\n 
RelationshipView(relationship=relationship)\n )", "def postCommitHook(self, datamodel=None):\n self.updateProperties()", "def updateConnectLink(self, connected=False):\n self.welcomePage.updateConnectLink(connected=connected)", "def loadPaths(self):\n for ij in self.link:\n self.link[ij].flow = 0\n for p in self.path:\n for ij in self.path[p].links:\n self.link[ij].flow += self.path[p].flow\n for ij in self.link:\n self.link[ij].updateCost()\n for p in self.path:\n self.path[p].updateCost()", "def travel_print(self):\n if self.is_empty():\n print(\"Linked list's length is 0\")\n else:\n node = self.head\n print(\"head -->\", node.data, end=' ')\n while node.next:\n node = node.next\n print(\"-->\", node.data, end=' ')\n print(\" \")", "def sync_to_ontology(self):\n self.ontology.sync_entity_to_graph(self)", "def _print_properties(self):\n return NotImplemented", "def _populate_relationships(self, rec_curr):\n for relationship_type, goids in rec_curr.relationship.items():\n parent_recs = set([self[goid] for goid in goids]) \n rec_curr.relationship[relationship_type] = parent_recs # replace GO ID with GO Term record object\n for parent_rec in parent_recs:\n if relationship_type not in parent_rec.relationship_rev:\n parent_rec.relationship_rev[relationship_type] = set([rec_curr])\n else:\n parent_rec.relationship_rev[relationship_type].add(rec_curr)", "def commit(self):\n for node in self.dep_graph.nodes_iter():\n role = self.roles[node]\n role.cur_rep = role.new_rep\n role.cur_hosts = list(role.new_hosts)\n for edge in self.dep_graph.edges_iter():\n edge_data = self.dep_graph.get_edge_data(*edge)\n edge_data['cur_weight'] = edge_data['new_weight']", "def update_property(self, property_info):\n SchemaValidator(self.schema_extension_only, self.full_schema_graph).validate_property_schema(property_info)\n self.schema[\"@graph\"].append(property_info)\n self.load_schema(self.schema)\n print(\"Updated the property {} successfully!\".format(property_info[\"rdfs:label\"]))", "def _debug_pyomo_print(self, m):\n print('/' + '='*80)\n print('DEBUGG model pieces:')\n print(' -> objective:')\n print(' ', m.obj.pprint())\n print(' -> variables:')\n for var in m.component_objects(pyo.Var):\n print(' ', var.pprint())\n print(' -> constraints:')\n for constr in m.component_objects(pyo.Constraint):\n print(' ', constr.pprint())\n print('\\\\' + '='*80)\n print('')", "def generate(self):\n self.graph_repl = self.master.graph_repl", "def print_config(self):\n for pod in self.pods:\n for lb in pod.limbs:\n print '%s limb %s ' % (pod.name, lb.name)\n for br in lb.branches:\n br.printInfo()\n sys.stdout.flush()" ]
[ "0.5477307", "0.49132034", "0.49064565", "0.4866146", "0.48580322", "0.4847276", "0.48211145", "0.4750198", "0.47499287", "0.47480568", "0.46613815", "0.46569315", "0.463708", "0.46352023", "0.45975387", "0.45714426", "0.4561662", "0.4557477", "0.45368984", "0.45348784", "0.45271078", "0.45183071", "0.45062652", "0.4474242", "0.44540134", "0.44500154", "0.4448963", "0.44412425", "0.44363612", "0.44345137" ]
0.5036696
1
Create new navigation property to operations for print. Create new navigation property to operations for print.
def create_operations( self, body, # type: "models.MicrosoftGraphPrintOperation" **kwargs # type: Any ): # type: (...) -> "models.MicrosoftGraphPrintOperation" cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphPrintOperation"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.create_operations.metadata['url'] # type: ignore # Construct parameters query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'MicrosoftGraphPrintOperation') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('MicrosoftGraphPrintOperation', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_operations(\n self,\n print_operation_id, # type: str\n select=None, # type: Optional[List[Union[str, \"models.Enum31\"]]]\n expand=None, # type: Optional[List[str]]\n **kwargs # type: Any\n ):\n # type: (...) -> \"models.MicrosoftGraphPrintOperation\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrintOperation\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.get_operations.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printOperation-id': self._serialize.url(\"print_operation_id\", print_operation_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrintOperation', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def __str__(self):\n return f\"{self.PropertyName, self.Prop_ID}\"", "def _print_properties(self):\n return NotImplemented", "def operation(cls):\n return relationship.many_to_one(cls, 'operation')", "def operation(cls):\n return relationship.many_to_one(cls, 'operation')", "def get_raw(self):\n if not self.ops:\n return\n ops = [self.operations.Op_wrapper(op=o) for o in list(self.ops)]\n data = {\n 'author': self.proposer,\n 'title': self.title,\n 'memo': self.memo,\n 'proposed_operations': [o.json() for o in ops],\n 'expiration_time': formatTimeFromNow(self.proposal_expiration),\n 'extensions': [],\n }\n if self.proposal_review:\n data.update({\"review_period_time\": formatTimeFromNow(self.proposal_review)})\n\n ops = self.operations.Proposal_create(**data)\n return self.operation_class(ops)", "def get_po(self):\n predicate = str(self._predicate)\n if predicate.startswith(\"has_property\"):\n predicate = \"be\"\n return predicate + \" \" + str(self._object)", "def __repr__(self) -> str:\n return f\"Property(name={self.name}, property_type={self.property_type})\"", "def dumps(self, indent=0):\n outstr = \" \"*indent + \"MewloDbModel object '{0}' attribute values:\\n\".format(self.__class__.__name__)\n public_props = (name for name in dir(object) if not name.startswith('_'))\n for name in public_props:\n outstr += \" \"*indent + \"{0}: {1}\\n\".format(name, str(getattr(self,name)))\n return outstr", "def __repr__(self):\n return '{} <{}:{} \"{}\" ({}; Pop: {}; Disc Pops: {}; Rep Pops: {})>'.\\\n format(self.id, self.author, self.journal, self.title,\n self.phenotype.phenotype, self.population.population,\n 
self.disc_pops.to_simple_str(),\n self.rep_pops.to_simple_str())", "def __str__(self):\n if self.original_json is not None:\n return json.dumps(self.original_json)\n\n printable_dict = {}\n for key in self.printable_attributes:\n printable_dict[key] = getattr(self, key)\n\n return json.dumps(printable_dict)", "def __str__(self):\n left = ''\n right = ''\n for i in range(len(self.ant)):\n left += Prop.__str__(self.ant[i]) + \", \"\n \n for i in range(len(self.con)):\n right += Prop.__str__(self.con[i]) + \", \"\n return left[:-2] + '|-- ' + right[:-2]", "def _showProperty(self):\n pass", "def printProperties(self):\n print \"Properties of: {0}\".format(self.getName())\n for prop in self._getPropName():\n if isinstance(prop, types.IntType):\n print \"\\t{0} => {1}\".format(prop, self.getPropertie(prop))\n else:\n print \"\\t'{0}' => {1}\".format(prop, self.getPropertie(prop))", "def to_representation(self) -> str:\n raise NotImplementedError()", "def get_repr(self, *args):\n level_representation = \"--\"\n if self.level == 0:\n node = \"| \"\n else:\n node = \"+ \"\n _tree_structure = node + level_representation * self.level + ' ' + self.name\n return _tree_structure", "def pprint(self):\n # just here for defining the interface; work is done in subclasses\n pass", "def __repr__(self):\n\n if not self.output :\n return \"\"\n\n # if self.is_abstract:\n # result = \"\\n # Properties inherited from \" + self.class_name + \"\\n\"\n # if len(self.attributes) > 0:\n # for attribute_ in self.attributes:\n # result += repr(attribute_)\n # # else:\n # # result += \"\\n pass\"\n # result += \"\\n\"\n # result += \" # End of properties inherited from \" + self.class_name + \"\\n\"\n # return result.encode('ascii', 'ignore')\n\n # Not an abstract class, so output as a full class\n result = \"class \"\n result += self.class_name\n\n result += \"(Base): # class definition\\n\"\n\n result += \" __tablename__ = \\'\" + self.class_name.lower() + \"\\'\\n\\n\"\n result += \" id = Column(Integer, primary_key=True)\\n\"\n\n\n # If this class has a general class, then add a foreign key to it\n if self.general_class:\n #result += repr(self.general_class)\n result += \" \" + self.general_class_name.lower() + \"_id = \"\n result += \"Column('\" + self.general_class_name.lower() + \"_id', \"\n result += \"ForeignKey('\" + self.general_class_name.lower() + \".id'), nullable = True)\"\n\n if len(self.attributes) > 0:\n for attribute_ in self.attributes:\n result += repr(attribute_)\n # else:\n # result += \"\\n pass\"\n\n result += \"\\n\"\n\n return result.encode('ascii', 'ignore')", "def operation_command(self, persist=False):\n pass", "def __str__(self):\n return self.__id__() + \" || \" + str(self.__node_a.name) + \" -> \" + str(self.__node_b.name)", "def __str__(self) -> str:\n return '[' + ' -> '.join([str(element) for element in self]) + ']'", "def __str__(self) -> str:\n if self.name_field:\n return str(getattr(self, self.name_field))\n # noinspection PyUnresolvedReferences\n data = [\n # Collect the string representations of related objects.\n # getattr(self, fk_field.attname) and\n # fk_field.value_from_object(self) would only return the primary\n # key of the related object.\n str(getattr(self, fk_field.name))\n for fk_field in get_model_fields(\n self._meta.model, base=False, foreign=True, m2m=False\n )\n if not fk_field.null\n ]\n if len(data) < 2:\n # Cannot build a more meaningful representation than the default.\n return super().__str__()\n else:\n template = \"{}\" + \" ({})\" * (len(data) - 1)\n 
return template.format(*data)", "def make(self):\n return make_operation_space()", "def __str__(self):\n return self.properties.__str__()", "def print_JobProperties(self,mode='minimal'):\n print_view=''\n if self._context_name.count('.')==0:\n additionalinfo=''\n if (mode=='tree&valuenondefault'):\n additionalinfo=\"(Only non default values)\"\n if mode != \"minimal\":\n additionalinfo+=\"(X indicates locked properties)\"\n \n self._log.info(\"### Printing the job properties container %s %s ###\",\n self.__name__ ,additionalinfo)\n\n if(mode=='tree' or mode.startswith('tree&value')):\n print (' [-]'+self.__name__)\n print (' | ')\n elif(mode=='print_v'):\n print_view+=' [-]'+self.__name__+'\\n'+' | '+'\\n' \n else:\n self._log.info(' [-]'+self.__name__)\n \n for k in sorted(self.__dict__.keys()):\n m=self.__dict__.get(k)\n if hasattr(m,'print_JobProperty'):\n m.print_JobProperty(mode)\n if mode=='print_v': \n print_view+=str(m)+'\\n'\n elif hasattr(m,'print_JobProperties'):\n indent='-'\n for i in range(m._context_name.count('.')-1):\n indent+='-'\n if(mode=='tree' or mode.startswith('tree&value')): \n print (' /'+indent+'> ## '+m.__name__+' ## ')\n elif(mode=='print_v'): \n print_view+=' /'+indent+'> ## '+m.__name__+' ## '+'\\n' \n else:\n self._log.info(' /'+indent+'> ## '+m.__name__+' ## ')\n\n if(mode=='print_v'):\n print_view+=m.print_JobProperties(mode)\n else:\n m.print_JobProperties(mode)\n \n if mode=='print_v':\n return print_view \n if self._context_name.count('.')==0:\n self._log.info(\"### Ends the job properties container %s ###\",\n self.__name__ )", "def __str__(self) -> str:\n if self.is_empty():\n return ''\n elif self._rest.is_empty():\n return str(self._first)\n else:\n return str(self._first) + ' -> ' + str(self._rest)", "def __repr__(self):\n\n return \"<Popos popos_id={} name={}>\".format(self.popos_id, self.name)", "def __str__(self):\n if self.is_empty():\n return ''\n elif self._rest.is_empty():\n return str(self._first)\n else:\n return str(self._first) + ' -> ' + str(self._rest)", "def __str__(self):\n return \"->\".join([str(n.data) for n in self.as_list()])", "def preorder_print(self, start , traversal):\n if start:\n traversal += (str(start.value) + \"-\")\n traversal = self.preorder_print(start.left, traversal)\n traversal = self.preorder_print(start.right, traversal)\n return traversal" ]
[ "0.4949499", "0.49028972", "0.48541674", "0.4756891", "0.4756891", "0.47505158", "0.47417718", "0.4694572", "0.4610008", "0.4575893", "0.4546058", "0.45327964", "0.4529304", "0.45105064", "0.44938353", "0.44624767", "0.44544595", "0.4443512", "0.44328377", "0.44179463", "0.4414686", "0.4412858", "0.44096613", "0.44081742", "0.4390902", "0.438493", "0.43787995", "0.4366842", "0.43461052", "0.43278417" ]
0.49647254
0
Get printerShares from print. Get printerShares from print.
def list_printer_shares( self, orderby=None, # type: Optional[List[Union[str, "models.Enum59"]]] select=None, # type: Optional[List[Union[str, "models.Enum60"]]] expand=None, # type: Optional[List[Union[str, "models.Enum61"]]] **kwargs # type: Any ): # type: (...) -> Iterable["models.CollectionOfPrinterShare0"] cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfPrinterShare0"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_printer_shares.metadata['url'] # type: ignore # Construct parameters query_parameters = {} # type: Dict[str, Any] if self._config.top is not None: query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0) if self._config.skip is not None: query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0) if self._config.search is not None: query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str') if self._config.filter is not None: query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str') if self._config.count is not None: query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool') if orderby is not None: query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',') if select is not None: query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',') if expand is not None: query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('CollectionOfPrinterShare0', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.odata_next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize(models.OdataError, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_printer_shares(\n self,\n printer_share_id, # type: str\n select=None, # type: Optional[List[Union[str, \"models.Enum62\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum63\"]]]\n **kwargs # type: Any\n ):\n # type: (...) -> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.get_printer_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def get_shares(\n self,\n printer_share_id, # type: str\n select=None, # type: Optional[List[Union[str, \"models.Enum88\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum89\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.get_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def list_shares(\n self,\n orderby=None, # type: Optional[List[Union[str, \"models.Enum85\"]]]\n select=None, # type: Optional[List[Union[str, \"models.Enum86\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum87\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> Iterable[\"models.CollectionOfPrinterShare1\"]\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.CollectionOfPrinterShare1\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n def prepare_request(next_link=None):\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n if not next_link:\n # Construct URL\n url = self.list_shares.metadata['url'] # type: ignore\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if self._config.top is not None:\n query_parameters['$top'] = self._serialize.query(\"self._config.top\", self._config.top, 'int', minimum=0)\n if self._config.skip is not None:\n query_parameters['$skip'] = self._serialize.query(\"self._config.skip\", self._config.skip, 'int', minimum=0)\n if self._config.search is not None:\n query_parameters['$search'] = self._serialize.query(\"self._config.search\", self._config.search, 'str')\n if self._config.filter is not None:\n query_parameters['$filter'] = self._serialize.query(\"self._config.filter\", self._config.filter, 'str')\n if self._config.count is not None:\n query_parameters['$count'] = self._serialize.query(\"self._config.count\", self._config.count, 'bool')\n if orderby is not None:\n query_parameters['$orderby'] = self._serialize.query(\"orderby\", orderby, '[str]', div=',')\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n request = self._client.get(url, query_parameters, header_parameters)\n else:\n url = next_link\n query_parameters = {} # type: Dict[str, Any]\n request = self._client.get(url, query_parameters, header_parameters)\n return request\n\n def extract_data(pipeline_response):\n deserialized = self._deserialize('CollectionOfPrinterShare1', pipeline_response)\n list_of_elem = deserialized.value\n if cls:\n list_of_elem = cls(list_of_elem)\n return deserialized.odata_next_link or None, iter(list_of_elem)\n\n def get_next(next_link=None):\n request = prepare_request(next_link)\n\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n error = self._deserialize(models.OdataError, response)\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n return pipeline_response\n\n return ItemPaged(\n get_next, extract_data\n )", "def get_workspace_shares(workspaces_dict, kb_staff):\n client = MongoClient(mongoDB_metrics_connection + to_workspace)\n db = client.workspace\n max_shared_count = 0\n perms_dict = {10: \"view\", 20: \"edit\", 30: \"admin\"}\n\n shares_query = db.workspaceACLs.find(\n {\"perm\": {\"$in\": [10, 20, 30]}}, {\"id\": 1, \"user\": 1, \"perm\": 1, \"_id\": 0}\n )\n for record in shares_query:\n if record[\"id\"] in workspaces_dict:\n # do stuff as it is a users narrative and has at least 1 share.\n is_kb_staff = 0\n if record[\"user\"] in kb_staff:\n is_kb_staff = 1\n share_entry = [record[\"user\"], perms_dict[record[\"perm\"]], str(is_kb_staff)]\n workspaces_dict[record[\"id\"]][\"shares_list\"].extend(share_entry)\n\n 
max_shared_count = 0\n for ws in workspaces_dict:\n share_number = len(workspaces_dict[ws][\"shares_list\"])\n if share_number > max_shared_count:\n max_shared_count = share_number\n return (workspaces_dict, int(max_shared_count / 3))", "def list_accepted_portfolio_shares_single_page(self, **kwargs):\n return slurp(\n 'list_accepted_portfolio_shares',\n self.list_accepted_portfolio_shares,\n 'PortfolioDetails',\n **kwargs\n )", "def create_printer_shares(\n self,\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) -> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.create_printer_shares.metadata['url'] # type: ignore\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [201]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def get_shares(credentials: Credentials, subscription_id: str, file_service: Dict) -> List[Dict]:\n try:\n client = get_client(credentials, subscription_id)\n shares = list(\n map(\n lambda x: x.as_dict(), client.file_shares.list(\n file_service['resource_group_name'],\n file_service['storage_account_name'],\n ),\n ),\n )\n\n except ClientAuthenticationError as e:\n logger.warning(f\"Client Authentication Error while retrieving tables - {e}\")\n return []\n except ResourceNotFoundError as e:\n logger.warning(f\"Table resource not found error - {e}\")\n return []\n except HttpResponseError as e:\n logger.warning(f\"Error while retrieving file shares - {e}\")\n return []\n\n return shares", "def sharelist():\n # Get field to order by for displaying shares\n if(request.args.get('orderby')):\n orderby = request.args.get('orderby')\n else:\n orderby = None\n # Get order for displaying shares\n if(request.args.get('order')):\n order = request.args.get('order')\n else:\n order = \"asc\"\n # Get the page of shares to display and calculate offset\n # TODO: DEFINE LIMIT IN A CONFIG\n limit = 10\n if(request.args.get('page')):\n offset = 10*(int(request.args.get('page'))-1)\n else:\n offset = 0\n # Get shares\n shares, sharecount = gdb.getshares(\n orderby=orderby,\n order=order,\n offset=offset,\n limit=limit)\n\n # Render template\n 
return render_template('sharelist.html', shares=shares,\n sharecount=sharecount,\n countperpage=limit,\n userbalance=current_user.balance)", "def file_shares(self) -> pulumi.Output[Sequence['outputs.FileShareConfigResponse']]:\n return pulumi.get(self, \"file_shares\")", "def file_shares(self) -> Sequence['outputs.FileShareConfigResponse']:\n return pulumi.get(self, \"file_shares\")", "def get_shares_count(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetSharesCount', self.handle)", "def get_company_and_price(shares: list[Share]) -> list[Share]:\n\n for share in shares:\n\n share_info = lookup(share.symbol)\n if share_info is not None:\n share.company_name = share_info[\"name\"]\n share.price = share_info[\"price\"]\n share.total = share.price * share.qty\n else:\n share.company_name = CMP_NOT_FOUND\n\n return shares", "def create_shares(\n self,\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) -> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.create_shares.metadata['url'] # type: ignore\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [201]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def shares_to_core(self, shares):\n return shares * self.core_per_share()", "def get_workspace_share_details():\n\n # connect to mysql\n db_connection = mysql.connect(\n host=sql_host, user=\"metrics\", passwd=metrics_mysql_password, database=\"metrics\"\n )\n\n cursor = db_connection.cursor()\n query = \"use \" + query_on\n cursor.execute(query)\n\n workspaces_dict = get_workspaces(db_connection)\n kb_staff = get_kbase_staff(db_connection)\n (workspaces_dict, max_shared_count) = get_workspace_shares(\n workspaces_dict, kb_staff\n )\n\n ################\n # Print the header line:\n ################\n header_line = (\n \"Narrative ID\\tOwner\\tCreation Date\\tLast Modified\\tis_deleted\\tis_public\"\n )\n for i in range(max_shared_count):\n header_line += \"\\tShared_person_{}\\tShare_Type_{}\\tis_KB_Staff_{}\".format(\n str(i + 1), str(i + 1), str(i + 1)\n )\n print(header_line)\n\n ###############\n # Print the WS rows\n ###############\n for ws_id 
in workspaces_dict:\n print(\n \"{}\\t{}\\t{}\\t{}\\t{}\\t{}\\t{}\".format(\n str(ws_id),\n workspaces_dict[ws_id][\"username\"],\n workspaces_dict[ws_id][\"creation_date\"],\n workspaces_dict[ws_id][\"mod_date\"],\n str(workspaces_dict[ws_id][\"is_deleted\"]),\n str(workspaces_dict[ws_id][\"is_public\"]),\n \"\\t\".join(workspaces_dict[ws_id][\"shares_list\"]),\n )\n )", "def retrieve(self,shareslist):\n raise NotImplementedError('subclasses must override retrieve()!')", "def get_shares_for_url(url):\n return twitter_shares_for_url(url) + facebook_shares_for_url(url)", "def shares(self, unit=None, units=None, timezone=None,\r\n rollup=None, limit=None, unit_reference_ts=None):\r\n params = base.get_params(None, locals())\r\n return self._get('shares', params)", "def share_replicas_get_all_by_share(context, share_id,\n with_share_data=False,\n with_share_server=False, session=None):\n session = session or get_session()\n\n result = _share_replica_get_with_filters(\n context, with_share_server=with_share_server,\n share_id=share_id, session=session).all()\n\n if with_share_data:\n result = _set_instances_share_data(context, result, session)\n\n return result", "def test_dashboards_v2_list_shares(self):\n pass", "def share_instances_get_all_by_share(context, share_id):\n result = (\n model_query(context, models.ShareInstance).filter(\n models.ShareInstance.share_id == share_id,\n ).all()\n )\n return result", "def file_shares(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['FileShareConfigArgs']]]]:\n return pulumi.get(self, \"file_shares\")", "def get_share(entry):\n share, share_pc, currency = \"\", \"\", \"\"\n if entry != \"\":\n if \":\" in entry:\n clean_entry = entry.split(\":\")[1].strip()\n if \"(\" in clean_entry:\n share_list = clean_entry.replace(\")\", \"\").split(\"(\")\n share_pc = share_list[1]\n currency = share_list[0].split(\" \")[0]\n share = \" \".join(share_list[0].split(\" \")[1:]).strip()\n else:\n if \"(\" in entry and \"%\" in entry:\n share_list = entry.replace(\")\", \"\").split(\"(\")\n share_pc = share_list[1]\n for feat in share_list:\n if \"%\" in feat:\n share_pc = feat\n elif \"%\" in entry:\n share_pc = entry\n # Obsolete code\n # share =.replace(\"Kapital:\", \"\").strip()\n # currency = share.split(\" \")[0]\n return share, share_pc, currency", "def find_all_shares(self, user, stock=None, brokerage_account=None, budget_account=None):\n query_args = {'user': user, 'stock': stock, 'brokerage_account': brokerage_account, 'budget_account': budget_account}\n # don't include the args if the value is none\n final_query_args = {k: v for k, v in query_args.items() if v is not None}\n return super().get_queryset().filter(**final_query_args)", "def get_shares_in_recycle_bin_by_share_server(\n context, share_server_id, filters=None, sort_key=None, sort_dir=None):\n if filters is None:\n filters = {}\n filters[\"is_soft_deleted\"] = True\n query = _share_get_all_with_filters(\n context, share_server_id=share_server_id, filters=filters,\n sort_key=sort_key, sort_dir=sort_dir)\n return query", "def synchronize(self):\n self.increment_pc()\n shares = [self._exchange_shares(player, GF256(0))\n for player in self.players]\n result = gather_shares(shares)\n result.addCallback(lambda _: None)\n return result", "def get_shares_info(user_id: int, db: Connection) -> List[Tuple[str, int]]:\n\n sql = f\"\"\"SELECT symbol, SUM(qty) FROM shares \n WHERE user_id = {user_id}\n GROUP BY symbol;\"\"\"\n\n try:\n cur = db.cursor()\n cur.execute(sql)\n return cur.fetchall()\n except 
Error:\n return []", "def share_combining(self, shares):\n mod_shares = [share[1] * (calc_lambda(shares,\n share[0], self.precomputed_fac)) for i, share in enumerate(shares)]\n return sum(mod_shares)", "def update_printer_shares(\n self,\n printer_share_id, # type: str\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) -> None\n cls = kwargs.pop('cls', None) # type: ClsType[None]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.update_printer_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [204]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n if cls:\n return cls(pipeline_response, None, {})", "def get_share(self, activity_user_id, activity_id, share_id):\n return None" ]
[ "0.5811979", "0.5719837", "0.5379649", "0.53750217", "0.5357973", "0.53297055", "0.528701", "0.51888907", "0.50321615", "0.4997966", "0.49812928", "0.49800614", "0.49704927", "0.4904655", "0.48964918", "0.48660332", "0.48171493", "0.47529054", "0.46977648", "0.46971932", "0.46454364", "0.45945734", "0.45454213", "0.45418802", "0.45128226", "0.4495962", "0.4488998", "0.44774207", "0.44628212", "0.44584772" ]
0.5929783
0
Create new navigation property to printerShares for print. Create new navigation property to printerShares for print.
def create_printer_shares( self, body, # type: "models.MicrosoftGraphPrinterShare" **kwargs # type: Any ): # type: (...) -> "models.MicrosoftGraphPrinterShare" cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphPrinterShare"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.create_printer_shares.metadata['url'] # type: ignore # Construct parameters query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_shares(\n self,\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) -> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.create_shares.metadata['url'] # type: ignore\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [201]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def get_printer_shares(\n self,\n printer_share_id, # type: str\n select=None, # type: Optional[List[Union[str, \"models.Enum62\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum63\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.get_printer_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def get_shares(\n self,\n printer_share_id, # type: str\n select=None, # type: Optional[List[Union[str, \"models.Enum88\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum89\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.get_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def update_shares(self):\n self.nb_shares = self.shares.count()\n self.save()", "def update_printer_shares(\n self,\n printer_share_id, # type: str\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) 
-> None\n cls = kwargs.pop('cls', None) # type: ClsType[None]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.update_printer_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [204]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n if cls:\n return cls(pipeline_response, None, {})", "def update_shares(\n self,\n printer_share_id, # type: str\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) 
-> None\n cls = kwargs.pop('cls', None) # type: ClsType[None]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.update_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [204]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n if cls:\n return cls(pipeline_response, None, {})", "def list_printer_shares(\n self,\n orderby=None, # type: Optional[List[Union[str, \"models.Enum59\"]]]\n select=None, # type: Optional[List[Union[str, \"models.Enum60\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum61\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> Iterable[\"models.CollectionOfPrinterShare0\"]\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.CollectionOfPrinterShare0\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n def prepare_request(next_link=None):\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n if not next_link:\n # Construct URL\n url = self.list_printer_shares.metadata['url'] # type: ignore\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if self._config.top is not None:\n query_parameters['$top'] = self._serialize.query(\"self._config.top\", self._config.top, 'int', minimum=0)\n if self._config.skip is not None:\n query_parameters['$skip'] = self._serialize.query(\"self._config.skip\", self._config.skip, 'int', minimum=0)\n if self._config.search is not None:\n query_parameters['$search'] = self._serialize.query(\"self._config.search\", self._config.search, 'str')\n if self._config.filter is not None:\n query_parameters['$filter'] = self._serialize.query(\"self._config.filter\", self._config.filter, 'str')\n if self._config.count is not None:\n query_parameters['$count'] = self._serialize.query(\"self._config.count\", self._config.count, 'bool')\n if orderby is not None:\n query_parameters['$orderby'] = self._serialize.query(\"orderby\", orderby, '[str]', div=',')\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n request = self._client.get(url, query_parameters, header_parameters)\n else:\n url = next_link\n query_parameters = {} # type: Dict[str, Any]\n request = self._client.get(url, query_parameters, header_parameters)\n return request\n\n def extract_data(pipeline_response):\n deserialized = self._deserialize('CollectionOfPrinterShare0', pipeline_response)\n list_of_elem = deserialized.value\n if cls:\n list_of_elem = cls(list_of_elem)\n return deserialized.odata_next_link or None, iter(list_of_elem)\n\n def get_next(next_link=None):\n request = prepare_request(next_link)\n\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n error = self._deserialize(models.OdataError, response)\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n return pipeline_response\n\n return ItemPaged(\n get_next, extract_data\n )", "def set_share(self, total_people):\n self.paid = self._get_paid()\n self.share = round(self.paid/Decimal(total_people), 2)", "def report_shares(self, flush=False):\n now = int(time())\n if (now - self.last_graph_transmit) > 90:\n # bounds for a share to be grouped for transmission in minute\n # chunks. 
the upper bound is the last minute that has\n # completely passed, while the lower bound is the last time we\n # sent graph data (which should also be an exact round minute,\n # ensuring that we don't submit info for the same minute twice)\n upper = (now // 60) * 60\n # will cause all shares to get reported\n if flush:\n upper += 120\n lower = self.last_graph_transmit\n # share records that are to be discarded\n expire = now - self.config['keep_share']\n # for transmission\n chunks = {}\n # a list of indexes that have expired and are ready for removal\n rem = []\n for i, key in enumerate(self.share_types):\n for stamp, shares in getattr(self, key).iteritems():\n if stamp >= lower and stamp < upper:\n minute = (stamp // 60) * 60\n chunks.setdefault(minute, [0, 0, 0, 0])\n chunks[minute][i] += shares\n if stamp < expire:\n rem.append((key, stamp))\n\n for key, stamp in rem:\n del getattr(self, key)[stamp]\n\n for stamp, shares in chunks.iteritems():\n self.celery.send_task_pp(\n 'add_one_minute', self.address, shares[0],\n stamp, self.worker, *shares[1:])\n self.last_graph_transmit = upper\n\n # don't recalc their diff more often than interval\n if (self.config['vardiff']['enabled'] and\n now - self.last_diff_adj > self.config['vardiff']['interval']):\n self.recalc_vardiff()", "def create_share(self):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_CreateShare', self.handle))", "def shares_outstanding(self, shares_outstanding):\n\n self._shares_outstanding = shares_outstanding", "def list_shares(\n self,\n orderby=None, # type: Optional[List[Union[str, \"models.Enum85\"]]]\n select=None, # type: Optional[List[Union[str, \"models.Enum86\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum87\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> Iterable[\"models.CollectionOfPrinterShare1\"]\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.CollectionOfPrinterShare1\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n def prepare_request(next_link=None):\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n if not next_link:\n # Construct URL\n url = self.list_shares.metadata['url'] # type: ignore\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if self._config.top is not None:\n query_parameters['$top'] = self._serialize.query(\"self._config.top\", self._config.top, 'int', minimum=0)\n if self._config.skip is not None:\n query_parameters['$skip'] = self._serialize.query(\"self._config.skip\", self._config.skip, 'int', minimum=0)\n if self._config.search is not None:\n query_parameters['$search'] = self._serialize.query(\"self._config.search\", self._config.search, 'str')\n if self._config.filter is not None:\n query_parameters['$filter'] = self._serialize.query(\"self._config.filter\", self._config.filter, 'str')\n if self._config.count is not None:\n query_parameters['$count'] = self._serialize.query(\"self._config.count\", self._config.count, 'bool')\n if orderby is not None:\n query_parameters['$orderby'] = self._serialize.query(\"orderby\", orderby, '[str]', div=',')\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n request = self._client.get(url, query_parameters, header_parameters)\n else:\n url = next_link\n query_parameters = {} # type: Dict[str, Any]\n request = self._client.get(url, query_parameters, header_parameters)\n return request\n\n def extract_data(pipeline_response):\n deserialized = self._deserialize('CollectionOfPrinterShare1', pipeline_response)\n list_of_elem = deserialized.value\n if cls:\n list_of_elem = cls(list_of_elem)\n return deserialized.odata_next_link or None, iter(list_of_elem)\n\n def get_next(next_link=None):\n request = prepare_request(next_link)\n\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n error = self._deserialize(models.OdataError, response)\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n return pipeline_response\n\n return ItemPaged(\n get_next, extract_data\n )", "def shares_to_core(self, shares):\n return shares * self.core_per_share()", "def trade(self, shares, price, last_trade):\n # Simulate likelyhood of fill\n likely_fill = random.randint(0, 100) / 100\n if likely_fill > self.fill_rate:\n print('Order not filled')\n return\n\n trade_value = shares * price\n\n # Net long / short\n if self.sign(shares) != self.sign(self.portfoolio['Shares']) and self.portfoolio['Shares'] != 0:\n trade_pnl = (shares * self.portfoolio['Avg_Cost']) - trade_value\n\n self.portfoolio['PnL'] += trade_pnl\n\n # Flat\n else:\n trade_pnl = 0\n\n self.portfoolio['Shares'] += shares\n self.portfoolio['Cost'] += shares * price + trade_pnl\n self.portfoolio['Avg_Cost'] = self.portfoolio['Cost'] / 
self.portfoolio['Shares']\n self.portfoolio['Unrealized_PnL'] = self.portfoolio['Shares'] * last_trade - self.portfoolio['Avg_Cost'] * self.portfoolio['Shares']\n\n print('Pnl booked: {} | Portfolio: {}'.format(trade_pnl, self.portfoolio))\n print(last_trade)", "def test_set_share(self):\n self.app.post_json(url=\"/config/shares\",\n params=dict(\n source='gsiftp://source',\n destination='gsiftp://nowhere',\n vo='dteam',\n share=80\n ),\n status=200\n )", "def get_shares_count(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetSharesCount', self.handle)", "def add_price(self, price, date, shares):\n\t\tvalue = price * shares\n\t\tself.price_list.append(value)\n\t\tself.date_priced.append(date)", "def get_company_and_price(shares: list[Share]) -> list[Share]:\n\n for share in shares:\n\n share_info = lookup(share.symbol)\n if share_info is not None:\n share.company_name = share_info[\"name\"]\n share.price = share_info[\"price\"]\n share.total = share.price * share.qty\n else:\n share.company_name = CMP_NOT_FOUND\n\n return shares", "def _load_shares(neo4j_session: neo4j.Session, shares: List[Dict], update_tag: int) -> None:\n ingest_shares = \"\"\"\n UNWIND $shares_list as s\n MERGE (share:AzureStorageFileShare{id: s.id})\n ON CREATE SET share.firstseen = timestamp(), share.type = s.type\n SET share.name = s.name,\n share.lastupdated = $azure_update_tag,\n share.lastmodifiedtime = s.last_modified_time,\n share.sharequota = s.share_quota,\n share.accesstier = s.access_tier,\n share.deleted = s.deleted,\n share.accesstierchangetime = s.access_tier_change_time,\n share.accesstierstatus = s.access_tier_status,\n share.deletedtime = s.deleted_time,\n share.enabledprotocols = s.enabled_protocols,\n share.remainingretentiondays = s.remaining_retention_days,\n share.shareusagebytes = s.share_usage_bytes,\n share.version = s.version\n WITH share, s\n MATCH (fs:AzureStorageFileService{id: s.service_id})\n MERGE (fs)-[r:CONTAINS]->(share)\n ON CREATE SET r.firstseen = timestamp()\n SET r.lastupdated = $azure_update_tag\n \"\"\"\n\n neo4j_session.run(\n ingest_shares,\n shares_list=shares,\n azure_update_tag=update_tag,\n )", "def addShare(self, name, path, securityMode = NFSSecurityMode.NONE):\n if name in self.shares:\n raise ValueError(\"Share '%s' is already in use\"%name)\n \n share = NFSShare()\n share.name = name\n share.path = path\n share.securityMode = securityMode\n self.shares[name] = share\n return share", "def _exchange_shares(self, peer_id, field_element):\n assert isinstance(field_element, FieldElement)\n\n if peer_id == self.id:\n return Share(self, field_element.field, field_element)\n else:\n share = self._expect_share(peer_id, field_element.field)\n pc = tuple(self.program_counter)\n self.protocols[peer_id].sendShare(pc, field_element)\n return share", "def list_accepted_portfolio_shares_single_page(self, **kwargs):\n return slurp(\n 'list_accepted_portfolio_shares',\n self.list_accepted_portfolio_shares,\n 'PortfolioDetails',\n **kwargs\n )", "def memory_shares(self, shares):\n\n assert shares >= 0\n config_spec = vim.vm.ConfigSpec()\n shares_alloc = vim.ResourceAllocationInfo()\n shares_alloc.shares = vim.SharesInfo(level=\"custom\", shares=shares)\n config_spec.memoryAllocation = shares_alloc\n return self.vm_obj.ReconfigVM_Task(config_spec)", "def file_shares(self) -> Sequence['outputs.FileShareConfigResponse']:\n return pulumi.get(self, \"file_shares\")", "def share_level(self, new_level: str) -> None:\n self._db_data.share_level = new_level\n for data in 
self._child_data.values():\n original_auto_save = data.auto_save\n data.auto_save = False\n data.share_level = new_level\n data.auto_save = original_auto_save\n if self.auto_save:\n self.save_metadata()", "def file_shares(self) -> pulumi.Output[Sequence['outputs.FileShareConfigResponse']]:\n return pulumi.get(self, \"file_shares\")", "def create_share_from_snapshot(self, context, share, snapshot,\n share_server=None):\n raise NotImplementedError()", "def core_to_shares(self, amount):\n return amount / self.core_per_share()", "def file_shares(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['FileShareConfigArgs']]]]:\n return pulumi.get(self, \"file_shares\")", "def share(self):\n friend_sql = self.db.text(\"SELECT u.id, concat(u.second_name, ' ', u.first_name) text FROM user u \"\n \"inner join friend f on f.friend_id=u.id where f.user_id =%s \"\n \"and f.status = 1\" % self.current_user.id)\n share_sql = self.db.text(\"SELECT fs.user_assigned_id id, \"\n \"concat(u.first_name, ' ', COALESCE(u.second_name, '')) text \"\n \"FROM file_share fs inner join user u on u.id = fs.user_assigned_id \"\n \"where fs.file_id = %s\" % self.file_id)\n\n friend_db_list, share_db_list = self.db.execute([friend_sql, share_sql])\n return {'success': True,\n 'friend_list': friend_db_list,\n 'shared_list': share_db_list}" ]
[ "0.57783026", "0.5231743", "0.50983196", "0.4851825", "0.48485887", "0.47849977", "0.47806564", "0.45574224", "0.45157352", "0.44472182", "0.44129157", "0.4376351", "0.4357706", "0.4355947", "0.43264663", "0.4274848", "0.42341945", "0.4221774", "0.41627723", "0.41623652", "0.41577256", "0.4141662", "0.414043", "0.4106157", "0.40987843", "0.4076204", "0.40183246", "0.40072438", "0.3997681", "0.39825836" ]
0.6013365
0
Get printerShares from print.
def get_printer_shares(
    self,
    printer_share_id,  # type: str
    select=None,  # type: Optional[List[Union[str, "models.Enum62"]]]
    expand=None,  # type: Optional[List[Union[str, "models.Enum63"]]]
    **kwargs  # type: Any
):
    # type: (...) -> "models.MicrosoftGraphPrinterShare"
    cls = kwargs.pop('cls', None)  # type: ClsType["models.MicrosoftGraphPrinterShare"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    accept = "application/json"

    # Construct URL
    url = self.get_printer_shares.metadata['url']  # type: ignore
    path_format_arguments = {
        'printerShare-id': self._serialize.url("printer_share_id", printer_share_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    if select is not None:
        query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
    if expand is not None:
        query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_printer_shares(\n self,\n orderby=None, # type: Optional[List[Union[str, \"models.Enum59\"]]]\n select=None, # type: Optional[List[Union[str, \"models.Enum60\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum61\"]]]\n **kwargs # type: Any\n ):\n # type: (...) -> Iterable[\"models.CollectionOfPrinterShare0\"]\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.CollectionOfPrinterShare0\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n def prepare_request(next_link=None):\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n if not next_link:\n # Construct URL\n url = self.list_printer_shares.metadata['url'] # type: ignore\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if self._config.top is not None:\n query_parameters['$top'] = self._serialize.query(\"self._config.top\", self._config.top, 'int', minimum=0)\n if self._config.skip is not None:\n query_parameters['$skip'] = self._serialize.query(\"self._config.skip\", self._config.skip, 'int', minimum=0)\n if self._config.search is not None:\n query_parameters['$search'] = self._serialize.query(\"self._config.search\", self._config.search, 'str')\n if self._config.filter is not None:\n query_parameters['$filter'] = self._serialize.query(\"self._config.filter\", self._config.filter, 'str')\n if self._config.count is not None:\n query_parameters['$count'] = self._serialize.query(\"self._config.count\", self._config.count, 'bool')\n if orderby is not None:\n query_parameters['$orderby'] = self._serialize.query(\"orderby\", orderby, '[str]', div=',')\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n request = self._client.get(url, query_parameters, header_parameters)\n else:\n url = next_link\n query_parameters = {} # type: Dict[str, Any]\n request = self._client.get(url, query_parameters, header_parameters)\n return request\n\n def extract_data(pipeline_response):\n deserialized = self._deserialize('CollectionOfPrinterShare0', pipeline_response)\n list_of_elem = deserialized.value\n if cls:\n list_of_elem = cls(list_of_elem)\n return deserialized.odata_next_link or None, iter(list_of_elem)\n\n def get_next(next_link=None):\n request = prepare_request(next_link)\n\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n error = self._deserialize(models.OdataError, response)\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n return pipeline_response\n\n return ItemPaged(\n get_next, extract_data\n )", "def get_shares(\n self,\n printer_share_id, # type: str\n select=None, # type: Optional[List[Union[str, \"models.Enum88\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum89\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.get_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def list_shares(\n self,\n orderby=None, # type: Optional[List[Union[str, \"models.Enum85\"]]]\n select=None, # type: Optional[List[Union[str, \"models.Enum86\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum87\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> Iterable[\"models.CollectionOfPrinterShare1\"]\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.CollectionOfPrinterShare1\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n def prepare_request(next_link=None):\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n if not next_link:\n # Construct URL\n url = self.list_shares.metadata['url'] # type: ignore\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if self._config.top is not None:\n query_parameters['$top'] = self._serialize.query(\"self._config.top\", self._config.top, 'int', minimum=0)\n if self._config.skip is not None:\n query_parameters['$skip'] = self._serialize.query(\"self._config.skip\", self._config.skip, 'int', minimum=0)\n if self._config.search is not None:\n query_parameters['$search'] = self._serialize.query(\"self._config.search\", self._config.search, 'str')\n if self._config.filter is not None:\n query_parameters['$filter'] = self._serialize.query(\"self._config.filter\", self._config.filter, 'str')\n if self._config.count is not None:\n query_parameters['$count'] = self._serialize.query(\"self._config.count\", self._config.count, 'bool')\n if orderby is not None:\n query_parameters['$orderby'] = self._serialize.query(\"orderby\", orderby, '[str]', div=',')\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n request = self._client.get(url, query_parameters, header_parameters)\n else:\n url = next_link\n query_parameters = {} # type: Dict[str, Any]\n request = self._client.get(url, query_parameters, header_parameters)\n return request\n\n def extract_data(pipeline_response):\n deserialized = self._deserialize('CollectionOfPrinterShare1', pipeline_response)\n list_of_elem = deserialized.value\n if cls:\n list_of_elem = cls(list_of_elem)\n return deserialized.odata_next_link or None, iter(list_of_elem)\n\n def get_next(next_link=None):\n request = prepare_request(next_link)\n\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n error = self._deserialize(models.OdataError, response)\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n return pipeline_response\n\n return ItemPaged(\n get_next, extract_data\n )", "def get_workspace_shares(workspaces_dict, kb_staff):\n client = MongoClient(mongoDB_metrics_connection + to_workspace)\n db = client.workspace\n max_shared_count = 0\n perms_dict = {10: \"view\", 20: \"edit\", 30: \"admin\"}\n\n shares_query = db.workspaceACLs.find(\n {\"perm\": {\"$in\": [10, 20, 30]}}, {\"id\": 1, \"user\": 1, \"perm\": 1, \"_id\": 0}\n )\n for record in shares_query:\n if record[\"id\"] in workspaces_dict:\n # do stuff as it is a users narrative and has at least 1 share.\n is_kb_staff = 0\n if record[\"user\"] in kb_staff:\n is_kb_staff = 1\n share_entry = [record[\"user\"], perms_dict[record[\"perm\"]], str(is_kb_staff)]\n workspaces_dict[record[\"id\"]][\"shares_list\"].extend(share_entry)\n\n 
max_shared_count = 0\n for ws in workspaces_dict:\n share_number = len(workspaces_dict[ws][\"shares_list\"])\n if share_number > max_shared_count:\n max_shared_count = share_number\n return (workspaces_dict, int(max_shared_count / 3))", "def list_accepted_portfolio_shares_single_page(self, **kwargs):\n return slurp(\n 'list_accepted_portfolio_shares',\n self.list_accepted_portfolio_shares,\n 'PortfolioDetails',\n **kwargs\n )", "def create_printer_shares(\n self,\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) -> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.create_printer_shares.metadata['url'] # type: ignore\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [201]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def get_shares(credentials: Credentials, subscription_id: str, file_service: Dict) -> List[Dict]:\n try:\n client = get_client(credentials, subscription_id)\n shares = list(\n map(\n lambda x: x.as_dict(), client.file_shares.list(\n file_service['resource_group_name'],\n file_service['storage_account_name'],\n ),\n ),\n )\n\n except ClientAuthenticationError as e:\n logger.warning(f\"Client Authentication Error while retrieving tables - {e}\")\n return []\n except ResourceNotFoundError as e:\n logger.warning(f\"Table resource not found error - {e}\")\n return []\n except HttpResponseError as e:\n logger.warning(f\"Error while retrieving file shares - {e}\")\n return []\n\n return shares", "def sharelist():\n # Get field to order by for displaying shares\n if(request.args.get('orderby')):\n orderby = request.args.get('orderby')\n else:\n orderby = None\n # Get order for displaying shares\n if(request.args.get('order')):\n order = request.args.get('order')\n else:\n order = \"asc\"\n # Get the page of shares to display and calculate offset\n # TODO: DEFINE LIMIT IN A CONFIG\n limit = 10\n if(request.args.get('page')):\n offset = 10*(int(request.args.get('page'))-1)\n else:\n offset = 0\n # Get shares\n shares, sharecount = gdb.getshares(\n orderby=orderby,\n order=order,\n offset=offset,\n limit=limit)\n\n # Render template\n 
return render_template('sharelist.html', shares=shares,\n sharecount=sharecount,\n countperpage=limit,\n userbalance=current_user.balance)", "def file_shares(self) -> pulumi.Output[Sequence['outputs.FileShareConfigResponse']]:\n return pulumi.get(self, \"file_shares\")", "def file_shares(self) -> Sequence['outputs.FileShareConfigResponse']:\n return pulumi.get(self, \"file_shares\")", "def get_shares_count(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetSharesCount', self.handle)", "def get_company_and_price(shares: list[Share]) -> list[Share]:\n\n for share in shares:\n\n share_info = lookup(share.symbol)\n if share_info is not None:\n share.company_name = share_info[\"name\"]\n share.price = share_info[\"price\"]\n share.total = share.price * share.qty\n else:\n share.company_name = CMP_NOT_FOUND\n\n return shares", "def create_shares(\n self,\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) -> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.create_shares.metadata['url'] # type: ignore\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [201]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def shares_to_core(self, shares):\n return shares * self.core_per_share()", "def get_workspace_share_details():\n\n # connect to mysql\n db_connection = mysql.connect(\n host=sql_host, user=\"metrics\", passwd=metrics_mysql_password, database=\"metrics\"\n )\n\n cursor = db_connection.cursor()\n query = \"use \" + query_on\n cursor.execute(query)\n\n workspaces_dict = get_workspaces(db_connection)\n kb_staff = get_kbase_staff(db_connection)\n (workspaces_dict, max_shared_count) = get_workspace_shares(\n workspaces_dict, kb_staff\n )\n\n ################\n # Print the header line:\n ################\n header_line = (\n \"Narrative ID\\tOwner\\tCreation Date\\tLast Modified\\tis_deleted\\tis_public\"\n )\n for i in range(max_shared_count):\n header_line += \"\\tShared_person_{}\\tShare_Type_{}\\tis_KB_Staff_{}\".format(\n str(i + 1), str(i + 1), str(i + 1)\n )\n print(header_line)\n\n ###############\n # Print the WS rows\n ###############\n for ws_id 
in workspaces_dict:\n print(\n \"{}\\t{}\\t{}\\t{}\\t{}\\t{}\\t{}\".format(\n str(ws_id),\n workspaces_dict[ws_id][\"username\"],\n workspaces_dict[ws_id][\"creation_date\"],\n workspaces_dict[ws_id][\"mod_date\"],\n str(workspaces_dict[ws_id][\"is_deleted\"]),\n str(workspaces_dict[ws_id][\"is_public\"]),\n \"\\t\".join(workspaces_dict[ws_id][\"shares_list\"]),\n )\n )", "def retrieve(self,shareslist):\n raise NotImplementedError('subclasses must override retrieve()!')", "def get_shares_for_url(url):\n return twitter_shares_for_url(url) + facebook_shares_for_url(url)", "def shares(self, unit=None, units=None, timezone=None,\r\n rollup=None, limit=None, unit_reference_ts=None):\r\n params = base.get_params(None, locals())\r\n return self._get('shares', params)", "def share_replicas_get_all_by_share(context, share_id,\n with_share_data=False,\n with_share_server=False, session=None):\n session = session or get_session()\n\n result = _share_replica_get_with_filters(\n context, with_share_server=with_share_server,\n share_id=share_id, session=session).all()\n\n if with_share_data:\n result = _set_instances_share_data(context, result, session)\n\n return result", "def test_dashboards_v2_list_shares(self):\n pass", "def share_instances_get_all_by_share(context, share_id):\n result = (\n model_query(context, models.ShareInstance).filter(\n models.ShareInstance.share_id == share_id,\n ).all()\n )\n return result", "def file_shares(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['FileShareConfigArgs']]]]:\n return pulumi.get(self, \"file_shares\")", "def get_share(entry):\n share, share_pc, currency = \"\", \"\", \"\"\n if entry != \"\":\n if \":\" in entry:\n clean_entry = entry.split(\":\")[1].strip()\n if \"(\" in clean_entry:\n share_list = clean_entry.replace(\")\", \"\").split(\"(\")\n share_pc = share_list[1]\n currency = share_list[0].split(\" \")[0]\n share = \" \".join(share_list[0].split(\" \")[1:]).strip()\n else:\n if \"(\" in entry and \"%\" in entry:\n share_list = entry.replace(\")\", \"\").split(\"(\")\n share_pc = share_list[1]\n for feat in share_list:\n if \"%\" in feat:\n share_pc = feat\n elif \"%\" in entry:\n share_pc = entry\n # Obsolete code\n # share =.replace(\"Kapital:\", \"\").strip()\n # currency = share.split(\" \")[0]\n return share, share_pc, currency", "def find_all_shares(self, user, stock=None, brokerage_account=None, budget_account=None):\n query_args = {'user': user, 'stock': stock, 'brokerage_account': brokerage_account, 'budget_account': budget_account}\n # don't include the args if the value is none\n final_query_args = {k: v for k, v in query_args.items() if v is not None}\n return super().get_queryset().filter(**final_query_args)", "def get_shares_in_recycle_bin_by_share_server(\n context, share_server_id, filters=None, sort_key=None, sort_dir=None):\n if filters is None:\n filters = {}\n filters[\"is_soft_deleted\"] = True\n query = _share_get_all_with_filters(\n context, share_server_id=share_server_id, filters=filters,\n sort_key=sort_key, sort_dir=sort_dir)\n return query", "def synchronize(self):\n self.increment_pc()\n shares = [self._exchange_shares(player, GF256(0))\n for player in self.players]\n result = gather_shares(shares)\n result.addCallback(lambda _: None)\n return result", "def get_shares_info(user_id: int, db: Connection) -> List[Tuple[str, int]]:\n\n sql = f\"\"\"SELECT symbol, SUM(qty) FROM shares \n WHERE user_id = {user_id}\n GROUP BY symbol;\"\"\"\n\n try:\n cur = db.cursor()\n cur.execute(sql)\n return cur.fetchall()\n except 
Error:\n return []", "def share_combining(self, shares):\n mod_shares = [share[1] * (calc_lambda(shares,\n share[0], self.precomputed_fac)) for i, share in enumerate(shares)]\n return sum(mod_shares)", "def update_printer_shares(\n self,\n printer_share_id, # type: str\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) -> None\n cls = kwargs.pop('cls', None) # type: ClsType[None]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.update_printer_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [204]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n if cls:\n return cls(pipeline_response, None, {})", "def get_share(self, activity_user_id, activity_id, share_id):\n return None" ]
[ "0.5929783", "0.5719837", "0.5379649", "0.53750217", "0.5357973", "0.53297055", "0.528701", "0.51888907", "0.50321615", "0.4997966", "0.49812928", "0.49800614", "0.49704927", "0.4904655", "0.48964918", "0.48660332", "0.48171493", "0.47529054", "0.46977648", "0.46971932", "0.46454364", "0.45945734", "0.45454213", "0.45418802", "0.45128226", "0.4495962", "0.4488998", "0.44774207", "0.44628212", "0.44584772" ]
0.5811979
1
Update the navigation property printerShares in print.
def update_printer_shares(
    self,
    printer_share_id,  # type: str
    body,  # type: "models.MicrosoftGraphPrinterShare"
    **kwargs  # type: Any
):
    # type: (...) -> None
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self.update_printer_shares.metadata['url']  # type: ignore
    path_format_arguments = {
        'printerShare-id': self._serialize.url("printer_share_id", printer_share_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')
    body_content_kwargs['content'] = body_content
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(models.OdataError, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_shares(\n self,\n printer_share_id, # type: str\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) -> None\n cls = kwargs.pop('cls', None) # type: ClsType[None]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.update_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [204]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n if cls:\n return cls(pipeline_response, None, {})", "def update_shares(self):\n self.nb_shares = self.shares.count()\n self.save()", "def set_share(self, total_people):\n self.paid = self._get_paid()\n self.share = round(self.paid/Decimal(total_people), 2)", "def create_printer_shares(\n self,\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) 
-> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.create_printer_shares.metadata['url'] # type: ignore\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [201]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def get_printer_shares(\n self,\n printer_share_id, # type: str\n select=None, # type: Optional[List[Union[str, \"models.Enum62\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum63\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.get_printer_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def shares_outstanding(self, shares_outstanding):\n\n self._shares_outstanding = shares_outstanding", "def report_shares(self, flush=False):\n now = int(time())\n if (now - self.last_graph_transmit) > 90:\n # bounds for a share to be grouped for transmission in minute\n # chunks. the upper bound is the last minute that has\n # completely passed, while the lower bound is the last time we\n # sent graph data (which should also be an exact round minute,\n # ensuring that we don't submit info for the same minute twice)\n upper = (now // 60) * 60\n # will cause all shares to get reported\n if flush:\n upper += 120\n lower = self.last_graph_transmit\n # share records that are to be discarded\n expire = now - self.config['keep_share']\n # for transmission\n chunks = {}\n # a list of indexes that have expired and are ready for removal\n rem = []\n for i, key in enumerate(self.share_types):\n for stamp, shares in getattr(self, key).iteritems():\n if stamp >= lower and stamp < upper:\n minute = (stamp // 60) * 60\n chunks.setdefault(minute, [0, 0, 0, 0])\n chunks[minute][i] += shares\n if stamp < expire:\n rem.append((key, stamp))\n\n for key, stamp in rem:\n del getattr(self, key)[stamp]\n\n for stamp, shares in chunks.iteritems():\n self.celery.send_task_pp(\n 'add_one_minute', self.address, shares[0],\n stamp, self.worker, *shares[1:])\n self.last_graph_transmit = upper\n\n # don't recalc their diff more often than interval\n if (self.config['vardiff']['enabled'] and\n now - self.last_diff_adj > self.config['vardiff']['interval']):\n self.recalc_vardiff()", "def get_shares(\n self,\n printer_share_id, # type: str\n select=None, # type: Optional[List[Union[str, \"models.Enum88\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum89\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.get_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def create_shares(\n self,\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) 
-> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.create_shares.metadata['url'] # type: ignore\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [201]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def list_printer_shares(\n self,\n orderby=None, # type: Optional[List[Union[str, \"models.Enum59\"]]]\n select=None, # type: Optional[List[Union[str, \"models.Enum60\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum61\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> Iterable[\"models.CollectionOfPrinterShare0\"]\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.CollectionOfPrinterShare0\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n def prepare_request(next_link=None):\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n if not next_link:\n # Construct URL\n url = self.list_printer_shares.metadata['url'] # type: ignore\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if self._config.top is not None:\n query_parameters['$top'] = self._serialize.query(\"self._config.top\", self._config.top, 'int', minimum=0)\n if self._config.skip is not None:\n query_parameters['$skip'] = self._serialize.query(\"self._config.skip\", self._config.skip, 'int', minimum=0)\n if self._config.search is not None:\n query_parameters['$search'] = self._serialize.query(\"self._config.search\", self._config.search, 'str')\n if self._config.filter is not None:\n query_parameters['$filter'] = self._serialize.query(\"self._config.filter\", self._config.filter, 'str')\n if self._config.count is not None:\n query_parameters['$count'] = self._serialize.query(\"self._config.count\", self._config.count, 'bool')\n if orderby is not None:\n query_parameters['$orderby'] = self._serialize.query(\"orderby\", orderby, '[str]', div=',')\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n request = self._client.get(url, query_parameters, header_parameters)\n else:\n url = next_link\n query_parameters = {} # type: Dict[str, Any]\n request = self._client.get(url, query_parameters, header_parameters)\n return request\n\n def extract_data(pipeline_response):\n deserialized = self._deserialize('CollectionOfPrinterShare0', pipeline_response)\n list_of_elem = deserialized.value\n if cls:\n list_of_elem = cls(list_of_elem)\n return deserialized.odata_next_link or None, iter(list_of_elem)\n\n def get_next(next_link=None):\n request = prepare_request(next_link)\n\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n error = self._deserialize(models.OdataError, response)\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n return pipeline_response\n\n return ItemPaged(\n get_next, extract_data\n )", "def trade(self, shares, price, last_trade):\n # Simulate likelyhood of fill\n likely_fill = random.randint(0, 100) / 100\n if likely_fill > self.fill_rate:\n print('Order not filled')\n return\n\n trade_value = shares * price\n\n # Net long / short\n if self.sign(shares) != self.sign(self.portfoolio['Shares']) and self.portfoolio['Shares'] != 0:\n trade_pnl = (shares * self.portfoolio['Avg_Cost']) - trade_value\n\n self.portfoolio['PnL'] += trade_pnl\n\n # Flat\n else:\n trade_pnl = 0\n\n self.portfoolio['Shares'] += shares\n self.portfoolio['Cost'] += shares * price + trade_pnl\n self.portfoolio['Avg_Cost'] = self.portfoolio['Cost'] / self.portfoolio['Shares']\n self.portfoolio['Unrealized_PnL'] = 
self.portfoolio['Shares'] * last_trade - self.portfoolio['Avg_Cost'] * self.portfoolio['Shares']\n\n print('Pnl booked: {} | Portfolio: {}'.format(trade_pnl, self.portfoolio))\n print(last_trade)", "def updateScptRefs(self):\n for scpt in self.refs_scpt.keys():\n self.refs_scpt[scpt] = scpt.getRef()\n self.scptRefs = set(self.refs_scpt.values())", "def _update_share_stats(self):\n\n # sanity check for gluster ctl mount\n smpb = os.stat(self.configuration.glusterfs_mount_point_base)\n smp = os.stat(self._get_mount_point_for_gluster_vol())\n if smpb.st_dev == smp.st_dev:\n raise exception.GlusterfsException(\n _(\"GlusterFS control mount is not available\")\n )\n smpv = os.statvfs(self._get_mount_point_for_gluster_vol())\n\n data = dict(\n storage_protocol='NFS',\n vendor_name='Red Hat',\n share_backend_name=self.backend_name,\n reserved_percentage=self.configuration.reserved_share_percentage,\n total_capacity_gb=(smpv.f_blocks * smpv.f_frsize) >> 30,\n free_capacity_gb=(smpv.f_bavail * smpv.f_frsize) >> 30)\n super(GlusterfsShareDriver, self)._update_share_stats(data)", "def set_cash(self, cash):\n portfolio = self.get_portfolio_object()\n if portfolio is not None:\n portfolio.cash += cash\n portfolio.initial_cash += cash", "def shares_to_core(self, shares):\n return shares * self.core_per_share()", "def memory_shares(self, shares):\n\n assert shares >= 0\n config_spec = vim.vm.ConfigSpec()\n shares_alloc = vim.ResourceAllocationInfo()\n shares_alloc.shares = vim.SharesInfo(level=\"custom\", shares=shares)\n config_spec.memoryAllocation = shares_alloc\n return self.vm_obj.ReconfigVM_Task(config_spec)", "def reduce_order(self, uid, shares):\n if uid in self.buy_map:\n order = self.buy_map[uid]\n limit = self.buy_levels[order.price]\n order.reduce(shares)\n if order.shares == 0:\n del self.buy_map[uid]\n if limit.size == 0:\n self.buy_tree.size -= 1\n if order.price == self.highest_buy:\n self.update_highest_buy(limit)\n elif uid in self.sell_map:\n order = self.sell_map[uid]\n limit = self.sell_levels[order.price]\n order.reduce(shares)\n if order.shares == 0:\n del self.sell_map[uid]\n if limit.size == 0:\n self.sell_tree.size -= 1\n if order.price == self.lowest_sell:\n self.update_lowest_sell(limit)\n else:\n return\n self.update_book()", "def update(self, context, data):\n self.context = context\n self.data = data\n\n dt = get_datetime()\n\n for tkt, bo in self._d_orders['trades'].items():\n price = self.data[bo.symbol].price\n bo.update(price, dt)", "def save_modified_imports(self):\n # apply changed imports.\n imports_range = self.pe_manager.get_imports_range_in_structures()\n self.PE.__structures__[imports_range[0]:imports_range[1]] = \\\n self.import_structures", "def _flush(self):\n old_pumps = self._pumps\n new_pumps = self._pumps = set()\n for p in old_pumps:\n p.flush()\n if p.clientIO.disconnected and p.serverIO.disconnected:\n continue\n new_pumps.add(p)", "def _update_volume_stats(self):\n LOG.debug('Updating volume stats')\n total_space = 0\n free_space = 0\n share = None\n for _share in self._mounted_shares:\n if self.shares_with_capacities[_share]['free'] > free_space:\n free_space = self.shares_with_capacities[_share]['free']\n total_space = self.shares_with_capacities[_share]['total']\n share = _share\n\n location_info = '%(driver)s:%(share)s' % {\n 'driver': self.__class__.__name__,\n 'share': share\n }\n nms_url = self.share2nms[share].url\n self._stats = {\n 'vendor_name': 'Nexenta',\n 'dedup': self.volume_deduplication,\n 'compression': self.volume_compression,\n 
'description': self.volume_description,\n 'nms_url': nms_url,\n 'ns_shares': self.shares_with_capacities,\n 'driver_version': self.VERSION,\n 'storage_protocol': constants.NFS,\n 'total_capacity_gb': total_space,\n 'free_capacity_gb': free_space,\n 'reserved_percentage': self.configuration.reserved_percentage,\n 'QoS_support': False,\n 'location_info': location_info,\n 'volume_backend_name': self.backend_name,\n 'nfs_mount_point_base': self.nfs_mount_point_base\n }", "def get_company_and_price(shares: list[Share]) -> list[Share]:\n\n for share in shares:\n\n share_info = lookup(share.symbol)\n if share_info is not None:\n share.company_name = share_info[\"name\"]\n share.price = share_info[\"price\"]\n share.total = share.price * share.qty\n else:\n share.company_name = CMP_NOT_FOUND\n\n return shares", "def delete_printer_shares(\n self,\n printer_share_id, # type: str\n if_match=None, # type: Optional[str]\n **kwargs # type: Any\n ):\n # type: (...) -> None\n cls = kwargs.pop('cls', None) # type: ClsType[None]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.delete_printer_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n if if_match is not None:\n header_parameters['If-Match'] = self._serialize.header(\"if_match\", if_match, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.delete(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [204]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n if cls:\n return cls(pipeline_response, None, {})", "def list_shares(\n self,\n orderby=None, # type: Optional[List[Union[str, \"models.Enum85\"]]]\n select=None, # type: Optional[List[Union[str, \"models.Enum86\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum87\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> Iterable[\"models.CollectionOfPrinterShare1\"]\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.CollectionOfPrinterShare1\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n def prepare_request(next_link=None):\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n if not next_link:\n # Construct URL\n url = self.list_shares.metadata['url'] # type: ignore\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if self._config.top is not None:\n query_parameters['$top'] = self._serialize.query(\"self._config.top\", self._config.top, 'int', minimum=0)\n if self._config.skip is not None:\n query_parameters['$skip'] = self._serialize.query(\"self._config.skip\", self._config.skip, 'int', minimum=0)\n if self._config.search is not None:\n query_parameters['$search'] = self._serialize.query(\"self._config.search\", self._config.search, 'str')\n if self._config.filter is not None:\n query_parameters['$filter'] = self._serialize.query(\"self._config.filter\", self._config.filter, 'str')\n if self._config.count is not None:\n query_parameters['$count'] = self._serialize.query(\"self._config.count\", self._config.count, 'bool')\n if orderby is not None:\n query_parameters['$orderby'] = self._serialize.query(\"orderby\", orderby, '[str]', div=',')\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n request = self._client.get(url, query_parameters, header_parameters)\n else:\n url = next_link\n query_parameters = {} # type: Dict[str, Any]\n request = self._client.get(url, query_parameters, header_parameters)\n return request\n\n def extract_data(pipeline_response):\n deserialized = self._deserialize('CollectionOfPrinterShare1', pipeline_response)\n list_of_elem = deserialized.value\n if cls:\n list_of_elem = cls(list_of_elem)\n return deserialized.odata_next_link or None, iter(list_of_elem)\n\n def get_next(next_link=None):\n request = prepare_request(next_link)\n\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n error = self._deserialize(models.OdataError, response)\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n return pipeline_response\n\n return ItemPaged(\n get_next, extract_data\n )", "def update_holdings_from_fill(self, fill):\n # Check whether the Fill is a Buy or a Sell\n fill_dir = 0\n if fill.direction == 'BUY':\n fill_dir = 1\n if fill.direction == 'SELL':\n fill_dir = -1\n \n # Update holdings list with new quantities\n fill_cost = self.bars.get_latest_bar_value(fill.symbol, \"close\")\n cost = fill_dir * fill_cost * fill.quantity\n self.current_holdings[fill.symbol] += cost\n self.current_holdings['commission'] += fill.commission\n self.current_holdings['cash'] -= (cost + fill.commission)\n self.current_holdings['total'] -= (cost + fill.commission)", "def update_portfolio(self):\n if not self._dirty_portfolio:\n return\n\n portfolio = self._portfolio\n pt = self.position_tracker\n\n 
portfolio.positions = pt.get_positions()\n position_stats = pt.stats\n\n portfolio.positions_value = position_value = (\n position_stats.net_value\n )\n portfolio.positions_exposure = position_stats.net_exposure\n self._cash_flow(self._get_payout_total(pt.positions))\n\n start_value = portfolio.portfolio_value\n\n # update the new starting value\n portfolio.portfolio_value = end_value = portfolio.cash + position_value\n\n pnl = end_value - start_value\n if start_value != 0:\n returns = pnl / start_value\n else:\n returns = 0.0\n\n portfolio.pnl += pnl\n portfolio.returns = (\n (1 + portfolio.returns) *\n (1 + returns) -\n 1\n )\n\n # the portfolio has been fully synced\n self._dirty_portfolio = False", "def test_set_share(self):\n self.app.post_json(url=\"/config/shares\",\n params=dict(\n source='gsiftp://source',\n destination='gsiftp://nowhere',\n vo='dteam',\n share=80\n ),\n status=200\n )", "def cpu_shares(self, shares):\n assert shares >= 0\n config_spec = vim.vm.ConfigSpec()\n shares_alloc = vim.ResourceAllocationInfo()\n shares_alloc.shares = vim.SharesInfo(level=\"custom\", shares=shares)\n config_spec.cpuAllocation = shares_alloc\n return self.vm_obj.ReconfigVM_Task(config_spec)", "def _save_ballot_shares(\n self,\n guardian_id: GuardianId,\n guardians_ballot_shares: Dict[BallotId, Optional[DecryptionShare]],\n ) -> None:\n for ballot_id, guardian_ballot_share in guardians_ballot_shares.items():\n shares = self._ballot_shares.get(ballot_id)\n if shares is None:\n shares = {}\n if guardian_ballot_share is not None:\n shares[guardian_id] = guardian_ballot_share\n self._ballot_shares[ballot_id] = shares", "def get_shares_count(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetSharesCount', self.handle)" ]
[ "0.60168207", "0.58797425", "0.5139873", "0.50018686", "0.49725", "0.489075", "0.47493216", "0.46902797", "0.46506637", "0.4645971", "0.44011953", "0.43775907", "0.43299082", "0.4329202", "0.43075097", "0.42506906", "0.42386127", "0.42100638", "0.4194462", "0.41872737", "0.41506875", "0.41426432", "0.41328475", "0.41262403", "0.41210806", "0.40860674", "0.40771666", "0.40743548", "0.40473774", "0.4038524" ]
0.613246
0
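The operations quoted in the row above all share the same autorest request-construction pattern: substitute path parameters into the operation's metadata URL, comma-join any OData $select/$expand lists, attach an Accept header, then run the HTTP pipeline and map non-2xx statuses through error_map. The stdlib-only sketch below imitates just the request-construction half of that pattern for illustration; `build_get_request`, the base URL, and the share id are assumptions, not part of the generated client.

```python
from typing import Any, Dict, List, Optional

def build_get_request(
    base_url: str,
    printer_share_id: str,
    select: Optional[List[str]] = None,
    expand: Optional[List[str]] = None,
) -> Dict[str, Any]:
    # Path substitution, standing in for self._client.format_url(...) above.
    url = f"{base_url}/print/printerShares/{printer_share_id}"

    # OData list parameters are comma-joined, as _serialize.query(..., div=',') does.
    query_parameters: Dict[str, str] = {}
    if select is not None:
        query_parameters["$select"] = ",".join(select)
    if expand is not None:
        query_parameters["$expand"] = ",".join(expand)

    # Every operation in the row sends Accept: application/json.
    header_parameters = {"Accept": "application/json"}
    return {"url": url, "params": query_parameters, "headers": header_parameters}

if __name__ == "__main__":
    # Hypothetical endpoint and id, used only to show the shape of the output.
    print(build_get_request("https://graph.example.com/beta", "share-123",
                            select=["id", "displayName"]))
```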
Delete navigation property printerShares for print.
def delete_printer_shares( self, printer_share_id, # type: str if_match=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) accept = "application/json" # Construct URL url = self.delete_printer_shares.metadata['url'] # type: ignore path_format_arguments = { 'printerShare-id': self._serialize.url("printer_share_id", printer_share_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [204]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_shares(\n self,\n printer_share_id, # type: str\n if_match=None, # type: Optional[str]\n **kwargs # type: Any\n ):\n # type: (...) -> None\n cls = kwargs.pop('cls', None) # type: ClsType[None]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.delete_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n if if_match is not None:\n header_parameters['If-Match'] = self._serialize.header(\"if_match\", if_match, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.delete(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [204]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n if cls:\n return cls(pipeline_response, None, {})", "def delete_share(self, context, share, share_server=None):\n volume_uuid = self._resolve_volume_name(share['name'],\n share['project_id'])\n if not volume_uuid:\n LOG.warning(\"No volume found for \"\n \"share %(project_id)s/%(name)s\",\n {\"project_id\": share['project_id'],\n \"name\": share['name']})\n return\n\n if self.configuration.quobyte_delete_shares:\n self.rpc.call('deleteVolume', {'volume_uuid': volume_uuid})\n else:\n self.rpc.call('exportVolume', {\"volume_uuid\": volume_uuid,\n \"remove_export\": True,\n })", "def delete_share(self, pool, project, share):\n svc = self.share_path % (pool, project, share)\n ret = self.rclient.delete(svc)\n if ret.status != restclient.Status.NO_CONTENT:\n exception_msg = (('Error deleting '\n 'share: %(share)s to '\n 'pool: %(pool)s '\n 'project: %(project)s '\n 'return code: %(ret.status)d '\n 'message: %(ret.data)s.'),\n {'share': share,\n 'pool': pool,\n 'project': project,\n 'ret.status': ret.status,\n 'ret.data': ret.data})\n LOG.error(exception_msg)", "def _quota_destroy_all_by_share_type(context, share_type_id, project_id=None):\n share_type_quotas = model_query(\n context, models.ProjectShareTypeQuota,\n read_deleted=\"no\",\n ).filter_by(share_type_id=share_type_id)\n\n share_type_quota_usages = model_query(\n context, models.QuotaUsage, read_deleted=\"no\",\n ).filter_by(share_type_id=share_type_id)\n\n share_type_quota_reservations = model_query(\n context, models.Reservation, read_deleted=\"no\",\n ).filter_by(share_type_id=share_type_id)\n\n if project_id is not None:\n share_type_quotas = share_type_quotas.filter_by(\n project_id=project_id,\n )\n share_type_quota_usages = share_type_quota_usages.filter_by(\n project_id=project_id,\n )\n share_type_quota_reservations = (\n share_type_quota_reservations.filter_by(project_id=project_id)\n )\n\n share_type_quotas.soft_delete(synchronize_session=False)\n share_type_quota_usages.soft_delete(synchronize_session=False)\n 
share_type_quota_reservations.soft_delete(synchronize_session=False)", "def test_remove_share(self):\n self.app.delete(url=\"/config/shares?share=80&destination=gsiftp://nowhere&vo=dteam\", status=400)\n self.app.delete(url=\"/config/shares?share=80&destination=gsiftp://nowhere&vo=dteam&source=gsiftp://source\", status=204)", "def remove(self):\n\t\tcall_sdk_function('PrlShare_Remove', self.handle)", "def Delete(self):\n\n if self.network_id:\n self.cs.delete_network(self.network_id)\n\n if self.is_vpc and self.vpc_id:\n self.cs.delete_vpc(self.vpc_id)", "def update_printer_shares(\n self,\n printer_share_id, # type: str\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) -> None\n cls = kwargs.pop('cls', None) # type: ClsType[None]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.update_printer_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [204]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n if cls:\n return cls(pipeline_response, None, {})", "def create_printer_shares(\n self,\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) 
-> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.create_printer_shares.metadata['url'] # type: ignore\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [201]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def unShare(sharedItem):\n sharedItem.store.query(Share, Share.sharedItem == sharedItem).deleteFromStore()", "def delete(self):\n for obj in self:\n _unset_related_objects_relations(obj)\n\n self.update(deleted=now())", "def get_printer_shares(\n self,\n printer_share_id, # type: str\n select=None, # type: Optional[List[Union[str, \"models.Enum62\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum63\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.get_printer_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def delete_share(self, context, share, share_server=None):\n local_share_path = self._get_local_share_path(share)\n cmd = ['rm', '-rf', local_share_path]\n try:\n self._execute(*cmd, run_as_root=True)\n except exception.ProcessExecutionError:\n LOG.error(_LE('Unable to delete share %s'), share['name'])\n raise", "def update_shares(\n self,\n printer_share_id, # type: str\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) 
-> None\n cls = kwargs.pop('cls', None) # type: ClsType[None]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.update_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [204]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n if cls:\n return cls(pipeline_response, None, {})", "def test_dashboards_v2_delete_share(self):\n pass", "def delete(self):\n logger.info('Delete the port chain: %s' % self.name)\n # Delete port chain\n self.pc_client.delete('port_chain', self.name)\n\n logger.info('Delete the flow classifier.')\n self.pc_client.delete('flow_classifier', self.flow_conf['name'])\n\n # Delete all port pair groups\n logger.info('Delete port pair groups and port pairs.')\n srv_ppgrp_lst = self.srv_chain.get_srv_ppgrp_id()\n for grp_idx in range(len(srv_ppgrp_lst)):\n pp_grp_name = 'pp_grp_%s' % grp_idx\n self.pc_client.delete('port_pair_group', pp_grp_name)\n\n # Delete all port pairs\n for grp_idx, pp_grp in enumerate(srv_ppgrp_lst):\n for pp_idx in range(len(pp_grp)):\n pp_name = 'pp_%s_%s' % (grp_idx, pp_idx)\n self.pc_client.delete('port_pair', pp_name)", "def delete_policy(self, policy_ref: str) -> None:\n self.batch_write(\n [self.batch_detach_policy(policy_ref, obj_ref) for obj_ref in self.list_policy_attachments(\n policy_ref,\n ConsistencyLevel=ConsistencyLevel.SERIALIZABLE.name)])\n self.batch_write(\n [self.batch_detach_object(parent_ref, link_name) for parent_ref, link_name in self.list_object_parents(\n policy_ref,\n ConsistencyLevel=ConsistencyLevel.SERIALIZABLE.name)])\n retry(**cd_read_retry_parameters)(cd_client.delete_object)(\n DirectoryArn=self._dir_arn,\n ObjectReference={'Selector': policy_ref})", "async def test_endpoint_delete_container_shares_correct(self):\n with self.patch_json_dump:\n resp = await delete_container_shares_handler(self.mock_request)\n self.assertEqual(resp.status, 204)", "def remove_many(self, hashes):\n if isinstance(hashes, MinHash):\n self._methodcall(lib.kmerminhash_remove_from, hashes._objptr)\n else:\n self._methodcall(lib.kmerminhash_remove_many, list(hashes), len(hashes))", "def delete_snapshot(self, pool, project, share, snapshot):\n svc = self.snapshot_path % (pool, project, share, snapshot)\n ret = 
self.rclient.delete(svc)\n if ret.status != restclient.Status.NO_CONTENT:\n exception_msg = (_('Error deleting '\n 'snapshot: %(snapshot)s on '\n 'share: %(share)s to '\n 'pool: %(pool)s '\n 'project: %(project)s '\n 'return code: %(ret.status)d '\n 'message: %(ret.data)s.')\n % {'snapshot': snapshot,\n 'share': share,\n 'pool': pool,\n 'project': project,\n 'ret.status': ret.status,\n 'ret.data': ret.data})\n LOG.error(exception_msg)\n raise exception.ShareBackendException(msg=exception_msg)", "def delete_relatives(self):\n category_ratings = list(self.category_ratings.all())\n self.category_ratings.clear()\n for category_rating in category_ratings:\n if category_rating.isOrphaned():\n category_rating.delete()\n\n word_counts = list(self.word_counts.all())\n self.word_counts.clear()\n for word_count in word_counts:\n if word_count.isOrphaned():\n word_count.delete()", "def destroy(self, request, *args, **kwargs):\n delete_stock_items = 'delete_stock_items' in request.data and request.data['delete_stock_items'] == '1'\n delete_sub_locations = 'delete_sub_locations' in request.data and request.data['delete_sub_locations'] == '1'\n return super().destroy(request,\n *args,\n **dict(kwargs,\n delete_sub_locations=delete_sub_locations,\n delete_stock_items=delete_stock_items))", "def delete(self):\n _unset_related_objects_relations(self)\n self.deleted = now()\n self.save()\n\n return self", "def hard_delete_user_related_data(self):\n from contentcuration.viewsets.common import SQCount\n\n # Hard delete invitations associated to this account.\n self.sent_to.all().delete()\n self.sent_by.all().delete()\n\n editable_channels_user_query = (\n User.objects.filter(editable_channels__id=OuterRef('id'))\n .values_list('id', flat=True)\n .distinct()\n )\n non_public_channels_sole_editor = self.editable_channels.annotate(num_editors=SQCount(\n editable_channels_user_query, field=\"id\")).filter(num_editors=1, public=False)\n\n # Point sole editor non-public channels' contentnodes to orphan tree to let\n # our garbage collection delete the nodes and underlying files.\n ContentNode._annotate_channel_id(ContentNode.objects).filter(channel_id__in=list(\n non_public_channels_sole_editor.values_list(\"id\", flat=True))).update(parent_id=settings.ORPHANAGE_ROOT_ID)\n\n # Hard delete non-public channels associated with this user (if user is the only editor).\n non_public_channels_sole_editor.delete()\n\n # Hard delete non-public channel collections associated with this user (if user is the only editor).\n user_query = (\n User.objects.filter(channel_sets__id=OuterRef('id'))\n .values_list('id', flat=True)\n .distinct()\n )\n self.channel_sets.annotate(num_editors=SQCount(user_query, field=\"id\")).filter(num_editors=1, public=False).delete()\n\n # Create history!\n self.history.create(user_id=self.pk, action=user_history.RELATED_DATA_HARD_DELETION)", "def delete_policies():\n if PoliciesOutput.POLICIES_EVENT not in ctx.instance.runtime_properties:\n return\n\n service_component_name = ctx.instance.runtime_properties.get(\n PoliciesOutput.SERVICE_COMPONENT_NAME\n )\n if not service_component_name:\n ctx.logger.warn(\"failed to find service_component_name to delete_policies in consul-kv\")\n return\n\n delete_policies = [\n PoliciesOutput._gen_txn_operation(\n PoliciesOutput.OPERATION_DELETE_FOLDER, service_component_name\n )\n ]\n PoliciesOutput._run_transaction(\"delete_policies\", delete_policies)", "def delete(shelter_id=0, section_name=\"\"):\r\n shelter = Shelter.query.filter(Shelter.id==shelter_id,\r\n 
Shelter.user_id==current_user.id)\r\n if shelter is not None:\r\n ShelterPicture.query.filter(ShelterPicture.shelter_id==shelter_id).delete()\r\n\r\n properties = Property.query.filter(Property.shelter_id==shelter_id)\r\n for property in properties:\r\n Association.query.filter(Association.property_id==property.id).delete()\r\n db.session.delete(property)\r\n\r\n shelter.delete()\r\n db.session.commit()\r\n\r\n return redirect(redirect_url())", "def get_shares(\n self,\n printer_share_id, # type: str\n select=None, # type: Optional[List[Union[str, \"models.Enum88\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum89\"]]]\n **kwargs # type: Any\n ):\n # type: (...) -> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.get_shares.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printerShare-id': self._serialize.url(\"printer_share_id\", printer_share_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def create_shares(\n self,\n body, # type: \"models.MicrosoftGraphPrinterShare\"\n **kwargs # type: Any\n ):\n # type: (...) 
-> \"models.MicrosoftGraphPrinterShare\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrinterShare\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n content_type = kwargs.pop(\"content_type\", \"application/json\")\n accept = \"application/json\"\n\n # Construct URL\n url = self.create_shares.metadata['url'] # type: ignore\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Content-Type'] = self._serialize.header(\"content_type\", content_type, 'str')\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n body_content_kwargs = {} # type: Dict[str, Any]\n body_content = self._serialize.body(body, 'MicrosoftGraphPrinterShare')\n body_content_kwargs['content'] = body_content\n request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [201]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrinterShare', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def delete(self):\r\n if self.provider.readonly:\r\n raise DAVError(HTTP_FORBIDDEN)\r\n\r\n self.nibbler.rmdir(self.path.rstrip('/'), recursive=True)\r\n\r\n self.removeAllProperties(True)\r\n self.removeAllLocks(True)", "def delete(self, id=None, **kwargs):\r\n rm = ResourceManager()\r\n pt = self.db.auth_permission\r\n if id and not isinstance(id, (list, tuple, set)):\r\n id = [id]\r\n\r\n # removing private args\r\n if self.private_args:\r\n private_args = self.private_args.table\r\n self.private_args.delete(id)\r\n else:\r\n private_args = None\r\n\r\n # # removing many to many references\r\n # m2ms = set()\r\n # for reference in (tuple(x.split('/')) for x in imap(itemgetter('indexName'),self.many_to_many)):\r\n # resource = rm.m2m(reference)\r\n # if resource:\r\n # m2ms.add(resource.table)\r\n # resource.delete(self,collection = id)\r\n\r\n # getting table names and field names to delete\r\n cascading_deletion = tuple((field.table, field) for field in self.table._referenced_by if\r\n field.ondelete == 'CASCADE' and field.table != private_args) # and field.table not in m2ms)\r\n # deleting all related objects\r\n for table, field in cascading_deletion:\r\n res = rm.resource(table)\r\n if res:\r\n # fetch all id of related rows\r\n ids = set(chain(*self.sql(field.belongs(id), table._id, as_dict=False)))\r\n if ids:\r\n # if related entitiy is a many to many relation delete reference with other objects, but not related objects\r\n if isinstance(res, ManyToManyRelation):\r\n # making deletion simpy by forign related attribute\r\n res.delete(self, resource_id=ids)\r\n else:\r\n res.delete(id=ids, _check_permissions=False)\r\n\r\n self.db(self.table.id.belongs(id)).delete()\r\n # deleting all directly related permissions\r\n self.db((pt.table_name == self.table._tablename) & pt.record_id.belongs(id)).delete()\r\n # if realtime_enabled and self.minimal_permissions:\r\n # 
sync_permissions(self.table._tablename, id, self.minimal_permissions)\r\n # perms = sql(pt.record_id.belongs(id) & (pt.table_name == self.table._tablename))\r\n # if perms:\r\n # rt_sync_permissions(self.table, id, perms)\r" ]
[ "0.5683588", "0.5448751", "0.5174152", "0.5008662", "0.49760768", "0.49688616", "0.4931828", "0.4845979", "0.48247477", "0.47982392", "0.47931978", "0.47914562", "0.4761925", "0.47239703", "0.46946055", "0.46627745", "0.46516562", "0.46299604", "0.46165127", "0.46001336", "0.4590099", "0.4588908", "0.4579941", "0.45761093", "0.45699692", "0.45577016", "0.4528387", "0.45100018", "0.45014554", "0.449045" ]
0.5776565
0
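The delete operation in the row above expects 204 No Content and only sends an If-Match header when the caller supplies one, which is how the generated client exposes optimistic concurrency. A minimal sketch of that conditional-header and status-check logic follows; the function names and the plain RuntimeError are illustrative stand-ins for the SDK's HttpResponseError, not its real API.

```python
from typing import Dict, Optional

def build_delete_headers(if_match: Optional[str] = None) -> Dict[str, str]:
    # Mirror the generated code: If-Match is only attached when a value is given.
    headers = {"Accept": "application/json"}
    if if_match is not None:
        headers["If-Match"] = if_match
    return headers

def check_delete_status(status_code: int) -> None:
    # The generated operation treats any status other than 204 as an error,
    # deserializing the body as OdataError before raising.
    if status_code != 204:
        raise RuntimeError(f"delete failed with HTTP {status_code}")

if __name__ == "__main__":
    print(build_delete_headers('W/"example-etag"'))  # hypothetical ETag value
    check_delete_status(204)  # no exception: the delete succeeded
```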
Create new navigation property to services for print.
def create_services( self, body, # type: "models.MicrosoftGraphPrintService" **kwargs # type: Any ): # type: (...) -> "models.MicrosoftGraphPrintService" cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphPrintService"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.create_services.metadata['url'] # type: ignore # Construct parameters query_parameters = {} # type: Dict[str, Any] # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(body, 'MicrosoftGraphPrintService') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [201]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize(models.OdataError, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('MicrosoftGraphPrintService', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_services(\n self,\n print_service_id, # type: str\n select=None, # type: Optional[List[Union[str, \"models.Enum80\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum81\"]]]\n **kwargs # type: Any\n ):\n # type: (...) -> \"models.MicrosoftGraphPrintService\"\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.MicrosoftGraphPrintService\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n # Construct URL\n url = self.get_services.metadata['url'] # type: ignore\n path_format_arguments = {\n 'printService-id': self._serialize.url(\"print_service_id\", print_service_id, 'str'),\n }\n url = self._client.format_url(url, **path_format_arguments)\n\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n request = self._client.get(url, query_parameters, header_parameters)\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n error = self._deserialize(models.OdataError, response)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n deserialized = self._deserialize('MicrosoftGraphPrintService', pipeline_response)\n\n if cls:\n return cls(pipeline_response, deserialized, {})\n\n return deserialized", "def __str__(self):\n\t\treturn pprint.pformat({'collectionname':self.collection_name,'services':self.services})", "def addPrintOrder(self, type):\n self.printOrder.append(type)", "def _add_services(self):\n this_service = {'name': 'swift-proxy'}\n other_services = [\n {'name': 'percona-cluster'},\n {'name': 'keystone'},\n {'name': 'glance'},\n {'name': 'swift-storage'}\n ]\n super(SwiftProxyBasicDeployment, self)._add_services(this_service,\n other_services)", "def _add_services(self):\n # Services and relations which are present merely to satisfy\n # required_interfaces and workload status are not inspected.\n # Fix me. 
Inspect those too.\n this_service = {'name': 'neutron-openvswitch'}\n other_services = [\n {'name': 'nova-compute'},\n {'name': 'nova-cloud-controller'},\n {'name': 'rabbitmq-server'},\n {'name': 'keystone'},\n {'name': 'glance'},\n {'name': 'neutron-api'},\n self.get_percona_service_entry(),\n ]\n if self._get_openstack_release() >= self.bionic_train:\n other_services.append({'name': 'placement'})\n super(NeutronOVSBasicDeployment, self)._add_services(this_service,\n other_services)", "def to_service(self):\n pass", "def service(cls):\n return relationship.many_to_one(cls, 'service')", "def service(cls):\n return relationship.many_to_one(cls, 'service')", "def _add_relations(self):\n relations = {\n 'keystone:shared-db': 'percona-cluster:shared-db',\n 'swift-proxy:identity-service': 'keystone:identity-service',\n 'swift-storage:swift-storage': 'swift-proxy:swift-storage',\n 'glance:identity-service': 'keystone:identity-service',\n 'glance:shared-db': 'percona-cluster:shared-db',\n 'glance:object-store': 'swift-proxy:object-store'\n }\n super(SwiftProxyBasicDeployment, self)._add_relations(relations)", "def collect_properties(service_instance, view_ref, obj_type, path_set=None,\n include_mors=False):\n collector = service_instance.content.propertyCollector\n\n # Create object specification to define the starting point of\n # inventory navigation\n obj_spec = vmodl.query.PropertyCollector.ObjectSpec()\n obj_spec.obj = view_ref\n obj_spec.skip = True\n\n # Create a traversal specification to identify the path for collection\n traversal_spec = vmodl.query.PropertyCollector.TraversalSpec()\n traversal_spec.name = 'traverseEntities'\n traversal_spec.path = 'view'\n traversal_spec.skip = False\n traversal_spec.type = view_ref.__class__\n obj_spec.selectSet = [traversal_spec]\n\n # Identify the properties to the retrieved\n property_spec = vmodl.query.PropertyCollector.PropertySpec()\n property_spec.type = obj_type\n\n if not path_set:\n property_spec.all = True\n\n property_spec.pathSet = path_set\n\n # Add the object and property specification to the\n # property filter specification\n filter_spec = vmodl.query.PropertyCollector.FilterSpec()\n filter_spec.objectSet = [obj_spec]\n filter_spec.propSet = [property_spec]\n\n # Retrieve properties\n props = collector.RetrieveContents([filter_spec])\n\n data = []\n for obj in props:\n properties = {}\n for prop in obj.propSet:\n properties[prop.name] = prop.val\n\n if include_mors:\n properties['obj'] = obj.obj\n\n data.append(properties)\n return data", "def serialize(self):\n\t\tres = {'collection':self.collection_name}\n\t\tres['services'] = {}\n\t\tfor k,sinfo in self.services.items():\n\t\t\tres['services'][k] = sinfo.serialize()\n\t\treturn res", "def setupSERVICES():\n services = Services()\n services.rest = setupREST()\n\n return services", "def action_create_invoice(self):\n if self.partner_id:\n supplier = self.partner_id\n else:\n supplier = self.partner_id.search(\n [(\"name\", \"=\", \"Salon Default Customer\")])\n lines = []\n product_id = self.env['product.product'].search(\n [(\"name\", \"=\", \"Salon Service\")])\n for records in self.order_line_ids:\n if product_id.property_account_income_id.id:\n income_account = product_id.property_account_income_id.id\n elif product_id.categ_id.property_account_income_categ_id.id:\n income_account = product_id.categ_id.\\\n property_account_income_categ_id.id\n else:\n raise UserError(\n _(\"Please define income account for this product: \"\n \"'%s' (id:%d).\") % (product_id.name, 
product_id.id))\n value = (0, 0, {\n 'name': records.service_id.name,\n 'account_id': income_account,\n 'price_unit': records.price,\n 'quantity': 1,\n 'product_id': product_id.id,\n })\n lines.append(value)\n invoice_line = {\n 'move_type': 'out_invoice',\n 'partner_id': supplier.id,\n 'invoice_user_id': self.env.user.id,\n 'invoice_origin': self.name,\n 'invoice_line_ids': lines,\n }\n inv = self.env['account.move'].create(invoice_line)\n action = self.env.ref('account.action_move_out_invoice_type',\n raise_if_not_found=False)\n result = {\n 'name': action.name,\n 'type': 'ir.actions.act_window',\n 'views': [[False, 'form']],\n 'target': 'current',\n 'res_id': inv.id,\n 'res_model': 'account.move',\n }\n self.inv_stage_identifier = True\n self.stage_id = 3\n invoiced_records = self.env['salon.order'].search(\n [('stage_id', 'in', [3, 4]), ('chair_id', '=', self.chair_id.id)])\n total = 0\n for rows in invoiced_records:\n invoiced_date = str(rows.date)\n invoiced_date = invoiced_date[0:10]\n if invoiced_date == str(date.today()):\n total = total + rows.price_subtotal\n self.chair_id.collection_today = total\n self.update_number_of_orders()\n return result", "def add_custom_properties(product):\n \n # TODO: may override property of object\n if product['properties']:\n for p in product['properties']:\n product[p['name']] = p['value']\n \n return product", "def action_ship_create(self):\n res = super(SaleOrder, self).action_ship_create()\n for sale_order in self:\n if sale_order.invoiced:\n sale_order.picking_ids.write({'x_is_paid': True})\n return res", "def list_services(\n self,\n orderby=None, # type: Optional[List[Union[str, \"models.Enum77\"]]]\n select=None, # type: Optional[List[Union[str, \"models.Enum78\"]]]\n expand=None, # type: Optional[List[Union[str, \"models.Enum79\"]]]\n **kwargs # type: Any\n ):\n # type: (...) 
-> Iterable[\"models.CollectionOfPrintService\"]\n cls = kwargs.pop('cls', None) # type: ClsType[\"models.CollectionOfPrintService\"]\n error_map = {\n 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError\n }\n error_map.update(kwargs.pop('error_map', {}))\n accept = \"application/json\"\n\n def prepare_request(next_link=None):\n # Construct headers\n header_parameters = {} # type: Dict[str, Any]\n header_parameters['Accept'] = self._serialize.header(\"accept\", accept, 'str')\n\n if not next_link:\n # Construct URL\n url = self.list_services.metadata['url'] # type: ignore\n # Construct parameters\n query_parameters = {} # type: Dict[str, Any]\n if self._config.top is not None:\n query_parameters['$top'] = self._serialize.query(\"self._config.top\", self._config.top, 'int', minimum=0)\n if self._config.skip is not None:\n query_parameters['$skip'] = self._serialize.query(\"self._config.skip\", self._config.skip, 'int', minimum=0)\n if self._config.search is not None:\n query_parameters['$search'] = self._serialize.query(\"self._config.search\", self._config.search, 'str')\n if self._config.filter is not None:\n query_parameters['$filter'] = self._serialize.query(\"self._config.filter\", self._config.filter, 'str')\n if self._config.count is not None:\n query_parameters['$count'] = self._serialize.query(\"self._config.count\", self._config.count, 'bool')\n if orderby is not None:\n query_parameters['$orderby'] = self._serialize.query(\"orderby\", orderby, '[str]', div=',')\n if select is not None:\n query_parameters['$select'] = self._serialize.query(\"select\", select, '[str]', div=',')\n if expand is not None:\n query_parameters['$expand'] = self._serialize.query(\"expand\", expand, '[str]', div=',')\n\n request = self._client.get(url, query_parameters, header_parameters)\n else:\n url = next_link\n query_parameters = {} # type: Dict[str, Any]\n request = self._client.get(url, query_parameters, header_parameters)\n return request\n\n def extract_data(pipeline_response):\n deserialized = self._deserialize('CollectionOfPrintService', pipeline_response)\n list_of_elem = deserialized.value\n if cls:\n list_of_elem = cls(list_of_elem)\n return deserialized.odata_next_link or None, iter(list_of_elem)\n\n def get_next(next_link=None):\n request = prepare_request(next_link)\n\n pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)\n response = pipeline_response.http_response\n\n if response.status_code not in [200]:\n error = self._deserialize(models.OdataError, response)\n map_error(status_code=response.status_code, response=response, error_map=error_map)\n raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)\n\n return pipeline_response\n\n return ItemPaged(\n get_next, extract_data\n )", "def convert_to_model(self, *args):\n rt_xref_service_data, *_ = args\n return [RealtyTypeXRefService(**rt_xref_service) for rt_xref_service in rt_xref_service_data]", "def invoices(self):\r\n return Invoices(self)", "def printout_all(self, indent_level):\n indent = \" \"*indent_level*INDENTATION_MULTIPLIER\n\n print(indent, \"VNF or e2e Service ID:\", self.ID, sep='')\n print(indent, \"|-name:\", self.name, sep='')\n\n print(indent, \"|-info:\", self.info, sep='')\n print(indent, \"|-IP address:\", self.IP_address, sep='')\n print(indent, \"|-URL:\", self.URL, sep='')\n\n if self.related_phys_rsrc_ID_list != None:\n if len(self.related_phys_rsrc_ID_list) >0:\n print(indent, \"|-related/associated physical resource(s):\", 
sep='')\n for phys_resource_ID in self.related_phys_rsrc_ID_list:\n phys_resource_item = get_indexed_item_from_list(phys_resource_ID, AutoResilGlobal.physical_resource_list)\n if phys_resource_item != None:\n phys_resource_item.printout_all(indent_level+1)\n\n if self.related_cloud_virt_rsrc_ID_list != None:\n if len(self.related_cloud_virt_rsrc_ID_list) >0:\n print(indent, \"|-related/associated cloud virtual resource(s):\", sep='')\n for cloud_resource_ID in self.related_cloud_virt_rsrc_ID_list:\n cloud_resource_item = get_indexed_item_from_list(cloud_resource_ID, AutoResilGlobal.cloud_virtual_resource_list)\n if cloud_resource_item != None:\n cloud_resource_item.printout_all(indent_level+1)", "def services(self, services):\n\n self._services = services", "def services(self, services):\n\n self._services = services", "def _get_object_properties(self):\n super()._get_object_properties()\n add_prefix(root=self.root, prefix=self.naming_prefix, exclude=self.exclude_from_prefixing)", "def append_service_to_name(self, data, **kwargs):\n\n data['name'] = f'{data.get(\"name\").upper()}_SERVICE'\n return data", "def provide_services(self, services):\n for conv in self.conversations():\n conv.set_remote('services', json.dumps(services))", "def service_model(self, service_name):\n from sagas.ofbiz.services import OfService as s, create_service_data_frame\n meta=create_service_data_frame(service_name)\n print(meta)", "def __str__(self):\n\t\treturn pprint.pformat({'servicename':self.servicename,'doc_lines':self.doc_lines,'sourcefile':self.sourcefile,'methods':self.methods})", "def with_property(self, schema):\n self.properties = self.properties if self.properties != None else []\n self.properties.append(schema)\n return self", "def convert_to_model(self, *args):\n op_type_xref_service_data, *_ = args\n return [OperationTypeXRefService(**op_type_xref_service) for op_type_xref_service in op_type_xref_service_data]", "def add_object(self, name, env, contentnode):\n props = PropertyDefinition(name, env.docname)\n props.gather(contentnode)\n self.data['objects'][props.key] = props\n self.data['all_objects'][props.key] = props\n return props", "def add_services(self):\n # first get the names\n names = str(self.client.console_execute('services -c name {0}\\n'.format(self.ip))[b'data'])\n while not 'name' in names:\n sleep(10)\n names = self.client.console_read()\n names = names.split('\\n')\n for row in names:\n if self.ip in row:\n row = strip_whitespaces(row)\n self.services.append({'name': row.split(' ')[1]})\n\n # get the ports by service name\n ports = str(self.client.console_execute('services -c port {0}\\n'.format(self.ip))[b'data'])\n while not 'port' in ports:\n sleep(10)\n ports = self.client.console_read()\n ports = ports.split('\\n')\n for row in ports:\n for service in self.services:\n if service['name'] in row:\n row = strip_whitespaces(row)\n service['port'] = row.split(' ')[1]\n\n # get some information by service name (only useful if a report shall be generated)\n info = str(self.client.console_execute('services -c info {0}\\n'.format(self.ip))[b'data'])\n while not 'info' in info:\n sleep(10)\n info = self.client.console_read()\n info = info.split('\\n')\n for row in info:\n for service in self.services:\n if service['name'] in row:\n row = strip_whitespaces(row)\n service['info'] = row.split(' ')[1]" ]
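The first negative snippet above shows the generated Azure SDK paging pattern: a `prepare_request`/`extract_data`/`get_next` trio wrapped in `ItemPaged`. As a side note for readers of this record, here is a minimal, self-contained sketch of how that `ItemPaged` contract behaves. Everything here is illustrative: `PAGES`, `fake_get_next`, and `fake_extract_data` are stand-ins invented for the demo (they are not part of the dataset or the SDK); only `azure.core.paging.ItemPaged` is a real import, and the sketch assumes `azure-core` is installed.

```python
# Minimal sketch of the ItemPaged contract used by the generated list_services
# method above. The "transport" is faked with canned pages; names are invented.
from azure.core.paging import ItemPaged

# Two canned pages keyed by continuation token; None is the first request.
PAGES = {
    None: (["svc-a", "svc-b"], "page-2"),
    "page-2": (["svc-c"], None),  # a None token ends the iteration
}

def fake_get_next(continuation_token=None):
    # In the real SDK this runs the HTTP pipeline (self._client._pipeline.run);
    # here it just looks up a canned response.
    return PAGES[continuation_token]

def fake_extract_data(page):
    # Mirrors the generated extract_data: return (next continuation token,
    # iterator over this page's items).
    items, next_token = page
    return next_token, iter(items)

services = ItemPaged(fake_get_next, fake_extract_data)
print(list(services))  # ['svc-a', 'svc-b', 'svc-c'], fetched lazily page by page
```

The point of the pattern is that callers iterate items, not pages; the paging machinery transparently re-invokes `get_next` with each `odata_next_link`-style token until it is exhausted.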
[ "0.47759324", "0.46511307", "0.46290323", "0.46284688", "0.4617787", "0.45360902", "0.45328298", "0.45328298", "0.4516657", "0.45115456", "0.4414093", "0.43213654", "0.43100104", "0.43074372", "0.4304879", "0.42978", "0.4257366", "0.4256935", "0.4253727", "0.42359823", "0.42359823", "0.42120096", "0.4210084", "0.42013592", "0.41857088", "0.41731682", "0.4150573", "0.41486838", "0.41367933", "0.41211942" ]
0.51095986
0
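Closing out the record: the quoted floats above are per-negative similarity scores, followed by the positive document's score and its rank. For anyone consuming rows like this one, here is a hypothetical consistency check. The field names (`negatives`, `negative_scores`, `document_score`, `document_rank`) and the reading of rank 0 as "positive retrieved first" are assumptions inferred from this record's layout, not a documented schema; the inline `record` is a tiny synthetic example, not data from this file.

```python
# Hypothetical sanity check for one retrieval-training record of this shape.
# Field names and the rank-0 interpretation are assumptions, not a spec.
def check_record(record: dict) -> None:
    negatives = record.get("negatives", [])
    # Scores are stored as quoted strings in the dump, so convert to float.
    scores = [float(s) for s in record.get("negative_scores", [])]
    # Each mined negative should carry exactly one score.
    assert len(negatives) == len(scores), "negatives/scores are misaligned"
    doc_score = float(record.get("document_score", 0.0))
    # If rank 0 indeed means the positive was retrieved first, its score
    # should exceed every negative's score (it does in this record:
    # 0.51095986 > 0.47759324, the best negative).
    if record.get("document_rank") == 0 and not all(doc_score > s for s in scores):
        print("warning: rank 0 but a negative outscores the document")

record = {
    "negatives": ["def a(): pass", "def b(): pass"],
    "negative_scores": ["0.47", "0.46"],
    "document_score": "0.51",
    "document_rank": 0,
}
check_record(record)
print("record is consistent")
```

Note that duplicate negatives (like the repeated `services` setter above, with its twice-occurring score 0.45328298) pass this check by design: mined hard negatives can legitimately repeat, and the scores list stays aligned one-to-one.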