| Field | Type | Min | Max |
| --- | --- | --- | --- |
| query | string (length) | 9 | 9.05k |
| document | string (length) | 10 | 222k |
| metadata | dict | | |
| negatives | list (items) | 30 | 30 |
| negative_scores | list (items) | 30 | 30 |
| document_score | string (length) | 4 | 10 |
| document_rank | string (2 classes) | | |
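Each row pairs a natural-language `query` (typically a one-line summary or docstring) with its positive code `document` and 30 retriever-scored hard `negatives`; the per-row `metadata` marks the (`query`, `document`, `negatives`) triplet as the training objective. Below is a minimal sketch of expanding one row into training triplets — the dataset path is a hypothetical placeholder, and the hardest-first selection is an assumption, not something this preview specifies:

```python
# Minimal sketch, assuming a Hugging Face dataset with the schema above.
# "org/code-retrieval-triplets" is a placeholder path, not the real dataset id.
from datasets import load_dataset

ds = load_dataset("org/code-retrieval-triplets", split="train")  # hypothetical id

def iter_triplets(row, k=5):
    """Yield (query, document, negative) triplets from one row, matching the
    row metadata's objective: {"triplet": [["query", "document", "negatives"]]}.
    Takes the k highest-scored (hardest) negatives first -- an assumption."""
    ranked = sorted(zip(row["negatives"], row["negative_scores"]),
                    key=lambda pair: float(pair[1]), reverse=True)
    for negative, _score in ranked[:k]:
        yield row["query"], row["document"], negative
```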
Pass through to provider supports_catalog_hierarchy_design
def supports_catalog_hierarchy_design(self):
    # Implemented from kitosid template for -
    # osid.resource.ResourceProfile.supports_resource_lookup
    return self._provider_manager.supports_catalog_hierarchy_design()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def supports_catalog_hierarchy_design(self):\n return False", "def supports_catalog_hierarchy(self):\n return False", "def supports_book_hierarchy_design(self):\n return False", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def get_catalog_hierarchy_design_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_hierarchy_design_session(self):\n raise Unimplemented()", "def create_catalog_hierarchy(self, *args, **kwargs):\n # Patched in by [email protected], Jul 23, 2014, added by birdland to template on Aug 8, 2014\n # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead\n # Will not return an actual object, just JSON\n # since a BankHierarchy does not seem to be an OSID thing.\n return self._get_provider_session('catalog_hierarchy_design_session').create_catalog_hierarchy(*args, **kwargs)", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def can_modify_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_design_session').can_modify_catalog_hierarchy()", "def create_hierarchy(self):\n\t\tpass", "def get_catalog_hierarchy_design_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_hierarchy_design_session(*args, **kwargs)", "def supports_book_hierarchy(self):\n return False", "def get_book_hierarchy_design_session(self):\n raise Unimplemented()", "def getHierarchies():", "def getHierarchies():", "def get_designs(self):", "def get_catalog_hierarchy_session(self):\n raise Unimplemented()", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy()", "def get_catalog_hierarchy_design_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def show_hierarchy_chart(self):\n\n chart_type_index = self.ui.comboBox_sunburst_charts.currentIndex()\n if chart_type_index < 1:\n return\n self.get_selected_categories_and_codes()\n self.helper_for_matching_category_and_code_name()\n if chart_type_index == 1: # Code frequency sunburst\n self.hierarchy_code_frequency(\"sunburst\")\n if chart_type_index == 2: # Code frequency treemap\n self.hierarchy_code_frequency(\"treemap\")\n if chart_type_index == 3: # Code by characters sunburst\n self.hierarchy_code_volume_by_characters(\"sunburst\")\n if chart_type_index == 4: # Code by characters treemap\n self.hierarchy_code_volume_by_characters(\"treemap\")\n if chart_type_index == 5: # Code by image area sunburst\n self.hierarchy_code_volume_by_area(\"sunburst\")\n if chart_type_index == 6: # Code by image area treemap\n self.hierarchy_code_volume_by_area(\"treemap\")\n if chart_type_index == 7: # Code by A/V sunburst\n self.hierarchy_code_volume_by_segments(\"sunburst\")\n if chart_type_index == 8: # Code by A/V treemap\n self.hierarchy_code_volume_by_segments(\"treemap\")\n self.ui.comboBox_sunburst_charts.setCurrentIndex(0)", "def getHierarchy(unique_name):", "def getHierarchy(unique_name):", "def getHierarchy(unique_name):", "def generate_hierarchy(self,descr):\n # assert the existence of all the keys we need to set up at least on level\n assert 'problem_class' in descr\n assert 'problem_params' in descr\n assert 'dtype_u' in descr\n assert 'dtype_f' in descr\n assert 'sweeper_class' in descr\n assert 'level_params' in descr\n\n # convert problem-dependent parameters consisting of dictionary of lists to a list of dictionaries with only a\n # single entry per key, one dict per level\n pparams_list = self.__dict_to_list(descr['problem_params'])\n # put this newly generated list into the description dictionary (copy to avoid changing the original one)\n descr_new = cp.deepcopy(descr)\n descr_new['problem_params'] = pparams_list\n # generate list of dictionaries out of the description\n descr_list = self.__dict_to_list(descr_new)\n\n # sanity check: is there a transfer class? is there one even if only a single level is specified?\n if len(descr_list) > 1:\n assert 'transfer_class' in descr_new\n assert 'transfer_params' in descr_new\n elif 'transfer_class' in descr_new:\n print('WARNING: you have specified transfer classes, but only a single level...')\n\n # generate levels, register and connect if needed\n for l in range(len(descr_list)):\n\n # check if we have a hook on this list. if not, use default class.\n if 'hook_class' in descr_list[l]:\n hook = descr_list[l]['hook_class']\n else:\n hook = hookclass.hooks\n\n if 'sweeper_params' in descr_list[l]:\n swparams = descr_list[l]['sweeper_params']\n else:\n swparams = {}\n\n if not 'collocation_class' in swparams:\n assert 'collocation_class' in descr_list[l]\n swparams['collocation_class'] = descr_list[l]['collocation_class']\n\n if not 'num_nodes' in swparams:\n assert 'num_nodes' in descr_list[l]\n swparams['num_nodes'] = descr_list[l]['num_nodes']\n\n L = levclass.level(problem_class = descr_list[l]['problem_class'],\n problem_params = descr_list[l]['problem_params'],\n dtype_u = descr_list[l]['dtype_u'],\n dtype_f = descr_list[l]['dtype_f'],\n sweeper_class = descr_list[l]['sweeper_class'],\n sweeper_params = swparams,\n level_params = descr_list[l]['level_params'],\n hook_class = hook,\n id = 'L'+str(l))\n\n self.register_level(L)\n\n if l > 0:\n self.connect_levels(transfer_class = descr_list[l]['transfer_class'],\n transfer_params = descr_list[l]['transfer_params'],\n fine_level = self.levels[l-1],\n coarse_level = self.levels[l])", "def can_access_family_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_access_catalog_hierarchy()\n return True", "def delete_catalog_hierarchy(self, *args, **kwargs):\n # Patched in by [email protected], Jul 23, 2014, added by birdland to template on Aug 8, 2014\n # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead\n # Will not return an actual object, just JSON\n # since a BankHierarchy does not seem to be an OSID thing.\n return self._get_provider_session('catalog_hierarchy_design_session').delete_catalog_hierarchy(*args, **kwargs)", "def can_access_book_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_access_catalog_hierarchy()\n return True", "def supports_catalog(self):\n return False", "def can_modify_family_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy_template\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_modify_catalog_hierarchy()\n return True" ]
[ "0.86324745", "0.7739988", "0.72497606", "0.71859103", "0.7051641", "0.70463115", "0.6772082", "0.6659368", "0.6655546", "0.65757155", "0.6380629", "0.6223649", "0.6004748", "0.59804386", "0.59804386", "0.5856369", "0.5785725", "0.5724177", "0.5713283", "0.5661503", "0.54989386", "0.54794765", "0.54794765", "0.54794765", "0.54746014", "0.5472285", "0.5419409", "0.53847355", "0.5369839", "0.5324078" ]
0.8373554
1
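In every record shown here, `document_rank` equals the number of negatives whose score exceeds `document_score` — in the record above, only 0.86324745 beats the document's 0.8373554, giving rank 1. A small sketch of that observed relationship (an inference from this preview, not a documented guarantee):

```python
def infer_document_rank(document_score: float, negative_scores: list) -> int:
    """Rank of the positive document among its mined negatives: the count of
    negatives the retriever scored above it (0 = document ranked first)."""
    return sum(float(neg) > document_score for neg in negative_scores)

# Record above: one negative (0.86324745) outscores the document (0.8373554).
assert infer_document_rank(0.8373554, [0.86324745, 0.7739988, 0.72497606]) == 1
```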
Pass through to provider get_catalog_record_types
def get_catalog_record_types(self):
    # Implemented from kitosid template for -
    # osid.resource.ResourceProfile.get_resource_record_types
    return self._provider_manager.get_catalog_record_types()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_catalog_record_types(self):\n return TypeList([])", "def get_catalog_search_record_types(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.get_resource_record_types\n return self._provider_manager.get_catalog_search_record_types()", "def get_catalog_search_record_types(self):\n return TypeList([])", "def record_type(values):\n field = basic.lookup(values, name='Record Type (one of %s)' % values)\n\n return field.setResultsName('record_type')", "def _get_types(self):\n\n db = Database()\n self.c_built_ins = list(map(lambda tup: tup[0], db.select_built_types()))\n self.c_built_in_array_types = r'^(' + '|'.join(self.escaped(self.c_built_ins)) + ')\\[[0-9]*\\]'\n self.c_types = list(map(lambda tup: tup[0], db.select_types()))\n self.c_array_types = r'^(' + '|'.join(self.escaped(self.c_types)) + ')\\[[0-9]*\\]'\n db.close_connection()", "def get_book_record_types(self):\n return TypeList([])", "def list_record_types(self):\n return [RecordType.A]", "def can_create_catalog_with_record_types(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_create_bin_with_record_types\n return self._get_provider_session('catalog_admin_session').can_create_catalog_with_record_types(*args, **kwargs)", "def get_book_search_record_types(self):\n return TypeList([])", "def data_types():\n\n return ...", "def get_types(self):\n return self.types", "def gettypes(self):\n return [str(self.sd.xlate(t[0])) for t in self.sd.types]", "def data_types(self):", "def get_types(self):\n return self.column_type", "def get_comment_search_record_types(self):\n return TypeList([])", "def getTypesList():\n return Gw2Spidy._request('types')['results']", "def complete_info_record_type(self, text, line, begidx, endidx):\n begidx = begidx\n endidx = endidx\n mline = line.partition(' ')[2]\n offs = len(mline) - len(text)\n info_record_types = ['description', 'access', 'default', 'bit',\n 'flag', 'max', 'min']\n return [s[offs:] for s in info_record_types if s.startswith(mline)]", "def get_comment_record_types(self):\n return TypeList([])", "def test_get_types(self):\n pass", "def data_types(self):\n return self['data_types']", "def ntypes(self): # -> list[str]:\n ...", "def get_types(self) :\n\n return list(self.types)[1:]", "def supports_catalog_record_type(self, catalog_record_type=None):\n if catalog_record_type is None:\n raise NullArgument()\n return False", "def get_catalogs_by_record_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_record_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_record_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def types():\n sql = \"\"\"SELECT DISTINCT sample_type\n FROM barcodes.sample\n ORDER BY sample_type\"\"\"\n with pm.sql.TRN:\n pm.sql.TRN.add(sql)\n return pm.sql.TRN.execute_fetchflatten()", "def etypes(self): # -> list[str]:\n ...", "def getTypes():\n\n\t\tquery = \"\\\n\t\t\tSELECT\\\n\t\t\t\tid_item_container_type,\\\n\t\t\t\tlabel\\\n\t\t\tFROM\\\n\t\t\t\titem_container_type\\\n\t\t\"\n\n\t\treturn {t['id_item_container_type']: t['label'] for t in Model.fetchAllRows(query)}", "def get_column_types():\n\n def predicate(obj):\n \"\"\"A predicate to get all classes that are subclasses of\n MafColumnRecord\"\"\"\n return inspect.isclass(obj) and issubclass(obj, MafColumnRecord)\n\n # Get all available column types\n return inspect.getmembers(sys.modules[\"maflib.column_types\"], predicate)", "def document_types(db: Session = Depends(get_db)):\n return get_document_types(db)", "def types(self) -> List[str]:\n return self._types" ]
[ "0.8058518", "0.78703153", "0.7501678", "0.70615953", "0.6976058", "0.6870028", "0.6829432", "0.6801652", "0.67067724", "0.66908145", "0.66009", "0.65769285", "0.64779913", "0.644772", "0.6430422", "0.6419788", "0.63961476", "0.63717157", "0.63620895", "0.63473743", "0.6343547", "0.6318732", "0.629471", "0.6274897", "0.6255263", "0.6209096", "0.61409944", "0.6125757", "0.60840267", "0.60825354" ]
0.8274152
0
Pass through to provider get_catalog_search_record_types
def get_catalog_search_record_types(self):
    # Implemented from kitosid template for -
    # osid.resource.ResourceProfile.get_resource_record_types
    return self._provider_manager.get_catalog_search_record_types()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_catalog_search_record_types(self):\n return TypeList([])", "def get_book_search_record_types(self):\n return TypeList([])", "def get_catalog_record_types(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.get_resource_record_types\n return self._provider_manager.get_catalog_record_types()", "def get_catalog_record_types(self):\n return TypeList([])", "def get_comment_search_record_types(self):\n return TypeList([])", "def search_types(self, params, context=None):\n return self._client.call_method(\n 'KBaseSearchEngine.search_types',\n [params], self._service_ver, context)", "def supports_catalog_search_record_type(self, catalog_search_record_type=None):\n if catalog_search_record_type is None:\n raise NullArgument()\n return False", "def get_searchable_content_types():\n record = dict(interface=ICoverSettings, name='searchable_content_types')\n return api.portal.get_registry_record(**record)", "def record_type(values):\n field = basic.lookup(values, name='Record Type (one of %s)' % values)\n\n return field.setResultsName('record_type')", "def get_book_record_types(self):\n return TypeList([])", "def getTypesList():\n return Gw2Spidy._request('types')['results']", "def list_types(self, params, context=None):\n return self._client.call_method(\n 'KBaseSearchEngine.list_types',\n [params], self._service_ver, context)", "def get_catalogs_by_record_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_record_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_record_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def can_create_catalog_with_record_types(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_create_bin_with_record_types\n return self._get_provider_session('catalog_admin_session').can_create_catalog_with_record_types(*args, **kwargs)", "def _get_types(self):\n\n db = Database()\n self.c_built_ins = list(map(lambda tup: tup[0], db.select_built_types()))\n self.c_built_in_array_types = r'^(' + '|'.join(self.escaped(self.c_built_ins)) + ')\\[[0-9]*\\]'\n self.c_types = list(map(lambda tup: tup[0], db.select_types()))\n self.c_array_types = r'^(' + '|'.join(self.escaped(self.c_types)) + ')\\[[0-9]*\\]'\n db.close_connection()", "def get_types(self):\n return self.types", "def document_types(db: Session = Depends(get_db)):\n return get_document_types(db)", "def test_get_types(self):\n pass", "def supports_book_search_record_type(self, book_search_record_type=None):\n if book_search_record_type is None:\n raise NullArgument()\n return False", "def list_record_types(self):\n return [RecordType.A]", "def get_filter_types(verbose=False):\n if verbose:\n pprint(filter_types)\n return filter_types", "def ntypes(self): # -> list[str]:\n ...", "def types():\n sql = \"\"\"SELECT DISTINCT sample_type\n FROM barcodes.sample\n ORDER BY sample_type\"\"\"\n with pm.sql.TRN:\n pm.sql.TRN.add(sql)\n return pm.sql.TRN.execute_fetchflatten()", "def getTypes(self):\n return self._doRequest(self.httpClient.getTypes)", "def data_types():\n\n return ...", "def get_types(self) :\n\n return list(self.types)[1:]", "def getTypes():\n\n\t\tquery = \"\\\n\t\t\tSELECT\\\n\t\t\t\tid_item_container_type,\\\n\t\t\t\tlabel\\\n\t\t\tFROM\\\n\t\t\t\titem_container_type\\\n\t\t\"\n\n\t\treturn {t['id_item_container_type']: t['label'] for t in Model.fetchAllRows(query)}", "def search_doc_type(self):\n return self._meta.model_name", "def findsrvtypes(self, na = \"\", scopelist = \"default\",\n callback = None, cbdata = None):\n cb = callback\n if not callback:\n cb = self.__srvtypecb\n cbdata = [ SLPError.SLP_OK, [] ]\n err = self.slph.findsrvtypes(na, slpstr(scopelist), cb, cbdata)\n if err != SLPError.SLP_OK:\n raise SLPError(err)\n if not callback:\n if cbdata[0] != SLPError.SLP_OK:\n raise SLPError(cbdata[0])\n return cbdata[1]", "def gettypes(self):\n return [str(self.sd.xlate(t[0])) for t in self.sd.types]" ]
[ "0.8379832", "0.7579349", "0.735006", "0.7297328", "0.72568154", "0.69283974", "0.69054884", "0.6652005", "0.6462058", "0.64570224", "0.6413988", "0.6193402", "0.61646557", "0.6160769", "0.61058164", "0.605122", "0.59236294", "0.58947945", "0.58835816", "0.580844", "0.57843614", "0.57633376", "0.5750339", "0.5747993", "0.57475734", "0.57315296", "0.56866306", "0.5672427", "0.56423426", "0.56290334" ]
0.86107105
0
Sets the underlying catalog view to match current view
def _set_catalog_view(self, session):
    if self._catalog_view == COMPARATIVE:
        try:
            session.use_comparative_catalog_view()
        except AttributeError:
            pass
    else:
        try:
            session.use_plenary_catalog_view()
        except AttributeError:
            pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _set_catalog_view(self, session):\n if self._catalog_view == FEDERATED:\n try:\n session.use_federated_catalog_view()\n except AttributeError:\n pass\n else:\n try:\n session.use_isolated_catalog_view()\n except AttributeError:\n pass", "def use_comparative_catalog_view(self):\n self._catalog_view = COMPARATIVE\n # self._get_provider_session('catalog_lookup_session') # To make sure the session is tracked\n for session in self._get_provider_sessions():\n try:\n session.use_comparative_catalog_view()\n except AttributeError:\n pass", "def on_action_set_view(self, content):\n self._view = content['view']\n self.refresh_traits_widget()", "def _set_catalog(self, catalog: cat.Catalog) -> None:\n self._catalog_interface = CatalogInterface(catalog)\n self._catalog = catalog", "def _connectView(self):\n self._view.select_asset = self.select_asset\n self._view.add_assets = self.add_assets\n self._view.remove_assets = self.remove_assets\n self._view.update_assets = self.update_assets\n self._view.commit = self.commit", "def set_catalogue(self, catalogue, force_it=False,\n fast_setup=False):\n from .catalogue.basecatalogue import Catalogue\n\n if not fast_setup:\n if self.has_catalogue() and force_it is False:\n raise AttributeError(\"'catalogue' already defined\"+\\\n \" Set force_it to True if you really known what you are doing\")\n \n if Catalogue not in catalogue.__class__.__mro__:\n raise TypeError(\"the input 'catalogue' must be an astrobject Catalogue\")\n \n if hasattr(self,\"wcs\") and self.has_wcs():\n catalogue.set_wcs(self.wcs, force_it=True)\n if catalogue.nobjects_in_fov < 1:\n warnings.warn(\"WARNING No object in the field of view,\"+\"\\n\"+\\\n \" -> catalogue not loaded\")\n return\n \n # --------\n # - set it\n self._side_properties[\"catalogue\"] = catalogue", "def set_rate_catalog(self, rate_catalog):\n self.single_selection_from_kendo_dropdown(self.rate_catalog_kendo_dropdown_locator, rate_catalog)", "def set_view(self, index):\n view = index\n self.reset_scroll_area()\n self.clearSelection()\n\n if view == self.SURFACE_VIEW:\n self.toggle_surface_mode()\n self.view = \"Surface View\"\n elif view == self.BORDER_VIEW:\n self.toggle_border_mode()\n self.view = \"Border View\"\n elif view == self.GEOM_VIEW:\n if self.show_geom() == \"Canceled\":\n self.canceled = True\n else:\n self.view = \"Geom View\"\n elif view == self.MESH_VIEW:\n if self.show_mesh() == \"Canceled\":\n self.canceled = True\n else:\n self.view = \"Mesh View\"", "def SetPackageView( self, directoryView ):\n self.directoryView = not self.directoryView\n self.packageMenuItem.Check( self.directoryView )\n self.packageViewTool.SetValue( self.directoryView )\n if self.loader:\n self.SetModel( self.loader )\n self.RecordHistory()", "def setViewComponent(self, viewComponent):\n self.viewComponent = viewComponent", "def catalog_id(self, catalog_id):\n self._catalog_id = catalog_id", "def use_plenary_catalog_view(self):\n self._catalog_view = PLENARY\n # self._get_provider_session('catalog_lookup_session') # To make sure the session is tracked\n for session in self._get_provider_sessions():\n try:\n session.use_plenary_catalog_view()\n except AttributeError:\n pass", "def update_view(self, selected):\n pass", "def _catalog_tree_view_factory(self, window, **traits):\n from mapero.dataflow_editor.view.catalog_tree_view import CatalogTreeView\n\n catalog = window.get_service( Catalog )\n \n catalog_tree_view = CatalogTreeView(obj = catalog, window = window)\n return catalog_tree_view", "def set_current(self):\n self.ensure_one()\n view_values = {}\n last_version = self.view.versions[0]\n if self.id == last_version.id:\n view_values.update({\n 'current_version': last_version.id\n })\n else:\n current_version = self.create({\n 'view': self.view.id,\n 'arch': self.arch,\n })\n view_values.update({\n 'arch': self.arch,\n 'current_version': current_version.id\n })\n\n # avoid_version is set because we don't want to trigger the creation\n # of a new version when updating the view\n self.with_context(avoid_version=True).view.write(view_values)", "def set_related_view(request, view_name):\n request.environ['cone.app.related_view'] = view_name", "def set_view(self):\n self.scene.mlab.view(azimuth=90.0, elevation=-90.0)", "def set_catalogue(self, catalogue, force_it=False,\n match_angsep=3, **kwargs):\n super(Image, self).set_catalogue(catalogue, force_it=force_it, **kwargs)\n\n # -- Lets save the pixel values\n if self.has_catalogue() and self.has_sepobjects():\n self.sepobjects.set_catalogue(catalogue,force_it=True,reset=False)\n self.sepobjects.match_catalogue(deltadist=match_angsep)", "def use_comparative_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def use_comparative_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def use_comparative_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def assign_view(self):\n return self._assign_view if self._assign_view else self.assign_view_class.as_view()", "def UpdateView(self):\n self.View._viewData = self.Model.ModelViewData", "def view_type(self, view_type):\n\n self.container['view_type'] = view_type", "def view_name(self, view_name):\n\n self._view_name = view_name", "def use_comparative_family_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def use_comparative_family_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def viewer(self, value):\n\n # if viewer has somehow already been set, delete it to\n # disconnect all signals\n if self._viewer: del self._viewer\n\n self._viewer = value\n\n # TODO: need custom view for viewer; then we can connect a selection change to setting a new mod on that view", "def set_view(self, view, view_xml_filename):\n if self.view_exists(view):\n command = PlatformJenkinsJavaCLI.UPDATE_VIEW\n else:\n command = PlatformJenkinsJavaCLI.CREATE_VIEW\n\n with open(view_xml_filename) as view_xml_file:\n view_xml = view_xml_file.read()\n\n call = subprocess.Popen(self.cli + [command, view], stdin=subprocess.PIPE)\n call.communicate(view_xml)\n call.wait()", "def init_view(self):\n self.view_map = self.ctx.clientmap" ]
[ "0.7653462", "0.6380571", "0.6070326", "0.5951991", "0.5917372", "0.5841899", "0.5828823", "0.5798195", "0.57301855", "0.569667", "0.56866413", "0.56350636", "0.56345433", "0.56257606", "0.5610489", "0.55495274", "0.54750896", "0.5470648", "0.5437594", "0.5437594", "0.5437594", "0.5403253", "0.5375391", "0.5369194", "0.533839", "0.52645415", "0.52645415", "0.5238226", "0.52152926", "0.51466745" ]
0.7872513
0
Gets the session from a subpackage
def _get_sub_package_provider_session(self, sub_package, session_name, proxy=None):
    agent_key = self._get_agent_key(proxy)
    if session_name in self._provider_sessions[agent_key]:
        return self._provider_sessions[agent_key][session_name]
    else:
        manager = self._get_sub_package_provider_manager(sub_package)
        try:
            session = self._instantiate_session('get_' + session_name + '_for_bank',
                                                proxy=self._proxy,
                                                manager=manager)
        except AttributeError:
            session = self._instantiate_session('get_' + session_name,
                                                proxy=self._proxy,
                                                manager=manager)
        self._set_bank_view(session)
        if self._session_management != DISABLED:
            self._provider_sessions[agent_key][session_name] = session
        return session
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def session(get_session):\n return get_session()", "def getSession():\n return call(\"getSession\")", "def session(self):\n return self.session_store.get_session()", "def get_session(self):\n return self.session", "def session(self):\n return session", "def session(self):\n return self.ssession()", "def get_session(self):\n return self._session()", "def session(self):\n\t\treturn self._session", "def get_session(context, key):\n session_manager = getToolByName(context, 'session_data_manager')\n\n if not session_manager.hasSessionData():\n return None\n\n session = session_manager.getSessionData()\n\n if not key in session.keys():\n return None\n\n return session[key]", "def getSession(self):\n session = app.settings.cherrypy.session.get(self.session)\n return session", "def session():\n def session():\n return BaseUrlSession()\n return session", "def get_session(group, **session_kwargs):\n return ks_loading.load_session_from_conf_options(\n CONF, group, **session_kwargs)", "def session(self):\n return self.session_store.get_session()", "def session(self):\n return self.session_store.get_session()", "def session(self):\n return self.session_store.get_session(backend=\"datastore\")", "def session(self):\n return self.__session", "def getSession(self):\n return self.request.getSession()", "def get_session(cls):\r\n if cls._session is not None:\r\n return cls._session\r\n else:\r\n raise RuntimeError('Session not set.')", "def get_read_session() -> Session:\n return _read_session()", "def session(self):", "def get_session():\n if not hasattr(get_session, \"session\"):\n get_session.session = requests_cache.CachedSession(\n cache_name=CACHE_PATH.rstrip(\".sqlite\"),\n expire_after=518400, # 6 days\n )\n adapter = HTTPAdapter(max_retries=3)\n get_session.session.mount(\"http://\", adapter)\n get_session.session.mount(\"https://\", adapter)\n return get_session.session", "def get_session(app: object = None) -> AbsMetaSession:\n config = get_application_config(app)\n orignal_versions_path = config.get('DOCUMENT_ORIGNAL_VERSIONS_PATH', None)\n latest_versions_path = config.get('DOCUMENT_LATEST_VERSIONS_PATH', None)\n\n return AbsMetaSession(latest_versions_path, orignal_versions_path)", "def get_session(base_url, group_id, token, session_id):\n url = base_url + route_session.format(session_id=session_id)\n response = requests.get(url, headers=headers(group_id, token))\n return response", "def session(self):\n return self._session", "def session(self):\n return self._session", "def getSession(self):\n if self.accessToken is None:\n self.authenticate()\n\n s = requests.Session()\n s.auth = self.getAuthObj()\n s.headers = {\"Accept\": \"application/json\"}\n return s", "def get_session():\n name = request.args.get('name')\n sch = Scheduler()\n return sch.get_session(name)", "def get_session() -> Any:\n session = boto3.session.Session()\n return session", "def get_session():\n request_session = requests.Session()\n\n # Try to use what was passed in for username/password...\n username = CMD.username\n password = CMD.password\n \n # ...if there was nothing passed in then try to read it from config file\n if ((username is None or username == \"\") and (password is None or password == \"\")):\n # Try to read username and password from config file, if it exists\n # Otherwise default to DEFAULT_USERNAME/DEFAULT_PASSWORD\n try:\n with open(\"config.json\") as config_file:\n config_data = json.load(config_file)\n if (config_data):\n username = config_data[\"username\"]\n password = config_data[\"password\"]\n except:\n LOG.exception(\"Unable to open \\\"/collector/config.json\\\" file\")\n username = DEFAULT_USERNAME\n password = DEFAULT_PASSWORD\n\n request_session.auth = (username, password)\n request_session.headers = {\"Accept\": \"application/json\",\n \"Content-Type\": \"application/json\",\n \"netapp-client-type\": \"grafana-\" + __version__}\n # Ignore the self-signed certificate issues for https\n request_session.verify = False\n return request_session", "def _get_session_from_cache(thread_ident: int) -> requests.Session:\n return _GLOBAL_BACKEND_FACTORY()" ]
[ "0.71302146", "0.7097068", "0.6674535", "0.6588422", "0.6516722", "0.6399272", "0.6355631", "0.62893623", "0.6212347", "0.6211648", "0.6177836", "0.6176052", "0.6169064", "0.6169064", "0.61314595", "0.6100953", "0.6097932", "0.6096678", "0.6087465", "0.60775274", "0.6071135", "0.605066", "0.6046357", "0.602824", "0.602824", "0.602721", "0.60205394", "0.60112077", "0.6004107", "0.5994638" ]
0.79204565
0
Instantiates a provider session
def _instantiate_session(self, method_name, proxy=None, *args, **kwargs):
    if 'manager' in kwargs:
        session_class = getattr(kwargs['manager'], method_name)
        del kwargs['manager']
    else:
        session_class = getattr(self._provider_manager, method_name)
    if proxy is None:
        try:
            return session_class(bank_id=self._catalog_id, *args, **kwargs)
        except AttributeError:
            return session_class(*args, **kwargs)
    else:
        try:
            return session_class(bank_id=self._catalog_id, proxy=proxy, *args, **kwargs)
        except AttributeError:
            return session_class(proxy=proxy, *args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_session(self, **params):\n raise NotImplementedError('Should be implemented by a sub-class.')", "def init_session(self):\n pass", "def init_session(self):\n pass", "def __init__(self):\r\n # create a session id\r\n self.session = ViSession()", "def get_session_factory(self, options):", "def __init__(self, session):\n self.session = session", "def provider_session(self):\n if self.flow_type == \"implicit\":\n return OAuth2Session(\n client_id=self.client_id,\n scope=self.client_kwargs[\"scope\"],\n redirect_uri=self.redirect_uri\n )\n elif self.flow_type == \"client\":\n return OAuth2Session(\n client_id=self.client_id,\n client_secret=self.client_secret,\n scope=self.client_kwargs[\"scope\"]\n )", "def __init__(self):\n engine = create_engine(\"postgresql://postgres:1@localhost:5432/postgres\")\n session_class = sessionmaker(bind=engine)\n self.session = session_class()", "def __init__(self, session):\n self._session = session", "def create_session(self):\n self._session = self.create_scoped_session()\n self.session = self._session()", "def init_session(self):\n ssl_context = ssl.create_default_context(\n purpose=ssl.Purpose.SERVER_AUTH, cafile=None, capath=None,\n cadata=None)\n ssl_settings = {\"ssl_context\": ssl_context}\n self.session = iRODSSession(\n host=self.module.params[\"host\"],\n port=self.module.params[\"port\"],\n user=self.module.params[\"admin_user\"],\n password=self.module.params[\"admin_password\"],\n zone=self.module.params[\"zone\"],\n **ssl_settings)", "def init_session(self, get_token=True):\n if (self._client_id is None) or (self._client_secret is None):\n sys.exit(\n \"Please make sure to set the client id and client secret \"\n \"via the constructor, the environment variables or the config \"\n \"file; otherwise, the LaMetric cloud cannot be accessed. \"\n \"Abort!\"\n )\n\n self._session = OAuth2Session(\n client=BackendApplicationClient(client_id=self._client_id)\n )\n\n if get_token is True:\n # get oauth token\n self.get_token()", "def new_session(self):\n self._session = self.vspk.NUVSDSession(\n username=self.user,\n password=self.password,\n enterprise=self.enterprise,\n api_url=self.uri)\n\n self._session.start()\n if not self.default_enterprise:\n self.default_enterprise = self.get_enterprise_by_name(\n self.default_netpartition_name)\n\n self.assertIsNotNone(self.default_enterprise,\n \"Should have a default \"\n \"enterprise for Nuage plugin\")\n\n return self._session", "def _get_provider_session(self, session_name):\n agent_key = self._get_agent_key()\n if session_name in self._provider_sessions[agent_key]:\n return self._provider_sessions[agent_key][session_name]\n else:\n session_class = getattr(self._provider_manager, 'get_' + session_name + '_for_catalog')\n if self._proxy is None:\n if 'notification_session' in session_name:\n # Is there something else we should do about the receiver field?\n session = session_class('fake receiver', self._catalog.get_id())\n else:\n session = session_class(self._catalog.get_id())\n else:\n if 'notification_session' in session_name:\n # Is there something else we should do about the receiver field?\n session = session_class('fake receiver', self._catalog.get_id(), self._proxy)\n else:\n session = session_class(self._catalog.get_id(), self._proxy)\n self._set_catalog_view(session)\n self._set_object_view(session)\n self._set_operable_view(session)\n self._set_containable_view(session)\n if self._session_management != DISABLED:\n self._provider_sessions[agent_key][session_name] = session\n return session", "def session(self, context: InjectionContext = None) -> \"ProfileSession\":", "def init_session():\n\n session = Session()\n\n # headers\n session.headers = {\n \"Accept-Encoding\": \"gzip, deflate, br\",\n \"Accept-Language\": \"pt-PT,pt;q=0.8,en-GB;q=0.6,en;q=0.4,en-US;q=0.2\",\n \"User-Agent\": \"Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0\",\n \"Content-Type\": \"application/x-www-form-urlencoded\",\n \"Accept\": \"text/javascript, application/javascript, */*\",\n \"Referer\": \"https://sigrhe.dgae.mec.pt/openerp/menu?active=474&tzoffset=-60\",\n \"X-Requested-With\": \"XMLHttpRequest\",\n \"Connection\": \"keep-alive\",\n \"DNT\": \"1\",\n \"Host\": \"sigrhe.dgae.mec.pt\",\n \"Origin\": \"https://sigrhe.dgae.mec.pt\",\n }\n\n return session", "def _set_session(self):\n self.__session = sessionmaker(bind=self.__engine)()", "def __init__(self, session):\n self.sess = session", "def __init__(\n self,\n profile: Profile,\n *,\n context: InjectionContext = None,\n settings: Mapping[str, Any] = None,\n ):\n self._active = False\n self._awaited = False\n self._entered = 0\n self._context = (context or profile.context).start_scope(\"session\", settings)\n self._profile = profile", "def create_session(self, transport):\n session_id = self.session_id_allocator.allocate()\n session = self.SESSION_CLS(self, transport, session_id)\n self.sessions[session.id] = session\n return session", "def create_session(self, transport):\n session = self.SESSION_CLS(self, transport, 0)\n self.session = session\n return session", "def __init__(self, config):\n self.config = config\n self.__session = None", "def __init__(self):\n engine = db_connect()\n self.Session = sessionmaker(bind=engine)", "def _new_session(self, username_key=None, **attributes):\n for key in ['username', 'token', 'tenant_id']:\n if attributes.get(key, None) is None:\n attributes[key] = key + \"_\" + text_type(uuid4())\n if 'expires' not in attributes:\n attributes['expires'] = (\n datetime.utcfromtimestamp(self._clock.seconds())\n + timedelta(days=1)\n )\n session = Session(**attributes)\n if username_key is None:\n username_key = session.username\n self._username_to_token[username_key] = session.token\n self._token_to_session[session.token] = session\n self._tenant_to_token[session.tenant_id] = session.token\n return session", "def session(self):", "def perform_session_create(self, environ: str, session_parameters: dict) -> Session:\n session_parameters[\"mounts\"] = []\n attach_context = self.client.start_session(environ, session_parameters)\n\n # TODO should we record some of the request\n # headers e.g. `REMOTE_ADDR`, `HTTP_USER_AGENT`, `HTTP_REFERER` for analytics?\n\n return Session.objects.create(\n project=self.project,\n url=attach_context.url,\n execution_id=attach_context.execution_id,\n client_class_id=self.client.class_id,\n )", "def start_session(self):\r\n ee.Initialize()", "def __enter__(self):\n self.new_session()\n return self", "def create_session(self, transport):\n session_id = self.session_id_allocator.allocate()\n session = self.SESSION_CLS(self, transport, session_id, self.message_mgr)\n self.sessions[session.id] = session\n return session", "def test_create_session(self):\n study_id = self.storage.create_study(sample_study_spec())\n\n session = sample_session(study_id=study_id)\n self.storage.create_session(session)\n\n self.assertEqual(self.storage.get_session(study_id, session.id), session)" ]
[ "0.6511579", "0.6373079", "0.6373079", "0.6330975", "0.6295228", "0.6226995", "0.6220061", "0.62079823", "0.615411", "0.61176205", "0.61087424", "0.60451734", "0.60339946", "0.60192454", "0.60136837", "0.601073", "0.6005657", "0.5998351", "0.5991237", "0.5929469", "0.59292233", "0.590246", "0.5896623", "0.58477426", "0.58120143", "0.5800205", "0.57755345", "0.5762185", "0.5747491", "0.57463133" ]
0.64221215
1
Close all sessions, unless session management is set to MANDATORY
def close_sessions(self):
    if self._session_management != MANDATORY:
        self._provider_sessions = dict()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def close_all_sessions() -> None:\n\n for sess in _sessions.values():\n sess.close()", "def close_all(cls) -> None:\n\n close_all_sessions()", "def close_all(cls):\n for sess in cls._session_registry.values():\n sess.close()", "def close_sessions(self):\n if self._session_management != MANDATORY:\n self._provider_sessions = dict()\n else:\n raise IllegalState()", "def close(self):\n for session in self.sessions.copy().values():\n session.close(SessionCloseErrorCode.SESSION_DIED)", "def close(self):\r\n _logger.debug(\"Closing sessions...\")\r\n dbs = self._sessions.keys()\r\n while len(dbs) > 0:\r\n session = self._sessions.pop(dbs.pop())\r\n session.close()\r\n if self.__provider is not None:\r\n self.__provider.close()\r\n self.__provider = None", "def close_session(self):\n self.sess.close()", "def close(self):\n self.session.close()\n self.session = None", "def teardown_session(e):\n my_db.close()\n OT_spider.close()", "def close_open_sessions(self):\n for open_session in self.current_sessions.values():\n self.closed_sessions.append(open_session.close_session())\n self.current_sessions = {}", "def close(self):\n PoolsManager().close_pool(self.settings)\n for session in self.sessions:\n session.close_connections()", "def close(self):\n self.session.close()", "def close(self):\n self.__session.remove()", "def close(self):\n self.__session.remove()", "def close(self):\n self.__session.remove()", "def _close_http_session(self):\n if self._api_http_session:\n try:\n self._api_http_session.close()\n except RequestException:\n pass", "def close_session(self):\n self.session.execute(QUERY_DROP_TABLE_1)\n self.session.execute(QUERY_DROP_TABLE_2)\n self.session.execute(QUERY_DROP_TABLE_3)\n self.session.shutdown()\n self.cluster.shutdown()", "def close_session(self):\n if self.sma_sid is None:\n return\n yield from self._fetch_json(URL_LOGOUT, {})\n self.sma_sid = None", "def close(self):\n self.sess.close()\n print(\"Current session closed!\")", "def close(self):\n self.__session.close()", "def close(self):\n self.__session.close()", "def close(self):\n self.__session.close()", "def close(self):\n self.__session.close()", "def close(self):\n self.__session.close()", "def close(self):\n self.__session.close()", "def close_session(self):\n self.driver.close()", "def close(self):\n yield from self.session.close()", "def close(self):\n self.session.close(SessionCloseErrorCode.SESSION_DIED)", "def close_db_session(self, session):\r\n session.close()", "def disable_session_management(self):\n self._session_management = DISABLED\n self.close_sessions()" ]
[ "0.8069694", "0.7919251", "0.78704804", "0.7759464", "0.7459878", "0.72732544", "0.7217262", "0.70671594", "0.7067076", "0.70469946", "0.70124066", "0.700974", "0.6959029", "0.6959029", "0.6959029", "0.6921884", "0.6890681", "0.68874145", "0.6872762", "0.68708336", "0.68708336", "0.68708336", "0.68708336", "0.68708336", "0.68708336", "0.6859463", "0.68456125", "0.68391544", "0.6790817", "0.6731055" ]
0.7968633
1
Pass through to provider get_catalog_query_session
def get_catalog_query_session(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.ResourceManager.get_resource_lookup_session_manager_template
    return self._provider_manager.get_catalog_query_session(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_catalog_query_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_query_session(self):\n raise Unimplemented()", "def get_catalog_search_session(self):\n raise Unimplemented()", "def get_catalog_query_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def get_catalog_session(self):\n raise Unimplemented()", "def get_catalog_lookup_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_lookup_session(self):\n raise Unimplemented()", "def get_catalog_lookup_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_lookup_session(*args, **kwargs)", "def get_catalog_admin_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def getSession():\n return call(\"getSession\")", "def get_book_query_session(self):\n raise Unimplemented()", "def set_session_query_tag(self) -> None:\n if self.service_connection.queryTag:\n self.engine.execute(\n SNOWFLAKE_SESSION_TAG_QUERY.format(\n query_tag=self.service_connection.queryTag\n )\n )", "def session(self):", "def get_catalog_admin_session(self):\n raise Unimplemented()", "def get_catalog_search_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def get_catalog_admin_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_admin_session(*args, **kwargs)", "def session(get_session):\n return get_session()", "def get_catalog_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def get_comment_query_session(self):\n raise Unimplemented()", "def get_catalog_lookup_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def get_catalog_query(self):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.get_bin_query_template\n return self._get_provider_session('catalog_query_session').get_catalog_query()", "def session_for_read():\n return enginefacade.reader.using(_CONTEXT)", "def session(self):\n return self.session_store.get_session()", "def session(self):\n return session", "def session(self):\n\t\treturn self._session", "def getSessionStore(self):\n pass", "def _get_provider_session(self, session_name, proxy=None):\n agent_key = self._get_agent_key(proxy)\n if session_name in self._provider_sessions[agent_key]:\n return self._provider_sessions[agent_key][session_name]\n else:\n session = self._instantiate_session('get_' + session_name, self._proxy)\n self._set_catalog_view(session)\n if self._session_management != DISABLED:\n self._provider_sessions[agent_key][session_name] = session\n return session", "def get_book_query_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def get_session_info():\n query = {\"type\": \"op\", \"cmd\": \"<show><session><info></info></session></show>\"}\n\n return __proxy__[\"panos.call\"](query)", "def get_book_search_session(self):\n raise Unimplemented()" ]
[ "0.83558625", "0.8204912", "0.7239545", "0.7182094", "0.7030906", "0.6998137", "0.6978501", "0.68319386", "0.654379", "0.6536579", "0.6507497", "0.6347546", "0.6346573", "0.63335603", "0.62351453", "0.6196124", "0.6195246", "0.6028609", "0.59862524", "0.5964894", "0.5946849", "0.5916542", "0.5872382", "0.5776315", "0.5734089", "0.5705004", "0.5655197", "0.5654965", "0.5649617", "0.5647797" ]
0.8260475
1
Pass through to provider get_catalog_admin_session
def get_catalog_admin_session(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.ResourceManager.get_resource_lookup_session_manager_template
    return self._provider_manager.get_catalog_admin_session(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_catalog_admin_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_admin_session(self):\n raise Unimplemented()", "def get_catalog_admin_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def get_catalog_session(self):\n raise Unimplemented()", "def get_book_admin_session(self):\n raise Unimplemented()", "def get_catalog_query_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_query_session(*args, **kwargs)", "def get_catalog_lookup_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_lookup_session(*args, **kwargs)", "def getSession():\n return call(\"getSession\")", "def get_catalog_query_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def session(self):", "def get_catalog_search_session(self):\n raise Unimplemented()", "def setup_admin_session(self, user):\n self.session['ad'] = user.get().key.urlsafe()", "def get_catalog_lookup_session(self):\n raise Unimplemented()", "def authenticate_admin():\n\n print request.json['session_id']\n username = _authenticate_admin_from_session(request)\n\n if username:\n if username == 'local_user':\n return Response(\"local_user\", 200)\n else:\n return Response(status=200)\n\n else:\n return Response('Bad or missing session id.', status=401)", "def get_comment_admin_session(self):\n raise Unimplemented()", "def get_catalog_query_session(self):\n raise Unimplemented()", "def _get_session():\n api_version = \"1.0\"\n originator = \"salt_cloud_{}_driver\".format(__virtualname__)\n url = config.get_cloud_config_value(\n \"url\", get_configured_provider(), __opts__, search_global=False\n )\n user = config.get_cloud_config_value(\n \"user\", get_configured_provider(), __opts__, search_global=False\n )\n password = config.get_cloud_config_value(\n \"password\", get_configured_provider(), __opts__, search_global=False\n )\n ignore_ssl = config.get_cloud_config_value(\n \"ignore_ssl\",\n get_configured_provider(),\n __opts__,\n default=False,\n search_global=False,\n )\n try:\n session = XenAPI.Session(url, ignore_ssl=ignore_ssl)\n log.debug(\n \"url: %s user: %s password: %s, originator: %s\",\n url,\n user,\n \"XXX-pw-redacted-XXX\",\n originator,\n )\n session.xenapi.login_with_password(user, password, api_version, originator)\n except XenAPI.Failure as ex:\n pool_master_addr = str(ex.__dict__[\"details\"][1])\n slash_parts = url.split(\"/\")\n new_url = \"/\".join(slash_parts[:2]) + \"/\" + pool_master_addr\n session = XenAPI.Session(new_url)\n log.debug(\n \"session is -> url: %s user: %s password: %s, originator:%s\",\n new_url,\n user,\n \"XXX-pw-redacted-XXX\",\n originator,\n )\n session.xenapi.login_with_password(user, password, api_version, originator)\n return session", "def session(self):\n return self.session_store.get_session()", "def get_catalog_lookup_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def session(get_session):\n return get_session()", "def get_catalog_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def getSessionStore(self):\n pass", "def getSession(self):\n session = app.settings.cherrypy.session.get(self.session)\n return session", "def get_book_admin_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def on_session_start(self, session, logger):\n cli_service = CliServiceImpl(session, self.enable_mode, logger)\n cli_service.send_command(\n \"enable-admin\",\n action_map={\n r\"Password\": lambda session, logger: session.send_line(\n self.enable_password, logger\n )\n },\n )\n cli_service.send_command(\"environment no more\")", "def _fetch_herd_session():\n session = requests.Session()\n session.auth = (ADMIN_USERNAME, ADMIN_PASS)\n session.headers.update(HERD_HEADERS)\n\n return session", "def get_type_admin_session(self):\n pass\n if not self.supports_type_admin():\n raise Unimplemented()\n try:\n from . import sessions\n except ImportError:\n raise # OperationFailed()\n try:\n session = sessions.TypeAdminSession()\n except AttributeError:\n raise # OperationFailed()\n return session", "def session(self):\n return session", "def get_session(self):\n return self.session", "def getAdmin():" ]
[ "0.8437221", "0.83185905", "0.7339218", "0.6643529", "0.64041466", "0.63769376", "0.6330563", "0.6111108", "0.60221255", "0.60099477", "0.60082084", "0.60075504", "0.600136", "0.599006", "0.5966561", "0.59342504", "0.59141946", "0.58589745", "0.5848066", "0.57931256", "0.57871455", "0.5739954", "0.5679039", "0.5667054", "0.55999285", "0.55702955", "0.5529086", "0.551974", "0.551917", "0.5491679" ]
0.8414123
1
Pass through to provider get_catalog_hierarchy_session
def get_catalog_hierarchy_session(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.ResourceManager.get_resource_lookup_session_manager_template
    return self._provider_manager.get_catalog_hierarchy_session(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_catalog_hierarchy_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_hierarchy_session(self):\n raise Unimplemented()", "def get_catalog_hierarchy_design_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_hierarchy_design_session(*args, **kwargs)", "def get_catalog_hierarchy_design_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_hierarchy_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def get_catalog_hierarchy_design_session(self):\n raise Unimplemented()", "def get_book_hierarchy_session(self):\n raise Unimplemented()", "def get_catalog_hierarchy_design_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def get_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy()", "def get_catalog_lookup_session(self):\n raise Unimplemented()", "def create_catalog_hierarchy(self, *args, **kwargs):\n # Patched in by [email protected], Jul 23, 2014, added by birdland to template on Aug 8, 2014\n # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead\n # Will not return an actual object, just JSON\n # since a BankHierarchy does not seem to be an OSID thing.\n return self._get_provider_session('catalog_hierarchy_design_session').create_catalog_hierarchy(*args, **kwargs)", "def get_catalog_lookup_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_session(self):\n raise Unimplemented()", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def get_catalog_lookup_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_lookup_session(*args, **kwargs)", "def get_book_hierarchy_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def get_book_hierarchy_design_session(self):\n raise Unimplemented()", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def get_catalog_hierarchy_id(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy_id\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy_id()", "def supports_catalog_hierarchy(self):\n return False", "def get_catalog_search_session(self):\n raise Unimplemented()", "def get_catalog_admin_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_nodes(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_nodes\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_nodes(*args, **kwargs)", "def get_catalog_query_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_admin_session(self):\n raise Unimplemented()", "def get_catalog_query_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_query_session(*args, **kwargs)", "def get_catalog_query_session(self):\n raise Unimplemented()", "def getHierarchies():", "def getHierarchies():", "def get_book_hierarchy_design_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()" ]
[ "0.86353874", "0.8420382", "0.7561283", "0.73959595", "0.73737717", "0.7040935", "0.6796574", "0.65512466", "0.65199095", "0.6479028", "0.63418096", "0.6297986", "0.6224188", "0.6153219", "0.60889125", "0.596233", "0.59440124", "0.5941705", "0.59115237", "0.5806202", "0.57967556", "0.57717043", "0.5718536", "0.5695406", "0.5608774", "0.5600208", "0.5595881", "0.55374974", "0.55374974", "0.5526359" ]
0.84561896
1
Pass through to provider get_catalog_hierarchy_design_session
def get_catalog_hierarchy_design_session(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.ResourceManager.get_resource_lookup_session_manager_template
    return self._provider_manager.get_catalog_hierarchy_design_session(*args, **kwargs)
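A hedged usage sketch for this pass-through. `mgr` and `root_catalog_id` are hypothetical names, and the design-session method shown is inferred from the OSID *HierarchyDesignSession pattern rather than confirmed by this file.

# Sketch only: `mgr` is assumed to be an already-configured cataloging
# manager exposing the pass-throughs shown in this dataset.
if mgr.supports_catalog_hierarchy_design():
    design_session = mgr.get_catalog_hierarchy_design_session()
    # add_root_catalog is an assumption, inferred from the bin-hierarchy pattern:
    design_session.add_root_catalog(root_catalog_id)  # hypothetical Id value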
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_catalog_hierarchy_design_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_hierarchy_design_session(self):\n raise Unimplemented()", "def get_catalog_hierarchy_design_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_hierarchy_session(self):\n raise Unimplemented()", "def get_book_hierarchy_design_session(self):\n raise Unimplemented()", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_hierarchy_session(*args, **kwargs)", "def get_book_hierarchy_design_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def get_book_hierarchy_session(self):\n raise Unimplemented()", "def supports_catalog_hierarchy_design(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy_design()", "def get_catalog_hierarchy_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def supports_catalog_hierarchy_design(self):\n return False", "def create_catalog_hierarchy(self, *args, **kwargs):\n # Patched in by [email protected], Jul 23, 2014, added by birdland to template on Aug 8, 2014\n # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead\n # Will not return an actual object, just JSON\n # since a BankHierarchy does not seem to be an OSID thing.\n return self._get_provider_session('catalog_hierarchy_design_session').create_catalog_hierarchy(*args, **kwargs)", "def get_catalog_session(self):\n raise Unimplemented()", "def get_designs(self):", "def get_catalog_lookup_session(self):\n raise Unimplemented()", "def supports_catalog_hierarchy(self):\n return False", "def get_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy()", "def create_hierarchy(self):\n\t\tpass", "def get_catalog_admin_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def get_catalog_search_session(self):\n raise Unimplemented()", "def delete_catalog_hierarchy(self, *args, **kwargs):\n # Patched in by [email protected], Jul 23, 2014, added by birdland to template on Aug 8, 2014\n # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead\n # Will not return an actual object, just JSON\n # since a BankHierarchy does not seem to be an OSID thing.\n return self._get_provider_session('catalog_hierarchy_design_session').delete_catalog_hierarchy(*args, **kwargs)", "def get_catalog_admin_session(self):\n raise Unimplemented()", "def 
get_catalog_lookup_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def getHierarchy(unique_name):", "def getHierarchy(unique_name):", "def getHierarchy(unique_name):", "def get_catalog_query_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()" ]
[ "0.87507206", "0.860315", "0.7749623", "0.7638858", "0.760503", "0.74266785", "0.7085852", "0.657221", "0.6267638", "0.6251328", "0.6228877", "0.6203119", "0.61071116", "0.56198263", "0.5512792", "0.54343414", "0.5387622", "0.53685737", "0.53505504", "0.5345083", "0.53332824", "0.5266366", "0.5256157", "0.5249996", "0.5248756", "0.5233061", "0.5233061", "0.5233061", "0.5224613", "0.521297" ]
0.8633038
1
Pass through to provider CatalogLookupSession.can_lookup_catalogs
def can_lookup_catalogs(self):
    # Implemented from kitosid template for -
    # osid.resource.BinLookupSession.can_lookup_bins_template
    return self._get_provider_session('catalog_lookup_session').can_lookup_catalogs()
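This authorization hint is typically consumed as a guard before attempting lookups. A minimal sketch, assuming `svc` is the service-layer object exposing these pass-throughs; the exception import path is an assumption.

from dlkit.abstract_osid.osid.errors import PermissionDenied  # import path is an assumption

if not svc.can_lookup_catalogs():
    raise PermissionDenied('catalog lookup is not authorized for this agent')
catalogs = svc.get_catalogs()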
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def supports_catalog_lookup(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_lookup()", "def supports_catalog_lookup(self):\n return False", "def can_lookup_books(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.can_lookup_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_lookup_catalogs()\n return True", "def can_search_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.can_search_bins_template\n return self._get_provider_session('catalog_query_session').can_search_catalogs()", "def can_lookup_families(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.can_lookup_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_lookup_catalogs()\n return True", "def supports_catalog_query(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_query()", "def supports_catalog(self):\n return False", "def supports_catalog_search(self):\n return False", "def can_create_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_create_bins\n return self._get_provider_session('catalog_admin_session').can_create_catalogs()", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def catalog_exists(self, args):\n catalog = self.server.connect_ermrest(self.id)\n pp(catalog.exists())", "def supports_catalog_admin(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_admin()", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def supports_catalog_query(self):\n return False", "def can_update_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_update_bins\n return self._get_provider_session('catalog_admin_session').can_update_catalogs()", "def supports_cataloging_rules(self):\n return False", "def getCatalogs():", "def get_catalog_lookup_session(self):\n raise Unimplemented()", "def get_catalog_lookup_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def can_manage_catalog_aliases(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceAdminSession.can_manage_resource_aliases_template\n return self._get_provider_session('catalog_admin_session').can_manage_catalog_aliases()", "def get_catalogs_by_provider(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_provider\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_provider(*args, **kwargs)\n cat_list = []\n for cat 
in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def has_catalogue(self):\n return self.catalogue is not None", "def get_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_template\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs()\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def can_create_books(self):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_create_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_create_catalogs()\n return True", "def get_catalog_lookup_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_lookup_session(*args, **kwargs)", "def get_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bin\n return Catalog(\n self._provider_manager,\n self._get_provider_session('catalog_lookup_session').get_catalog(*args, **kwargs),\n self._runtime,\n self._proxy)", "def test_get_hyperflex_app_catalog_list(self):\n pass", "def can_lookup_comments(self):\n # Implemented from template for\n # osid.resource.ResourceLookupSession.can_lookup_resources\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def checkCatalogs():\n url = CHECKBASE % 'catalogs'\n catalogs = []\n try:\n fh = getURLHandle(url)\n #fh = urllib2.urlopen(url)\n data = fh.read()\n dom = minidom.parseString(data)\n fh.close()\n catalog_elements = dom.getElementsByTagName('Catalog')\n for catel in catalog_elements:\n if catel.firstChild is None:\n continue\n catalog = catel.firstChild.data.strip()\n if len(catalog):\n catalogs.append(str(catalog))\n except:\n raise Exception,\"Could not open %s to search for list of catalogs\" % url\n return catalogs", "def supports_catalog_admin(self):\n return False" ]
[ "0.82303095", "0.8136246", "0.795465", "0.76487786", "0.7565057", "0.71672946", "0.7157921", "0.67965895", "0.6784433", "0.6692012", "0.65605557", "0.65363663", "0.65017694", "0.63277256", "0.63157886", "0.6307392", "0.6290821", "0.62715584", "0.6236082", "0.62267435", "0.6213326", "0.6209004", "0.60853326", "0.607212", "0.605098", "0.59330016", "0.5923016", "0.5918719", "0.59151405", "0.5903468" ]
0.8945682
0
Pass through to provider CatalogLookupSession.use_comparative_catalog_view
def use_comparative_catalog_view(self):
    self._catalog_view = COMPARATIVE
    # self._get_provider_session('catalog_lookup_session') # To make sure the session is tracked
    for session in self._get_provider_sessions():
        try:
            session.use_comparative_catalog_view()
        except AttributeError:
            pass
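The method sets the wrapper's own view flag and then best-effort broadcasts the call to every provider session already instantiated, swallowing AttributeError for session types that do not expose the view switch. The same fan-out can be written once as a generic helper; a sketch (the helper is hypothetical, not part of dlkit):

def _broadcast_view(sessions, method_name):
    # Propagate a view setting to every tracked session, ignoring
    # session types that do not implement the method.
    for session in sessions:
        try:
            getattr(session, method_name)()
        except AttributeError:
            pass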
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def use_comparative_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def use_comparative_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def use_comparative_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def use_comparative_family_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def use_comparative_family_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def _set_catalog_view(self, session):\n if self._catalog_view == COMPARATIVE:\n try:\n session.use_comparative_catalog_view()\n except AttributeError:\n pass\n else:\n try:\n session.use_plenary_catalog_view()\n except AttributeError:\n pass", "def use_comparative_relationship_view(self):\n # Implemented from template for\n # osid.resource.ResourceLookupSession.use_comparative_resource_view\n self._use_comparative_object_view()", "def catalog_compare(env, compare, against):\n envs = environments()\n check_env(env, envs)\n\n if app.config['ENABLE_CATALOG']:\n compare_cat = get_or_abort(puppetdb.catalog,\n node=compare)\n against_cat = get_or_abort(puppetdb.catalog,\n node=against)\n\n return render_template('catalog_compare.html',\n compare=compare_cat,\n against=against_cat,\n envs=envs,\n current_env=env)\n else:\n log.warn('Access to catalog interface disabled by administrator')\n abort(403)", "def use_comparative_comment_view(self):\n # Implemented from template for\n # osid.resource.ResourceLookupSession.use_comparative_resource_view\n self._use_comparative_object_view()", "def _set_catalog_view(self, session):\n if self._catalog_view == FEDERATED:\n try:\n session.use_federated_catalog_view()\n except AttributeError:\n pass\n else:\n try:\n session.use_isolated_catalog_view()\n except AttributeError:\n pass", "def use_plenary_catalog_view(self):\n self._catalog_view = PLENARY\n # self._get_provider_session('catalog_lookup_session') # To make sure the session is tracked\n for session in self._get_provider_sessions():\n try:\n session.use_plenary_catalog_view()\n except AttributeError:\n pass", "def supports_catalog_lookup(self):\n return False", "def supports_catalog_search(self):\n return False", "def supports_catalog_query(self):\n return False", "def __cmp__(self, other_view):\n # __cmp__ functions return -1 if we are less than schema\n # 0 if we are the same as schema\n # 1 if we are greater than schema\n # If our 'compare' method returns anything there are differences\n if self.compare(other_view):\n return 1\n else:\n return 0", "def can_lookup_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.can_lookup_bins_template\n return 
self._get_provider_session('catalog_lookup_session').can_lookup_catalogs()", "def _set_operable_view(self, session):\n for obj_name in self._operable_views:\n if self._operable_views[obj_name] == ACTIVE:\n try:\n getattr(session, 'use_active_' + obj_name + '_view')()\n except AttributeError:\n pass\n else:\n try:\n getattr(session, 'use_any_status_' + obj_name + '_view')()\n except AttributeError:\n pass", "def supports_catalog_lookup(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_lookup()", "def supports_catalog(self):\n return False", "def supports_catalog_query(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_query()", "def get_catalog_lookup_session(self):\n raise Unimplemented()", "def get_oracle(verbosity, resultset, providerversion):\n try:\n response = requests.get(ORACLEAPIURL)\n if verbosity:\n print(response.status_code)\n if response.status_code == 200:\n cidrdata = json.loads(response.content)\n providerversion[\"ORACLE\"] = cidrdata[\"last_updated_timestamp\"]\n for i in range(0, len(cidrdata[\"regions\"])):\n for j in range(0, len(cidrdata[\"regions\"][i][\"cidrs\"])):\n if cidrdata[\"regions\"][i][\"cidrs\"][j][\"cidr\"] not in resultset:\n resultset[cidrdata[\"regions\"][i][\"cidrs\"][j][\"cidr\"]] = \"Oracle\"\n\n except Exception as get_exception:\n print(\"Exception\")\n print(get_exception)\n\n return resultset, providerversion", "def compare(self, *args):\n return _ida_hexrays.lvar_locator_t_compare(self, *args)", "def _set_object_view(self, session):\n for obj_name in self._object_views:\n if self._object_views[obj_name] == PLENARY:\n try:\n getattr(session, 'use_plenary_' + obj_name + '_view')()\n except AttributeError:\n pass\n else:\n try:\n getattr(session, 'use_comparative_' + obj_name + '_view')()\n except AttributeError:\n pass", "def compare(self, other_view):\n response = []\n if self.name != other_view.name:\n response.append('DIFF: View names:')\n response.append('%s and %s' % (self.name, other_view.name))\n # Compare columns\n tv_column_names = [col[1] for col in self.columns]\n ov_column_names = [col[1] for col in other_view.columns]\n for column in tv_column_names:\n if column not in ov_column_names:\n response.append('DIFF: Column %s' % column)\n response.append('not in %s' % other_view.name)\n for column in ov_column_names:\n if column not in tv_column_names:\n response.append('DIFF: Column %s' % column)\n response.append('not in %s' % self.name)\n return response", "def compare(self, *args):\n return _ida_hexrays.vdloc_t_compare(self, *args)", "def can_search_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.can_search_bins_template\n return self._get_provider_session('catalog_query_session').can_search_catalogs()", "def use_isolated_book_view(self):\n # Implemented from template for\n # osid.resource.ResourceLookupSession.use_isolated_bin_view\n self._use_isolated_catalog_view()", "def use_isolated_book_view(self):\n # Implemented from template for\n # osid.resource.ResourceLookupSession.use_isolated_bin_view\n self._use_isolated_catalog_view()", "def crossmatch_other_catalog(candid, ra, dec, catalog_name, radius_arcsec=None):\n pdf = pd.DataFrame(\n {\n 'ra': ra.values,\n 'dec': dec.values,\n 'candid': range(len(ra))\n }\n )\n\n curdir = os.path.dirname(os.path.abspath(__file__))\n if 
catalog_name.values[0] == 'gcvs':\n catalog = curdir + '/data/catalogs/gcvs.parquet'\n ra2, dec2, type2 = extract_gcvs(catalog)\n elif catalog_name.values[0] == 'vsx':\n catalog = curdir + '/data/catalogs/vsx.parquet'\n ra2, dec2, type2 = extract_vsx(catalog)\n elif catalog_name.values[0] == '3hsp':\n catalog = curdir + '/data/catalogs/3hsp.csv'\n ra2, dec2, type2 = extract_3hsp(catalog)\n elif catalog_name.values[0] == '4lac':\n catalog_h = curdir + '/data/catalogs/table-4LAC-DR3-h.fits'\n catalog_l = curdir + '/data/catalogs/table-4LAC-DR3-l.fits'\n ra2, dec2, type2 = extract_4lac(catalog_h, catalog_l)\n\n # create catalogs\n catalog_ztf = SkyCoord(\n ra=np.array(ra.values, dtype=float) * u.degree,\n dec=np.array(dec.values, dtype=float) * u.degree\n )\n\n catalog_other = SkyCoord(\n ra=np.array(ra2.values, dtype=float) * u.degree,\n dec=np.array(dec2.values, dtype=float) * u.degree\n )\n\n pdf_merge, mask, idx2 = cross_match_astropy(\n pdf, catalog_ztf, catalog_other, radius_arcsec=radius_arcsec\n )\n\n pdf_merge['Type'] = 'Unknown'\n pdf_merge.loc[mask, 'Type'] = [\n str(i).strip() for i in type2.astype(str).values[idx2]\n ]\n\n return pdf_merge['Type']" ]
[ "0.78012896", "0.78012896", "0.78012896", "0.75476444", "0.75476444", "0.7364595", "0.66365737", "0.6407088", "0.6262432", "0.622518", "0.60275424", "0.5577789", "0.5465742", "0.5254421", "0.50744075", "0.506318", "0.5031475", "0.50121546", "0.49354354", "0.489478", "0.4888819", "0.48853678", "0.48701385", "0.47982553", "0.4774493", "0.47593862", "0.473611", "0.47316793", "0.47316793", "0.47242653" ]
0.86723423
0
Pass through to provider CatalogLookupSession.use_plenary_catalog_view
def use_plenary_catalog_view(self):
    self._catalog_view = PLENARY
    # self._get_provider_session('catalog_lookup_session') # To make sure the session is tracked
    for session in self._get_provider_sessions():
        try:
            session.use_plenary_catalog_view()
        except AttributeError:
            pass
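Per OSID conventions (stated here as an assumption, since this file only shows the pass-throughs), the two views differ in completeness guarantees: comparative view permits partial results, plenary view demands complete results or an error. A sketch with a hypothetical `svc` and `catalog_ids`:

svc.use_plenary_catalog_view()                     # all-or-nothing result sets
complete = svc.get_catalogs_by_ids(catalog_ids)    # raises on any missing id (assumed semantics)
svc.use_comparative_catalog_view()                 # best-effort result sets
partial = svc.get_catalogs_by_ids(catalog_ids)     # skips unresolvable ids (assumed semantics)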
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def use_plenary_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_plenary_bin_view\n self._catalog_view = PLENARY\n if self._catalog_session is not None:\n self._catalog_session.use_plenary_catalog_view()", "def use_plenary_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_plenary_bin_view\n self._catalog_view = PLENARY\n if self._catalog_session is not None:\n self._catalog_session.use_plenary_catalog_view()", "def use_plenary_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_plenary_bin_view\n self._catalog_view = PLENARY\n if self._catalog_session is not None:\n self._catalog_session.use_plenary_catalog_view()", "def use_plenary_family_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_plenary_bin_view\n self._catalog_view = PLENARY\n if self._catalog_session is not None:\n self._catalog_session.use_plenary_catalog_view()", "def use_plenary_family_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_plenary_bin_view\n self._catalog_view = PLENARY\n if self._catalog_session is not None:\n self._catalog_session.use_plenary_catalog_view()", "def use_plenary_comment_view(self):\n # Implemented from template for\n # osid.resource.ResourceLookupSession.use_plenary_resource_view\n self._use_plenary_object_view()", "def use_plenary_relationship_view(self):\n # Implemented from template for\n # osid.resource.ResourceLookupSession.use_plenary_resource_view\n self._use_plenary_object_view()", "def _set_catalog_view(self, session):\n if self._catalog_view == COMPARATIVE:\n try:\n session.use_comparative_catalog_view()\n except AttributeError:\n pass\n else:\n try:\n session.use_plenary_catalog_view()\n except AttributeError:\n pass", "def use_comparative_catalog_view(self):\n self._catalog_view = COMPARATIVE\n # self._get_provider_session('catalog_lookup_session') # To make sure the session is tracked\n for session in self._get_provider_sessions():\n try:\n session.use_comparative_catalog_view()\n except AttributeError:\n pass", "def _set_catalog_view(self, session):\n if self._catalog_view == FEDERATED:\n try:\n session.use_federated_catalog_view()\n except AttributeError:\n pass\n else:\n try:\n session.use_isolated_catalog_view()\n except AttributeError:\n pass", "def _set_object_view(self, session):\n for obj_name in self._object_views:\n if self._object_views[obj_name] == PLENARY:\n try:\n getattr(session, 'use_plenary_' + obj_name + '_view')()\n except AttributeError:\n pass\n else:\n try:\n getattr(session, 'use_comparative_' + obj_name + '_view')()\n except AttributeError:\n pass", "def supports_catalog_lookup(self):\n return False", "def supports_catalog_lookup(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_lookup()", "def show_tier1_l2vpn(**kwargs):\n proxy = kwargs['proxy']\n session_token = kwargs['sessiontoken']\n\n l2vpn_table = PrettyTable(['Display Name', 'IPSec Transport Tunnel', 'Tier-1 Gateway', 'L2VPN Service', 'Peer Address', 'Local Endpoint'])\n\n tier1_json = get_t1_json(proxy, session_token)\n tier1 = tier1_json['results']\n tier1_lst = []\n for t in tier1:\n tier1_lst.append(t['id'])\n\n for t in tier1_lst:\n l2vpn_json = get_tier1_l2vpn_services_json(proxy, session_token, t)\n if l2vpn_json['result_count'] > 0:\n l2vpn_serv = l2vpn_json['results']\n for i in l2vpn_serv:\n 
l2vpn_serv_name = i['display_name']\n l2vpn_sessions_json = get_tier1_l2vpn_json(proxy, session_token, t, l2vpn_serv_name)\n l2vpn_sessions = l2vpn_sessions_json['results']\n for l in l2vpn_sessions:\n transport = l['transport_tunnels']\n for x in transport:\n ipsec_path = x\n ipsec_vpn_json = get_tier1_l2vpn_ipsec_json(proxy, session_token, ipsec_path)\n ipsec_name = ipsec_vpn_json['display_name']\n peer_addr = ipsec_vpn_json['peer_address']\n le_json = get_tier1_vpn_le_details_json(proxy, session_token, ipsec_vpn_json['local_endpoint_path'])\n local_addr = le_json['local_address']\n\n l2vpn_table.add_row([l2vpn_serv_name, ipsec_name, t, l2vpn_serv_name, peer_addr, local_addr])\n sys.exit(l2vpn_table)", "def show_tier1_vpn_le(**kwargs):\n proxy = kwargs['proxy']\n session_token = kwargs['sessiontoken']\n tier1_json = get_t1_json(proxy, session_token)\n tier1 = tier1_json['results']\n tier1_lst = []\n for t in tier1:\n tier1_lst.append(t['id'])\n table = PrettyTable(['Name', 'Local Address', 'Tier-1 Gateway', 'IPSec Service'])\n for l in tier1_lst:\n ipsec_json = get_tier1_ipsec_vpn_services_json(proxy, session_token, l)\n if ipsec_json['result_count'] > 0:\n ipsec_serv = ipsec_json['results']\n for i in ipsec_serv:\n ipsec_serv_name = i['display_name']\n le_json = get_tier1_vpn_le_json(proxy, session_token, l, ipsec_serv_name)\n if le_json['result_count'] > 0:\n le = le_json['results']\n for v in le:\n table.add_row([v['display_name'], v['local_address'], l, ipsec_serv_name])\n else:\n pass\n else:\n pass\n sys.exit(table)", "def show_tier1_l2vpn_details(**kwargs):\n proxy = kwargs['proxy']\n session_token = kwargs['sessiontoken']\n t1g = kwargs['tier1_gateway']\n l2vpn_serv = kwargs['vpn_service']\n display_name = kwargs['display_name']\n\n l2vpn_table = PrettyTable(['Display Name', 'IPSec Transport Tunnel', 'Tier-1 Gateway', 'L2VPN Service', 'Peer Address', 'Local Endpoint'])\n l2vpn_table.title = 'L2VPN Session Details'\n ipsec_tun_table = PrettyTable(['Display Name', 'BGP Address CIDR', 'Authentication Mode'])\n ipsec_tun_table.title = 'L2VPN IPSec Transport Tunnel Details'\n ike_table = PrettyTable(['Profile Name', 'IKE Version', 'Digest Algoritms', 'Encryption Algorithms', 'Diffie-Helman Groups'])\n ike_table.title = 'IKE Profile Details'\n ipsec_table = PrettyTable(['Profile Name', 'Digest Algorithm', 'Encryption Algorithm', 'Diffie-Helman Groups', 'PFS Status'])\n ipsec_table.title = 'IPSec Tunnel Profile Details'\n dpd_table = PrettyTable(['Profile Name', 'Probe Mode', 'Probe Interval', 'Retry Count'])\n dpd_table.title = 'Dead Peer Detection Profile Details'\n\n l2vpn_json = get_tier1_l2vpn_details_json(proxy, session_token, t1g, l2vpn_serv, display_name)\n transport = l2vpn_json['transport_tunnels']\n for x in transport:\n ipsec_path = x\n ipsec_vpn_json = get_tier1_l2vpn_ipsec_json(proxy, session_token, ipsec_path)\n ike_path = ipsec_vpn_json['ike_profile_path']\n tun_path = ipsec_vpn_json['tunnel_profile_path']\n dpd_path = ipsec_vpn_json['dpd_profile_path']\n le_path = ipsec_vpn_json['local_endpoint_path']\n\n ip_subnets = ipsec_vpn_json['tunnel_interfaces'][0]['ip_subnets']\n for i in ip_subnets:\n ip = i['ip_addresses'][0]\n prefix = i['prefix_length']\n bgp_cidr = f\"{ip}/{prefix}\"\n\n le_json = get_tier1_vpn_le_details_json(proxy, session_token, le_path)\n local_addr = le_json['local_address']\n\n l2vpn_table.add_row([l2vpn_json['display_name'], ipsec_vpn_json['display_name'], t1g, l2vpn_serv, ipsec_vpn_json['peer_address'], local_addr])\n 
ipsec_tun_table.add_row([ipsec_vpn_json['display_name'], bgp_cidr, ipsec_vpn_json['authentication_mode']])\n\n ike_json = get_vpn_ike_profile_details_json(proxy, session_token, ike_path)\n ike_table.add_row([ike_json['display_name'], ike_json['ike_version'], ike_json['digest_algorithms'], ike_json['encryption_algorithms'], ike_json['dh_groups']])\n\n ipsec_json = get_vpn_ipsec_profile_details_json(proxy, session_token, tun_path)\n ipsec_table.add_row([ipsec_json['display_name'], ipsec_json['digest_algorithms'], ipsec_json['encryption_algorithms'], ipsec_json['dh_groups'], ipsec_json['enable_perfect_forward_secrecy']])\n\n dpd_json = get_vpn_dpd_profile_details_json(proxy, session_token, dpd_path)\n dpd_table.add_row([dpd_json['display_name'], dpd_json['dpd_probe_mode'], dpd_json['dpd_probe_interval'], dpd_json['retry_count']])\n\n print(l2vpn_table)\n print(ipsec_tun_table)\n print(ike_table)\n print(ipsec_table)\n print(dpd_table)\n sys.exit(0)", "def catalog_node(env, node_name):\n envs = environments()\n check_env(env, envs)\n\n if app.config['ENABLE_CATALOG']:\n catalog = get_or_abort(puppetdb.catalog,\n node=node_name)\n return render_template('catalog.html',\n catalog=catalog,\n envs=envs,\n current_env=env)\n else:\n log.warn('Access to catalog interface disabled by administrator')\n abort(403)", "def _init_catalog_node(catalog, pid, lid=None, rid=None):\n if pid not in catalog: catalog[pid] = {'_langs': {}}\n if lid is not None:\n if lid not in catalog[pid]['_langs']: catalog[pid]['_langs'][lid] = {'_res': {}, 'language': {}}\n if lid is not None and rid is not None:\n if rid not in catalog[pid]['_langs'][lid]['_res']: catalog[pid]['_langs'][lid]['_res'][rid] = {}", "def network_views():\n return 'networkview?'", "def vplb():\r\n section = document.add_section()\r\n new_width, new_height = section.page_height, section.page_width\r\n section.orientation = WD_ORIENT.LANDSCAPE\r\n section.page_width = 7772400\r\n section.page_height = 10058400\r\n document.add_heading('Virtual Proxy Load Balancing details', 1)\r\n virtualproxylbnodes = get_qlik_sense.get_vploadbalancers()\r\n virtualproxylb_metrics = ['node', 'load balance nodes']\r\n num_of_virtualproxyslb = len(virtualproxylbnodes)\r\n num_of_virtualproxylb_metrics = len(virtualproxylb_metrics)\r\n table = document.add_table(rows=num_of_virtualproxyslb+1, cols=2)\r\n table.style = 'Grid Table 1 Light Accent 1'\r\n row = table.rows[0]\r\n for item in range(0, len(virtualproxylb_metrics)):\r\n row.cells[item].text = virtualproxylb_metrics[item]\r\n\r\n for item in range(num_of_virtualproxyslb):\r\n row = table.rows[item+1]\r\n row.cells[0].text = str(virtualproxylbnodes[item]['type'])\r\n row.cells[1].text = str(', '.join(virtualproxylbnodes[item]['items']))\r\n\r\n document.add_page_break()", "def showDecidePage(request, openid_request):\n trust_root = openid_request.trust_root\n return_to = openid_request.return_to\n\n try:\n # Stringify because template's ifequal can only compare to strings.\n trust_root_valid = verifyReturnTo(trust_root, return_to) \\\n and \"Valid\" or \"Invalid\"\n except DiscoveryFailure as err:\n trust_root_valid = \"DISCOVERY_FAILED\"\n except HTTPFetchingError as err:\n trust_root_valid = \"Unreachable\"\n\n pape_request = pape.Request.fromOpenIDRequest(openid_request)\n\n return render_to_response(\n 'server/trust.html', {\n 'trust_root': trust_root,\n 'trust_handler_url': getViewURL(request, processTrustResult),\n 'trust_root_valid': trust_root_valid,\n 'pape_request': pape_request,\n },\n 
context_instance=RequestContext(request))", "def init_view(self):\n self.view_map = self.ctx.clientmap", "def use_comparative_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def use_comparative_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def use_comparative_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def supports_catalog_query(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_query()", "def get_catalog_lookup_session(self):\n raise Unimplemented()", "def init_catalog():\n return controller.init_catalog()", "def supports_catalog_search(self):\n return False", "def can_lookup_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.can_lookup_bins_template\n return self._get_provider_session('catalog_lookup_session').can_lookup_catalogs()" ]
[ "0.78021324", "0.78021324", "0.78021324", "0.77686775", "0.77686775", "0.65821373", "0.6460522", "0.64184636", "0.54385746", "0.5225436", "0.5125031", "0.49971545", "0.4929327", "0.48092023", "0.46781814", "0.46667302", "0.4628395", "0.4622144", "0.45594007", "0.45330468", "0.4529744", "0.45188278", "0.45172703", "0.45172703", "0.45172703", "0.44999433", "0.4495625", "0.44788575", "0.44691694", "0.44303328" ]
0.8617257
0
Pass through to provider CatalogLookupSession.get_catalogs_by_ids
def get_catalogs_by_ids(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinLookupSession.get_bins_by_ids
    catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_ids(*args, **kwargs)
    cat_list = []
    for cat in catalogs:
        cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))
    return CatalogList(cat_list)
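A hedged usage sketch: the `Id` import path and the Id strings are assumptions, and iteration over the returned CatalogList follows the usual OsidList protocol (also an assumption here).

from dlkit.primordium.id.primitives import Id  # import path is an assumption

catalog_ids = [Id('catalog.Catalog%3A111%40ODL.MIT.EDU'),   # hypothetical ids
               Id('catalog.Catalog%3A222%40ODL.MIT.EDU')]
catalogs = svc.get_catalogs_by_ids(catalog_ids)
while catalogs.has_next():
    print(catalogs.get_next_catalog().display_name.text)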
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def query_ids_catalogs(self, source_ids, *, catalogs=__ALL_STRING, row_limit=DEFAULT_ROW_LIMIT,\n get_query_payload=False, cache=True, verbose=False):\n sanitized_catalogs = self._sanitize_input_catalogs(catalogs)\n sanitized_row_limit = self._sanitize_input_row_limit(row_limit)\n sanitized_source_ids = self._sanitize_input_ids(source_ids)\n\n query_result = {}\n self._store_query_result(query_result=query_result, names=sanitized_catalogs, json=self._get_catalogs_json(),\n row_limit=sanitized_row_limit, get_query_payload=get_query_payload, cache=cache,\n ids=sanitized_source_ids, verbose=verbose)\n\n if get_query_payload:\n return query_result\n\n return commons.TableList(query_result)", "def get_catalogs_by_provider(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_provider\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_provider(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_items_for_catalog(catalog_id):\n pass", "def get_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_template\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs()\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_locations_by_ids(self, id_list):", "def get_catalogs_by_query(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.get_bins_by_query_template\n return self._get_provider_session('catalog_query_session').get_catalogs_by_query(*args, **kwargs)", "def can_lookup_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.can_lookup_bins_template\n return self._get_provider_session('catalog_lookup_session').can_lookup_catalogs()", "def get_child_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalog_ids(*args, **kwargs)", "def get_all(self, *ids):", "def serve_recos(ids, ref_catalog):\r\n desc_list = []\r\n for desc_id in ids:\r\n desc_list.append(ref_catalog[ref_catalog['id'] == desc_id].iloc[0]['description'])\r\n return desc_list", "def getCatalogs():", "def read_many_by_id(self, ids, fields=None):\n assert isinstance(ids, collections.Iterable), \"Expected a list: %s\" % ids\n hasid = fields is None or \"id\" in fields\n res = self.read(self.table.c.id.in_(ids), fields if hasid else fields + [\"id\"])\n index = dict([(x[\"id\"], x) for x in res])\n res = []\n for id in ids:\n if id not in index:\n raise PersistenceException(\"Id %s was not found in table %s\" % (id, self.table.name))\n res.append(index[id])\n if not hasid:\n for el in res:\n del el[\"id\"]\n return res", "def get_books_by_ids(self, book_ids):\n # Implemented from template for\n # osid.resource.BinLookupSession.get_bins_by_ids_template\n # NOTE: This implementation currently ignores plenary view\n # Also, this should be implemented to use get_Book() instead of direct to database\n if self._catalog_session is not None:\n return self._catalog_session.get_catalogs_by_ids(catalog_ids=book_ids)\n catalog_id_list = []\n for i in book_ids:\n 
catalog_id_list.append(ObjectId(i.get_identifier()))\n collection = JSONClientValidated('commenting',\n collection='Book',\n runtime=self._runtime)\n result = collection.find({'_id': {'$in': catalog_id_list}}).sort('_id', DESCENDING)\n\n return objects.BookList(result, runtime=self._runtime, proxy=self._proxy)", "def get_objects(self, ids, **args):\n args[\"ids\"] = \",\".join(ids)\n return self.request(\"\", args)", "def get_objects(self,ids):\n return model.objects.filter(pk__in=ids).order_by(search_field)", "def findLocationByIds(cls, ids):\r\n for id in ids:\r\n return cls.query.filter_by(id=id[id])", "def get_objects(self, ids, **args):\n args[\"ids\"] = \",\".join(ids)\n return self.request(self.version + \"/\", args)", "async def read_many_by_id(\n self,\n ids: List[str],\n *,\n projection: Optional[Dict[str, Any]] = DEFAULT_PROJECTION,\n limit: int = DEFAULT_LIMIT,\n offset: int = DEFAULT_OFFSET,\n session: Optional[Any] = DEFAULT_SESSION,\n **kwargs: Any,\n ) -> List[Dict[str, Any]]:\n return await self._database.read_many_by_id(\n self.name,\n ids=ids,\n projection=projection,\n limit=limit,\n offset=offset,\n session=session,\n **kwargs,\n )", "def get_catalog_items(id):\n\n username = login_session.get('username', None)\n catalogs = session.query(Catalog).all()\n selected_catalog = session.query(Catalog).filter_by(id=id).one()\n items = selected_catalog.items\n catalogs_display = [\n {\n 'id': catalog.id,\n 'name': catalog.name\n } for catalog in catalogs]\n items_display = [{'id': item.id, 'title': item.title} for item in items]\n items_summary = '{0} Items ({1} items)'.format(\n selected_catalog.name,\n len(items_display))\n return render_template(\n 'home.html',\n catalogs_display=catalogs_display,\n items_display=items_display,\n items_summary=items_summary,\n username=username)", "def get_catalogs_by_record_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_record_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_record_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def resolve_entities(root, info, ids: list[int], **kwargs):\n return Entity.objects.filter(id__in=ids)", "def get_ids(self) -> List[str]:", "def resolve_ids(self, identifier):\r\n\r\n return resolve_ids(identifier, self.resolvers)", "def collect_ids_from_matching_indexes(self, branch, **kwargs):\n matching_indexes = self.find_matching_course_indexes(\n branch,\n search_targets=None,\n org_target=kwargs.get('org'),\n course_keys=kwargs.get('course_keys')\n )\n\n # collect ids and then query for those\n version_guids = []\n id_version_map = defaultdict(list)\n for course_index in matching_indexes:\n version_guid = course_index['versions'][branch]\n version_guids.append(version_guid)\n id_version_map[version_guid].append(course_index)\n return version_guids, id_version_map", "def load_by_ids(cls,ids):\n if not ids or ids[0] == '':\n return None\n es = from_caches(ids) #(ids,'SuiBook') as prefixed\n notfounds = filter(lambda e:e not in es, ids)\n if len(notfounds)>0:\n es2 = dict((str(e.key().id()),e) for e in SuiBook.get_by_id(map(lambda e:int(e),notfounds)) if e)\n to_caches(es2) #to_caches(dict(),time,key_prefix='SuiBook')\n es.update(es2)\n return es", "def can_search_catalogs(self):\n # Implemented from kitosid template for -\n # 
osid.resource.BinQuerySession.can_search_bins_template\n return self._get_provider_session('catalog_query_session').can_search_catalogs()", "def get_root_catalog_ids(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalog_ids()", "def search_installation(conn, city, ids):\n\n c = conn.cursor()\n params = [city] + ids\n search_query = \"SELECT * FROM Installation T1 WHERE T1.City LIKE ? AND T1.Id IN ({})\".format(\n \",\".join([\"?\"] * len(ids)))\n\n c.execute(search_query, params)\n result = c.fetchall()\n return result", "def get(self):\n return GenericGet().get_catalogs()", "def get_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bins\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalogs(*args, **kwargs)" ]
[ "0.687608", "0.68108445", "0.6797527", "0.67103606", "0.6504811", "0.63963205", "0.62897474", "0.6222304", "0.61367583", "0.61326736", "0.60006285", "0.5967969", "0.59336644", "0.58902305", "0.58789563", "0.5856664", "0.580235", "0.57954663", "0.5789944", "0.57779837", "0.57563406", "0.5720043", "0.5717558", "0.56943977", "0.5666536", "0.5636451", "0.5603766", "0.5602923", "0.5600566", "0.5593432" ]
0.8377241
0
Pass through to provider CatalogLookupSession.get_catalogs_by_genus_type
def get_catalogs_by_genus_type(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinLookupSession.get_bins_by_genus_type
    catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_genus_type(*args, **kwargs)
    cat_list = []
    for cat in catalogs:
        cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))
    return CatalogList(cat_list)
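Genus types are osid Type primitives. A sketch with an invented Type string; the `Type` import path, iterability of the result, and property-style accessors are assumptions.

from dlkit.primordium.type.primitives import Type  # import path is an assumption

default_genus = Type('catalog.Catalog%3Adefault-catalog%40ODL.MIT.EDU')  # hypothetical
for catalog in svc.get_catalogs_by_genus_type(default_genus):  # iterability assumed
    print(catalog.display_name.text)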
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_catalogs_by_parent_genus_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_parent_genus_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_parent_genus_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_catalogs_by_record_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_record_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_record_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_books_by_genus_type(self, book_genus_type):\n # Implemented from template for\n # osid.resource.BinLookupSession.get_bins_by_genus_type_template\n # NOTE: This implementation currently ignores plenary view\n if self._catalog_session is not None:\n return self._catalog_session.get_catalogs_by_genus_type(catalog_genus_type=book_genus_type)\n collection = JSONClientValidated('commenting',\n collection='Book',\n runtime=self._runtime)\n result = collection.find({\"genusTypeId\": str(book_genus_type)}).sort('_id', DESCENDING)\n\n return objects.BookList(result, runtime=self._runtime, proxy=self._proxy)", "def get_families_by_genus_type(self, family_genus_type):\n # Implemented from template for\n # osid.resource.BinLookupSession.get_bins_by_genus_type_template\n # NOTE: This implementation currently ignores plenary view\n if self._catalog_session is not None:\n return self._catalog_session.get_catalogs_by_genus_type(catalog_genus_type=family_genus_type)\n collection = JSONClientValidated('relationship',\n collection='Family',\n runtime=self._runtime)\n result = collection.find({\"genusTypeId\": str(family_genus_type)}).sort('_id', DESCENDING)\n\n return objects.FamilyList(result, runtime=self._runtime, proxy=self._proxy)", "def get_catalogs_by_provider(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_provider\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_provider(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def getCatalogs():", "def get(self):\n return GenericGet().get_catalogs()", "def get_books_by_parent_genus_type(self, book_genus_type):\n raise errors.Unimplemented()", "def get_objects_by_type(self, *types) -> List[TgnObject]:\n if not types:\n return list(self.objects.values())\n types_l = [o.lower() for o in types]\n return [o for o in self.objects.values() if o.type.lower() in types_l]", "def get_recipes_by_types(self, recipe_type):\n if recipe_type not in self.recipe_list.keys():\n sys.stderr.write(UKN_TYP.format(recipe_type))\n raise KeyError\n return [r.name for r in self.recipe_list[recipe_type]]", "def get_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_template\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs()\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def getTypesList():\n return 
Gw2Spidy._request('types')['results']", "def get_recipes_by_types(self, recipe_type): \n\t\tfor key, val in self.recipes_list.items():\n\t\t\tif key == recipe_type:\n\t\t\t\tfor a, b in val.items():\n\t\t\t\t\tprint(str(b))", "def __getTypeListByASG( self, asg ):\r\n for (otherASG, typeList) in self.__trackASG.values():\r\n if( asg == otherASG ): \r\n return typeList\r\n return []", "def initCatalog(tipo):\n catalog = model.newCatalog(tipo)\n \n return catalog", "def repositories(self):\n return [\n self.collection(self._registered_types[key])\n for key in self._registered_types\n ]", "def initCatalog(list_type):\n catalog = model.newCatalog(list_type)\n return catalog", "def getItemsOfType(typeId):\n return Gw2Spidy._request('all-items', str(typeId))['results']", "def getCatalog(unique_name):", "def get_collections(self, pattern=\"*\", libtype=\"*\"):\n\n sql = \"\"\"SELECT collection.collection_id, collection.name, collection.doc,\n collection.type, collection.path\n FROM collection_table as collection\n WHERE name like ?\n AND type like ?\n ORDER BY collection.name\n \"\"\"\n\n cursor = self._execute(sql, (self._glob_to_sql(pattern),\n self._glob_to_sql(libtype)))\n sql_result = cursor.fetchall()\n\n return [{\"collection_id\": result[0],\n \"name\": result[1],\n \"synopsis\": result[2].split(\"\\n\")[0],\n \"type\": result[3],\n \"path\": result[4]\n } for result in sql_result]", "def refresh_types(self, type_clss):\n pass", "def get_recipes_by_type(self, recipe_type):\n if (recipe_type in self.recipe_list):\n for item in self.recipe_list[recipe_type]:\n print(item.name + \" \", end='')\n print()", "def getTypes(self):\n return self._doRequest(self.httpClient.getTypes)", "def get_catalog(self) -> Dict[str, str]:\n return self.catalog", "def catalogs(self):\n return sorted(self._catalog_comp_info_dicts.keys())", "def by_type(cls, typ='creditcard'):\n return Filter('type', values=(typ,), operator=Filter.OPERATOR['EQUAL'])", "def handle_list(self, detail, *args, **kwargs):\n for product_type in models.ProductType.objects.all():\n print(product_type.name)\n if detail:\n for coverage_type in product_type.allowed_coverage_types.all():\n print(\"\\t%s\" % coverage_type.name)", "def get_refs_by_type(self, search_term: str, ref_type: RefType) -> List[str]:\n pk = f\"{search_term}##{ref_type.value.lower()}\"\n filter_exp = Key(\"label_and_type\").eq(pk)\n try:\n matches = self.genes.query(KeyConditionExpression=filter_exp)\n return [m[\"concept_id\"] for m in matches.get(\"Items\", None)]\n except ClientError as e:\n logger.error(\n f\"boto3 client error on get_refs_by_type for \"\n f\"search term {search_term}: \"\n f\"{e.response['Error']['Message']}\"\n )\n return []", "def get_books_by_record_type(self, book_record_type):\n raise errors.Unimplemented()", "def get_catalog_search_record_types(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.get_resource_record_types\n return self._provider_manager.get_catalog_search_record_types()" ]
[ "0.73568535", "0.6875554", "0.63545704", "0.61625624", "0.6112447", "0.59714943", "0.5629469", "0.56187636", "0.5547003", "0.5511414", "0.55090463", "0.5447316", "0.5379001", "0.5371051", "0.53498363", "0.53343624", "0.53103036", "0.5240032", "0.52066165", "0.5198516", "0.51816815", "0.51704365", "0.5169875", "0.51631004", "0.5137371", "0.5121837", "0.5117987", "0.5085534", "0.5080508", "0.5076205" ]
0.8638777
0
Pass through to provider CatalogLookupSession.get_catalogs_by_parent_genus_type
def get_catalogs_by_parent_genus_type(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinLookupSession.get_bins_by_parent_genus_type
    catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_parent_genus_type(*args, **kwargs)
    cat_list = []
    for cat in catalogs:
        cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))
    return CatalogList(cat_list)
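Note how every lookup above repeats the same wrap-and-collect loop around the provider results. A small private helper could centralize it; this is a sketch derived from the visible code, not a method present in the source. `Catalog` and `CatalogList` are the service-layer wrappers used throughout.

def _wrap_catalogs(self, provider_catalogs):
    # Wrap raw provider catalogs in the service-layer Catalog adapter
    # and return them as a service-layer CatalogList.
    return CatalogList([
        Catalog(self._provider_manager, cat, self._runtime, self._proxy)
        for cat in provider_catalogs
    ])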
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_catalogs_by_genus_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_genus_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_genus_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_books_by_parent_genus_type(self, book_genus_type):\n raise errors.Unimplemented()", "def get_families_by_parent_genus_type(self, family_genus_type):\n raise errors.Unimplemented()", "def get_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalogs(*args, **kwargs)", "def get_comments_by_parent_genus_type(self, comment_genus_type):\n # Implemented from template for\n # osid.resource.ResourceLookupSession.get_resources_by_parent_genus_type\n # STILL NEED TO IMPLEMENT!!!\n return objects.CommentList([])", "def get_relationships_by_parent_genus_type(self, relationship_genus_type):\n # Implemented from template for\n # osid.resource.ResourceLookupSession.get_resources_by_parent_genus_type\n # STILL NEED TO IMPLEMENT!!!\n return objects.RelationshipList([])", "def get_catalogs_by_record_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_record_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_record_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_parent_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalog_ids(*args, **kwargs)", "def has_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').has_parent_catalogs(*args, **kwargs)", "def parent_type(self, type: str) -> str:\n return self.types_df.loc[type][\"parent_type\"]", "def get_catalogs_by_provider(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_provider\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_provider(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def determine_child_types(registry, parent_type):\n child_types = []\n for potential_child_type, details in registry[TYPES].by_item_type.items():\n if parent_type in getattr(details, 'base_types', []):\n child_types.append(details.name)\n return child_types", "def test_get_parent_type_name(self):\n pass", "def get_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bins\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalogs(*args, **kwargs)", "def determine_parent_types(registry, item_type):\n base_types = []\n try:\n base_types = extract_base_types(registry, item_type)\n except KeyError: # indicative of an error if not 
testing\n log.info(f'Tried to determine parent type of invalid type: {item_type}')\n return [b for b in base_types if b != 'Item']", "def get_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy()", "def retrieve_catalog_for_path(self, needle_path):\n clg = self.retrieve_root_catalog()\n while True:\n new_nested_reference = clg.find_nested_for_path(needle_path)\n if new_nested_reference is None:\n break\n nested_reference = new_nested_reference\n clg = self.retrieve_catalog(nested_reference.hash)\n return clg", "def get_parent_locations(self, location):\r\n store = self._get_modulestore_for_courseid(location.course_key)\r\n return store.get_parent_locations(location)", "def is_parent_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_parent_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_parent_of_catalog(*args, **kwargs)", "def getCatalogs():", "def parent(self, parent_object):\n lookup = get_parent_lookup_kwargs(parent_object)\n return self.filter(**lookup)", "def get_parent_object_of_type(self, meta_type):\n if hasattr(self.context, \"meta_type\") and self.context.meta_type == meta_type:\n return self.context\n obj = Acquisition.aq_inner(self.context)\n while not isinstance(obj, PloneSite):\n obj = Acquisition.aq_parent(obj)\n if hasattr(obj, \"meta_type\") and obj.meta_type == meta_type:\n return obj\n return None", "def _get_objects(self, object_type, **kwargs):\r\n return self.parent_connection._get_objects(object_type,\r\n sys_id=self.sys_id,\r\n **kwargs)", "def test_get_all_ancestor_types(self):\n pass", "def get_families_by_genus_type(self, family_genus_type):\n # Implemented from template for\n # osid.resource.BinLookupSession.get_bins_by_genus_type_template\n # NOTE: This implementation currently ignores plenary view\n if self._catalog_session is not None:\n return self._catalog_session.get_catalogs_by_genus_type(catalog_genus_type=family_genus_type)\n collection = JSONClientValidated('relationship',\n collection='Family',\n runtime=self._runtime)\n result = collection.find({\"genusTypeId\": str(family_genus_type)}).sort('_id', DESCENDING)\n\n return objects.FamilyList(result, runtime=self._runtime, proxy=self._proxy)", "def get_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_template\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs()\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_path_list(self, type_str=None):\n return list(\n reversed(\n [v.label_str for v in self.parent_gen if type_str in (None, v.type_str)]\n )\n )", "def get_books_by_genus_type(self, book_genus_type):\n # Implemented from template for\n # osid.resource.BinLookupSession.get_bins_by_genus_type_template\n # NOTE: This implementation currently ignores plenary view\n if self._catalog_session is not None:\n return self._catalog_session.get_catalogs_by_genus_type(catalog_genus_type=book_genus_type)\n collection = JSONClientValidated('commenting',\n collection='Book',\n runtime=self._runtime)\n result = collection.find({\"genusTypeId\": str(book_genus_type)}).sort('_id', DESCENDING)\n\n return objects.BookList(result, runtime=self._runtime, proxy=self._proxy)", 
"def get(self):\n return GenericGet().get_catalogs()", "def get_root_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bins\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalogs()" ]
[ "0.71648747", "0.6957899", "0.64228", "0.6352422", "0.61795235", "0.60324603", "0.5886922", "0.56472236", "0.55829614", "0.5565245", "0.5385594", "0.5288705", "0.52565855", "0.5242748", "0.5226299", "0.5188614", "0.5185927", "0.5169943", "0.5166642", "0.5164425", "0.5149007", "0.5144623", "0.50743663", "0.5071899", "0.5063386", "0.5053366", "0.50438505", "0.4984856", "0.49762997", "0.4927199" ]
0.8795304
0
Pass through to provider CatalogLookupSession.get_catalogs_by_record_type
def get_catalogs_by_record_type(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinLookupSession.get_bins_by_record_type
    catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_record_type(*args, **kwargs)
    cat_list = []
    for cat in catalogs:
        cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))
    return CatalogList(cat_list)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_books_by_record_type(self, book_record_type):\n raise errors.Unimplemented()", "def get_catalog_record_types(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.get_resource_record_types\n return self._provider_manager.get_catalog_record_types()", "def get_catalogs_by_genus_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_genus_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_genus_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_catalog_search_record_types(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.get_resource_record_types\n return self._provider_manager.get_catalog_search_record_types()", "def get_catalog_record_types(self):\n return TypeList([])", "def get_catalog_search_record_types(self):\n return TypeList([])", "def supports_catalog_record_type(self, catalog_record_type=None):\n if catalog_record_type is None:\n raise NullArgument()\n return False", "def can_create_catalog_with_record_types(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_create_bin_with_record_types\n return self._get_provider_session('catalog_admin_session').can_create_catalog_with_record_types(*args, **kwargs)", "def supports_catalog_search_record_type(self, catalog_search_record_type=None):\n if catalog_search_record_type is None:\n raise NullArgument()\n return False", "def netsuite_get_all(self, record_type: str) -> list:\n if record_type is None:\n raise ValueError(\n \"Parameter 'record_type' is required for kw: netsuite_get_all\"\n )\n return self.client.getAll(recordType=record_type)", "def get_comments_by_record_type(self, comment_record_type):\n # Implemented from template for\n # osid.resource.ResourceLookupSession.get_resources_by_record_type\n # STILL NEED TO IMPLEMENT!!!\n return objects.CommentList([])", "def get_catalogs_by_parent_genus_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_parent_genus_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_parent_genus_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def netsuite_get(\n self, record_type: str = None, internal_id: str = None, external_id: str = None\n ) -> list:\n if record_type is None:\n raise ValueError(\"Parameter 'record_type' is required for kw: netsuite_get\")\n if internal_id is None and external_id is None:\n raise ValueError(\n \"Parameter 'internal_id' or 'external_id' \"\n \" is required for kw: netsuite_get\"\n )\n kwargs = {\"recordType\": record_type}\n if internal_id is not None:\n kwargs[\"internalId\"] = internal_id\n if external_id is not None:\n kwargs[\"externalId\"] = external_id\n\n return self.client.get(**kwargs)", "def get_families_by_record_type(self, family_record_type):\n raise errors.Unimplemented()", "def initCatalog(list_type):\n catalog = model.newCatalog(list_type)\n return catalog", "def get_catalog_search_order_record(self, catalog_record_type):\n return # osid.cataloging.records.CatalogSearchOrderRecord", "def _getRecords(self, record_type, filters):\n if not filters:\n # Always return a 
copy for consistency\n return list(self._dump_data[record_type])\n response = self._dump_data[record_type]\n for f in filters:\n response = [r for r in response if f(r)]\n return response", "def get_catalogs_by_provider(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_provider\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_provider(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def query_records(record_type):\n json_query = {\n 'query': {\n 'recordType': record_type\n }\n }\n\n records = []\n while True:\n result_query_authors = cloudkit_request(\n '/development/public/records/query',\n json.dumps(json_query))\n result_query_authors = json.loads(result_query_authors['content'])\n\n records += result_query_authors['records']\n\n if 'continuationMarker' in result_query_authors.keys():\n json_query['continuationMarker'] = \\\n result_query_authors['continuationMarker']\n else:\n break\n\n return records", "def record_type(values):\n field = basic.lookup(values, name='Record Type (one of %s)' % values)\n\n return field.setResultsName('record_type')", "def by_type(cls, typ='creditcard'):\n return Filter('type', values=(typ,), operator=Filter.OPERATOR['EQUAL'])", "def get_book_search_record_types(self):\n return TypeList([])", "def initCatalog(tad_list_type):\n catalog = model.newCatalog(tad_list_type)\n return catalog", "def get_book_record_types(self):\n return TypeList([])", "def scrapeRecords(self, record_type):\n if record_type == 'list':\n base_url = config.LISTING_URL\n num_cols = 10\n table = 'list_records'\n else:\n base_url = config.SOLD_RECORD_URL\n num_cols = 12\n table = 'sale_records'\n\n records = self._queryScrapedIds('records', record_type, 'FALSE')\n parsed_records = []\n for record_id in records:\n fields = self._scrapeRecord(base_url, record_id, record_type)\n parsed_records.append(tuple(fields))\n time.sleep(random.randint(0, config.SLEEP))\n\n cols = ','.join(['%s ' for i in range(0, num_cols)])\n query = \"INSERT INTO \" + table + \" VALUES (\" + cols + \\\n \", ST_GeomFromText('POINT(%s %s)', 4326))\"\n\n self._storeData(parsed_records, table, query)\n self._updateScraped(table, record_type)\n return", "def getCatalogs():", "def get_catalog_form(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.get_bin_form_for_update_template\n # This method might be a bit sketchy. 
Time will tell.\n if isinstance(args[-1], list) or 'catalog_record_types' in kwargs:\n return self.get_catalog_form_for_create(*args, **kwargs)\n else:\n return self.get_catalog_form_for_update(*args, **kwargs)", "def get_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_template\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs()\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def _get_catalog_object(self):\n return self.cluster.catalogd.service.read_debug_webpage(\n \"catalog_object?object_type=TABLE&object_name=functional.alltypes\")", "def _get_zone_specifics_for_zone_type(self, zone_type):\n with self.engine.connect() as conn:\n\n if zone_type == 'ward':\n table = 'project'\n elif zone_type == 'neighborhood_cluster':\n table = 'census_tract_to_neighborhood_cluster'\n else:\n table = 'census'\n\n query_result = conn.execute(\n 'select distinct {zone} from {table};'.format(zone=zone_type,\n table=table))\n zone_specifics = [row[0] for row in query_result.fetchall()]\n # zones.append(zone)\n\n return zone_specifics" ]
[ "0.68990505", "0.67648774", "0.6703916", "0.6664338", "0.664531", "0.64445525", "0.63894403", "0.6387929", "0.6130862", "0.6035976", "0.58686966", "0.58163524", "0.5804801", "0.5748946", "0.5695022", "0.56245303", "0.55583835", "0.55504656", "0.5542325", "0.5529362", "0.55207783", "0.549316", "0.5366363", "0.53597057", "0.5356909", "0.5336683", "0.5320693", "0.5288829", "0.5266761", "0.5258703" ]
0.85964394
0
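The record above follows a delegate-and-wrap template that recurs throughout this dataset: the service session forwards the call to an underlying provider session, then wraps every returned provider object before handing it back. The sketch below is a minimal, self-contained illustration of that shape; Catalog, CatalogList, and CatalogServiceSession are simplified stand-ins, not the real dlkit types.

# Minimal sketch of the kitosid pass-through pattern, using
# hypothetical stand-in classes rather than the real dlkit types.

class Catalog:
    """Service-layer wrapper around one provider catalog (stand-in)."""
    def __init__(self, provider_manager, provider_catalog, runtime, proxy):
        self._provider_manager = provider_manager
        self._provider_catalog = provider_catalog
        self._runtime = runtime
        self._proxy = proxy


class CatalogList(list):
    """Stand-in for the OSID list type handed back to callers."""


class CatalogServiceSession:
    def __init__(self, provider_manager, sessions, runtime=None, proxy=None):
        self._provider_manager = provider_manager
        self._sessions = sessions  # maps session name -> provider session
        self._runtime = runtime
        self._proxy = proxy

    def _get_provider_session(self, name):
        return self._sessions[name]

    def get_catalogs_by_record_type(self, *args, **kwargs):
        # Delegate to the provider, then re-wrap each result so callers
        # only ever see service-layer objects.
        provider = self._get_provider_session('catalog_lookup_session')
        catalogs = provider.get_catalogs_by_record_type(*args, **kwargs)
        return CatalogList(
            Catalog(self._provider_manager, cat, self._runtime, self._proxy)
            for cat in catalogs)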
Pass through to provider CatalogLookupSession.get_catalogs_by_provider
def get_catalogs_by_provider(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinLookupSession.get_bins_by_provider
    catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_provider(*args, **kwargs)
    cat_list = []
    for cat in catalogs:
        cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))
    return CatalogList(cat_list)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getCatalogs():", "def get_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_template\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs()\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def can_lookup_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.can_lookup_bins_template\n return self._get_provider_session('catalog_lookup_session').can_lookup_catalogs()", "def get_catalogs_by_genus_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_genus_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_genus_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def fetch_account_catalogs(account:str):\n for config in accounts:\n if account in config['streamers']:\n return config['catalogs']\n return", "def get_catalogs_by_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_ids\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_ids(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_providers(self):\n \n r = requests.get(\n self._url('/dataproviders'),\n headers={'Authorization': self.token},\n proxies=self.proxy)\n r.raise_for_status()\n providers = r.json()\n self.providers = [p['name'] for p in providers if (p['user'] is not None and p['user']!='SCRIPTING ENGINE')]\n log.info('{:d} providers found'.format(len(self.providers)))\n\n return", "def get(self):\n return GenericGet().get_catalogs()", "def get_catalogs_by_record_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_record_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_record_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_catalogs_by_query(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.get_bins_by_query_template\n return self._get_provider_session('catalog_query_session').get_catalogs_by_query(*args, **kwargs)", "def supports_catalog_lookup(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_lookup()", "def provider_list(cls, args, config):\n # print \"MOLNSProvider.provider_list(args={0}, config={1})\".format(args, config)\n providers = config.list_objects(kind='Provider')\n if len(providers) == 0:\n print \"No providers configured\"\n else:\n table_data = []\n for p in providers:\n table_data.append([p.name, p.type])\n # table_print(['name', 'type'], table_data)\n r = {'type': 'table', 'column_names': ['name', 'type'], 'data': table_data}\n return r", "def catalogs(self):\n return sorted(self._catalog_comp_info_dicts.keys())", "def get_catalog(self):\n\n rep = req.get_json(self.CATALOG)\n repo_list = rep[\"repositories\"]\n\n for repo in repo_list:\n 
self.list.append(Repository(repo))\n\n return self.list", "def _fetch_providers(self, looking_for, providers=None):\n if providers is None:\n providers = self._reverse_mapping.get(looking_for, [])\n default_providers = []\n atom_providers = []\n for p in providers:\n if p.name in (_TRANSIENT_PROVIDER, self.injector_name):\n default_providers.append(p)\n else:\n atom_providers.append(p)\n return default_providers, atom_providers", "def get_catalog_lookup_session(self):\n raise Unimplemented()", "def get_catalogs_by_parent_genus_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_parent_genus_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_parent_genus_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def supports_catalog_lookup(self):\n return False", "def get_queryset(self):\n if self.requested_enterprise_uuid is None:\n raise ParseError('Required enterprise_customer_uuid is missing')\n enterprise_customer_idp = get_object_or_404(\n EnterpriseCustomerIdentityProvider,\n enterprise_customer__uuid=self.requested_enterprise_uuid\n )\n try:\n saml_provider = SAMLProviderConfig.objects.current_set().get(\n slug=convert_saml_slug_provider_id(enterprise_customer_idp.provider_id))\n except SAMLProviderConfig.DoesNotExist:\n raise Http404('No matching SAML provider found.') # lint-amnesty, pylint: disable=raise-missing-from\n return SAMLProviderData.objects.filter(entity_id=saml_provider.entity_id)", "def getProvidersReferences(self):\n field = self.getWrappedField('provider')\n providers = list(field._Vocabulary(self).items())\n providers.sort(lambda a, b: cmp(a[1].lower(), b[1].lower()))\n return atapi.DisplayList(providers)", "def get_catalog_lookup_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_lookup_session(*args, **kwargs)", "def get_books_by_provider(self, resource_id):\n raise errors.Unimplemented()", "def get_catalog_lookup_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def all_providers(self) -> List[ProviderInfo]:\n sp_key = self.__providers_key()\n value = self.get(name=sp_key)\n if value is None:\n return []\n js = utf8_decode(data=value)\n array = json_decode(string=js)\n return ProviderInfo.convert(array=array)", "def get_catalog(self) -> Dict[str, str]:\n return self.catalog", "def providers(self):\n return [p for p in self._db.providers.values() if self._dbattr(p.IDATTR)]", "def providers(self):\n return [p for p in self._db.providers.values() if self._dbattr(p.IDATTR)]", "def get_items_for_catalog(catalog_id):\n pass", "def test_get_hyperflex_app_catalog_list(self):\n pass", "def can_search_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.can_search_bins_template\n return self._get_provider_session('catalog_query_session').can_search_catalogs()" ]
[ "0.67733276", "0.67415875", "0.6619406", "0.6432229", "0.64017737", "0.6373136", "0.63554007", "0.63192785", "0.6243004", "0.61907905", "0.6176288", "0.6121346", "0.61044186", "0.61037636", "0.60998917", "0.6073811", "0.60145897", "0.60041785", "0.5992079", "0.599185", "0.5981195", "0.59471864", "0.5926991", "0.58186173", "0.58170396", "0.5777348", "0.5777348", "0.5746678", "0.57341886", "0.57183945" ]
0.8459622
0
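To see the wrapping behaviour of the get_catalogs_by_provider record above in action, here is a hedged, self-contained demo: a stub provider session replaces the real back end, and the final assertion confirms that every element reaching the caller has been re-wrapped. All names below are illustrative, not dlkit APIs.

# Illustrative stubs only; real dlkit sessions and Ids look different.

class StubProviderCatalog:
    def __init__(self, name):
        self.name = name


class StubLookupSession:
    def get_catalogs_by_provider(self, resource_id):
        # A real provider session would filter on resource_id; the stub
        # ignores it and returns two fixed catalogs.
        return [StubProviderCatalog('Catalog A'), StubProviderCatalog('Catalog B')]


class WrappedCatalog:
    """Stand-in for the service-layer Catalog wrapper."""
    def __init__(self, provider_catalog):
        self.provider_catalog = provider_catalog


def get_catalogs_by_provider(session, resource_id):
    # Same shape as the record above: delegate, then wrap each result.
    return [WrappedCatalog(cat)
            for cat in session.get_catalogs_by_provider(resource_id)]


wrapped = get_catalogs_by_provider(StubLookupSession(), resource_id='fake-id')
assert all(isinstance(w, WrappedCatalog) for w in wrapped)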
Pass through to provider CatalogLookupSession.get_catalogs
def get_catalogs(self):
    # Implemented from kitosid template for -
    # osid.resource.BinLookupSession.get_bins_template
    catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs()
    cat_list = []
    for cat in catalogs:
        cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))
    return CatalogList(cat_list)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getCatalogs():", "def get_catalogs_by_provider(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_provider\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_provider(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get(self):\n return GenericGet().get_catalogs()", "def can_lookup_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.can_lookup_bins_template\n return self._get_provider_session('catalog_lookup_session').can_lookup_catalogs()", "def get_catalogs_by_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_ids\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_ids(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_catalog(self):\n\n rep = req.get_json(self.CATALOG)\n repo_list = rep[\"repositories\"]\n\n for repo in repo_list:\n self.list.append(Repository(repo))\n\n return self.list", "def get_catalog_lookup_session(self):\n raise Unimplemented()", "def list_catalogs(self):\n return self._json_object_field_to_list(\n self._get_catalogs_json(), self.__MISSION_STRING)", "def get_catalogs_by_query(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.get_bins_by_query_template\n return self._get_provider_session('catalog_query_session').get_catalogs_by_query(*args, **kwargs)", "def get_catalog(self) -> Dict[str, str]:\n return self.catalog", "def catalogs(self):\n return sorted(self._catalog_comp_info_dicts.keys())", "def get_catalog_lookup_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def fetch_account_catalogs(account:str):\n for config in accounts:\n if account in config['streamers']:\n return config['catalogs']\n return", "def get_catalogs_by_record_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_record_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_record_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def test_get_hyperflex_app_catalog_list(self):\n pass", "def get_items_for_catalog(catalog_id):\n pass", "def get_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bin\n return Catalog(\n self._provider_manager,\n self._get_provider_session('catalog_lookup_session').get_catalog(*args, **kwargs),\n self._runtime,\n self._proxy)", "def get_catalogs_by_genus_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_genus_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_genus_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def supports_catalog_lookup(self):\n return False", "def supports_catalog_lookup(self):\n # Implemented 
from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_lookup()", "def can_search_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.can_search_bins_template\n return self._get_provider_session('catalog_query_session').can_search_catalogs()", "def query_object_catalogs(self, position, catalogs=__ALL_STRING, row_limit=DEFAULT_ROW_LIMIT,\n get_query_payload=False, cache=True, verbose=False):\n return self.query_region_catalogs(position=position,\n radius=self.__ZERO_ARCMIN_STRING,\n catalogs=catalogs,\n row_limit=row_limit,\n get_query_payload=get_query_payload,\n cache=cache,\n verbose=verbose)", "def get_catalog(self):\n return self", "def get_catalog_lookup_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_lookup_session(*args, **kwargs)", "def init_catalog():\n return controller.init_catalog()", "def getCatalog(unique_name):", "def list_detail_catalog(self, catalog_name):\n # list catalog\n self._list_catalog(catalog_name)\n # detail catalog\n self._details_catalog(catalog_name)", "def _extract_catalog(self, data):\n interface = 'public'\n catalog = data['token']['catalog']\n service_map = {}\n for service in catalog:\n service_endpoint = None\n for endpoint in service['endpoints']:\n if endpoint['interface'] == interface:\n service_endpoint = endpoint['url']\n break\n if service_endpoint:\n service_map[service['type']] = service_endpoint\n LOG.debug('Service catalog: %s' % service_map)\n return service_map", "def __iter__(self):\n for key in self._catalogs:\n yield key", "def checkCatalogs():\n url = CHECKBASE % 'catalogs'\n catalogs = []\n try:\n fh = getURLHandle(url)\n #fh = urllib2.urlopen(url)\n data = fh.read()\n dom = minidom.parseString(data)\n fh.close()\n catalog_elements = dom.getElementsByTagName('Catalog')\n for catel in catalog_elements:\n if catel.firstChild is None:\n continue\n catalog = catel.firstChild.data.strip()\n if len(catalog):\n catalogs.append(str(catalog))\n except:\n raise Exception,\"Could not open %s to search for list of catalogs\" % url\n return catalogs" ]
[ "0.79242986", "0.7733211", "0.7497713", "0.7362184", "0.7123644", "0.7016613", "0.6908637", "0.6893423", "0.68917024", "0.68790054", "0.6840749", "0.6761759", "0.675409", "0.6751744", "0.6750636", "0.66686434", "0.66536814", "0.6608449", "0.6515111", "0.65041375", "0.641323", "0.6408174", "0.6395362", "0.63941604", "0.6295306", "0.6255241", "0.6250112", "0.6247885", "0.6245645", "0.61946785" ]
0.7808394
1
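Since get_catalogs, get_catalogs_by_ids, get_catalogs_by_genus_type, and the other lookup variants all repeat the same delegate-and-wrap boilerplate verbatim, the methods could in principle be generated. The factory below is a speculative refactoring sketch under that observation, not how dlkit is actually implemented.

# Speculative refactoring sketch: generate the repeated pass-through
# methods instead of hand-writing each one. Not the actual dlkit code.

def make_passthrough(session_name, method_name, wrap=None):
    """Build a pass-through method that optionally wraps each result."""
    def method(self, *args, **kwargs):
        provider = self._get_provider_session(session_name)
        result = getattr(provider, method_name)(*args, **kwargs)
        if wrap is None:
            return result  # booleans, query objects, etc. pass straight through
        return [wrap(self, item) for item in result]
    method.__name__ = method_name
    return method


class GeneratedCatalogService:
    def __init__(self, sessions):
        self._sessions = sessions

    def _get_provider_session(self, name):
        return self._sessions[name]

    # One line per pass-through instead of eight lines of boilerplate:
    get_catalogs = make_passthrough(
        'catalog_lookup_session', 'get_catalogs',
        wrap=lambda self, cat: ('wrapped', cat))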
Pass through to provider CatalogQuerySession.can_search_catalogs
def can_search_catalogs(self):
    # Implemented from kitosid template for -
    # osid.resource.BinQuerySession.can_search_bins_template
    return self._get_provider_session('catalog_query_session').can_search_catalogs()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def supports_catalog_search(self):\n return False", "def supports_catalog_query(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_query()", "def can_lookup_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.can_lookup_bins_template\n return self._get_provider_session('catalog_lookup_session').can_lookup_catalogs()", "def supports_catalog_query(self):\n return False", "def can_lookup_books(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.can_lookup_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_lookup_catalogs()\n return True", "def supports_catalog_lookup(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_lookup()", "def supports_catalog_lookup(self):\n return False", "def supports_catalog(self):\n return False", "def get_catalogs_by_query(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.get_bins_by_query_template\n return self._get_provider_session('catalog_query_session').get_catalogs_by_query(*args, **kwargs)", "def search_catalog(self, query):\n scope = datacatalog.SearchCatalogRequest.Scope()\n scope.include_project_ids.append(self.__project_id)\n\n request = datacatalog.SearchCatalogRequest()\n request.scope = scope\n request.query = query\n request.page_size = 1000\n\n return [\n result for result in self.__datacatalog.search_catalog(request)\n ]", "def can_search_comments(self):\n # Implemented from template for\n # osid.resource.ResourceQuerySession.can_search_resources\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def can_create_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_create_bins\n return self._get_provider_session('catalog_admin_session').can_create_catalogs()", "def get_catalog_search_session(self):\n raise Unimplemented()", "def can_lookup_families(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.can_lookup_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_lookup_catalogs()\n return True", "def supports_catalog_search_record_type(self, catalog_search_record_type=None):\n if catalog_search_record_type is None:\n raise NullArgument()\n return False", "def supports_book_search(self):\n return False", "def can_search_relationships(self):\n # Implemented from template for\n # osid.resource.ResourceQuerySession.can_search_resources\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def supports_catalog_admin(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_admin()", "def _get_catalog_results(self, featured=False, **kw):\n if 'context' in kw.keys():\n kw['path'] = {'query': '/'.join(kw['context'].getPhysicalPath())}\n\n types = ('Article', 
'Blog Entry', )\n states = ('published', )\n sort = 'Date'\n \n results = self.qrymethod(portal_type=types,\n review_state=states,\n is_featured=featured,\n sort_on=sort, \n sort_order='descending',\n **kw)\n\n return results", "def is_searchable(self):\n return self._get_search_query() != ''", "def getCatalogs():", "def query_object_catalogs(self, position, catalogs=__ALL_STRING, row_limit=DEFAULT_ROW_LIMIT,\n get_query_payload=False, cache=True, verbose=False):\n return self.query_region_catalogs(position=position,\n radius=self.__ZERO_ARCMIN_STRING,\n catalogs=catalogs,\n row_limit=row_limit,\n get_query_payload=get_query_payload,\n cache=cache,\n verbose=verbose)", "def catalog_exists(self, args):\n catalog = self.server.connect_ermrest(self.id)\n pp(catalog.exists())", "def test_get_hyperflex_app_catalog_list(self):\n pass", "def supports_cataloging_rules(self):\n return False", "def can_update_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_update_bins\n return self._get_provider_session('catalog_admin_session').can_update_catalogs()", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def supports_catalog_admin(self):\n return False", "def search(self, *args, **kwargs): # real signature unknown\n pass", "def can_create_books(self):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_create_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_create_catalogs()\n return True" ]
[ "0.8033921", "0.74830914", "0.7328233", "0.7088473", "0.66452014", "0.65125376", "0.6422996", "0.6311823", "0.6307956", "0.62400585", "0.61784935", "0.6172449", "0.6154136", "0.5964574", "0.5960428", "0.5946245", "0.59361005", "0.5905267", "0.5871345", "0.58358365", "0.57341444", "0.5694099", "0.56923044", "0.5690113", "0.5687236", "0.5651729", "0.5648544", "0.56477696", "0.56130916", "0.56086475" ]
0.8864633
0
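Methods that return primitives rather than catalog objects, like the can_search_catalogs authorization hint above, skip the wrapping step entirely and return the provider's answer as-is. A tiny self-contained sketch, again with hypothetical stand-ins:

# Hypothetical stand-ins; the real session comes from a provider manager.

class StubQuerySession:
    def can_search_catalogs(self):
        # Real implementations consult authorization hints; the stub says yes.
        return True


class QueryService:
    def __init__(self, sessions):
        self._sessions = sessions

    def can_search_catalogs(self):
        # No re-wrapping: the boolean goes back to the caller unchanged.
        return self._sessions['catalog_query_session'].can_search_catalogs()


service = QueryService({'catalog_query_session': StubQuerySession()})
assert service.can_search_catalogs() is True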
Pass through to provider CatalogQuerySession.get_catalog_query
def get_catalog_query(self):
    # Implemented from kitosid template for -
    # osid.resource.BinQuerySession.get_bin_query_template
    return self._get_provider_session('catalog_query_session').get_catalog_query()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_catalog_query_session(self):\n raise Unimplemented()", "def get_catalog_query_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_query_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_query_session(*args, **kwargs)", "def supports_catalog_query(self):\n return False", "def supports_catalog_query(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_query()", "def get_resource_query(self):\n pass", "def get_resource_query(self):\n pass", "def query(self, query):", "def query(self):\r\n raise NotImplementedError", "def get_catalogs_by_query(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.get_bins_by_query_template\n return self._get_provider_session('catalog_query_session').get_catalogs_by_query(*args, **kwargs)", "def get_query(self):\n return self.query_class(self)", "def make_query(self):", "def query(self):\n return self.__query", "def query(self):\n return self.details[KEY_QUERY]", "def generate_query(self):\n return", "def query(self):\n return self._query", "def query(self):\n return self._query", "def query(self):\n return self._query", "def query(self):\n \n return self._query", "def get_catalog_query_session(self, proxy=None):\n if proxy is None:\n raise NullArgument()\n raise Unimplemented()", "def query(self):\n pass", "def _make_query(self):\r\n raise NotImplementedError()", "def _get_catalog_results(self, featured=False, **kw):\n if 'context' in kw.keys():\n kw['path'] = {'query': '/'.join(kw['context'].getPhysicalPath())}\n\n types = ('Article', 'Blog Entry', )\n states = ('published', )\n sort = 'Date'\n \n results = self.qrymethod(portal_type=types,\n review_state=states,\n is_featured=featured,\n sort_on=sort, \n sort_order='descending',\n **kw)\n\n return results", "def search_catalog(self, query):\n scope = datacatalog.SearchCatalogRequest.Scope()\n scope.include_project_ids.append(self.__project_id)\n\n request = datacatalog.SearchCatalogRequest()\n request.scope = scope\n request.query = query\n request.page_size = 1000\n\n return [\n result for result in self.__datacatalog.search_catalog(request)\n ]", "def query(self):\n return self.snowflake_options.query", "def get_catalog_search_session(self):\n raise Unimplemented()", "def query(self):", "def query(self) -> None:\n raise NotImplementedError()", "def query(self, **kwargs):", "def _ATTopic_query(self, **kw):\n del kw['path'] # don't want to limit to context as w/ folders\n return self.context.queryCatalog(self.context.REQUEST, \n False, # no batch here\n None, # no b_size here\n False,\n **kw)" ]
[ "0.7529669", "0.74305516", "0.71600085", "0.68536264", "0.6706009", "0.6566236", "0.6566236", "0.6564279", "0.6545803", "0.6495516", "0.6379832", "0.6374171", "0.6347719", "0.63346213", "0.6286401", "0.6281748", "0.6281748", "0.6281748", "0.62769556", "0.6248882", "0.6213487", "0.62112314", "0.6185693", "0.61527854", "0.61342907", "0.6112043", "0.60747284", "0.6063794", "0.60521346", "0.6046115" ]
0.7912566
0
Pass through to provider CatalogQuerySession.get_catalogs_by_query
def get_catalogs_by_query(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinQuerySession.get_bins_by_query_template
    return self._get_provider_session('catalog_query_session').get_catalogs_by_query(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def search_catalog(self, query):\n scope = datacatalog.SearchCatalogRequest.Scope()\n scope.include_project_ids.append(self.__project_id)\n\n request = datacatalog.SearchCatalogRequest()\n request.scope = scope\n request.query = query\n request.page_size = 1000\n\n return [\n result for result in self.__datacatalog.search_catalog(request)\n ]", "def get_catalog_query(self):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.get_bin_query_template\n return self._get_provider_session('catalog_query_session').get_catalog_query()", "def query_object_catalogs(self, position, catalogs=__ALL_STRING, row_limit=DEFAULT_ROW_LIMIT,\n get_query_payload=False, cache=True, verbose=False):\n return self.query_region_catalogs(position=position,\n radius=self.__ZERO_ARCMIN_STRING,\n catalogs=catalogs,\n row_limit=row_limit,\n get_query_payload=get_query_payload,\n cache=cache,\n verbose=verbose)", "def supports_catalog_query(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_query()", "def get_catalog_query_session(self):\n raise Unimplemented()", "def getCatalogs():", "def get_catalog_query_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get(self):\n return GenericGet().get_catalogs()", "def get_catalogs_by_provider(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_provider\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_provider(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def supports_catalog_query(self):\n return False", "def _get_catalog_results(self, featured=False, **kw):\n if 'context' in kw.keys():\n kw['path'] = {'query': '/'.join(kw['context'].getPhysicalPath())}\n\n types = ('Article', 'Blog Entry', )\n states = ('published', )\n sort = 'Date'\n \n results = self.qrymethod(portal_type=types,\n review_state=states,\n is_featured=featured,\n sort_on=sort, \n sort_order='descending',\n **kw)\n\n return results", "def query_region_catalogs(self, position, radius, catalogs=__ALL_STRING, row_limit=DEFAULT_ROW_LIMIT,\n get_query_payload=False, cache=True, verbose=False):\n sanitized_radius = self._sanitize_input_radius(radius)\n sanitized_catalogs = self._sanitize_input_catalogs(catalogs)\n sanitized_row_limit = self._sanitize_input_row_limit(row_limit)\n\n sesame_database.set('simbad')\n coordinates = commons.parse_coordinates(position)\n\n query_result = {}\n\n self._store_query_result(query_result=query_result, names=sanitized_catalogs, json=self._get_catalogs_json(),\n coordinates=coordinates, radius=sanitized_radius, row_limit=sanitized_row_limit,\n get_query_payload=get_query_payload, cache=cache, verbose=verbose)\n\n if get_query_payload:\n return query_result\n\n return commons.TableList(query_result)", "def get_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_template\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs()\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_catalog_query_session(self, *args, **kwargs):\n # Implemented from kitosid 
template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_query_session(*args, **kwargs)", "def can_search_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.can_search_bins_template\n return self._get_provider_session('catalog_query_session').can_search_catalogs()", "def get_catalog(self, query=None, query_fields=None, print_query=False,exclude_gaia=False,**kwargs):\n # Query\n main_cat = super(DECaL_Survey, self).get_catalog(query_fields=query_fields, print_query=print_query,**kwargs)\n main_cat = Table(main_cat,masked=True)\n #\n for col in main_cat.colnames:\n main_cat[col].mask = np.isnan(main_cat[col])\n #Convert SNR to mag error values.\n snr_cols = [colname for colname in main_cat.colnames if \"snr\" in colname]\n for col in snr_cols:\n main_cat[col].mask = main_cat[col]<0\n main_cat[col] = 2.5*np.log10(1+1/main_cat[col])\n \n main_cat = main_cat.filled(-99.0)\n #Remove gaia objects if necessary\n if exclude_gaia:\n self.catalog = main_cat[main_cat['gaia_pointsource']==0]\n else:\n self.catalog = main_cat\n # Clean\n main_cat = catalog_utils.clean_cat(main_cat, photom['DECaL'])\n self.validate_catalog()\n # Return\n return self.catalog", "def get_items_for_catalog(catalog_id):\n pass", "def get_catalogs_by_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_ids\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_ids(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def _query_and_fetchall(self, query):\n with self._connect() as conn:\n cur = conn.cursor()\n cur.execute(query)\n results = cur.fetchall()\n\n return results", "def get_items_for_query(self, query_str):\n raise NotImplementedError()", "def test_get_hyperflex_app_catalog_list(self):\n pass", "def get_catalogs_by_record_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_record_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_record_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def soql_query(self, query):\n self.builtin.log(\"Running SOQL Query: {}\".format(query))\n return self.cumulusci.sf.query_all(query)", "def query_ids_catalogs(self, source_ids, *, catalogs=__ALL_STRING, row_limit=DEFAULT_ROW_LIMIT,\n get_query_payload=False, cache=True, verbose=False):\n sanitized_catalogs = self._sanitize_input_catalogs(catalogs)\n sanitized_row_limit = self._sanitize_input_row_limit(row_limit)\n sanitized_source_ids = self._sanitize_input_ids(source_ids)\n\n query_result = {}\n self._store_query_result(query_result=query_result, names=sanitized_catalogs, json=self._get_catalogs_json(),\n row_limit=sanitized_row_limit, get_query_payload=get_query_payload, cache=cache,\n ids=sanitized_source_ids, verbose=verbose)\n\n if get_query_payload:\n return query_result\n\n return commons.TableList(query_result)", "def get_catalog(self, query=None, query_fields=None, print_query=False,**kwargs):\n # Main DES query\n main_cat = super(DELVE_Survey, self).get_catalog(query=query,\n query_fields=query_fields,\n print_query=print_query,**kwargs)\n if len(main_cat) == 0:\n 
main_cat = catalog_utils.clean_cat(main_cat,photom['DELVE'])\n return main_cat\n main_cat = catalog_utils.clean_cat(main_cat, photom['DELVE'])\n #import pdb; pdb.set_trace()\n for col in main_cat.colnames:\n if main_cat[col].dtype==float:\n mask = np.isnan(main_cat[col])+(main_cat[col]==99.99)\n main_cat[col] = np.where(~mask, main_cat[col], -999.0)\n \n # Finish\n self.catalog = main_cat\n self.validate_catalog()\n return self.catalog", "def fetch_account_catalogs(account:str):\n for config in accounts:\n if account in config['streamers']:\n return config['catalogs']\n return", "def get_catalog_lookup_session(self):\n raise Unimplemented()", "def driver_queries(self) -> Iterable[Dict[str, str]]:\n return self.svc_queries", "def queryThis(query: Query) -> list:\n q = str(query) # extra line for clarity. calls the __str__ magic function\n\n return list(client.query(q).result())", "def get_catalog(self):\n\n rep = req.get_json(self.CATALOG)\n repo_list = rep[\"repositories\"]\n\n for repo in repo_list:\n self.list.append(Repository(repo))\n\n return self.list" ]
[ "0.7642009", "0.6733933", "0.66568756", "0.65138435", "0.64999104", "0.64843935", "0.64589727", "0.63104904", "0.63059086", "0.6284542", "0.62835383", "0.62727606", "0.62385577", "0.6213242", "0.61514544", "0.60568017", "0.60284245", "0.601286", "0.59745634", "0.5887758", "0.58210415", "0.578214", "0.5775361", "0.5723019", "0.571636", "0.56589425", "0.5638908", "0.55947185", "0.5587797", "0.5569728" ]
0.8549721
0
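Read together, the get_catalog_query and get_catalogs_by_query records above imply the usual OSID search round trip: obtain a query object from the session, set matching criteria on it, and pass it back. The stub sketch below illustrates that flow; match_display_name is an assumed criterion-setter chosen for the example, not a verified dlkit signature.

# Illustrative round trip with stub objects; match_display_name is an
# assumed method name, not a verified dlkit API.

class StubCatalogQuery:
    def __init__(self):
        self.criteria = {}

    def match_display_name(self, value):
        # Assumed method name for this illustration only.
        self.criteria['display_name'] = value


class StubCatalogQuerySession:
    def __init__(self, catalog_names):
        self._catalog_names = catalog_names

    def get_catalog_query(self):
        return StubCatalogQuery()

    def get_catalogs_by_query(self, query):
        wanted = query.criteria.get('display_name', '')
        return [name for name in self._catalog_names if wanted in name]


session = StubCatalogQuerySession(['Physics Catalog', 'History Catalog'])
query = session.get_catalog_query()
query.match_display_name('Physics')
assert session.get_catalogs_by_query(query) == ['Physics Catalog']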
Pass through to provider CatalogAdminSession.can_create_catalogs
def can_create_catalogs(self):
    # Implemented from kitosid template for -
    # osid.resource.BinAdminSession.can_create_bins
    return self._get_provider_session('catalog_admin_session').can_create_catalogs()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def can_create_books(self):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_create_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_create_catalogs()\n return True", "def can_create_catalog_with_record_types(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_create_bin_with_record_types\n return self._get_provider_session('catalog_admin_session').can_create_catalog_with_record_types(*args, **kwargs)", "def can_create_families(self):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_create_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_create_catalogs()\n return True", "def supports_catalog_admin(self):\n return False", "def supports_catalog_admin(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_admin()", "def can_delete_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_delete_bins\n return self._get_provider_session('catalog_admin_session').can_delete_catalogs()", "def can_update_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_update_bins\n return self._get_provider_session('catalog_admin_session').can_update_catalogs()", "def supports_catalog(self):\n return False", "def can_lookup_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.can_lookup_bins_template\n return self._get_provider_session('catalog_lookup_session').can_lookup_catalogs()", "def catalog_exists(self, args):\n catalog = self.server.connect_ermrest(self.id)\n pp(catalog.exists())", "def catalog_create(self, args):\n try:\n if args.id and self.server.connect_ermrest(args.id).exists():\n print(\"Catalog already exists\")\n return\n owner = args.owner if args.owner else None\n catalog = self.server.create_ermrest_catalog(args.id, owner)\n if args.auto_configure:\n model = catalog.getCatalogModel()\n model.configure_baseline_catalog(**args.configure_args)\n if not args.quiet:\n print(\"Created new catalog %s with the following default configuration:\\n\" % catalog.catalog_id)\n pp(catalog.get('/').json())\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog not found', e)\n elif e.response.status_code == requests.codes.conflict:\n raise ResourceException(\"Catalog already exists\", e)\n else:\n raise e", "def create_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.create_bin\n return Catalog(\n self._provider_manager,\n self._get_provider_session('catalog_admin_session').create_catalog(*args, **kwargs),\n self._runtime,\n self._proxy)", "def can_create(self):\n return True", "def can_search_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.can_search_bins_template\n return self._get_provider_session('catalog_query_session').can_search_catalogs()", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n 
return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def can_manage_catalog_aliases(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceAdminSession.can_manage_resource_aliases_template\n return self._get_provider_session('catalog_admin_session').can_manage_catalog_aliases()", "def can_create_book_with_record_types(self, book_record_types):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_create_bin_with_record_types\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_create_catalog_with_record_types(catalog_record_types=book_record_types)\n return True", "def supports_cataloging_rules(self):\n return False", "def get_catalog_form_for_create(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.get_bin_form_for_create\n return self._get_provider_session('catalog_admin_session').get_catalog_form_for_create(*args, **kwargs)", "def supports_catalog_assignment(self):\n return False", "def can_modify_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_design_session').can_modify_catalog_hierarchy()", "def test_create_hyperflex_app_catalog(self):\n pass", "def supports_catalog_lookup(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_lookup()", "def can_create(self, name):\n return name in self._recipes", "def test_create_with_permissions(self):\n permissions = Permission.objects.filter(name__in=('Can add course mode', 'Can change course mode'))\n for permission in permissions:\n self.user.user_permissions.add(permission)\n\n self.assert_can_create_course()", "def can_create_comments(self):\n # Implemented from template for\n # osid.resource.ResourceAdminSession.can_create_resources\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def getCatalogs():", "def initCatalog(list_type):\n catalog = model.newCatalog(list_type)\n return catalog", "def can_delete_books(self):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_delete_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_delete_catalogs()\n return True", "def initCatalog():\n return controller.initCatalog()" ]
[ "0.75228477", "0.7024193", "0.6926643", "0.67800385", "0.6702987", "0.6540861", "0.6539525", "0.6433934", "0.63877416", "0.6312353", "0.62024695", "0.6192197", "0.6175844", "0.61720634", "0.60334325", "0.5963481", "0.59497404", "0.58628106", "0.58412707", "0.58098143", "0.5765193", "0.57036567", "0.5670126", "0.5668403", "0.5636549", "0.563434", "0.55887794", "0.5587043", "0.5581348", "0.5545556" ]
0.8740738
0
Pass through to provider CatalogAdminSession.can_create_catalog_with_record_types
def can_create_catalog_with_record_types(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinAdminSession.can_create_bin_with_record_types
    return self._get_provider_session('catalog_admin_session').can_create_catalog_with_record_types(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def supports_catalog_record_type(self, catalog_record_type=None):\n if catalog_record_type is None:\n raise NullArgument()\n return False", "def can_create_book_with_record_types(self, book_record_types):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_create_bin_with_record_types\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_create_catalog_with_record_types(catalog_record_types=book_record_types)\n return True", "def get_catalog_record_types(self):\n return TypeList([])", "def get_catalog_record_types(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.get_resource_record_types\n return self._provider_manager.get_catalog_record_types()", "def can_create_comment_with_record_types(self, comment_record_types):\n # Implemented from template for\n # osid.resource.ResourceAdminSession.can_create_resource_with_record_types\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def can_create_family_with_record_types(self, family_record_types):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_create_bin_with_record_types\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_create_catalog_with_record_types(catalog_record_types=family_record_types)\n return True", "def supports_catalog_search_record_type(self, catalog_search_record_type=None):\n if catalog_search_record_type is None:\n raise NullArgument()\n return False", "def get_catalog_search_record_types(self):\n return TypeList([])", "def get_catalog_search_record_types(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.get_resource_record_types\n return self._provider_manager.get_catalog_search_record_types()", "def can_create_relationship_with_record_types(self, relationship_record_types):\n # Implemented from template for\n # osid.resource.ResourceAdminSession.can_create_resource_with_record_types\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def get_catalogs_by_record_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_record_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_record_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def is_record_type(self):\n raise exceptions.NotImplementedError()", "def record_type(values):\n field = basic.lookup(values, name='Record Type (one of %s)' % values)\n\n return field.setResultsName('record_type')", "def can_create_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_create_bins\n return self._get_provider_session('catalog_admin_session').can_create_catalogs()", "def get_book_record_types(self):\n return TypeList([])", "def get_catalog_form(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.get_bin_form_for_update_template\n # This method might be a bit 
sketchy. Time will tell.\n if isinstance(args[-1], list) or 'catalog_record_types' in kwargs:\n return self.get_catalog_form_for_create(*args, **kwargs)\n else:\n return self.get_catalog_form_for_update(*args, **kwargs)", "def handle_create(self, name, coverage_type_names, mask_type_names,\n validity_mask_type_names, browse_type_names,\n *args, **kwargs):\n\n product_type = models.ProductType.objects.create(name=name)\n\n for coverage_type_name in coverage_type_names:\n try:\n coverage_type = models.CoverageType.objects.get(\n name=coverage_type_name\n )\n product_type.allowed_coverage_types.add(coverage_type)\n except models.CoverageType.DoesNotExist:\n raise CommandError(\n 'Coverage type %r does not exist' % coverage_type_name\n )\n\n for mask_type_name in mask_type_names:\n models.MaskType.objects.create(\n name=mask_type_name, product_type=product_type\n )\n\n for mask_type_name in validity_mask_type_names:\n models.MaskType.objects.create(\n name=mask_type_name, product_type=product_type,\n validity=True\n )\n\n for browse_type_name in browse_type_names:\n models.BrowseType.objects.create(\n name=browse_type_name, product_type=product_type\n )\n\n print('Successfully created product type %r' % name)", "def data_types():\n\n return ...", "def list_record_types(self):\n return [RecordType.A]", "def supports_book_record_type(self, book_record_type=None):\n if book_record_type is None:\n raise NullArgument()\n return False", "def identify(self, record):\n if record.record_type in self.record_type_iders:\n ident = self.record_type_iders[record.record_type](record)\n if ident:\n return [record.record_type] + ident\n return [record.record_type, False]\n return False", "def _get_types(self):\n\n db = Database()\n self.c_built_ins = list(map(lambda tup: tup[0], db.select_built_types()))\n self.c_built_in_array_types = r'^(' + '|'.join(self.escaped(self.c_built_ins)) + ')\\[[0-9]*\\]'\n self.c_types = list(map(lambda tup: tup[0], db.select_types()))\n self.c_array_types = r'^(' + '|'.join(self.escaped(self.c_types)) + ')\\[[0-9]*\\]'\n db.close_connection()", "def get_book_search_record_types(self):\n return TypeList([])", "def data_types(self):", "def on_new_types(self, types_params, new_types):\n if new_types:\n new_model = False\n model = self.dialog_base.types_list_view.model()\n if not model:\n source_model = QStandardItemModel(self.dialog_base.types_list_view)\n model = TypesModel(self.dialog_base.types_list_view)\n model.setSourceModel(source_model)\n model.setDynamicSortFilter(True)\n new_model = True\n for key, count in new_types.iteritems():\n if key in self.types_dict:\n self.types_dict[key].update_count(types_params.source, types_params.geometry, count)\n else:\n type_item = TypesItem(key)\n type_item.update_count(types_params.source, types_params.geometry, count)\n type_item.setCheckable(True)\n type_item.setCheckState(Qt.Checked)\n model.sourceModel().appendRow(type_item)\n self.types_dict[key] = type_item\n if key not in self.sources[types_params.source].type_entries:\n self.sources[types_params.source].type_entries[key] = self.types_dict[key]\n if key not in self.geometries[types_params.geometry].type_entries:\n self.geometries[types_params.geometry].type_entries[key] = self.types_dict[key]\n # source = types_params.source\n # self.start_new_item(source, key, types_params)\n if new_model:\n source_model.itemChanged.connect(self.on_type_check)\n self.dialog_base.types_list_view.setModel(model)\n self.on_task_complete()", "def test_get_types(self):\n pass", "def 
create_resource_types(resource_type_filename):\n\n print(\"Resource Types\")\n\n for i, row in enumerate(open(resource_type_filename)):\n row = row.rstrip()\n code, name, description, is_active = row.split(\"|\")\n\n if is_active == \"True\":\n is_active = True\n else:\n is_active = False\n\n\n resource_type = Resource_Type(code=code,\n name=name,\n description=description,\n is_active=is_active)\n\n # Add resource type to session\n db.session.add(resource_type)\n\n # Commit all resource type instances to DB\n db.session.commit()", "def initCatalog(list_type):\n catalog = model.newCatalog(list_type)\n return catalog", "def _flag_created_omf_type(configuration_key, type_id, asset_code):\n\n payload = payload_builder.PayloadBuilder()\\\n .INSERT(configuration_key=configuration_key,\n asset_code=asset_code,\n type_id=type_id)\\\n .payload()\n\n _storage.insert_into_tbl(\"omf_created_objects\", payload)", "def get_comment_record_types(self):\n return TypeList([])" ]
[ "0.73059326", "0.69893354", "0.69627184", "0.68502027", "0.6609438", "0.65626746", "0.63158554", "0.63049453", "0.6282918", "0.6282484", "0.59466547", "0.58646506", "0.5791292", "0.5788916", "0.57652843", "0.57473034", "0.57256", "0.559158", "0.5569805", "0.55403835", "0.55382615", "0.55349934", "0.5528974", "0.54959184", "0.54913676", "0.547082", "0.5450056", "0.5392493", "0.5365506", "0.5309687" ]
0.88522863
0
Pass through to provider CatalogAdminSession.get_catalog_form_for_create
def get_catalog_form_for_create(self, *args, **kwargs): # Implemented from kitosid template for - # osid.resource.BinAdminSession.get_bin_form_for_create return self._get_provider_session('catalog_admin_session').get_catalog_form_for_create(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_catalog_form(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.get_bin_form_for_update_template\n # This method might be a bit sketchy. Time will tell.\n if isinstance(args[-1], list) or 'catalog_record_types' in kwargs:\n return self.get_catalog_form_for_create(*args, **kwargs)\n else:\n return self.get_catalog_form_for_update(*args, **kwargs)", "def show_create_form(self):\n # if there is no add permission then does not show the form\n if not self.has_add_permissions(): return\n\n params = {\n 'title':'Create',\n 'model':self.model,\n 'parent_model':self.parent_model,\n 'parent_pk':self.parent_pk,\n 'parent_win': self\n }\n\n if self.INLINES: params.update({'inlines':self.INLINES})\n if self.FIELDSETS: params.update({'fieldsets':self.FIELDSETS})\n if self.READ_ONLY: params.update({'readonly':self.READ_ONLY})\n\n createform = self.addmodel_class(**params)\n\n if hasattr(self, '_details') and self.USE_DETAILS_TO_ADD:\n self._list.hide()\n self._details.show()\n self._details.value = createform\n toolbar = [self.toolbar] if isinstance(self.toolbar, str) else self.toolbar\n if toolbar:\n for o in toolbar:\n if o and hasattr(self, o):\n getattr(self, o).hide()\n else:\n self._list.show()\n if hasattr(self, '_details'):\n self._details.hide()", "def get_form(self, request, obj=None, **kwargs):\n if not obj:\n kwargs['form'] = VPNClientCreationForm\n return super().get_form(request, obj, **kwargs)", "def get_catalog_form_for_update(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.get_bin_form_for_update\n return self._get_provider_session('catalog_admin_session').get_catalog_form_for_update(*args, **kwargs)", "def make_form(self):", "def __init__(self, *args, **kwargs):\n super(EnterpriseCustomerAdminForm, self).__init__(*args, **kwargs)\n\n self.fields['catalog'] = forms.ChoiceField(\n choices=self.get_catalog_options(),\n required=False,\n help_text=\"<a id='catalog-details-link' href='#' target='_blank'\"\n \"data-url-template='{catalog_admin_url}'> View catalog details.</a>\".format(\n catalog_admin_url=utils.get_catalog_admin_url_template(),\n )\n )", "def new(self, *args, **kw):\n\n\t\t\tif len(args) > 0:\n\t\t\t\tkw['id_fase_fk']= args[0] \n\n\t\t\ttmpl_context.widget = self.new_form\n\t\t\tretorno \t\t= dict(value = kw, model = self.model.__name__)\n\t\t\tretorno['fid']\t= args[0]\n\n\t\t\treturn retorno", "def get_form_class(self):\n if self.survey.get_requires_payment():\n return AuthorizenetSurveyPurchaseForm\n return super(AuthorizenetSurveyPurchaseCreate, self).get_form_class()", "def _form_for_type(request, C, defn, add_id_and_rev=False):\n form = build(defn, C, add_id_and_rev=add_id_and_rev,\n widget_registry=_widget_registry(request))\n form.renderer = request.environ['restish.templating'].renderer\n return form", "def prepare(self, form):\n \n return form", "def get_context_data(self, **kwargs):\n if 'form' not in kwargs:\n kwargs['form'] = self.get_form(self.form_class)\n return super(OrganizerDataSetCreate, self).get_context_data(**kwargs)", "def get_form(self):\n return self.form", "def get_form(self):\n return self.form", "def get_form(self):\n form = super(AjaxCreateView, self).get_form()\n\n if form.initial.get('part', None):\n # Hide the part field\n form.fields['part'].widget = HiddenInput()\n\n return form", "def _get_form_creator(self) -> MapsFormHandler:\n return self._form_creator(columns=self._working_context_service.get_file_columns(),\n 
shapes=self._document_service.get_supported_shapes(),\n shape_keys=self._document_service.get_supported_shape_keys(),\n colour_palette=self._document_service.get_supported_colours(),\n tiles=self._document_service.get_supported_tiles())", "def render_creation_form(request: Request):\n return templates.TemplateResponse(\"creation_form.html\",{'request': request})", "def get_form(self, request, obj=None, **kwargs):\n if not obj:\n kwargs['form'] = VPNCreationForm\n else:\n kwargs['form'] = VPNUpdateForm\n return super().get_form(request, obj, **kwargs)", "def get_form(self, kind, ins = None):\n from aha.widget.form import Form\n from aha.widget.field import TextField, RichText\n from formencode import validators as v\n\n class AddForm(Form):\n multipart = True\n form_title = u'Add New Category'\n button_title = u'Add'\n submit = u'Save'\n name = TextField(title = u'ID', args = {'size':40},\n validator = v.String(), required = True)\n title = TextField(title = u'Title', args = {'size':40},\n validator = v.String(), required = True)\n description = RichText(title = u'Description', args = dict(rows = 4),\n required = False, collapsable = True)\n \n class EditForm(AddForm):\n form_title = u'Edit Category'\n \n del EditForm['name']\n\n if kind == 'add':\n return AddForm()\n elif kind == 'edit':\n return EditForm()", "def get_book_form_for_create(self, book_record_types):\n # Implemented from template for\n # osid.resource.BinAdminSession.get_bin_form_for_create_template\n if self._catalog_session is not None:\n return self._catalog_session.get_catalog_form_for_create(catalog_record_types=book_record_types)\n for arg in book_record_types:\n if not isinstance(arg, ABCType):\n raise errors.InvalidArgument('one or more argument array elements is not a valid OSID Type')\n if book_record_types == []:\n result = objects.BookForm(\n runtime=self._runtime,\n effective_agent_id=self.get_effective_agent_id(),\n proxy=self._proxy) # Probably don't need effective agent id now that we have proxy in form.\n else:\n result = objects.BookForm(\n record_types=book_record_types,\n runtime=self._runtime,\n effective_agent_id=self.get_effective_agent_id(),\n proxy=self._proxy) # Probably don't need effective agent id now that we have proxy in form.\n self._forms[result.get_id().get_identifier()] = not CREATED\n return result", "def get_form(self):\n form = super(StickerCreate, self).get_form()\n\n form.fields.pop('label')\n\n if self.kwargs.get('sprint_number'):\n board = Board.objects.get(\n desk__owner__user=self.user,\n sequence=self.kwargs['board_sequence']\n )\n form.initial = {\n 'sprint': Sprint.objects.get(\n number=self.kwargs['sprint_number'], board=board\n )\n }\n form.fields['sprint'].widget = HiddenInput()\n else:\n form.fields['sprint'].empty_label = 'Backlog'\n\n return form", "def initcomponentform():\n form = ComponentsForm()\n form.name.value = []\n form.id.value = []\n form.currentstock.value = []\n form.reorderlevel.value = []\n form.unitprice.value = []\n form.supplier.value = []\n form.location.value = []\n form.datasheet.value = []\n return form", "def open_create_partner(self, cr, uid, ids, context=None):\n view_obj = self.pool.get('ir.ui.view')\n view_id = view_obj.search(cr, uid, [('model', '=', self._name), \\\n ('name', '=', self._name+'.view')])\n return {\n 'view_mode': 'form',\n 'view_type': 'form',\n 'view_id': view_id or False,\n 'res_model': self._name,\n 'context': context,\n 'type': 'ir.actions.act_window',\n 'target': 'new',\n }", "def _form(self, r, widget, **attr):\n\n 
widget_get = widget.get\n\n label = widget_get(\"label\", \"\")\n # Activate if-required\n #if label and isinstance(label, str):\n if label:\n label = current.T(label)\n icon = widget_get(\"icon\", \"\")\n if icon:\n icon = ICON(icon)\n\n context = widget_get(\"context\", None)\n tablename = widget_get(\"tablename\", None)\n resource, context = self._resolve_context(r, tablename, context)\n\n # Widget filter option\n widget_filter = widget_get(\"filter\", None)\n if widget_filter:\n resource.add_filter(widget_filter)\n\n record = resource.select([\"id\"], limit=1, as_rows=True).first()\n if record:\n record_id = record.id\n else:\n record_id = None\n\n if record_id:\n readonly = not current.auth.s3_has_permission(\"update\", tablename, record_id)\n else:\n readonly = not current.auth.s3_has_permission(\"create\", tablename)\n\n sqlform = widget.get(\"sqlform\", None)\n if not sqlform:\n sqlform = resource.get_config(\"crud_form\")\n if not sqlform:\n from ..ui import S3SQLDefaultForm\n sqlform = S3SQLDefaultForm()\n\n get_config = current.s3db.get_config\n if record_id:\n # Update form\n onvalidation = get_config(tablename, \"create_onvalidation\") or \\\n get_config(tablename, \"onvalidation\")\n onaccept = get_config(tablename, \"create_onaccept\") or \\\n get_config(tablename, \"onaccept\")\n else:\n # Create form\n onvalidation = get_config(tablename, \"create_onvalidation\") or \\\n get_config(tablename, \"onvalidation\")\n onaccept = get_config(tablename, \"create_onaccept\") or \\\n get_config(tablename, \"onaccept\")\n\n form = sqlform(request = r,\n resource = resource,\n record_id = record_id,\n readonly = readonly,\n format = \"html\",\n onvalidation = onvalidation,\n onaccept = onaccept,\n )\n _class = self._lookup_class(r, widget)\n\n # Render the widget\n output = DIV(H4(icon,\n label,\n _class = \"profile-sub-header\",\n ),\n DIV(form,\n _class = \"form-container thumbnail\",\n ),\n _class = _class,\n )\n\n return output", "def name(self) -> Text:\n\n return \"new_product_application_form\"", "def get_form(self):\n # setup request layer\n self.request = TestRequest()\n # get add view\n form = getMultiAdapter((self.experiments, self.request),\n name=\"newProjection\")\n # update the form once to initialise all widgets\n form.update()\n # go through all widgets on the form and update the request with default values\n data = {}\n for widget in form.widgets.values():\n data[widget.name] = widget.value\n data.update({\n 'form.widgets.IDublinCore.title': u\"My CC Experiment\",\n 'form.widgets.IDublinCore.description': u'This is my experiment description',\n 'form.widgets.species_distribution_models': unicode(self.sdmexp.UID()),\n 'form.widgets.species_distribution_models.model': [unicode(self.sdmmodel.UID())],\n 'form.widgets.future_climate_datasets': [unicode(self.future.UID())]\n })\n self.request.form.update(data)\n form = getMultiAdapter((self.experiments, self.request),\n name=\"newProjection\")\n return form", "def get_form(self, request, obj=None, **kwargs):\n defaults = {}\n if obj is None:\n defaults['form'] = self.add_form\n defaults.update(kwargs)\n return super(EventAdmin, self).get_form(request, obj, **defaults)", "def get_context_data(self, **kwargs):\n context = super(CRUDCreateView, self).get_context_data(**kwargs)\n\n context.update({\n 'model_verbose_name': self.form_class._meta.model._meta.verbose_name,\n 'model_verbose_name_plural': self.form_class._meta.model._meta.verbose_name_plural,\n })\n\n #try:\n context['fields'] = 
utils.get_fields(self.form_class._meta.model)\n # except AttributeError:\n # context['fields'] = utils.get_fields(self.form_class._meta.model)\n\n if hasattr(self, 'object') and self.object:\n for action in utils.INSTANCE_ACTIONS:\n try:\n url = reverse(\n utils.crud_url_name(self.form_class._meta.model, action),\n kwargs={'pk': self.form_class._meta.object.pk})\n except NoReverseMatch:\n url = None\n context['url_%s' % action] = url\n\n for action in utils.LIST_ACTIONS:\n try:\n url = reverse(\n utils.crud_url_name(self.form_class._meta.model, action)\n )\n except NoReverseMatch:\n url = None\n context['url_%s' % action] = url\n\n return context", "def get_form(self, request, obj=None, **kwargs):\n defaults = {}\n if obj is None:\n defaults['form'] = self.add_form\n defaults.update(kwargs)\n return super(ProfileAdmin, self).get_form(request, obj, **defaults)", "def get_comment_form_for_create(self, reference_id, comment_record_types):\n # Implemented from template for\n # osid.relationship.CommentAdminSession.get_comment_form_for_create_template\n # These really need to be in module imports:\n if not isinstance(reference_id, ABCId):\n raise errors.InvalidArgument('argument is not a valid OSID Id')\n for arg in comment_record_types:\n if not isinstance(arg, ABCType):\n raise errors.InvalidArgument('one or more argument array elements is not a valid OSID Type')\n if comment_record_types == []:\n # WHY are we passing book_id = self._catalog_id below, seems redundant:\n # Probably don't need to send effective_agent_id, since the form can get that from proxy.\n obj_form = objects.CommentForm(\n book_id=self._catalog_id,\n reference_id=reference_id,\n effective_agent_id=str(self.get_effective_agent_id()),\n catalog_id=self._catalog_id,\n runtime=self._runtime,\n proxy=self._proxy)\n else:\n obj_form = objects.CommentForm(\n book_id=self._catalog_id,\n record_types=comment_record_types,\n reference_id=reference_id,\n effective_agent_id=self.get_effective_agent_id(),\n catalog_id=self._catalog_id,\n runtime=self._runtime,\n proxy=self._proxy)\n obj_form._for_update = False\n self._forms[obj_form.get_id().get_identifier()] = not CREATED\n return obj_form", "def create_form(self, resource=None, edit_form=False):\n form = UserForm(self.config_models, obj=resource)\n\n form.totp_enabled = self.totp_enabled\n\n session = self.session()\n self.update_form_collection(\n resource, edit_form, form.groups, self.Group, 'sorted_groups',\n 'id', 'name', session\n )\n self.update_form_collection(\n resource, edit_form, form.roles, self.Role, 'sorted_roles', 'id',\n 'name', session\n )\n session.close()\n\n return form" ]
[ "0.7591304", "0.69972867", "0.6811873", "0.67891073", "0.65012026", "0.64878917", "0.6272795", "0.62517387", "0.6236303", "0.623171", "0.62170404", "0.6156007", "0.6156007", "0.61468923", "0.6145996", "0.614019", "0.6120052", "0.6082279", "0.6065062", "0.6064923", "0.6049344", "0.5975869", "0.5943596", "0.58918434", "0.5876974", "0.5872616", "0.58597726", "0.5848829", "0.58089024", "0.5805658" ]
0.8792004
0
Pass through to provider CatalogAdminSession.create_catalog
def create_catalog(self, *args, **kwargs): # Implemented from kitosid template for - # osid.resource.BinAdminSession.create_bin return Catalog( self._provider_manager, self._get_provider_session('catalog_admin_session').create_catalog(*args, **kwargs), self._runtime, self._proxy)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def catalog_create(self, args):\n try:\n if args.id and self.server.connect_ermrest(args.id).exists():\n print(\"Catalog already exists\")\n return\n owner = args.owner if args.owner else None\n catalog = self.server.create_ermrest_catalog(args.id, owner)\n if args.auto_configure:\n model = catalog.getCatalogModel()\n model.configure_baseline_catalog(**args.configure_args)\n if not args.quiet:\n print(\"Created new catalog %s with the following default configuration:\\n\" % catalog.catalog_id)\n pp(catalog.get('/').json())\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog not found', e)\n elif e.response.status_code == requests.codes.conflict:\n raise ResourceException(\"Catalog already exists\", e)\n else:\n raise e", "def initCatalog():\n catalog = model.newCatalog()\n return catalog", "def initCatalog():\n catalog = model.newCatalog()\n return catalog", "def initCatalog():\n catalog = model.newCatalog()\n return catalog", "def init_catalog():\n return controller.init_catalog()", "def initCatalog():\n return controller.initCatalog()", "def initCatalog():\n return controller.initCatalog()", "def initCatalog():\n return controller.initCatalog()", "def initCatalog():\n return controller.initCatalog()", "def initCatalog():\n return controller.initCatalog()", "def __init__(self):\n super(CatalogProxy, self).new_instance(\"catalog\", Catalog)", "def save_catalog(self, catalog_form, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.update_bin\n if catalog_form.is_for_update():\n return self.update_catalog(catalog_form, *args, **kwargs)\n else:\n return self.create_catalog(catalog_form, *args, **kwargs)", "def initCatalog(tipolista : str):\n catalog = model.newCatalog(tipolista)\n return catalog", "def initCatalog(list_type):\n catalog = model.newCatalog(list_type)\n return catalog", "def initCatalog(tipo):\n catalog = model.newCatalog(tipo)\n \n return catalog", "def init():\n catalog = model.newCatalog()\n return catalog", "def can_create_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_create_bins\n return self._get_provider_session('catalog_admin_session').can_create_catalogs()", "def get_catalog_form_for_create(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.get_bin_form_for_create\n return self._get_provider_session('catalog_admin_session').get_catalog_form_for_create(*args, **kwargs)", "def initCatalog():\n t = \"SINGLE_LINKED\"\n catalog = model.newCatalog(t)\n return catalog", "def initCatalog(tad_list_type):\n catalog = model.newCatalog(tad_list_type)\n return catalog", "def test_create_hyperflex_app_catalog(self):\n pass", "def create_catalog_v0(self, create_catalog_request, **kwargs):\n # type: (CreateCatalogRequest_f3cdf8bb, **Any) -> Union[ApiResponse, object, BadRequestError_a8ac8b44, CatalogDetails_912693fa, Error_d660d58]\n operation_name = \"create_catalog_v0\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'create_catalog_request' is set\n if ('create_catalog_request' not in params) or (params['create_catalog_request'] is None):\n raise ValueError(\n \"Missing the required parameter `create_catalog_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v0/catalogs'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} 
# type: Dict\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'create_catalog_request' in params:\n body_params = params['create_catalog_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v0.catalog.catalog_details.CatalogDetails\", status_code=201, message=\"Catalog created.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v0.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v0.error.Error\", status_code=401, message=\"The auth token is invalid/expired or doesn&#39;t have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v0.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v0.error.Error\", status_code=404, message=\"The resource being requested is not found.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v0.error.Error\", status_code=429, message=\"Exceed the permitted request limit. 
Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v0.error.Error\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v0.error.Error\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=\"ask_smapi_model.v0.catalog.catalog_details.CatalogDetails\")\n\n if full_response:\n return api_response\n return api_response.body", "def created(event):\n root = event.object\n registry = event.registry\n root.sdi_title = 'Simple Book Catalog'\n service = root['catalogs']\n service.add_catalog('books', update_indexes=True)\n books = registry.content.create('BookFolder', name='books', title='Books')\n root['books'] = books", "def initCatalogA():\n return controller.initCatalogA()", "def initCatalogA():\n return controller.initCatalogA()", "def POST(self, uri='catalog'):\n # content negotiation\n content_type = negotiated_content_type(self.supported_types, self.default_content_type)\n\n # registry acl enforcement\n allowed = web.ctx.ermrest_registry.can_create(web.ctx.webauthn2_context.attributes)\n if not allowed:\n raise rest.Forbidden(uri)\n\n # optional input\n docstr = web.ctx.env['wsgi.input'].read().decode().strip()\n if docstr:\n try:\n doc = json.loads(docstr)\n except:\n raise exception.rest.BadRequest('Could not deserialize JSON input.')\n else:\n doc = {}\n\n owner = doc.get('owner')\n annotations = doc.get('annotations')\n\n # create the catalog instance\n catalog_id = web.ctx.ermrest_registry.claim_id(id=doc.get('id'), id_owner=owner)\n catalog = web.ctx.ermrest_catalog_factory.create(catalog_id)\n\n # initialize the catalog instance\n pc = sanepg2.PooledConnection(catalog.dsn)\n try:\n next(pc.perform(lambda conn, cur: catalog.init_meta(conn, cur, owner=owner, annotations=annotations)))\n finally:\n pc.final()\n\n # register the catalog descriptor\n entry = web.ctx.ermrest_registry.register(catalog_id, descriptor=catalog.descriptor)\n\n web.header('Content-Type', content_type)\n web.ctx.ermrest_request_content_type = content_type\n\n # set location header and status\n location = '/ermrest/catalog/%s' % catalog_id\n web.header('Location', location)\n web.ctx.status = '201 Created'\n\n if content_type == _text_plain:\n return str(catalog_id)\n else:\n assert content_type == _application_json\n return json.dumps(dict(id=catalog_id))", "def register_catalog(catalog_name, catalog_config):\n _registered_catalogs[catalog_name] = catalog_config", "def CreateFromCatalog(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def _set_catalog(self, catalog: cat.Catalog) -> None:\n self._catalog_interface = CatalogInterface(catalog)\n self._catalog = catalog", "def catalog_alias_create(self, args):\n try:\n if args.id:\n alias = self.server.connect_ermrest_alias(args.id)\n try:\n if alias.retrieve():\n print(\"Catalog alias already exists\")\n return\n except requests.HTTPError as e:\n if e.response.status_code == 404:\n pass\n else:\n raise\n owner = args.owner if args.owner else None\n alias = 
self.server.create_ermrest_alias(args.id, owner, args.alias_target)\n if not args.quiet:\n print(\"Created new catalog alias %s with the following configuration:\\n\" % alias.alias_id)\n pp(alias.retrieve())\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog alias not found', e)\n elif e.response.status_code == requests.codes.conflict:\n raise ResourceException(\"Catalog alias already exists\", e)\n else:\n raise" ]
[ "0.745109", "0.6975634", "0.6975634", "0.6975634", "0.6817875", "0.67300034", "0.67300034", "0.67300034", "0.67300034", "0.67300034", "0.6693062", "0.6683488", "0.66725117", "0.666314", "0.66110927", "0.66062343", "0.658615", "0.65681654", "0.6548711", "0.6512952", "0.6437378", "0.63280356", "0.6318785", "0.6274633", "0.6274633", "0.61170036", "0.6096288", "0.60264164", "0.6013394", "0.59912807" ]
0.7582978
0
Pass through to provider CatalogAdminSession.can_update_catalogs
def can_update_catalogs(self): # Implemented from kitosid template for - # osid.resource.BinAdminSession.can_update_bins return self._get_provider_session('catalog_admin_session').can_update_catalogs()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def can_update_books(self):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_update_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_update_catalogs()\n return True", "def can_update_families(self):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_update_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_update_catalogs()\n return True", "def supports_catalog_admin(self):\n return False", "def supports_catalog_admin(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_admin()", "def can_update_comments(self):\n # Implemented from template for\n # osid.resource.ResourceAdminSession.can_update_resources\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def can_create_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_create_bins\n return self._get_provider_session('catalog_admin_session').can_create_catalogs()", "def can_modify_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_design_session').can_modify_catalog_hierarchy()", "def can_delete_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_delete_bins\n return self._get_provider_session('catalog_admin_session').can_delete_catalogs()", "def has_update_permissions(self, obj):\n return True", "def _can_do_updates(self):\n return True", "def can_manage_catalog_aliases(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceAdminSession.can_manage_resource_aliases_template\n return self._get_provider_session('catalog_admin_session').can_manage_catalog_aliases()", "def update_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.update_bin\n # OSID spec does not require returning updated catalog\n return Catalog(\n self._provider_manager,\n self._get_provider_session('catalog_admin_session').update_catalog(*args, **kwargs),\n self._runtime,\n self._proxy)", "def _can_update(self):\r\n if not self._is_persisted: return False\r\n pks = self._primary_keys.keys()\r\n return all([not self._values[k].changed for k in self._primary_keys])", "def can_update(self, dataset, updates_allowed=None):\n need_sources = dataset.sources is not None\n existing = self.get(dataset.id, include_sources=need_sources)\n if not existing:\n raise ValueError('Unknown dataset %s, cannot update – did you intend to add it?' % dataset.id)\n\n if dataset.product.name != existing.product.name:\n raise ValueError('Changing product is not supported. 
From %s to %s in %s' % (existing.product.name,\n dataset.product.name,\n dataset.id))\n\n # TODO: figure out (un)safe changes from metadata type?\n allowed = {\n # can always add more metadata\n tuple(): changes.allow_extension,\n }\n allowed.update(updates_allowed or {})\n\n doc_changes = get_doc_changes(existing.metadata_doc, jsonify_document(dataset.metadata_doc))\n good_changes, bad_changes = changes.classify_changes(doc_changes, allowed)\n\n return not bad_changes, good_changes, bad_changes", "def can_update_relationships(self):\n # Implemented from template for\n # osid.resource.ResourceAdminSession.can_update_resources\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def can_lookup_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.can_lookup_bins_template\n return self._get_provider_session('catalog_lookup_session').can_lookup_catalogs()", "def supports_catalog_assignment(self):\n return False", "def is_update(self):\n return self.action in [\"update\", \"partial_update\"]", "def save_catalog(self, catalog_form, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.update_bin\n if catalog_form.is_for_update():\n return self.update_catalog(catalog_form, *args, **kwargs)\n else:\n return self.create_catalog(catalog_form, *args, **kwargs)", "def get_catalog_form_for_update(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.get_bin_form_for_update\n return self._get_provider_session('catalog_admin_session').get_catalog_form_for_update(*args, **kwargs)", "def can_update(self, user, **data):\n raise Return((True, set([])))", "def should_update(self, instance, **kwargs):\n return True", "def can_search_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.can_search_bins_template\n return self._get_provider_session('catalog_query_session').can_search_catalogs()", "def can_update_order_items(self) -> bool:\n return self.is_created or self.is_pending", "def testUpdateAccessAllowed(self):\n for user in (self.contributor, self.delegate, self.owner, self.root):\n response = self.runPut(user, sequencer=self.hiseq2000.sodar_uuid, data=self.post_data)\n self.response_200(response)\n data = json.loads(response.content.decode(\"utf-8\"))\n self.assertEqual(data[\"vendor_id\"], self.post_data[\"vendor_id\"])", "def _modify_controls(self, catalog: cat.Catalog) -> cat.Catalog:\n logger.debug(f'modify specify catalog {catalog.metadata.title} for profile {self._profile.metadata.title}')\n self._catalog_interface = CatalogInterface(catalog)\n param_dict: Dict[str, prof.SetParameter] = {}\n alters: Optional[List[prof.Alter]] = None\n # find the modify and alters\n # build a param_dict for all the modifys\n if self._profile.modify is not None:\n if self._profile.modify.set_parameters is not None:\n param_list = self._profile.modify.set_parameters\n for param in param_list:\n param_dict[param.param_id] = param\n alters = self._profile.modify.alters\n\n if alters is not None:\n for alter in alters:\n if alter.control_id is None:\n raise TrestleError('Alters must have control id specified.')\n if alter.removes is not None:\n raise TrestleError('Alters not supported for removes.')\n if alter.adds is None:\n raise TrestleError('Alter has no adds to perform.')\n for add in alter.adds:\n if add.position is not None and add.position.name is 
not None and add.position.name != 'after':\n raise TrestleError('Alter position must be \"after\" or None.')\n control = self._catalog_interface.get_control(alter.control_id)\n if add.by_id is not None:\n self._add_to_parts(control, add.by_id, add.parts)\n self._catalog_interface.replace_control(control)\n continue\n if add.props is not None:\n if add.by_id is not None:\n TrestleError('Alter cannot add props by id.')\n if not control.props:\n control.props = []\n control.props.extend(add.props)\n continue\n TrestleError('Alter must either add parts or props')\n\n # use the param_dict to apply all modifys\n control_ids = self._catalog_interface.get_control_ids()\n for control_id in control_ids:\n control = self._catalog_interface.get_control(control_id)\n if control.parts is not None:\n for part in control.parts:\n self._replace_part_prose(control, part, param_dict)\n self._catalog_interface.replace_control(control)\n\n catalog = self._catalog_interface._catalog\n\n # update the original profile metadata with new contents\n # roles and responsible-parties will be pulled in with new uuid's\n new_metadata = self._profile.metadata\n new_metadata.title = f'{catalog.metadata.title}: Resolved by profile {self._profile.metadata.title}'\n links: List[common.Link] = []\n for import_ in self._profile.imports:\n links.append(common.Link(**{'href': import_.href, 'rel': 'resolution-source'}))\n new_metadata.links = links\n # move catalog controls from dummy group 'catalog' into the catalog\n if catalog.groups:\n for group in catalog.groups:\n if group.id == 'catalog':\n catalog.controls = group.controls\n catalog.groups = [group for group in catalog.groups if group.id != 'catalog']\n break\n\n catalog.metadata = new_metadata\n\n return catalog", "def supports_catalog(self):\n return False", "def test_update_hyperflex_app_catalog(self):\n pass", "def can_edit(self):\n return self._can_edit", "def can_modify_book_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy_template\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_modify_catalog_hierarchy()\n return True" ]
[ "0.76364976", "0.6954973", "0.67726964", "0.67226964", "0.6306999", "0.6272507", "0.62427044", "0.6209875", "0.6097616", "0.6068694", "0.6023836", "0.5938341", "0.5894968", "0.58353955", "0.5790709", "0.57722265", "0.5749628", "0.5745617", "0.57250977", "0.56373686", "0.5565934", "0.5541757", "0.5534115", "0.55307984", "0.55111307", "0.55099857", "0.5382307", "0.5374438", "0.5360702", "0.53552014" ]
0.882789
0
Pass through to provider CatalogAdminSession.get_catalog_form_for_update
def get_catalog_form_for_update(self, *args, **kwargs): # Implemented from kitosid template for - # osid.resource.BinAdminSession.get_bin_form_for_update return self._get_provider_session('catalog_admin_session').get_catalog_form_for_update(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_catalog_form(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.get_bin_form_for_update_template\n # This method might be a bit sketchy. Time will tell.\n if isinstance(args[-1], list) or 'catalog_record_types' in kwargs:\n return self.get_catalog_form_for_create(*args, **kwargs)\n else:\n return self.get_catalog_form_for_update(*args, **kwargs)", "def getEditForm( self ):\n return \"listc_edit\"", "def get_catalog_form_for_create(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.get_bin_form_for_create\n return self._get_provider_session('catalog_admin_session').get_catalog_form_for_create(*args, **kwargs)", "def render_update_form(request: Request, provider_id:UUID = Form(...)):\n provider_data = open_for_reading()\n if str(provider_id) in provider_data.keys():\n data_to_update = provider_data[str(provider_id)]\n\n return templates.TemplateResponse(\"updation_form.html\", {\n \"request\": request,\n \"provider_id\": provider_id,\n \"provider_data\": data_to_update})", "def get_context_data(self, **kwargs):\n if 'form' not in kwargs:\n kwargs['form'] = self.get_form(self.form_class)\n return super(OrganizerDataSetUpdate, self).get_context_data(**kwargs)", "def edit(self, *args, **kw):\n\t\t\ttmpl_context.widget = self.edit_form\n\t\t\tpks \t\t= self.provider.get_primary_fields(self.model)\n\t\t\tkw \t\t\t= {}\n\n\t\t\tfor i, pk in enumerate(pks):\n\t\t\t\tkw[pk] \t\t= args[i]\n\n\t\t\tvalue \t\t= self.edit_filler.get_value(kw)\n\t\t\tvalue['_method'] \t= 'PUT'\n\n\t\t\treturn dict(value = value, model = self.model.__name__, pk_count = len(pks))", "def edit(self, *args, **kw):\n tmpl_context.widget = self.edit_form\n #pks = self.provider.get_primary_fields(self.model)\n \n log.debug(\"soyRomperLB= %s\" %kw)\n\n ###########################################\n pks = self.provider.get_primary_fields(self.model)\n \n ###########################################\n kw = {}\n for i, pk in enumerate(pks):\n kw[pk] = args[i]\n value = self.edit_filler.get_value(kw)\n value['_method'] = 'PUT'\n return dict(value=value, model=self.model.__name__, pk_count=len(pks))", "def get_book_form_for_update(self, book_id):\n # Implemented from template for\n # osid.resource.BinAdminSession.get_bin_form_for_update_template\n if self._catalog_session is not None:\n return self._catalog_session.get_catalog_form_for_update(catalog_id=book_id)\n collection = JSONClientValidated('commenting',\n collection='Book',\n runtime=self._runtime)\n if not isinstance(book_id, ABCId):\n raise errors.InvalidArgument('the argument is not a valid OSID Id')\n result = collection.find_one({'_id': ObjectId(book_id.get_identifier())})\n\n cat_form = objects.BookForm(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)\n self._forms[cat_form.get_id().get_identifier()] = not UPDATED\n\n return cat_form", "def get_form(self, *args, **kwargs):\n form = super(ExpenseUpdateView, self).get_form(*args, **kwargs)\n # Only include Accounts in the Account dropdown that are associated with the current user\n form.fields['account'].queryset = Account.objects.filter(owner=self.request.user)\n return form", "def edit_form():\n return template (\"edit\")", "def get_form(self):\n return self.form", "def get_form(self):\n return self.form", "def save_form(self, request, form, change):\n OwnableAdmin.save_form(self, request, form, change)\n return DisplayableAdmin.save_form(self, request, form, change)", "def save_form(self, 
request, form, change):\n OwnableAdmin.save_form(self, request, form, change)\n return DisplayableAdmin.save_form(self, request, form, change)", "def save_form(self, request, form, change):\n OwnableAdmin.save_form(self, request, form, change)\n return DisplayableAdmin.save_form(self, request, form, change)", "def edit(self, *args, **kw):\n tmpl_context.widget = self.edit_form\n pks = self.provider.get_primary_fields(self.model)\n kw = {}\n for i, pk in enumerate(pks):\n kw[pk] = args[i]\n\n value = self.edit_filler.get_value(kw)\n value['_method'] = 'PUT'\n \n return dict(value=value, model=self.model.__name__, pk_count=len(pks))", "def get_edit_form(self, data):\n self.add_success(data)\n rv = self.get((data[self.id_field], self.edit_url))\n assert not is_404(rv)\n assert in_response(rv, 'Edit {}'.format(data[self.name_field]))\n for field, name in self.fields:\n assert in_response(rv, name)\n return rv", "def show_edit_form(self, obj_pk=None):\n obj = self.model.objects.get(pk=obj_pk)\n # if there is no edit permission then does not show the form\n if not self.has_view_permissions(obj): return\n\n\n # create the edit form a add it to the empty widget details\n # override the function hide_form to make sure the list is shown after the user close the edition form\n params = {\n 'title':'Edit',\n 'model':self.model,\n 'pk':obj.pk,\n 'parent_model':self.parent_model,\n 'parent_pk':self.parent_pk,\n 'parent_win': self\n }\n\n if self.INLINES: params.update({'inlines': self.INLINES} )\n if self.FIELDSETS: params.update({'fieldsets':self.FIELDSETS})\n if self.READ_ONLY: params.update({'readonly': self.READ_ONLY})\n\n editmodel_class = self.get_editmodel_class(obj)\n editform = editmodel_class(**params)\n\n if hasattr(self, '_details') and self.USE_DETAILS_TO_EDIT:\n self._details.value = editform\n self._list.hide()\n self._details.show()\n\n # only if the button exists:\n toolbar = [self.toolbar] if isinstance(self.toolbar, str) else self.toolbar\n if toolbar:\n for o in toolbar:\n if o and hasattr(self, o): getattr(self, o).hide()\n\n else:\n self._list.show()\n if hasattr(self, '_details'):\n self._details.hide()", "def _form(self, r, widget, **attr):\n\n widget_get = widget.get\n\n label = widget_get(\"label\", \"\")\n # Activate if-required\n #if label and isinstance(label, str):\n if label:\n label = current.T(label)\n icon = widget_get(\"icon\", \"\")\n if icon:\n icon = ICON(icon)\n\n context = widget_get(\"context\", None)\n tablename = widget_get(\"tablename\", None)\n resource, context = self._resolve_context(r, tablename, context)\n\n # Widget filter option\n widget_filter = widget_get(\"filter\", None)\n if widget_filter:\n resource.add_filter(widget_filter)\n\n record = resource.select([\"id\"], limit=1, as_rows=True).first()\n if record:\n record_id = record.id\n else:\n record_id = None\n\n if record_id:\n readonly = not current.auth.s3_has_permission(\"update\", tablename, record_id)\n else:\n readonly = not current.auth.s3_has_permission(\"create\", tablename)\n\n sqlform = widget.get(\"sqlform\", None)\n if not sqlform:\n sqlform = resource.get_config(\"crud_form\")\n if not sqlform:\n from ..ui import S3SQLDefaultForm\n sqlform = S3SQLDefaultForm()\n\n get_config = current.s3db.get_config\n if record_id:\n # Update form\n onvalidation = get_config(tablename, \"create_onvalidation\") or \\\n get_config(tablename, \"onvalidation\")\n onaccept = get_config(tablename, \"create_onaccept\") or \\\n get_config(tablename, \"onaccept\")\n else:\n # Create form\n onvalidation = 
get_config(tablename, \"create_onvalidation\") or \\\n get_config(tablename, \"onvalidation\")\n onaccept = get_config(tablename, \"create_onaccept\") or \\\n get_config(tablename, \"onaccept\")\n\n form = sqlform(request = r,\n resource = resource,\n record_id = record_id,\n readonly = readonly,\n format = \"html\",\n onvalidation = onvalidation,\n onaccept = onaccept,\n )\n _class = self._lookup_class(r, widget)\n\n # Render the widget\n output = DIV(H4(icon,\n label,\n _class = \"profile-sub-header\",\n ),\n DIV(form,\n _class = \"form-container thumbnail\",\n ),\n _class = _class,\n )\n\n return output", "def form(self):\n\t\treturn self._form", "def forms(self):\n edit = EquipmentChownForm\n return {\n 'edit': edit,\n }", "def _changeform_view(self, request, object_id, form_url, extra_context):\n to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR))\n if to_field and not self.to_field_allowed(request, to_field):\n raise DisallowedModelAdminToField(\"The field %s cannot be referenced.\" % to_field)\n\n model = self.model\n opts = model._meta\n\n if request.method == 'POST' and '_saveasnew' in request.POST:\n object_id = None\n\n add = object_id is None\n\n if add:\n if not self.has_add_permission(request):\n raise PermissionDenied\n obj = None\n\n else:\n obj = self.get_object(request, unquote(object_id), to_field)\n\n if (\n (not self.has_module_permission(request))\n and (not self.has_change_permission(request, obj))):\n raise PermissionDenied\n\n if obj is None:\n return self._get_obj_does_not_exist_redirect(request, opts, object_id)\n\n ModelForm = self.get_form(request, obj)\n if request.method == 'POST':\n form = ModelForm(request.POST, request.FILES, instance=obj)\n if form.is_valid():\n form_validated = True\n new_object = self.save_form(request, form, change=not add)\n else:\n form_validated = False\n new_object = form.instance\n formsets, inline_instances = self._create_formsets(request, new_object, change=not add)\n if all_valid(formsets) and form_validated:\n response = self.do_saving(\n request=request, new_object=new_object, form=form, formsets=formsets, add=add)\n if response:\n return response\n else:\n form_validated = False\n else:\n form_validated = False\n else:\n if add:\n initial = self.get_changeform_initial_data(request)\n form = ModelForm(initial=initial)\n formsets, inline_instances = self._create_formsets(\n request, form.instance, change=False)\n self_formsets, self_inline_instances = self._create_self_formsets(\n request, form.instance, change=False)\n else:\n form = ModelForm(instance=obj)\n formsets, inline_instances = self._create_formsets(request, obj, change=True)\n self_formsets, self_inline_instances = self._create_self_formsets(\n request, obj, change=True)\n\n adminForm = helpers.AdminForm(\n form,\n list(self.get_fieldsets(request, obj)),\n self.get_prepopulated_fields(request, obj),\n self.get_readonly_fields(request, obj),\n model_admin=self)\n media = self.media + adminForm.media\n\n inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj)\n for inline_formset in inline_formsets:\n media = media + inline_formset.media\n\n context = dict(\n self.admin_site.each_context(request),\n title=(_('Add %s') if add else _('Change %s')) % force_text(opts.verbose_name),\n adminform=adminForm,\n object_id=object_id,\n original=obj,\n is_popup=(IS_POPUP_VAR in request.POST or\n IS_POPUP_VAR in request.GET),\n to_field=to_field,\n media=media,\n inline_admin_formsets=inline_formsets,\n 
errors=helpers.AdminErrorList(form, formsets),\n preserved_filters=self.get_preserved_filters(request),\n )\n\n # Hide the \"Save\" and \"Save and continue\" buttons if \"Save as New\" was\n # previously chosen to prevent the interface from getting confusing.\n if (self.readonly_model or (\n request.method == 'POST' and not form_validated and \"_saveasnew\" in request.POST)):\n context['show_save'] = False\n context['show_save_and_continue'] = False\n # Use the change template instead of the add template.\n add = False\n\n context.update(extra_context or {})\n\n return self.render_change_form(\n request, context, add=add, change=not add, obj=obj, form_url=form_url)", "def get_comment_form_for_update(self, comment_id):\n # Implemented from template for\n # osid.resource.ResourceAdminSession.get_resource_form_for_update_template\n collection = JSONClientValidated('commenting',\n collection='Comment',\n runtime=self._runtime)\n if not isinstance(comment_id, ABCId):\n raise errors.InvalidArgument('the argument is not a valid OSID Id')\n if (comment_id.get_identifier_namespace() != 'commenting.Comment' or\n comment_id.get_authority() != self._authority):\n raise errors.InvalidArgument()\n result = collection.find_one({'_id': ObjectId(comment_id.get_identifier())})\n\n obj_form = objects.CommentForm(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)\n self._forms[obj_form.get_id().get_identifier()] = not UPDATED\n\n return obj_form", "def get_change_form_context(self, context):\n form = context['form']\n request = self.request\n model = self.admin.model\n opts = model._meta\n app_label = opts.app_label\n obj = context.get('object', None)\n form_url = self.get_form_url(context)\n\n view_on_site_url = self.admin.get_view_on_site_url(obj)\n fieldsets = self.get_fieldsets()\n formsets, inline_instances = self.admin._create_formsets(\n request, obj, change=not self.hide_inline_formsets\n )\n readonly_fields = self.get_readonly_fields()\n admin_form = admin_helpers.AdminForm(\n form,\n list(fieldsets),\n self.admin.get_prepopulated_fields(request, obj),\n readonly_fields,\n model_admin=self.admin,\n )\n media = self.admin.media + admin_form.media\n\n # The inline formset code is copied from django's code. 
It has\n # not been used in practice yet and has no tests\n inline_formsets = self.admin.get_inline_formsets(\n request, formsets, inline_instances, obj\n )\n for inline_formset in inline_formsets: # pragma: no cover\n media = media + inline_formset.media\n\n has_editable_inline_admin_formsets = True if inline_formsets else False\n has_file_field = admin_form.form.is_multipart() or any(\n admin_formset.formset.is_multipart()\n for admin_formset in inline_formsets\n )\n\n # The admin admin also sets this variable\n request.current_app = self.admin.admin_site.name\n\n return {\n **self.admin.admin_site.each_context(request),\n 'title': self.display_name,\n 'adminform': admin_form,\n 'original': obj,\n 'is_popup': False,\n 'to_field': None,\n 'media': media,\n 'inline_admin_formsets': inline_formsets,\n 'errors': admin_helpers.AdminErrorList(form, formsets),\n 'preserved_filters': self.admin.get_preserved_filters(request),\n 'add': False,\n 'change': bool(obj),\n 'has_view_permission': self.admin.has_view_permission(\n request, obj\n ),\n 'has_add_permission': self.admin.has_add_permission(request),\n 'has_change_permission': self.admin.has_change_permission(\n request, obj\n ),\n 'has_delete_permission': self.admin.has_delete_permission(\n request, obj\n ),\n 'has_editable_inline_admin_formsets': (\n has_editable_inline_admin_formsets\n ),\n 'has_file_field': has_file_field,\n 'has_absolute_url': view_on_site_url is not None,\n 'absolute_url': view_on_site_url,\n 'form_url': form_url,\n 'opts': opts,\n 'content_type_id': (\n admin_options.get_content_type_for_model(self.admin.model).pk\n ),\n 'save_as': self.save_as,\n 'save_on_top': self.save_on_top,\n 'to_field_var': admin_options.TO_FIELD_VAR,\n 'is_popup_var': admin_options.IS_POPUP_VAR,\n 'app_label': app_label,\n 'hide_object_tools': self.hide_object_tools,\n }", "def save_catalog(self, catalog_form, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.update_bin\n if catalog_form.is_for_update():\n return self.update_catalog(catalog_form, *args, **kwargs)\n else:\n return self.create_catalog(catalog_form, *args, **kwargs)", "def get_family_form_for_update(self, family_id):\n # Implemented from template for\n # osid.resource.BinAdminSession.get_bin_form_for_update_template\n if self._catalog_session is not None:\n return self._catalog_session.get_catalog_form_for_update(catalog_id=family_id)\n collection = JSONClientValidated('relationship',\n collection='Family',\n runtime=self._runtime)\n if not isinstance(family_id, ABCId):\n raise errors.InvalidArgument('the argument is not a valid OSID Id')\n result = collection.find_one({'_id': ObjectId(family_id.get_identifier())})\n\n cat_form = objects.FamilyForm(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)\n self._forms[cat_form.get_id().get_identifier()] = not UPDATED\n\n return cat_form", "def __init__(self, *args, **kwargs):\n super(EnterpriseCustomerAdminForm, self).__init__(*args, **kwargs)\n\n self.fields['catalog'] = forms.ChoiceField(\n choices=self.get_catalog_options(),\n required=False,\n help_text=\"<a id='catalog-details-link' href='#' target='_blank'\"\n \"data-url-template='{catalog_admin_url}'> View catalog details.</a>\".format(\n catalog_admin_url=utils.get_catalog_admin_url_template(),\n )\n )", "def edit_form(db_id):\r\n\r\n if request.args['collection'] == 'recipe':\r\n # initializes page title and header\r\n page_title = 'Update recipe'\r\n page_header = 'Update a recipe:'\r\n\r\n # returns the edit recipe 
template\r\n return render_template(\r\n 'edit_form.html',\r\n collection=mongo.db.recipe_categories.find().sort('name'),\r\n recipe=mongo.db.recipes.find_one({'_id': ObjectId(db_id)}),\r\n categories=mongo.db.appliance_categories.find().sort('name'),\r\n page_title=page_title,\r\n page_header=page_header\r\n )\r\n\r\n elif request.args['collection'] == 'recipe_category':\r\n # initializes page title and header\r\n page_title = 'Update recipe category'\r\n page_header = 'Update a recipe category:'\r\n\r\n # returns the edit recipe category template\r\n return render_template(\r\n 'edit_form.html',\r\n recipe_category=mongo.db.recipe_categories.find_one(\r\n {'_id': ObjectId(db_id)}),\r\n page_title=page_title,\r\n page_header=page_header\r\n )\r\n\r\n else:\r\n # returns an error message on incorrect argument\r\n return render_template(\r\n 'error.html',\r\n msg='Bad argument error! (/edit_form)'\r\n )", "def get_context_data(self, **kwargs):\n\n context = super(AIUpdateView, self).get_context_data(**kwargs)\n context['import_form'] = ImportAIForm\n return context", "def form(self):\n if getattr(self, '_form', None) is not None:\n return self._form\n\n add_view = queryMultiAdapter((self.context, self.request, self.fti),\n name=self.fti.factory)\n if add_view is None:\n add_view = queryMultiAdapter((self.context, self.request,\n self.fti))\n\n self._form = self._wrap_form(add_view.form)\n\n return self._form" ]
[ "0.7162163", "0.6573394", "0.63171417", "0.6288806", "0.61876136", "0.60886014", "0.60630125", "0.6054842", "0.5957297", "0.5948865", "0.59461755", "0.59461755", "0.58364516", "0.58364516", "0.58364516", "0.5799192", "0.57846195", "0.57535666", "0.57506627", "0.57150495", "0.56711674", "0.5645015", "0.56422806", "0.5635486", "0.55851066", "0.553202", "0.5459378", "0.54555976", "0.5425326", "0.5424775" ]
0.8787373
0
Pass through to provider CatalogAdminSession.get_catalog_form_for_update
def get_catalog_form(self, *args, **kwargs): # Implemented from kitosid template for - # osid.resource.BinAdminSession.get_bin_form_for_update_template # This method might be a bit sketchy. Time will tell. if isinstance(args[-1], list) or 'catalog_record_types' in kwargs: return self.get_catalog_form_for_create(*args, **kwargs) else: return self.get_catalog_form_for_update(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_catalog_form_for_update(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.get_bin_form_for_update\n return self._get_provider_session('catalog_admin_session').get_catalog_form_for_update(*args, **kwargs)", "def getEditForm( self ):\n return \"listc_edit\"", "def get_catalog_form_for_create(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.get_bin_form_for_create\n return self._get_provider_session('catalog_admin_session').get_catalog_form_for_create(*args, **kwargs)", "def render_update_form(request: Request, provider_id:UUID = Form(...)):\n provider_data = open_for_reading()\n if str(provider_id) in provider_data.keys():\n data_to_update = provider_data[str(provider_id)]\n\n return templates.TemplateResponse(\"updation_form.html\", {\n \"request\": request,\n \"provider_id\": provider_id,\n \"provider_data\": data_to_update})", "def get_context_data(self, **kwargs):\n if 'form' not in kwargs:\n kwargs['form'] = self.get_form(self.form_class)\n return super(OrganizerDataSetUpdate, self).get_context_data(**kwargs)", "def edit(self, *args, **kw):\n\t\t\ttmpl_context.widget = self.edit_form\n\t\t\tpks \t\t= self.provider.get_primary_fields(self.model)\n\t\t\tkw \t\t\t= {}\n\n\t\t\tfor i, pk in enumerate(pks):\n\t\t\t\tkw[pk] \t\t= args[i]\n\n\t\t\tvalue \t\t= self.edit_filler.get_value(kw)\n\t\t\tvalue['_method'] \t= 'PUT'\n\n\t\t\treturn dict(value = value, model = self.model.__name__, pk_count = len(pks))", "def edit(self, *args, **kw):\n tmpl_context.widget = self.edit_form\n #pks = self.provider.get_primary_fields(self.model)\n \n log.debug(\"soyRomperLB= %s\" %kw)\n\n ###########################################\n pks = self.provider.get_primary_fields(self.model)\n \n ###########################################\n kw = {}\n for i, pk in enumerate(pks):\n kw[pk] = args[i]\n value = self.edit_filler.get_value(kw)\n value['_method'] = 'PUT'\n return dict(value=value, model=self.model.__name__, pk_count=len(pks))", "def get_book_form_for_update(self, book_id):\n # Implemented from template for\n # osid.resource.BinAdminSession.get_bin_form_for_update_template\n if self._catalog_session is not None:\n return self._catalog_session.get_catalog_form_for_update(catalog_id=book_id)\n collection = JSONClientValidated('commenting',\n collection='Book',\n runtime=self._runtime)\n if not isinstance(book_id, ABCId):\n raise errors.InvalidArgument('the argument is not a valid OSID Id')\n result = collection.find_one({'_id': ObjectId(book_id.get_identifier())})\n\n cat_form = objects.BookForm(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)\n self._forms[cat_form.get_id().get_identifier()] = not UPDATED\n\n return cat_form", "def get_form(self, *args, **kwargs):\n form = super(ExpenseUpdateView, self).get_form(*args, **kwargs)\n # Only include Accounts in the Account dropdown that are associated with the current user\n form.fields['account'].queryset = Account.objects.filter(owner=self.request.user)\n return form", "def edit_form():\n return template (\"edit\")", "def get_form(self):\n return self.form", "def get_form(self):\n return self.form", "def save_form(self, request, form, change):\n OwnableAdmin.save_form(self, request, form, change)\n return DisplayableAdmin.save_form(self, request, form, change)", "def save_form(self, request, form, change):\n OwnableAdmin.save_form(self, request, form, change)\n return DisplayableAdmin.save_form(self, request, form, change)", "def 
save_form(self, request, form, change):\n OwnableAdmin.save_form(self, request, form, change)\n return DisplayableAdmin.save_form(self, request, form, change)", "def edit(self, *args, **kw):\n tmpl_context.widget = self.edit_form\n pks = self.provider.get_primary_fields(self.model)\n kw = {}\n for i, pk in enumerate(pks):\n kw[pk] = args[i]\n\n value = self.edit_filler.get_value(kw)\n value['_method'] = 'PUT'\n \n return dict(value=value, model=self.model.__name__, pk_count=len(pks))", "def get_edit_form(self, data):\n self.add_success(data)\n rv = self.get((data[self.id_field], self.edit_url))\n assert not is_404(rv)\n assert in_response(rv, 'Edit {}'.format(data[self.name_field]))\n for field, name in self.fields:\n assert in_response(rv, name)\n return rv", "def show_edit_form(self, obj_pk=None):\n obj = self.model.objects.get(pk=obj_pk)\n # if there is no edit permission then does not show the form\n if not self.has_view_permissions(obj): return\n\n\n # create the edit form a add it to the empty widget details\n # override the function hide_form to make sure the list is shown after the user close the edition form\n params = {\n 'title':'Edit',\n 'model':self.model,\n 'pk':obj.pk,\n 'parent_model':self.parent_model,\n 'parent_pk':self.parent_pk,\n 'parent_win': self\n }\n\n if self.INLINES: params.update({'inlines': self.INLINES} )\n if self.FIELDSETS: params.update({'fieldsets':self.FIELDSETS})\n if self.READ_ONLY: params.update({'readonly': self.READ_ONLY})\n\n editmodel_class = self.get_editmodel_class(obj)\n editform = editmodel_class(**params)\n\n if hasattr(self, '_details') and self.USE_DETAILS_TO_EDIT:\n self._details.value = editform\n self._list.hide()\n self._details.show()\n\n # only if the button exists:\n toolbar = [self.toolbar] if isinstance(self.toolbar, str) else self.toolbar\n if toolbar:\n for o in toolbar:\n if o and hasattr(self, o): getattr(self, o).hide()\n\n else:\n self._list.show()\n if hasattr(self, '_details'):\n self._details.hide()", "def _form(self, r, widget, **attr):\n\n widget_get = widget.get\n\n label = widget_get(\"label\", \"\")\n # Activate if-required\n #if label and isinstance(label, str):\n if label:\n label = current.T(label)\n icon = widget_get(\"icon\", \"\")\n if icon:\n icon = ICON(icon)\n\n context = widget_get(\"context\", None)\n tablename = widget_get(\"tablename\", None)\n resource, context = self._resolve_context(r, tablename, context)\n\n # Widget filter option\n widget_filter = widget_get(\"filter\", None)\n if widget_filter:\n resource.add_filter(widget_filter)\n\n record = resource.select([\"id\"], limit=1, as_rows=True).first()\n if record:\n record_id = record.id\n else:\n record_id = None\n\n if record_id:\n readonly = not current.auth.s3_has_permission(\"update\", tablename, record_id)\n else:\n readonly = not current.auth.s3_has_permission(\"create\", tablename)\n\n sqlform = widget.get(\"sqlform\", None)\n if not sqlform:\n sqlform = resource.get_config(\"crud_form\")\n if not sqlform:\n from ..ui import S3SQLDefaultForm\n sqlform = S3SQLDefaultForm()\n\n get_config = current.s3db.get_config\n if record_id:\n # Update form\n onvalidation = get_config(tablename, \"create_onvalidation\") or \\\n get_config(tablename, \"onvalidation\")\n onaccept = get_config(tablename, \"create_onaccept\") or \\\n get_config(tablename, \"onaccept\")\n else:\n # Create form\n onvalidation = get_config(tablename, \"create_onvalidation\") or \\\n get_config(tablename, \"onvalidation\")\n onaccept = get_config(tablename, \"create_onaccept\") or 
\\\n get_config(tablename, \"onaccept\")\n\n form = sqlform(request = r,\n resource = resource,\n record_id = record_id,\n readonly = readonly,\n format = \"html\",\n onvalidation = onvalidation,\n onaccept = onaccept,\n )\n _class = self._lookup_class(r, widget)\n\n # Render the widget\n output = DIV(H4(icon,\n label,\n _class = \"profile-sub-header\",\n ),\n DIV(form,\n _class = \"form-container thumbnail\",\n ),\n _class = _class,\n )\n\n return output", "def form(self):\n\t\treturn self._form", "def forms(self):\n edit = EquipmentChownForm\n return {\n 'edit': edit,\n }", "def _changeform_view(self, request, object_id, form_url, extra_context):\n to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR))\n if to_field and not self.to_field_allowed(request, to_field):\n raise DisallowedModelAdminToField(\"The field %s cannot be referenced.\" % to_field)\n\n model = self.model\n opts = model._meta\n\n if request.method == 'POST' and '_saveasnew' in request.POST:\n object_id = None\n\n add = object_id is None\n\n if add:\n if not self.has_add_permission(request):\n raise PermissionDenied\n obj = None\n\n else:\n obj = self.get_object(request, unquote(object_id), to_field)\n\n if (\n (not self.has_module_permission(request))\n and (not self.has_change_permission(request, obj))):\n raise PermissionDenied\n\n if obj is None:\n return self._get_obj_does_not_exist_redirect(request, opts, object_id)\n\n ModelForm = self.get_form(request, obj)\n if request.method == 'POST':\n form = ModelForm(request.POST, request.FILES, instance=obj)\n if form.is_valid():\n form_validated = True\n new_object = self.save_form(request, form, change=not add)\n else:\n form_validated = False\n new_object = form.instance\n formsets, inline_instances = self._create_formsets(request, new_object, change=not add)\n if all_valid(formsets) and form_validated:\n response = self.do_saving(\n request=request, new_object=new_object, form=form, formsets=formsets, add=add)\n if response:\n return response\n else:\n form_validated = False\n else:\n form_validated = False\n else:\n if add:\n initial = self.get_changeform_initial_data(request)\n form = ModelForm(initial=initial)\n formsets, inline_instances = self._create_formsets(\n request, form.instance, change=False)\n self_formsets, self_inline_instances = self._create_self_formsets(\n request, form.instance, change=False)\n else:\n form = ModelForm(instance=obj)\n formsets, inline_instances = self._create_formsets(request, obj, change=True)\n self_formsets, self_inline_instances = self._create_self_formsets(\n request, obj, change=True)\n\n adminForm = helpers.AdminForm(\n form,\n list(self.get_fieldsets(request, obj)),\n self.get_prepopulated_fields(request, obj),\n self.get_readonly_fields(request, obj),\n model_admin=self)\n media = self.media + adminForm.media\n\n inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj)\n for inline_formset in inline_formsets:\n media = media + inline_formset.media\n\n context = dict(\n self.admin_site.each_context(request),\n title=(_('Add %s') if add else _('Change %s')) % force_text(opts.verbose_name),\n adminform=adminForm,\n object_id=object_id,\n original=obj,\n is_popup=(IS_POPUP_VAR in request.POST or\n IS_POPUP_VAR in request.GET),\n to_field=to_field,\n media=media,\n inline_admin_formsets=inline_formsets,\n errors=helpers.AdminErrorList(form, formsets),\n preserved_filters=self.get_preserved_filters(request),\n )\n\n # Hide the \"Save\" and \"Save and continue\" buttons if \"Save as 
New\" was\n # previously chosen to prevent the interface from getting confusing.\n if (self.readonly_model or (\n request.method == 'POST' and not form_validated and \"_saveasnew\" in request.POST)):\n context['show_save'] = False\n context['show_save_and_continue'] = False\n # Use the change template instead of the add template.\n add = False\n\n context.update(extra_context or {})\n\n return self.render_change_form(\n request, context, add=add, change=not add, obj=obj, form_url=form_url)", "def get_comment_form_for_update(self, comment_id):\n # Implemented from template for\n # osid.resource.ResourceAdminSession.get_resource_form_for_update_template\n collection = JSONClientValidated('commenting',\n collection='Comment',\n runtime=self._runtime)\n if not isinstance(comment_id, ABCId):\n raise errors.InvalidArgument('the argument is not a valid OSID Id')\n if (comment_id.get_identifier_namespace() != 'commenting.Comment' or\n comment_id.get_authority() != self._authority):\n raise errors.InvalidArgument()\n result = collection.find_one({'_id': ObjectId(comment_id.get_identifier())})\n\n obj_form = objects.CommentForm(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)\n self._forms[obj_form.get_id().get_identifier()] = not UPDATED\n\n return obj_form", "def get_change_form_context(self, context):\n form = context['form']\n request = self.request\n model = self.admin.model\n opts = model._meta\n app_label = opts.app_label\n obj = context.get('object', None)\n form_url = self.get_form_url(context)\n\n view_on_site_url = self.admin.get_view_on_site_url(obj)\n fieldsets = self.get_fieldsets()\n formsets, inline_instances = self.admin._create_formsets(\n request, obj, change=not self.hide_inline_formsets\n )\n readonly_fields = self.get_readonly_fields()\n admin_form = admin_helpers.AdminForm(\n form,\n list(fieldsets),\n self.admin.get_prepopulated_fields(request, obj),\n readonly_fields,\n model_admin=self.admin,\n )\n media = self.admin.media + admin_form.media\n\n # The inline formset code is copied from django's code. 
It has\n # not been used in practice yet and has no tests\n inline_formsets = self.admin.get_inline_formsets(\n request, formsets, inline_instances, obj\n )\n for inline_formset in inline_formsets: # pragma: no cover\n media = media + inline_formset.media\n\n has_editable_inline_admin_formsets = True if inline_formsets else False\n has_file_field = admin_form.form.is_multipart() or any(\n admin_formset.formset.is_multipart()\n for admin_formset in inline_formsets\n )\n\n # The admin admin also sets this variable\n request.current_app = self.admin.admin_site.name\n\n return {\n **self.admin.admin_site.each_context(request),\n 'title': self.display_name,\n 'adminform': admin_form,\n 'original': obj,\n 'is_popup': False,\n 'to_field': None,\n 'media': media,\n 'inline_admin_formsets': inline_formsets,\n 'errors': admin_helpers.AdminErrorList(form, formsets),\n 'preserved_filters': self.admin.get_preserved_filters(request),\n 'add': False,\n 'change': bool(obj),\n 'has_view_permission': self.admin.has_view_permission(\n request, obj\n ),\n 'has_add_permission': self.admin.has_add_permission(request),\n 'has_change_permission': self.admin.has_change_permission(\n request, obj\n ),\n 'has_delete_permission': self.admin.has_delete_permission(\n request, obj\n ),\n 'has_editable_inline_admin_formsets': (\n has_editable_inline_admin_formsets\n ),\n 'has_file_field': has_file_field,\n 'has_absolute_url': view_on_site_url is not None,\n 'absolute_url': view_on_site_url,\n 'form_url': form_url,\n 'opts': opts,\n 'content_type_id': (\n admin_options.get_content_type_for_model(self.admin.model).pk\n ),\n 'save_as': self.save_as,\n 'save_on_top': self.save_on_top,\n 'to_field_var': admin_options.TO_FIELD_VAR,\n 'is_popup_var': admin_options.IS_POPUP_VAR,\n 'app_label': app_label,\n 'hide_object_tools': self.hide_object_tools,\n }", "def save_catalog(self, catalog_form, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.update_bin\n if catalog_form.is_for_update():\n return self.update_catalog(catalog_form, *args, **kwargs)\n else:\n return self.create_catalog(catalog_form, *args, **kwargs)", "def get_family_form_for_update(self, family_id):\n # Implemented from template for\n # osid.resource.BinAdminSession.get_bin_form_for_update_template\n if self._catalog_session is not None:\n return self._catalog_session.get_catalog_form_for_update(catalog_id=family_id)\n collection = JSONClientValidated('relationship',\n collection='Family',\n runtime=self._runtime)\n if not isinstance(family_id, ABCId):\n raise errors.InvalidArgument('the argument is not a valid OSID Id')\n result = collection.find_one({'_id': ObjectId(family_id.get_identifier())})\n\n cat_form = objects.FamilyForm(osid_object_map=result, runtime=self._runtime, proxy=self._proxy)\n self._forms[cat_form.get_id().get_identifier()] = not UPDATED\n\n return cat_form", "def __init__(self, *args, **kwargs):\n super(EnterpriseCustomerAdminForm, self).__init__(*args, **kwargs)\n\n self.fields['catalog'] = forms.ChoiceField(\n choices=self.get_catalog_options(),\n required=False,\n help_text=\"<a id='catalog-details-link' href='#' target='_blank'\"\n \"data-url-template='{catalog_admin_url}'> View catalog details.</a>\".format(\n catalog_admin_url=utils.get_catalog_admin_url_template(),\n )\n )", "def edit_form(db_id):\r\n\r\n if request.args['collection'] == 'recipe':\r\n # initializes page title and header\r\n page_title = 'Update recipe'\r\n page_header = 'Update a recipe:'\r\n\r\n # returns the edit recipe 
template\r\n return render_template(\r\n 'edit_form.html',\r\n collection=mongo.db.recipe_categories.find().sort('name'),\r\n recipe=mongo.db.recipes.find_one({'_id': ObjectId(db_id)}),\r\n categories=mongo.db.appliance_categories.find().sort('name'),\r\n page_title=page_title,\r\n page_header=page_header\r\n )\r\n\r\n elif request.args['collection'] == 'recipe_category':\r\n # initializes page title and header\r\n page_title = 'Update recipe category'\r\n page_header = 'Update a recipe category:'\r\n\r\n # returns the edit recipe category template\r\n return render_template(\r\n 'edit_form.html',\r\n recipe_category=mongo.db.recipe_categories.find_one(\r\n {'_id': ObjectId(db_id)}),\r\n page_title=page_title,\r\n page_header=page_header\r\n )\r\n\r\n else:\r\n # returns an error message on incorrect argument\r\n return render_template(\r\n 'error.html',\r\n msg='Bad argument error! (/edit_form)'\r\n )", "def get_context_data(self, **kwargs):\n\n context = super(AIUpdateView, self).get_context_data(**kwargs)\n context['import_form'] = ImportAIForm\n return context", "def form(self):\n if getattr(self, '_form', None) is not None:\n return self._form\n\n add_view = queryMultiAdapter((self.context, self.request, self.fti),\n name=self.fti.factory)\n if add_view is None:\n add_view = queryMultiAdapter((self.context, self.request,\n self.fti))\n\n self._form = self._wrap_form(add_view.form)\n\n return self._form" ]
[ "0.8787373", "0.6573394", "0.63171417", "0.6288806", "0.61876136", "0.60886014", "0.60630125", "0.6054842", "0.5957297", "0.5948865", "0.59461755", "0.59461755", "0.58364516", "0.58364516", "0.58364516", "0.5799192", "0.57846195", "0.57535666", "0.57506627", "0.57150495", "0.56711674", "0.5645015", "0.56422806", "0.5635486", "0.55851066", "0.553202", "0.5459378", "0.54555976", "0.5425326", "0.5424775" ]
0.7162163
1
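
The record above (and several of its negatives) centers on the kitosid "pass-through" delegation pattern: a service-layer method that does nothing but forward to a named provider session. A minimal, self-contained sketch follows; MockProviderSession and CatalogService are hypothetical stand-ins invented for illustration, not part of any real OSID runtime.

# Sketch of the pass-through delegation pattern, under the assumption that a
# provider-session factory is registered under 'catalog_admin_session'.

class MockProviderSession:
    """Stands in for an osid.resource.BinAdminSession-style provider session."""
    def get_catalog_form_for_update(self, catalog_id):
        return {'catalog_id': catalog_id, 'is_for_update': True}

class CatalogService:
    """Service-layer facade that lazily caches provider sessions by name."""
    def __init__(self, provider_sessions):
        self._provider_sessions = provider_sessions   # name -> session factory
        self._session_cache = {}

    def _get_provider_session(self, name):
        # Create each provider session once and reuse it on later calls.
        if name not in self._session_cache:
            self._session_cache[name] = self._provider_sessions[name]()
        return self._session_cache[name]

    def get_catalog_form_for_update(self, *args, **kwargs):
        # Pure pass-through: no service-level logic, just delegation.
        return self._get_provider_session(
            'catalog_admin_session').get_catalog_form_for_update(*args, **kwargs)

service = CatalogService({'catalog_admin_session': MockProviderSession})
print(service.get_catalog_form_for_update('catalog-123'))
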
Pass through to provider CatalogAdminSession.update_catalog
def update_catalog(self, *args, **kwargs): # Implemented from kitosid template for - # osid.resource.BinAdminSession.update_bin # OSID spec does not require returning updated catalog return Catalog( self._provider_manager, self._get_provider_session('catalog_admin_session').update_catalog(*args, **kwargs), self._runtime, self._proxy)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_catalog(self, catalog_form, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.update_bin\n if catalog_form.is_for_update():\n return self.update_catalog(catalog_form, *args, **kwargs)\n else:\n return self.create_catalog(catalog_form, *args, **kwargs)", "def update(self, catalog: Metadata, action: str):\n self._insert_request(self.update_queue, catalog, action)", "def can_update_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_update_bins\n return self._get_provider_session('catalog_admin_session').can_update_catalogs()", "def update_catalog(self, old_catalog_name, new_catalog_name, description):\n if self.resource is None:\n self.resource = self.client.get_resource(self.href)\n org = self.resource\n links = get_links(\n org, rel=RelationType.DOWN, media_type=EntityType.CATALOG.value)\n for link in links:\n if old_catalog_name == link.name:\n catalog = self.client.get_resource(link.href)\n href = catalog.get('href')\n admin_href = href.replace('/api/catalog/',\n '/api/admin/catalog/')\n admin_view_of_catalog = self.client.get_resource(admin_href)\n if new_catalog_name is not None:\n admin_view_of_catalog.set('name', new_catalog_name)\n if description is not None:\n admin_view_of_catalog['Description'] = E.Description(\n description)\n return self.client.put_resource(\n admin_href,\n admin_view_of_catalog,\n media_type=EntityType.ADMIN_CATALOG.value)\n raise Exception('Catalog not found.')", "def test_update_hyperflex_app_catalog(self):\n pass", "def get_catalog_form_for_update(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.get_bin_form_for_update\n return self._get_provider_session('catalog_admin_session').get_catalog_form_for_update(*args, **kwargs)", "def updateControl(self, event):\r\n print(\"updating...\")\r\n product_dict = [\r\n {\"title\":\"Core Python Programming\", \"author\":\"Wesley Chun\",\r\n \"isbn\":\"0132269937\", \"mfg\":\"Prentice Hall\"},\r\n {\"title\":\"Python Programming for the Absolute Beginner\",\r\n \"author\":\"Michael Dawson\", \"isbn\":\"1598631128\",\r\n \"mfg\":\"Course Technology\"},\r\n {\"title\":\"Learning Python\", \"author\":\"Mark Lutz\",\r\n \"isbn\":\"0596513984\", \"mfg\":\"O'Reilly\"}\r\n ]\r\n data = self.products + product_dict\r\n self.dataOlv.SetObjects(data)", "def update_config():\n \n dburl = dbconn.DbURL()\n conn = dbconn.connect(dburl, utility=True)\n \n logger.info('Updating catalog...')\n sql = \"SELECT gp_activate_standby()\"\n dbconn.execSQL(conn, sql)\n\n conn.commit()\n conn.close()\n\n logger.info('Database catalog updated successful')", "def catalog_id(self, catalog_id):\n self._catalog_id = catalog_id", "def _modify_controls(self, catalog: cat.Catalog) -> cat.Catalog:\n logger.debug(f'modify specify catalog {catalog.metadata.title} for profile {self._profile.metadata.title}')\n self._catalog_interface = CatalogInterface(catalog)\n param_dict: Dict[str, prof.SetParameter] = {}\n alters: Optional[List[prof.Alter]] = None\n # find the modify and alters\n # build a param_dict for all the modifys\n if self._profile.modify is not None:\n if self._profile.modify.set_parameters is not None:\n param_list = self._profile.modify.set_parameters\n for param in param_list:\n param_dict[param.param_id] = param\n alters = self._profile.modify.alters\n\n if alters is not None:\n for alter in alters:\n if alter.control_id is None:\n raise TrestleError('Alters must have control id 
specified.')\n if alter.removes is not None:\n raise TrestleError('Alters not supported for removes.')\n if alter.adds is None:\n raise TrestleError('Alter has no adds to perform.')\n for add in alter.adds:\n if add.position is not None and add.position.name is not None and add.position.name != 'after':\n raise TrestleError('Alter position must be \"after\" or None.')\n control = self._catalog_interface.get_control(alter.control_id)\n if add.by_id is not None:\n self._add_to_parts(control, add.by_id, add.parts)\n self._catalog_interface.replace_control(control)\n continue\n if add.props is not None:\n if add.by_id is not None:\n TrestleError('Alter cannot add props by id.')\n if not control.props:\n control.props = []\n control.props.extend(add.props)\n continue\n TrestleError('Alter must either add parts or props')\n\n # use the param_dict to apply all modifys\n control_ids = self._catalog_interface.get_control_ids()\n for control_id in control_ids:\n control = self._catalog_interface.get_control(control_id)\n if control.parts is not None:\n for part in control.parts:\n self._replace_part_prose(control, part, param_dict)\n self._catalog_interface.replace_control(control)\n\n catalog = self._catalog_interface._catalog\n\n # update the original profile metadata with new contents\n # roles and responsible-parties will be pulled in with new uuid's\n new_metadata = self._profile.metadata\n new_metadata.title = f'{catalog.metadata.title}: Resolved by profile {self._profile.metadata.title}'\n links: List[common.Link] = []\n for import_ in self._profile.imports:\n links.append(common.Link(**{'href': import_.href, 'rel': 'resolution-source'}))\n new_metadata.links = links\n # move catalog controls from dummy group 'catalog' into the catalog\n if catalog.groups:\n for group in catalog.groups:\n if group.id == 'catalog':\n catalog.controls = group.controls\n catalog.groups = [group for group in catalog.groups if group.id != 'catalog']\n break\n\n catalog.metadata = new_metadata\n\n return catalog", "def upload_catalog(self, catalog: Catalog) -> None:\n self._status.check_authority_for_draft()\n\n put_data: Dict[str, Any] = {\"catalog\": catalog.dumps()}\n if not put_data:\n raise TypeError(\"Empty catalog\")\n put_data.update(self._status.get_status_info())\n\n self._client.open_api_do(\"PUT\", \"labels/catalogs\", self.dataset_id, json=put_data)", "def _set_catalog(self, catalog: cat.Catalog) -> None:\n self._catalog_interface = CatalogInterface(catalog)\n self._catalog = catalog", "def update_records(self, something):\n print(\"Some logic (not shown) to update database of units\")", "def add_update_catalog(gifts: Dict[str, int]) -> int:\n global gift_catalog\n _assert_is_bank(context.sender)\n\n gift_catalog.update(gifts)\n return get_catalog_size()", "def run(self):\n if self.action == \"delete\":\n # Check all runtime types in cache for an entry of the given name.\n # If found, remove only the components from this catalog\n for runtime_type in self._component_cache:\n if self.catalog.name in self._component_cache[runtime_type]:\n self._component_cache[runtime_type].pop(self.catalog.name, None)\n break\n else: # 'modify' - replace (or add) components from the given catalog an update its status\n runtime_type = self.catalog.runtime_type.name\n catalog_state = self._component_cache[runtime_type][self.catalog.name].get(\"status\")\n try:\n # Replace all components for the given catalog\n self._component_cache[runtime_type][self.catalog.name][\n \"components\"\n ] = 
ComponentCache.instance().read_component_catalog(self.catalog)\n catalog_state[\"state\"] = \"current\"\n catalog_state[\"errors\"] = [] # reset any errors that may have been present\n except Exception as e:\n # Update state with an 'error' action and the relevant message\n catalog_state[\"state\"] = \"error\"\n catalog_state[\"errors\"].append(str(e))", "def update():\n return 'update api in put'", "def update(self, *args, **kwargs):", "def _update(self, binding, data):\n self._validate_data(data)\n if not data.get('name',False):\n data['name'] = data.get('frontend_label',False) or 'No Label'\n if not data.get('create_variant',False):\n data['create_variant'] = data.get('is_configurable',False)\n binding.write(data)\n self._create_attribute_option(binding, data)\n _logger.debug('%d updated from magento %s', binding.id, self.magento_id)\n return", "def update(self, *args, **kwargs):\n pass", "def update(self, *args, **kwargs):\n pass", "def update(self, *args, **kwargs):\n pass", "def _update_existing_cart_article(self, items, **kwargs):", "def update(self):\n self.getDbRecord().update()", "def update_catalog_for_image_scales(context):\n # We could catch TypeError and ValueError, but really this is a user error,\n # so let's fail.\n update = bool(int(os.getenv(\"UPDATE_CATALOG_FOR_IMAGE_SCALES\", 1)))\n if not update:\n logger.warning(\n \"UPDATE_CATALOG_FOR_IMAGE_SCALES is false, so not updating catalog.\"\n )\n return\n catalog = getToolByName(context, \"portal_catalog\")\n column = \"image_scales\"\n if column not in catalog.schema():\n catalog.addColumn(column)\n logger.info(\"Added %s column to catalog metadata schema.\", column)\n start = time()\n update_catalog_metadata(context, column=column)\n end = time()\n minutes = (end - start) / 60\n logger.info(\"Time taken to update catalog for image scales: %.1f minutes.\", minutes)", "def update(self, request, *args, **kwargs):\n response = super(ProductViewSet, self).update(request, *args, **kwargs)\n response.data['message'] = \"Producto ha sido editado\"", "def alias_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.alias_bin\n self._get_provider_session('catalog_admin_session').alias_catalog(*args, **kwargs)", "def update_interaction_model_catalog_v1(self, catalog_id, update_request, **kwargs):\n # type: (str, UpdateRequest_12e0eebe, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"update_interaction_model_catalog_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'catalog_id' is set\n if ('catalog_id' not in params) or (params['catalog_id'] is None):\n raise ValueError(\n \"Missing the required parameter `catalog_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'update_request' is set\n if ('update_request' not in params) or (params['update_request'] is None):\n raise ValueError(\n \"Missing the required parameter `update_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/api/custom/interactionModel/catalogs/{catalogId}/update'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'catalog_id' in params:\n path_params['catalogId'] = params['catalog_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'update_request' in params:\n body_params = 
params['update_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No content, indicates the fields were successfully updated.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn&#39;t have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"There is no catalog defined for the catalogId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None", "def update( ):\r\n pass", "def oe_update(self, cr, uid, external_session, existing_rec_id, vals, resource, defaults, context=None):\n if context is None: context={}\n context['referential_id'] = external_session.referential_id.id #did it's needed somewhere?\n return self.write(cr, uid, existing_rec_id, vals, context)", "def update(self, context, id_, update_data):\n run_playbook = update_data.get(\"run_playbook\", True)\n\n try:\n _validate_update(context, self.db_api, update_data, id_,\n eon_const.EON_RESOURCE_MANAGER)\n _resource_mgr_data = _make_response(\n self.db_api.get_resource_manager(context, id_))\n resource_mgr_type = _resource_mgr_data.get('type')\n resource_mgr_driver = driver.load_resource_mgr_driver(\n resource_mgr_type)\n\n if resource_mgr_type == eon_const.EON_RESOURCE_MGR_TYPE_VCENTER:\n name = update_data.get(\"name\")\n if name and name != _resource_mgr_data.get(\"name\"):\n msg = (_(\"vCenter name cannot be updated\"))\n raise exception.UpdateException(msg=msg)\n\n _resource_mgr_data_update = 
deepcopy(_resource_mgr_data)\n _resource_mgr_data_update.update(update_data)\n LOG.info(\"Updating resource manager : %s\",\n logging.mask_password(_resource_mgr_data_update))\n\n _is_creds_changed = self._is_creds_changed(\n _resource_mgr_data, _resource_mgr_data_update)\n if _is_creds_changed:\n LOG.debug(\"[%s] Validating the updated credentials/Ip \"\n \"address\" % id_)\n resource_mgr_driver.validate_update(_resource_mgr_data_update,\n _resource_mgr_data)\n # Gets the activated resources for the resource manager\n resources_data = self._get_resources(context,\n _resource_mgr_data_update,\n eon_const.EON_RESOURCE_STATE_ACTIVATED)\n\n resource_mgr_driver.update_vc_pass_through(\n context, _resource_mgr_data_update)\n if resources_data and run_playbook:\n self.db_api.update_resource_mgr_property(context,\n \"update_property\",\n id_, key=eon_const.RESOURCE_MGR_STATE_KEY,\n value=eon_const.EON_RESOURCE_MANAGER_STATE_UPDATING)\n eventlet.spawn_n(resource_mgr_driver.update,\n context, id_, resource_inventory=resources_data)\n\n self.db_api.update_resource_manager(context, id_,\n _resource_mgr_data_update)\n props = self.db_api.get_resource_mgr_properties(context,\n id_, key=eon_const.RESOURCE_MGR_STATE_KEY)\n return _make_response(_resource_mgr_data_update,\n property_list=props)\n\n except Exception as e:\n LOG.exception(e)\n msg = (_(\"Updating resource manager failed. Reason: '%s'\")\n % e.message)\n log_msg = ((\"Updating resource manager failed. Reason: '%s'\")\n % e.message)\n LOG.error(log_msg)\n raise exception.UpdateException(msg=msg)" ]
[ "0.695575", "0.65899086", "0.64527303", "0.63767856", "0.6348782", "0.62388414", "0.5986489", "0.59143436", "0.5849566", "0.5802538", "0.57939005", "0.5788721", "0.5669034", "0.56669474", "0.56507957", "0.5650015", "0.5579512", "0.5533091", "0.5521288", "0.5521288", "0.5521288", "0.5513988", "0.5501719", "0.5501672", "0.54875916", "0.547656", "0.5455011", "0.54481864", "0.53587353", "0.53396356" ]
0.72620577
0
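
The positive document in this record adds one twist to plain delegation: the provider's return value is re-wrapped in a service-level Catalog object so callers never touch provider types directly. A hedged sketch of that wrapping step; Catalog and FakeProviderSession here are illustrative stand-ins, not the real dlkit classes.

# Sketch of the result-wrapping variant used by update_catalog above.

class Catalog:
    """Service-level wrapper around a provider catalog object."""
    def __init__(self, provider_manager, provider_catalog, runtime=None, proxy=None):
        self._provider_manager = provider_manager
        self._provider_catalog = provider_catalog
        self._runtime = runtime
        self._proxy = proxy

    def __repr__(self):
        return 'Catalog(%r)' % (self._provider_catalog,)

class FakeProviderSession:
    def update_catalog(self, catalog_form):
        # A real provider would persist the form; here we just echo it back.
        return {'updated': catalog_form}

session = FakeProviderSession()
updated = Catalog(provider_manager=None,
                  provider_catalog=session.update_catalog({'display_name': 'Physics'}))
print(updated)
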
Pass through to provider CatalogAdminSession.update_catalog
def save_catalog(self, catalog_form, *args, **kwargs): # Implemented from kitosid template for - # osid.resource.BinAdminSession.update_bin if catalog_form.is_for_update(): return self.update_catalog(catalog_form, *args, **kwargs) else: return self.create_catalog(catalog_form, *args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.update_bin\n # OSID spec does not require returning updated catalog\n return Catalog(\n self._provider_manager,\n self._get_provider_session('catalog_admin_session').update_catalog(*args, **kwargs),\n self._runtime,\n self._proxy)", "def update(self, catalog: Metadata, action: str):\n self._insert_request(self.update_queue, catalog, action)", "def can_update_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_update_bins\n return self._get_provider_session('catalog_admin_session').can_update_catalogs()", "def update_catalog(self, old_catalog_name, new_catalog_name, description):\n if self.resource is None:\n self.resource = self.client.get_resource(self.href)\n org = self.resource\n links = get_links(\n org, rel=RelationType.DOWN, media_type=EntityType.CATALOG.value)\n for link in links:\n if old_catalog_name == link.name:\n catalog = self.client.get_resource(link.href)\n href = catalog.get('href')\n admin_href = href.replace('/api/catalog/',\n '/api/admin/catalog/')\n admin_view_of_catalog = self.client.get_resource(admin_href)\n if new_catalog_name is not None:\n admin_view_of_catalog.set('name', new_catalog_name)\n if description is not None:\n admin_view_of_catalog['Description'] = E.Description(\n description)\n return self.client.put_resource(\n admin_href,\n admin_view_of_catalog,\n media_type=EntityType.ADMIN_CATALOG.value)\n raise Exception('Catalog not found.')", "def test_update_hyperflex_app_catalog(self):\n pass", "def get_catalog_form_for_update(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.get_bin_form_for_update\n return self._get_provider_session('catalog_admin_session').get_catalog_form_for_update(*args, **kwargs)", "def updateControl(self, event):\r\n print(\"updating...\")\r\n product_dict = [\r\n {\"title\":\"Core Python Programming\", \"author\":\"Wesley Chun\",\r\n \"isbn\":\"0132269937\", \"mfg\":\"Prentice Hall\"},\r\n {\"title\":\"Python Programming for the Absolute Beginner\",\r\n \"author\":\"Michael Dawson\", \"isbn\":\"1598631128\",\r\n \"mfg\":\"Course Technology\"},\r\n {\"title\":\"Learning Python\", \"author\":\"Mark Lutz\",\r\n \"isbn\":\"0596513984\", \"mfg\":\"O'Reilly\"}\r\n ]\r\n data = self.products + product_dict\r\n self.dataOlv.SetObjects(data)", "def update_config():\n \n dburl = dbconn.DbURL()\n conn = dbconn.connect(dburl, utility=True)\n \n logger.info('Updating catalog...')\n sql = \"SELECT gp_activate_standby()\"\n dbconn.execSQL(conn, sql)\n\n conn.commit()\n conn.close()\n\n logger.info('Database catalog updated successful')", "def catalog_id(self, catalog_id):\n self._catalog_id = catalog_id", "def _modify_controls(self, catalog: cat.Catalog) -> cat.Catalog:\n logger.debug(f'modify specify catalog {catalog.metadata.title} for profile {self._profile.metadata.title}')\n self._catalog_interface = CatalogInterface(catalog)\n param_dict: Dict[str, prof.SetParameter] = {}\n alters: Optional[List[prof.Alter]] = None\n # find the modify and alters\n # build a param_dict for all the modifys\n if self._profile.modify is not None:\n if self._profile.modify.set_parameters is not None:\n param_list = self._profile.modify.set_parameters\n for param in param_list:\n param_dict[param.param_id] = param\n alters = self._profile.modify.alters\n\n if alters is not None:\n for alter in alters:\n if alter.control_id is None:\n raise 
TrestleError('Alters must have control id specified.')\n if alter.removes is not None:\n raise TrestleError('Alters not supported for removes.')\n if alter.adds is None:\n raise TrestleError('Alter has no adds to perform.')\n for add in alter.adds:\n if add.position is not None and add.position.name is not None and add.position.name != 'after':\n raise TrestleError('Alter position must be \"after\" or None.')\n control = self._catalog_interface.get_control(alter.control_id)\n if add.by_id is not None:\n self._add_to_parts(control, add.by_id, add.parts)\n self._catalog_interface.replace_control(control)\n continue\n if add.props is not None:\n if add.by_id is not None:\n TrestleError('Alter cannot add props by id.')\n if not control.props:\n control.props = []\n control.props.extend(add.props)\n continue\n TrestleError('Alter must either add parts or props')\n\n # use the param_dict to apply all modifys\n control_ids = self._catalog_interface.get_control_ids()\n for control_id in control_ids:\n control = self._catalog_interface.get_control(control_id)\n if control.parts is not None:\n for part in control.parts:\n self._replace_part_prose(control, part, param_dict)\n self._catalog_interface.replace_control(control)\n\n catalog = self._catalog_interface._catalog\n\n # update the original profile metadata with new contents\n # roles and responsible-parties will be pulled in with new uuid's\n new_metadata = self._profile.metadata\n new_metadata.title = f'{catalog.metadata.title}: Resolved by profile {self._profile.metadata.title}'\n links: List[common.Link] = []\n for import_ in self._profile.imports:\n links.append(common.Link(**{'href': import_.href, 'rel': 'resolution-source'}))\n new_metadata.links = links\n # move catalog controls from dummy group 'catalog' into the catalog\n if catalog.groups:\n for group in catalog.groups:\n if group.id == 'catalog':\n catalog.controls = group.controls\n catalog.groups = [group for group in catalog.groups if group.id != 'catalog']\n break\n\n catalog.metadata = new_metadata\n\n return catalog", "def upload_catalog(self, catalog: Catalog) -> None:\n self._status.check_authority_for_draft()\n\n put_data: Dict[str, Any] = {\"catalog\": catalog.dumps()}\n if not put_data:\n raise TypeError(\"Empty catalog\")\n put_data.update(self._status.get_status_info())\n\n self._client.open_api_do(\"PUT\", \"labels/catalogs\", self.dataset_id, json=put_data)", "def _set_catalog(self, catalog: cat.Catalog) -> None:\n self._catalog_interface = CatalogInterface(catalog)\n self._catalog = catalog", "def update_records(self, something):\n print(\"Some logic (not shown) to update database of units\")", "def add_update_catalog(gifts: Dict[str, int]) -> int:\n global gift_catalog\n _assert_is_bank(context.sender)\n\n gift_catalog.update(gifts)\n return get_catalog_size()", "def run(self):\n if self.action == \"delete\":\n # Check all runtime types in cache for an entry of the given name.\n # If found, remove only the components from this catalog\n for runtime_type in self._component_cache:\n if self.catalog.name in self._component_cache[runtime_type]:\n self._component_cache[runtime_type].pop(self.catalog.name, None)\n break\n else: # 'modify' - replace (or add) components from the given catalog an update its status\n runtime_type = self.catalog.runtime_type.name\n catalog_state = self._component_cache[runtime_type][self.catalog.name].get(\"status\")\n try:\n # Replace all components for the given catalog\n self._component_cache[runtime_type][self.catalog.name][\n 
\"components\"\n ] = ComponentCache.instance().read_component_catalog(self.catalog)\n catalog_state[\"state\"] = \"current\"\n catalog_state[\"errors\"] = [] # reset any errors that may have been present\n except Exception as e:\n # Update state with an 'error' action and the relevant message\n catalog_state[\"state\"] = \"error\"\n catalog_state[\"errors\"].append(str(e))", "def update():\n return 'update api in put'", "def update(self, *args, **kwargs):", "def _update(self, binding, data):\n self._validate_data(data)\n if not data.get('name',False):\n data['name'] = data.get('frontend_label',False) or 'No Label'\n if not data.get('create_variant',False):\n data['create_variant'] = data.get('is_configurable',False)\n binding.write(data)\n self._create_attribute_option(binding, data)\n _logger.debug('%d updated from magento %s', binding.id, self.magento_id)\n return", "def update(self, *args, **kwargs):\n pass", "def update(self, *args, **kwargs):\n pass", "def update(self, *args, **kwargs):\n pass", "def _update_existing_cart_article(self, items, **kwargs):", "def update(self):\n self.getDbRecord().update()", "def update_catalog_for_image_scales(context):\n # We could catch TypeError and ValueError, but really this is a user error,\n # so let's fail.\n update = bool(int(os.getenv(\"UPDATE_CATALOG_FOR_IMAGE_SCALES\", 1)))\n if not update:\n logger.warning(\n \"UPDATE_CATALOG_FOR_IMAGE_SCALES is false, so not updating catalog.\"\n )\n return\n catalog = getToolByName(context, \"portal_catalog\")\n column = \"image_scales\"\n if column not in catalog.schema():\n catalog.addColumn(column)\n logger.info(\"Added %s column to catalog metadata schema.\", column)\n start = time()\n update_catalog_metadata(context, column=column)\n end = time()\n minutes = (end - start) / 60\n logger.info(\"Time taken to update catalog for image scales: %.1f minutes.\", minutes)", "def update(self, request, *args, **kwargs):\n response = super(ProductViewSet, self).update(request, *args, **kwargs)\n response.data['message'] = \"Producto ha sido editado\"", "def alias_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.alias_bin\n self._get_provider_session('catalog_admin_session').alias_catalog(*args, **kwargs)", "def update_interaction_model_catalog_v1(self, catalog_id, update_request, **kwargs):\n # type: (str, UpdateRequest_12e0eebe, **Any) -> Union[ApiResponse, object, StandardizedError_f5106a89, BadRequestError_f854b05]\n operation_name = \"update_interaction_model_catalog_v1\"\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n # verify the required parameter 'catalog_id' is set\n if ('catalog_id' not in params) or (params['catalog_id'] is None):\n raise ValueError(\n \"Missing the required parameter `catalog_id` when calling `\" + operation_name + \"`\")\n # verify the required parameter 'update_request' is set\n if ('update_request' not in params) or (params['update_request'] is None):\n raise ValueError(\n \"Missing the required parameter `update_request` when calling `\" + operation_name + \"`\")\n\n resource_path = '/v1/skills/api/custom/interactionModel/catalogs/{catalogId}/update'\n resource_path = resource_path.replace('{format}', 'json')\n\n path_params = {} # type: Dict\n if 'catalog_id' in params:\n path_params['catalogId'] = params['catalog_id']\n\n query_params = [] # type: List\n\n header_params = [] # type: List\n\n body_params = None\n if 'update_request' in params:\n 
body_params = params['update_request']\n header_params.append(('Content-type', 'application/json'))\n header_params.append(('User-Agent', self.user_agent))\n\n # Response Type\n full_response = False\n if 'full_response' in params:\n full_response = params['full_response']\n\n # Authentication setting\n access_token = self._lwa_service_client.get_access_token_from_refresh_token()\n authorization_value = \"Bearer \" + access_token\n header_params.append(('Authorization', authorization_value))\n\n error_definitions = [] # type: List\n error_definitions.append(ServiceClientResponse(response_type=None, status_code=204, message=\"No content, indicates the fields were successfully updated.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=400, message=\"Server cannot process the request due to a client error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=401, message=\"The auth token is invalid/expired or doesn&#39;t have access to the resource.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.bad_request_error.BadRequestError\", status_code=403, message=\"The operation being requested is not allowed.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=404, message=\"There is no catalog defined for the catalogId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=429, message=\"Exceed the permitted request limit. Throttling criteria includes total requests, per API, ClientId, and CustomerId.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=500, message=\"Internal Server Error.\"))\n error_definitions.append(ServiceClientResponse(response_type=\"ask_smapi_model.v1.skill.standardized_error.StandardizedError\", status_code=503, message=\"Service Unavailable.\"))\n\n api_response = self.invoke(\n method=\"POST\",\n endpoint=self._api_endpoint,\n path=resource_path,\n path_params=path_params,\n query_params=query_params,\n header_params=header_params,\n body=body_params,\n response_definitions=error_definitions,\n response_type=None)\n\n if full_response:\n return api_response\n \n return None", "def update( ):\r\n pass", "def oe_update(self, cr, uid, external_session, existing_rec_id, vals, resource, defaults, context=None):\n if context is None: context={}\n context['referential_id'] = external_session.referential_id.id #did it's needed somewhere?\n return self.write(cr, uid, existing_rec_id, vals, context)", "def update(self, context, id_, update_data):\n run_playbook = update_data.get(\"run_playbook\", True)\n\n try:\n _validate_update(context, self.db_api, update_data, id_,\n eon_const.EON_RESOURCE_MANAGER)\n _resource_mgr_data = _make_response(\n self.db_api.get_resource_manager(context, id_))\n resource_mgr_type = _resource_mgr_data.get('type')\n resource_mgr_driver = driver.load_resource_mgr_driver(\n resource_mgr_type)\n\n if resource_mgr_type == eon_const.EON_RESOURCE_MGR_TYPE_VCENTER:\n name = update_data.get(\"name\")\n if name and name != _resource_mgr_data.get(\"name\"):\n msg = (_(\"vCenter name cannot be updated\"))\n raise exception.UpdateException(msg=msg)\n\n 
_resource_mgr_data_update = deepcopy(_resource_mgr_data)\n _resource_mgr_data_update.update(update_data)\n LOG.info(\"Updating resource manager : %s\",\n logging.mask_password(_resource_mgr_data_update))\n\n _is_creds_changed = self._is_creds_changed(\n _resource_mgr_data, _resource_mgr_data_update)\n if _is_creds_changed:\n LOG.debug(\"[%s] Validating the updated credentials/Ip \"\n \"address\" % id_)\n resource_mgr_driver.validate_update(_resource_mgr_data_update,\n _resource_mgr_data)\n # Gets the activated resources for the resource manager\n resources_data = self._get_resources(context,\n _resource_mgr_data_update,\n eon_const.EON_RESOURCE_STATE_ACTIVATED)\n\n resource_mgr_driver.update_vc_pass_through(\n context, _resource_mgr_data_update)\n if resources_data and run_playbook:\n self.db_api.update_resource_mgr_property(context,\n \"update_property\",\n id_, key=eon_const.RESOURCE_MGR_STATE_KEY,\n value=eon_const.EON_RESOURCE_MANAGER_STATE_UPDATING)\n eventlet.spawn_n(resource_mgr_driver.update,\n context, id_, resource_inventory=resources_data)\n\n self.db_api.update_resource_manager(context, id_,\n _resource_mgr_data_update)\n props = self.db_api.get_resource_mgr_properties(context,\n id_, key=eon_const.RESOURCE_MGR_STATE_KEY)\n return _make_response(_resource_mgr_data_update,\n property_list=props)\n\n except Exception as e:\n LOG.exception(e)\n msg = (_(\"Updating resource manager failed. Reason: '%s'\")\n % e.message)\n log_msg = ((\"Updating resource manager failed. Reason: '%s'\")\n % e.message)\n LOG.error(log_msg)\n raise exception.UpdateException(msg=msg)" ]
[ "0.72620577", "0.65899086", "0.64527303", "0.63767856", "0.6348782", "0.62388414", "0.5986489", "0.59143436", "0.5849566", "0.5802538", "0.57939005", "0.5788721", "0.5669034", "0.56669474", "0.56507957", "0.5650015", "0.5579512", "0.5533091", "0.5521288", "0.5521288", "0.5521288", "0.5513988", "0.5501719", "0.5501672", "0.54875916", "0.547656", "0.5455011", "0.54481864", "0.53587353", "0.53396356" ]
0.695575
1
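
This record's save_catalog document is a convenience dispatcher: it inspects the form's update flag and routes to update_catalog for existing catalogs or create_catalog for new ones. A minimal sketch of that dispatch, with CatalogForm as a hypothetical stand-in for the OSID form object:

# Sketch of the save-dispatch pattern from save_catalog above.

class CatalogForm:
    def __init__(self, for_update):
        self._for_update = for_update

    def is_for_update(self):
        return self._for_update

def save_catalog(form, update_catalog, create_catalog):
    # Route to update for existing catalogs, create for new ones.
    if form.is_for_update():
        return update_catalog(form)
    return create_catalog(form)

result = save_catalog(CatalogForm(for_update=False),
                      update_catalog=lambda f: 'updated',
                      create_catalog=lambda f: 'created')
print(result)  # -> 'created'
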
Pass through to provider CatalogAdminSession.can_delete_catalogs
def can_delete_catalogs(self): # Implemented from kitosid template for - # osid.resource.BinAdminSession.can_delete_bins return self._get_provider_session('catalog_admin_session').can_delete_catalogs()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def can_delete_books(self):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_delete_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_delete_catalogs()\n return True", "def can_delete_families(self):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_delete_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_delete_catalogs()\n return True", "def can_create_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_create_bins\n return self._get_provider_session('catalog_admin_session').can_create_catalogs()", "def can_delete(self):\r\n return True", "def can_update_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_update_bins\n return self._get_provider_session('catalog_admin_session').can_update_catalogs()", "def supports_catalog_admin(self):\n return False", "def supports_catalog_admin(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_admin()", "def test_delete_hyperflex_app_catalog(self):\n pass", "def can_manage_catalog_aliases(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceAdminSession.can_manage_resource_aliases_template\n return self._get_provider_session('catalog_admin_session').can_manage_catalog_aliases()", "def can_delete_comments(self):\n # Implemented from template for\n # osid.resource.ResourceAdminSession.can_delete_resources\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def can_delete_relationships(self):\n # Implemented from template for\n # osid.resource.ResourceAdminSession.can_delete_resources\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def on_model_delete(self, model):\n if not current_user.is_active or not current_user.is_authenticated:\n abort(403)\n if not user_has_permission(current_user, 'can_delete','admins'):\n abort(403)", "def on_model_delete(self, model):\n if not current_user.is_active or not current_user.is_authenticated:\n abort(403)\n if not user_has_permission(current_user, 'can_delete','roles'):\n abort(403)", "def has_delete_permission(self, request, obj=None):\r\n return False", "def on_model_delete(self, model):\n if not current_user.is_active or not current_user.is_authenticated:\n abort(403)\n if not user_has_permission(current_user, 'can_delete','subspecialties'):\n abort(403)", "def has_delete_permission(self, request, obj=None, *args, **kwargs):\n return False", "def test_delete_permission(self):\r\n self.assertFalse(self.creator_admin.has_delete_permission(self.request))", "def catalog_delete(self, args):\n headers = DEFAULT_HEADERS.copy()\n headers.update(args.headers)\n try:\n catalog = self.server.connect_ermrest(args.id)\n catalog.delete(args.path, headers)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog not found', e)\n else:\n raise e", "def check_delete_permission(self):\n if 
getSecurityManager().checkPermission(\"Delete objects\", self):\n username = getSecurityManager().getUser().getUserName()\n if username == self.getOwner().getId():\n return True\n return False", "def has_delete_permission(self, request, obj=None):\n return False", "def has_delete_permission(self, request, obj=None):\n return False", "def has_delete_permission(self, request, obj=None):\n if type(obj) is Client:\n return obj.is_user_in_main_sales_contacts_of_client(request.user)\n return False", "def has_delete_permission(self, request, instance=None):\n return False", "def has_delete_permission(self, request, obj=None):\n\n if obj and obj.cwr:\n return False\n return super().has_delete_permission(request, obj)", "def can_lookup_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.can_lookup_bins_template\n return self._get_provider_session('catalog_lookup_session').can_lookup_catalogs()", "def can_fast_delete(self, *args, **kwargs):\n return False", "def supportsAuthenticateDelete(self):\n return 1", "def test_vault_delete_authorization_for_vault_section(self):\n pass", "def on_model_delete(self, model):\n if not current_user.is_active or not current_user.is_authenticated:\n abort(403)\n if not user_has_permission(current_user, 'can_delete','specialties'):\n abort(403)", "def catalog_drop(self, args):\n try:\n catalog = self.server.connect_ermrest(args.id)\n catalog.delete_ermrest_catalog(really=True)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog not found', e)\n else:\n raise e" ]
[ "0.7544853", "0.7086018", "0.6701606", "0.6693337", "0.6532898", "0.6312073", "0.63093925", "0.62068963", "0.612478", "0.605064", "0.5948238", "0.59362906", "0.59276235", "0.58809143", "0.5872019", "0.58165145", "0.5810001", "0.579043", "0.5786722", "0.5723496", "0.5723496", "0.5715091", "0.57012266", "0.567836", "0.567558", "0.5637248", "0.5623499", "0.5597228", "0.5595248", "0.55580866" ]
0.8756142
0
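
can_delete_catalogs, like the other can_* methods in this record's negatives, is an authorization hint rather than an enforcement point: several of the negatives return a hard-coded True and note that real authentication belongs in a service adapter above the implementation. A sketch of the two-layer hint chain, assuming hypothetical class names:

# Sketch of an authorization-hint pass-through.

class PermissiveAdminSession:
    def can_delete_catalogs(self):
        # Real authentication hints would be applied by a service adapter
        # above this layer; the base implementation just answers True.
        return True

class CatalogService:
    def __init__(self, admin_session):
        self._admin_session = admin_session

    def can_delete_catalogs(self):
        # Forward the hint unchanged to the provider session.
        return self._admin_session.can_delete_catalogs()

print(CatalogService(PermissiveAdminSession()).can_delete_catalogs())  # True
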
Pass through to provider CatalogAdminSession.can_manage_catalog_aliases
def can_manage_catalog_aliases(self): # Implemented from kitosid template for - # osid.resource.ResourceAdminSession.can_manage_resource_aliases_template return self._get_provider_session('catalog_admin_session').can_manage_catalog_aliases()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def can_manage_book_aliases(self):\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def can_manage_relationship_aliases(self):\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def can_manage_family_aliases(self):\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def can_manage_comment_aliases(self):\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n return True", "def alias_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.alias_bin\n self._get_provider_session('catalog_admin_session').alias_catalog(*args, **kwargs)", "def is_aliasable(self, *args):\n return _ida_hexrays.vdloc_t_is_aliasable(self, *args)", "def getAliases(self):", "def test_aliases_helper(self):\n self.t.config(\"alias.foo\", \"bar\")\n code, out, err = self.t(\"_aliases\")\n self.assertIn(\"foo\", out)", "def supports_catalog_admin(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_admin()", "def is_aliasable(self, *args):\n return _ida_hexrays.lvar_t_is_aliasable(self, *args)", "def list_aliases(client, sort):\n r = client.cat.aliases(v=True, s=sort)\n click.echo(r)", "def check_alias(self):\n es = self.get_es()\n aliased_indexes = es[self.es_alias].get('_aliases')\n return aliased_indexes.keys()", "def supports_catalog_admin(self):\n return False", "def testAliases(self):\n Options().parseOptions(\n [\n \"--maildirdbmdomain\",\n \"example.com=example.com\",\n \"--aliases\",\n self.aliasFilename,\n ]\n )", "def aliases(cls):\n return cls._ALIASES", "def list_aliases(config, detail=False):\n if detail:\n output = \"\\n# configured alias details...\"\n for section in sorted(config.sections()):\n output += \"\\n\\n[%s]\" % section\n for item in sorted(config.items(section)):\n output += \"\\n%s = %s\" % item\n output += \"\\n\"\n else:\n output = \"configured aliases and cached searches...\"\n for section in sorted(config.sections()):\n if config.has_option(section, \"description\"):\n description = config.get(section, \"description\")\n else: description = \"(no description provided)\"\n output += \"\\n %s: %s\" % (section, description)\n return output", "def AddAliases(self, aliases):\n self._legacy = False\n if aliases:\n self._aliases.update(aliases)", "def policy_alias(self):", "def get_alias(self):", "def aliases(self):\n return self.__aliases", "def test_aliases(self):\n field = self.base_field\n self.assertFalse(field.get('aliases'))\n self.assertEqual([], SchemaField(field).aliases)\n field['aliases'] = []\n self.assertEqual([], SchemaField(field).aliases)\n field['aliases'] = ['alias1', 'Alias2']\n sch = SchemaField(field)\n self.assertEqual(field['aliases'], sch.aliases)\n # test some related method\n self.assertTrue(sch.has_alias('alias1'))\n self.assertTrue(sch.has_alias('Alias2'))\n self.assertFalse(sch.has_alias('alias2'))\n self.assertTrue(sch.has_alias('alias2', icase=True))\n self.assertFalse(sch.has_alias(field['name']))\n self.assertTrue(sch.has_name_or_alias(field['name'], 'aaaa'))\n self.assertFalse(sch.has_name_or_alias(field['name'].lower(), 'aaaa'))\n 
self.assertTrue(sch.has_name_or_alias(field['name'].lower(), 'aaaa', icase=True))\n self.assertFalse(sch.has_name_or_alias('aaaa', 'alias2'))\n self.assertTrue(sch.has_name_or_alias('aaaa', 'alias2', icase=True))", "def assume_alias(self):\n\n es = self.get_es()\n if es.head(self.es_alias):\n #remove all existing aliases - this is destructive and could be harmful, but for current\n #uses, it is legal - in a more delicate routing arrangement, a configuration file of\n # some sort should be in use.\n alias_indices = es[self.es_alias].get('_status')['indices'].keys()\n\n remove_actions = [{\"remove\": {\"index\": x, \"alias\": self.es_alias}} for x in\n alias_indices]\n remove_data = {\"actions\": remove_actions}\n es.post('_aliases', data=remove_data)\n #now reapply HEAD/master index\n es.post('_aliases', data={\"actions\": [{\"add\":\n {\"index\": self.es_index,\n \"alias\": self.es_alias}}]})", "def use_ip_aliases(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"use_ip_aliases\")", "def test_sample_aliases(self):\n self.app = self.make_app(argv = ['report', 'project_status', 'J.Doe_00_03', '--debug'],extensions=['scilifelab.pm.ext.ext_couchdb'])\n handler.register(DeliveryReportController)\n self._run_app()\n data = ast.literal_eval(self.app._output_data['debug'].getvalue())\n # # This should fail since P003_101_index6 != 3_index6\n # self.assertEqual(len(data['table']), 2)\n\n self.app = self.make_app(argv = ['report', 'project_status', 'J.Doe_00_03', '--sample_alias', \"{'P003_101_index6':'3_index6'}\", '--debug'],extensions=['scilifelab.pm.ext.ext_couchdb'])\n handler.register(DeliveryReportController)\n self._run_app()\n data = ast.literal_eval(self.app._output_data['debug'].getvalue())\n samples = [x[0] for x in data['table']]\n self.assertIn(\"3_index6\", samples)", "def alias_settings_change(self):\n self.comp('assetmanager').set_alias_settings(self.settings.get_value(mconst.DEF_SETTINGSEC_aliases))", "def catalog_alias_create(self, args):\n try:\n if args.id:\n alias = self.server.connect_ermrest_alias(args.id)\n try:\n if alias.retrieve():\n print(\"Catalog alias already exists\")\n return\n except requests.HTTPError as e:\n if e.response.status_code == 404:\n pass\n else:\n raise\n owner = args.owner if args.owner else None\n alias = self.server.create_ermrest_alias(args.id, owner, args.alias_target)\n if not args.quiet:\n print(\"Created new catalog alias %s with the following configuration:\\n\" % alias.alias_id)\n pp(alias.retrieve())\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog alias not found', e)\n elif e.response.status_code == requests.codes.conflict:\n raise ResourceException(\"Catalog alias already exists\", e)\n else:\n raise", "def load_aliases():\n for name in COMMANDS:\n load_alias(name)", "def aliases(self) -> List[str]:\n # Super overkill here using pagination when an account can only\n # have a single alias, but at least this implementation should be\n # future-proof.\n aliases: List[str] = []\n paginator = self.__session.client(\"iam\").get_paginator(\"list_account_aliases\")\n response_iterator = paginator.paginate()\n for page in response_iterator:\n aliases.extend(page.get(\"AccountAliases\", []))\n return aliases", "def query_aliases(self):\n\n result = set()\n\n infos = self.es_client.indices.get_alias(\n index='{}-*'.format(self.normalized_hostname)\n )\n\n for info in infos.values():\n for alias in info['aliases']:\n result.add(alias)\n\n return result", "def 
redirect_aliases(self, redirect_aliases):\n\n self._redirect_aliases = redirect_aliases" ]
[ "0.7482216", "0.71607053", "0.71438783", "0.6937126", "0.67473656", "0.6654894", "0.66373825", "0.647238", "0.6433087", "0.6335908", "0.62401813", "0.62278897", "0.61096185", "0.5870672", "0.5863249", "0.58247435", "0.5809853", "0.5801972", "0.5765618", "0.5741113", "0.57095456", "0.56557125", "0.5631531", "0.5578504", "0.557745", "0.55755115", "0.5546774", "0.5539642", "0.5500805", "0.5458183" ]
0.899018
0
Pass through to provider CatalogAdminSession.alias_catalog
def alias_catalog(self, *args, **kwargs): # Implemented from kitosid template for - # osid.resource.BinAdminSession.alias_bin self._get_provider_session('catalog_admin_session').alias_catalog(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_alias(self):", "def can_manage_catalog_aliases(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceAdminSession.can_manage_resource_aliases_template\n return self._get_provider_session('catalog_admin_session').can_manage_catalog_aliases()", "def catalog_alias_get(self, args):\n try:\n alias = self.server.connect_ermrest_alias(args.id)\n response = alias.retrieve()\n if not args.quiet:\n pp(response)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog alias not found', e)\n else:\n raise e", "def resolveAlias(self, alias):", "def getAliases(self):", "def getAlias(self):\n pass;", "def catalog_alias_create(self, args):\n try:\n if args.id:\n alias = self.server.connect_ermrest_alias(args.id)\n try:\n if alias.retrieve():\n print(\"Catalog alias already exists\")\n return\n except requests.HTTPError as e:\n if e.response.status_code == 404:\n pass\n else:\n raise\n owner = args.owner if args.owner else None\n alias = self.server.create_ermrest_alias(args.id, owner, args.alias_target)\n if not args.quiet:\n print(\"Created new catalog alias %s with the following configuration:\\n\" % alias.alias_id)\n pp(alias.retrieve())\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog alias not found', e)\n elif e.response.status_code == requests.codes.conflict:\n raise ResourceException(\"Catalog alias already exists\", e)\n else:\n raise", "def alias(self):\n return self._alias", "def alias(self):\n return self._alias", "def policy_alias(self):", "def add_alias(self, alias):\n if alias != self.name:\n self.alias = alias", "def alias(self):\n\n return self._alias", "def alias_settings_change(self):\n self.comp('assetmanager').set_alias_settings(self.settings.get_value(mconst.DEF_SETTINGSEC_aliases))", "def get_alias(conn, alias):\n rows = retrieve_alias(conn, alias)\n [print(\"%-20s: %s\" % x) for x in rows]", "def add_alias(self, alias):\n self.alias.append(alias)", "def alias(option=None):\n return alias(option)", "def alias(option=None):\n return alias(option)", "def alias(option=None):\n return alias(option)", "def list_aliases(client, sort):\n r = client.cat.aliases(v=True, s=sort)\n click.echo(r)", "def alias(self):\n return '{}_{}'.format(self.node.alias.replace('`', ''), self.label)", "def export_alias(conn, alias):\n with open(ALIAS_FILE, \"w\") as fo:\n rows = retrieve_alias(conn, alias)\n [fo.write(x[1] + \"\\n\") for x in rows]", "def addAlias(self, alias, node):", "def getCatalog(unique_name):", "def assume_alias(self):\n\n es = self.get_es()\n if es.head(self.es_alias):\n #remove all existing aliases - this is destructive and could be harmful, but for current\n #uses, it is legal - in a more delicate routing arrangement, a configuration file of\n # some sort should be in use.\n alias_indices = es[self.es_alias].get('_status')['indices'].keys()\n\n remove_actions = [{\"remove\": {\"index\": x, \"alias\": self.es_alias}} for x in\n alias_indices]\n remove_data = {\"actions\": remove_actions}\n es.post('_aliases', data=remove_data)\n #now reapply HEAD/master index\n es.post('_aliases', data={\"actions\": [{\"add\":\n {\"index\": self.es_index,\n \"alias\": self.es_alias}}]})", "def changeAlias(self, alias, node):", "def with_alias(self):\n return self.node.alias", "def magic_alias(self, parameter_s = ''):\n\n par = parameter_s.strip()\n if not par:\n if self.rc.automagic:\n prechar = ''\n else:\n prechar = '@'\n print 
'Alias\\t\\tSystem Command\\n'+'-'*30\n aliases = self.alias_table.keys()\n aliases.sort()\n for alias in aliases:\n print prechar+alias+'\\t\\t'+self.alias_table[alias]\n return\n try:\n alias,cmd = par.split(' ',1)\n except:\n print inspect.getdoc(self.magic_alias)\n return\n nargs = cmd.count('%s')\n if nargs == 0: # simple aliases\n fndef = itpl(\n\"\"\"\ndef magic_${alias}(parameter_s = ''):\n '''Alias to the system command '$cmd' '''\n xsys('$cmd '+str(parameter_s))\n\nself.magic_$alias = magic_$alias\n\"\"\")\n else: # parametric aliases\n fndef = itpl(\n\"\"\"\ndef magic_${alias}(parameter_s = ''):\n '''Alias to the system command '$cmd' '''\n cmd = '$cmd'\n nargs = cmd.count('%s')\n args = str(parameter_s).split()\n\n if len(args) != nargs:\n print 'Incorrect number of arguments:',nargs,'expected.'\n print \"$alias is an alias to: '$cmd'\"\n return\n else:\n cmd_call = cmd % tuple(args)\n xsys(cmd_call)\n\nself.magic_$alias = magic_$alias\n\"\"\")\n try:\n exec fndef in globals(),locals()\n except:\n print self.magic_alias.__doc__\n self.alias_table.update({alias:cmd})", "def _get_alias(cfg, name):\n aliases = cfg.get('aliases', {})\n if name in aliases:\n return aliases[name]\n if cfg['group_attribute_as_default_alias']:\n return name.split(SEP)[-1]\n return name", "def list_aliases(config, detail=False):\n if detail:\n output = \"\\n# configured alias details...\"\n for section in sorted(config.sections()):\n output += \"\\n\\n[%s]\" % section\n for item in sorted(config.items(section)):\n output += \"\\n%s = %s\" % item\n output += \"\\n\"\n else:\n output = \"configured aliases and cached searches...\"\n for section in sorted(config.sections()):\n if config.has_option(section, \"description\"):\n description = config.get(section, \"description\")\n else: description = \"(no description provided)\"\n output += \"\\n %s: %s\" % (section, description)\n return output", "def alias_book(self, book_id, alias_id):\n # Implemented from template for\n # osid.resource.BinLookupSession.alias_bin_template\n if self._catalog_session is not None:\n return self._catalog_session.alias_catalog(catalog_id=book_id, alias_id=alias_id)\n self._alias_id(primary_id=book_id, equivalent_id=alias_id)" ]
[ "0.7177646", "0.67365676", "0.66353583", "0.6606362", "0.6505425", "0.650178", "0.64422536", "0.6232894", "0.6232894", "0.6196061", "0.6169235", "0.60587853", "0.60041684", "0.5951661", "0.5941393", "0.59237397", "0.59237397", "0.59237397", "0.5876475", "0.5863977", "0.58495903", "0.5831974", "0.58182037", "0.5809199", "0.58053887", "0.57983726", "0.57936615", "0.5785935", "0.5782257", "0.5780758" ]
0.8511196
0
Pass through to provider CatalogHierarchySession.get_catalog_hierarchy_id
def get_catalog_hierarchy_id(self): # Implemented from kitosid template for - # osid.resource.BinHierarchySession.get_bin_hierarchy_id return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy_id()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_catalog_id(self):\n return self._catalog_id", "def get_book_hierarchy_id(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_bin_hierarchy_id\n if self._catalog_session is not None:\n return self._catalog_session.get_catalog_hierarchy_id()\n return self._hierarchy_session.get_hierarchy_id()", "def get_book_hierarchy_id(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_bin_hierarchy_id\n if self._catalog_session is not None:\n return self._catalog_session.get_catalog_hierarchy_id()\n return self._hierarchy_session.get_hierarchy_id()", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_hierarchy_session(self):\n raise Unimplemented()", "def catalog_id(self):\n return self._catalog_id", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_hierarchy_session(*args, **kwargs)", "def get_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy()", "def get_id(self):\n if not self.nccl_id:\n logger.warning(\"The NCCL ID has not been \"\n \"set yet for store {}.\".format(self.name))\n return self.nccl_id", "def get_family_hierarchy_id(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_bin_hierarchy_id\n if self._catalog_session is not None:\n return self._catalog_session.get_catalog_hierarchy_id()\n return self._hierarchy_session.get_hierarchy_id()", "def get_family_hierarchy_id(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_bin_hierarchy_id\n if self._catalog_session is not None:\n return self._catalog_session.get_catalog_hierarchy_id()\n return self._hierarchy_session.get_hierarchy_id()", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n pass", "def get_id(self):\n if self.is_root():\n return self.id_field.name\n elif self.has_id():\n return getattr(self, self.id_field)\n elif self.has_cid():\n return self.cid\n else:\n raise Exception(\"No id-like value set when get_id() called.\")", "def get_catalog_hierarchy_design_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_hierarchy_design_session(*args, **kwargs)", "def get_parent_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalog_ids(*args, **kwargs)", "def get_primary_id(self):", "def _get_id(self):\n return self.id", "def getID():", "def layer_parent_nid(self, layer_id):\n ...", "def get_catalog_hierarchy_design_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def tree_id(self):\n if self.is_root:\n return 0\n elif self._link is not None:\n return self._link.tree_id\n else:\n return self._tree_id", "def showId(self):\n #Here I'm supposing that the name of the table, and the extent polygon gives a 
unique mapping.\n try:\n extent = self.geometry.extent\n name = self.grid_name\n res = self.dArea\n string = \"%s:%s:%s:%s\" %(self.parent_id,name,extent,res)\n return string\n except:\n logger.error(\"[biospatial.gbif.taxonomy.GriddedTaxonomy] \\n The total geometry area has not been defined. Try running mergeGeometries first\")\n raise Exception(\"Geometry Extent has not been instantiated\")\n return None", "def getID(self) -> int:\n ...", "def get_id(self):\n p = self.get_parent()\n if p:\n parent_id = p.get_id()\n if parent_id:\n return '.'.join((parent_id,self.name))\n else:\n return self.name\n else:\n return self.name", "def get_id(self):\n return self.iid", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id" ]
[ "0.6811032", "0.66457605", "0.66457605", "0.657649", "0.6574508", "0.6568247", "0.65487117", "0.63936114", "0.6189958", "0.61780137", "0.61780137", "0.6101886", "0.6101886", "0.6101886", "0.6101886", "0.6097169", "0.60141516", "0.5978055", "0.5973247", "0.593903", "0.5923195", "0.5907081", "0.58804363", "0.5862606", "0.5859323", "0.58568096", "0.5856791", "0.5823279", "0.58177", "0.58177" ]
0.8230053
0
Pass through to provider CatalogHierarchySession.can_access_catalog_hierarchy
def can_access_catalog_hierarchy(self): # Implemented from kitosid template for - # osid.resource.BinHierarchySession.can_access_bin_hierarchy return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def supports_catalog_hierarchy(self):\n return False", "def can_access_book_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_access_catalog_hierarchy()\n return True", "def can_modify_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_design_session').can_modify_catalog_hierarchy()", "def supports_catalog_hierarchy_design(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy_design()", "def can_access_family_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_access_catalog_hierarchy()\n return True", "def supports_catalog_hierarchy_design(self):\n return False", "def can_modify_book_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy_template\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_modify_catalog_hierarchy()\n return True", "def can_modify_family_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy_template\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_modify_catalog_hierarchy()\n return True", "def get_catalog_hierarchy_session(self):\n raise Unimplemented()", "def can_lookup_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.can_lookup_bins_template\n return self._get_provider_session('catalog_lookup_session').can_lookup_catalogs()", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_hierarchy_session(*args, **kwargs)", "def supports_catalog_admin(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_admin()", "def get_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy()", "def has_child_catalogs(self, *args, 
**kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_child_bins\n return self._get_provider_session('catalog_hierarchy_session').has_child_catalogs(*args, **kwargs)", "def supports_catalog_lookup(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_lookup()", "def is_child_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_child_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_child_of_catalog(*args, **kwargs)", "def can_search_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.can_search_bins_template\n return self._get_provider_session('catalog_query_session').can_search_catalogs()", "def get_catalog_hierarchy_design_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_hierarchy_design_session(self):\n raise Unimplemented()", "def can_create_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_create_bins\n return self._get_provider_session('catalog_admin_session').can_create_catalogs()", "def create_catalog_hierarchy(self, *args, **kwargs):\n # Patched in by [email protected], Jul 23, 2014, added by birdland to template on Aug 8, 2014\n # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead\n # Will not return an actual object, just JSON\n # since a BankHierarchy does not seem to be an OSID thing.\n return self._get_provider_session('catalog_hierarchy_design_session').create_catalog_hierarchy(*args, **kwargs)", "def supports_catalog_lookup(self):\n return False", "def supports_catalog(self):\n return False", "def has_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').has_parent_catalogs(*args, **kwargs)", "def can_lookup_families(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.can_lookup_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_lookup_catalogs()\n return True", "def get_catalog_hierarchy_design_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_hierarchy_design_session(*args, **kwargs)", "def is_descendant_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_descendant_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_descendant_of_catalog(*args, **kwargs)", "def supports_catalog_admin(self):\n return False" ]
[ "0.7873286", "0.7798301", "0.7606915", "0.75920963", "0.74497044", "0.7416652", "0.7145832", "0.68291575", "0.656526", "0.649082", "0.6440466", "0.6397366", "0.6219411", "0.60701853", "0.6026021", "0.5990887", "0.59899867", "0.59638166", "0.59417635", "0.5934202", "0.59136015", "0.5912718", "0.5876872", "0.5865253", "0.58327216", "0.5825502", "0.58234966", "0.57990235", "0.5781608", "0.5779567" ]
0.88391834
0
Pass through to provider CatalogHierarchySession.get_root_catalog_ids
def get_root_catalog_ids(self): # Implemented from kitosid template for - # osid.resource.BinHierarchySession.get_root_bin_ids return self._get_provider_session('catalog_hierarchy_session').get_root_catalog_ids()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_root_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bins\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalogs()", "def get_parent_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalog_ids(*args, **kwargs)", "def get_root_book_ids(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_root_bin_ids\n if self._catalog_session is not None:\n return self._catalog_session.get_root_catalog_ids()\n return self._hierarchy_session.get_roots()", "def get_catalog_node_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_node_ids\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_node_ids(*args, **kwargs)", "def get_root_family_ids(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_root_bin_ids\n if self._catalog_session is not None:\n return self._catalog_session.get_root_catalog_ids()\n return self._hierarchy_session.get_roots()", "def get_child_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalog_ids(*args, **kwargs)", "def get_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalogs(*args, **kwargs)", "def get_catalog_hierarchy_id(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy_id\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy_id()", "def get_catalog_nodes(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_nodes\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_nodes(*args, **kwargs)", "def get_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy()", "def get_all_master_ids(self):\r\n return self._handler.get_all_master_ids()", "def get_clan_pks(self):\n return self.get_ancestor_pks() + [self.pk, ] + self.get_descendant_pks()", "def get_catalog_id(self):\n return self._catalog_id", "def get_ancestor_pks(self):\n raise NotImplementedError()", "def add_root_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.add_root_bin\n self._get_provider_session('catalog_hierarchy_design_session').add_root_catalog(*args, **kwargs)", "def getIDs():", "def get_root_books(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_root_bins\n if self._catalog_session is not None:\n return self._catalog_session.get_root_catalogs()\n return BookLookupSession(\n self._proxy,\n self._runtime).get_books_by_ids(list(self.get_root_book_ids()))", "def getIDs(self):\n return self.multiengine.getIDs()", "def get_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_template\n catalogs = 
self._get_provider_session('catalog_lookup_session').get_catalogs()\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def cluster_ids(self):\n return self.model.cluster_ids", "def all_base_cloud_ids(self):\n raise NotImplementedError", "def uids(self):\n if not self.is_root:\n return None\n if self._uids is None:\n self.arbor._build_attr(\"_uids\", self)\n return self._uids", "def parent_ids(self):\n return self._parent_ids", "def all_env_ids(self) -> np.ndarray:", "def catalog_id(self):\n return self._catalog_id", "def get_catalog_hierarchy_session(self):\n raise Unimplemented()", "def get_roots(self):\n raise NotImplementedError()", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_hierarchy_session(*args, **kwargs)", "def getCatalogs():", "def get_ids(self):\n return self._ids" ]
[ "0.766696", "0.73261166", "0.729064", "0.7192844", "0.68685246", "0.683469", "0.65727335", "0.6398405", "0.6275661", "0.62064254", "0.60082555", "0.5885929", "0.58608425", "0.58488876", "0.5833088", "0.5794371", "0.57656646", "0.57117355", "0.56929755", "0.56665826", "0.56581175", "0.56484216", "0.56404495", "0.5620535", "0.56079257", "0.5607826", "0.55942893", "0.55807203", "0.5574282", "0.5574146" ]
0.8767685
0
Pass through to provider CatalogHierarchySession.get_root_catalogs
def get_root_catalogs(self): # Implemented from kitosid template for - # osid.resource.BinHierarchySession.get_root_bins return self._get_provider_session('catalog_hierarchy_session').get_root_catalogs()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_root_catalog_ids(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalog_ids()", "def add_root_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.add_root_bin\n self._get_provider_session('catalog_hierarchy_design_session').add_root_catalog(*args, **kwargs)", "def get_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy()", "def get_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalogs(*args, **kwargs)", "def getCatalogs():", "def get_catalog_nodes(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_nodes\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_nodes(*args, **kwargs)", "def get_root_books(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_root_bins\n if self._catalog_session is not None:\n return self._catalog_session.get_root_catalogs()\n return BookLookupSession(\n self._proxy,\n self._runtime).get_books_by_ids(list(self.get_root_book_ids()))", "def get_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_template\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs()\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_catalog_hierarchy_session(self):\n raise Unimplemented()", "def remove_root_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_root_bin\n self._get_provider_session('catalog_hierarchy_design_session').remove_root_catalog(*args, **kwargs)", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_hierarchy_session(*args, **kwargs)", "def get_root_book_ids(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_root_bin_ids\n if self._catalog_session is not None:\n return self._catalog_session.get_root_catalog_ids()\n return self._hierarchy_session.get_roots()", "def get(self):\n return GenericGet().get_catalogs()", "def get_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bins\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalogs(*args, **kwargs)", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_root_family_ids(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_root_bin_ids\n if self._catalog_session is not None:\n return self._catalog_session.get_root_catalog_ids()\n return self._hierarchy_session.get_roots()", "def get_catalog(self):\n\n rep = req.get_json(self.CATALOG)\n repo_list = 
rep[\"repositories\"]\n\n for repo in repo_list:\n self.list.append(Repository(repo))\n\n return self.list", "def get_parent_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalog_ids(*args, **kwargs)", "def get_root_families(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_root_bins\n if self._catalog_session is not None:\n return self._catalog_session.get_root_catalogs()\n return FamilyLookupSession(\n self._proxy,\n self._runtime).get_families_by_ids(list(self.get_root_family_ids()))", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def catalogs(self):\n return sorted(self._catalog_comp_info_dicts.keys())", "def get_roots(self):\n raise NotImplementedError()", "def init_catalog():\n return controller.init_catalog()", "def get_catalogs_by_parent_genus_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_parent_genus_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_parent_genus_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def test_get_hyperflex_app_catalog_list(self):\n pass", "def root_nodes(request):\n # TODO: collect\n\n nodes = HierarchyNode.objects.filter(parent=None)\n return render_to_response('root_nodes.html', {'nodes': nodes},\n context_instance=RequestContext(request))", "def retrieve_catalog_for_path(self, needle_path):\n clg = self.retrieve_root_catalog()\n while True:\n new_nested_reference = clg.find_nested_for_path(needle_path)\n if new_nested_reference is None:\n break\n nested_reference = new_nested_reference\n clg = self.retrieve_catalog(nested_reference.hash)\n return clg", "def initCatalog():\n return controller.initCatalog()", "def initCatalog():\n return controller.initCatalog()", "def initCatalog():\n return controller.initCatalog()" ]
[ "0.7801686", "0.6804592", "0.67762333", "0.6740477", "0.6688819", "0.66048336", "0.6503141", "0.64630276", "0.6396706", "0.63399214", "0.6286938", "0.6255754", "0.62088436", "0.61911476", "0.6149752", "0.60605836", "0.6013731", "0.5986957", "0.59574866", "0.5935375", "0.5925987", "0.5923579", "0.5914303", "0.5911669", "0.5861088", "0.5850522", "0.5782302", "0.57304806", "0.57304806", "0.57304806" ]
0.85008
0
Pass through to provider CatalogHierarchySession.has_parent_catalogs
def has_parent_catalogs(self, *args, **kwargs): # Implemented from kitosid template for - # osid.resource.BinHierarchySession.has_parent_bins return self._get_provider_session('catalog_hierarchy_session').has_parent_catalogs(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_parent_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_parent_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_parent_of_catalog(*args, **kwargs)", "def get_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalogs(*args, **kwargs)", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def is_ancestor_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_ancestor_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_ancestor_of_catalog(*args, **kwargs)", "def supports_catalog_hierarchy(self):\n return False", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def has_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_child_bins\n return self._get_provider_session('catalog_hierarchy_session').has_child_catalogs(*args, **kwargs)", "def get_parent_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalog_ids(*args, **kwargs)", "def has_parent(self):\n return False", "def contains_parent(self, pid):\n return pid in self._parent_ids", "def is_parent_of(self):\n return self.hasLabel('parent_of')", "def has_parent(self):\n return self.parent != None", "def has_parent(self):\n return self._parent_ is not None", "def is_parent(self) -> bool:\n return AccountEntry.objects.filter(parent=self).exists()", "def has_parents(self):\n return len(self._parents) > 0", "def is_parent(self):\n if self.parent is not None:\n return False\n return True", "def has_parent(self, term):\n for parent in self.parents:\n if parent.id == term or parent.has_parent(term):\n return True\n return False", "def is_child_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_child_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_child_of_catalog(*args, **kwargs)", "def has_parent_key(self):\n if self.is_root():\n return False\n try:\n self.parent_key()\n return True\n except ParseException:\n return False", "def supports_catalog_hierarchy_design(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy_design()", "def is_parent_of(cls, *args):\n return cls.graph_traversal(None, None, Bytecode()).is_parent_of(*args)", "def has_parent(parent, parent_id, children):\n args = get_args(request.args)\n if request.method == 'GET':\n #Something like /api/domains/<id>/virtualmachines will be equivalent to listVirtualMachines?domainid=<id>\n verb = \"list\"\n subject = children\n #If parent is 'domains' it is added into args as domainid, i.e singular[domains] + 'id'\n args[singular[parent] + 'id'] = parent_id\n 
return apicall(verb, subject, args)", "def is_parent(review_request, commit_data=None):\n commit_data = fetch_commit_data(review_request, commit_data=commit_data)\n\n return str(commit_data.extra_data.get(\n SQUASHED_KEY, False)).lower() == 'true'", "def has_catalogue(self):\n return self.catalogue is not None", "def is_parent(self):\n return not self.children", "def supports_catalog_hierarchy_design(self):\n return False", "def has_parent(obj, parent_name):\n if obj.parent is None:\n return False\n if obj.parent.name is None:\n return False\n elif obj.parent.name == parent_name:\n return True\n else:\n return has_parent(obj.parent, parent_name)", "def can_lookup_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.can_lookup_bins_template\n return self._get_provider_session('catalog_lookup_session').can_lookup_catalogs()", "def is_parent(self, item):\n if len(self.df.loc[self.df['parent_code']==item, :]): \n return True\n return False", "def _refers_to_parent_table(self) -> bool:\n pt = self.parent_persist_selectable\n mt = self.child_persist_selectable\n result = False\n\n def visit_binary(binary: BinaryExpression[Any]) -> None:\n nonlocal result\n c, f = binary.left, binary.right\n if (\n isinstance(c, expression.ColumnClause)\n and isinstance(f, expression.ColumnClause)\n and pt.is_derived_from(c.table)\n and pt.is_derived_from(f.table)\n and mt.is_derived_from(c.table)\n and mt.is_derived_from(f.table)\n ):\n result = True\n\n visitors.traverse(self.primaryjoin, {}, {\"binary\": visit_binary})\n return result" ]
[ "0.81539416", "0.70088917", "0.68231374", "0.6792578", "0.6758142", "0.668867", "0.6635198", "0.66057396", "0.65017813", "0.637503", "0.63561594", "0.634598", "0.63103586", "0.6302823", "0.6243032", "0.6171307", "0.616938", "0.6165371", "0.6160172", "0.6029738", "0.6018495", "0.5981683", "0.5964566", "0.5915263", "0.58511376", "0.585041", "0.58414406", "0.5836384", "0.5827931", "0.5804429" ]
0.86852837
0
Pass through to provider CatalogHierarchySession.is_parent_of_catalog
def is_parent_of_catalog(self, *args, **kwargs): # Implemented from kitosid template for - # osid.resource.BinHierarchySession.is_parent_of_bin return self._get_provider_session('catalog_hierarchy_session').is_parent_of_catalog(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def has_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').has_parent_catalogs(*args, **kwargs)", "def is_parent_of(self):\n return self.hasLabel('parent_of')", "def is_parent(self):\n if self.parent is not None:\n return False\n return True", "def is_ancestor_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_ancestor_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_ancestor_of_catalog(*args, **kwargs)", "def has_parent(self):\n return False", "def is_parent_of(cls, *args):\n return cls.graph_traversal(None, None, Bytecode()).is_parent_of(*args)", "def has_parent(self):\n return self.parent != None", "def has_parent(self):\n return self._parent_ is not None", "def is_parent(self) -> bool:\n return AccountEntry.objects.filter(parent=self).exists()", "def is_parent(review_request, commit_data=None):\n commit_data = fetch_commit_data(review_request, commit_data=commit_data)\n\n return str(commit_data.extra_data.get(\n SQUASHED_KEY, False)).lower() == 'true'", "def get_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalogs(*args, **kwargs)", "def has_parent_key(self):\n if self.is_root():\n return False\n try:\n self.parent_key()\n return True\n except ParseException:\n return False", "def contains_parent(self, pid):\n return pid in self._parent_ids", "def supports_catalog_hierarchy(self):\n return False", "def get_parent_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalog_ids(*args, **kwargs)", "def is_parent(self):\n return not self.children", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def has_parent(obj, parent_name):\n if obj.parent is None:\n return False\n if obj.parent.name is None:\n return False\n elif obj.parent.name == parent_name:\n return True\n else:\n return has_parent(obj.parent, parent_name)", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def is_child_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_child_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_child_of_catalog(*args, **kwargs)", "def is_parent(self, item):\n if len(self.df.loc[self.df['parent_code']==item, :]): \n return True\n return False", "def _refers_to_parent_table(self) -> bool:\n pt = self.parent_persist_selectable\n mt = self.child_persist_selectable\n result = False\n\n def visit_binary(binary: BinaryExpression[Any]) -> None:\n nonlocal result\n c, f = binary.left, binary.right\n if (\n isinstance(c, expression.ColumnClause)\n and isinstance(f, expression.ColumnClause)\n and pt.is_derived_from(c.table)\n and pt.is_derived_from(f.table)\n and mt.is_derived_from(c.table)\n and 
mt.is_derived_from(f.table)\n ):\n result = True\n\n visitors.traverse(self.primaryjoin, {}, {\"binary\": visit_binary})\n return result", "def _determine_parent(self, caller):\n self.msgin(4, \"determine_parent\", caller)\n\n parent = None\n if caller:\n pname = caller.identifier\n\n if isinstance(caller, Package):\n parent = caller\n\n elif '.' in pname:\n pname = pname[:pname.rfind('.')]\n parent = self.findNode(pname)\n\n elif caller.packagepath:\n # XXX: I have no idea why this line\n # is necessary.\n parent = self.findNode(pname)\n\n self.msgout(4, \"determine_parent ->\", parent)\n return parent", "def isSetIdentifyingParent(self):\n return _libsbml.SpeciesTypeComponentIndex_isSetIdentifyingParent(self)", "def has_parent(self, term):\n for parent in self.parents:\n if parent.id == term or parent.has_parent(term):\n return True\n return False", "def is_parent(child, parent):\n # Get the list of processes\n assert child is not None\n assert parent is not None\n #child_ranks = [i for i in xrange(child.Get_size())]\n child_group = child.Get_group()\n parent_group = parent.Get_group()\n inter_group = MPI.Group.Intersect(child_group, parent_group)\n return child_group.Get_size() == inter_group.Get_size()", "def ignore_parent(self) -> bool:\n return self._strategy.ignore_parent", "def is_parent_ref(\n self,\n schema: s_schema.Schema,\n reference: Object,\n ) -> bool:\n return False", "def is_parent_of_book(self, id_, book_id):\n # Implemented from template for\n # osid.resource.BinHierarchySession.is_parent_of_bin\n if self._catalog_session is not None:\n return self._catalog_session.is_parent_of_catalog(id_=id_, catalog_id=book_id)\n return self._hierarchy_session.is_parent(id_=book_id, parent_id=id_)", "def has_parent(self, index):\n return self.get_parent_index(index) < len(self.heap)" ]
[ "0.7857881", "0.72796524", "0.6986244", "0.69826543", "0.68545604", "0.6775658", "0.67153686", "0.67133415", "0.66863465", "0.6580127", "0.65474105", "0.64807767", "0.6463843", "0.64192414", "0.6390802", "0.633645", "0.63349867", "0.63169396", "0.6304779", "0.62793714", "0.6259616", "0.61872756", "0.60769665", "0.60744876", "0.60592836", "0.59951633", "0.5953713", "0.5942228", "0.59277886", "0.5924703" ]
0.8697966
0
Pass through to provider CatalogHierarchySession.get_parent_catalog_ids
def get_parent_catalog_ids(self, *args, **kwargs): # Implemented from kitosid template for - # osid.resource.BinHierarchySession.get_parent_bin_ids return self._get_provider_session('catalog_hierarchy_session').get_parent_catalog_ids(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalogs(*args, **kwargs)", "def parent_ids(self):\n return self._parent_ids", "def get_root_catalog_ids(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalog_ids()", "def get_child_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalog_ids(*args, **kwargs)", "def has_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').has_parent_catalogs(*args, **kwargs)", "def get_catalog_node_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_node_ids\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_node_ids(*args, **kwargs)", "def get_parents_list(self):\n return []", "def is_parent_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_parent_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_parent_of_catalog(*args, **kwargs)", "def get_parent_collections(self, collid):\n coll_list = []\n pid_cond = SQLBinaryExpr(COL_NAME_COLL_COLLID, OP_EQ, SQLLiteral(collid))\n parent_id = self.select_generic_data(select_list=[COL_NAME_COLL_PARENTID], table_list=[self.GetQualifiedTableName(TABLE_NAME_COLL)], where=pid_cond)\n parcollid = parent_id[0][COL_NAME_COLL_PARENTID]\n while parcollid is not None :\n cond = SQLBinaryExpr(COL_NAME_COLL_COLLID, OP_EQ, SQLLiteral(parcollid))\n par_coll = self.select_generic_data(table_list=[self.GetQualifiedTableName(TABLE_NAME_COLL)], where=cond)\n parcollid = par_coll[0][COL_NAME_COLL_PARENTID]\n coll_list.append(par_coll[0])\n return coll_list", "def get_clan_pks(self):\n return self.get_ancestor_pks() + [self.pk, ] + self.get_descendant_pks()", "def get_catalog_hierarchy_id(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy_id\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy_id()", "def get_parent_resource_nodes(self):\n raise errors.Unimplemented()", "def get_parent_locations(self, location):\r\n store = self._get_modulestore_for_courseid(location.course_key)\r\n return store.get_parent_locations(location)", "def get_ancestor_pks(self):\n raise NotImplementedError()", "def _get_parents(self):\n parents = []\n parent = self.parent\n while(parent):\n parents.append(parent)\n parent = parent.parent\n parents.reverse()\n return parents", "def parent_family_ids(self):\n\n return self._parent_family_ids", "def parents(self):\n if self._pedigree is None:\n raise Exception(\"Pedigree is not defined\")\n return [self._pedigree.individual(pid) for pid in self._parent_ids]", "def getParents(self):\n return self.parents[:]", "def get_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy()", "def 
get_parents(self):\r\n\r\n raise NotImplementedError()", "def get_parentID(self):\n parent = Collection.find(self.resource.container)\n return parent.uuid", "def get_catalogs_by_parent_genus_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_parent_genus_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_parent_genus_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_parent_book_ids(self, book_id):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_parent_bin_ids\n if self._catalog_session is not None:\n return self._catalog_session.get_parent_catalog_ids(catalog_id=book_id)\n return self._hierarchy_session.get_parents(id_=book_id)", "def layer_parent_nid(self, layer_id):\n ...", "def __getInterProParents(self, filePath):\n interProParentD = {}\n lineL = self.__mU.doImport(filePath, fmt=\"list\")\n stack = []\n for line in lineL:\n content = line.rstrip() # drop \\n\n row = content.split(\"--\")\n ff = row[-1].split(\"::\")\n tS = ff[0].strip()\n # stack[:] = stack[: len(row) - 1] + [row[-1]]\n stack[:] = stack[: len(row) - 1] + [tS]\n for ii, idCode in enumerate(stack):\n if idCode not in interProParentD: # prevents overwriting the parent of idCode, in case idCode has already been iterated over in ParentChildTreeFile.txt\n interProParentD[idCode] = None if ii == 0 else stack[ii - 1]\n else:\n # This will correct the parent of idCode from being None if it's later identified as having a parent at another point in ParentChildTreeFile.txt\n if interProParentD[idCode] is None and ii != 0:\n interProParentD[idCode] = stack[ii - 1]\n logger.debug(\"Lineage %r\", \"\\t\".join(stack))\n #\n return interProParentD", "def _parent_path(self, comp):\n path = [comp]\n while comp:\n path.append(comp.parent())\n comp = comp.parent()\n return path", "def get_parents(self):\n return NodeList(self._my_map['parentNodes'])", "def get_parentID(self):\n parent_path = self.collection.container\n if self.collection.is_root:\n parent_path = \"/\"\n parent = Collection.find(parent_path)\n return parent.uuid", "def parents(self):\n return tuple(self._parents)", "def get_parent_id(self):\n return self._parent_id" ]
[ "0.7998668", "0.7363536", "0.696838", "0.69027364", "0.66203564", "0.6506402", "0.64475", "0.6402672", "0.6333523", "0.6320524", "0.6316251", "0.6168418", "0.61279976", "0.610233", "0.60682", "0.60235626", "0.60160255", "0.6005199", "0.5984334", "0.5960438", "0.59309244", "0.5929857", "0.5923008", "0.59184676", "0.5908988", "0.5902802", "0.5896993", "0.5870993", "0.58413357", "0.5822362" ]
0.87886137
0
Pass through to provider CatalogHierarchySession.get_parent_catalogs
def get_parent_catalogs(self, *args, **kwargs): # Implemented from kitosid template for - # osid.resource.BinHierarchySession.get_parent_bins return self._get_provider_session('catalog_hierarchy_session').get_parent_catalogs(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_parent_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalog_ids(*args, **kwargs)", "def has_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').has_parent_catalogs(*args, **kwargs)", "def is_parent_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_parent_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_parent_of_catalog(*args, **kwargs)", "def get_catalogs_by_parent_genus_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_parent_genus_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_parent_genus_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy()", "def get_catalog_hierarchy_session(self):\n raise Unimplemented()", "def get_parents_list(self):\n return []", "def get_parent_locations(self, location):\r\n store = self._get_modulestore_for_courseid(location.course_key)\r\n return store.get_parent_locations(location)", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_hierarchy_session(*args, **kwargs)", "def get_parent_resource_nodes(self):\n raise errors.Unimplemented()", "def get_root_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bins\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalogs()", "def getParents(self):\n return self.parents[:]", "def get_parents(self):\r\n\r\n raise NotImplementedError()", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_parent_collections(self, collid):\n coll_list = []\n pid_cond = SQLBinaryExpr(COL_NAME_COLL_COLLID, OP_EQ, SQLLiteral(collid))\n parent_id = self.select_generic_data(select_list=[COL_NAME_COLL_PARENTID], table_list=[self.GetQualifiedTableName(TABLE_NAME_COLL)], where=pid_cond)\n parcollid = parent_id[0][COL_NAME_COLL_PARENTID]\n while parcollid is not None :\n cond = SQLBinaryExpr(COL_NAME_COLL_COLLID, OP_EQ, SQLLiteral(parcollid))\n par_coll = self.select_generic_data(table_list=[self.GetQualifiedTableName(TABLE_NAME_COLL)], where=cond)\n parcollid = par_coll[0][COL_NAME_COLL_PARENTID]\n coll_list.append(par_coll[0])\n return coll_list", "def get_root_catalog_ids(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalog_ids()", "def _parents(self, prefix):\n if self.inherit:\n suffix = self.inherit.name\n value = self.tracconfig.get(\n self.section, '%s.%s' % (prefix, 
suffix), default=None)\n if value:\n return self._parents_to_list(value)\n return None", "def show_available_parents(self):\n self.categoryParent.clear()\n\n parents = self.orm.fetch_parents()\n self.categoryParent.addItems([p.name for p in parents])\n\n self.categoryParent.addItem('')\n self.categoryParent.setCurrentText('')", "def get_parents(self):\n return self.parents", "def parent_ids(self):\n return self._parent_ids", "def get_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bins\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalogs(*args, **kwargs)", "def _get_parents(self):\n parents = []\n parent = self.parent\n while(parent):\n parents.append(parent)\n parent = parent.parent\n parents.reverse()\n return parents", "def get_child_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalog_ids(*args, **kwargs)", "def getParents(obj):", "def get_parents(self, table_name):\n return self._parent_map[table_name]", "def get_parent_subfolders(self):\n return [x[0] for x in os.walk(self.parent_folder)]", "def get_parents(self):\n return NodeList(self._my_map['parentNodes'])", "def _parent_path(self, comp):\n path = [comp]\n while comp:\n path.append(comp.parent())\n comp = comp.parent()\n return path", "def test_get_parents_role(self):\n root = role_middleware.get_root()\n children = role_middleware.get_all_children(root.id)\n role = children[-1]\n parent_list = role_middleware.get_parents(role.id)\n for ro in parent_list:\n print(ro.name, ro.id)", "def parent_resources(cls):\n parent = cls.parent_resource\n parents = [parent]\n\n try:\n while True:\n parent = parent.parent_resource\n parents.append(parent)\n except AttributeError:\n pass\n\n parents.reverse()\n return parents" ]
[ "0.8095617", "0.73897886", "0.7032074", "0.6821506", "0.6483291", "0.6390385", "0.6327576", "0.63254446", "0.6324419", "0.63218534", "0.62388545", "0.61941415", "0.6172516", "0.61589235", "0.615859", "0.61254734", "0.6033831", "0.60224056", "0.60156983", "0.60114145", "0.59960955", "0.59605825", "0.59138006", "0.59039205", "0.58825254", "0.5840333", "0.5833885", "0.5832764", "0.5831024", "0.5775249" ]
0.853051
0
Pass through to provider CatalogHierarchySession.is_ancestor_of_catalog
def is_ancestor_of_catalog(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchySession.is_ancestor_of_bin
    return self._get_provider_session('catalog_hierarchy_session').is_ancestor_of_catalog(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_parent_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_parent_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_parent_of_catalog(*args, **kwargs)", "def is_ancestor(self, other):\n\n if other is self:\n return True\n elif hasattr(other, 'base'):\n return self.is_ancestor(other.base)\n else:\n return False", "def is_ancestor_of(self, node):\r\n if self.node_type == 'Stage':\r\n return True\r\n\r\n parent = node.get_parent()\r\n while parent:\r\n if id(parent) == id(self):\r\n return True\r\n parent = parent.get_parent()\r\n\r\n return False", "def has_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').has_parent_catalogs(*args, **kwargs)", "def is_ancestor(ancestor, parent):\n try:\n subprocess.check_call([\"git\", \"merge-base\", \"--is-ancestor\", ancestor, parent],)\n return True\n except subprocess.CalledProcessError:\n return False", "def isancestor(s2,tree):\n if tree is s2: return True\n if tree is None: return False\n else:\n return isancestor(s2, tree.left) or isancestor(s2, tree.right)", "def is_descendant_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_descendant_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_descendant_of_catalog(*args, **kwargs)", "def HasAncestor(self, other):\n return (self == other) or (self.parent and self.parent.HasAncestor(other))", "def isAncestorOf(self, node):\n if (self in node.parents()):\n return True\n elif (not node.isSource()):\n return reduce(lambda x,y: x or y, [self.isAncestorOf(x) for x in node.parents()])\n else:\n return False", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def supports_catalog_hierarchy(self):\n return False", "def isAncestorOf(ancestor, child):\n\twhile child is not None:\n\t\tif child is ancestor:\n\t\t\treturn True\n\t\tchild = child.parent()\n\treturn False", "def is_ancestor_of_book(self, id_, book_id):\n # Implemented from template for\n # osid.resource.BinHierarchySession.is_ancestor_of_bin\n if self._catalog_session is not None:\n return self._catalog_session.is_ancestor_of_catalog(id_=id_, catalog_id=book_id)\n return self._hierarchy_session.is_ancestor(id_=id_, ancestor_id=book_id)", "def get_real_ancestor(self):\n for ancestor in self.get_ancestors(ascending = True):\n if not ancestor.abstract_entity:\n return ancestor\n \n return None", "def is_ancestor(parent_alphabet, child_alphabet):\r\n alphabet = parent_alphabet\r\n while alphabet:\r\n if child_alphabet == alphabet:\r\n return True\r\n alphabet = alphabet.alphabet\r\n return False", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def is_ancestor(self, ancestor_rev: \"Commit\", rev: \"Commit\") -> bool:\n try:\n self.git.merge_base(ancestor_rev, rev, is_ancestor=True)\n except GitCommandError as err:\n if err.status == 1:\n return False\n raise\n return True", "def _is_ancestor_of_contained_categories(\n fides_key: str,\n data_categories: List[str],\n) 
-> Tuple[bool, Optional[str]]:\n ref = _get_ref_from_taxonomy(fides_key=fides_key)\n if ref.parent_key:\n if ref.parent_key in data_categories:\n return True, ref.parent_key\n\n return _is_ancestor_of_contained_categories(\n fides_key=ref.parent_key,\n data_categories=data_categories,\n )\n\n return False, None", "def is_parent_of(self):\n return self.hasLabel('parent_of')", "def is_child_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_child_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_child_of_catalog(*args, **kwargs)", "def is_parent(self) -> bool:\n return AccountEntry.objects.filter(parent=self).exists()", "def has_parent(self):\n return False", "def ancestor(self):\n if hasattr(self, 'base'):\n return self.base.ancestor\n else:\n return self", "def _refers_to_parent_table(self) -> bool:\n pt = self.parent_persist_selectable\n mt = self.child_persist_selectable\n result = False\n\n def visit_binary(binary: BinaryExpression[Any]) -> None:\n nonlocal result\n c, f = binary.left, binary.right\n if (\n isinstance(c, expression.ColumnClause)\n and isinstance(f, expression.ColumnClause)\n and pt.is_derived_from(c.table)\n and pt.is_derived_from(f.table)\n and mt.is_derived_from(c.table)\n and mt.is_derived_from(f.table)\n ):\n result = True\n\n visitors.traverse(self.primaryjoin, {}, {\"binary\": visit_binary})\n return result", "def ancestor(self, c2):\n return self._parents[0].ancestor(c2) # punt on two parents for now", "def can_access_book_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_access_catalog_hierarchy()\n return True", "def is_parent_of(cls, *args):\n return cls.graph_traversal(None, None, Bytecode()).is_parent_of(*args)", "def is_descendant(self, other):\n return other.is_ancestor(self)", "def is_ancestor_of_family(self, id_, family_id):\n # Implemented from template for\n # osid.resource.BinHierarchySession.is_ancestor_of_bin\n if self._catalog_session is not None:\n return self._catalog_session.is_ancestor_of_catalog(id_=id_, catalog_id=family_id)\n return self._hierarchy_session.is_ancestor(id_=id_, ancestor_id=family_id)", "def __Ancestor(self, flag):\n command = self._parent\n while command:\n if flag in command.flags:\n return True\n command = command._parent # pylint: disable=protected-access\n return False" ]
[ "0.73788285", "0.7033561", "0.68419254", "0.68260515", "0.67330486", "0.6610965", "0.6431064", "0.6375805", "0.63628376", "0.6359571", "0.62930906", "0.6237245", "0.6208972", "0.6201697", "0.61422455", "0.6132922", "0.6124667", "0.6006275", "0.59708625", "0.58888805", "0.5873623", "0.58010066", "0.57959974", "0.5698914", "0.5691251", "0.5673969", "0.5664063", "0.5662953", "0.5633027", "0.56252337" ]
0.867389
0
Pass through to provider CatalogHierarchySession.has_child_catalogs
def has_child_catalogs(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchySession.has_child_bins
    return self._get_provider_session('catalog_hierarchy_session').has_child_catalogs(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_child_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_child_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_child_of_catalog(*args, **kwargs)", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def has_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').has_parent_catalogs(*args, **kwargs)", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def supports_catalog_hierarchy(self):\n return False", "def get_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bins\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalogs(*args, **kwargs)", "def is_descendant_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_descendant_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_descendant_of_catalog(*args, **kwargs)", "def can_lookup_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.can_lookup_bins_template\n return self._get_provider_session('catalog_lookup_session').can_lookup_catalogs()", "def add_child_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.add_child_bin\n self._get_provider_session('catalog_hierarchy_design_session').add_child_catalog(*args, **kwargs)", "def has_children(self):\n\n pass", "def get_child_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalog_ids(*args, **kwargs)", "def supports_catalog_hierarchy_design(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy_design()", "def can_search_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinQuerySession.can_search_bins_template\n return self._get_provider_session('catalog_query_session').can_search_catalogs()", "def is_parent_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_parent_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_parent_of_catalog(*args, **kwargs)", "def hasChildren():", "def can_create_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_create_bins\n return self._get_provider_session('catalog_admin_session').can_create_catalogs()", "def can_modify_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_design_session').can_modify_catalog_hierarchy()", "def supports_catalog_hierarchy_design(self):\n return False", "def 
has_catalogue(self):\n return self.catalogue is not None", "def remove_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_child_bins\n self._get_provider_session('catalog_hierarchy_design_session').remove_child_catalogs(*args, **kwargs)", "def has_children(self):\n return False", "def has_dynamic_children(self):\r\n return True", "def has_dynamic_children(self):\r\n return True", "def hasChildren(self):\n return self.childCount() > 0", "def can_access_book_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_access_catalog_hierarchy()\n return True", "def can_delete_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_delete_bins\n return self._get_provider_session('catalog_admin_session').can_delete_catalogs()", "def is_ancestor_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_ancestor_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_ancestor_of_catalog(*args, **kwargs)", "def supports_catalog(self):\n return False", "def is_subhalo(self, childid, parentid):\n if (childid in self._halos[parentid].properties['children']):\n return True\n else:\n return False", "def has_dynamic_children(self):\r\n return False" ]
[ "0.7949357", "0.7360942", "0.72842836", "0.7187016", "0.71072066", "0.6979136", "0.6863362", "0.6737884", "0.6410965", "0.63950104", "0.638041", "0.6379482", "0.63554066", "0.62884295", "0.62827367", "0.6278772", "0.625046", "0.6156322", "0.61274344", "0.6070764", "0.6001654", "0.59514356", "0.59514356", "0.5908918", "0.5904894", "0.585778", "0.58501095", "0.58477724", "0.5845046", "0.5844246" ]
0.86689335
0
Pass through to provider CatalogHierarchySession.is_child_of_catalog
def is_child_of_catalog(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchySession.is_child_of_bin
    return self._get_provider_session('catalog_hierarchy_session').is_child_of_catalog(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def has_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_child_bins\n return self._get_provider_session('catalog_hierarchy_session').has_child_catalogs(*args, **kwargs)", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def is_descendant_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_descendant_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_descendant_of_catalog(*args, **kwargs)", "def is_parent_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_parent_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_parent_of_catalog(*args, **kwargs)", "def supports_catalog_hierarchy(self):\n return False", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def has_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').has_parent_catalogs(*args, **kwargs)", "def is_child_of(self, *args):\n return _ida_hexrays.cexpr_t_is_child_of(self, *args)", "def is_subhalo(self, childid, parentid):\n if (childid in self._halos[parentid].properties['children']):\n return True\n else:\n return False", "def islchild(self):\n\t\tif (self.parent() and self.parent().lchild() is self): #TODO is or == here\n\t\t\treturn True\n\t\treturn False", "def can_modify_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_design_session').can_modify_catalog_hierarchy()", "def get_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bins\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalogs(*args, **kwargs)", "def _is_child(self, parent, child): # type: (str, str) -> bool\n return child != parent and child.startswith(parent + \".\")", "def add_child_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.add_child_bin\n self._get_provider_session('catalog_hierarchy_design_session').add_child_catalog(*args, **kwargs)", "def has_child(self):\n return False", "def haschild(self, child):\n return pbxhelper.pbxobj_has_pbxlist_value(self, u'pbx_children', child, \\\n self.is_valid_child)", "def has_child(self, term):\n for parent in self.children:\n if parent.id == term or parent.has_child(term):\n return True\n return False", "def has_children(self):\n\n pass", "def is_ancestor_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_ancestor_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_ancestor_of_catalog(*args, **kwargs)", "def supports_catalog_hierarchy_design(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return 
self._provider_manager.supports_catalog_hierarchy_design()", "def supports_catalog_hierarchy_design(self):\n return False", "def hasChildren():", "def get_child_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalog_ids(*args, **kwargs)", "def IsChild(self, *args):\n return _XCAFDoc.XCAFDoc_GraphNode_IsChild(self, *args)", "def is_known(self, child):\r\n return child in self._parents", "def contains_child(self, pid):\n return pid in self._children_ids", "def has_child(self, uid: str) -> bool:\n return uid in self._children_uids", "def has_children(self):\n return False", "def is_allowed_to_have_child_terms(self):\n return self._is_allowed_to_have_child_terms", "def test_hasChild(self):\n bag = Bag('faded plum', [(2, 'wavy cyan'), (1, 'navy turquoise')])\n res = bag.hasChild('wavy cyan')\n self.assertTrue(res)\n bag1 = Bag('gaded plum', [(2, 'wavy cyan'), (1, 'navy turquoise')])\n res1 = bag1.hasChild('gaded plum')\n self.assertFalse(res1)" ]
[ "0.77237946", "0.7143672", "0.70289576", "0.69561505", "0.688092", "0.66770446", "0.66351765", "0.66218406", "0.6584798", "0.6354802", "0.6343982", "0.6327329", "0.632024", "0.6268294", "0.6172168", "0.6165548", "0.61519617", "0.6147762", "0.6114851", "0.60704774", "0.6052648", "0.60372835", "0.600308", "0.5986564", "0.5963951", "0.5951952", "0.5939503", "0.5872893", "0.5855785", "0.5833108" ]
0.85293674
0
Pass through to provider CatalogHierarchySession.get_child_catalog_ids
def get_child_catalog_ids(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchySession.get_child_bin_ids
    return self._get_provider_session('catalog_hierarchy_session').get_child_catalog_ids(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bins\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalogs(*args, **kwargs)", "def get_parent_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalog_ids(*args, **kwargs)", "def get_root_catalog_ids(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalog_ids()", "def get_catalog_node_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_node_ids\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_node_ids(*args, **kwargs)", "def children_ids(self):\n return self._children_ids", "def get_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalogs(*args, **kwargs)", "def get_child_ids(id,conn):\n\n child_ids = ('WITH RECURSIVE children AS '\n '(SELECT subject_id '\n 'FROM cvterm_relationship '\n 'WHERE object_id = %s '\n 'UNION '\n 'SELECT cr.subject_id '\n 'FROM cvterm_relationship cr '\n 'INNER JOIN children ch ON ch.subject_id = cr.object_id) '\n 'SELECT * FROM children')\n ids = connect(child_ids,id,conn)\n list_of_ids = []\n for item in ids:\n list_of_ids.append(item[0])\n return(list_of_ids)", "def get_catalog_hierarchy_id(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy_id\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy_id()", "def get_child_book_ids(self, book_id):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_child_bin_ids\n if self._catalog_session is not None:\n return self._catalog_session.get_child_catalog_ids(catalog_id=book_id)\n return self._hierarchy_session.get_children(id_=book_id)", "def getChildPIDs(self):\n\t\treturn self.pids", "def has_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_child_bins\n return self._get_provider_session('catalog_hierarchy_session').has_child_catalogs(*args, **kwargs)", "def get_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy()", "def get_catalogs_by_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_ids\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_ids(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def _selected_children(self):\n ids = self.session.pop('phoenix.selected-children')\n self.session.changed()\n return ids", "def parent_ids(self):\n return self._parent_ids", "def get_child_ids(self, job_specifier, project=None, status=None):\n if project is None:\n project = self._project\n id_master = self.get_job_id(project=project, 
job_specifier=job_specifier)\n if id_master is None:\n return []\n else:\n if status is not None:\n id_lst = self._job_table[\n (self._job_table.masterid == id_master) & (self._job_table.status == status)].id.values\n else:\n id_lst = self._job_table[(self._job_table.masterid == id_master)].id.values\n return sorted(id_lst)", "def get_catalog_nodes(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_nodes\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_nodes(*args, **kwargs)", "def remove_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_child_bins\n self._get_provider_session('catalog_hierarchy_design_session').remove_child_catalogs(*args, **kwargs)", "def get_child_locations(self,location):\n child_list=[]\n child_list.append(location.id)\n child_locations = self.env['stock.location'].search([('usage','=','internal'),('location_id','=',location.id)])\n if child_locations:\n for child_location in child_locations:\n child_list.append(child_location.id)\n ## recursive calling to find child of child lcoations\n children_loc = self.get_child_locations(child_location)\n ## adding child into one list\n for child in children_loc:\n child_list.append(child)\n return child_list", "def getIDs():", "def add_child_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.add_child_bin\n self._get_provider_session('catalog_hierarchy_design_session').add_child_catalog(*args, **kwargs)", "def get_ids(self):\n return self._ids", "def get_ids(self) -> List[str]:", "def get_child_ids(cur, node):\n sql = \"\"\"\n SELECT\n id\n FROM\n nodes\n WHERE\n parent=%s\n ORDER BY\n position;\n \"\"\"\n cur.execute(sql, (str(node), ))\n for result in cur:\n yield str(result['id'])", "def get_root_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bins\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalogs()", "def getIDs(self):\n return self.multiengine.getIDs()", "def get_child_family_ids(self, family_id):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_child_bin_ids\n if self._catalog_session is not None:\n return self._catalog_session.get_child_catalog_ids(catalog_id=family_id)\n return self._hierarchy_session.get_children(id_=family_id)", "def ids(self):\n return self._ids", "def activeChildWellIds(self):\n lst=[]\n if self.isReplicateGroup():\n for tc in self.activeChildWells():\n lst.extend(tc.activeChildWellIds())\n else:\n if self.wellids is not None and self.wellids[0] is not None:\n wellid=self.wellids[0]\n else:\n wellid=str(self.childWellIndices()[0])\n lst.append(wellid)\n return lst", "def _subnode_ids(self):\n for ticket in self:\n ticket.subnode_ids = self.search([\n ('parent_id', '=', ticket.id),\n ('type.has_children', '=', True)])" ]
[ "0.7401815", "0.7348298", "0.7025363", "0.6930277", "0.678443", "0.6497605", "0.6119004", "0.611284", "0.60767853", "0.6071542", "0.6027419", "0.5965562", "0.59502524", "0.59161633", "0.5915887", "0.5891585", "0.5855213", "0.58476925", "0.5836786", "0.5830484", "0.5828751", "0.58229136", "0.58116055", "0.57971114", "0.5774937", "0.57091403", "0.56805545", "0.56563264", "0.5633227", "0.5624119" ]
0.8718959
0
Pass through to provider CatalogHierarchySession.get_child_catalogs
def get_child_catalogs(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchySession.get_child_bins
    return self._get_provider_session('catalog_hierarchy_session').get_child_catalogs(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_child_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalog_ids(*args, **kwargs)", "def has_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_child_bins\n return self._get_provider_session('catalog_hierarchy_session').has_child_catalogs(*args, **kwargs)", "def remove_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_child_bins\n self._get_provider_session('catalog_hierarchy_design_session').remove_child_catalogs(*args, **kwargs)", "def add_child_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.add_child_bin\n self._get_provider_session('catalog_hierarchy_design_session').add_child_catalog(*args, **kwargs)", "def get_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalogs(*args, **kwargs)", "def get_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy()", "def is_child_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_child_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_child_of_catalog(*args, **kwargs)", "def getCatalogs():", "def get_catalogs_by_parent_genus_type(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_parent_genus_type\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_parent_genus_type(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_template\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs()\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def get_root_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bins\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalogs()", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_hierarchy_session(*args, **kwargs)", "def remove_child_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_child_bin\n self._get_provider_session('catalog_hierarchy_design_session').remove_child_catalog(*args, **kwargs)", "def children_data(self) -> List[CatalogDataCategoryTreeInterface]:\n return self._children_data", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', 
kwargs=' + str(kwargs))", "def get_catalog_nodes(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_nodes\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_nodes(*args, **kwargs)", "def get_catalog_hierarchy_session(self):\n raise Unimplemented()", "def get_parent_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalog_ids(*args, **kwargs)", "def get_catalogs_by_provider(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinLookupSession.get_bins_by_provider\n catalogs = self._get_provider_session('catalog_lookup_session').get_catalogs_by_provider(*args, **kwargs)\n cat_list = []\n for cat in catalogs:\n cat_list.append(Catalog(self._provider_manager, cat, self._runtime, self._proxy))\n return CatalogList(cat_list)", "def children(self):\n method = \"list_clusters\"\n params = {\n \"parent_id\": self.id\n }\n response = self._client.connection.make_request(method, params)\n\n clusters = []\n for item in response:\n clusters.append(Cluster(self._client, **item))\n return clusters", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def get_child_locations(self,location):\n child_list=[]\n child_list.append(location.id)\n child_locations = self.env['stock.location'].search([('usage','=','internal'),('location_id','=',location.id)])\n if child_locations:\n for child_location in child_locations:\n child_list.append(child_location.id)\n ## recursive calling to find child of child lcoations\n children_loc = self.get_child_locations(child_location)\n ## adding child into one list\n for child in children_loc:\n child_list.append(child)\n return child_list", "def get(self):\n return GenericGet().get_catalogs()", "def get_root_catalog_ids(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalog_ids()", "def get_children(self):\n\n pass", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def get_children(self):\r\n\r\n if not self.has_children:\r\n return []\r\n\r\n if getattr(self, '_child_instances', None) is None:\r\n self._child_instances = [] # pylint: disable=attribute-defined-outside-init\r\n for child_loc in self.children:\r\n try:\r\n child = self.runtime.get_block(child_loc)\r\n child.runtime.export_fs = self.runtime.export_fs\r\n except ItemNotFoundError:\r\n log.exception(u'Unable to load item {loc}, skipping'.format(loc=child_loc))\r\n continue\r\n self._child_instances.append(child)\r\n\r\n return self._child_instances", "def get_children(self):\n raise NotImplementedError()", "def list_detail_catalog(self, catalog_name):\n # list catalog\n self._list_catalog(catalog_name)\n # detail catalog\n self._details_catalog(catalog_name)", "def list_catalogs(self):\n return self._json_object_field_to_list(\n self._get_catalogs_json(), self.__MISSION_STRING)" ]
[ "0.7695005", "0.7191783", "0.6933719", "0.6777457", "0.6694784", "0.6565827", "0.6523393", "0.64990836", "0.64811045", "0.638691", "0.62869525", "0.6214057", "0.6202119", "0.61270833", "0.612524", "0.6088896", "0.6064623", "0.6010594", "0.60035586", "0.5971074", "0.59446925", "0.59282637", "0.58619386", "0.5845368", "0.58100057", "0.57926905", "0.57809126", "0.5755495", "0.57400185", "0.5716325" ]
0.84957665
0
Pass through to provider CatalogHierarchySession.is_descendant_of_catalog
def is_descendant_of_catalog(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchySession.is_descendant_of_bin
    return self._get_provider_session('catalog_hierarchy_session').is_descendant_of_catalog(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_descendant_of(self, node):\n return False", "def is_descendant(self, other):\n return other.is_ancestor(self)", "def is_descendant(self, tree, ancestor, descendant):\n if ancestor == descendant:\n return True\n if descendant:\n return self.is_descendant(tree, ancestor, tree[descendant])\n else:\n return False", "def is_child_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_child_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_child_of_catalog(*args, **kwargs)", "def is_ancestor_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_ancestor_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_ancestor_of_catalog(*args, **kwargs)", "def has_descendant(self, uid: str) -> bool:\n if self.has_child(uid):\n return True\n\n for child in self._children:\n if child.has_descendant(uid):\n return True\n\n return False", "def is_descendant_of(self, class_id):\n if class_id is None or not class_id:\n return False\n if isinstance(class_id, ThesClass):\n class_id = class_id.id\n class_id = int(class_id)\n if class_id in [a.id for a in self.ancestors()]:\n return True\n else:\n return False", "def HasCommonDescendant(self, *args):\n return _BRepAlgo.BRepAlgo_AsDes_HasCommonDescendant(self, *args)", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def is_descendant(self, u, v):\n return bool(self._ll_tree.is_descendant(u, v))", "def supports_catalog_hierarchy(self):\n return False", "def has_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_child_bins\n return self._get_provider_session('catalog_hierarchy_session').has_child_catalogs(*args, **kwargs)", "def IsDescendantOf(self, parent, item):\r\n\r\n while item:\r\n \r\n if item == parent:\r\n \r\n # item is a descendant of parent\r\n return True\r\n \r\n item = item.GetParent()\r\n \r\n return False", "def is_descendant_of_book(self, id_, book_id):\n # Implemented from template for\n # osid.resource.BinHierarchySession.is_descendant_of_bin\n if self._catalog_session is not None:\n return self._catalog_session.is_descendant_of_catalog(id_=id_, catalog_id=book_id)\n return self._hierarchy_session.is_descendant(id_=id_, descendant_id=book_id)", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def islchild(self):\n\t\tif (self.parent() and self.parent().lchild() is self): #TODO is or == here\n\t\t\treturn True\n\t\treturn False", "def hasChildren():", "def is_parent_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_parent_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_parent_of_catalog(*args, **kwargs)", "def test_with_descendants(self):\n branch = ChildRouteFactory.create()\n ChildRouteFactory.create(parent=branch)\n\n with self.assertNumQueries(1):\n ancestors = list(branch.get_ancestors())\n\n self.assertEqual(ancestors, [])", "def supports_catalog_hierarchy_design(self):\n return False", "def supports_catalog_hierarchy_design(self):\n # 
Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy_design()", "def test_descendants(self):\n root = RouteFactory.create()\n branch = ChildRouteFactory.create(parent=root)\n\n with self.assertNumQueries(1):\n descendants = list(root.get_descendants())\n\n self.assertEqual(descendants, [branch])", "def has_parent_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_parent_bins\n return self._get_provider_session('catalog_hierarchy_session').has_parent_catalogs(*args, **kwargs)", "def can_access_book_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_access_catalog_hierarchy()\n return True", "def can_modify_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_design_session').can_modify_catalog_hierarchy()", "def isAncestorOf(ancestor, child):\n\twhile child is not None:\n\t\tif child is ancestor:\n\t\t\treturn True\n\t\tchild = child.parent()\n\treturn False", "def _is_ancestor_of_contained_categories(\n fides_key: str,\n data_categories: List[str],\n) -> Tuple[bool, Optional[str]]:\n ref = _get_ref_from_taxonomy(fides_key=fides_key)\n if ref.parent_key:\n if ref.parent_key in data_categories:\n return True, ref.parent_key\n\n return _is_ancestor_of_contained_categories(\n fides_key=ref.parent_key,\n data_categories=data_categories,\n )\n\n return False, None", "def is_descendant_of_family(self, id_, family_id):\n # Implemented from template for\n # osid.resource.BinHierarchySession.is_descendant_of_bin\n if self._catalog_session is not None:\n return self._catalog_session.is_descendant_of_catalog(id_=id_, catalog_id=family_id)\n return self._hierarchy_session.is_descendant(id_=id_, descendant_id=family_id)", "def conditions_presence_one_descendant(self, position):\n return 2 * position + 1 < self.size_heap <= 2 * position + 2", "def Descendant(self, *args):\n return _BRepAlgo.BRepAlgo_AsDes_Descendant(self, *args)" ]
[ "0.7492194", "0.7202114", "0.68935853", "0.67118734", "0.66138786", "0.65653235", "0.6547139", "0.6419343", "0.63985586", "0.639619", "0.6300235", "0.6179104", "0.6114777", "0.604928", "0.60068697", "0.5845451", "0.58135253", "0.580462", "0.5800574", "0.57724535", "0.5709981", "0.5578215", "0.5562351", "0.5552882", "0.55487865", "0.5528558", "0.5474271", "0.5464558", "0.54315567", "0.5426602" ]
0.8551386
0
Pass through to provider CatalogHierarchySession.get_catalog_node_ids
def get_catalog_node_ids(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchySession.get_bin_node_ids
    return self._get_provider_session('catalog_hierarchy_session').get_catalog_node_ids(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_node_ids(self):\n \n return self.node_ids", "def node_ids(self):\n return [self.node_id]", "def _node_ids(self, nodes=None): # this function comes from BaseCard.py\n if not nodes:\n nodes = self.nodes\n if isinstance(nodes[0], integer_types):\n node_ids = [node for node in nodes]\n else:\n node_ids = [node.nid for node in nodes]\n assert 0 not in node_ids, 'node_ids = %s' % (node_ids)\n return node_ids", "def _node_ids(self, nodes=None):\n if not nodes:\n nodes = self.nodes\n if isinstance(nodes[0], integer_types):\n return nodes\n return [node.nid for node in nodes]", "def get_catalog_nodes(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_nodes\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_nodes(*args, **kwargs)", "def get_plate_ids_from_node_ids(self, nodes: list[int]) -> list[int]:\n ids = []\n\n for k, v in vars(self).items():\n if v.nodes == nodes:\n ids.append(k)\n\n if len(ids) == 0:\n ids = None\n\n return ids", "def get_node_ids(self, node_id):\n if node_id is not None:\n names = [node_id]\n else:\n names = []\n query_url = ('/api/node/class/fabricNode.json?'\n 'query-target-filter=eq(fabricNode.role,\"leaf\")')\n error_message = 'Could not get switch list from APIC.'\n nodes = self._get_query(query_url, error_message)\n for node in nodes:\n names.append(str(node['fabricNode']['attributes']['id']))\n return names", "def get_root_catalog_ids(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalog_ids()", "def get_parent_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_parent_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_parent_catalog_ids(*args, **kwargs)", "def get_child_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalog_ids(*args, **kwargs)", "def getIDs():", "def get_book_node_ids(self, book_id, ancestor_levels, descendant_levels, include_siblings):\n # Implemented from template for\n # osid.resource.BinHierarchySession.get_bin_node_ids\n if self._catalog_session is not None:\n return self._catalog_session.get_catalog_node_ids(\n catalog_id=book_id,\n ancestor_levels=ancestor_levels,\n descendant_levels=descendant_levels,\n include_siblings=include_siblings)\n return self._hierarchy_session.get_nodes(\n id_=book_id,\n ancestor_levels=ancestor_levels,\n descendant_levels=descendant_levels,\n include_siblings=include_siblings)", "def all_ids(self) -> Set[int]:\n return {node_id for _, (node_id, _) in self.nodes.items()}", "def get_nids(self, nodes):\n nids = []\n\n for node in nodes.values():\n try:\n hostname = Conf.get(self._index, f'cluster>{node}>hostname')\n except:\n raise MotrError(errno.EINVAL, f\"{node} hostname not found\")\n\n check_type(hostname, str, \"hostname\")\n\n if self._server_id == node:\n cmd = \"lctl list_nids\"\n else:\n cmd = (f\"ssh -o \\\"StrictHostKeyChecking=no\\\" {hostname}\"\n \" lctl list_nids\")\n op = execute_command(self, cmd)\n nids.append(op[0].rstrip(\"\\n\"))\n\n return nids", "def _subnode_ids(self):\n for ticket in self:\n ticket.subnode_ids = self.search([\n ('parent_id', '=', ticket.id),\n ('type.has_children', '=', True)])", "def 
getNodeIds(self, cellId, edgeIndex):\n LIB.mnt_grid_getNodeIds.argtypes = [POINTER(c_void_p),\n c_longlong, c_int,\n POINTER(c_size_t)]\n nodeIds = (c_size_t*2)()\n ier = LIB.mnt_grid_getNodeIds(self.obj, cellId, edgeIndex, nodeIds)\n if ier:\n error_handler(FILE, 'getNodeIds', ier)\n return (nodeIds[0], nodeIds[1])", "def get_nodes(self, ids):\n return [self.node_labels[i] for i in ids]", "def get_nodes(self):\n self.map_graph_id()\n self.nodes_list = [\n self.NX_GRAPHS[self.graph_id].nodes[idx]['label'] \n for idx in range(len(self.NX_GRAPHS[self.graph_id].nodes))]", "def all_node_ids(self):\n return [i for i in range(0, self.n_inputs + self.n_hidden + self.n_outputs)]", "def get_ids(self):\n return self._ids", "def cluster_ids(self):\n return self.model.cluster_ids", "def get_node_list(self):\n logger.debug('Updating node list')\n self.subscribe_mqtt('/nodes/+/responses/ping')\n self.node_ids = []\n\n def on_response(payload, data):\n if data and data.get('node', None):\n node_id = data['node']\n logger.debug('Found node with ID \"%s\"' % node_id)\n\n if node_id not in self.node_ids:\n self.node_ids.append(node_id)\n\n return False\n\n self.publish_mqtt('/ping', on_response=on_response)\n time.sleep(self.timeout / 1000)\n\n return self.node_ids", "def get_ids(self) -> List[str]:", "def _MocaCtlGetNodeIDs(self):\n mc = subprocess.Popen([MOCACTL, 'showtbl', '--nodestats'],\n stdout=subprocess.PIPE)\n out, _ = mc.communicate(None)\n nodes = set()\n for line in out.splitlines():\n node = NODE_RE.search(line)\n if node is not None:\n nodes.add(int(node.group(1)))\n node_list = list(nodes)\n length = len(node_list)\n if int(self.AssociatedDeviceCount) != length:\n type(self).AssociatedDeviceCount.Set(self, length)\n return node_list", "def vertex_ids(self):\n return self.get_ids()", "def leafs_ids(cls, tree_depth):\n return cls.nodes_at_depth(tree_depth)", "def input_node_ids(self):\n return [i for i in range(self.n_inputs)]", "def get_host_ids(self):\n host_ids = []\n \n for node_id in self.nodes:\n if (isinstance(self.nodes[node_id], HostNode)):\n host_ids.append(node_id)\n \n return host_ids", "def children_ids(self):\n return self._children_ids", "def output_node_ids(self):\n return [\n i\n for i in range(\n self.n_inputs + self.n_hidden,\n self.n_inputs + self.n_hidden + self.n_outputs,\n )\n ]" ]
[ "0.77367467", "0.75492555", "0.74984306", "0.7128543", "0.7099024", "0.6643188", "0.6526547", "0.64715105", "0.6450242", "0.637974", "0.6292235", "0.62349534", "0.6192993", "0.6161532", "0.615063", "0.61332947", "0.61310065", "0.6128657", "0.61259454", "0.6114231", "0.6087251", "0.6078877", "0.60734355", "0.6033068", "0.6004963", "0.5991817", "0.5985574", "0.5966623", "0.59619653", "0.59354645" ]
0.8636566
0
Pass through to provider CatalogHierarchySession.get_catalog_nodes
def get_catalog_nodes(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchySession.get_bin_nodes
    return self._get_provider_session('catalog_hierarchy_session').get_catalog_nodes(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_nodes(self):\n pass", "def get_catalog_node_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_node_ids\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_node_ids(*args, **kwargs)", "def get_nodes(self):\n return requests.get(self.__url + 'nodes').json()", "def getNodes(self):\n data = self.connect('get','nodes',None)\n return data", "def get_nodes(self):\n\n return self._nodes", "def getNodes(self):\n return self.__allNodes", "def list_nodes(self):\n\n return list(\n dict(\n self._from_json(self.manage.run(override=\"list-nodes\"))\n ).keys()\n )", "def get_nodes(self):\n self.map_graph_id()\n self.nodes_list = [\n self.NX_GRAPHS[self.graph_id].nodes[idx]['label'] \n for idx in range(len(self.NX_GRAPHS[self.graph_id].nodes))]", "def list_nodes(self):\n return self.ironic_client.node.list()", "def list_nodes(self, type_):\n raise NotImplementedError()", "def nodes(self):\n return self.__nodes", "def nodes(self):\n return self._nodes", "def nodes(self):\n return self._nodes", "def nodes(self):\n return self._nodes", "def nodes(topology):\n return topology.nodes()", "def nodes (self):\n return self.__nodes", "def get_nodes(self):\n _url = f\"{self.connector.base_url}/projects/{self.project_id}/nodes\"\n\n _response = self.connector.http_call(\"get\", _url)\n\n # Create the Nodes array but cleanup cache if there is one\n if self.nodes:\n self.nodes = []\n for _node in _response.json():\n _n = Node(connector=self.connector, **_node)\n _n.project_id = self.project_id\n self.nodes.append(_n)", "def nodes(self):\n return self._get_tree_queryset()", "def get_nodes(self):\n self.get_status()\n old_api = self.version[0] <= '3'\n if old_api:\n certs_path = \"%s/certificate_statuses/*\" % (self.environment)\n nodeinfo_path_tpl = \"{env}/node/{node}\"\n else:\n certs_path = \"puppet-ca/v1/certificate_statuses/no_key?environment=%s\" % (self.environment)\n nodeinfo_path_tpl = \"puppet/v3/node/{node}?environment={env}\"\n\n csts = self._send('GET', certs_path)\n nodes_names = []\n for cst in csts:\n nodes_names.append(cst['name'])\n\n all_nodes = []\n for nname in nodes_names:\n path = nodeinfo_path_tpl.format(node=nname, env=self.environment)\n nodeinfo = self._send('GET', path)\n if old_api:\n nodeinfo = self._from_pson(nodeinfo['data'])\n else:\n nodeinfo = self._from_pson(nodeinfo)\n if 'parameters' in nodeinfo:\n node = nodeinfo['parameters']\n if self.onlynodes:\n if not (node.get('hostname') in self.onlynodes or\n node.get('ipaddress') in self.onlynodes or\n node.get('fqdn') in self.onlynodes or\n node.get('uuid') in self.onlynodes):\n continue\n all_nodes.append(node)\n\n return all_nodes", "def get_nodes_info(self, ctxt):\n cctxt = self.client.prepare(server=DEFAULT_SERVER, timeout=RPC_TIMEOUT)\n return cctxt.call(ctxt, \"get_nodes_info\")", "def nodes(self): \n return [n for n in self.iternodes()]", "def get_nodes(self):\n\n return list(self.graph.nodes)", "def nodes(self):\n return list(self._nodes_dict.values())", "def get_nodes():\n nodes_config_file = Settings.CONF_NODES_FILE\n current_nodes = load_node_names(nodes_config_file)\n\n return current_nodes", "def list_nodes(self):\n return self.datanodes.keys()", "def get_nodes(self):\n return [node for node in self._nodes.itervalues()]", "def get_nodes(self):\n return [node for node in self._nodes.itervalues()]", "def nodes(self):\n return self.graph.nodes", "def get_catalog_hierarchy_session(self):\n raise Unimplemented()", "def 
get_catalog_hierarchy_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))" ]
[ "0.73020756", "0.7236734", "0.6919679", "0.6883763", "0.6679498", "0.6602363", "0.6584786", "0.6574527", "0.6553018", "0.65429574", "0.6517708", "0.6517684", "0.6517684", "0.6517684", "0.6512372", "0.6499225", "0.64947295", "0.64802605", "0.6421677", "0.6402249", "0.6383097", "0.6318841", "0.62766373", "0.62717223", "0.62469834", "0.6244857", "0.6244857", "0.62428105", "0.62374145", "0.62241936" ]
0.8524279
0
Pass through to provider CatalogHierarchyDesignSession.can_modify_catalog_hierarchy
def can_modify_catalog_hierarchy(self):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy
    return self._get_provider_session('catalog_hierarchy_design_session').can_modify_catalog_hierarchy()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def can_modify_book_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy_template\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_modify_catalog_hierarchy()\n return True", "def can_access_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').can_access_catalog_hierarchy()", "def can_modify_family_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchyDesignSession.can_modify_bin_hierarchy_template\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_modify_catalog_hierarchy()\n return True", "def supports_catalog_hierarchy_design(self):\n return False", "def supports_catalog_hierarchy_design(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy_design()", "def supports_catalog_hierarchy(self):\n return False", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def can_access_book_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_access_catalog_hierarchy()\n return True", "def can_access_family_hierarchy(self):\n # Implemented from template for\n # osid.resource.BinHierarchySession.can_access_bin_hierarchy\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_access_catalog_hierarchy()\n return True", "def can_update_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_update_bins\n return self._get_provider_session('catalog_admin_session').can_update_catalogs()", "def supports_book_hierarchy_design(self):\n return False", "def supports_catalog_admin(self):\n return False", "def supports_catalog_admin(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_admin()", "def create_catalog_hierarchy(self, *args, **kwargs):\n # Patched in by [email protected], Jul 23, 2014, added by birdland to template on Aug 8, 2014\n # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead\n # Will not return an actual object, just JSON\n # since a BankHierarchy does not seem to be an OSID thing.\n return self._get_provider_session('catalog_hierarchy_design_session').create_catalog_hierarchy(*args, **kwargs)", "def can_create_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_create_bins\n return 
self._get_provider_session('catalog_admin_session').can_create_catalogs()", "def get_catalog_hierarchy_design_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_hierarchy_design_session(self):\n raise Unimplemented()", "def can_update_families(self):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_update_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_update_catalogs()\n return True", "def supports_book_hierarchy(self):\n return False", "def handle_permission_change(instance, **kwargs):\n new = set(instance.users_with_permission(Permission.VIEW, with_superusers=True))\n # The \"_old_viewers\" property may not exist. For instance if data object is\n # created in the collection and collection permissions are assigned to it\n # without set_permission call.\n old = set(getattr(instance, \"_old_viewers\", []))\n gains = new - old\n losses = old - new\n observe_containers = kwargs.get(\"observe_containers\", True)\n Observer.observe_permission_changes(instance, gains, losses, observe_containers)", "def is_subdossier_addable(self):\n return True", "def can_alter_line(self):\n hosts = self.hosts\n caller = self.caller\n if caller not in hosts and not caller.check_permstring(\"builders\"):\n self.msg(\"You do not have permission to alter the line.\")\n return\n return True", "def can_manage_catalog_aliases(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceAdminSession.can_manage_resource_aliases_template\n return self._get_provider_session('catalog_admin_session').can_manage_catalog_aliases()", "def get_catalog_hierarchy_session(self):\n raise Unimplemented()", "def create_hierarchy(self):\n\t\tpass", "def has_permission(self):\n return super().has_permission()", "def is_child_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_child_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_child_of_catalog(*args, **kwargs)", "def has_change_permissions_permission(self, request):\n return self.has_generic_permission(request, \"change_permissions\")", "def isModifiedByCategory(self,node, queryCategory):\n pred = self.getModifiers(node )\n for p in pred:\n #if( queryCategory.lower() == p.getCategory().lower() ):\n if( p.isA(queryCategory) ):\n return True\n\n return False", "def can_update_books(self):\n # Implemented from template for\n # osid.resource.BinAdminSession.can_update_bins\n # NOTE: It is expected that real authentication hints will be\n # handled in a service adapter above the pay grade of this impl.\n if self._catalog_session is not None:\n return self._catalog_session.can_update_catalogs()\n return True" ]
[ "0.76344025", "0.7528093", "0.74309456", "0.7356475", "0.7188082", "0.70395255", "0.6643134", "0.6203075", "0.6158186", "0.6116174", "0.58900255", "0.5861437", "0.57747525", "0.5696459", "0.56135714", "0.55301833", "0.5500113", "0.5452903", "0.5327024", "0.5275934", "0.5197248", "0.5162417", "0.51254386", "0.51073277", "0.50978", "0.5084903", "0.50844437", "0.505271", "0.5046692", "0.5038869" ]
0.8979005
0
Pass through to provider CatalogHierarchyDesignSession.add_root_catalog
def add_root_catalog(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchyDesignSession.add_root_bin
    self._get_provider_session('catalog_hierarchy_design_session').add_root_catalog(*args, **kwargs)
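To make the pass-through in this record concrete and runnable on its own, here is a minimal sketch; every name below is a simplified stand-in chosen for illustration (an assumption), not the real dlkit/kitosid implementation. The remove_root_catalog, add_child_catalog, and remove_child_catalog(s) records that follow have exactly the same shape, differing only in the forwarded method.

# Minimal stand-in for the kitosid pass-through pattern (assumed names).
class _ProviderHierarchyDesignSession:
    def add_root_catalog(self, catalog_id):
        print('provider session adds root catalog:', catalog_id)

class CatalogServiceFacade:
    def __init__(self):
        # the real service layer resolves provider sessions lazily;
        # a plain dict is enough to demonstrate the delegation
        self._sessions = {
            'catalog_hierarchy_design_session': _ProviderHierarchyDesignSession(),
        }

    def _get_provider_session(self, name):
        return self._sessions[name]

    def add_root_catalog(self, *args, **kwargs):
        # pass through to the provider session, as in the record above
        self._get_provider_session('catalog_hierarchy_design_session').add_root_catalog(*args, **kwargs)

CatalogServiceFacade().add_root_catalog('catalog-1')
# -> provider session adds root catalog: catalog-1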
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_root_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_root_bin\n self._get_provider_session('catalog_hierarchy_design_session').remove_root_catalog(*args, **kwargs)", "def add_child_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.add_child_bin\n self._get_provider_session('catalog_hierarchy_design_session').add_child_catalog(*args, **kwargs)", "def create_catalog_hierarchy(self, *args, **kwargs):\n # Patched in by [email protected], Jul 23, 2014, added by birdland to template on Aug 8, 2014\n # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead\n # Will not return an actual object, just JSON\n # since a BankHierarchy does not seem to be an OSID thing.\n return self._get_provider_session('catalog_hierarchy_design_session').create_catalog_hierarchy(*args, **kwargs)", "def initCatalog():\n catalog = model.newCatalog()\n return catalog", "def initCatalog():\n catalog = model.newCatalog()\n return catalog", "def initCatalog():\n catalog = model.newCatalog()\n return catalog", "def init_catalog():\n return controller.init_catalog()", "def initCatalog():\n return controller.initCatalog()", "def initCatalog():\n return controller.initCatalog()", "def initCatalog():\n return controller.initCatalog()", "def initCatalog():\n return controller.initCatalog()", "def initCatalog():\n return controller.initCatalog()", "def get_root_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bins\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalogs()", "def initCatalog():\n t = \"SINGLE_LINKED\"\n catalog = model.newCatalog(t)\n return catalog", "def get_root_catalog_ids(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalog_ids()", "def supports_catalog_hierarchy(self):\n return False", "def initCatalogA():\n return controller.initCatalogA()", "def initCatalogA():\n return controller.initCatalogA()", "def init():\n catalog = model.newCatalog()\n return catalog", "def _set_catalog(self, catalog: cat.Catalog) -> None:\n self._catalog_interface = CatalogInterface(catalog)\n self._catalog = catalog", "def supports_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.ResourceProfile.supports_resource_lookup\n return self._provider_manager.supports_catalog_hierarchy()", "def created(event):\n root = event.object\n registry = event.registry\n root.sdi_title = 'Simple Book Catalog'\n service = root['catalogs']\n service.add_catalog('books', update_indexes=True)\n books = registry.content.create('BookFolder', name='books', title='Books')\n root['books'] = books", "def initializeCatalog():\n libxml2mod.xmlInitializeCatalog()", "def test_create_hyperflex_app_catalog(self):\n pass", "def manage_addAlissCatalog(self, REQUEST=None):\n ob = AlissCatalog()\n self._setObject(ALISS_CATALOG_ID, ob)\n ob = self._getOb(ALISS_CATALOG_ID)\n if REQUEST is not None:\n return self.manage_main(self, REQUEST, update_menu=1)", "def get_catalog_hierarchy_session(self, *args, **kwargs):\n raise Unimplemented('Unimplemented in dlkit.services - args=' + str(args) + ', kwargs=' + str(kwargs))", "def get_catalog_hierarchy_session(self):\n raise Unimplemented()", "def 
get_catalog_hierarchy(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_bin_hierarchy\n return self._get_provider_session('catalog_hierarchy_session').get_catalog_hierarchy()", "def _init_catalog_node(catalog, pid, lid=None, rid=None):\n if pid not in catalog: catalog[pid] = {'_langs': {}}\n if lid is not None:\n if lid not in catalog[pid]['_langs']: catalog[pid]['_langs'][lid] = {'_res': {}, 'language': {}}\n if lid is not None and rid is not None:\n if rid not in catalog[pid]['_langs'][lid]['_res']: catalog[pid]['_langs'][lid]['_res'][rid] = {}", "def initCatalog(tipo):\n catalog = model.newCatalog(tipo)\n \n return catalog" ]
[ "0.6813875", "0.6523921", "0.62321925", "0.61788124", "0.61788124", "0.61788124", "0.61527556", "0.6121284", "0.6121284", "0.6121284", "0.6121284", "0.6121284", "0.60987204", "0.60559887", "0.587504", "0.58617026", "0.5803131", "0.5803131", "0.5765531", "0.5675847", "0.56757814", "0.56488234", "0.56360126", "0.55964684", "0.5560487", "0.55008876", "0.54976463", "0.5493582", "0.5475708", "0.543625" ]
0.8653618
0
Pass through to provider CatalogHierarchyDesignSession.remove_root_catalog
def remove_root_catalog(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchyDesignSession.remove_root_bin
    self._get_provider_session('catalog_hierarchy_design_session').remove_root_catalog(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_child_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_child_bin\n self._get_provider_session('catalog_hierarchy_design_session').remove_child_catalog(*args, **kwargs)", "def delete_catalog_hierarchy(self, *args, **kwargs):\n # Patched in by [email protected], Jul 23, 2014, added by birdland to template on Aug 8, 2014\n # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead\n # Will not return an actual object, just JSON\n # since a BankHierarchy does not seem to be an OSID thing.\n return self._get_provider_session('catalog_hierarchy_design_session').delete_catalog_hierarchy(*args, **kwargs)", "def remove_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_child_bins\n self._get_provider_session('catalog_hierarchy_design_session').remove_child_catalogs(*args, **kwargs)", "def catalogRemove(value):\n ret = libxml2mod.xmlCatalogRemove(value)\n return ret", "def catalogCleanup():\n libxml2mod.xmlCatalogCleanup()", "def destroy_catalogue(self):\n # Call the backend to remove anything related to the archive.\n if self._catalogue_exists():\n self._backend.destroy()", "def add_root_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.add_root_bin\n self._get_provider_session('catalog_hierarchy_design_session').add_root_catalog(*args, **kwargs)", "def destroy(self):\n self.destroy_catalogue()\n\n # Remove the archive root path (if it exists).\n if self._archive_exists():\n try:\n util.remove_path(self._root)\n except EnvironmentError as _error:\n raise Error(\"unable to remove archive root path '%s' [%s]\" % (self._root, _error))", "def test_delete_hyperflex_app_catalog(self):\n pass", "def remove(self, value):\n ret = libxml2mod.xmlACatalogRemove(self._o, value)\n return ret", "def _delete_root_dir(self):\n\n staf_request = ('DELETE ENTRY \"{0}\" RECURSE '\n 'CONFIRM '.format(unix_style_path(self._sut.bespoke_root)))\n\n result = self._staf_handle.submit(self._sut.network_address, 'fs', staf_request)\n\n if result.rc not in [result.Ok, result.DoesNotExist]:\n raise CoreError(result.result)", "def pre_config_root_delete(self, resource_id):\n pass", "def test_delete_config_root(self):\n config_root = self._create_config_root()\n config_root_uuid = config_root['config-root']['uuid']\n with self.override_role():\n self.config_client.delete_config_root(\n config_root_uuid)", "def DeleteRoot(self):\r\n\r\n if self._anchor:\r\n\r\n self._dirty = True\r\n self.SendDeleteEvent(self._anchor)\r\n self._current = None\r\n self._selectItem = None\r\n self._anchor.DeleteChildren(self)\r\n del self._anchor\r\n self._anchor = None", "def _destroy(self):\r\n if self._client:\r\n self._client.returnNr(self._nr)\r\n self._client.unregisterContainer(self)\r\n self._client = None\r\n\r\n if self._confDir:\r\n shutil.rmtree(self._confDir, True)\r\n self._confDir = None\r\n\r\n if self._dataDir:\r\n shutil.rmtree(self._dataDir, True)\r\n self._dataDir = None", "def catalog_drop(self, args):\n try:\n catalog = self.server.connect_ermrest(args.id)\n catalog.delete_ermrest_catalog(really=True)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog not found', e)\n else:\n raise e", "def clear(self):\n self.root = None", "def __del__(self):\n #print 'del in'\n if 
hasattr(self,'root'):\n #print 'del root'\n if self.root:\n #print 'del circ'\n self.root.delete_circulars()\n del self.root", "def remove_vizant(tree):\n for childpath in [\".//target[@name='graph.init']\", \".//target[@name='graph.all']\", \".//target[@name='graph.sabbus']\"]:\n child = tree.find(childpath)\n parent = tree.find(\"%s/..\" % childpath)\n parent.remove(child)", "def statDestroy():\n root.destroy()\n statView()", "def _cleanPackageDir(self, *_):\r\n for _, path in self._pkgDir:\r\n os.rmdir(os.path.join(self._rootfs, path))\r\n\r\n assert len(self._containers) == 0", "def clearFindAndRebuild(self):\r\n def indexObject(obj, path):\r\n if (base_hasattr(obj, 'indexObject') and\r\n safe_callable(obj.indexObject)):\r\n try:\r\n obj.indexObject()\r\n except TypeError:\r\n # Catalogs have 'indexObject' as well, but they\r\n # take different args, and will fail\r\n pass\r\n self.manage_catalogClear()\r\n portal = aq_parent(aq_inner(self))\r\n at_tool = getToolByName(self, 'archetype_tool')\r\n catalog_map = at_tool.listCatalogs()\r\n portal_types = catalog_map.keys()\r\n obj_metatypes = []\r\n for portal_type in portal_types:\r\n obj_metatypes.append(portal_type.replace(' ', ''))\r\n portal.ZopeFindAndApply(portal, obj_metatypes=obj_metatypes, search_sub=True, apply_func=indexObject)", "def reset(self):\r\n self._root_dir = None", "def tearDown(self):\n if self.rootdir and os.path.exists(self.rootdir):\n shutil.rmtree(self.rootdir)", "def get_root_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bins\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalogs()", "def _on_root_finder_update(self, change):\n if hasattr(self, \"_root_finder\"):\n del self._root_finder", "def get_root_catalog_ids(self):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_root_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_root_catalog_ids()", "def remove_root_book(self, book_id):\n # Implemented from template for\n # osid.resource.BinHierarchyDesignSession.remove_root_bin_template\n if self._catalog_session is not None:\n return self._catalog_session.remove_root_catalog(catalog_id=book_id)\n return self._hierarchy_session.remove_root(id_=book_id)", "def remove_root(data, root_ind):\r\n assert data.ndim >= 2 and data.shape[-1] in (2, 3)\r\n\r\n roots = data[..., [root_ind], :] # (..., 1, [2|3])\r\n data = data - roots\r\n data = np.delete(data, root_ind, axis=-2)\r\n\r\n return data", "def destroy(self, log_level=''):\n # Get all the additional volumes and detach,delete.\n volumes = self.utils.get_volumes_with_tag(\n {'cluster_name': config.ENV_DATA['cluster_name']}\n )\n self.flexy_instance.destroy()\n self.utils.detach_and_delete_vols(volumes)" ]
[ "0.6850928", "0.67396414", "0.652211", "0.6465035", "0.6255723", "0.61706936", "0.6166417", "0.6135597", "0.6002828", "0.57741207", "0.57471824", "0.572127", "0.57035214", "0.5689762", "0.56423634", "0.56191367", "0.561291", "0.55900383", "0.55674046", "0.5533645", "0.5533078", "0.5531742", "0.54394287", "0.5434047", "0.5400866", "0.5370653", "0.5367519", "0.5349067", "0.53452355", "0.5336993" ]
0.86070573
0
Pass through to provider CatalogHierarchyDesignSession.add_child_catalog
def add_child_catalog(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchyDesignSession.add_child_bin
    self._get_provider_session('catalog_hierarchy_design_session').add_child_catalog(*args, **kwargs)
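The child variant forwards two identifiers. A minimal call-shape stub, assuming the OSID-style signature add_child_catalog(catalog_id, child_id); the record itself only exposes *args, **kwargs.

# Stub illustrating the assumed two-id signature; not the real provider.
class _StubDesignSession:
    def add_child_catalog(self, catalog_id, child_id):
        print('link', child_id, 'under', catalog_id)

_StubDesignSession().add_child_catalog('catalog-parent', 'catalog-child')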
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_child_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_child_bin\n self._get_provider_session('catalog_hierarchy_design_session').remove_child_catalog(*args, **kwargs)", "def add_root_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.add_root_bin\n self._get_provider_session('catalog_hierarchy_design_session').add_root_catalog(*args, **kwargs)", "def create_catalog_hierarchy(self, *args, **kwargs):\n # Patched in by [email protected], Jul 23, 2014, added by birdland to template on Aug 8, 2014\n # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead\n # Will not return an actual object, just JSON\n # since a BankHierarchy does not seem to be an OSID thing.\n return self._get_provider_session('catalog_hierarchy_design_session').create_catalog_hierarchy(*args, **kwargs)", "def remove_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_child_bins\n self._get_provider_session('catalog_hierarchy_design_session').remove_child_catalogs(*args, **kwargs)", "def get_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bins\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalogs(*args, **kwargs)", "def is_child_of_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.is_child_of_bin\n return self._get_provider_session('catalog_hierarchy_session').is_child_of_catalog(*args, **kwargs)", "def _newChild(self, child):\n self._testKeySubNsAdd()\n self._getSubNsList().append(child)", "def add_child(self, child):\r\n self.children.append(child)", "def create_child(self):\n raise NotImplementedError", "def has_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_child_bins\n return self._get_provider_session('catalog_hierarchy_session').has_child_catalogs(*args, **kwargs)", "def get_child_catalog_ids(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bin_ids\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalog_ids(*args, **kwargs)", "def add_child(self, child_account):\r\n self._children.append(child_account)", "def add_child(self, child):\r\n self.children.append(child)", "def add_child(self, child):\r\n \r\n self._children.append(child)\r\n self.update_batch(self._batch, self._group)", "def add_child(self, cls, id=None, collection=\"channels\", prefix=\"ch_\", attr_name=\"\", **kwargs):\n child = cls(self, id, **kwargs)\n collection_data = getattr(self, collection, {})\n if isinstance(collection_data, CommonBase.BaseChannelCreator):\n collection_data = {}\n # Create channel interface if prefix or name is present\n if (prefix or attr_name) and id is not None:\n if not collection_data:\n # Add a grouplist to the parent.\n setattr(self, collection, collection_data)\n collection_data[id] = child\n child._collection = collection\n if attr_name:\n setattr(self, attr_name, child)\n child._name = attr_name\n else:\n setattr(self, f\"{prefix}{id}\", child)\n child._name = f\"{prefix}{id}\"\n elif attr_name and id is None:\n # If attribute name is passed with no channel id\n # set the child to the 
attribute name.\n setattr(self, attr_name, child)\n child._name = attr_name\n else:\n if collection_data:\n raise ValueError(f\"An attribute '{collection}' already exists.\")\n setattr(self, collection, child)\n child._name = collection\n return child", "def addChild(self, child):\n #assert child not in self.children\n #if child not in self.children:\n child.parents.append(self)\n self.children.append(child)", "def add_child(self, ldraw_model):\n self.children.append(ldraw_model)", "def addChild( self, child ):\n\n self.childs.append( child )", "def add_child(self, child):\n self.children.append(child)", "def add_child(self, child):\n self.children.append(child)", "def supports_catalog_hierarchy(self):\n return False", "def appendChild(self, child):\n self.__initChild()\n self.__child.append(child)", "def add_child(self, child):\n self.childs.append(child)", "def addChildObject(self, child):\n \n currChild = self.getChild(child.getName())\n if currChild:\n index = self.getIndex(currChild)\n if index != -1:\n self._children[index] = child\n child.setParent(self)\n # Unset the existing child's parent\n currChild.setParent(None)\n del currChild\n \n self.__setChildDict(child)\n else:\n child.setParent(self) \n self._children.append(child)\n self.__setChildDict(child)", "def supports_catalog_hierarchy_design(self):\n return False", "def add_child(self, child, label):\n self.children[label] = child\n child.parents.append(self)", "def append_child(self, child):\n\t\tself._children.append(child)", "def add(self, child):\r\n# child = Node()\r\n# child._id = Kinetic.Global.id_counter\r\n# Kinetic.Global.id_counter += 1\r\n child.index = len(self.children)\r\n child.parent = self\r\n self.children.append(child)\r\n stage = child.get_stage()\r\n\r\n if not stage:\r\n Kinetic.Global._add_temp_node(child)\r\n else:\r\n stage._add_id(child)\r\n stage._add_name(child)\r\n\r\n go = Kinetic.Global\r\n go._pull_nodes(stage)\r\n\r\n if hasattr(self, '_add'):\r\n self._add(child)\r\n\r\n return '%s.add(%s);' %(self.name, child.name)", "def add_child(self, pid):\n self._children_ids.append(pid)\n self._sort_children()", "def add_child(self, child):\n name = child.name\n self._children[name] = child\n self._name_dict[name.split('-')[0]] += 1" ]
[ "0.6714754", "0.6531312", "0.6380619", "0.6301281", "0.6141535", "0.6067072", "0.6044568", "0.60347086", "0.5952647", "0.5951329", "0.59413445", "0.59090525", "0.5812203", "0.5704964", "0.5689148", "0.5640887", "0.5628979", "0.55973136", "0.55917484", "0.55917484", "0.5576806", "0.55693305", "0.555228", "0.5522056", "0.5520549", "0.5505332", "0.5497397", "0.54778844", "0.54402", "0.54369336" ]
0.85860157
0
Pass through to provider CatalogHierarchyDesignSession.remove_child_catalog
def remove_child_catalog(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchyDesignSession.remove_child_bin
    self._get_provider_session('catalog_hierarchy_design_session').remove_child_catalog(*args, **kwargs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_child_bins\n self._get_provider_session('catalog_hierarchy_design_session').remove_child_catalogs(*args, **kwargs)", "def delete_catalog_hierarchy(self, *args, **kwargs):\n # Patched in by [email protected], Jul 23, 2014, added by birdland to template on Aug 8, 2014\n # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead\n # Will not return an actual object, just JSON\n # since a BankHierarchy does not seem to be an OSID thing.\n return self._get_provider_session('catalog_hierarchy_design_session').delete_catalog_hierarchy(*args, **kwargs)", "def remove_root_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_root_bin\n self._get_provider_session('catalog_hierarchy_design_session').remove_root_catalog(*args, **kwargs)", "def _delChild(self, child):\n try:\n self._getSubNsList().remove(child)\n except KeyError:\n # This shouldn't happen, but what do we do if it does?\n pass\n self._testKeySubNsDel()", "def catalogRemove(value):\n ret = libxml2mod.xmlCatalogRemove(value)\n return ret", "def del_child(self, child):\n\n try:\n self.children.remove(child)\n except ValueError:\n pass\n else:\n self.rebuild_children_dict()", "def child_removed(self, child):\n super(AbstractItemView, self).child_removed(child)\n self.get_member(\"_items\").reset(self)", "def remove_child(self, child):\n if hasattr(child, \"_protected\"):\n raise TypeError(\"You cannot remove channels defined at class level.\")\n if hasattr(child, \"_collection\"):\n collection = getattr(self, child._collection)\n del collection[child.id]\n delattr(self, child._name)", "def remove_child(self, child_id):\r\n self.children = [ c for c in self.children if c.id!= child_id ]", "def remove_child(self, descendant):\n self.children.through.objects.get(\n parent=self, child=descendant).delete()", "def remove_child(self, child):\n\n self.children.remove(child)", "def _remove_child(self, widget):\n self.node.removeChild(widget.node)", "def test_delete_child(self):\r\n # Create 2 children of main course.\r\n resp_1 = self.create_xblock(display_name='child 1', category='chapter')\r\n resp_2 = self.create_xblock(display_name='child 2', category='chapter')\r\n chapter1_usage_key = self.response_usage_key(resp_1)\r\n chapter2_usage_key = self.response_usage_key(resp_2)\r\n\r\n course = self.get_item_from_modulestore(self.usage_key)\r\n self.assertIn(chapter1_usage_key, course.children)\r\n self.assertIn(chapter2_usage_key, course.children)\r\n\r\n # Remove one child from the course.\r\n resp = self.client.ajax_post(\r\n self.course_update_url,\r\n data={'children': [unicode(chapter2_usage_key)]}\r\n )\r\n self.assertEqual(resp.status_code, 200)\r\n\r\n # Verify that the child is removed.\r\n course = self.get_item_from_modulestore(self.usage_key)\r\n self.assertNotIn(chapter1_usage_key, course.children)\r\n self.assertIn(chapter2_usage_key, course.children)", "def remove_component(self, child):\n if (child.component_type in self._children and\n child is self._children[child.component_type]):\n child.parent = None\n del self._children[child.component_type]\n if (child.component_type in self._spoofed_children and\n child in self._spoofed_children[child.component_type]):\n self._spoofed_children[child.component_type].remove(child)\n child.parent = None\n 
self._remove_child_from_tag_table(child)\n return child", "def remove(self, child):\r\n if child and child.index is not None and id(self.children[child.index]) == id(child):\r\n stage = self.get_stage()\r\n if stage:\r\n stage._remove_id(child.get_id())\r\n stage._remove_name(child.get_name())\r\n\r\n Kinetic.Global._remove_temp_node(child)\r\n del self.children[child.index]\r\n self._set_children_indices()\r\n\r\n while child.children and len(child.children) > 0:\r\n child.remove(child.children)\r\n\r\n if hasattr(child, '_remove'):\r\n child._remove()\r\n return '%s.remove(%s);' %(self.name, child.name)", "def removeChild(self, childRegion):\n self._children.remove(childRegion)\n self._zincRegion.removeChild(childRegion._zincRegion)\n childRegion._parent = None\n childRegion.freeContents()\n if childRegion._ancestorModelSourceCreated:\n self._reload()\n else:\n self._informRegionChange(True)", "def delete(self):\n\n # TODO find a way to remove this when sub-classing in HCRoot\n self.parent.del_child(self)", "def removeChild(self, *args):\n return _libsbml.ASTBasePlugin_removeChild(self, *args)", "def remove_child(self, child):\r\n try:\r\n self._children.remove(child)\r\n except ValueError:\r\n #Don't care if it's not in the list\r\n pass", "def remove_child(self, child: \"AbstractNode\") -> None:\n self._children.remove(child)", "def add_child_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.add_child_bin\n self._get_provider_session('catalog_hierarchy_design_session').add_child_catalog(*args, **kwargs)", "def delete_child(self, model):\n assert isinstance(model, self.model_class) # it's a homogeneous collection\n m_id = str(model.get_id())\n assert m_id != None # needs a real id or cid\n assert m_id in self._models\n model._mark_deleted()\n del self._models[m_id]", "def destroy_catalogue(self):\n # Call the backend to remove anything related to the archive.\n if self._catalogue_exists():\n self._backend.destroy()", "def delete_child(self, val):\n del self._children[val]\n return val", "def remove_child(self, child):\n if self.is_root:\n return\n self.children.remove(child)\n # pylint: disable=protected-access\n child.__parent = None", "def remove_child(self, child):\n\n self._children.remove(child)", "def removeChild(self, child):\n child.parents.remove(self)\n self.children.remove(child)", "def remove_vizant(tree):\n for childpath in [\".//target[@name='graph.init']\", \".//target[@name='graph.all']\", \".//target[@name='graph.sabbus']\"]:\n child = tree.find(childpath)\n parent = tree.find(\"%s/..\" % childpath)\n parent.remove(child)", "def remove_child(self, child: int = 0) -> None:\n logging.info(f\"Remove a node from the child node list. {self.desc}\")\n js = f\"\"\"var elm = document.querySelectorAll(\"{self.css}\")[{self.index}];\n elm.removeChild(elm.childNodes[{child}]);\"\"\"\n self._execute_javascript(js)", "def DeleteChildren(self, tree):\r\n\r\n for child in self._children:\r\n if tree:\r\n tree.SendDeleteEvent(child)\r\n\r\n child.DeleteChildren(tree)\r\n \r\n if child == tree._selectItem:\r\n tree._selectItem = None\r\n\r\n # We have to destroy the associated window\r\n for wnd in child._wnd:\r\n if wnd:\r\n wnd.Hide()\r\n wnd.Destroy()\r\n \r\n child._wnd = []\r\n\r\n if child in tree._itemWithWindow:\r\n tree._itemWithWindow.remove(child)\r\n \r\n del child\r\n \r\n self._children = []" ]
[ "0.7964511", "0.69431597", "0.66732615", "0.62930405", "0.6147283", "0.6056059", "0.60204434", "0.59958714", "0.5990975", "0.59719396", "0.596596", "0.5918154", "0.5915128", "0.5896816", "0.5896775", "0.58822143", "0.58287936", "0.5758725", "0.57460916", "0.57098067", "0.56888723", "0.56666934", "0.56422716", "0.562925", "0.56242037", "0.5620359", "0.5620233", "0.5568459", "0.556052", "0.55537313" ]
0.84851974
0
Pass through to provider CatalogHierarchyDesignSession.remove_child_catalogs
def remove_child_catalogs(self, *args, **kwargs):
    # Implemented from kitosid template for -
    # osid.resource.BinHierarchyDesignSession.remove_child_bins
    self._get_provider_session('catalog_hierarchy_design_session').remove_child_catalogs(*args, **kwargs)
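Unlike the singular form, the plural variant detaches every child of one parent; the remove_child_books negative later in this record invokes it as remove_child_catalogs(catalog_id=...), which is the basis for the assumed signature below.

# Call-shape stub for the plural form; one parent id, all children unlinked.
class _StubDesignSession:
    def remove_child_catalogs(self, catalog_id):
        print('unlink all children of', catalog_id)

_StubDesignSession().remove_child_catalogs('catalog-parent')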
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_child_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_child_bin\n self._get_provider_session('catalog_hierarchy_design_session').remove_child_catalog(*args, **kwargs)", "def delete_catalog_hierarchy(self, *args, **kwargs):\n # Patched in by [email protected], Jul 23, 2014, added by birdland to template on Aug 8, 2014\n # Is not part of specs for catalog hierarchy design sessions, but may want to be in hierarchy service instead\n # Will not return an actual object, just JSON\n # since a BankHierarchy does not seem to be an OSID thing.\n return self._get_provider_session('catalog_hierarchy_design_session').delete_catalog_hierarchy(*args, **kwargs)", "def remove_root_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.remove_root_bin\n self._get_provider_session('catalog_hierarchy_design_session').remove_root_catalog(*args, **kwargs)", "def catalogRemove(value):\n ret = libxml2mod.xmlCatalogRemove(value)\n return ret", "def DeleteChildren(self, tree):\r\n\r\n for child in self._children:\r\n if tree:\r\n tree.SendDeleteEvent(child)\r\n\r\n child.DeleteChildren(tree)\r\n \r\n if child == tree._selectItem:\r\n tree._selectItem = None\r\n\r\n # We have to destroy the associated window\r\n for wnd in child._wnd:\r\n if wnd:\r\n wnd.Hide()\r\n wnd.Destroy()\r\n \r\n child._wnd = []\r\n\r\n if child in tree._itemWithWindow:\r\n tree._itemWithWindow.remove(child)\r\n \r\n del child\r\n \r\n self._children = []", "def destroy_catalogue(self):\n # Call the backend to remove anything related to the archive.\n if self._catalogue_exists():\n self._backend.destroy()", "def catalogCleanup():\n libxml2mod.xmlCatalogCleanup()", "def cleanup(self):\n for child in self.children():\n child.deleteLater()", "def clean_DD_output_cats(catalog):\n print('Initial length of catalog: {}'.format(len(catalog)))\n for ev in list(catalog.events):\n if ev.resource_id.id.startswith('smi:de'):\n catalog.events.remove(ev)\n print('After removing temps, length is: {}'.format(len(catalog)))\n for ev in list(catalog.events):\n if ev.origins[-1].method_id == None:\n catalog.events.remove(ev)\n print('After removing non-located, length is: {}'.format(len(catalog)))\n for ev in catalog:\n ev.preferred_origin_id = ev.origins[-1].resource_id.id\n return catalog", "def remove(self, children=None):\n if children is not None and self.children:\n logging.warning('%s is being removed by remove(children=%s), '\n ' but it has (unexpected) children', self, children)\n self.bundle.trees = [root for root in self.bundle.trees if root != self]", "def DeleteChildren(self, tree):\r\n\r\n for child in self._children:\r\n if tree:\r\n tree.SendDeleteEvent(child)\r\n\r\n child.DeleteChildren(tree)\r\n \r\n if child == tree._select_me:\r\n tree._select_me = None\r\n\r\n # We have to destroy the associated window\r\n wnd = child.GetWindow()\r\n if wnd:\r\n wnd.Destroy()\r\n child._wnd = None\r\n\r\n if child in tree._itemWithWindow:\r\n tree._itemWithWindow.remove(child)\r\n \r\n del child\r\n \r\n self._children = []", "def can_delete_catalogs(self):\n # Implemented from kitosid template for -\n # osid.resource.BinAdminSession.can_delete_bins\n return self._get_provider_session('catalog_admin_session').can_delete_catalogs()", "def child_removed(self, child):\n super(AbstractItemView, self).child_removed(child)\n self.get_member(\"_items\").reset(self)", "def get_child_catalogs(self, *args, 
**kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.get_child_bins\n return self._get_provider_session('catalog_hierarchy_session').get_child_catalogs(*args, **kwargs)", "def remove_child(self, child_id):\r\n self.children = [ c for c in self.children if c.id!= child_id ]", "def add_child_catalog(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchyDesignSession.add_child_bin\n self._get_provider_session('catalog_hierarchy_design_session').add_child_catalog(*args, **kwargs)", "def reset_spoofed_children(self):\n for child_type in self._spoofed_children.values():\n for child in child_type:\n self.remove_component(child)\n self._spoofed_children = {} # Unecessary?", "def _prune_catalog(self) -> cat.Catalog:\n if self._import is None:\n return self._catalog\n\n needed_ids = self._find_needed_control_ids()\n\n # if a control includes controls - only include those that we know are needed\n final_control_ids = self._prune_controls(needed_ids)\n\n # build the needed groups of controls\n group_dict: Dict[str, cat.Group] = {}\n for control_id in final_control_ids:\n group_id, group_title, group_class = self._catalog_interface.get_group_info(control_id)\n group = group_dict.get(group_id)\n control = self._catalog_interface.get_control(control_id)\n if group is None:\n group = cat.Group(id=group_id, title=group_title, class_=group_class, controls=[control])\n group_dict[group_id] = group\n else:\n group_dict[group_id].controls.append(control)\n\n # find all referenced uuids - they should be 1:1 with those in backmatter\n needed_uuid_refs: Set[str] = self._find_all_uuid_refs(final_control_ids)\n\n # prune the list of resources to only those that are needed\n new_resources: Optional[List[common.Resource]] = []\n if self._catalog.back_matter is not None and self._catalog.back_matter.resources is not None:\n for resource in self._catalog.back_matter.resources:\n if resource.uuid in needed_uuid_refs:\n new_resources.append(resource)\n\n new_groups: Optional[List[cat.Group]] = list(group_dict.values())\n\n # should avoid empty lists so set to None if empty\n new_resources = new_resources if new_resources else None\n new_groups = new_groups if new_groups else None\n\n new_cat = cat.Catalog(\n uuid=str(uuid4()),\n metadata=self._catalog.metadata,\n back_matter=common.BackMatter(resources=new_resources),\n groups=new_groups\n )\n\n return new_cat", "def catalog_drop(self, args):\n try:\n catalog = self.server.connect_ermrest(args.id)\n catalog.delete_ermrest_catalog(really=True)\n except HTTPError as e:\n if e.response.status_code == requests.codes.not_found:\n raise ResourceException('Catalog not found', e)\n else:\n raise e", "def _delChild(self, child):\n try:\n self._getSubNsList().remove(child)\n except KeyError:\n # This shouldn't happen, but what do we do if it does?\n pass\n self._testKeySubNsDel()", "def delete(self):\n for lv in self.logical_volumes:\n self.delete_lv(lv_name=lv)\n\n super().delete()", "def remove_vizant(tree):\n for childpath in [\".//target[@name='graph.init']\", \".//target[@name='graph.all']\", \".//target[@name='graph.sabbus']\"]:\n child = tree.find(childpath)\n parent = tree.find(\"%s/..\" % childpath)\n parent.remove(child)", "def test_delete_child(self):\r\n # Create 2 children of main course.\r\n resp_1 = self.create_xblock(display_name='child 1', category='chapter')\r\n resp_2 = self.create_xblock(display_name='child 2', category='chapter')\r\n chapter1_usage_key = 
self.response_usage_key(resp_1)\r\n chapter2_usage_key = self.response_usage_key(resp_2)\r\n\r\n course = self.get_item_from_modulestore(self.usage_key)\r\n self.assertIn(chapter1_usage_key, course.children)\r\n self.assertIn(chapter2_usage_key, course.children)\r\n\r\n # Remove one child from the course.\r\n resp = self.client.ajax_post(\r\n self.course_update_url,\r\n data={'children': [unicode(chapter2_usage_key)]}\r\n )\r\n self.assertEqual(resp.status_code, 200)\r\n\r\n # Verify that the child is removed.\r\n course = self.get_item_from_modulestore(self.usage_key)\r\n self.assertNotIn(chapter1_usage_key, course.children)\r\n self.assertIn(chapter2_usage_key, course.children)", "def delete_ancestry(self, ancestry):\n for layer in ancestry.layers[::-1]:\n self.delete_layer(layer.name)", "def _clear_child_(self):\n try:\n del self.child_\n except AttributeError, e:\n pass\n\n for attr in filter(self._child_re.match, dir(self)):\n try:\n getattr(self, attr)._clear_child_()\n except AttributeError:\n pass", "def remove_child(self, child):\n if hasattr(child, \"_protected\"):\n raise TypeError(\"You cannot remove channels defined at class level.\")\n if hasattr(child, \"_collection\"):\n collection = getattr(self, child._collection)\n del collection[child.id]\n delattr(self, child._name)", "def test_delete_hyperflex_app_catalog(self):\n pass", "def delete_ancestry(self, ancestry):\n raise NotImplementedError(\"Implement in sub classes\")", "def has_child_catalogs(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.BinHierarchySession.has_child_bins\n return self._get_provider_session('catalog_hierarchy_session').has_child_catalogs(*args, **kwargs)", "def remove_child_books(self, book_id):\n # Implemented from template for\n # osid.resource.BinHierarchyDesignSession.remove_child_bin_template\n if self._catalog_session is not None:\n return self._catalog_session.remove_child_catalogs(catalog_id=book_id)\n return self._hierarchy_session.remove_children(id_=book_id)" ]
[ "0.8125606", "0.67528665", "0.66996413", "0.5946237", "0.579681", "0.57834023", "0.5759221", "0.5613291", "0.5572947", "0.5561859", "0.5501291", "0.54950947", "0.54866475", "0.5486411", "0.54716957", "0.54330075", "0.54288805", "0.5419981", "0.54142064", "0.54124284", "0.53952986", "0.53786385", "0.53721", "0.5360923", "0.5357177", "0.5326439", "0.53238946", "0.53005576", "0.5266215", "0.5262792" ]
0.84887135
0
Sets the underlying catalog view to match current view
def _set_catalog_view(self, session):
    if self._catalog_view == FEDERATED:
        try:
            session.use_federated_catalog_view()
        except AttributeError:
            pass
    else:
        try:
            session.use_isolated_catalog_view()
        except AttributeError:
            pass
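A runnable sketch of the duck-typed toggle in this record; FEDERATED/ISOLATED and the session stubs are assumptions standing in for the service-layer constants and provider sessions.

FEDERATED, ISOLATED = 0, 1  # stand-ins for the service constants (assumption)

class _SessionWithViews:
    def use_federated_catalog_view(self):
        print('results will federate across the catalog hierarchy')

class _SessionWithoutViews:
    pass  # a provider session that lacks catalog views entirely

def set_catalog_view(session, catalog_view=FEDERATED):
    # same try/except AttributeError guard as the record above
    if catalog_view == FEDERATED:
        try:
            session.use_federated_catalog_view()
        except AttributeError:
            pass
    else:
        try:
            session.use_isolated_catalog_view()
        except AttributeError:
            pass

set_catalog_view(_SessionWithViews())     # prints the federated message
set_catalog_view(_SessionWithoutViews())  # silently skipped, no exception

One trade-off worth noting: the guard also swallows an AttributeError raised inside the view method itself, so a genuine bug in a provider session is masked the same way as a missing method.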
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _set_catalog_view(self, session):\n if self._catalog_view == COMPARATIVE:\n try:\n session.use_comparative_catalog_view()\n except AttributeError:\n pass\n else:\n try:\n session.use_plenary_catalog_view()\n except AttributeError:\n pass", "def use_comparative_catalog_view(self):\n self._catalog_view = COMPARATIVE\n # self._get_provider_session('catalog_lookup_session') # To make sure the session is tracked\n for session in self._get_provider_sessions():\n try:\n session.use_comparative_catalog_view()\n except AttributeError:\n pass", "def on_action_set_view(self, content):\n self._view = content['view']\n self.refresh_traits_widget()", "def _set_catalog(self, catalog: cat.Catalog) -> None:\n self._catalog_interface = CatalogInterface(catalog)\n self._catalog = catalog", "def _connectView(self):\n self._view.select_asset = self.select_asset\n self._view.add_assets = self.add_assets\n self._view.remove_assets = self.remove_assets\n self._view.update_assets = self.update_assets\n self._view.commit = self.commit", "def set_catalogue(self, catalogue, force_it=False,\n fast_setup=False):\n from .catalogue.basecatalogue import Catalogue\n\n if not fast_setup:\n if self.has_catalogue() and force_it is False:\n raise AttributeError(\"'catalogue' already defined\"+\\\n \" Set force_it to True if you really known what you are doing\")\n \n if Catalogue not in catalogue.__class__.__mro__:\n raise TypeError(\"the input 'catalogue' must be an astrobject Catalogue\")\n \n if hasattr(self,\"wcs\") and self.has_wcs():\n catalogue.set_wcs(self.wcs, force_it=True)\n if catalogue.nobjects_in_fov < 1:\n warnings.warn(\"WARNING No object in the field of view,\"+\"\\n\"+\\\n \" -> catalogue not loaded\")\n return\n \n # --------\n # - set it\n self._side_properties[\"catalogue\"] = catalogue", "def set_rate_catalog(self, rate_catalog):\n self.single_selection_from_kendo_dropdown(self.rate_catalog_kendo_dropdown_locator, rate_catalog)", "def set_view(self, index):\n view = index\n self.reset_scroll_area()\n self.clearSelection()\n\n if view == self.SURFACE_VIEW:\n self.toggle_surface_mode()\n self.view = \"Surface View\"\n elif view == self.BORDER_VIEW:\n self.toggle_border_mode()\n self.view = \"Border View\"\n elif view == self.GEOM_VIEW:\n if self.show_geom() == \"Canceled\":\n self.canceled = True\n else:\n self.view = \"Geom View\"\n elif view == self.MESH_VIEW:\n if self.show_mesh() == \"Canceled\":\n self.canceled = True\n else:\n self.view = \"Mesh View\"", "def SetPackageView( self, directoryView ):\n self.directoryView = not self.directoryView\n self.packageMenuItem.Check( self.directoryView )\n self.packageViewTool.SetValue( self.directoryView )\n if self.loader:\n self.SetModel( self.loader )\n self.RecordHistory()", "def setViewComponent(self, viewComponent):\n self.viewComponent = viewComponent", "def catalog_id(self, catalog_id):\n self._catalog_id = catalog_id", "def use_plenary_catalog_view(self):\n self._catalog_view = PLENARY\n # self._get_provider_session('catalog_lookup_session') # To make sure the session is tracked\n for session in self._get_provider_sessions():\n try:\n session.use_plenary_catalog_view()\n except AttributeError:\n pass", "def update_view(self, selected):\n pass", "def _catalog_tree_view_factory(self, window, **traits):\n from mapero.dataflow_editor.view.catalog_tree_view import CatalogTreeView\n\n catalog = window.get_service( Catalog )\n \n catalog_tree_view = CatalogTreeView(obj = catalog, window = window)\n return catalog_tree_view", "def set_current(self):\n 
self.ensure_one()\n view_values = {}\n last_version = self.view.versions[0]\n if self.id == last_version.id:\n view_values.update({\n 'current_version': last_version.id\n })\n else:\n current_version = self.create({\n 'view': self.view.id,\n 'arch': self.arch,\n })\n view_values.update({\n 'arch': self.arch,\n 'current_version': current_version.id\n })\n\n # avoid_version is set because we don't want to trigger the creation\n # of a new version when updating the view\n self.with_context(avoid_version=True).view.write(view_values)", "def set_related_view(request, view_name):\n request.environ['cone.app.related_view'] = view_name", "def set_view(self):\n self.scene.mlab.view(azimuth=90.0, elevation=-90.0)", "def set_catalogue(self, catalogue, force_it=False,\n match_angsep=3, **kwargs):\n super(Image, self).set_catalogue(catalogue, force_it=force_it, **kwargs)\n\n # -- Lets save the pixel values\n if self.has_catalogue() and self.has_sepobjects():\n self.sepobjects.set_catalogue(catalogue,force_it=True,reset=False)\n self.sepobjects.match_catalogue(deltadist=match_angsep)", "def use_comparative_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def use_comparative_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def use_comparative_book_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def assign_view(self):\n return self._assign_view if self._assign_view else self.assign_view_class.as_view()", "def UpdateView(self):\n self.View._viewData = self.Model.ModelViewData", "def view_type(self, view_type):\n\n self.container['view_type'] = view_type", "def view_name(self, view_name):\n\n self._view_name = view_name", "def use_comparative_family_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def use_comparative_family_view(self):\n # Implemented from template for\n # osid.resource.BinLookupSession.use_comparative_bin_view\n self._catalog_view = COMPARATIVE\n if self._catalog_session is not None:\n self._catalog_session.use_comparative_catalog_view()", "def viewer(self, value):\n\n # if viewer has somehow already been set, delete it to\n # disconnect all signals\n if self._viewer: del self._viewer\n\n self._viewer = value\n\n # TODO: need custom view for viewer; then we can connect a selection change to setting a new mod on that view", "def set_view(self, view, view_xml_filename):\n if self.view_exists(view):\n command = PlatformJenkinsJavaCLI.UPDATE_VIEW\n else:\n command = PlatformJenkinsJavaCLI.CREATE_VIEW\n\n with open(view_xml_filename) as view_xml_file:\n view_xml = view_xml_file.read()\n\n call = subprocess.Popen(self.cli + [command, view], stdin=subprocess.PIPE)\n call.communicate(view_xml)\n call.wait()", "def init_view(self):\n self.view_map = self.ctx.clientmap" ]
[ "0.7872513", "0.6380571", "0.6070326", "0.5951991", "0.5917372", "0.5841899", "0.5828823", "0.5798195", "0.57301855", "0.569667", "0.56866413", "0.56350636", "0.56345433", "0.56257606", "0.5610489", "0.55495274", "0.54750896", "0.5470648", "0.5437594", "0.5437594", "0.5437594", "0.5403253", "0.5375391", "0.5369194", "0.533839", "0.52645415", "0.52645415", "0.5238226", "0.52152926", "0.51466745" ]
0.7653462
1
Sets the underlying object views to match current view
def _set_object_view(self, session):
    for obj_name in self._object_views:
        if self._object_views[obj_name] == PLENARY:
            try:
                getattr(session, 'use_plenary_' + obj_name + '_view')()
            except AttributeError:
                pass
        else:
            try:
                getattr(session, 'use_comparative_' + obj_name + '_view')()
            except AttributeError:
                pass
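This setter assembles method names dynamically with getattr; a self-contained sketch with assumed constants and a stub session. The operable-view record below follows the identical pattern with 'active'/'any_status' in place of 'plenary'/'comparative'.

PLENARY, COMPARATIVE = 1, 0  # stand-ins for the service constants (assumption)

class _StubLookupSession:
    def use_plenary_resource_view(self):
        print('plenary resource view')

    def use_comparative_agent_view(self):
        print('comparative agent view')

def set_object_view(session, object_views):
    # method names are assembled per object, as in the record above
    for obj_name in object_views:
        if object_views[obj_name] == PLENARY:
            method_name = 'use_plenary_' + obj_name + '_view'
        else:
            method_name = 'use_comparative_' + obj_name + '_view'
        try:
            getattr(session, method_name)()
        except AttributeError:
            pass  # this session does not offer that view; skip it

set_object_view(_StubLookupSession(), {'resource': PLENARY, 'agent': COMPARATIVE})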
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def on_action_set_view(self, content):\n self._view = content['view']\n self.refresh_traits_widget()", "def views(self, views):\n\n self._views = views", "def set_view(self, index):\n view = index\n self.reset_scroll_area()\n self.clearSelection()\n\n if view == self.SURFACE_VIEW:\n self.toggle_surface_mode()\n self.view = \"Surface View\"\n elif view == self.BORDER_VIEW:\n self.toggle_border_mode()\n self.view = \"Border View\"\n elif view == self.GEOM_VIEW:\n if self.show_geom() == \"Canceled\":\n self.canceled = True\n else:\n self.view = \"Geom View\"\n elif view == self.MESH_VIEW:\n if self.show_mesh() == \"Canceled\":\n self.canceled = True\n else:\n self.view = \"Mesh View\"", "def set_view(self, s):\n #s.scene.reset_zoom()\n s.scene.z_plus_view()\n c = s.scene.camera\n c.azimuth(30)\n c.elevation(30)\n s.render()", "def set_view(self, s):\n #s.scene.reset_zoom()\n s.scene.z_plus_view()\n c = s.scene.camera\n c.azimuth(-30)\n c.elevation(20)\n s.render()", "def _set_containable_view(self, session):\n for obj_name in self._containable_views:\n if self._containable_views[obj_name] == SEQUESTERED:\n try:\n getattr(session, 'use_sequestered_' + obj_name + '_view')()\n except AttributeError:\n pass\n else:\n try:\n getattr(session, 'use_unsequestered_' + obj_name + '_view')()\n except AttributeError:\n pass", "def update_view(self): \n raise NotImplementedError(\"Widget descendents MUST implement the update_view() method!\")", "def UpdateView(self):\n self.View._viewData = self.Model.ModelViewData", "def reset_view(self, flags=None):\n self.view = {'cull': True,\n 'axis': False,\n 'fullscreen': False,\n 'wireframe': False,\n 'translation': np.zeros(3),\n 'center': self.scene.centroid,\n 'scale': self.scene.scale,\n 'ball': Arcball()}\n\n try:\n # place the arcball (rotation widget) in the center of the view\n self.view['ball'].place([self.width / 2.0,\n self.height / 2.0],\n (self.width + self.height) / 2.0)\n\n # if any flags are passed override defaults\n if isinstance(flags, dict):\n for k, v in flags.items():\n if k in self.view:\n self.view[k] = v\n self.update_flags()\n except BaseException:\n pass", "def set_view(self):\n self.scene.mlab.view(azimuth=90.0, elevation=-90.0)", "def set_xviews(self, *args):\n self.xview(*args)\n self.tablecolheader.xview(*args)\n self.redrawVisible()\n return", "def _set_operable_view(self, session):\n for obj_name in self._operable_views:\n if self._operable_views[obj_name] == ACTIVE:\n try:\n getattr(session, 'use_active_' + obj_name + '_view')()\n except AttributeError:\n pass\n else:\n try:\n getattr(session, 'use_any_status_' + obj_name + '_view')()\n except AttributeError:\n pass", "def update_view(self, selected):\n pass", "def redraw(self):\n bpy.context.scene.objects.active = bpy.context.scene.objects.active", "def setObjectRenderable(obj, state):\n obj.select_set(state)\n # Only make the object active if it's also selected.\n if state:\n bpy.context.view_layer.objects.active = obj\n # Set the object to be renderable.\n obj.hide_render = not state", "def _connectView(self):\n self._view.select_asset = self.select_asset\n self._view.add_assets = self.add_assets\n self._view.remove_assets = self.remove_assets\n self._view.update_assets = self.update_assets\n self._view.commit = self.commit", "def switch_view(self, *args):\n if not self.rightwin.data:\n return\n\n if self.cur == Win.right:\n self.switch_view_left()\n else:\n self.switch_view_right()\n self.leftwin.win.touchwin()", "def __init__(self, *args, **kwargs):\n \n super(AvatarView, 
self).__init__(*args, **kwargs)\n \n wm = bpy.context.window_manager\n wm.verse_avatars.add()\n wm.verse_avatars[-1].node_id = self.id\n \n # Force redraw of 3D view\n ui.update_all_views(('VIEW_3D',))\n\n self.scene_node = None\n view_initialized = False\n self.visualized = True\n self.cur_area = None\n self.cur_space = None\n\n if self.id == self.session.avatar_id:\n # Initialize default values\n self.cur_screen = bpy.context.screen\n self.__class__.__my_view = self\n\n # Try to find current 3D view \n for area in bpy.context.screen.areas.values():\n if area.type == 'VIEW_3D':\n self.cur_area = area\n for space in area.spaces.values():\n if space.type == 'VIEW_3D':\n self.cur_space = space\n break\n break\n\n if self.cur_area.type == 'VIEW_3D' and self.cur_space.type == 'VIEW_3D':\n view_initialized = True\n # Create tag group containing information about view\n self.view_tg = vrsent.VerseTagGroup(\n node=self,\n custom_type=TG_INFO_CT)\n # Create tags with data of view to 3D view\n # Location\n self.location = AvatarLocation(\n tg=self.view_tg,\n value=tuple(self.cur_space.region_3d.view_location))\n # Rotation\n self.rotation = AvatarRotation(\n tg=self.view_tg,\n value=tuple(self.cur_space.region_3d.view_rotation))\n # Distance\n self.distance = AvatarDistance(\n tg=self.view_tg,\n value=(self.cur_space.region_3d.view_distance,))\n # Perspective/Orthogonal\n self.perspective = AvatarPerspective(\n tg=self.view_tg,\n value=(self.cur_space.region_3d.view_perspective,))\n # Width\n self.width = AvatarWidth(\n tg=self.view_tg,\n value=(self.cur_area.width,))\n # Height\n self.height = AvatarHeight(\n tg=self.view_tg,\n value=(self.cur_area.height,))\n # Lens\n self.lens = AvatarLens(\n tg=self.view_tg,\n value=(self.cur_space.lens,))\n # Get current Scene ID\n if bpy.context.scene.verse_node_id != -1:\n scene_node_id = bpy.context.scene.verse_node_id\n else:\n scene_node_id = 0\n self.scene_node_id = AvatarScene(\n tg=self.view_tg,\n value=(scene_node_id,))\n \n # TODO: check following code (may be not needed anymore)\n original_type = bpy.context.area.type\n bpy.context.area.type = 'VIEW_3D'\n bpy.ops.view3d.verse_avatar()\n bpy.context.area.type = original_type\n else:\n # TODO: Add some assert, because this should not happen.\n pass\n else:\n self.__class__.__other_views[self.id] = self\n \n if view_initialized is False:\n # Create tag group containing information about view\n self.view_tg = vrsent.VerseTagGroup(\n node=self,\n custom_type=TG_INFO_CT)\n # Create tags with data of view to 3D view\n self.location = AvatarLocation(tg=self.view_tg)\n self.rotation = AvatarRotation(tg=self.view_tg)\n self.distance = AvatarDistance(tg=self.view_tg)\n self.perspective = AvatarPerspective(tg=self.view_tg)\n self.width = AvatarWidth(tg=self.view_tg)\n self.height = AvatarHeight(tg=self.view_tg)\n self.lens = AvatarLens(tg=self.view_tg)\n self.scene_node_id = AvatarScene(tg=self.view_tg)", "def assign_view(self):\n return self._assign_view if self._assign_view else self.assign_view_class.as_view()", "def set_view_options(self):\n active_panel = self.get_active_panel()\n # turn all show/hide display options off except for polygons and\n # surfaces\n pm.modelEditor(active_panel, e=1, allObjects=False)\n pm.modelEditor(active_panel, e=1, manipulators=False)\n pm.modelEditor(active_panel, e=1, grid=False)\n\n pm.modelEditor(active_panel, e=1, polymeshes=True)\n pm.modelEditor(active_panel, e=1, nurbsSurfaces=True)\n pm.modelEditor(active_panel, e=1, subdivSurfaces=True)\n 
pm.modelEditor(active_panel, e=1,\n pluginObjects=('gpuCacheDisplayFilter', True))\n pm.modelEditor(active_panel, e=1, planes=True)\n\n # turn all hud displays off\n hud_flags = pm.headsUpDisplay(lh=1)\n for flag in hud_flags:\n pm.headsUpDisplay(flag, e=1, vis=0)\n\n # set camera options for playblast\n for camera in pm.ls(type='camera'):\n camera.setAttr('overscan', 1)\n camera.setAttr('filmFit', 1)\n camera.setAttr('displayFilmGate', 1)\n camera.setAttr('displayResolution', 0)", "def init_view(self):\n self.view_map = self.ctx.clientmap", "def reset_view(contr):\n human = contr.owner\n \n # if the human is external, do nothing\n if human.get('External_Robot_Tag') or human['disable_keyboard_control']:\n return\n \n # get the suffix of the human to reference the right objects\n suffix = human.name[-4:] if human.name[-4] == \".\" else \"\"\n \n scene = logic.getCurrentScene()\n target = scene.objects['Target_Empty' + suffix]\n # Reset the Empty object to its original position\n target.localPosition = [1.3, 0.0, 1.7]", "def _update_view(self):\n NavigationToolbar2._update_view(self)\n\n self._myParent.evt_view_updated()\n\n return", "def update_view(self):\n for row in self.view.obj_list:\n for obj in row:\n obj._update(self.model)", "def get_view ( self, object ):\n return self.view", "def sync_view(self):\n new_callbacks = []\n for c in self._ngl_displayed_callbacks_after_loaded:\n if (c._method_name == 'loadFile' and\n 'defaultRepresentation' in c._ngl_msg['kwargs']):\n # set to False to avoid autoView\n # so subsequent display of `self` won't reset view orientation.\n c._ngl_msg['kwargs']['defaultRepresentation'] = False\n msg = c._ngl_msg\n msg['last_child'] = True\n def callback(widget, msg=msg):\n widget.send(msg)\n callback._method_name = msg['methodName']\n callback._ngl_msg = msg\n new_callbacks.append(callback)\n\n msg = {}\n msg['target'] = 'Widget'\n msg['type'] = 'call_method'\n msg['methodName'] = 'set_representation_from_backend'\n msg['args'] = []\n msg['kwargs'] = {}\n msg['last_child'] = True\n\n def callback(widget, msg=msg):\n widget.send(msg)\n callback._method_name = msg['methodName']\n callback._ngl_msg = msg\n\n new_callbacks.append(callback)\n self._fire_callbacks(new_callbacks)", "def viewAll(self):\n self._sceneviewer.viewAll()", "def resetView(self):\n\n self.zoomLevel = 0\n self.setTransform(QTransform())", "def transform_view(self):\n translation = self.get_translation()\n scale = self.get_scaling()\n # update all non static visuals\n for visual in self.paint_manager.get_visuals():\n if not visual.get('is_static', False):\n self.set_data(visual=visual['name'], \n scale=scale, translation=translation)", "def on_show_view(self):\n self.setup()" ]
[ "0.642989", "0.6317955", "0.62381274", "0.62121856", "0.61683553", "0.61243427", "0.60863346", "0.607278", "0.60501987", "0.60130256", "0.59132856", "0.5909396", "0.5882508", "0.58528304", "0.5812102", "0.5759648", "0.5733881", "0.57300025", "0.5714099", "0.5709085", "0.5691893", "0.5680695", "0.56690246", "0.5657146", "0.56557024", "0.56541955", "0.563271", "0.56007296", "0.55679685", "0.55088663" ]
0.665704
0
Sets the underlying operable views to match the current view
def _set_operable_view(self, session): for obj_name in self._operable_views: if self._operable_views[obj_name] == ACTIVE: try: getattr(session, 'use_active_' + obj_name + '_view')() except AttributeError: pass else: try: getattr(session, 'use_any_status_' + obj_name + '_view')() except AttributeError: pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def on_action_set_view(self, content):\n self._view = content['view']\n self.refresh_traits_widget()", "def views(self, views):\n\n self._views = views", "def switch_view(self, *args):\n if not self.rightwin.data:\n return\n\n if self.cur == Win.right:\n self.switch_view_left()\n else:\n self.switch_view_right()\n self.leftwin.win.touchwin()", "def set_view(self, index):\n view = index\n self.reset_scroll_area()\n self.clearSelection()\n\n if view == self.SURFACE_VIEW:\n self.toggle_surface_mode()\n self.view = \"Surface View\"\n elif view == self.BORDER_VIEW:\n self.toggle_border_mode()\n self.view = \"Border View\"\n elif view == self.GEOM_VIEW:\n if self.show_geom() == \"Canceled\":\n self.canceled = True\n else:\n self.view = \"Geom View\"\n elif view == self.MESH_VIEW:\n if self.show_mesh() == \"Canceled\":\n self.canceled = True\n else:\n self.view = \"Mesh View\"", "def _set_containable_view(self, session):\n for obj_name in self._containable_views:\n if self._containable_views[obj_name] == SEQUESTERED:\n try:\n getattr(session, 'use_sequestered_' + obj_name + '_view')()\n except AttributeError:\n pass\n else:\n try:\n getattr(session, 'use_unsequestered_' + obj_name + '_view')()\n except AttributeError:\n pass", "def update_view(self, selected):\n pass", "def set_view(self, s):\n #s.scene.reset_zoom()\n s.scene.z_plus_view()\n c = s.scene.camera\n c.azimuth(30)\n c.elevation(30)\n s.render()", "def set_view(self, s):\n #s.scene.reset_zoom()\n s.scene.z_plus_view()\n c = s.scene.camera\n c.azimuth(-30)\n c.elevation(20)\n s.render()", "def xview_moveto(self, fraction):\n self.tk.call(self._w, 'xview', 'moveto', fraction)", "def update_view(self): \n raise NotImplementedError(\"Widget descendents MUST implement the update_view() method!\")", "def __initViewActions(self):\n self.viewActGrp = createActionGroup(self)\n self.viewFoldActGrp = createActionGroup(self)\n \n self.zoomInAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Zoom in'),\n UI.PixmapCache.getIcon(\"zoomIn.png\"),\n QCoreApplication.translate('ViewManager', 'Zoom &in'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl++\", \"View|Zoom in\")),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Zoom In\", \"View|Zoom in\")),\n self.viewActGrp, 'vm_view_zoom_in')\n self.zoomInAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Zoom in on the text'))\n self.zoomInAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Zoom in</b>\"\"\"\n \"\"\"<p>Zoom in on the text. This makes the text bigger.</p>\"\"\"\n ))\n self.zoomInAct.triggered.connect(self.__zoomIn)\n self.viewActions.append(self.zoomInAct)\n \n self.zoomOutAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Zoom out'),\n UI.PixmapCache.getIcon(\"zoomOut.png\"),\n QCoreApplication.translate('ViewManager', 'Zoom &out'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+-\", \"View|Zoom out\")),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Zoom Out\", \"View|Zoom out\")),\n self.viewActGrp, 'vm_view_zoom_out')\n self.zoomOutAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Zoom out on the text'))\n self.zoomOutAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Zoom out</b>\"\"\"\n \"\"\"<p>Zoom out on the text. 
This makes the text smaller.</p>\"\"\"\n ))\n self.zoomOutAct.triggered.connect(self.__zoomOut)\n self.viewActions.append(self.zoomOutAct)\n \n self.zoomResetAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Zoom reset'),\n UI.PixmapCache.getIcon(\"zoomReset.png\"),\n QCoreApplication.translate('ViewManager', 'Zoom &reset'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+0\", \"View|Zoom reset\")),\n 0,\n self.viewActGrp, 'vm_view_zoom_reset')\n self.zoomResetAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Reset the zoom of the text'))\n self.zoomResetAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Zoom reset</b>\"\"\"\n \"\"\"<p>Reset the zoom of the text. \"\"\"\n \"\"\"This sets the zoom factor to 100%.</p>\"\"\"\n ))\n self.zoomResetAct.triggered.connect(self.__zoomReset)\n self.viewActions.append(self.zoomResetAct)\n \n self.zoomToAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Zoom'),\n UI.PixmapCache.getIcon(\"zoomTo.png\"),\n QCoreApplication.translate('ViewManager', '&Zoom'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+#\", \"View|Zoom\")),\n 0,\n self.viewActGrp, 'vm_view_zoom')\n self.zoomToAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Zoom the text'))\n self.zoomToAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Zoom</b>\"\"\"\n \"\"\"<p>Zoom the text. This opens a dialog where the\"\"\"\n \"\"\" desired size can be entered.</p>\"\"\"\n ))\n self.zoomToAct.triggered.connect(self.__zoom)\n self.viewActions.append(self.zoomToAct)\n \n self.toggleAllAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Toggle all folds'),\n QCoreApplication.translate('ViewManager', '&Toggle all folds'),\n 0, 0, self.viewFoldActGrp, 'vm_view_toggle_all_folds')\n self.toggleAllAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Toggle all folds'))\n self.toggleAllAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Toggle all folds</b>\"\"\"\n \"\"\"<p>Toggle all folds of the current editor.</p>\"\"\"\n ))\n self.toggleAllAct.triggered.connect(self.__toggleAll)\n self.viewActions.append(self.toggleAllAct)\n \n self.toggleAllChildrenAct = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'Toggle all folds (including children)'),\n QCoreApplication.translate(\n 'ViewManager', 'Toggle all &folds (including children)'),\n 0, 0, self.viewFoldActGrp, 'vm_view_toggle_all_folds_children')\n self.toggleAllChildrenAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Toggle all folds (including children)'))\n self.toggleAllChildrenAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Toggle all folds (including children)</b>\"\"\"\n \"\"\"<p>Toggle all folds of the current editor including\"\"\"\n \"\"\" all children.</p>\"\"\"\n ))\n self.toggleAllChildrenAct.triggered.connect(\n self.__toggleAllChildren)\n self.viewActions.append(self.toggleAllChildrenAct)\n \n self.toggleCurrentAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Toggle current fold'),\n QCoreApplication.translate('ViewManager', 'Toggle &current fold'),\n 0, 0, self.viewFoldActGrp, 'vm_view_toggle_current_fold')\n self.toggleCurrentAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Toggle current fold'))\n self.toggleCurrentAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Toggle current fold</b>\"\"\"\n \"\"\"<p>Toggle the folds of the current line of the current\"\"\"\n \"\"\" 
editor.</p>\"\"\"\n ))\n self.toggleCurrentAct.triggered.connect(self.__toggleCurrent)\n self.viewActions.append(self.toggleCurrentAct)\n \n self.clearAllFoldsAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Clear all folds'),\n QCoreApplication.translate('ViewManager', 'Clear &all folds'),\n 0, 0, self.viewFoldActGrp, 'vm_view_clear_all_folds')\n self.clearAllFoldsAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Clear all folds'))\n self.clearAllFoldsAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Clear all folds</b>\"\"\"\n \"\"\"<p>Clear all folds of the current editor, i.e. ensure that\"\"\"\n \"\"\" all lines are displayed unfolded.</p>\"\"\"\n ))\n self.clearAllFoldsAct.triggered.connect(self.__clearAllFolds)\n self.viewActions.append(self.clearAllFoldsAct)\n \n self.unhighlightAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Remove all highlights'),\n UI.PixmapCache.getIcon(\"unhighlight.png\"),\n QCoreApplication.translate('ViewManager', 'Remove all highlights'),\n 0, 0,\n self, 'vm_view_unhighlight')\n self.unhighlightAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Remove all highlights'))\n self.unhighlightAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Remove all highlights</b>\"\"\"\n \"\"\"<p>Remove the highlights of all editors.</p>\"\"\"\n ))\n self.unhighlightAct.triggered.connect(self.__unhighlight)\n self.viewActions.append(self.unhighlightAct)\n \n self.newDocumentViewAct = E5Action(\n QCoreApplication.translate('ViewManager', 'New Document View'),\n UI.PixmapCache.getIcon(\"documentNewView.png\"),\n QCoreApplication.translate('ViewManager', 'New &Document View'),\n 0, 0, self, 'vm_view_new_document_view')\n self.newDocumentViewAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Open a new view of the current document'))\n self.newDocumentViewAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>New Document View</b>\"\"\"\n \"\"\"<p>Opens a new view of the current document. Both views show\"\"\"\n \"\"\" the same document. However, the cursors may be positioned\"\"\"\n \"\"\" independently.</p>\"\"\"\n ))\n self.newDocumentViewAct.triggered.connect(self.__newDocumentView)\n self.viewActions.append(self.newDocumentViewAct)\n \n self.newDocumentSplitViewAct = E5Action(\n QCoreApplication.translate(\n 'ViewManager', 'New Document View (with new split)'),\n UI.PixmapCache.getIcon(\"splitVertical.png\"),\n QCoreApplication.translate(\n 'ViewManager', 'New Document View (with new split)'),\n 0, 0, self, 'vm_view_new_document_split_view')\n self.newDocumentSplitViewAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager',\n 'Open a new view of the current document in a new split'))\n self.newDocumentSplitViewAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>New Document View</b>\"\"\"\n \"\"\"<p>Opens a new view of the current document in a new split.\"\"\"\n \"\"\" Both views show the same document. 
However, the cursors may\"\"\"\n \"\"\" be positioned independently.</p>\"\"\"\n ))\n self.newDocumentSplitViewAct.triggered.connect(\n self.__newDocumentSplitView)\n self.viewActions.append(self.newDocumentSplitViewAct)\n \n self.splitViewAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Split view'),\n UI.PixmapCache.getIcon(\"splitVertical.png\"),\n QCoreApplication.translate('ViewManager', '&Split view'),\n 0, 0, self, 'vm_view_split_view')\n self.splitViewAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Add a split to the view'))\n self.splitViewAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Split view</b>\"\"\"\n \"\"\"<p>Add a split to the view.</p>\"\"\"\n ))\n self.splitViewAct.triggered.connect(self.__splitView)\n self.viewActions.append(self.splitViewAct)\n \n self.splitOrientationAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Arrange horizontally'),\n QCoreApplication.translate('ViewManager', 'Arrange &horizontally'),\n 0, 0, self, 'vm_view_arrange_horizontally', True)\n self.splitOrientationAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Arrange the splitted views horizontally'))\n self.splitOrientationAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Arrange horizontally</b>\"\"\"\n \"\"\"<p>Arrange the splitted views horizontally.</p>\"\"\"\n ))\n self.splitOrientationAct.setChecked(False)\n self.splitOrientationAct.toggled[bool].connect(self.__splitOrientation)\n self.viewActions.append(self.splitOrientationAct)\n \n self.splitRemoveAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Remove split'),\n UI.PixmapCache.getIcon(\"remsplitVertical.png\"),\n QCoreApplication.translate('ViewManager', '&Remove split'),\n 0, 0, self, 'vm_view_remove_split')\n self.splitRemoveAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Remove the current split'))\n self.splitRemoveAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Remove split</b>\"\"\"\n \"\"\"<p>Remove the current split.</p>\"\"\"\n ))\n self.splitRemoveAct.triggered.connect(self.removeSplit)\n self.viewActions.append(self.splitRemoveAct)\n \n self.nextSplitAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Next split'),\n QCoreApplication.translate('ViewManager', '&Next split'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+Alt+N\", \"View|Next split\")),\n 0,\n self, 'vm_next_split')\n self.nextSplitAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Move to the next split'))\n self.nextSplitAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Next split</b>\"\"\"\n \"\"\"<p>Move to the next split.</p>\"\"\"\n ))\n self.nextSplitAct.triggered.connect(self.nextSplit)\n self.viewActions.append(self.nextSplitAct)\n \n self.prevSplitAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Previous split'),\n QCoreApplication.translate('ViewManager', '&Previous split'),\n QKeySequence(QCoreApplication.translate(\n 'ViewManager', \"Ctrl+Alt+P\", \"View|Previous split\")),\n 0, self, 'vm_previous_split')\n self.prevSplitAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Move to the previous split'))\n self.prevSplitAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Previous split</b>\"\"\"\n \"\"\"<p>Move to the previous split.</p>\"\"\"\n ))\n self.prevSplitAct.triggered.connect(self.prevSplit)\n self.viewActions.append(self.prevSplitAct)\n \n self.previewAct = E5Action(\n 
QCoreApplication.translate('ViewManager', 'Preview'),\n UI.PixmapCache.getIcon(\"previewer.png\"),\n QCoreApplication.translate('ViewManager', 'Preview'),\n 0, 0, self, 'vm_preview', True)\n self.previewAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Preview the current file in the web browser'))\n self.previewAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Preview</b>\"\"\"\n \"\"\"<p>This opens the web browser with a preview of\"\"\"\n \"\"\" the current file.</p>\"\"\"\n ))\n self.previewAct.setChecked(Preferences.getUI(\"ShowFilePreview\"))\n self.previewAct.toggled[bool].connect(self.__previewEditor)\n self.viewActions.append(self.previewAct)\n \n self.astViewerAct = E5Action(\n QCoreApplication.translate('ViewManager', 'Python AST Viewer'),\n UI.PixmapCache.getIcon(\"astTree\"),\n QCoreApplication.translate('ViewManager', 'Python AST Viewer'),\n 0, 0, self, 'vm_python_ast_viewer', True)\n self.astViewerAct.setStatusTip(QCoreApplication.translate(\n 'ViewManager', 'Show the AST for the current Python file'))\n self.astViewerAct.setWhatsThis(QCoreApplication.translate(\n 'ViewManager',\n \"\"\"<b>Python AST Viewer</b>\"\"\"\n \"\"\"<p>This opens the a tree view of the AST of the current\"\"\"\n \"\"\" Python source file.</p>\"\"\"\n ))\n self.astViewerAct.setChecked(False)\n self.astViewerAct.toggled[bool].connect(self.__astViewer)\n self.viewActions.append(self.astViewerAct)\n \n self.viewActGrp.setEnabled(False)\n self.viewFoldActGrp.setEnabled(False)\n self.unhighlightAct.setEnabled(False)\n self.splitViewAct.setEnabled(False)\n self.splitOrientationAct.setEnabled(False)\n self.splitRemoveAct.setEnabled(False)\n self.nextSplitAct.setEnabled(False)\n self.prevSplitAct.setEnabled(False)\n self.previewAct.setEnabled(True)\n self.astViewerAct.setEnabled(False)\n self.newDocumentViewAct.setEnabled(False)\n self.newDocumentSplitViewAct.setEnabled(False)\n \n self.splitOrientationAct.setChecked(\n Preferences.getUI(\"SplitOrientationVertical\"))", "def isometric_view(self):\n self.view_isometric()", "def set_xviews(self, *args):\n self.xview(*args)\n self.tablecolheader.xview(*args)\n self.redrawVisible()\n return", "def _connectView(self):\n self._view.select_asset = self.select_asset\n self._view.add_assets = self.add_assets\n self._view.remove_assets = self.remove_assets\n self._view.update_assets = self.update_assets\n self._view.commit = self.commit", "def reset_view(self, flags=None):\n self.view = {'cull': True,\n 'axis': False,\n 'fullscreen': False,\n 'wireframe': False,\n 'translation': np.zeros(3),\n 'center': self.scene.centroid,\n 'scale': self.scene.scale,\n 'ball': Arcball()}\n\n try:\n # place the arcball (rotation widget) in the center of the view\n self.view['ball'].place([self.width / 2.0,\n self.height / 2.0],\n (self.width + self.height) / 2.0)\n\n # if any flags are passed override defaults\n if isinstance(flags, dict):\n for k, v in flags.items():\n if k in self.view:\n self.view[k] = v\n self.update_flags()\n except BaseException:\n pass", "def set_view_options(self):\n active_panel = self.get_active_panel()\n # turn all show/hide display options off except for polygons and\n # surfaces\n pm.modelEditor(active_panel, e=1, allObjects=False)\n pm.modelEditor(active_panel, e=1, manipulators=False)\n pm.modelEditor(active_panel, e=1, grid=False)\n\n pm.modelEditor(active_panel, e=1, polymeshes=True)\n pm.modelEditor(active_panel, e=1, nurbsSurfaces=True)\n pm.modelEditor(active_panel, e=1, subdivSurfaces=True)\n 
pm.modelEditor(active_panel, e=1,\n pluginObjects=('gpuCacheDisplayFilter', True))\n pm.modelEditor(active_panel, e=1, planes=True)\n\n # turn all hud displays off\n hud_flags = pm.headsUpDisplay(lh=1)\n for flag in hud_flags:\n pm.headsUpDisplay(flag, e=1, vis=0)\n\n # set camera options for playblast\n for camera in pm.ls(type='camera'):\n camera.setAttr('overscan', 1)\n camera.setAttr('filmFit', 1)\n camera.setAttr('displayFilmGate', 1)\n camera.setAttr('displayResolution', 0)", "def set_view(self):\n self.scene.mlab.view(azimuth=90.0, elevation=-90.0)", "def _update_view(self):\n NavigationToolbar2._update_view(self)\n\n self._myParent.evt_view_updated()\n\n return", "def add_views_widget(self):\n axial_view = QtWidgets.QPushButton(\"Axial\")\n coronal_view = QtWidgets.QPushButton(\"Coronal\")\n sagittal_view = QtWidgets.QPushButton(\"Sagittal\")\n views_box = QtWidgets.QGroupBox(\"Views\")\n views_box_layout = QtWidgets.QVBoxLayout()\n views_box_layout.addWidget(axial_view)\n views_box_layout.addWidget(coronal_view)\n views_box_layout.addWidget(sagittal_view)\n views_box.setLayout(views_box_layout)\n self.grid.addWidget(views_box, 3, 0, 2, 2)\n axial_view.clicked.connect(self.set_axial_view)\n coronal_view.clicked.connect(self.set_coronal_view)\n sagittal_view.clicked.connect(self.set_sagittal_view)", "def yview_moveto(self, fraction):\n self.tk.call(self._w, 'yview', 'moveto', fraction)", "def _add_all(self, window):\r\n\r\n for view in window.views:\r\n if view.visible:\r\n self._add_view(window, view)\r\n\r\n return", "def set_axial_view(self):\n self.renderer.ResetCamera()\n fp = self.renderer.GetActiveCamera().GetFocalPoint()\n p = self.renderer.GetActiveCamera().GetPosition()\n dist = math.sqrt((p[0] - fp[0]) ** 2 + (p[1] - fp[1]) ** 2 + (p[2] - fp[2]) ** 2)\n self.renderer.GetActiveCamera().SetPosition(fp[0], fp[1], fp[2] + dist)\n self.renderer.GetActiveCamera().SetViewUp(0.0, 1.0, 0.0)\n self.renderer.GetActiveCamera().Zoom(1.8)\n self.render_window.Render()", "def xview(self, *what):\n if not what:\n return self._getdoubles(self.tk.call(self._w, 'xview'))\n self.tk.call((self._w, 'xview') + what)", "def _set_object_view(self, session):\n for obj_name in self._object_views:\n if self._object_views[obj_name] == PLENARY:\n try:\n getattr(session, 'use_plenary_' + obj_name + '_view')()\n except AttributeError:\n pass\n else:\n try:\n getattr(session, 'use_comparative_' + obj_name + '_view')()\n except AttributeError:\n pass", "def adjustToView(self):\r\n view = self.activeView()\r\n if view:\r\n \r\n self.resizeToView() # Resize first because the x-position is dependant on correct width.\r\n\r\n # pos = self.parentWidget().mapFromGlobal(view.mapToGlobal(QPoint(view.width() - self.parentWidget().width(), 0))) # Move to top of QMdiArea. Only suitable for 'AdjustToSubwindows' mode.\r\n pos = self.parentWidget().mapFromGlobal(view.mapToGlobal(QPoint(view.width() - self.width(), 0))) # Move to top left corner of current view. Hacky, but works!\r\n self.move(pos)", "def on_show_view(self):\n self.setup()", "def on_show_view(self):\n self.setup()", "def on_show_view(self):\n self.setup()", "def set_yviews(self, *args):\n self.yview(*args)\n self.tablerowheader.yview(*args)\n self.redrawVisible()\n return", "def views(self):\r\n return Views(self)" ]
[ "0.6402035", "0.6123511", "0.60095805", "0.59634674", "0.5906052", "0.58527505", "0.5831052", "0.5789425", "0.5751221", "0.57355815", "0.57335454", "0.5718964", "0.5668233", "0.56401366", "0.5618445", "0.5529013", "0.54861164", "0.5480957", "0.54507565", "0.5442017", "0.54279405", "0.5388553", "0.53408253", "0.53111076", "0.5309627", "0.52781373", "0.52781373", "0.52781373", "0.5266089", "0.52445644" ]
0.6814051
0
Sets the underlying containable views to match the current view
def _set_containable_view(self, session): for obj_name in self._containable_views: if self._containable_views[obj_name] == SEQUESTERED: try: getattr(session, 'use_sequestered_' + obj_name + '_view')() except AttributeError: pass else: try: getattr(session, 'use_unsequestered_' + obj_name + '_view')() except AttributeError: pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def on_action_set_view(self, content):\n self._view = content['view']\n self.refresh_traits_widget()", "def views(self, views):\n\n self._views = views", "def _add_all(self, window):\r\n\r\n for view in window.views:\r\n if view.visible:\r\n self._add_view(window, view)\r\n\r\n return", "def update_view(self): \n raise NotImplementedError(\"Widget descendents MUST implement the update_view() method!\")", "def update_view(self, selected):\n pass", "def viewAll(self):\n self._sceneviewer.viewAll()", "def _update_view(self):\n NavigationToolbar2._update_view(self)\n\n self._myParent.evt_view_updated()\n\n return", "def set_xviews(self, *args):\n self.xview(*args)\n self.tablecolheader.xview(*args)\n self.redrawVisible()\n return", "def _updateBoundingRect(self):\n self.setPen(QPen(Qt.NoPen))\n self.setRect(self.childrenBoundingRect())\n # move and show or hide the buttons if necessary\n addButton = self._addBasesButton\n rmButton = self._removeBasesButton\n if len(self._virtualHelixItemList) > 0:\n addRect = addButton.boundingRect()\n rmRect = rmButton.boundingRect()\n x = self._vHRect.right()\n y = -styles.PATH_HELIX_PADDING\n addButton.setPos(x, y)\n rmButton.setPos(x-rmRect.width(), y)\n addButton.show()\n rmButton.show()\n else:\n addButton.hide()\n rmButton.hide()", "def _connectView(self):\n self._view.select_asset = self.select_asset\n self._view.add_assets = self.add_assets\n self._view.remove_assets = self.remove_assets\n self._view.update_assets = self.update_assets\n self._view.commit = self.commit", "def reset_view(self, flags=None):\n self.view = {'cull': True,\n 'axis': False,\n 'fullscreen': False,\n 'wireframe': False,\n 'translation': np.zeros(3),\n 'center': self.scene.centroid,\n 'scale': self.scene.scale,\n 'ball': Arcball()}\n\n try:\n # place the arcball (rotation widget) in the center of the view\n self.view['ball'].place([self.width / 2.0,\n self.height / 2.0],\n (self.width + self.height) / 2.0)\n\n # if any flags are passed override defaults\n if isinstance(flags, dict):\n for k, v in flags.items():\n if k in self.view:\n self.view[k] = v\n self.update_flags()\n except BaseException:\n pass", "def set_view(self, s):\n #s.scene.reset_zoom()\n s.scene.z_plus_view()\n c = s.scene.camera\n c.azimuth(30)\n c.elevation(30)\n s.render()", "def on_show_view(self):\n self.setup()", "def on_show_view(self):\n self.setup()", "def on_show_view(self):\n self.setup()", "def fit_to_window(self):\n if self.view:\n self.view.fit_view()", "def set_view(self, s):\n #s.scene.reset_zoom()\n s.scene.z_plus_view()\n c = s.scene.camera\n c.azimuth(-30)\n c.elevation(20)\n s.render()", "def set_content(self, content):\n \n # clean sizer\n if self._view is not None:\n self.remove_subview(self._view)\n \n # init view\n if isinstance(content, UIView):\n self._view = content\n \n elif isinstance(content, Control):\n self._view = UIView()\n self._view.set_control(content)\n \n elif isinstance(content, Graphics):\n self._view = UIView()\n self._view.set_control(Control(graphics=content))\n \n else:\n message = \"Unknown content type! 
-> %s\" % type(content)\n raise TypeError(message)\n \n # add to sizer\n self.add_subview(self._view)\n \n # set layout\n self._view.flex = \"WH\"", "def updateContents(self):\n selSpots = self.treeView.selectionModel().selectedSpots()\n if self.isChildView:\n if len(selSpots) > 1 or self.hideChildView:\n self.hide()\n return\n if not selSpots:\n # use top node childList from tree structure\n selSpots = [globalref.mainControl.activeControl.structure.\n structSpot()]\n elif not selSpots:\n self.hide()\n return\n self.show()\n if not self.isVisible() or self.height() == 0 or self.width() == 0:\n return\n if self.isChildView:\n selSpots = selSpots[0].childSpots()\n self.blockSignals(True)\n if selSpots:\n self.setPlainText('\\n'.join(spot.nodeRef.title(spot) for spot in\n selSpots))\n else:\n self.clear()\n self.blockSignals(False)", "def set_view(self):\n self.scene.mlab.view(azimuth=90.0, elevation=-90.0)", "def adjustToView(self):\r\n view = self.activeView()\r\n if view:\r\n \r\n self.resizeToView() # Resize first because the x-position is dependant on correct width.\r\n\r\n # pos = self.parentWidget().mapFromGlobal(view.mapToGlobal(QPoint(view.width() - self.parentWidget().width(), 0))) # Move to top of QMdiArea. Only suitable for 'AdjustToSubwindows' mode.\r\n pos = self.parentWidget().mapFromGlobal(view.mapToGlobal(QPoint(view.width() - self.width(), 0))) # Move to top left corner of current view. Hacky, but works!\r\n self.move(pos)", "def update_list_view(self):\n # Clear the list/tree view.\n self.list_view.clear()\n\n # Find all the selected things in Maya.\n selected = cmds.ls(selection=True)\n\n # For each of the selected things, create a widget item.\n for thing in selected:\n item = QtGui.QListWidgetItem(thing)\n item.setFlags(item.flags() | QtCore.Qt.ItemIsEditable)\n self.list_view.addItem(item)\n # Set the flags on the widget item so it is editable.", "def child_views(self):\n return self.children", "def view_tree_set(v, treeset):\r\n treeset.add(v)\r\n for cl, v_input_pos_to_cl in v.clients:\r\n if cl == 'output':\r\n continue\r\n vmap = getattr(cl.op, 'view_map', {})\r\n dmap = getattr(cl.op, 'destroy_map', {})\r\n for opos, iposlist in vmap.items() + dmap.items():\r\n if v_input_pos_to_cl in iposlist:\r\n if cl.outputs[opos] not in treeset:\r\n view_tree_set(cl.outputs[opos], treeset)", "def __init__(self, parent: View):\n self.parent = parent\n self.root = self.parent.root\n # Content frame\n self.frame = tk.Frame(self.parent.frame)\n # Reference\n self.visible = False", "def updateViewer(self):\n if not self.hasImage():\n return\n if len(self.zoomStack):\n self.fitInView(self.zoomStack[-1], self.aspectRatioMode) # Show zoomed rect.\n else:\n self.fitInView(self.sceneRect(), self.aspectRatioMode) # Show entire image.", "def _add_contents(self, window, contents):\r\n\r\n # If we are adding specific contents then we ignore any default view\r\n # visibility.\r\n #\r\n # fixme: This is a bit ugly! 
Why don't we pass the visibility in to\r\n # 'window.add_view'?\r\n for view in window.views:\r\n view.visible = False\r\n \r\n for item in contents:\r\n self._add_perspective_item(window, item)\r\n \r\n return", "def vue_relayout(self, *args, **kwargs):\n def resize(stack_items):\n for stack in stack_items:\n for viewer_item in stack.get('viewers'):\n viewer = self._viewer_by_id(viewer_item['id'])\n\n if viewer is not None:\n viewer.figure_widget.layout.height = '99.9%'\n viewer.figure_widget.layout.height = '100%'\n\n if len(stack.get('children')) > 0:\n resize(stack.get('children'))\n\n resize(self.state.stack_items)", "def _set_operable_view(self, session):\n for obj_name in self._operable_views:\n if self._operable_views[obj_name] == ACTIVE:\n try:\n getattr(session, 'use_active_' + obj_name + '_view')()\n except AttributeError:\n pass\n else:\n try:\n getattr(session, 'use_any_status_' + obj_name + '_view')()\n except AttributeError:\n pass", "def __toggleAllChildren(self):\n aw = self.activeWindow()\n if aw:\n aw.foldAll(True)" ]
[ "0.6018899", "0.5493033", "0.546562", "0.54010123", "0.5316874", "0.5293752", "0.52362514", "0.5230312", "0.52012634", "0.51944643", "0.51662743", "0.51123357", "0.50995046", "0.50995046", "0.50995046", "0.50925916", "0.50868434", "0.5046261", "0.50452673", "0.50266683", "0.50157934", "0.5005042", "0.5004232", "0.5003941", "0.49787387", "0.4973978", "0.49736068", "0.49682358", "0.49377292", "0.49333686" ]
0.67774415
0
Returns the requested provider session. Instantiates a new one if the named session is not already known.
def _get_provider_session(self, session_name): agent_key = self._get_agent_key() if session_name in self._provider_sessions[agent_key]: return self._provider_sessions[agent_key][session_name] else: session_class = getattr(self._provider_manager, 'get_' + session_name + '_for_catalog') if self._proxy is None: if 'notification_session' in session_name: # Is there something else we should do about the receiver field? session = session_class('fake receiver', self._catalog.get_id()) else: session = session_class(self._catalog.get_id()) else: if 'notification_session' in session_name: # Is there something else we should do about the receiver field? session = session_class('fake receiver', self._catalog.get_id(), self._proxy) else: session = session_class(self._catalog.get_id(), self._proxy) self._set_catalog_view(session) self._set_object_view(session) self._set_operable_view(session) self._set_containable_view(session) if self._session_management != DISABLED: self._provider_sessions[agent_key][session_name] = session return session
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_provider_session(self, session_name, proxy=None):\n agent_key = self._get_agent_key(proxy)\n if session_name in self._provider_sessions[agent_key]:\n return self._provider_sessions[agent_key][session_name]\n else:\n session = self._instantiate_session('get_' + session_name, self._proxy)\n self._set_catalog_view(session)\n if self._session_management != DISABLED:\n self._provider_sessions[agent_key][session_name] = session\n return session", "def _get_session(self):\n if current_uow and not self._outside_uow:\n return current_uow.get_session(self.provider.name)\n else:\n new_connection = self.provider.get_connection()\n if not new_connection.is_active:\n new_connection.begin()\n return new_connection", "def provider_session(self):\n if self.flow_type == \"implicit\":\n return OAuth2Session(\n client_id=self.client_id,\n scope=self.client_kwargs[\"scope\"],\n redirect_uri=self.redirect_uri\n )\n elif self.flow_type == \"client\":\n return OAuth2Session(\n client_id=self.client_id,\n client_secret=self.client_secret,\n scope=self.client_kwargs[\"scope\"]\n )", "def get_catalog_lookup_session(self, *args, **kwargs):\n # Implemented from kitosid template for -\n # osid.resource.ResourceManager.get_resource_lookup_session_manager_template\n return self._provider_manager.get_catalog_lookup_session(*args, **kwargs)", "def _get_sub_package_provider_session(self, sub_package, session_name, proxy=None):\n agent_key = self._get_agent_key(proxy)\n if session_name in self._provider_sessions[agent_key]:\n return self._provider_sessions[agent_key][session_name]\n else:\n manager = self._get_sub_package_provider_manager(sub_package)\n try:\n session = self._instantiate_session('get_' + session_name + '_for_bank',\n proxy=self._proxy,\n manager=manager)\n except AttributeError:\n session = self._instantiate_session('get_' + session_name,\n proxy=self._proxy,\n manager=manager)\n self._set_bank_view(session)\n if self._session_management != DISABLED:\n self._provider_sessions[agent_key][session_name] = session\n return session", "def session_open(session_name: Optional[str], provider: Optional[str] = None, **kwargs):\n providers = [_safe_get_provider(provider)] if provider else get_supported_session_providers()\n project_name = get_renku_project_name()\n\n for session_provider in providers:\n if session_provider.session_open(project_name, session_name, **kwargs):\n return\n\n if session_name:\n raise errors.ParameterError(f\"Could not find '{session_name}' among the running sessions.\")\n else:\n raise errors.ParameterError(\"Session name is missing\")", "def session(get_session):\n return get_session()", "def by_name(cls, name):\n datastore = Session._datastores.get(name)\n if datastore is None:\n return None\n\n for odmsession in cls._session_registry.values():\n if odmsession.bind is datastore:\n return odmsession\n else:\n return ThreadLocalODMSession(bind=datastore)", "def session(self):\n if not self._session: #Create new session if none exists\n return self._new_session()\n return self._session", "def get_session_for_requester(self, requester_name: str) -> ProlificClient:\n if requester_name not in self.session_storage:\n session = get_authenticated_client(requester_name)\n self.session_storage[requester_name] = session\n\n return self.session_storage[requester_name]", "def getSession():\n return call(\"getSession\")", "def session(self):\n ssn = pn_session(self._impl)\n if ssn is None:\n raise (SessionException(\"Session allocation failed.\"))\n else:\n return Session(ssn)", "def 
getSession(conn_string=None, req=None):\n global engine, session_factory\n if engine is None:\n engine = getEngine(conn_string)\n if session_factory is None:\n session_factory = scoped_session(sessionmaker(bind=engine))\n session = session_factory()\n return session", "def get_session(cls):\r\n if cls._session is not None:\r\n return cls._session\r\n else:\r\n raise RuntimeError('Session not set.')", "def get_session(context, key):\n session_manager = getToolByName(context, 'session_data_manager')\n\n if not session_manager.hasSessionData():\n return None\n\n session = session_manager.getSessionData()\n\n if not key in session.keys():\n return None\n\n return session[key]", "def getSession( self, name, default=None, REQUEST=None, cookie=None ):\n return GetSessionValue( self, name, default, REQUEST, cookie )", "def get_session(group, **session_kwargs):\n return ks_loading.load_session_from_conf_options(\n CONF, group, **session_kwargs)", "def session(self):\n return self.session_store.get_session()", "def _get_session_from_cache(thread_ident: int) -> requests.Session:\n return _GLOBAL_BACKEND_FACTORY()", "def _session(self):\n if self.session is None:\n self.session = create_session(self.config, self.auth)\n return self.session", "def get_session():\n session = scoped_session(sessionmaker(bind=engine))\n return session", "def get_session(self):\n return self.session", "def _get_session():\n api_version = \"1.0\"\n originator = \"salt_cloud_{}_driver\".format(__virtualname__)\n url = config.get_cloud_config_value(\n \"url\", get_configured_provider(), __opts__, search_global=False\n )\n user = config.get_cloud_config_value(\n \"user\", get_configured_provider(), __opts__, search_global=False\n )\n password = config.get_cloud_config_value(\n \"password\", get_configured_provider(), __opts__, search_global=False\n )\n ignore_ssl = config.get_cloud_config_value(\n \"ignore_ssl\",\n get_configured_provider(),\n __opts__,\n default=False,\n search_global=False,\n )\n try:\n session = XenAPI.Session(url, ignore_ssl=ignore_ssl)\n log.debug(\n \"url: %s user: %s password: %s, originator: %s\",\n url,\n user,\n \"XXX-pw-redacted-XXX\",\n originator,\n )\n session.xenapi.login_with_password(user, password, api_version, originator)\n except XenAPI.Failure as ex:\n pool_master_addr = str(ex.__dict__[\"details\"][1])\n slash_parts = url.split(\"/\")\n new_url = \"/\".join(slash_parts[:2]) + \"/\" + pool_master_addr\n session = XenAPI.Session(new_url)\n log.debug(\n \"session is -> url: %s user: %s password: %s, originator:%s\",\n new_url,\n user,\n \"XXX-pw-redacted-XXX\",\n originator,\n )\n session.xenapi.login_with_password(user, password, api_version, originator)\n return session", "def _instantiate_session(self, method_name, proxy=None, *args, **kwargs):\n if 'manager' in kwargs:\n session_class = getattr(kwargs['manager'], method_name)\n del kwargs['manager']\n else:\n session_class = getattr(self._provider_manager, method_name)\n if proxy is None:\n try:\n return session_class(bank_id=self._catalog_id, *args, **kwargs)\n except AttributeError:\n return session_class(*args, **kwargs)\n else:\n try:\n return session_class(bank_id=self._catalog_id, proxy=proxy, *args, **kwargs)\n except AttributeError:\n return session_class(proxy=proxy, *args, **kwargs)", "def get_current_session(self):\n if self.session is not None:\n return self.session\n else:\n return None", "def get_session():\n name = request.args.get('name')\n sch = Scheduler()\n return sch.get_session(name)", "def get_session():\n if 
MYSQL['username'] is None:\n raise ValueError(\"User name is mandatory\")\n\n if MYSQL['password'] is None:\n raise ValueError(\"Password is mandatory\")\n\n if MYSQL['host'] is None:\n raise ValueError(\"Host is mandatory\")\n\n if MYSQL['db_name'] is None:\n raise ValueError(\"Database Name is mandatory\")\n\n try:\n engine = create_engine(\n '{engine}://{username}:{password}@{host}/{db_name}'.format(**MYSQL),\n pool_size=MYSQL[\"pool_size\"],\n echo=MYSQL[\"debug\"]\n )\n\n session_factory = sessionmaker(bind=engine)\n sess = scoped_session(session_factory)\n return sess\n\n except Exception as err:\n print(err)\n exit()", "def get_session(self):\n session = Session(self.settings)\n self.sessions.append(session)\n return session", "def get_session(self):\n return ESSession(self)", "def new_session(self):\n return self._SessionLocal()" ]
[ "0.8087127", "0.6742886", "0.6699897", "0.66368324", "0.6628441", "0.6581126", "0.657559", "0.6502984", "0.649964", "0.64979595", "0.6478074", "0.64763755", "0.64591295", "0.64428717", "0.6420301", "0.63978803", "0.6389079", "0.6378353", "0.63697463", "0.6349902", "0.6341175", "0.63380677", "0.6329764", "0.6291548", "0.6272216", "0.62558645", "0.62336475", "0.6224768", "0.61988384", "0.6186142" ]
0.8100593
0
Gets the Id of this catalog.
def get_catalog_id(self): return self._catalog_id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def catalog_id(self):\n return self._catalog_id", "def getId(self):\n return _libsbml.Compartment_getId(self)", "def getId(self):\n\n return self.__id", "def get_id(self):\n return self._id", "def get_id(self):\n return self._id", "def get_id(self):\n return self._id", "def get_id(self):\n return self._id", "def get_id(self):\n return self._id", "def get_id(self):\n return self._id", "def table_catalog_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"table_catalog_id\")", "def get_id(self):\n if not self.nccl_id:\n logger.warning(\"The NCCL ID has not been \"\n \"set yet for store {}.\".format(self.name))\n return self.nccl_id", "def GetId(self):\r\n\r\n return self.id", "def get_id(self):\n return self.__id", "def get_id(self):\n return self.__id", "def get_id(self):\n\n\t\treturn self.__id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id", "def get_id(self):\n return self.id" ]
[ "0.8678884", "0.7484622", "0.73726004", "0.7355493", "0.7355493", "0.7355493", "0.7355493", "0.7355493", "0.7355493", "0.7279988", "0.72571033", "0.72358394", "0.72195864", "0.72195864", "0.7185139", "0.7183837", "0.7183837", "0.7183837", "0.7183837", "0.7183837", "0.7183837", "0.7183837", "0.7183837", "0.7183837", "0.7183837", "0.7183837", "0.7183837", "0.7183837", "0.7183837", "0.7183837" ]
0.86728644
1
Returns a list of all the endpoints currently being managed by the reactor.
def endpoint_list(self): _, body = self.request('/v1.1/endpoints', 'GET') return body
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def endpoints(self):\n return self[\"endpoints\"]", "def get_flask_endpoints(self):\n urls = self.endpoints.keys()\n return urls", "def endpoints(self):\n return self.settings[\"endpoints\"]", "def get_endpoints(self):\n return self.endpoints.values()", "async def Available_Endpoints() -> List[Dict[str, str]]:\n return [{\"path\": endpoint} for endpoint in busylightapi.endpoints]", "def get_endpoints(self):\r\n return ENDPOINTS", "def get_view_endpoints(self):\n return []", "def get_endpoints(self):\n url = self.urlobject_single.format(self._cb.credentials.org_key, self._model_unique_id) + \"/endpoints\"\n resp = self._cb.get_object(url)\n return resp.get(\"results\", [])", "def endpoints(self) -> pulumi.Input[Sequence[pulumi.Input['EndpointDependencyArgs']]]:\n return pulumi.get(self, \"endpoints\")", "def endpoints(self) -> Optional[Sequence['outputs.EndpointResponse']]:\n return pulumi.get(self, \"endpoints\")", "def endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LiveEventEndpointArgs']]]]:\n return pulumi.get(self, \"endpoints\")", "def endpoints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['LiveEventEndpointArgs']]]]:\n return pulumi.get(self, \"endpoints\")", "def actions(self) -> List[str]:\n return list(self.__endpoints.keys())", "def get_endpoints(self, **kwargs):\n return self._database.lookup('endpoint', kwargs)", "def list_resolver_endpoints(MaxResults=None, NextToken=None, Filters=None):\n pass", "def get_rest_handlers(self):\n return self._rest_handlers", "def api_endpoints(self):\n # This result appears to be a PaginatedList, but objects in the list don't\n # have IDs and can't be retrieved on their own, and it doesn't accept normal\n # pagination properties, so we're converting this to a list of strings.\n if not hasattr(self, \"_api_endpoints\"):\n results = self._client.get(\n \"{}/api-endpoints\".format(LKECluster.api_endpoint), model=self\n )\n\n self._api_endpoints = [MappedObject(**c) for c in results[\"data\"]]\n\n return self._api_endpoints", "def _get_app_endpoints():\n endpoints = {\n (r'/', handlers.HeartbeatRequestHandler),\n (r'/1/issue/retrieve', handlers.RetrieveRequestHandler),\n (r'/1/issue/search', handlers.SearchRequestHandler),\n (r'/1/issue/search/setup', handlers.SearchSetupRequestHandler),\n }\n\n log(\"Endpoint to handler mappings:\")\n for url, handler in sorted(endpoints, key=lambda ep: ep[0]):\n log(\"{0} ---> {1}\".format(url, handler))\n\n return endpoints", "def list_endpoints(self):\n resp, body = self.get(\"endpoints\")\n body = self._parse_array(etree.fromstring(body))\n return resp, body", "def exposed_services(self):\n return self._exposed_services", "async def connections_endpoints(request: web.BaseRequest):\n context: AdminRequestContext = request[\"context\"]\n connection_id = request.match_info[\"conn_id\"]\n\n profile = context.profile\n connection_mgr = ConnectionManager(profile)\n try:\n endpoints = await connection_mgr.get_endpoints(connection_id)\n except StorageNotFoundError as err:\n raise web.HTTPNotFound(reason=err.roll_up) from err\n except (BaseModelError, StorageError, WalletError) as err:\n raise web.HTTPBadRequest(reason=err.roll_up) from err\n\n return web.json_response(dict(zip((\"my_endpoint\", \"their_endpoint\"), endpoints)))", "def endpoints(self):\n config = self.device.get_active_configuration()\n interface = config.interfaces()[0]\n endpoints = interface.endpoints()\n return [endpoint.bEndpointAddress for endpoint in endpoints]", "def routes(self):\n return self._routes", "def 
get_imported_endpoints(self):\n with self.__import_lock:\n return [reg.get_import_reference() for reg in self.__imported_regs]", "def get_path_endpoints(self):\n endpoints = []\n\n # Get the far end of the last path segment\n path, split_ends, position_stack = self.trace()\n endpoint = path[-1][2]\n if split_ends is not None:\n for termination in split_ends:\n endpoints.extend(termination.get_path_endpoints())\n elif endpoint is not None:\n endpoints.append(endpoint)\n\n return endpoints", "def service_endpoints(self) -> Optional[Sequence['outputs.ServiceEndpointPropertiesFormatResponse']]:\n return pulumi.get(self, \"service_endpoints\")", "def getRoutes(self):\n pass", "def list(self):\n return self.connection.get(self.service)", "def urls(self):\n return self._list_urls()", "def api_endpoints(self):\n endpoints = [\n furl(path=('index', entity_type), args={'size': '1'})\n for entity_type in self.entity_types\n ]\n with ThreadPoolExecutor(len(endpoints)) as tpe:\n status = dict(tpe.map(self._api_endpoint, endpoints))\n status['up'] = all(v['up'] for v in status.values())\n return status" ]
[ "0.77835125", "0.7656806", "0.75507295", "0.75212026", "0.7391385", "0.7229766", "0.7081538", "0.7042687", "0.7039604", "0.7015201", "0.66251105", "0.66251105", "0.65999174", "0.65949714", "0.65367913", "0.6516309", "0.651511", "0.6484123", "0.648212", "0.6472074", "0.64654714", "0.643638", "0.64343184", "0.6419939", "0.6379228", "0.6363228", "0.6329578", "0.62357503", "0.62203884", "0.6176008" ]
0.7719079
1
Create the endpoint using the given configuration.
def endpoint_create(self, endpoint_name=None, config=None): if config is None: raise Exception("Config required!") if endpoint_name is None: self.request('/v1.1/endpoint', 'PUT', body=config) else: self.request('/v1.1/endpoints/%s' % endpoint_name, 'PUT', body=config)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_endpoint(EndpointName=None, EndpointConfigName=None, Tags=None):\n pass", "def create_endpoint_config(EndpointConfigName=None, ProductionVariants=None, Tags=None, KmsKeyId=None):\n pass", "def build_endpoint(self, **kwargs):\n\n raise NotImplementedError()", "def create_endpoint(self, endpoint_id, endpoint_ref):\n raise exception.NotImplemented() # pragma: no cover", "def create_endpoint(coriolis, name, platform_type, connection_info,\n barbican=None, description=''):\n # check provider type is installed server-side:\n providers_dict = coriolis.providers.list().to_dict()\n if platform_type not in providers_dict:\n raise ValueError(\n 'platform_type must be one of %s' % providers_dict.keys())\n\n # if Barbican is available, store the connection info in it:\n if barbican:\n secret_ref = store_barbican_secret_for_coriolis(\n barbican, connection_info, name='Coriolis Endpoint %s' % name)\n connection_info = {'secret_ref': secret_ref}\n\n # create the endpoint:\n endpoint = coriolis.endpoints.create(\n name, platform_type, connection_info, description)\n\n return endpoint", "def create_endpoint(path, workspace):\n client = Client()\n\n client.create_endpoint(path, workspace=workspace)", "def create_endpoint(request):\n service_name = request.registry.settings['service_name']\n host = socket.gethostbyname(socket.gethostname())\n port = request.server_port\n\n # Convert ip address to network byte order\n ipv4 = struct.unpack('!i', socket.inet_aton(host))[0]\n port = int(port)\n # Zipkin passes unsigned values in signed types because Thrift has no\n # unsigned types, so we have to convert the value.\n port = struct.unpack('h', struct.pack('H', port))[0]\n\n return zipkin_core.Endpoint(\n ipv4=ipv4, port=port, service_name=service_name)", "def _create(self, **kwargs):\n\n config = misc_utils.resolve_config(\n kwargs.pop('config', None),\n kwargs.pop('config_file', None)\n )\n\n return self._make_request(method='POST', config=config)", "def create_endpoint(self, service_id, interface, url, **kwargs):\n region = kwargs.get('region', None)\n if 'force_enabled' in kwargs:\n enabled = kwargs['force_enabled']\n else:\n enabled = kwargs.get('enabled', None)\n if enabled is not None:\n enabled = str(enabled).lower()\n create_endpoint = common.Element(\"endpoint\",\n xmlns=XMLNS,\n service_id=service_id,\n interface=interface,\n url=url, region=region,\n enabled=enabled)\n resp, body = self.post('endpoints',\n str(common.Document(create_endpoint)))\n body = self._parse_body(etree.fromstring(body))\n return resp, body", "def create_endpoint(*args):\n endpoint = ''\n for arg in args:\n endpoint = endpoint + str(arg) + \"/\"\n endpoint = endpoint[:-1]\n endpoint = endpoint + \".json\"\n return endpoint", "def open(self, configuration: Union[str, Tuple[str, str]], create: bool = False) -> None: # type: ignore[no-redef] # noqa: F811\n\n if create:\n raise Exception(\"Cannot create a SPARQL Endpoint\")\n\n if isinstance(configuration, tuple):\n self.query_endpoint = configuration[0]\n if len(configuration) > 1:\n self.update_endpoint = configuration[1]\n else:\n self.query_endpoint = configuration\n self.update_endpoint = configuration", "def create_config(self) -> None:\n pass", "def create_config(self) -> None:\n pass", "def open(\n self, configuration: Union[str, Tuple[str, str]], create: bool = False\n ) -> None:\n if type(configuration) == str: # noqa: E721\n self.query_endpoint = configuration\n elif type(configuration) == tuple:\n self.query_endpoint = configuration[0]\n self.update_endpoint 
= configuration[1]\n else:\n raise Exception(\n \"configuration must be either a string (a single query endpoint URI) \"\n \"or a tuple (a query/update endpoint URI pair)\"\n )", "def create_dev_endpoint(self):\n\n self.dev_endpoint = self.glue_engine.create_dev_endpoint(\n EndpointName=self.dev_endpoint_name,\n RoleArn=self.dev_endpoint_role,\n PublicKey=self.dev_endpoint_pub_rsa,\n NumberOfNodes=2,\n ExtraPythonLibsS3Path=self.python_library,\n GlueVersion=\"1.0\",\n Arguments={\"GLUE_PYTHON_VERSION\": \"3\"})", "def create_blueprint(self, **options):\n if hasattr(self.config, \"url_prefix\"):\n options.setdefault(\"url_prefix\", self.config.url_prefix)\n return Blueprint(self.config.blueprint_name, __name__, **options)", "def api_factory(config):\n return SdkApi(config.get('apiKey'),\n sdk_api_base_url=config['sdkApiBaseUrl'],\n events_api_base_url=config['eventsApiBaseUrl'],\n split_sdk_machine_name=config['splitSdkMachineName'],\n split_sdk_machine_ip=config['splitSdkMachineIp'],\n connect_timeout=config['connectionTimeout'],\n read_timeout=config['readTimeout'])", "def CreateConfig(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def _create_config(self, body=None):\n request = Request.blank(self.uri_config, headers=[self.auth_header])\n request.method = 'POST'\n request.body = body\n return request.get_response(application)", "def create(self, config):\n\n assert config[\"name\"] == self.name, \"Given config is not for this template\"\n\n data = self._json_encode(config)\n headers = self._default_headers()\n\n return self._request(\"\",\n ok_status=None,\n data=data,\n headers=headers)", "def _custom_endpoint_init(self, node_config, *argv):\n config = {}\n selector = node_config.get('template', None)\n if not selector:\n selector = argv[0]\n _LOGGER.debug(\" selector: %s\", selector)\n config = {\n \"config_report\": [\n [0x0001, 0x0020, 60, 3600, 5], \n [0x0001, 0x0021, 60, 3600, 5]\n ],\n \"in_cluster\": [0x0000, 0x0001, 0x0500, ],\n \"out_cluster\": [0x0500],\n \"type\": \"binary_sensor\",\n }\n self.add_input_cluster(0x0500)\n self.add_output_cluster(0x0500)", "def init_endpoint_dir(self, endpoint_config=None):\n\n endpoint_dir = os.path.join(self.funcx_dir, self.name)\n self.logger.debug(f\"Creating endpoint dir {endpoint_dir}\")\n os.makedirs(endpoint_dir, exist_ok=True)\n\n endpoint_config_target_file = os.path.join(endpoint_dir, self.funcx_config_file_name)\n if endpoint_config:\n shutil.copyfile(endpoint_config, endpoint_config_target_file)\n return endpoint_dir\n\n endpoint_config = endpoint_default_config.__file__\n with open(endpoint_config) as r:\n endpoint_config_template = Template(r.read())\n\n endpoint_config_template = endpoint_config_template.substitute(name=self.name)\n with open(endpoint_config_target_file, \"w\") as w:\n w.write(endpoint_config_template)\n\n return endpoint_dir", "def create(cls,configuration):\n raise NotImplementedError('Abstract method has not been implemented')", "def create_host(self, conf, tenant_id, network_id, params):\n\t\tpass", "def endpoint_update(self, endpoint_name=None, config=None):\n if config is None:\n raise Exception(\"Config required!\")\n if endpoint_name is None:\n self.request('/v1.1/endpoint', 'POST', body=config)\n else:\n self.request('/v1.1/endpoints/%s' % endpoint_name, 'POST', body=config)", "def __init__(self, endpoint_a, endpoint_b):\n self.endpoint_a = endpoint_a\n self.endpoint_b 
= endpoint_b", "def update_endpoint(EndpointName=None, EndpointConfigName=None):\n pass", "def _create_listening_endpoint(self, **kwargs):\n eptypestr = CFG.get_safe('container.messaging.endpoint.proc_listening_type', None)\n if eptypestr is not None:\n module, cls = eptypestr.rsplit('.', 1)\n mod = __import__(module, fromlist=[cls])\n eptype = getattr(mod, cls)\n ep = eptype(**kwargs)\n else:\n conv_enabled = CFG.get_safe('container.messaging.endpoint.rpc_conversation_enabled', False)\n if conv_enabled:\n ep = ConversationRPCServer(**kwargs)\n else:\n ep = ProcessRPCServer(**kwargs)\n return ep", "def create_container(self, hostname, container_id, endpoint):\n\n endpoint_path = ENDPOINT_PATH % {\"hostname\": hostname,\n \"container_id\": container_id,\n \"endpoint_id\": endpoint.id}\n\n _log.info(\"Creating endpoint at %s\", endpoint_path)\n try:\n self.client.write(endpoint_path + \"addrs\", json.dumps(endpoint.addrs))\n self.client.write(endpoint_path + \"mac\", endpoint.mac)\n self.client.write(endpoint_path + \"state\", endpoint.state)\n except etcd.EtcdException as e:\n _log.exception(\"Hit Exception %s writing to etcd.\", e)\n pass", "def build_from_configuration_parameters(cls, config_filename, args=None):\n\n # XXX: in case the user doesn't specify a name,\n # this will help auto-generate unique ones in a sequence.\n global __scale_client_n_anon_apps_added__\n __scale_client_n_anon_apps_added__ = 0\n\n if config_filename is None and args is None:\n raise ValueError(\"can't build from configuration parameters when both filename and args are None!\")\n\n # Dummy config dict in case no config file\n cfg = {'eventsinks': {}, 'sensors': {}, 'applications': {}, 'networks': {}}\n\n if config_filename is not None:\n try:\n cfg = cls.load_configuration_file(config_filename)\n # log.debug(\"Final configuration: %s\" % cfg)\n except IOError as e:\n log.error(\"Error reading config file: %s\" % e)\n exit(1)\n\n def __make_event_sink(_class, broker, event_reporter, **config):\n res = _class(broker, **config)\n event_reporter.add_sink(res)\n return res\n\n ### BEGIN ACTUAL CONFIG FILE USAGE\n # We call appropriate handlers for each section in the appropriate order,\n # starting by getting any relevant command line parameters to create the client.\n\n client = cls(quit_time=args.quit_time, raise_errors=args.raise_errors)\n\n # TODO: include command line arguments when some are added\n if 'main' in cfg:\n client.setup_broker(cfg['main'])\n client.setup_reporter(cfg['main'])\n else: # use defaults\n client.setup_broker({})\n client.setup_reporter({})\n\n # These components are all handled almost identically.\n\n # EventSinks\n configs = cls.__join_configs_with_args(cfg.get('eventsinks', {}), args.event_sinks \\\n if args is not None and args.event_sinks is not None else [])\n client.setup_components(configs, 'scale_client.event_sinks', __make_event_sink, client.__reporter)\n\n # Set defaults if none were made\n if len(client.__reporter.get_sinks()) == 0:\n log.info(\"No event_sinks loaded: adding default LogEventSink\")\n LogEventSink = None\n try:\n from ..event_sinks.log_event_sink import LogEventSink\n except ValueError:\n # relative import error when this script called directly (isn't a package)\n try:\n from scale_client.event_sinks.log_event_sink import LogEventSink\n except ImportError as e:\n log.error(\"can't import LogEventSink! 
Error: %s\" % e)\n exit(1)\n default_sink = LogEventSink(client.__broker)\n client.__reporter.add_sink(default_sink)\n\n # Sensors\n log.info(\"Setting up Sensors...\")\n configs = cls.__join_configs_with_args(cfg.get('sensors', {}), args.sensors \\\n if args is not None and args.sensors is not None else [], \"anon_vs\")\n client.setup_sensors(configs)\n\n # Networks\n log.info(\"Setting up Networks...\")\n configs = cls.__join_configs_with_args(cfg.get('networks', {}), args.networks \\\n if args is not None and args.networks is not None else [], \"anon_network_app\")\n client.setup_networks(configs)\n\n # Applications\n log.info(\"Setting up other Applications...\")\n configs = cls.__join_configs_with_args(cfg.get('applications', {}), args.applications \\\n if args is not None and args.applications is not None else [])\n client.setup_applications(configs)\n\n # TODO: set some defaults if no applications, sensors, or networking components are enabled (heartbeat?)\n\n return client" ]
[ "0.7641476", "0.70716697", "0.6865303", "0.67652607", "0.6601517", "0.6475733", "0.64034057", "0.6290825", "0.62038696", "0.6139458", "0.6135826", "0.6062212", "0.6062212", "0.5979539", "0.58875746", "0.58744985", "0.5861857", "0.58543056", "0.58315206", "0.58291125", "0.5825837", "0.58047247", "0.57984245", "0.57284224", "0.56876737", "0.562606", "0.55836123", "0.5581041", "0.55745524", "0.5528729" ]
0.76554227
0
Update the endpoint using the given configuration.
def endpoint_update(self, endpoint_name=None, config=None): if config is None: raise Exception("Config required!") if endpoint_name is None: self.request('/v1.1/endpoint', 'POST', body=config) else: self.request('/v1.1/endpoints/%s' % endpoint_name, 'POST', body=config)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_endpoint(EndpointName=None, EndpointConfigName=None):\n pass", "def update_endpoint(self, endpoint_id, endpoint_ref):\n raise exception.NotImplemented() # pragma: no cover", "def update_endpoint(self, endpoint):\n exists = self.get_endpoint(endpoint)\n if exists:\n self.delete_endpoint(endpoint)\n self.add_endpoint(endpoint)", "def UpdateConfig(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def update_config(self, config):\n return self._update_config(\"config\", config)", "def _update(self, **kwargs):\n\n resource_name = self._get_resource_name(**kwargs)\n config = misc_utils.resolve_config(\n kwargs.pop('config', None),\n kwargs.pop('config_file', None)\n )\n\n return self._make_request(\n uri='%s/%s' % (self._metadata['uri'], resource_name),\n method='PUT',\n config=config\n )", "def update_endpoint(self, endpoint_id, service_id=None, interface=None,\n url=None, region=None, enabled=None, **kwargs):\n doc = common.Document()\n endpoint = common.Element(\"endpoint\")\n doc.append(endpoint)\n\n if service_id:\n endpoint.add_attr(\"service_id\", service_id)\n if interface:\n endpoint.add_attr(\"interface\", interface)\n if url:\n endpoint.add_attr(\"url\", url)\n if region:\n endpoint.add_attr(\"region\", region)\n\n if 'force_enabled' in kwargs:\n endpoint.add_attr(\"enabled\", kwargs['force_enabled'])\n elif enabled is not None:\n endpoint.add_attr(\"enabled\", str(enabled).lower())\n\n resp, body = self.patch('endpoints/%s' % str(endpoint_id), str(doc))\n body = self._parse_body(etree.fromstring(body))\n return resp, body", "def update(cls, webhook_endpoint_id, url=None, events=None, status=None):\n data = {}\n if url:\n data['url'] = url\n if events:\n data['events'] = events\n if status:\n data['status'] = status\n return WebhookEndpoint(Requester.patch(cls.endpoint + '/' + webhook_endpoint_id, data=data))", "def update_endpoint_in_sipserver(self, endpoint: str, password: str) -> None:", "def config_update(self, update: io.BytesIO) -> None:\n self.__logger.debug('Eva.config_update called')\n return self.__http_client.config_update(update)", "def update(self, descriptor_msg, auth=None):\n request_args = self._make_request_args(descriptor_msg, auth)\n try:\n response = requests.put(**request_args)\n response.raise_for_status()\n except requests.exceptions.ConnectionError as e:\n msg = \"Could not connect to restconf endpoint: %s\" % str(e)\n self._log.error(msg)\n raise UpdateError(msg) from e\n except requests.exceptions.HTTPError as e:\n msg = \"PUT request to %s error: %s\" % (request_args[\"url\"], response.text)\n self._log.error(msg)\n raise UpdateError(msg) from e\n except requests.exceptions.Timeout as e:\n msg = \"Timed out connecting to restconf endpoint: %s\", str(e)\n self._log.error(msg)\n raise UpdateError(msg) from e", "def update_resolver_endpoint(ResolverEndpointId=None, Name=None):\n pass", "def conf_update(self):\n pass", "def update(self, config_dict):\r\n self._update(config_dict, allow_new_keys=True)", "def set_endpoint(endpoint_url):\n log.info(\"Called set_endpoint with args %s\", locals())\n if 'cb/api' in endpoint_url:\n log.debug(\"Setting Cloudbreak endpoint to %s\", endpoint_url)\n this_config = config.cb_config\n elif ':7189' in endpoint_url:\n log.debug(\"Setting Altus Director endpoint to %s\", endpoint_url)\n this_config = config.cd_config\n else:\n raise ValueError(\"Unrecognised API Endpoint\")\n try:\n if 
this_config.api_client:\n log.debug(\"Found Active API Client, updating...\")\n this_config.api_client.host = endpoint_url\n except AttributeError:\n log.debug(\"No Active API Client found to update\")\n this_config.host = endpoint_url\n if this_config.host == endpoint_url:\n return True\n return False", "def fusion_api_update_sflow_configuration(self, body=None, uri=None, api=None, headers=None):\n param = '/sflow-configuration'\n return self.li.update(body=body, uri=uri, api=api, headers=headers, param=param)", "def do_PUT(self):\n if self.path == \"/set_config\" or self.path == \"/set_config/\":\n\n if len(self.post_dict) > 0:\n for key, value in self.post_dict.items():\n\n self.log_message(f\"Set config '{key}' to '{value}'\")\n\n try:\n value = json.loads(value)\n\n except ValueError:\n self.log_message(f\"Could not parse JSON: {value}\")\n self.send_response(400)\n\n else:\n self.server.config[key] = value\n self.send_response(200)\n\n # No parameters sent to configure, so return success by default\n else:\n self.send_response(200)\n\n else:\n self.send_response(404)", "def update_config(self, config):\n self.config = {\n \"display_name\": \"\",\n \"description\": \"\",\n \"required\": 1,\n \"type\": \"string\"\n }\n self.config.update(config)\n self.API_KEY = self.config['key']", "def update_config(self, config):\n self.config = {\n \"display_name\": \"\",\n \"description\": \"\",\n \"required\": 1,\n \"type\": \"string\"\n }\n self.config.update(config)\n self.API_KEY = self.config['key']", "def update(self, config_dict):\n self._update(config_dict, allow_new_keys=True)", "def endpoint_create(self, endpoint_name=None, config=None):\n if config is None:\n raise Exception(\"Config required!\")\n if endpoint_name is None:\n self.request('/v1.1/endpoint', 'PUT', body=config)\n else:\n self.request('/v1.1/endpoints/%s' % endpoint_name, 'PUT', body=config)", "def fusion_api_reapply_interconnect_configuration(self, uri, api=None, headers=None):\n param = '/configuration'\n return self.ic.put(body=None, uri=uri, api=api, headers=headers, param=param)", "def fusion_api_reapply_li_configuration(self, uri, api=None, headers=None):\n param = '/configuration'\n return self.li.update(body=None, uri=uri, api=api, headers=headers, param=param)", "def do_PUT(self):\r\n if self.path == \"/set_config\" or self.path == \"/set_config/\":\r\n\r\n if len(self.post_dict) > 0:\r\n for key, value in self.post_dict.iteritems():\r\n\r\n # Decode the params as UTF-8\r\n try:\r\n key = unicode(key, 'utf-8')\r\n value = unicode(value, 'utf-8')\r\n except UnicodeDecodeError:\r\n self.log_message(\"Could not decode request params as UTF-8\")\r\n\r\n self.log_message(u\"Set config '{0}' to '{1}'\".format(key, value))\r\n\r\n try:\r\n value = json.loads(value)\r\n\r\n except ValueError:\r\n self.log_message(u\"Could not parse JSON: {0}\".format(value))\r\n self.send_response(400)\r\n\r\n else:\r\n self.server.config[key] = value\r\n self.send_response(200)\r\n\r\n # No parameters sent to configure, so return success by default\r\n else:\r\n self.send_response(200)\r\n\r\n else:\r\n self.send_response(404)", "def update_host_config(self, hostid, config, **kwargs):\n pass", "def configure(self, config: dict):\n self.config.update(config)", "def update(self, config):\n if not isinstance(config, dict):\n raise ValueError(\"Argument `config` should be dictionary\")\n self.__data.update(config)", "def setEndpoint(self, endpoint):\n self.__lockobj.acquire()\n self.__endpoints[endpoint.getEndpoint()] = endpoint\n 
self.__lockobj.acquire()", "def update(self) -> None:\n self._gateway.update()", "def EditEphemeralConfig(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')" ]
[ "0.8008503", "0.7051678", "0.70378244", "0.67874885", "0.67596745", "0.65136", "0.6494991", "0.6477921", "0.6355377", "0.6339313", "0.63132703", "0.6277759", "0.62509173", "0.61962646", "0.6189687", "0.6161519", "0.6137492", "0.60737664", "0.60737664", "0.6072482", "0.60686666", "0.6045887", "0.6030523", "0.60024136", "0.59699684", "0.59606963", "0.5940947", "0.59019744", "0.58748156", "0.5768451" ]
0.84411603
0
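A minimal usage sketch for the `endpoint_update` document above, assuming a `client` instance that exposes the method; the endpoint name and payload keys are invented for illustration.

```python
def reconfigure_endpoint(client, name="web-tier"):
    # The payload keys are illustrative; the method only checks that
    # config is not None before POSTing it.
    config = {"url": "http://10.0.0.5:8080", "weight": 2}
    # With a name this targets /v1.1/endpoints/web-tier; with
    # name=None it would hit the default /v1.1/endpoint route.
    client.endpoint_update(endpoint_name=name, config=config)
```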
Return the endpoint's configuration.
def endpoint_config(self, endpoint_name=None): if endpoint_name is None: _, body = self.request('/v1.1/endpoint', 'GET') else: _, body = self.request('/v1.1/endpoints/%s' % endpoint_name, 'GET') return body
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_config(self):\n return self.config", "def get_configuration(self) -> dict:\n return self.config", "def _est_config(self):\n return self._est_method.config", "def describe_endpoint_config(EndpointConfigName=None):\n pass", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config" ]
[ "0.7586859", "0.75094473", "0.7406959", "0.7339215", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158", "0.7338158" ]
0.82205737
0
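The two routes taken by `endpoint_config` are easier to see in a small sketch; `client` is again an assumed instance, and the behavior shown follows directly from the method body.

```python
def show_endpoint_config(client, name=None):
    # name=None issues GET /v1.1/endpoint; a concrete name issues
    # GET /v1.1/endpoints/<name>. Either way the parsed body is returned.
    body = client.endpoint_config(endpoint_name=name)
    print(body)
    return body
```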
Returns a list of all configured managers.
def manager_configs_list(self): _, body = self.request('/v1.1/managers/configs', 'GET') return body
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def manager_active_list(self):\n _, body = self.request('/v1.1/managers/active', 'GET')\n return body", "def manager_agents(self):\n return self.get(\"manager_agents\")", "def list_podmanager(cls):\n return cls.dbdriver.list_podmanager()", "def get_managers_list(self):\n try:\n role_id = [x[0] for x in self.db_handler.get_roles_list() if x[1] == 'Менеджер'][0]\n staff_by_role = self.db_handler.get_all_staff_by_role_id(role_id)\n\n self.logger.write_to_log('managers list got', 'model')\n\n return staff_by_role\n except Exception as err:\n method_name = sys._getframe().f_code.co_name\n\n self.logger.write_to_log('exception', 'model')\n self.logger.write_to_err_log(f'exception in method {method_name} - {err}', 'model')", "def add_managers(self, managers: Union[List[Any], Tuple[Any]]):\n for m in self._flatten(managers):\n self.apply_configuration_defaults(m)\n self._managers.add(m)", "def get_managers():\n return {'managers': get_users('managers')}", "def antenny_list_configs(self):\n return self.antenny_config.list_configs()", "def get_system_managers(only_name: bool = False) -> list[str]:\n\tHasRole = DocType(\"Has Role\")\n\tUser = DocType(\"User\")\n\n\tif only_name:\n\t\tfields = [User.name]\n\telse:\n\t\tfields = [User.full_name, User.name]\n\n\tsystem_managers = (\n\t\tfrappe.qb.from_(User)\n\t\t.join(HasRole)\n\t\t.on(HasRole.parent == User.name)\n\t\t.where(\n\t\t\t(HasRole.parenttype == \"User\")\n\t\t\t& (User.enabled == 1)\n\t\t\t& (HasRole.role == \"System Manager\")\n\t\t\t& (User.docstatus < 2)\n\t\t\t& (User.name.notin(frappe.STANDARD_USERS))\n\t\t)\n\t\t.select(*fields)\n\t\t.orderby(User.creation, order=Order.desc)\n\t\t.run(as_dict=True)\n\t)\n\n\tif only_name:\n\t\treturn [p.name for p in system_managers]\n\telse:\n\t\treturn [formataddr((p.full_name, p.name)) for p in system_managers]", "def gather_configs(self):\n configs = []\n for what in self.order:\n for key in self.plugins[what]:\n mgr = self.plugins[what][key]\n c = mgr.config(what='get')\n if c is not None:\n c.update({\n 'description': mgr.description\n })\n # print(\"Gathering configuration from \", c)\n configs.append(c)\n return configs", "def get_instance_group_managers(self, zone):\n response = self.call_api('/zones/%s/instanceGroupManagers' % zone)\n return {manager['name']: manager for manager in response.get('items', [])}", "def manager_config(self, manager):\n _, body = self.request('/v1.1/managers/configs/%s' % manager, 'GET')\n return body", "def propertyManagers(self) -> Iterator[unicode]:\n ...", "def getAllFlightServiceManagers(self):\n staffObject_list = self.mainObject.getStaffIO()\n flightServiceManagerObject_list = []\n for staffMember in staffObject_list:\n if staffMember.getRank() == 'flight service manager':\n flightServiceManagerObject_list.append(staffMember)\n return flightServiceManagerObject_list", "def getPackageManager(self) -> None:\n\t\tfor pkgmgr in config.SUPPORTED_PACKAGE_MGRS:\n\t\t\tif subprocess.run([\"which\", pkgmgr]).returncode == 0:\n\t\t\t\tself.package_manager = pkgmgr\n\t\t\t\treturn\n\t\tlogger.error(\"Supported package manager not found, aborting.\")\n\t\traise ValueError(\"Package manager unsupported\")", "def get_all(self, context, type_):\n types = None\n if type_ and isinstance(type_, basestring):\n types = type_.strip(\",\").split(\",\")\n\n try:\n db_resource_mgrs_data = self.db_api.get_all_resource_managers(\n context, types=types)\n\n _resource_mgrs_data = []\n for db_resource_mgr_data in db_resource_mgrs_data:\n _resource_mgrs_data.append(_make_response(\n 
db_resource_mgr_data))\n except Exception as e:\n msg = (\"Error retrieving the 'resource managers' reason : %s\"\n % e.message)\n LOG.exception(msg)\n raise exception.RetrieveException(e.message)\n return _resource_mgrs_data", "def GetManager(self):\r\n\r\n return self.manager", "def get_all():\n if not SERVICE_DIR:\n raise CommandExecutionError(\"Could not find service directory.\")\n # - List all daemontools services in\n return sorted(os.listdir(SERVICE_DIR))", "def configs(self):\n\n return self.__configs", "def configs(self):\n return self._configs", "def get_all_providers() -> list[str]:\n return list(ALL_PROVIDERS)", "def models(self):\n return self.config.models()", "def get_admins(self):\n from Employee import Employee\n admins = list()\n cursorRoles = self.dbconnect.get_cursor()\n cursorRoles.execute('select * from employeeRoles where role=\\'admin\\'')\n for row in cursorRoles:\n admins.append(self.get_employee(row[0]))\n return admins", "def get_known_repos() -> List[str]:\n return [db.name for db in PacmanConfig(conf=\"/etc/pacman.conf\").initialize_alpm().get_syncdbs()]", "def manufacturers(self):\n return self._manufacturers", "def getTimekprLoginManagers():\n global _loginManagers\n return(_loginManagers)", "def get_available_package_manager(self):\n for manager in self.package.keys():\n try:\n executable = self.SUPPORTED_PACKAGE_MANAGERS[manager]\n if is_executable_exists(executable):\n return manager\n except KeyError:\n raise NotImplementedError(\"{} is not supported\".format(manager))\n raise NotImplementedError(\"This system doesn't have any of the \"\n 'supported package manager(s): '\n '{}'.format(','.join(self.package.keys())))", "def manager_info(self, manager):\n _, body = self.request('/v1.1/managers/active/%s' % manager, 'GET')\n return body", "def _configure_managers() -> Tuple[tff.simulation.FileCheckpointManager,\n List[tff.simulation.MetricsManager]]:\n root_output_dir = FLAGS.root_output_dir\n experiment_name = FLAGS.experiment_name\n utils_impl.create_directory_if_not_exists(root_output_dir)\n\n checkpoint_dir = os.path.join(root_output_dir, 'checkpoints', experiment_name)\n utils_impl.create_directory_if_not_exists(checkpoint_dir)\n checkpoint_manager = tff.simulation.FileCheckpointManager(\n checkpoint_dir, step=FLAGS.rounds_per_checkpoint)\n\n results_dir = os.path.join(root_output_dir, 'results', experiment_name)\n utils_impl.create_directory_if_not_exists(results_dir)\n csv_file = os.path.join(results_dir, 'experiment.metrics.csv')\n csv_manager = tff.simulation.CSVMetricsManager(csv_file)\n\n summary_dir = os.path.join(root_output_dir, 'logdir', experiment_name)\n tensorboard_manager = tff.simulation.TensorBoardManager(summary_dir)\n\n logging.info('Writing...')\n logging.info(' checkpoints to: %s', checkpoint_dir)\n logging.info(' CSV metrics to: %s', csv_file)\n logging.info(' TensorBoard summaries to: %s', summary_dir)\n\n return checkpoint_manager, [csv_manager, tensorboard_manager]", "def configs(self) -> list[Config]:\n return self._configs", "def get_all_environments():\n return ENVIRONMENTS" ]
[ "0.6901478", "0.67316854", "0.65988505", "0.6525097", "0.646066", "0.6366126", "0.633342", "0.62295026", "0.61671317", "0.61062044", "0.6025322", "0.6015233", "0.59620667", "0.58453923", "0.5841155", "0.5767544", "0.57509774", "0.57332975", "0.5725349", "0.57098037", "0.56826246", "0.56759924", "0.56687045", "0.5663942", "0.56470686", "0.56378114", "0.5637646", "0.5612019", "0.55964607", "0.55881065" ]
0.8021785
0
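A sketch of iterating the result of `manager_configs_list`, under the assumption that the returned body is a plain list of manager names; nothing in the dataset row guarantees the body's shape.

```python
def dump_manager_configs(client):
    # GET /v1.1/managers/configs; each entry is assumed to be a name.
    for manager in client.manager_configs_list():
        print("configured manager:", manager)
```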
Returns a list of all running managers.
def manager_active_list(self): _, body = self.request('/v1.1/managers/active', 'GET') return body
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_podmanager(cls):\n return cls.dbdriver.list_podmanager()", "def manager_configs_list(self):\n _, body = self.request('/v1.1/managers/configs', 'GET')\n return body", "def getAllFlightServiceManagers(self):\n staffObject_list = self.mainObject.getStaffIO()\n flightServiceManagerObject_list = []\n for staffMember in staffObject_list:\n if staffMember.getRank() == 'flight service manager':\n flightServiceManagerObject_list.append(staffMember)\n return flightServiceManagerObject_list", "def manager_agents(self):\n return self.get(\"manager_agents\")", "def get_managers_list(self):\n try:\n role_id = [x[0] for x in self.db_handler.get_roles_list() if x[1] == 'Менеджер'][0]\n staff_by_role = self.db_handler.get_all_staff_by_role_id(role_id)\n\n self.logger.write_to_log('managers list got', 'model')\n\n return staff_by_role\n except Exception as err:\n method_name = sys._getframe().f_code.co_name\n\n self.logger.write_to_log('exception', 'model')\n self.logger.write_to_err_log(f'exception in method {method_name} - {err}', 'model')", "def get_system_managers(only_name: bool = False) -> list[str]:\n\tHasRole = DocType(\"Has Role\")\n\tUser = DocType(\"User\")\n\n\tif only_name:\n\t\tfields = [User.name]\n\telse:\n\t\tfields = [User.full_name, User.name]\n\n\tsystem_managers = (\n\t\tfrappe.qb.from_(User)\n\t\t.join(HasRole)\n\t\t.on(HasRole.parent == User.name)\n\t\t.where(\n\t\t\t(HasRole.parenttype == \"User\")\n\t\t\t& (User.enabled == 1)\n\t\t\t& (HasRole.role == \"System Manager\")\n\t\t\t& (User.docstatus < 2)\n\t\t\t& (User.name.notin(frappe.STANDARD_USERS))\n\t\t)\n\t\t.select(*fields)\n\t\t.orderby(User.creation, order=Order.desc)\n\t\t.run(as_dict=True)\n\t)\n\n\tif only_name:\n\t\treturn [p.name for p in system_managers]\n\telse:\n\t\treturn [formataddr((p.full_name, p.name)) for p in system_managers]", "def get_instance_group_managers(self, zone):\n response = self.call_api('/zones/%s/instanceGroupManagers' % zone)\n return {manager['name']: manager for manager in response.get('items', [])}", "def get_current_jobs(ssh):\n stdin, stdout, stderr = ssh.exec_command('qstat')\n\n running_jobs = []\n for line in stdout.readlines():\n if '.awonmgr2' in line:\n jobid = line.split('.awonmgr2')[0]\n running_jobs.append(jobid)\n \n return running_jobs", "def marathon_services_running_here() -> List[Tuple[str, str, int]]:\n\n return mesos_services_running_here(\n framework_filter=lambda fw: fw[\"name\"].startswith(\"marathon\"),\n parse_service_instance_from_executor_id=parse_service_instance_from_executor_id,\n )", "def get_all_current_processes():\n p = subprocess.Popen(['ps', '-A'], stdout=subprocess.PIPE)\n out, err = p.communicate()\n return out", "def manager_info(self, manager):\n _, body = self.request('/v1.1/managers/active/%s' % manager, 'GET')\n return body", "def get_managers():\n return {'managers': get_users('managers')}", "def running_processes(self):\n return [process for process in self.processes.values()\n if process.running_on(self.address_name)]", "def get_running_processes(self):\n\n all_processes = []\n for _process in self.processes:\n all_processes.append(_process[\"pid\"])\n return all_processes", "def workers_status(self):\n workers = []\n for agent in self.agents_status():\n workers += agent['workers']\n return workers", "def getActiveProcesses():\n active = []\n\n for p in PROCESSRUNNER_PROCESSES:\n if p.is_alive():\n active.append(p)\n\n return active", "def get_all():\n if not SERVICE_DIR:\n raise CommandExecutionError(\"Could not find service 
directory.\")\n # - List all daemontools services in\n return sorted(os.listdir(SERVICE_DIR))", "async def running(self) -> list[dict[str, Any]]:\n data = await self.controller.request(\"get\", \"watering/program\")\n return cast(list[dict[str, Any]], data[\"programs\"])", "def workers(self):\n return self.worker_list", "def running_procs(self) -> List[int]:\n return [p.model_id for p in self.primary_scheduler.queue_nodes.run_q]", "def activemodes(self):\n\t\tret_active = []\n\t\tfor key,val in self.ms_all.iteritems():\n\t\t\tret_active.extend( val.active() )\n\t\treturn ret_active", "def run_manager(self) -> None:\n \n for p in self.process_list:\n try:\n p.daemon = True\n p.start()\n except:\n self.process_list.remove(p)\n p = Process(target=self.multiprocessing_job, args=(self.process_job,))\n p.daemon = True\n self.process_list.append(p)\n p.start()\n #Every 1 seconds, check for active Processes.\n while True:\n sleep(1)\n running = any(p.is_alive() for p in self.process_list)\n if not running or not active_children:\n self.restart_required = True\n break\n self.logger.info(self.name + \" has finished managing.\")", "def checkRoomManagerEntrys(self):\n roommaps = self.DB.getRoomMapsByDatum(datetime.datetime.now().strftime(\"%Y-%m-%d\"),\n (datetime.datetime.now() + datetime.timedelta(days=30)).strftime(\"%Y-%m-%d\"))\n return roommaps", "def get_global_active_list(self):\n return self.api.get_active_global_version_manager()", "def getTimekprLoginManagers():\n global _loginManagers\n return(_loginManagers)", "def jobs():\n result = []\n out = subprocess.check_output([\"/bin/launchctl\", \"list\"]).decode()\n for row in out.splitlines()[1:]:\n result.append(Job(row))\n return result", "def manager(model):\n return model.objects", "def running_services(self) -> List[Callable]:\n return self._running_svcs", "def list_running_tasks():\n inspector = current_app.control.inspect()\n\n return inspector.active()", "def get_all_vms(self):\n available_servers = self.connection.compute.servers()\n if available_servers:\n vm_names = [server.name for server in available_servers]\n return vm_names\n else:\n return []" ]
[ "0.69092685", "0.66732454", "0.63529295", "0.63384855", "0.6262632", "0.6169025", "0.61374104", "0.60452974", "0.5992976", "0.5987581", "0.5891855", "0.58843946", "0.58792484", "0.58792007", "0.5762086", "0.5749272", "0.5729349", "0.56972295", "0.56700397", "0.56640375", "0.56263125", "0.5614797", "0.5609539", "0.56088036", "0.5604367", "0.55938596", "0.55921197", "0.5581134", "0.5578552", "0.54799354" ]
0.75390095
0
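Configured and running managers can be diffed once both listing calls are available; this sketch assumes both bodies are lists of names, which is an assumption rather than something the rows above state.

```python
def find_stopped_managers(client):
    configured = set(client.manager_configs_list())
    active = set(client.manager_active_list())
    # Managers that have a stored config but no active entry.
    return sorted(configured - active)
```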
Update the manager with the given configuration.
def manager_update(self, manager, config): self.request('/v1.1/managers/configs/%s' % manager, 'POST', body=config)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update(self, **kwargs):\n self.manager.update(self, **kwargs)", "def update(self, **kwargs):\n self.manager.update(self, **kwargs)", "def conf_update(self):\n pass", "def update_config(self, config):\n return self._update_config(\"config\", config)", "def update(self):\n self.save_config_file()", "def update(self, obj):\n\n self.cfg.update(obj)", "def update(self, config):\n if not isinstance(config, dict):\n raise ValueError(\"Argument `config` should be dictionary\")\n self.__data.update(config)", "def update(self, config_dict):\r\n self._update(config_dict, allow_new_keys=True)", "def update(self, **kwargs):\n return self.manager.update(self, **kwargs)", "def update(self, **kwargs):\n return self.manager.update(self, **kwargs)", "def update(self, **kwargs):\n return self.manager.update(self, **kwargs)", "def update(self, config_dict):\n self._update(config_dict, allow_new_keys=True)", "def configure(self, config: dict):\n self.config.update(config)", "def update_config(self, kv: dict):\n self._configs.update(kv)\n self._save()", "def update_configs(self, config):\n for what in self.plugins: # backend, repo etc.\n for key in self.plugins[what]: # s3, filesystem etc.\n # print(\"Updating configuration of\", what, key)\n self.plugins[what][key].config(what='set', params=config)\n return", "def _auto_update_configuration(self) -> None:\n self.config = rasa.utils.train_utils.update_confidence_type(self.config)\n rasa.utils.train_utils.validate_configuration_settings(self.config)\n self.config = rasa.utils.train_utils.update_similarity_type(self.config)\n self.config = rasa.utils.train_utils.update_evaluation_parameters(self.config)", "def _update(self):\n # clear group before rebuild\n self.clear()\n\n # build configuration groups\n self._config_names = []\n for i in range(self._n_configs):\n config_name = f\"config{i+1:02}\"\n self._config_names.append(config_name)\n self._build_config_group(config_name)\n\n # reset active configuration if necessary\n if not all(cname in self._config_names for cname in self._active_config):\n self._active_config = (self._config_names[0],)\n\n # build datasets\n self._build_datasets()", "def update_config(self, data):\n self.config.data = dict_merge(self.config.data, data)\n self.config.save()", "def refresh_configuration(self):\n pass", "def with_config_update(self):\n original_config = self.load_config()\n\n config_data = original_config.json\n if str(self.ITEM_PUBLIC_ID) in config_data[f\"{self.ITEM_TYPE}s\"]:\n config_data[f\"{self.ITEM_TYPE}s\"].remove(str(self.ITEM_PUBLIC_ID))\n config_data[f\"{self.ITEM_TYPE}s\"].append(\n f\"{self.ITEM_PUBLIC_ID.author}/{self.ITEM_PUBLIC_ID.name}:0.0.1\"\n )\n self.dump_config(AgentConfig.from_json(config_data))\n try:\n yield\n finally:\n self.dump_config(original_config)", "def update_config(self, update_dict):\n self.config = recursive_merge_dicts(self.config, update_dict)", "def updated(self, newConfiguration):\n log.debug('ConfigListener: configuration %s updated' % newConfiguration)", "def update(self, config):\n # find keys are in config but not in self.config\n extra_keys = set(config.keys()) - set(self.config.keys())\n if len(extra_keys) > 0:\n raise ValueError(\"keys {} in config are not in Config.config\".format(extra_keys))\n # update self.config by config\n else:\n self.config.update(config)", "def update_manager(self):\n if self.name == \"observable\":\n self.fsm_manager.update_latent()\n else:\n self.fsm_manager.update_observable()", "def update(self, **options):\n pass", "def update(self):\n\n 
pass", "def _update(self): \n def _saveOnSuccess(result):\n if result == False:\n return defer.succeed(self)\n return self._config.updateObj(self)\n \n def _beforeSave(result):\n if result == False:\n return defer.succeed(self)\n return defer.maybeDeferred(self.beforeSave).addCallback(_saveOnSuccess)\n \n return defer.maybeDeferred(self.beforeUpdate).addCallback(_beforeSave)", "def reload(self):\n self.load_config()\n # Seems we need to explicitly refresh this\n if self.main_instance:\n self.main_instance.config = self.config", "def update(self, config_settings: Optional[Dict[str, Any]]) -> None:\n if config_settings is not None:\n self._set_config_setting(config_settings)", "def _update_params(self):\n log.debug(\"Updating parameter dict\")\n old_config = self._param_dict.get_config()\n self._get_config()\n new_config = self._param_dict.get_config() \n if (new_config != old_config):\n self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)" ]
[ "0.728846", "0.728846", "0.68354344", "0.67852616", "0.66971695", "0.66957706", "0.652014", "0.6490412", "0.648134", "0.648134", "0.648134", "0.6468057", "0.63706076", "0.6332101", "0.6330236", "0.6268073", "0.61836416", "0.6150563", "0.6149348", "0.6104999", "0.60649383", "0.60514504", "0.6033925", "0.59911966", "0.59717345", "0.5960296", "0.5954394", "0.59528226", "0.5941023", "0.59348065" ]
0.8075267
0
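A hedged sketch of pushing a new configuration through `manager_update`; the manager name and the `loglevel` key are made up for the example.

```python
def set_manager_loglevel(client, manager="manager-01", level="debug"):
    # POSTs the config body to /v1.1/managers/configs/manager-01.
    client.manager_update(manager, {"loglevel": level})
```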
Return the manager's configuration.
def manager_config(self, manager): _, body = self.request('/v1.1/managers/configs/%s' % manager, 'GET') return body
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_config(self):\n return self.config", "def get_configuration(self) -> dict:\n return self.config", "def getConfig(self):\n \n return self.config", "def config(self):\n return self.__config", "def config(self):\n return self.__config", "def config(self):\n return self._config", "def config(self):\n return self._config", "def config(self):\n return self._config", "def config(self):\n return self._cfg", "def config(self):\n return self[CONFIG_KEY]", "def config(self):\r\n return self._config", "def config(self) -> dict:\n return self._config", "def config(self):\n return self._config", "def configuration(self):\n return self._config", "def config(self):\n if self.__config is None:\n self.__config = self._get_config(self.bot)\n return self.__config", "def get_config(self) -> Dict[str, Any]:\n if self.config is None:\n self.config = self.load_config()\n\n return self.config", "def get_config(self) -> Dict[str, Any]:\n if self.config is None:\n self.config = self.load_config()\n\n return self.config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config", "def _get_config(self):\n return self.__config" ]
[ "0.8012625", "0.799986", "0.7858629", "0.78259075", "0.78259075", "0.7732619", "0.7732619", "0.7732619", "0.77212644", "0.7713746", "0.7709552", "0.7693966", "0.76747704", "0.7667241", "0.76168424", "0.7599402", "0.7599402", "0.75130105", "0.75130105", "0.75130105", "0.75130105", "0.75130105", "0.75130105", "0.75130105", "0.75130105", "0.75130105", "0.75130105", "0.75130105", "0.75130105", "0.75130105" ]
0.81382024
0
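Fetching a configuration with `manager_config` and writing it back with `manager_update` gives a simple read-modify-write pattern; the sketch assumes the GET body is a mutable dict, which the row does not guarantee.

```python
def bump_manager_weight(client, manager):
    config = client.manager_config(manager)   # GET the current config
    config["weight"] = config.get("weight", 1) + 1
    client.manager_update(manager, config)    # POST it back
```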
Return the active manager info.
def manager_info(self, manager): _, body = self.request('/v1.1/managers/active/%s' % manager, 'GET') return body
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def manager_active_list(self):\n _, body = self.request('/v1.1/managers/active', 'GET')\n return body", "def get_manager_info(handle, timeout):\n mgr_info = dict()\n mgr_info['ls-modules'] = ceph_mon_command(handle, 'mgr module ls', timeout)\n mgr_info['dump'] = ceph_mon_command(handle, 'mgr dump' , timeout)\n mgr_info['metadata'] = ceph_mon_command(handle, 'mgr metadata' , timeout)\n return mgr_info", "def GetManager(self):\r\n\r\n return self.manager", "def getManager(self):\n return self._manager", "def get_managers():\n return {'managers': get_users('managers')}", "def info(self):\n return InfoManager(session=self._session)", "def manager_agents(self):\n return self.get(\"manager_agents\")", "def GetOwnerManager(self):\r\n\r\n return self._owner_mgr", "def get_manager():\n return __manager__", "def GetAuiManager(self):\r\n\r\n return self._mgr", "def getInfo(self):\n return self.info", "def get_atom_info(self):\n return", "def getInfo(self):\n return self._info", "def get_manager_ip():\n return os.environ[MANAGER_IP_KEY]", "def check_manager_status(brief=True):\n ret = {\n \"manager\": {\n \"manager_id\": None,\n \"queues\": [],\n \"queue_len\": [],\n \"status\": \"stopped\",\n },\n \"workers\": [],\n \"fabrics\": [],\n \"total_queue_len\": 0,\n }\n seq = get_random_sequence()\n msg = eptMsg(MSG_TYPE.GET_MANAGER_STATUS, seq=seq, data={\"brief\": brief})\n #logger.debug(\"get manager status (seq:0x%x) brief:%r\", seq, brief)\n redis = get_redis()\n p = redis.pubsub(ignore_subscribe_messages=True)\n p.subscribe(MANAGER_CTRL_RESPONSE_CHANNEL)\n redis.publish(MANAGER_CTRL_CHANNEL, msg.jsonify())\n start_ts = time.time()\n timeout = AppStatus.MANAGER_STATUS_TIMEOUT \n try:\n if brief:\n timeout = AppStatus.MANAGER_STATUS_BRIEF_TIMEOUT\n while start_ts + timeout > time.time():\n data = p.get_message(timeout=1)\n if data is not None:\n channel = data[\"channel\"]\n if channel == MANAGER_CTRL_RESPONSE_CHANNEL:\n msg = eptMsg.parse(data[\"data\"]) \n if msg.msg_type == MSG_TYPE.MANAGER_STATUS:\n #logger.debug(\"received manager status (seq:0x%x)\", msg.seq)\n ret[\"manager\"] = msg.data[\"manager\"]\n ret[\"workers\"] = msg.data[\"workers\"]\n ret[\"fabrics\"] = msg.data[\"fabrics\"]\n ret[\"total_queue_len\"] = msg.data[\"total_queue_len\"]\n return ret\n except Exception as e:\n logger.debug(\"Traceback:\\n%s\", traceback.format_exc())\n logger.debug(\"error: %s\", e)\n finally:\n if redis is not None and hasattr(redis, \"connection_pool\"):\n redis.connection_pool.disconnect()\n\n logger.warn(\"no manager response within timeout(%s sec)\", timeout)\n return ret", "def manager_config(self, manager):\n _, body = self.request('/v1.1/managers/configs/%s' % manager, 'GET')\n return body", "def systemManagerAddress(self):\n return self._systemManagerAddress.value", "def systemManagerAddress(self):\n return self._systemManagerAddress.value", "def get_manager():\n\n return multiprocessing.Manager()", "def info(self):\n return self._info", "def getProcessManager(self): \n \n return self.procmgr", "def manager_status(self, msg):\n result = {\n 'success': 0,\n 'msg': 'Service Manager Status',\n 'result': {\n 'status': 'running',\n 'uname': platform.uname(),\n 'frontend_endpoint': self.frontend_endpoint,\n 'backend_endpoint': self.backend_endpoint,\n 'sink_endpoint': self.sink_endpoint,\n 'mgmt_endpoint': self.mgmt_endpoint,\n 'result_publisher_port': self.result_pub_port,\n }\n }\n\n return result", "def info(self):\n return self.client.call('GET', self.name + 'info')", "def info(self):\n return 
self._info", "async def get_system_info(self) -> Dict[str, Any]:\n assert self._client is not None\n return await self._client.invoke_method(\"system.info\")", "def get_info(self):\n return None", "def _get_user_pro_manager(managerid):\n users_manager = User.objects.filter(manager=managerid)\n return users_manager", "def get_info(self):\n pass", "def get_info(self):\n pass", "def info(self):\n return self.current_run.info" ]
[ "0.7522451", "0.6985423", "0.695754", "0.6794533", "0.66179204", "0.6552652", "0.64267594", "0.6259618", "0.60771346", "0.6048752", "0.6020053", "0.5943902", "0.59426236", "0.5939026", "0.5847248", "0.58350825", "0.5816105", "0.5816105", "0.5776043", "0.5774912", "0.57687855", "0.57295096", "0.5719558", "0.5713818", "0.57124597", "0.5703037", "0.5674595", "0.5646168", "0.5646168", "0.5643098" ]
0.8867749
0
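A sketch that polls `manager_info` for every entry returned by `manager_active_list`, again assuming the active list is a list of manager names.

```python
def describe_active_managers(client):
    infos = {}
    for manager in client.manager_active_list():
        # GET /v1.1/managers/active/<manager> for each running manager.
        infos[manager] = client.manager_info(manager)
    return infos
```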
Remove the given manager's configuration.
def manager_remove(self, manager): self.request('/v1.1/managers/configs/%s' % manager, 'DELETE')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_config(name):\n db = dbm.open(config_file, 'c')\n del db[name]\n db.close()", "def del_conf(self, path):\n\t\tself.monitor.removePath(path)\n\t\tself.cache.pop(path, None)", "def remove_machine_from_config(machine):\n cfg = _get_config()\n cfg[\"frontends\"].pop(machine.id, None)\n cfg[\"backends\"].pop(machine.id, None)\n return _set_config(cfg)", "def remove_config_object() -> None:\n if G_CONFIG_OBJECT:\n G_CONFIG_OBJECT.clear()", "def delete_podmanager(cls, podmanager_uuid):\n cls.dbdriver.delete_podmanager(podmanager_uuid)", "def deconfigure(self):\n\n pass", "def remove_stored_config(self):\n stored_config_filename = self.stored_config_filename\n if stored_config_filename.exists():\n stored_config_filename.remove()\n self._stored_cmake_generator = self._stored_config.cmake_generator", "def stop_running_manager(self) -> None:\n self.remove_value(self._manager_running_attribute)", "def delete_configuration(self, node, ports):\n return hpssa_manager.delete_configuration()", "def fusion_api_remove_san_manager(self, name=None, uri=None, api=None, headers=None):\n return self.dm.delete(name, uri, api, headers)", "def delete(self):\n self.manager.delete(self)", "def deconfigure(self):\n\n self.platform.deconfigure()", "def manager_config(self, manager):\n _, body = self.request('/v1.1/managers/configs/%s' % manager, 'GET')\n return body", "def remove_repo_config(self):\n utils.remove_if_exists(self.repo_config_path)\n\n # One more damn hack! We have to remove auxiliary repo config\n # if we're rollbacking to the Fuel version that doesn't have\n # auxiliary repo at all.\n if utils.compare_version(self.config.from_version, '6.1') > 0:\n utils.remove_if_exists(\n self.host_system_config['repo_aux_config_path'])", "def remove(ctx, name, project_root):\n\n if name == 'logme':\n raise LogmeError(\"'logme' master logger configuration cannot be removed!\")\n\n with ensure_conf_exist(project_root) as logme_conf:\n\n config = read_config(logme_conf)\n config.remove_section(name)\n\n with logme_conf.open('w+') as conf:\n config.write(conf)", "def remove_local_config(self):\n with ignored(OSError):\n os.remove(os.path.join(self.rundir, const.LOCAL_CONFIG_FILE))", "def delete(self):\n self.manager.delete(self.name)", "def delete(self):\n self.manager.delete(self.name)", "def remove_config_lock():\n query = {\n \"type\": \"op\",\n \"cmd\": \"<request><config-lock><remove></remove></config-lock></request>\",\n }\n\n return __proxy__[\"panos.call\"](query)", "def removeConfigFile(alg):\r\n configPath = alg.getParameterValue('config')\r\n if isWindows():\r\n command = \"DEL {}\".format(os.path.join(rliPath(), configPath))\r\n else:\r\n command = \"rm {}\".format(os.path.join(rliPath(), configPath))\r\n alg.commands.append(command)", "def remove(self, host):\n for p, c in self.configs_:\n if host in c.hosts_:\n c.remove(host)\n return\n raise ValueError(\"Host %s: not found\" % host)", "def do_DELETE(self): # pylint: disable=C0103\r\n if self.path == \"/del_config\" or self.path == \"/del_config/\":\r\n self.server.config = dict()\r\n self.log_message(\"Reset Server Configuration.\")\r\n self.send_response(200)\r\n else:\r\n self.send_response(404)", "def delete_configs(configholder, configs):\n for config_name in configs:\n configholder.delete_config(config_name)", "def fusion_api_delete_hypervisor_manager(self, name=None, uri=None, api=None, headers=None):\n return self.hypervisor_mgr.delete(name=name, uri=uri, api=api, headers=headers)", "async def delete_local_configuration_routine(self, 
name: str):\n plat = get_local_platform_routines()\n user = LocalUserRoutines(plat)\n manager = LocalContainerConfigurationManager(user)\n cont = self.GetItemByName(name)\n manager.DeleteByID(cont.GetID())", "def clear(self):\r\n del self.__config\r\n self.__config = {}\r\n self.save()", "def test_component_configuration_removed_from_agent_config(self):\n with cd(self._get_cwd()):\n self.run_cli_command(\n \"add\", \"--local\", self.ITEM_TYPE, str(self.ITEM_PUBLIC_ID)\n )\n self.run_cli_command(\"add\", \"--local\", \"connection\", \"fetchai/http_server\")\n\n self.runner.invoke(\n cli,\n [\n \"config\",\n \"set\",\n \"vendor.fetchai.connections.soef.config.api_key\",\n \"some_api_key\",\n ],\n standalone_mode=False,\n catch_exceptions=False,\n )\n self.runner.invoke(\n cli,\n [\n \"config\",\n \"set\",\n \"vendor.fetchai.connections.http_server.config.port\",\n \"9000\",\n ],\n standalone_mode=False,\n catch_exceptions=False,\n )\n config = self.load_config()\n assert config.component_configurations\n assert (\n PackageId(self.ITEM_TYPE, self.ITEM_PUBLIC_ID)\n in config.component_configurations\n )\n\n self.run_cli_command(\"remove\", self.ITEM_TYPE, str(self.ITEM_PUBLIC_ID))\n\n config = self.load_config()\n assert (\n PackageId(self.ITEM_TYPE, self.ITEM_PUBLIC_ID)\n not in config.component_configurations\n )\n assert config.component_configurations", "def manager_update(self, manager, config):\n self.request('/v1.1/managers/configs/%s' % manager, 'POST', body=config)", "def delete(self):\r\n return self.connection.delete_launch_configuration(self.name)", "def removeProxyManagerConnection(address=None):\n global __mgr_cache__\n #: :type: ProxyManager\n if hasattr(__mgr_cache__[address], 'shutdown'):\n __mgr_cache__[address].shutdown()\n del __mgr_cache__[address]" ]
[ "0.6436418", "0.63584477", "0.6163542", "0.60472786", "0.60100406", "0.6005122", "0.59852", "0.5979645", "0.5950956", "0.5924563", "0.5869929", "0.5866752", "0.58290803", "0.58163756", "0.57940984", "0.57779753", "0.5771216", "0.5771216", "0.5770334", "0.5725332", "0.57213074", "0.5710136", "0.5691282", "0.5679154", "0.56650525", "0.56486446", "0.5633318", "0.56260926", "0.56177425", "0.55844665" ]
0.86958385
0
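Since `manager_remove` issues a bare DELETE, a cautious caller might check for the config first; this guard is an illustration only and is not implied by the method itself.

```python
def remove_manager_if_configured(client, manager):
    if manager not in client.manager_configs_list():
        return False
    client.manager_remove(manager)   # DELETE /v1.1/managers/configs/<manager>
    return True
```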
Set the custom endpoint metrics.
def endpoint_metrics_set(self, endpoint_name=None, metrics=None): if metrics is None: raise Exception("Metrics required!") if endpoint_name is None: self.request('/v1.1/endpoint/metrics', 'POST', body=metrics) else: self.request('/v1.1/endpoints/%s/metrics' % endpoint_name, 'POST', body=metrics)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_metrics(self):", "def set_metrics(self, metrics: List[Callable]) -> None:\n self.metrics = metrics", "def metrics(self, metrics):\n\n self._metrics = metrics", "def record_custom_metrics(self, metrics):\n\n if not self.__settings:\n return\n\n for name, value in metrics:\n self.record_custom_metric(name, value)", "def setPerfMetrics(self, perf_metrics):\n for event in perf_metrics.metric:\n attr_name = '%s_%s_%s' % (frontendConfig.glidein_perfmetric_prefix,\n perf_metrics.name, event)\n self.adParams[attr_name] = perf_metrics.event_lifetime(event)", "def endpoint_metrics(self, endpoint_name=None):\n if endpoint_name is None:\n _, body = self.request('/v1.1/endpoint/metrics', 'GET')\n else:\n _, body = self.request('/v1.1/endpoints/%s/metrics' % endpoint_name, 'GET')\n return body", "def send_metrics(self):\n metrics = self.get_metrics()\n if not metrics:\n return\n\n for mkey, metric in metrics.items():\n for mname, mval in metric.items():\n try:\n self.agent.record_custom_metric(self.convert_metric_name(mkey, mname), mval, None)\n except Exception as e:\n print_(e)", "def add_metrics(self, metric_dict: dict):\n self.metric_dict.update(metric_dict)", "def metrics_collector(self, metrics_collector):\n\n self._metrics_collector = metrics_collector", "def update_metrics(self, metrics, predictions, labels):\n return", "def add_metrics(self, metrics):\n for i, metric in enumerate(self.config.metrics):\n tf.summary.scalar(metric, metrics[i])", "def __init__(self):\n super().__init__()\n self.metric = 'FALLOUT'", "def endpoints(self, endpoints):\n\n self._endpoints = endpoints", "def __init__(self):\n super().__init__()\n self.metric = 'ACURCY'", "def __init__(self):\n super().__init__()\n self.metric = 'FN'", "def __init__(self):\n super().__init__()\n self.metric = 'FMEASR'", "def __init__(self):\n super().__init__()\n self.metric = 'FP'", "def __init__(self):\n super().__init__()\n self.metric = 'RECALL'", "def initialize_prometheus_middleware(app, endpoint='/metrics'):\n if get_setting('DEPLOYED'):\n logger.info(f'Enabling Prometheus endpoint on: \"{endpoint}\"')\n app.add_middleware(PrometheusMiddleware)\n app.add_route(endpoint, metrics)", "def __init__(self):\n super().__init__()\n self.metric = 'MAHLNBS'", "def setup_metrics_file(self):\n\n with open(self.metrics_path, \"w+\") as f_metrics:\n\n f_metrics.write(get_metrics_file_form())", "def metrics(self):\n raise NotImplementedError(\"metrics\")", "def add_endpoint(self, resource, method, data):\n self.endpoints.setdefault(resource, {})[method] = data", "def handle_raw_custom_metrics(\n msg: Union[prediction_pb2.SeldonMessage, Dict],\n seldon_metrics: SeldonMetrics,\n is_proto: bool,\n method: str,\n):\n metrics = []\n if is_proto:\n metrics = seldon_message_to_json(msg.meta).get(\"metrics\", [])\n if metrics and not INCLUDE_METRICS_IN_CLIENT_RESPONSE:\n del msg.meta.metrics[:]\n elif isinstance(msg, dict):\n metrics = msg.get(\"meta\", {}).get(\"metrics\", [])\n if metrics and not INCLUDE_METRICS_IN_CLIENT_RESPONSE:\n del msg[\"meta\"][\"metrics\"]\n seldon_metrics.update(metrics, method)", "def __init__(self):\n super().__init__()\n self.metric = 'PROBDST'", "def __init__(self):\n super().__init__()\n self.metric = 'PRCISON'", "def __init__(self):\n super().__init__()\n self.metric = 'SPCFTY'", "def __init__(self):\n super().__init__()\n self.metric = 'SNSVTY'", "def __init__(__self__, *,\n metrics: Optional[pulumi.Input['ManagedClusterAzureMonitorProfileMetricsArgs']] = None):\n if metrics is not None:\n 
pulumi.set(__self__, \"metrics\", metrics)", "def register_metrics(app, app_version=None, app_config=None):\n\n app.before_request(before_request)\n app.after_request(after_request)\n # APP_INFO.info({\"version\": app_version, \"config\": app_config})" ]
[ "0.7323405", "0.6530676", "0.64212704", "0.6347529", "0.60994095", "0.5981285", "0.58775526", "0.5772988", "0.5707842", "0.56893635", "0.5633273", "0.56062645", "0.56058544", "0.558932", "0.55644834", "0.5553006", "0.5525284", "0.5476125", "0.5475177", "0.54414856", "0.5430243", "0.5419509", "0.5410059", "0.5409811", "0.539299", "0.53767437", "0.53653705", "0.5346379", "0.5341361", "0.5338267" ]
0.7356621
0
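A sketch of reporting custom metrics via `endpoint_metrics_set`; the metric names and values are invented, and the only behavior taken from the method is its rejection of a missing metrics argument.

```python
def report_load(client, name="web-tier"):
    metrics = {"active_connections": 12, "request_rate": 3.5}  # illustrative
    client.endpoint_metrics_set(endpoint_name=name, metrics=metrics)
```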
Return the full endpoint log.
def endpoint_log(self, endpoint_name=None, since=None): if endpoint_name is None: url = '/v1.1/endpoint/log' else: url = '/v1.1/endpoints/%s/log' % endpoint_name if since is not None: url += '?since=%f' % float(since) _, body = self.request(url, 'GET') return body
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_full_log(self):\n return self._get_log('full')", "def log(self):\n resp = requests.get(\"%s/api/log\"%self.urlbase, verify=False)\n return resp.json[\"log\"]", "def getLog(self):\n return self.session.request('diag/log/')", "def getLogs():", "def getLogs():", "def getLog(self):\n \n return self.resp[\"log\"]", "def getLog(self):\n pass", "def getLog(self):\n return self.log", "def getLog(self):\n return self.log", "def log (self):\n return self._log", "def log(self):\r\n return self._log", "def log(self):\n return self._log", "def log(self):\n return self._log", "def get_main_log(self) -> Any:\n return self.logger", "def get_logs(self):\n return self.network.get_logs()", "def log(self):\n if self._log is None:\n self._log = Log(client=self)\n return self._log", "def logfile(self):\n return self._get('logfile')", "def logs(self):\n return self.logger.logs()", "def getLogs():\n # in flux, it may be possible to provide more structured information\n # like python Failure instances", "def GetLogs(self):\n raise NotImplementedError()", "def info_log(self):\n return self._info_log", "def logs_ingestion(self) -> Optional['outputs.DataCollectionEndpointResponseLogsIngestion']:\n return pulumi.get(self, \"logs_ingestion\")", "def get_log(self):\n\n open_lf = open(self.logfile, 'r')\n log_str = open_lf.read()\n sys.stdout.write(log_str)\n\n return log_str", "def detailed_log_handler(self):\n return self._detailed_log_handler", "def log(self) -> misc_.Logger:\n\t\treturn self._log", "def logs(self):\n return self._logs", "def logs(self):\n return self._logs", "def get_log(self):\n return Gumtree.gumtree.getLog() + ';'", "def logging(self):\r\n return None", "def event_log(self):\n pass" ]
[ "0.7822475", "0.72016114", "0.7017197", "0.6889371", "0.6889371", "0.67776823", "0.66976184", "0.6696164", "0.6696164", "0.66779715", "0.6653893", "0.66104734", "0.66104734", "0.6448469", "0.6329218", "0.6319319", "0.62839574", "0.6251437", "0.61940414", "0.61634755", "0.60994375", "0.6082557", "0.6052131", "0.6022521", "0.6014217", "0.59823716", "0.59823716", "0.5961301", "0.59611714", "0.59608346" ]
0.7715094
1
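The `since` parameter of `endpoint_log` lends itself to incremental polling; this sketch assumes each log entry is a sequence whose first field is a float timestamp, which is purely a guess about the body's shape.

```python
import time

def tail_endpoint_log(client, name=None, interval=5.0):
    since = None
    while True:  # tail forever; interrupt to stop
        for entry in client.endpoint_log(endpoint_name=name, since=since):
            print(entry)
            since = entry[0]  # assumed: first field is the timestamp
        time.sleep(interval)
```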
Post a message to the endpoint log.
def endpoint_post(self, endpoint_name=None, message=None, level=None): if message is None: raise Exception("Message required!") if endpoint_name is None: url = '/v1.1/endpoint/log' else: url = '/v1.1/endpoints/%s/log' % endpoint_name if level is not None: url += '?level=%s' % level _, body = self.request(url, 'POST', body=message) return body
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def log_post(msg):\n casalog.post(msg, 'INFO', 'bsvoboda')", "def log(self, message):", "def log(self, message: str):", "def log_message(self, message):\n log_info = {'logGroupName': self.log_group_name,\n 'logStreamName': self.log_stream_name,\n 'logEvents': [\n {\n 'timestamp': int(1000 * time.time()),\n 'message': '[{}]: {}'.format(self.stage, message)\n },\n ]}\n\n if self.sequence_token:\n log_info['sequenceToken'] = self.sequence_token\n\n response = self.logs_client.put_log_events(**log_info)\n\n self.sequence_token = response['nextSequenceToken']", "def log(tag, message=None):\n Log._post(\"log\", tag, message)", "def post_log(\n self, message, level=\"INFO\", item_id=None, file_to_attach=None\n ):\n self._log(\n message,\n level,\n file_to_attach=file_to_attach,\n item_id=item_id or self._log_item_id,\n )", "def log(self, message):\n self._logger.write(message)", "def _log(self, message):\n pass", "def post_message(self, message: dict) -> any:\n logging.info(message)\n # If the level is greater, do not post messages\n if message[\"level\"] > BaseConfig.LOGGING_LEVEL_FILTER:\n return\n try:\n blocks = self._generate_slack_block(message=message)\n response = self.client.chat_postMessage(\n channel=self.channel_id,\n blocks=blocks,\n mrkdwn=True,\n text=generate_mkdn_message(message=message, format=\"JIRA\")\n )\n return response\n except SlackApiError as e:\n logging.error(f\"Got an error: {e.response['error']}\")", "def _log(self, msg):\n self.telegram_queue.put(f\"{__name__.split('.')[-1]}: {msg}\")", "def emit(self, record):\n log_entry = self.format(record)\n try: \n requests.post(self.host+self.url, log_entry,headers={\"Content-type\": \"application/json\"}).content\n except Exception as e:\n if self.debug:\n print(e)", "def log_message(self, msg):\n\t\tself.logView.log_message(msg)", "def log(self, message):\n print(\"Server#{}\".format(message))\n if self.log_queue:\n self.log_queue.put()", "def logs_add_message(self, level, message):\n pass", "def log(self, msg):\n\n\t\tself.eyetribe.log_message(msg)", "def _post(level, tag, message=None):\n if message == None:\n message = tag\n tag = \"hotword\"\n\n message = \"%s%s\\033[0;37;40m\" % (Log.COLOURS[level], message)\n\n logger = Log._get_logger(level, tag)\n method = getattr(logger, level)\n method(Log._message(message))", "def write_log(self, msg: str):\n self.cta_engine.write_log(msg, self)", "def log_message(self, formate, *args):\n return", "async def log(self, message, level=logging.INFO):\n\t\tawait self.handle_log(LogEntry(level, self.name, message))", "def send_message(self, msg):\n self.logger.debug(msg)\n self.writer.send(json.dumps(msg))", "def write_message(self, message):\r\n logging.debug(\"Sending message {mes} to {usr}\".format(mes=message, usr=self.id))\r\n self.handler.write_message(message)", "def publish(self, message: str) -> None:", "def log(msg):\n\tfrom http_request import req\n\tif not req: return\n\t\t\n\tif not req.out.get('_log'):\n\t\treq.out['_log'] = []\n\treq.out['_log'].append(msg)", "def emit(self, record):\n data = self.mapLogRecord(record)\n client = Client()\n if self.method == 'GET':\n response = client.get(self.url, data)\n else:\n response = client.post(self.url, data)\n self.testcase.assertEqual(response.status_code, 200)\n self.testcase.assertContains(response, 'message saved')", "def on_message(self, msg):\n self.log.info(msg)", "def _send(self, message):\n logger.info(message)\n self.buffer.put(message)", "def log(self, message):\n timestamp = time.strftime(\"[%H:%M:%S]\", 
time.localtime(time.time()))\n self.file.write('%s %s\\n' % (timestamp, message))\n self.file.flush()", "def emit(self, record):\n\n trace = None\n exc = record.__dict__['exc_info']\n if exc:\n trace = traceback.format_exc(exc)\n path = request.path\n method = request.method\n ip = request.remote_addr\n slack_event = LogEntry(\n logger=record.__dict__['name'],\n level=record.__dict__['levelname'],\n trace=trace,\n message=record.__dict__['msg'],\n path=path,\n method=method,\n ip=ip,\n )\n slack_event.save()\n #hook = SlackWebhookClient(self.webhook)\n #hook.send(\n # message=slack_event.to_slack_msg(),\n # emoji=\":ghost:\",\n # username=\"battleship-server\"\n #)", "def log(self, message):\n self._log += \"%s\\n\" % message\n print message", "def log(self, _strMessage=\"\"):\n self.edLogging.log(_strMessage)" ]
[ "0.7623483", "0.69189143", "0.68100804", "0.6766771", "0.67448765", "0.67176473", "0.66051364", "0.6535955", "0.6521147", "0.65033543", "0.6469652", "0.6450492", "0.64386064", "0.6419901", "0.6396873", "0.63063043", "0.6303807", "0.6289788", "0.6288486", "0.6243808", "0.6234907", "0.6212763", "0.6206745", "0.62011385", "0.61896336", "0.6162368", "0.6155257", "0.6141104", "0.6115506", "0.6106753" ]
0.78706574
0
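A wrapper around `endpoint_post` that pins the level; the `warning` string is assumed to be acceptable to the server, since the method simply forwards it in the `?level=` query parameter.

```python
def warn(client, message, name=None):
    # Appends ?level=warning to the log route built by endpoint_post.
    return client.endpoint_post(endpoint_name=name, message=message,
                                level="warning")
```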
List all available metadata.
def metadata_list(self, endpoint_name=None): if endpoint_name is None: _, body = self.request('/v1.1/endpoint/metadata', 'GET') else: _, body = self.request('/v1.1/endpoints/%s/metadata' % endpoint_name, 'GET') return body
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_metadata(self):\n return self.db.get_all_nodes()", "def metadata(self): # -> list[Unknown]:\n ...", "def metadata(self): # -> list[Unknown]:\n ...", "def list(self, filter = NotImplemented, pager = NotImplemented):\n\n kparams = KalturaParams()\n kparams.addObjectIfDefined(\"filter\", filter)\n kparams.addObjectIfDefined(\"pager\", pager)\n self.client.queueServiceActionCall(\"metadata_metadata\", \"list\", \"KalturaMetadataListResponse\", kparams)\n if self.client.isMultiRequest():\n return self.client.getMultiRequestResult()\n resultNode = self.client.doQueue()\n return KalturaObjectFactory.create(resultNode, 'KalturaMetadataListResponse')", "def displayMetadata(self, metadata):\n #Unsupported types return -1 as metadata\n self.writeDetails(\"Listing metadata...\")\n if metadata is not None and len(metadata) == 0:\n self.writeDetails(\"\\tNo metadata found!\")\n self.metadataList.show()\n self.metadataList.clear()\n self.metadataList.setRowCount(0)\n self.metadataList.setHorizontalHeaderLabels([\"Metadata Header\", \"Value\"])\n elif metadata[0] == -1:\n self.metadataList.hide()\n self.fileNotSupported.show()\n self.writeDetails(\"\\tFile type not supported\")\n logging.debug(\"No metadata, file not supported\")\n\n #Supported types have list of metadata\n else:\n self.fileNotSupported.hide()\n self.metadataList.show()\n self.metadataList.clear()\n self.metadataList.setRowCount(0)\n self.metadataList.setHorizontalHeaderLabels([\"Metadata Header\", \"Value\"])\n i = self.metadataList.rowCount()\n for row in metadata:\n metaName = QtGui.QTableWidgetItem(row[0])\n metaValue = QtGui.QTableWidgetItem(row[1])\n metaName.setFlags(metaName.flags() & (~QtCore.Qt.ItemIsEditable))\n metaValue.setFlags(metaValue.flags() & (~QtCore.Qt.ItemIsEditable))\n self.metadataList.insertRow(i)\n self.metadataList.setItem(i, 0, metaName)\n self.metadataList.setItem(i, 1, metaValue)\n i += 1\n self.writeDetails(\"\\t\" + row[0] + \": \" + row[1])", "def test_list_base_metadata(self):\n\n # start out by creating some metadata\n\n self.save_base_metadata()\n\n # then list metadata with name = 'idsvc.basemeta'\n\n response = BaseMetadata.list(api_client=self.IDS_SYS_CLIENT)\n\n # we should have at least one in the list, since we just created one\n\n self.assertTrue(len(response)>0)\n\n # cleanup\n\n self.delete_base_metadata()", "def _getAllMeta(self):\n try:\n metadata = pyexiv2.ImageMetadata(self.imagePath)\n metadata.read()\n return metadata\n except:\n print 'error reading meta data'\n return None", "def getMetadataList(self):\n return [ x.toTuple() for x in self.metadata ]", "def list_datasets():\n return METADATA.keys()", "def metadata(self): # -> None:\n ...", "def test_list_server_metadata(self):\n metadata_response = self.servers_client.list_server_metadata(\n self.server.id)\n metadata = metadata_response.entity\n self.assertEqual(200, metadata_response.status_code)\n self.assertEqual(metadata.get('meta_key_1'), 'meta_value_1')\n self.assertEqual(metadata.get('meta_key_2'), 'meta_value_2')", "def test_list_image_metadata(self):\n pass", "def METADATA(self) -> Dict[str, Any]:\n return self._metadata", "def get_metadata(self):\n return self.manager.get_metadata(self)", "def GetMetadataSample():\n client = CreateClient()\n # Fetch the metadata entry and display bits of it\n metadata = client.GetMetadata()\n print 'Quota'\n print ' Total:', metadata.quota_bytes_total.text\n print ' Used:', metadata.quota_bytes_used.text\n print ' Trashed:', metadata.quota_bytes_used_in_trash.text\n print 
'Import / Export'\n for input_format in metadata.import_formats:\n print ' Import:', input_format.source, 'to', input_format.target\n for export_format in metadata.export_formats:\n print ' Export:', export_format.source, 'to', export_format.target\n print 'Features'\n for feature in metadata.features:\n print ' Feature:', feature.name.text\n print 'Upload Sizes'\n for upload_size in metadata.max_upload_sizes:\n print ' Kind:', upload_size.kind, upload_size.text", "def get_metadata(self):\n return {}", "def listMetaDataFields(self, exclude=True):\n #tool = getToolByName(self, ATCT_TOOLNAME)\n #original_list = tool.getMetadataDisplay(exclude)\n\n return DisplayList((\n ('getAnalysisCategory', _p('Analysis Category')),\n ('getAnalysisService', _p('Analysis Service')),\n ('getAnalysts', _('Analyst')),\n ('getClientOrderNumber', _('Client Order')),\n ('getClientReference', _('Client Reference')),\n ('getClientSampleID', _('Client Sample ID')),\n ('getClientTitle', _('Client')),\n ('getContactTitle', _('Contact')),\n ('Creator', _p('Creator')),\n ('created', _('Date Created')),\n ('getDatePublished', _('Date Published')),\n ('getDateReceived', _('Date Received')),\n ('getDateSampled', _('Date Sampled')),\n ('getProfilesTitle', _('Analysis Profiles')),\n ('getRequestID', _('Request ID')),\n ('getSampleID', _('Sample ID')),\n ('getSamplePointTitle', _('Sample Point')),\n ('getSampleTypeTitle', _('Sample Type')),\n ('review_state', _p('Review state')),\n ))", "def list(self, filter = NotImplemented, pager = NotImplemented):\n\n kparams = KalturaParams()\n kparams.addObjectIfDefined(\"filter\", filter)\n kparams.addObjectIfDefined(\"pager\", pager)\n self.client.queueServiceActionCall(\"metadata_metadataprofile\", \"list\", \"KalturaMetadataProfileListResponse\", kparams)\n if self.client.isMultiRequest():\n return self.client.getMultiRequestResult()\n resultNode = self.client.doQueue()\n return KalturaObjectFactory.create(resultNode, 'KalturaMetadataProfileListResponse')", "def get_metadata():\n\n metadata = []\n current_date = (datetime.date.today(),)\n\n # make sql connection\n # execute query\n with sql_cursor() as cursor:\n try:\n cursor.execute('USE goggles')\n cursor.execute('SELECT b.image_name, b.X_Min, b.Y_Min, b.X_Max, b.Y_Max, '\n 'b.init_vector, b.goggles from BBOX AS b, IMAGE as i where '\n 'b.image_name=i.image_name and i.image_date=%s and b.goggles=False', current_date)\n\n for (image_name, x_min, y_min, x_max, y_max, init_vector, goggles) in cursor:\n metadata.append({'image_name': image_name,\n 'x_min': float(x_min),\n 'y_min': float(y_min),\n 'x_max': float(x_max),\n 'y_max': float(y_max),\n 'init_vector': init_vector\n })\n except Exception as e:\n print(e)\n\n with open(METADATA_FILE, 'w') as meta_file:\n json.dump(metadata, meta_file)\n return metadata", "async def list(self):\n all = (await self.get(self.profiles_list))['results']\n log(\"retrieved participant metadata.\")\n return all or []", "def metadata(self):\n return self.meta.metadata", "def get_metadata(self):\n items = self.get_playlist_items()\n uris = [item[\"track\"][\"uri\"] for item in items]\n features = self.API.audio_features(uris)\n\n list_data = []\n\n for idx, item in enumerate(items):\n data = {\"name\": item[\"track\"][\"name\"], \"uri\": item[\"track\"][\"uri\"]}\n data.update(features[idx])\n list_data.append(data)\n\n self.metadata = pd.DataFrame(data=list_data,\n index=range(len(list_data)))\n\n return self.metadata", "def metadata(self):\r\n return self._metadata", "def get_metadata(self):\n return 
self.client._perform_json(\n \"GET\", \"/projects/%s/recipes/%s/metadata\" % (self.project_key, self.recipe_name))", "def metadata(self) -> global___SummaryMetadata:", "def get_metadata(data):\n genres = list(data[\"genre\"])\n print(\"genres:\", len(set(genres)), set(genres))\n return genres", "def get_metadata(self):\n return self._metadata", "async def fetch_metadata(self, route: str):\n data = await self.http.get_metadata(route)\n return data", "def GetMetadata(self):\n return self.dict['meta']", "def test_api_can_get_metadata(self):\n response = self.client.get('/metadata/', format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_200_OK)" ]
[ "0.72405505", "0.70674235", "0.70674235", "0.692333", "0.67305756", "0.6682802", "0.6525229", "0.64878017", "0.6483379", "0.6448371", "0.64321834", "0.6295765", "0.629346", "0.62883264", "0.6154566", "0.61519384", "0.6135041", "0.6129806", "0.60716623", "0.60687107", "0.60669655", "0.6066336", "0.6065283", "0.6053414", "0.6037392", "0.60319054", "0.6031489", "0.6022719", "0.6022654", "0.6018224" ]
0.7420968
0
Register the given IP.
def ip_register(self, ip=None):
        if ip is None:
            self.request('/v1.1/register', 'POST')
        else:
            self.request('/v1.1/register/%s' % ip, 'POST')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_ip(self, inf, ip):\n self.interfaces[inf]['ip'] = ip", "def new_ip(self, ip):\n if not ip in self.ip_list:\n self.ip_list.add(ip)\n host = self.hs.id_to_object(ip)\n host.add_tag('sniffer')\n host.save()\n print_success(\"New ip address: {}\".format(ip))", "def create(self, ip): # pylint: disable=invalid-name\n return self.request(\"POST\", data={\"ip\": ip})", "def register_elastic_ip(ElasticIp=None, StackId=None):\n pass", "def add_ip(self, ip: IPWrapper):\n ip_name = ip.top_name\n self._ips[ip_name] = ip\n self._ips_by_internal_name[ip.ip_name] = ip\n # create a placeholder for Instance arguments to instantiate the ip\n setattr(self, ip_name, dict())", "def ip(self, ip: str):\n\n self._ip = ip", "def ip(self, ip):\n\n self._ip = ip", "def ip(self, ip):\n\n self._ip = ip", "def setIP(self, idx, ip):\n self.ip[int(idx)-1] = ip", "def attach_public_ip(self, name=None, ip=None):\n raise NotImplementedError", "def ip(self, ip):\n self._ip = ip\n return self", "def register_instance(StackId=None, Hostname=None, PublicIp=None, PrivateIp=None, RsaPublicKey=None, RsaPublicKeyFingerprint=None, InstanceIdentity=None):\n pass", "def add_ip(uid, ip_addrs, ip_port, server_user, server_password, description):\r\n session = tables.get_session()\r\n if session is None:\r\n return False\r\n res = False\r\n try:\r\n ip_table = IpAddrs()\r\n if not ip_table.check_exist_by_ip(ip_addrs, uid, session):\r\n res = ip_table.insert_ip_by_user(ip_addrs, ip_port, server_user, server_password, description, uid, session)\r\n session.commit()\r\n except SQLAlchemyError as err:\r\n LOGGER.error('Insert new ip failed: %s', err)\r\n return res\r\n finally:\r\n session.close()\r\n return res", "def add_host(self, name, ip):\n rdataa = dns.rdata.from_text(dns.rdataclass.IN,dns.rdatatype.A,str(ip))\n rdataseta = dns.rdataset.from_rdata(300,rdataa)\n self.update.add(name,rdataseta)\n return dns.query.tcp(self.update,self.server_address)", "def add_ip(self, ip, mac):\n try:\n network_inter = self.session.query(self.networkInterfaces).filter(self.networkInterfaces.columns[\"macAddress\"] == mac.lower()).first()\n if not network_inter:\n return(False)\n except:\n raise MagdbError(\"Malformed MAC address\")\n\n insert_stmt = self.hostAddresses.insert(\n values={\n self.hostAddresses.columns[\"ipAddress\"]: ip,\n self.hostAddresses.columns[\"networkInterfaceId\"]: network_inter.id\n }\n )\n\n if self.session.execute(insert_stmt):\n return(True)", "def request_new_ip(self, mac):\n self.execute_script('new_ip', mac)", "def change_ip(self, address: int) -> None:\n self.regs[\"ip\"].write(address)", "def jmp(self, addr):\n\n self.reg.ip = addr", "def add_remote_duplicate_entry(self, ip):\n site2 = Session(SITE2_URL, SITE2_LOGIN, SITE2_PASSWORD)\n resp = site2.login()\n self.assertTrue(resp.ok)\n\n tenant = Tenant('intersite-testsuite-remote')\n l3out = OutsideL3('l3out', tenant)\n other_epg = OutsideEPG('other', l3out)\n subnet = OutsideNetwork(ip, other_epg)\n subnet.ip = ip + '/32'\n\n resp = tenant.push_to_apic(site2)\n self.assertTrue(resp.ok)", "def register(self, yoip):\r\n self.prints( \"Trying to acquire YO Address %s\" % yoip, 4)\r\n\r\n # This part is a little hacky and important to understand. Scapy, and the\r\n # computer for that matter, do not know which interface is associated with our YO address.\r\n # Since we want to be able to send packets anywhere on the network, we need to find the\r\n # interface that will allow us to do so (and it's MAC address, for ARP packets). 
Usually,\r\n # it's eth0, but not necessarily. Hence, we acquire the correct interface by assuming that\r\n # 0.0.0.0 is an IPv4 address that will always have the interface set up correctly. If you'd\r\n # like to see what I'm talking about, use scapy's conf.route.route and see the routing table.\r\n # So, TLDR - We ge the correct MAC address.\r\n\r\n # Ask who-has our YO address\r\n pkt = Ether(src=self.src_mac, dst=ETHER_BROADCAST) / YOARP(op=\"who-has\", hwsrc=self.src_mac, psrc=\"0.0\", pdst=yoip)\r\n\r\n res = srp1(pkt,\r\n iface=self.iface,\r\n timeout=0.2,\r\n verbose=0,\r\n retry=1,\r\n nofilter=1)\r\n\r\n if res is not None:\r\n # if (self._convert_yoip_to_num(res[YOARP].psrc) == yoip):\r\n self.prints( \"Error! YO Address %s is present on network, acquisition failed!\" % yoip, 4)\r\n return False\r\n\r\n # If no one answered the ARP question we can assume no one has our YO address!\r\n self.yo_addr = yoip\r\n register_yoip(yoip)\r\n\r\n\r\n\r\n # Announce in broadcast, I'm the king of this castle!\r\n arp_notification = Ether(src=self.src_mac, dst=\"ff:ff:ff:ff:ff:ff\") / YOARP(psrc=self.yo_addr, op=2, hwsrc=self.src_mac)\r\n\r\n # sendp(arp_notification, verbose=0)\r\n self.L2socket.send(arp_notification)\r\n\r\n self.prints( \"Successfully acquired address %s and broadcast it to network!\" % yoip, 4)\r\n return True", "def add_ip_to_input(self, ip, loggly_input, device_name=None):\n\n return self.add_device_to_input(LogglyDevice({'ip': ip}), loggly_input, device_name)", "def JP_addr(self, addr):\n\t\tself.IP = addr", "def add_remote_duplicate_entry(self, ip):\n site2 = Session(SITE2_URL, SITE2_LOGIN, SITE2_PASSWORD)\n resp = site2.login()\n self.assertTrue(resp.ok)\n\n tenant = Tenant('intersite-testsuite-remote')\n l3out = OutsideL3('l3out1', tenant)\n other_epg = OutsideEPG('other', l3out)\n subnet = OutsideNetwork(ip, other_epg)\n subnet.ip = ip + '/32'\n\n resp = tenant.push_to_apic(site2)\n self.assertTrue(resp.ok)", "def add_virtualip(self, vip):\n return self.manager.add_virtualip(self, vip)", "def setIP( self, intf, ip, prefixLen=8 ):\n ipSub = '%s/%d' % ( ip, prefixLen )\n result = self.cmd( 'ifconfig', intf, ipSub, 'up' )\n self.ips[ intf ] = ip\n return result", "def known_ip(ip=DEFAULT_IP):\r\n tunnel(ip)", "def register(self, service_name, service_addr, service_ttl):\n raise NotImplementedError", "def create_public_ip(self):\n raise NotImplementedError", "def post_instance_ip_create(self, resource_dict):\n pass", "def register_router(self, hostname, expire=-1):" ]
[ "0.7263847", "0.6985573", "0.6919389", "0.6659432", "0.6566385", "0.62973195", "0.62700164", "0.62700164", "0.6252467", "0.61964816", "0.6180218", "0.6099269", "0.6095723", "0.60725296", "0.5906903", "0.5898586", "0.5887893", "0.5861625", "0.5840779", "0.5830391", "0.5815397", "0.57938874", "0.57806367", "0.5755528", "0.5755171", "0.5711298", "0.5670644", "0.5644872", "0.5643586", "0.5640391" ]
0.86831564
0
Unregister the given IP.
def ip_drop(self, ip=None):
        if ip is None:
            self.request('/v1.1/unregister', 'POST')
        else:
            self.request('/v1.1/unregister/%s' % ip, 'POST')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove(ip):\n return __apf_cmd(\"-u {}\".format(ip))", "def deregister_elastic_ip(ElasticIp=None):\n pass", "def remove_ip(enode, portlbl, addr, shell=None):\n assert portlbl\n assert ip_interface(addr)\n port = enode.ports[portlbl]\n\n cmd = 'ip addr del {addr} dev {port}'.format(addr=addr, port=port)\n response = enode(cmd, shell=shell)\n assert not response", "def delete(self, ip): # pylint: disable=invalid-name\n return self.request(\"DELETE\", ip)", "def delete_public_ip(self, ip=None):\n raise NotImplementedError", "def remove_ip(self, ip_id):\n ip_id = ' \"IpAddressResourceId\": %s' % ip_id\n json_scheme = self.gen_def_json_scheme('SetRemoveIpAddress', ip_id)\n json_obj = self.call_method_post(method='SetRemoveIpAddress', json_scheme=json_scheme)\n pprint(json_obj)\n return True if json_obj['Success'] is True else False", "def remover_ip(self, id_equipamento, id_ip):\n\n if not is_valid_int_param(id_equipamento):\n raise InvalidParameterError(\n u'O identificador do equipamento é inválido ou não foi informado.')\n\n if not is_valid_int_param(id_ip):\n raise InvalidParameterError(\n u'O identificador do ip é inválido ou não foi informado.')\n\n url = 'ip/' + str(id_ip) + '/equipamento/' + str(id_equipamento) + '/'\n\n code, xml = self.submit(None, 'DELETE', url)\n\n return self.response(code, xml)", "def unblock_ip(ip, logger, dashboard_log, firewall_ip_and_port):\n try:\n request = requests.delete(f\"http://{firewall_ip_and_port}/firewall/{ip}\")\n if not request.ok:\n logger.error(f\"Unblocking IP {ip} was unsuccessful. Code {request.status_code}\")\n dashboard_log.append({\"message\": f\"Unblocking IP {ip} was unsuccessful. Code {request.status_code}\",\n \"time\": time.time()})\n return False\n return True\n except requests.exceptions.ConnectionError as e:\n logger.error(f\"Can't connect to firewall wrapper. 
{e}\")\n dashboard_log.append({\"message\": \"Can't connect to firewall wrapper.\",\n \"time\": time.time()})\n return False", "def detach_public_ip(self, name=None, ip=None):\n raise NotImplementedError", "def delete_virtualip(self, vip):\n return vip.delete()", "async def unlight(self, ip: str) -> None:\n miner = self.miners[ip]\n await miner.unlight()", "def unregister(self, service_name, service_addr):\n raise NotImplementedError", "def del_ip(ip_list, interrupted_ip):\n ip_index = ip_list.index(interrupted_ip)\n del ip_list[ip_index]\n return ip_list", "def delete_virtualip(self, vip):\n return self.manager.delete_virtualip(self, vip)", "def delete_device_by_ip(self, ip):\n\n self.delete_device(self.get_device_by_ip(ip))", "def unregister_user(self, userID: str):\n requests.post('https://' + self.serverIp + '/unregister/' + userID, verify=False)", "def unregister(url):\n return Client.get_client().unregister(url)", "def disassociate_elastic_ip(ElasticIp=None):\n pass", "def delete_vip(self, vip):\r\n return self.delete(self.vip_path % (vip))", "def detach_external_ip(self, ip: Union[dto.ExternalIp, str]) -> dto.ExternalIp:\n raise errors.UnsupportedOperationError(\n \"Operation not supported for provider '{}'\".format(self.provider_name)\n )", "def delete_floatingip(self, floatingip):\r\n return self.delete(self.floatingip_path % (floatingip))", "def unregister(self):\n assert self.state == State.SHUTDOWN\n del self._proto[self.dest_addr]", "def unregister(self, target, hostname, listener_type):", "def delIPfromPool(self,ippool_name,ips):\n self.__delIPfromPoolCheckInput(ippool_name,ips)\n ippool_obj=ippool_main.getLoader().getIPpoolByName(ippool_name)\n self.__delIPfromPoolDB(ippool_obj.getIPpoolID(),ips)\n self.__reloadIPpool(ippool_obj.getIPpoolID())", "def RoutingInterfaceNotificationUnregister(self, request, timeout, metadata=None, with_call=False, protocol_options=None):\n raise NotImplementedError()", "def unregister_router(self, hostname):", "def RoutingInterfaceNotificationUnregister(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "def _unregister(self):\r\n if hasattr(self, '_registered') and self._registered:\r\n self._conn.unregisterInterface(self._iTag, self)\r\n self._registered = False", "def unassign_global_ip(self, global_ip_id):\r\n return self.client['Network_Subnet_IpAddress_Global'].unroute(\r\n id=global_ip_id)", "def remove_ipv4_address(self, net_interface, address):\n self._runner.run('ip addr del %s dev %s' % (address, net_interface))" ]
[ "0.67559963", "0.6748413", "0.6658068", "0.6620643", "0.63501626", "0.63131154", "0.6244726", "0.62287134", "0.6226786", "0.6176832", "0.61236423", "0.6076246", "0.6064919", "0.6024129", "0.60091347", "0.60011864", "0.5969445", "0.5937021", "0.58520246", "0.58340704", "0.5798709", "0.5778021", "0.57721686", "0.574907", "0.570556", "0.5686974", "0.56742257", "0.5616875", "0.55765986", "0.55764043" ]
0.8101048
0
Associate an instance with an endpoint.
def associate(self, endpoint_name=None, instance_id=None):
        if instance_id is None:
            raise Exception("Instance required!")
        if endpoint_name is None:
            self.request('/v1.1/endpoint/instances/%s' % instance_id, 'POST')
        else:
            self.request('/v1.1/endpoints/%s/instances/%s' %
                         (endpoint_name, instance_id), 'POST')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def register_endpoint(self, **kwargs):\n self._database.update('endpoint', kwargs, kwargs, upsert=True)", "def add_endpoint(self, endpoint, **kwargs):\n endpoint.arguments = kwargs\n self.endpoints.append(endpoint)", "def endpoint_create(self, endpoint_name=None, config=None):\n if config is None:\n raise Exception(\"Config required!\")\n if endpoint_name is None:\n self.request('/v1.1/endpoint', 'PUT', body=config)\n else:\n self.request('/v1.1/endpoints/%s' % endpoint_name, 'PUT', body=config)", "def add_endpoint(self, endpoint):\n exists = self.get_endpoint(endpoint)\n if not exists:\n self.endpoints.append((endpoint, now()))", "def add_endpoint(self, endpoint):\n self._endpoints.append(endpoint)", "def add_endpoint(self, endpoint):\n name = endpoint.get_name()\n self._calls[name] = endpoint", "def add_endpoint(self, endpoint: USBEndpoint):\n self.endpoints[endpoint.get_identifier()] = endpoint\n endpoint.parent = self", "def setEndpoint(self, endpoint):\n self.__lockobj.acquire()\n self.__endpoints[endpoint.getEndpoint()] = endpoint\n self.__lockobj.acquire()", "def update_endpoint(self, endpoint):\n exists = self.get_endpoint(endpoint)\n if exists:\n self.delete_endpoint(endpoint)\n self.add_endpoint(endpoint)", "def create_endpoint(self, endpoint_id, endpoint_ref):\n raise exception.NotImplemented() # pragma: no cover", "def add_endpoint(self, resource, method, data):\n self.endpoints.setdefault(resource, {})[method] = data", "def update_endpoint(EndpointName=None, EndpointConfigName=None):\n pass", "def update_endpoint(self, endpoint_id, endpoint_ref):\n raise exception.NotImplemented() # pragma: no cover", "def endpoint_update(self, endpoint_name=None, config=None):\n if config is None:\n raise Exception(\"Config required!\")\n if endpoint_name is None:\n self.request('/v1.1/endpoint', 'POST', body=config)\n else:\n self.request('/v1.1/endpoints/%s' % endpoint_name, 'POST', body=config)", "def associate_resolver_endpoint_ip_address(ResolverEndpointId=None, IpAddress=None):\n pass", "def _attach_endpoints(self):\n for name, endpoint in inspect.getmembers(self):\n is_class = inspect.isclass(endpoint)\n is_subclass = is_class and issubclass(endpoint, self.Endpoint)\n not_endpoint = endpoint is not self.Endpoint\n\n if is_subclass and not_endpoint:\n endpoint_instance = endpoint(self.session)\n setattr(self, name.lower(), endpoint_instance)", "def associate_elastic_ip(ElasticIp=None, InstanceId=None):\n pass", "def add_endpoint_hit(db_session, endpoint, time, test, version, job_id):\n endpoint_id = db_session.query(Endpoint.id).filter(Endpoint.name == endpoint).first().id\n test_id = db_session.query(Test.id).filter(Test.name == test).first().id\n db_session.add(TestEndpoint(endpoint_id=endpoint_id, test_id=test_id, duration=time, app_version=version,\n travis_job_id=job_id))", "async def register_endpoint(\n self, callback: Callable[[Any, dict], Any], endpoint: str = None, **kwargs: Optional[Any]\n ) -> str:\n if endpoint is None:\n endpoint = self.name\n\n if self.AD.http is not None:\n return await self.AD.http.register_endpoint(callback, endpoint, self.name, **kwargs)\n else:\n self.logger.warning(\n \"register_endpoint for %s failed - HTTP component is not configured\",\n endpoint,\n )", "def endpointuuid(self, endpointuuid):\n\n self._endpointuuid = endpointuuid", "def secondary_endpoint(self, secondary_endpoint):\n\n self._secondary_endpoint = secondary_endpoint", "def register(self):\n self.endpoint = ES(self.url, auth=self.auth, port=self.port)\n return self.endpoint", "def 
__init__(self, endpoint_a, endpoint_b):\n self.endpoint_a = endpoint_a\n self.endpoint_b = endpoint_b", "def registerXMLRPC(self, unique_service_name, instance, endpoint):\n # TODO only set the ClientCert Handler if configured\n handler = XMLRPCHandler(unique_service_name)\n handler.connect(self._flaskapp.app, endpoint)\n handler.register_instance(instance)", "def register_instance(self, instance):\n self.instance = instance", "def disassociate(self, endpoint_name=None, instance_id=None):\n if instance_id is None:\n raise Exception(\"Instance required!\")\n if endpoint_name is None:\n self.request('/v1.1/endpoint/instances/%s' % instance_id, 'DELETE')\n else:\n self.request('/v1.1/endpoints/%s/instances/%s' %\n (endpoint_name, instance_id), 'DELETE')", "def _associate_floating_ip(self, context, domain_id, extra, floating_ip_id, floating_ip, port_id):\n\n addresses = [{\n 'version': 4,\n 'address': floating_ip,\n }]\n try:\n names = self._create(context=context,\n addresses=addresses,\n name_format=cfg.CONF[self.name].format,\n extra=extra,\n domain_id=domain_id,\n managed_extra='portid:%s' % (port_id),\n resource_type='a:floatingip',\n resource_id=floating_ip_id)\n except (designate.exceptions.DuplicateRecord, CirrusRecordExists):\n LOG.warn('Could not create record for %s using default format, '\n 'trying fallback format' % (extra['instance_name']))\n names = self._create(context=context,\n addresses=addresses,\n name_format=cfg.CONF[self.name].format_fallback,\n extra=extra,\n domain_id=domain_id,\n managed_extra='portid:%s' % (port_id),\n resource_type='a:floatingip',\n resource_id=floating_ip_id)\n LOG.info(\"Created %s to point at %s\" % (','.join(names), floating_ip))", "def create_endpoint(EndpointName=None, EndpointConfigName=None, Tags=None):\n pass", "def add_clustering_endpoint(self, endpoint_id, saved_model_id):\n self.settings[\"endpoints\"].append({\n \"id\" : endpoint_id,\n \"type\" : \"STD_CLUSTERING\",\n \"modelRef\": saved_model_id\n })", "def add_prediction_endpoint(self, endpoint_id, saved_model_id):\n self.settings[\"endpoints\"].append({\n \"id\" : endpoint_id,\n \"type\" : \"STD_PREDICTION\",\n \"modelRef\": saved_model_id\n })" ]
[ "0.7313337", "0.69591093", "0.69211304", "0.6877627", "0.6833103", "0.68066996", "0.6666065", "0.65525895", "0.6445415", "0.6299226", "0.62065667", "0.61701995", "0.61642414", "0.61555827", "0.6136932", "0.61315817", "0.6085916", "0.60416996", "0.6022668", "0.59917736", "0.59904706", "0.5920565", "0.5915804", "0.5915503", "0.5866817", "0.58080834", "0.5804834", "0.5795385", "0.5778466", "0.57558095" ]
0.83996296
0
Disassociate an instance from an endpoint.
def disassociate(self, endpoint_name=None, instance_id=None):
        if instance_id is None:
            raise Exception("Instance required!")
        if endpoint_name is None:
            self.request('/v1.1/endpoint/instances/%s' % instance_id, 'DELETE')
        else:
            self.request('/v1.1/endpoints/%s/instances/%s' %
                         (endpoint_name, instance_id), 'DELETE')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def disassociate_elastic_ip(ElasticIp=None):\n pass", "def delete_endpoint(self, endpoint):\n exists = self.get_endpoint(endpoint)\n if exists:\n self.endpoints.remove(exists)", "def disassociate_resolver_endpoint_ip_address(ResolverEndpointId=None, IpAddress=None):\n pass", "async def deregister_endpoint(self, handle: str) -> None:\n await self.AD.http.deregister_endpoint(handle, self.name)", "def delete_endpoint(self):\n logger.warning(f\"Deleting hosting endpoint '{self.endpoint_name}'...\")\n self._realtime_predictor.delete_endpoint()", "def delete_endpoint(EndpointName=None):\n pass", "def unassign_instance(InstanceId=None):\n pass", "def disassociate(self, floating_ip_id):\n update_dict = {'port_id': None}\n self.client.update_floatingip(floating_ip_id,\n {'floatingip': update_dict})", "def deregister_instance(InstanceId=None):\n pass", "def disassociate_member_account(memberAccountId=None):\n pass", "def delete_endpoint(self, endpoint_id):\n raise exception.NotImplemented() # pragma: no cover", "def remove_endpoint_from_sipserver(self, endpoint: str) -> None:", "def test_basic_remove_endpoint(self):\n mac, ip = self.setup_with_endpoint()\n time.sleep(2)\n\n self.assertTrue(self.verify_remote_site_has_entry(mac, ip, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg1'))\n self.remove_endpoint(mac, ip, 'intersite-testsuite', 'app', 'epg1')\n self.assertFalse(self.verify_remote_site_has_entry(mac, ip, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg1'))", "def disassociate_fleet(FleetName=None, StackName=None):\n pass", "def destroy(self):\r\n # TODO: WHY ???\r\n if not self._endpoint:\r\n return\r\n\r\n self._endpoint.unregisterProtocol(self)\r\n self._endpoint = None\r\n\r\n # Endpoint should destroy all connections\r\n assert len(self._connections) == 0\r\n\r\n super(Protocol, self).destroy()", "def _remove_endpoint(self, endpoint):\n logger.debug('')\n with self._endpoint_lock:\n count = len(self._endpoints)\n self._endpoints = [e for e in self._endpoints if e != endpoint]\n return (count != len(self._endpoints))", "def _disassociate_floating_ip(self, context, address, interface,\n instance_uuid):\n interface = CONF.public_interface or interface\n\n @utils.synchronized(six.text_type(address))\n def do_disassociate():\n # NOTE(vish): Note that we are disassociating in the db before we\n # actually remove the ip address on the host. We are\n # safe from races on this host due to the decorator,\n # but another host might grab the ip right away. 
We\n # don't worry about this case because the minuscule\n # window where the ip is on both hosts shouldn't cause\n # any problems.\n floating = objects.FloatingIP.disassociate(context, address)\n fixed = floating.fixed_ip\n if not fixed:\n # NOTE(vish): ip was already disassociated\n return\n if interface:\n # go go driver time\n self.l3driver.remove_floating_ip(address, fixed.address,\n interface, fixed.network)\n payload = dict(project_id=context.project_id,\n instance_id=instance_uuid,\n floating_ip=address)\n self.notifier.info(context,\n 'network.floating_ip.disassociate', payload)\n do_disassociate()", "def removeEndpoint(self, endpoint):\n # If endpoint not recognized, returns False, else True\n self.__lockobj.acquire()\n retval = False\n if endpoint in self.__endpoints.keys():\n del self.__endpoints[endpoint]\n retval = True\n self.__lockobj.acquire()\n return retval", "def delete_dev_endpoint(self):\n self.glue_engine.delete_dev_endpoint(EndpointName=self.dev_endpoint_name)", "def delete_endpoint_config(EndpointConfigName=None):\n pass", "def endpoint_absent(name, region=None, profile=None, interface=None, **connection_args):\n ret = {\n \"name\": name,\n \"changes\": {},\n \"result\": True,\n \"comment\": 'Endpoint for service \"{}\"{} is already absent'.format(\n name,\n ', interface \"{}\",'.format(interface) if interface is not None else \"\",\n ),\n }\n\n # Check if service is present\n endpoint = __salt__[\"keystone.endpoint_get\"](\n name, region, profile=profile, interface=interface, **connection_args\n )\n if not endpoint:\n return ret\n else:\n if __opts__.get(\"test\"):\n ret[\"result\"] = None\n ret[\"comment\"] = 'Endpoint for service \"{}\" will be deleted'.format(name)\n return ret\n # Delete service\n __salt__[\"keystone.endpoint_delete\"](\n name, region, profile=profile, interface=interface, **connection_args\n )\n ret[\"comment\"] = 'Endpoint for service \"{}\"{} has been deleted'.format(\n name,\n ', interface \"{}\",'.format(interface) if interface is not None else \"\",\n )\n ret[\"changes\"][\"endpoint\"] = \"Deleted\"\n return ret", "def deregister_elastic_ip(ElasticIp=None):\n pass", "def delete_handler(event, context):\n delete_endpoint_config(event)", "def test_basic_remove_one_of_multiple_endpoint(self):\n mac1, ip1 = self.setup_with_endpoint()\n mac2 = '00:11:22:33:33:35'\n ip2 = '3.4.3.6'\n self.add_endpoint(mac2, ip2, 'intersite-testsuite', 'app', 'epg')\n time.sleep(2)\n\n self.assertTrue(self.verify_remote_site_has_entry(mac1, ip1, 'intersite-testsuite',\n 'l3out', 'intersite-testsuite-app-epg'))\n self.assertTrue(self.verify_remote_site_has_entry(mac2, ip2, 'intersite-testsuite',\n 'l3out', 'intersite-testsuite-app-epg'))\n\n self.remove_endpoint(mac1, ip1, 'intersite-testsuite', 'app', 'epg')\n self.assertFalse(self.verify_remote_site_has_entry(mac1, ip1, 'intersite-testsuite',\n 'l3out', 'intersite-testsuite-app-epg'))\n self.assertTrue(self.verify_remote_site_has_entry(mac2, ip2, 'intersite-testsuite',\n 'l3out', 'intersite-testsuite-app-epg'))", "def test_basic_remove_one_of_multiple_endpoint(self):\n mac1, ip1 = self.setup_with_endpoint()\n mac2 = '00:11:22:33:33:35'\n ip2 = '3.4.3.6'\n self.add_endpoint(mac2, ip2, 'intersite-testsuite', 'app', 'epg1')\n time.sleep(2)\n\n self.assertTrue(self.verify_remote_site_has_entry(mac1, ip1, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg1'))\n self.assertTrue(self.verify_remote_site_has_entry(mac2, ip2, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg1'))\n\n 
self.remove_endpoint(mac1, ip1, 'intersite-testsuite', 'app', 'epg1')\n self.assertFalse(self.verify_remote_site_has_entry(mac1, ip1, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg1'))\n self.assertTrue(self.verify_remote_site_has_entry(mac2, ip2, 'intersite-testsuite', 'l3out',\n 'intersite-testsuite-app-epg1'))", "def endpoint_deactivate(login_manager: LoginManager, *, endpoint_id: str) -> None:\n transfer_client = login_manager.get_transfer_client()\n res = transfer_client.endpoint_deactivate(endpoint_id)\n display(res, text_mode=TextMode.text_raw, response_key=\"message\")", "def deregister_rds_db_instance(RdsDbInstanceArn=None):\n pass", "def remove(self, *args, **kwargs):\n raise InvalidEndpointOperation(\n 'Not a valid operation on this endpoint.'\n )", "def remove(self, *args, **kwargs):\n raise InvalidEndpointOperation(\n 'Not a valid operation on this endpoint.'\n )", "def disassociate(self, community, member):\n if __debug__:\n from community import Community\n assert isinstance(community, Community)\n assert isinstance(member, Member)\n self._associations.remove((community.cid, member))" ]
[ "0.682443", "0.6806596", "0.6765547", "0.65948784", "0.64978296", "0.64549416", "0.64292914", "0.62376964", "0.619817", "0.6015638", "0.6000786", "0.59864897", "0.5925916", "0.5924013", "0.58500004", "0.5841745", "0.5825078", "0.5784667", "0.57450336", "0.5740795", "0.56936324", "0.5669763", "0.5634763", "0.5631051", "0.5627525", "0.56209916", "0.56173795", "0.56004107", "0.56004107", "0.5584458" ]
0.870822
0
Draw a bootstrap sample from a 1D data set.
def draw_bs_sample(data):
    return np.random.choice(data, size=len(data))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_bs_sample(data):\n return rg.choice(data, size=len(data))", "def bootstrap(X):\n return X[np.random.choice(list(range(X.shape[0])), size=X.shape[0]), :]", "def bootstrap_sample(data):\n return [random.choice(data) for _ in data]", "def standard_bootstrap(dataset):\n randseed=np.random.randint(0,10000)\n np.random.seed(randseed)\n \n n = dataset.shape[0]\n b = np.random.randint(0, high=n-1, size=n)\n return dataset[b]", "def sample(self, shape=(1,)):\n pass", "def bootstrap(data):\r\n size = int(len(data))\r\n train = resample(data, n_samples=size, replace=True)\r\n test = data.drop(train.index) \r\n return train[encoded_features], train[target], test[encoded_features], test[target]", "def bootstrap_sample_from_data(data, weights=None, seed=0):\n # Set up the random number generator\n RNG = np.random.default_rng(seed)\n N = data.shape[0]\n\n # Set up weights\n if weights is not None:\n cutoffs = np.cumsum(weights)\n else:\n cutoffs = np.linspace(0, 1, N)\n\n # Draw random indices\n indices = np.searchsorted(cutoffs, RNG.uniform(size=N))\n\n # Create a bootstrapped sample\n new_data = deepcopy(data[indices,])\n return new_data", "def bootstrap_sample_generator_1D(samples: Union[NumpyFloatArray, NumpyIntArray]):\n n_samples = samples.shape[0]\n\n while True:\n _indices = np.random.randint(0, high=n_samples, size=n_samples)\n\n yield samples[_indices]", "def bootstrap_replicate_1d(data, func):\r\n bs_sample = np.random.choice(data, len(data))\r\n return func(bs_sample)", "def sample_and_plot(self):\n fig = plt.figure()\n ax = plt.axes(projection = '3d')\n ax.plot_surface(self.X, self.Y, self.sample(), cmap = plt.cm.jet, rstride = 2, cstride = 2, linewidth = 1)\n plt.show()", "def bootstrap_replicate_1d(data, func):\n bs_sample = np.random.choice(data, len(data))\n return func(bs_sample)", "def bootstrap_replicate_1d(data, func):\n bs_sample = np.random.choice(data, len(data))\n\n return func(bs_sample)", "def _get_bootstrap_sample(x, y, num_reps):\r\n combined = array(list(x) + list(y))\r\n total_obs = len(combined)\r\n num_x = len(x)\r\n for i in range(num_reps):\r\n # sampling with replacement\r\n indices = randint(0, total_obs, total_obs)\r\n sampled = combined.take(indices)\r\n # split into the two populations\r\n sampled_x = sampled[:num_x]\r\n sampled_y = sampled[num_x:]\r\n yield sampled_x, sampled_y", "def dataset_augmentation(data_start, bootstrapping = 1, epurate = 1, shuffle = True):\n data = data_start\n for ii in range(bootstrapping):\n data = data.append(data_start.apply(bootstrap_sample, axis=1), ignore_index=True)\n\n#Bugged version that weirdly works well....\n# for ii in range(bootstrapping):\n # data = data.append(bootstrap_sample(data_start), ignore_index=True)\n\n for ii in range(epurate):\n data = data.append(data_start.apply(epurate_sample, axis=1), ignore_index=True)\n\n # Shuffling (Important)\n if shuffle == True:\n data = data.sample(frac=1)\n return data", "def bootstrap_data(self):\n for i in range(self.bootstraps):\n df_i = self.training_df.groupby(\n self.random_effect, group_keys=False\n ).apply(\n lambda x: x.sample(len(x), replace=True)\n )\n self.models.append(self.convert(df=df_i))", "def bootstrap_resample(labels):\n idxs = np.arange(len(labels))\n num_labels = max(labels) + 1\n bootstrap_idxs = np.zeros_like(idxs)\n ptr = 0\n for i in range(num_labels):\n strat = idxs[labels == i]\n bootstrap_idxs[ptr:ptr + len(strat)] = np.random.choice(strat, len(strat), replace=True)\n ptr += len(strat)\n return bootstrap_idxs", "def 
show_random_samples(batch, rows=5, cols=5, width=None, height=None, shuffle=True):\n if width is None: width = 1.5*cols\n if height is None: height = 1.5*rows\n\n if rows * cols == 1:\n axes = [plt.subplots(rows, cols, figsize=(width, height))[1]]\n else:\n axes = plt.subplots(rows, cols, figsize=(width, height))[1].flatten()\n\n # by default batch_size=1 in DataLoader\n for ax, x in zip(axes, DataLoader(TensorDataset(batch), shuffle=shuffle)):\n ax.imshow(x[0].reshape(batch.shape[-2:]), cmap=\"gray\")\n ax.axis('off')", "def draw_sample(self, samp_pts=None, means=None, covar=None):\n return self.gp_core.draw_samples(1, X_test=samp_pts, mean_vals=means,\n covar=covar).ravel()", "def eg_pre():\n\n print(\"\\teg3\")\n\n d = 1\n\n for _ in range(10):\n t1 = []\n t2 = []\n\n for _ in range(32):\n t1.append(utils.gaussian(10, 1))\n t2.append(utils.gaussian(d * 10, 1))\n\n print(\"\", \"\", d, d < 1.1, stats.bootstrap(\n t1, t2), stats.bootstrap(t1, t1), sep=\"\\t\")\n\n d = round(d + .05, 2)", "def bootstrap(data,func,nboot):\n\n n = len(data)\n resamples = np.array([[random.choice(data) for i in range(n)]\n for j in range(nboot)])\n return np.apply_along_axis(func, 1, resamples)", "def compute_bootstrapped_sample(X_table, y_table):\n n = len(X_table)\n X_sample = []\n y_sample = []\n for _ in range(n):\n rand_index = random.randrange(0, n)\n X_sample.append(X_table[rand_index])\n y_sample.append(y_table[rand_index])\n return X_sample, y_sample", "def bagged_indicator(x_value, sample, b_iterations=50):\n\n predictions = np.ones(b_iterations) * np.nan\n for i in range(b_iterations):\n # Draw a new bootstrap sample.\n bootstrap_sample = (\n np.random.choice(sample, size=(sample.size,), replace=True)\n )\n # Calculate the bootstrap prediction.\n y_bootstrap = bootstrap_sample.mean()\n predictions[i] = indicator(x_value, y_bootstrap)\n return predictions.mean()", "def bootstrap(data, num_samples, statistic, alpha):\n n = len(data)\n idx = npr.randint(0, n, (num_samples, n))\n samples = x[idx]\n stat = np.sort(statistic(samples, 1))\n return (stat[int((alpha/2.0)*num_samples)],\n stat[int((1-alpha/2.0)*num_samples)])", "def sample(self, size=1):\n pass", "def generate_samples(self, n_samples):", "def generate_samples(self, n_samples):", "def subsample(self, dataset):\n sample_idx = np.random.choice(\n dataset.shape[0], self.sample_size, replace=True)\n sample = dataset[sample_idx,...]\n return sample", "def generate_bootstrap_samples(num_samples, test_universe, test_set_sizes):\n for sample_idx, sample_size in zip(range(num_samples), cycle(test_set_sizes)):\n yield random.sample(test_universe, sample_size)", "def bootstrap_mean(x, B=10000, alpha=0.05, plot=False):\n\n # Deterministic things\n n = len(x) # sample size\n orig = x.mean() # sample mean\n se_mean = x.std()/np.sqrt(n) # standard error of the mean\n qt = stats.t.ppf(q=1 - alpha/2, df=n - 1) # Student quantile\n\n # Generate boostrap distribution of sample mean\n xboot = boot_matrix(x, B=B)\n sampling_distribution = xboot.mean(axis=1)\n\n # Standard error and sample quantiles\n se_mean_boot = sampling_distribution.std()\n quantile_boot = np.percentile(sampling_distribution, q=(100*alpha/2, 100*(1-alpha/2)))\n\n # # RESULTS\n # print(\"Estimated mean:\", orig)\n # print(\"Classic standard error:\", se_mean)\n # print(\"Classic student c.i.:\", orig + np.array([-qt, qt])*se_mean)\n # print(\"\\nBootstrap results:\")\n # print(\"Standard error:\", se_mean_boot)\n # print(\"t-type c.i.:\", orig + np.array([-qt, qt])*se_mean_boot)\n # 
print(\"Percentile c.i.:\", quantile_boot)\n # print(\"Basic c.i.:\", 2*orig - quantile_boot[::-1])\n\n if plot:\n plt.hist(sampling_distribution, bins=\"fd\")\n # return sampling_distribution\n return np.round(orig, decimals=2), np.round(quantile_boot, decimals=2)", "def plot_sample(x, y, axis):\n img = x.reshape(96, 96)\n axis.imshow(img, cmap='gray')\n axis.scatter(y[0::2] * 48 + 48, y[1::2] * 48 + 48, marker='x', s=10)" ]
[ "0.68372715", "0.65911174", "0.65090394", "0.6487167", "0.61641103", "0.6111652", "0.60635245", "0.5922539", "0.5889676", "0.5822204", "0.58034134", "0.5802497", "0.5789218", "0.56030756", "0.5595742", "0.5585387", "0.5578551", "0.5560512", "0.55390865", "0.5533374", "0.55205745", "0.54664415", "0.5454008", "0.5448732", "0.5444255", "0.5444255", "0.54435587", "0.543657", "0.54326123", "0.5405288" ]
0.7062562
0
Draw a pairs bootstrap sample.
def draw_bs_pairs(x, y):
    inds = np.arange(len(x))
    bs_inds = draw_bs_sample(inds)
    return x[bs_inds], y[bs_inds]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_bootstrap_pairs(x, y, func, size=1):\n\n # Set up array of indices to sample from: inds\n inds = np.arange(len(x))\n\n # Initialize replicates: bs_replicates\n bs_replicates = np.empty(size)\n\n # Generate replicates\n for i in range(size):\n bs_inds = np.random.choice(inds, len(inds))\n bs_x, bs_y = x[bs_inds], y[bs_inds]\n bs_replicates[i] = func(bs_x, bs_y)\n\n return bs_replicates", "def draw_bs_sample(data):\n return np.random.choice(data, size=len(data))", "def sample_and_plot(self):\n fig = plt.figure()\n ax = plt.axes(projection = '3d')\n ax.plot_surface(self.X, self.Y, self.sample(), cmap = plt.cm.jet, rstride = 2, cstride = 2, linewidth = 1)\n plt.show()", "def draw_bs_sample(data):\n return rg.choice(data, size=len(data))", "def sample_plot(self, sample_pre, sample_post, circle_size=10.0):\n colored_samples = [[] for i in range(5)]\n for inputs, label in zip(sample_pre, sample_post):\n x, y = self.attacker_position(inputs)\n colored_samples[label].append((x, y))\n for label, points in enumerate(colored_samples):\n plt.scatter([x for x, y in points], [y for x, y in points],\n color=self.color(int(label)), s=circle_size)", "def _get_bootstrap_sample(x, y, num_reps):\r\n combined = array(list(x) + list(y))\r\n total_obs = len(combined)\r\n num_x = len(x)\r\n for i in range(num_reps):\r\n # sampling with replacement\r\n indices = randint(0, total_obs, total_obs)\r\n sampled = combined.take(indices)\r\n # split into the two populations\r\n sampled_x = sampled[:num_x]\r\n sampled_y = sampled[num_x:]\r\n yield sampled_x, sampled_y", "def draw_samplers(self):\n raise NotImplementedError(\" The draw_samplers() method has not been implemented \")", "def draw_bs_pairs(x, y, func, size=1):\n\n # Set up array of indices to sample from: inds\n inds = np.arange(len(x))\n\n # Initialize replicates\n bs_slope_reps = np.empty(size)\n bs_intercept_reps = np.empty(size)\n\n # Generate replicates\n for i in range(size):\n bs_inds = np.random.choice(inds,size=len(inds))\n bs_x, bs_y = x[bs_inds],y[bs_inds]\n bs_replicates[i] = func(bs_x,bs_y)\n\n return bs_replicates", "def plot_samples(s):\r\n assert len(s[0, :]) >= 2, ('The Phase space dimensions are less than two.', ' Need at least two to plot.')\r\n fig = plt.figure(1)\r\n if len(s[0, :]) >= 3:\r\n ax = fig.add_subplot(111, projection='3d')\r\n ax.scatter(s[:, 0], s[:, 1], s[:, 2])\r\n fig = plt.figure(2)\r\n plt.scatter(s[:, 0], s[:, 1])\r\n plt.show()", "def draw_bs_pairs(x, y, func, size=1):\n\n # Set up array of indices to sample from: inds\n inds = np.arange(len(x))\n\n # Initialize replicates: bs_replicates\n bs_replicates = np.empty(size)\n\n # Generate replicates\n for i in range(size):\n bs_inds = np.random.choice(inds,size=len(inds))\n bs_x, bs_y = x[bs_inds],y[bs_inds]\n bs_replicates[i] = func(bs_x,bs_y)\n\n return bs_replicates", "def draw_bootstrap_pairs_linreg(x, y, size=1):\n\n # Set up array of indices to sample from: inds\n inds = np.arange(len(x))\n\n # Initialize replicates: bs_slope_reps, bs_intercept_reps\n bs_slope_reps = np.empty(size)\n bs_intercept_reps = np.empty(size)\n\n # Generate replicates\n for i in range(size):\n bs_inds = np.random.choice(inds, size=len(inds))\n bs_x, bs_y = x[bs_inds], y[bs_inds]\n bs_slope_reps[i], bs_intercept_reps[i] = np.polyfit(bs_x, bs_y, 1)\n\n return bs_slope_reps, bs_intercept_reps", "def draw_sample(self, samp_pts=None, means=None, covar=None):\n return self.gp_core.draw_samples(1, X_test=samp_pts, mean_vals=means,\n covar=covar).ravel()", "def 
bootstrap_two_sample_diff_in_proportions(p_group_1_count, p_group_1_sample_size, p_group_2_count, p_group_2_sample_size, p_size=1, p_alpha=.05):\n\n\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n original_group_1_proportion = p_group_1_count / p_group_1_sample_size \n original_group_2_proportion = p_group_2_count / p_group_2_sample_size \n \n original_proportion_diff = original_group_1_proportion - original_group_2_proportion\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \n # Create an array of zeros and ones with the same proportions as group_1\n\n arr_1 = np.array([0] * (p_group_1_sample_size - p_group_1_count) + [1] * p_group_1_count)\n\n np.random.shuffle(arr_1)\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ \n # Create an array of zeros and ones with the same proportions as group_2\n\n arr_2 = np.array([0] * (p_group_2_sample_size - p_group_2_count) + [1] * p_group_2_count)\n\n np.random.shuffle(arr_2)\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n # Initialize array of replicates: \n bs_replicates = np.empty(p_size)\n\n # Generate replicates\n for i in range(p_size):\n\n # Generate bootstrap sample for arr_1 and arr_2 \n bs_sample_arr_1 = np.random.choice(arr_1, len(arr_1))\n bs_sample_arr_2 = np.random.choice(arr_2, len(arr_2))\n \n # Get the proportions on the bootstrap samples \n bs_sample_arr_1_proportion = np.sum(bs_sample_arr_1) / len(bs_sample_arr_1)\n bs_sample_arr_2_proportion = np.sum(bs_sample_arr_2) / len(bs_sample_arr_2)\n\n # Add the diff of those proportions to the bs_replicates array:\n bs_replicates[i] = bs_sample_arr_1_proportion - bs_sample_arr_2_proportion\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n # Get the bootstrap mean and standard error\n bs_mean = np.mean(bs_replicates)\n bs_std = np.std(bs_replicates)\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n # Get the confidence interval\n lower_critical_value, upper_critical_value = get_two_tailed_critical_values(p_alpha = p_alpha)\n\n ci_lower, ci_upper = np.percentile(bs_replicates, [lower_critical_value*100, upper_critical_value*100])\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n Result = namedtuple(\n 'Result', [\n 'original_group_1_proportion', \n 'original_group_2_proportion', \n 'original_proportion_diff', \n 'bs_mean', \n 'bs_std', \n 'ci_lower',\n 'ci_upper',\n 'bs_replicates'\n ]\n )\n\n result = Result(\n original_group_1_proportion,\n original_group_2_proportion,\n original_proportion_diff,\n bs_mean,\n bs_std,\n ci_lower,\n ci_upper,\n bs_replicates\n )\n\n return result\n # -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", "def show_pair(data, labeled=True, ordering='channel_last'):\n xb, yb = data\n batch_size = len(xb)\n \n fig = plt.figure(figsize=(5 * 2, 5 * batch_size))\n for i in range(batch_size):\n fig.add_subplot(batch_size, 2, (2 * i) + 1) \n show_image(xb[i], ordering=ordering)\n \n fig.add_subplot(batch_size, 2, (2 * i) + 2) \n if yb[i] is None:\n y = 
np.zeros_like(xb[i])\n else:\n y = yb[i] \n\n if labeled:\n show_label(y, ordering=ordering)\n else:\n show_image(y, ordering=ordering)\n \n return fig", "def add_sample(self, stddev, corrcoef, *args, **kwargs):\n sc = self.ax.scatter(NP.arccos(corrcoef), stddev,\n *args, **kwargs) # (theta,radius)\n self.samplePoints.append(sc)\n return sc\n\n #l, = self.ax.plot(NP.arccos(corrcoef), stddev,\n # *args, **kwargs) # (theta,radius)\n #self.samplePoints.append(l)#\n #return l", "def add_sample(self, stddev, corrcoef, *args, **kwargs):\n sc = self.ax.scatter(NP.arccos(corrcoef), stddev,\n *args, **kwargs) # (theta,radius)\n self.samplePoints.append(sc)\n return sc\n\n #l, = self.ax.plot(NP.arccos(corrcoef), stddev,\n # *args, **kwargs) # (theta,radius)\n #self.samplePoints.append(l)#\n #return l", "def generate_samples(self, n_samples):", "def generate_samples(self, n_samples):", "def generate_random_scatter(x_range, w, b, k):\n\tx_1 = []\n\ty_1 = []\n\tx_2 = []\n\ty_2 = []\n\tfor i in range(k):\n\t\txx = random.random() * (x_range[1] - x_range[0]) + x_range[0]\n\t\tx_1.append(xx)\n\t\tamplitude = random.randint(4, 15)\n\t\tyy = w * xx + b + amplitude\n\t\ty_1.append(yy)\n\n\t\txx = random.random() * (x_range[1] - x_range[0]) + x_range[0]\n\t\tx_2.append(xx)\n\t\tamplitude = random.randint(4, 15)\n\t\tyy = w * xx + b - amplitude\n\t\ty_2.append(yy)\n\treturn x_1, y_1, x_2, y_2", "def eg_pre():\n\n print(\"\\teg3\")\n\n d = 1\n\n for _ in range(10):\n t1 = []\n t2 = []\n\n for _ in range(32):\n t1.append(utils.gaussian(10, 1))\n t2.append(utils.gaussian(d * 10, 1))\n\n print(\"\", \"\", d, d < 1.1, stats.bootstrap(\n t1, t2), stats.bootstrap(t1, t1), sep=\"\\t\")\n\n d = round(d + .05, 2)", "def drawSample(self, index, color):\n t = Trajectory.createFromTuples(self.graphs[index])\n l = t.sampleSegments(self.sample_number, include=self.include)\n for e in l:\n p = Point(*e, radius=5, conversion=False)\n p.show(self.context)", "def draw_sample(self, samp_pts=None, means=None, covar=None):\n raise NotImplementedError('Abstract Method')", "def generation(self,rounds):\n a = []\n b = []\n for i in range(rounds):\n self.fight()\n c = self.avgFitness()\n a.append(c[0])\n b.append(c[1])\n self.sort()\n self.cull()\n self.rePop()\n self.refresh()\n self.fight()\n self.sort()\n print self\n plt.scatter([x for x in range(len(a))],a,color = \"red\")\n plt.scatter([x for x in range(len(b))],b,color = \"green\")\n plt.show()", "def generation(self,rounds):\n a = []\n b = []\n for i in range(rounds):\n self.fight()\n c = self.avgFitness()\n a.append(c[0])\n b.append(c[1])\n self.sort()\n self.cull()\n self.rePop()\n self.refresh()\n self.fight()\n self.sort()\n print self\n plt.scatter([x for x in range(len(a))],a,color = \"red\")\n plt.scatter([x for x in range(len(b))],b,color = \"green\")\n plt.show()", "def bootstrap_sample_generator_3D(samples: Union[NumpyFloatArray, NumpyIntArray]):\n n_samples = samples.shape[1]\n array_shape = samples.shape[1:]\n num_cols = samples.shape[2]\n cols = np.arange(num_cols)\n\n while True:\n _indices = np.random.randint(0, high=n_samples, size=array_shape)\n\n yield samples[:, _indices, cols]", "def bootstrap_plot(\n series: Series,\n fig: Figure | None = None,\n size: int = 50,\n samples: int = 500,\n **kwds,\n) -> Figure:\n plot_backend = _get_plot_backend(\"matplotlib\")\n return plot_backend.bootstrap_plot(\n series=series, fig=fig, size=size, samples=samples, **kwds\n )", "def bootstrap_sample_generator_2D(samples: Union[NumpyFloatArray, NumpyIntArray]):\n n_samples = 
samples.shape[0]\n\n num_cols = samples.shape[1]\n cols = np.arange(num_cols)\n\n while True:\n # generate indices to pick N values from f_A, f_B and f_C_i\n _indices = np.random.randint(0, high=n_samples, size=samples.shape)\n\n yield samples[_indices, cols]", "def pickup_samples(pts1, pts2):\n\n #\n # Your code here\n #\n\n x = min(len(pts1), len(pts2))\n return np.random.choice(range(x), min_num_pairs(), replace=False)", "def draw_samples(nsubj, ngroups, split_method='default'):\n if split_method == 'default':\n if nsubj > 10 * ngroups:\n samples = split_group(nsubj, ngroups)\n else:\n samples = bootstrap_group(nsubj, ngroups)\n elif split_method == 'bootstrap':\n samples = bootstrap_group(nsubj, ngroups)\n elif split_method == '':\n samples = split_group(nsubj, ngroups)\n else:\n raise ValueError('unknown splitting method')\n\n return samples", "def viz_samples(data, trace, num_sweeps, K, viz_interval=3, figure_size=3, title_fontsize=20, marker_size=1.0, opacity=0.3, bound=20, colors=['#AA3377','#0077BB', '#EE7733', '#009988', '#BBBBBB', '#EE3377', '#DDCC77'], save_name=None):\n E_tau, E_mu, E_z = trace['E_tau'].cpu(), trace['E_mu'].cpu(), trace['E_z'].cpu()\n num_rows = len(data)\n num_cols = 2 + int((num_sweeps-1) / viz_interval)\n gs = gridspec.GridSpec(num_rows, num_cols)\n gs.update(left=0.0 , bottom=0.0, right=1.0, top=1.0, wspace=0, hspace=0)\n fig = plt.figure(figsize=(figure_size * num_cols, figure_size * num_rows))\n for row_ind in range(num_rows):\n ax = fig.add_subplot(gs[row_ind, 0])\n viz_gmm(ax, data[row_ind], K, marker_size, opacity, bound, colors, latents=None) ## visualize raw dataset in the 1st column\n if row_ind == 0:\n ax.set_title('Data', fontsize=title_fontsize)\n# col_ind = 1\n for col_ind in range(num_cols-1):\n sweep = col_ind * viz_interval\n ax = fig.add_subplot(gs[row_ind, col_ind+1])\n viz_gmm(ax, data[row_ind], K, marker_size, opacity, bound, colors, latents=(E_tau[sweep, row_ind], E_mu[sweep, row_ind], E_z[sweep, row_ind]))\n if row_ind == 0:\n if sweep == 0:\n ax.set_title('RWS', fontsize=title_fontsize)\n else:\n ax.set_title('sweep %d' % sweep, fontsize=title_fontsize)\n if save_name is not None:\n plt.savefig(save_name + '.svg', dpi=300)" ]
[ "0.6408204", "0.619511", "0.6144731", "0.6112099", "0.6074894", "0.6004757", "0.5883876", "0.57678866", "0.5728837", "0.5718084", "0.57075393", "0.5656741", "0.56540686", "0.55870754", "0.5527278", "0.5527278", "0.54886264", "0.54886264", "0.5477913", "0.5466753", "0.5445432", "0.5442295", "0.5418179", "0.5418179", "0.5401388", "0.539368", "0.5387475", "0.5362962", "0.5335158", "0.53214157" ]
0.625779
1
Draw bootstrap replicates of the mean from 1D data set.
def draw_bs_reps_mean(data, size=1):
    out = np.empty(size)
    for i in range(size):
        out[i] = np.mean(draw_bs_sample(data))
    return out
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_bs_reps(data, func, reps=1):\n bs_replicates = np.empty(reps)\n for i in range(reps):\n bs = np.random.choice(data, replace=True, size=len(data))\n bs_replicates[i] = np.mean(bs)\n return func(bs_replicates)", "def expanding_mean_1d_nb(a, minp=1):\n return rolling_mean_1d_nb(a, a.shape[0], minp=minp)", "def bootstrap_replicate_1d(data, func):\r\n bs_sample = np.random.choice(data, len(data))\r\n return func(bs_sample)", "def bootstrap_mean(x, n=100):\n out = []\n\n for i in range(n):\n idx = pd.Series(np.arange(len(x))).sample(frac=1.0, replace=True).values\n out.append(x[idx].mean(0))\n outm = np.stack(out)\n return outm.mean(0), outm.std(0)", "def get_mean(self):\r\n for i in range(1,len(self.data[0])):\r\n self.prom.append(np.mean(self.data[:,i]))", "def reconstructions_mean(self):\n self.assert_sampled()\n return [[j.mean().numpy() for j in i] for i in self._reconstructions]", "def eg_pre():\n\n print(\"\\teg3\")\n\n d = 1\n\n for _ in range(10):\n t1 = []\n t2 = []\n\n for _ in range(32):\n t1.append(utils.gaussian(10, 1))\n t2.append(utils.gaussian(d * 10, 1))\n\n print(\"\", \"\", d, d < 1.1, stats.bootstrap(\n t1, t2), stats.bootstrap(t1, t1), sep=\"\\t\")\n\n d = round(d + .05, 2)", "def _overlay_bubble_means(self, img, means):\n for (i, j) in product(*map(range, self.size)):\n i0, i1, j0, j1 = self.coords[i, j, :]\n img[i0:i1, j0:j1] = means[i, j]\n\n return img", "def bootstrap_replicate_1d(data, func):\n bs_sample = np.random.choice(data, len(data))\n return func(bs_sample)", "def bootstrap_replicate_1d(data, func):\n bs_sample = np.random.choice(data, len(data))\n\n return func(bs_sample)", "def bootstrap(data, iterations=10000):\n\n boot_mean = []\n\n for n in range(0, iterations):\n\n boot = resample(data, replace=True, n_samples=None,\n random_state=None)\n\n boot_mean.append(np.mean(boot))\n\n final_mean = np.mean(boot_mean)\n\n final_std = np.std(boot_mean, dtype=np.float64)\n\n return final_mean, final_std", "def sample_list2(data_list, rows=15, start_with=0, show_every=2, scale=4, fig_name=None, start_inx=0, n_class=5):\n input_cols = len(data_list[0]['input'])\n if input_cols == 1:\n input_names = ['input']\n elif input_cols == 2:\n input_names = ['input(cal)', 'input(non-cal)']\n cols = 4 + input_cols - 1\n # n_class = data_list[0]['pred'].shape[0]\n n_batch = len(data_list)\n _, ax = plt.subplots(rows, cols, figsize=[scale * cols, scale * rows])\n\n for ind in range(n_batch):\n # read data and calculate average precision\n input = data_list[ind]['input']\n # output = data_list[ind]['output']\n label = data_list[ind]['GT']\n pred = data_list[ind]['pred']\n\n # calculate F score and average precision\n # output = np.transpose(output, (1, 2, 0))\n # output = np.reshape(output, (-1, n_class))\n label_binary = label_binarize(label.flatten(), classes=range(n_class))\n pred_binary = label_binarize(pred.flatten(), classes=range(n_class))\n\n f_score = np.zeros(n_class, dtype=np.float32)\n slice_effect_class = 0\n for i in range(n_class):\n if np.sum(label_binary[:,i]) == 0:\n f_score[i] = 0.0\n else:\n slice_effect_class += 1\n f_score[i] = f1_score(label_binary[:,i], pred_binary[:,i])\n\n ave_f_score = np.sum(f_score)/slice_effect_class\n\n if (ind - start_with) % show_every == 0:\n i = (ind - start_with) // show_every\n if i < rows:\n for col in range(input_cols):\n ax[i, col].imshow(input[col], cmap='gray')\n ax[i, col].set_title(\"Slice {} : {}\".format(ind+start_inx, input_names[col]))\n ax[i, col].axis('off')\n\n ax[i, input_cols].imshow(mask2rgb(label))\n 
ax[i, input_cols].set_title('Slice %d : %s' % (ind+start_inx, 'ground truth'))\n ax[i, input_cols].axis('off')\n\n ax[i, input_cols+1].imshow(mask2rgb(pred))\n ax[i, input_cols+1].set_title('Slice %d : %s' % (ind+start_inx, 'prediction'))\n ax[i, input_cols+1].axis('off')\n\n ax[i, input_cols+2].scatter(range(0,n_class), f_score)\n ax[i, input_cols+2].set_title('Slice %d : Ave F-score = %0.2f' % (ind+start_inx, ave_f_score))\n ax[i, input_cols+2].set_ylabel('F score')\n ax[i, input_cols+2].set_ylim([-0.1, 1.1])\n\n # plt.show()\n if fig_name:\n plt.savefig(fig_name + '.png')\n plt.close()", "def bootstrap_mean(x, B=10000, alpha=0.05, plot=False):\n\n # Deterministic things\n n = len(x) # sample size\n orig = x.mean() # sample mean\n se_mean = x.std()/np.sqrt(n) # standard error of the mean\n qt = stats.t.ppf(q=1 - alpha/2, df=n - 1) # Student quantile\n\n # Generate boostrap distribution of sample mean\n xboot = boot_matrix(x, B=B)\n sampling_distribution = xboot.mean(axis=1)\n\n # Standard error and sample quantiles\n se_mean_boot = sampling_distribution.std()\n quantile_boot = np.percentile(sampling_distribution, q=(100*alpha/2, 100*(1-alpha/2)))\n\n # # RESULTS\n # print(\"Estimated mean:\", orig)\n # print(\"Classic standard error:\", se_mean)\n # print(\"Classic student c.i.:\", orig + np.array([-qt, qt])*se_mean)\n # print(\"\\nBootstrap results:\")\n # print(\"Standard error:\", se_mean_boot)\n # print(\"t-type c.i.:\", orig + np.array([-qt, qt])*se_mean_boot)\n # print(\"Percentile c.i.:\", quantile_boot)\n # print(\"Basic c.i.:\", 2*orig - quantile_boot[::-1])\n\n if plot:\n plt.hist(sampling_distribution, bins=\"fd\")\n # return sampling_distribution\n return np.round(orig, decimals=2), np.round(quantile_boot, decimals=2)", "def _average_duplicates(outputs, target, batch_first=True):\r\n batch_size = target.size(0)\r\n reduce_dim = 1 if batch_first else 0\r\n if batch_first:\r\n outputs = outputs.view(batch_size, -1, *outputs.shape[1:])\r\n else:\r\n outputs = outputs.view(-1, batch_size, *outputs.shape[1:])\r\n outputs = outputs.mean(dim=reduce_dim)\r\n return outputs", "def make_flat_avg(images, out):\n image = Image(avg_images(images, out))\n image.normalise()\n return out", "def zstack_normalize_mean(instack):\n stack = np.copy(instack) \n stackmean = stack.mean()\n for x in range(0,stack.shape[0]):\n immean = stack[x].mean()\n stack[x] = stack[x] / immean * stackmean\n return(stack)", "def representations_mean(self):\n self.assert_sampled()\n return [z.mean().numpy() for z in self.representations]", "def dataset_augmentation(data_start, bootstrapping = 1, epurate = 1, shuffle = True):\n data = data_start\n for ii in range(bootstrapping):\n data = data.append(data_start.apply(bootstrap_sample, axis=1), ignore_index=True)\n\n#Bugged version that weirdly works well....\n# for ii in range(bootstrapping):\n # data = data.append(bootstrap_sample(data_start), ignore_index=True)\n\n for ii in range(epurate):\n data = data.append(data_start.apply(epurate_sample, axis=1), ignore_index=True)\n\n # Shuffling (Important)\n if shuffle == True:\n data = data.sample(frac=1)\n return data", "def meancol(source):\n\tonepix = source.copy()\n\tonepix.thumbnail((1,1),Image.ANTIALIAS)\n\treturn onepix.getpixel((0,0))", "def expanding_mean_nb(a, minp=1):\n return rolling_mean_nb(a, a.shape[0], minp=minp)", "def reconstruct(self,data):\n k,d = self.w.shape\n return dot(data,self.w)+self.mean.reshape(1,d)", "def rolling_mean_nb(a, window, minp=None):\n out = np.empty_like(a, dtype=np.float_)\n for 
col in range(a.shape[1]):\n out[:, col] = rolling_mean_1d_nb(a[:, col], window, minp=minp)\n return out", "def bagged_indicator(x_value, sample, b_iterations=50):\n\n predictions = np.ones(b_iterations) * np.nan\n for i in range(b_iterations):\n # Draw a new bootstrap sample.\n bootstrap_sample = (\n np.random.choice(sample, size=(sample.size,), replace=True)\n )\n # Calculate the bootstrap prediction.\n y_bootstrap = bootstrap_sample.mean()\n predictions[i] = indicator(x_value, y_bootstrap)\n return predictions.mean()", "def sample_wnet(data_list, rows=15, start_with=0, show_every=2, scale=4, fig_name=None, start_inx=0,\n n_class=5, width=1):\n\n n_probmaps = data_list[0]['bound'].shape[0] # number of bounds\n cols = 5 + n_probmaps - 1\n n_batch = len(data_list)\n _, ax = plt.subplots(rows, cols, figsize=[scale * cols, scale * rows])\n\n for ind in range(n_batch):\n input = data_list[ind]['input']\n # print(\"input shape: {}\".format(input.shape))\n label = data_list[ind]['GT']\n pred = data_list[ind]['pred']\n bound_probmap = data_list[ind]['bound'] # predicted bound probmap\n\n # calculate average F1 score\n label_binary = label_binarize(label.flatten(), classes=range(n_class))\n pred_binary = label_binarize(pred.flatten(), classes=range(n_class))\n\n f_score = np.zeros(n_class, dtype=np.float32)\n slice_effect_class = 0\n for i in range(n_class):\n if np.sum(label_binary[:,i]) == 0:\n f_score[i] = 0.0\n else:\n slice_effect_class += 1\n f_score[i] = f1_score(label_binary[:,i], pred_binary[:,i])\n\n ave_f_score = np.sum(f_score)/slice_effect_class\n\n # calculate average HFD\n label_bound = mask2innerouterbound(label, width=width)\n pred_bound = mask2innerouterbound(pred, width=width)\n hdf = slicewise_hd95(pred_bound, label_bound, n_class)\n\n if (ind - start_with) % show_every == 0:\n i = (ind - start_with) // show_every\n if i < rows:\n ax[i, 0].imshow(input, cmap='gray')\n ax[i, 0].set_title(\"Slice {} : {}\".format(ind+start_inx, 'input'))\n ax[i, 0].axis('off')\n\n ax[i, 1].imshow(mask2rgb(label))\n ax[i, 1].set_title('Slice %d : %s' % (ind+start_inx, 'ground truth'))\n ax[i, 1].axis('off')\n\n ax[i, 2].imshow(mask2rgb(pred))\n ax[i, 2].set_title('Slice %d : %s' % (ind+start_inx, 'prediction'))\n ax[i, 2].axis('off')\n\n # plot overlapping between pred_bound and label_bound\n overlap = pred_bound.copy()\n overlap[label_bound != 0] = 4\n ax[i, 3].imshow(mask2rgb(overlap))\n ax[i, 3].set_title(\"Slice {:d} : bound hdf={:.4f}\".format(ind + start_inx, hdf))\n ax[i, 3].axis('off')\n\n # plot prob maps for intermediate bounds\n output_title = ['prob map (inner bound)', 'prob map (outer bound)'] if n_probmaps >= 3 else ['prob map']\n for c_inx in range(1, n_probmaps):\n ax[i, 3 + c_inx].imshow(bound_probmap[c_inx], cmap='seismic')\n ax[i, 3 + c_inx].set_title(\"Slice {:d} : {}\".format(ind + start_inx, output_title[c_inx - 1]))\n ax[i, 3 + c_inx].axis('off')\n\n ax[i, 3 + n_probmaps].scatter(range(0, n_class), f_score)\n ax[i, 3 + n_probmaps].set_title('Slice %d : Ave F-score = %0.2f' % (ind+start_inx, ave_f_score))\n ax[i, 3 + n_probmaps].set_ylabel('F score')\n ax[i, 3 + n_probmaps].set_ylim([-0.1, 1.1])\n\n if fig_name:\n plt.savefig(fig_name + '.pdf')\n plt.close()", "def draw_normal(self):\n means, scale = self.get_means_and_scales()\n return np.random.normal(means,scale,size=[self.sims,means.shape[0]]).T", "def get_mean_accuracy(all_means, nbins=10):\n ## Add a columns of bin assignments\n # bins = np.linspace(0, all_means['posterior'].max(), nbins)\n bins = np.linspace(0, 1, 
nbins)\n all_means['bin'] = np.digitize(all_means['posterior'], bins)\n\n ## Add upper bound to right-most bin\n all_means.replace(to_replace={'bin':{nbins: nbins-1}}, inplace=True)\n\n ## Bin ancestors by mean bootstrapped probability, adding columns for\n ## whether they were the true generating ancestor, and the number of\n ## ancestors in each bin\n bin_count = lambda x: len(x)\n binned = all_means[['generator', 'bin']].pivot_table(index='bin',\n aggfunc=[np.mean, bin_count], fill_value=0)\n binned.columns = [['observed_prob', 'bin_count']]\n binned['n_successes'] = binned['observed_prob'].values * \\\n binned['bin_count'].values\n\n ## Estimate means and confidence intervals as sampling from a binomial\n ## distribution, with a uniform prior on success rates - Done using\n ## a beta distribution\n binned['alpha'] = binned['n_successes'] + 1\n binned['beta'] = binned['bin_count'].values - binned['n_successes'].values + 1\n beta_mean = lambda row: beta.mean(float(row['alpha']), float(row['beta']))\n binned['posterior_mean'] = binned.apply(beta_mean, axis=1)\n\n ## Add confidence intercals\n beta_025CI = lambda row: beta.ppf(0.025, float(row['alpha']), float(row['beta']))\n beta_975CI = lambda row: beta.ppf(0.975, float(row['alpha']), float(row['beta']))\n binned['CI2.5'] = binned.apply(beta_025CI, axis=1)\n binned['CI97.5'] = binned.apply(beta_975CI, axis=1)\n\n ## Convert to values relative to mean, to fit plotting convention\n binned['CI2.5'] = binned['posterior_mean'].values - binned['CI2.5'].values\n binned['CI97.5'] = binned['CI97.5'].values - binned['posterior_mean'].values\n\n ## Add column with bin centre for plotting\n binned['bin_centre'] = all_means[['posterior', 'bin']].groupby('bin').mean()\n\n return binned", "def normalize_data(X_genesets):\n normalized_datasets = []\n for Xg in X_genesets:\n averages = np.average(Xg, axis=0)\n Xg_centered = Xg - np.repeat(averages, Xg.shape[0], axis=0)\n std_devs = np.sqrt(np.var(Xg_centered, axis=0))\n Xg_normalized = np.divide(Xg, np.repeat(std_devs, Xg.shape[0], axis=0))\n normalized_datasets.append(Xg_normalized)\n return normalized_datasets", "def AvgGrids( A ):\n Sum = numpy.zeros( A[0].data.shape ) * 1.\n Count = numpy.zeros( A[0].data.shape ) * 1.\n for a in A:\n Count = Count+numpy.where(a.data != a.nodata, 1., 0.)\n Sum = Sum+numpy.where(a.data != a.nodata, a.data, 0.)\n Mean = numpy.where( Count>0, Sum/Count, A[0].nodata )\n Anom = []\n for a in A:\n b = a.copy()\n b.data = numpy.where(b.data != b.nodata, b.data-Mean, b.nodata)\n b.name = 'anom-'+b.name\n Anom.append( b )\n New = grid(Mean, A[0].xllcorner, A[0].yllcorner, A[0].cellsize, 'mean.grd', A[0].nodata)\n return New, Anom", "def standard_bootstrap(dataset):\n randseed=np.random.randint(0,10000)\n np.random.seed(randseed)\n \n n = dataset.shape[0]\n b = np.random.randint(0, high=n-1, size=n)\n return dataset[b]", "def plot_mean_of_each_neuron(self, title:str, show:bool=True, dest_path:str=None, std=True):\n d = Data(self.populations, self.path)\n d.split_trial_wise()\n X, x, Y, y = d.get_data()\n X = np.concatenate((X, x))\n Y = np.concatenate((Y, y))\n\n d = {}\n for i in range(len(Y)):\n if Y[i] in d:\n d[Y[i]].append(X[i])\n else:\n d[Y[i]] = [X[i]]\n\n stds = {}\n for key in d.keys():\n d[key] = np.asarray(d[key], dtype=float)\n stds[key] = np.std(d[key], axis=0)[::-1]\n d[key] = np.mean(d[key], axis=0)[::-1]\n \n\n c = {\"1->1\": \"magenta\", \"0->0\": \"cyan\", \"1->0\":\"red\", \"0->1\": \"green\"}\n fig, ax = plt.subplots()\n for key in d.keys():\n 
ax.plot(range(1, len(X[0]) + 1), d[key], color=c[key], label=key)\n if std:\n ax.fill_between(range(1, len(X[0]) + 1), d[key] + stds[key], alpha=0.1, color=c[key])\n \n plt.rcParams.update({'font.size': 13})\n plt.yticks(fontsize=13)\n plt.xticks(fontsize=13)\n plt.xlabel('{} most active Neurons'.format(len(X[0])), fontsize=15)\n plt.ylabel(\"Neuron-wise mean per class\", fontsize=15)\n plt.title(title)\n plt.legend(fontsize=13)\n\n if show:\n plt.show()\n\n if dest_path !=None:\n plt.savefig(dest_path + '\\\\{}.png'.format(title))\n\n plt.clf()\n plt.cla()\n plt.close()" ]
[ "0.6356295", "0.5573147", "0.5505484", "0.54908717", "0.5478007", "0.54538715", "0.5435412", "0.5399054", "0.5367195", "0.5365157", "0.5319779", "0.5285857", "0.5244797", "0.5215043", "0.5209573", "0.51538163", "0.5143565", "0.51350296", "0.51278985", "0.5111824", "0.5110092", "0.5097943", "0.50831133", "0.5076851", "0.50579107", "0.50264156", "0.50219244", "0.50097585", "0.5007567", "0.49927375" ]
0.69946057
0
Function to split Excel files into individual CSVs and then delete them.
def split_excel_files(self):
    for x in self.files:
        if x[-4:] not in [".xls", "xlsx"]:
            continue
        else:
            # read every sheet of the workbook into a dict of DataFrames
            files = pd.read_excel(x, sheet_name=None)
            for k, v in files.items():
                # get name with the extension stripped
                name = k.split(".")[0]
                out_path = x.split(".")[0]
                try:
                    os.mkdir(out_path)
                except:
                    print("directory exists")
                v.to_csv(f"{out_path}/{name}.csv", index=False)
            # remove the source workbook once all sheets are written
            os.remove(x)
    # refresh the file lists now that the workbooks are gone
    self.files = [os.path.join(dp, f) for dp, dn, fn in os.walk(self.path) for f in fn]
    self.csv_files = [x for x in self.files if x[-3:] == "csv"]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_csvs():\n for oneFile in glob.glob('excel/*.xlsx'):\n pd = pandas.read_excel(oneFile)\n outName = os.path.splitext(os.path.basename(oneFile))[0]\n pd.to_csv('lists/'+outName+'.csv',index=False)", "def csv_from_excel(path=os.getcwd()):\n path = path + '/*.xls*'\n files = glob.glob(path)\n\n for i in files:\n file = os.path.basename(i)\n filename = os.path.splitext(file)[0]\n xls_file = pd.ExcelFile(i, index_col=None, dtype=object)\n if len(xls_file.sheet_names) > 1:\n try:\n os.mkdir(filename)\n except OSError:\n print('Could not create directory to output to.')\n for x in xls_file.sheet_names:\n file = pd.read_excel(xls_file, x, index_col=None, dtype=object)\n file.to_csv(filename + '/' + x + '.csv', quoting=1, index=False)\n\n else:\n file = xls_file.parse()\n file.to_csv(filename + '.csv', quoting=1, index=False)", "def split_data_into_exchanges(source_path, destination_path):\n for subdir, dirs, files in os.walk(source_path):\n for file in files:\n source_full_file = os.path.join(subdir, file)\n print(source_full_file)\n df = pd.read_csv(source_full_file)\n for group_name, df in df.groupby(['Ticker', 'Exchange']):\n file_name = destination_path / str(df['Date'].iloc[0]) / convertTuple(group_name)\n utils.make_dir(file_name)\n with open(file_name, \"w+\") as f:\n df.to_csv(f, index=False)", "def excelToCsv(folder):\n for excelFile in os.listdir(folder):\n # Skip non-xlsx files, load the workbook object.\n if not excelFile.endswith('xlsx'):\n continue\n wb = openpyxl.load_workbook(excelFile)\n\n for sheetName in wb.get_sheet_names():\n # Loop through every sheet in the workbook.\n sheet = wb.get_sheet_by_name(sheetName)\n\n # Create the CSV filename from the Excel filename and sheet title.\n csvFilename = excelFile.split('.')[0]+'_'+sheet.title+'.csv'\n csvFileObj = open(csvFilename, 'w', newline='')\n\n # Create the csv.writer object for this CSV file.\n csvWriter = csv.writer(csvFileObj)\n\n # Loop through every row in the sheet.\n for rowObj in sheet.rows:\n rowData = [] # append each cell to this list\n # Loop through each cell in the row.\n for cellObj in rowObj:\n # Append each cell's data to rowData.\n rowData.append(cellObj.value)\n # Write the rowData list to the CSV file.\n csvWriter.writerow(rowData)\n\n csvFileObj.close()", "def clean_chunk_files(dirpath):\n workdir = os.getcwd()\n os.chdir(dirpath)\n for filename in glob.glob(\"[0-9]*_[0-9]*_[0-9]*.hdf5\"):\n os.remove(filename)\n os.chdir(workdir)", "def delete_xls(inp_dict):\n if inp_dict[\"del\"]:\n for del_f in inp_dict[\"del\"]:\n os.remove(os.path.abspath(del_f))", "def processData(stud,new_path,processed_path,errors_path,server):\n files = os.listdir(new_path)\n #print(files)\n frames=[]\n\n try:\n processed_files = 0\n for file in files:\n processed_files += 1\n\n if str(file).endswith(\".csv\") and str(stud) in str(file):\n dataframe = pd.read_csv(new_path + '\\\\' + file) #changed this from read_excel to read_csv\n print(file.lower())\n frames.append(dataframe)\n #cleanFrame = CleanDataFrame(dataframe)\n # ex.ExportToSql(cleanFrame, server, 'Welding')\n #shutil.move(new_path + '\\\\' + file, processed_path + '\\\\' + file)\n # os.remove(new_path + '\\\\' + file)\n\n print(\"Processed files :\" + str(processed_files) + \", Estimated row count :\" + str(\n processed_files * 5000))\n\n finalFrame = pd.concat(frames, ignore_index=True)\n return CleanDataFrame(finalFrame)\n except:\n #os.remove(new_path + '\\\\' + file) commented this out to prevent deletion of unprocessed files\n shutil.move(new_path + 
'\\\\' + file, errors_path + '\\\\' + file)\n print(file+ \" removed...........\")\n #Restart the process\n #processData(new_files_path)", "def save_clean_data(self):\n for data in self.clean_data:\n file_name = \"../data/clean_data/\" + data.file.name + data.file.extension\n data.save_csv(file_name)", "def pre_process_multispace(filepath, delimiter=\" \"):\n newpath = filepath+\".rev.csv\"\n with open(filepath, \"r\") as src_csv_file:\n with open(newpath, \"w\") as dst_csv_file:\n for src_line in src_csv_file:\n dst_csv_file.write(delimiter.join(src_line.split())+\"\\n\")", "def del_results_csv(request):\n if request.method == \"POST\":\n try:\n sources = set()\n dataset = request.FILES['dataset']\n handle_uploaded_file(dataset, 'temp/del_rels_csv.csv')\n df = pd.read_csv('temp/del_rels_csv.csv')\n for i, row in df.iterrows():\n rel_id = row['rel_id']\n objs = ExtractedRelation.objects.filter(rel_id=rel_id)\n for o in objs:\n sources.add(o.source)\n objs.delete()\n for s in sources:\n if len(ExtractedRelation.objects.filter(source=s)) == 0:\n Source.objects.filter(source_id=s.source_id).delete()\n except Exception as e:\n print(str(e))\n tb = traceback.format_exc()\n print(tb)\n \n return HttpResponse(\n json.dumps({\"status\": \"error\"}),\n content_type=\"application/json\"\n )\n \n return HttpResponse(\n json.dumps({\"status\": \"success\"}),\n content_type=\"application/json\"\n )", "def managecsv(data):\n\n checkfolderdata()\n if not datafileexist(data[7]):\n createcsv(data[7])\n managecsv(data)\n else:\n addcsv(data, data[7])", "def remove_idats_not_in_samplesheet(samplesheet_filepath, sample_path):\n samples = pd.read_csv(samplesheet_filepath)\n all_idats = list(Path(sample_path).rglob('*.idat')) + list(Path(sample_path).rglob('*.idat.gz'))\n all_idats_names = [i.name for i in all_idats]\n # these are VALID idats to retain\n save_list = []\n try:\n idat_fileparts = [f\"{row['GSM_ID']}_{row['Sentrix_ID']}_{row['Sentrix_Position']}\" for (idx,row) in samples.iterrows()]\n except KeyError as e:\n LOGGER.error(f\"Samplesheet is missing {e}.\")\n return \n for file in idat_fileparts:\n files = [f\"{file}_Grn.idat\", f\"{file}_Grn.idat.gz\", f\"{file}_Red.idat\", f\"{file}_Red.idat.gz\"]\n for idat in files:\n if idat in all_idats_names:\n save_list.append(idat)\n #files = [f\"{file}_Grn.idat\", f\"{file}_Grn.idat.gz\", f\"{file}_Red.idat\", f\"{file}_Red.idat.gz\"]\n #if Path(idat).exists():\n remove_list = [idat for idat in all_idats if idat.name not in save_list]\n #LOGGER.info(f\"removing {len(remove_list)} idats out of a total of {len(all_idats)} found,\")\n worked = 'OK' if len(samples.index) == len(save_list)/2 else 'ERROR'\n if worked != 'OK':\n return\n removed = 0\n for idat in all_idats:\n if idat.name in save_list:\n continue\n if Path(idat).exists():\n Path(idat).unlink()\n #print('-',idat)\n removed += 1\n #LOGGER.info(f'removed {removed} idat files not in samplesheet. ready to process remaining ones.')\n LOGGER.info(f\"retaining {len(save_list)} files for {len(samples.index)} samples ({worked}). 
(Dropped {len(remove_list)} idats)\")", "def remove_files(file, filename_excel):\n os.remove(file)\n print(\"Remove the data input sent by the user\")\n os.remove(velocity_input + \"/\" + filename_excel + \"_v_input.txt\")\n print(\"Remove the input file for velocity calculations\")\n os.remove(velocity_output + \"/\" + filename_excel + \"_vx.output\")\n print(\"Remove the vx output file\")\n os.remove(velocity_output + \"/\" + filename_excel + \"_vy.output\")\n print(\"Remove the vy output file\")\n os.remove(velocity_output + \"/\" + filename_excel + \"_vz.output\")\n print(\"Remove the vy output file\")\n os.remove(data_output + \"/\" + filename_excel + \"_results.xlsx\")\n print(\"Remove the data output sent to the user's email.\")", "def split_per(folderin, folderout, split_col='ECO_ID', colNms=['i_h100','i_cd',\n 'doy','i_wflen','i_acqdate','b1','vcf','ECO_NAME','ECO_ID','BIOME','geometry']):\n\n split_files = glob.glob(folderin + '*.shp')\n\n for filename in split_files:\n print(filename)\n basename = os.path.splitext(os.path.basename(filename))[0]\n dfa = gpd.read_file(filename)\n df = dfa.astype({split_col: 'int32'}) \n ecoNames = list(np.unique(df[split_col]))#get list of unique ecoregions \n \n for eco in ecoNames:\n #create new df with just columns I want\n df2 = gpd.GeoDataFrame(df, columns=colNms)\n ID = str(eco)\n df_eco = df.loc[df2[split_col]==eco, colNms]\n df_eco.to_file(folderout + '/{}_eco_{}.shp'.format(basename, ID))", "def fix_turnstile_data(filenames):\n for name in filenames:\n f_in = open(name, 'r')\n f_out = open('updated_' + name, 'w')\n reader_in = csv.reader(f_in, delimiter=',')\n writer_out = csv.writer(f_out, delimiter=',')\n\n for line in reader_in:\n part_1 = line[0]\n part_2 = line[1]\n part_3 = line[2]\n for i in range(3, len(line), 5):\n writer_out.writerow(\n [part_1, part_2, part_3, line[i], line[i + 1], line[i + 2], line[i + 3], line[i + 4]])\n f_in.close()\n f_out.close()", "def path_delete_emp():\n\twhile True:\n\t\ttry:\n\t\t\tfile_path = input(\"Enter the path of your file or enter 'quit' to go back to menu.\\n File Path: \")\n\t\texcept FileNotFoundError:\n\t\t\tprint(\"File Not Found Error.\")\n\t\t\tcontinue\n\t\tif file_path == \"quit\":\n\t\t\treturn\n\t\telif not os.path.exists(file_path) and not os.path.isfile(file_path):\n\t\t\tprint(\"Invalid Path.\")\n\t\t\tcontinue\n\t\telif file_path.lower().endswith(('.csv')) == False:\n\t\t\tprint(\"Please Choose a CSV File!\")\n\t\t\tcontinue\n\t\telse:\n\t\t\tprint(\"File Found!\")\n\t\t\tbreak\t\t\n\tnew_lines = list()\n\tlines = list()\n\twith open(file_path, 'r') as f:\n\t\treader = csv.reader(f)\n\t\tfor row in reader:\n\t\t\tnew_lines.append(row)\n\twith open(\"employees.csv\", 'r') as readFile:\n\t\treader = csv.reader(readFile)\n\t\tnext(reader, None)\n\t\tfor row in reader:\n\t\t\tlines.append(row)\n\tnew_list = [x for x in lines if x not in new_lines]\n\twith open('employees.csv', 'w', newline='') as writeFile:\n\t\twriter = csv.writer(writeFile)\n\t\twriter.writerows(new_list)\n\t\tprint(\"Employees Deleted.\")\n\t\treturn", "def cleanUp(self):\n print(\" cleaning up\",self.folderSave)\n for fname in glob.glob(self.folderSave+\"/*.*\"):\n if not fname.endswith(\".npy\") and not fname.endswith(\".csv\"):\n print(\" deleting\",os.path.basename(fname))\n os.remove(fname)", "def csvs_scattered_to_grouped(path_dir, inlist, outlist, gcols,\n sort=1, scols=None, catalog=\"\", supersede=False):\n\n filelist=[os.path.join(path_dir,i) for i in inlist]\n n_split=len(outlist)\n\n 
pdfs=pd.read_csv(filelist[0],usecols=gcols)\n pdfs.drop_duplicates(inplace=True)\n\n print(\"csvs_scattered_to_grouped: Collecting items for group.\\n\")\n for i in range(1,len(filelist)):\n pdfs=pdfs.append(pd.read_csv(filelist[i],usecols=gcols),ignore_index=True)\n pdfs.drop_duplicates(inplace=True)\n\n if sort==1:\n pdfs.sort_values(gcols,inplace=True, ascending=True)\n elif sort==-1:\n pdfs.sort_values(gcols,inplace=True, ascending=False)\n\n aa_ed=np.array_split(pdfs, n_split)\n\n if supersede:\n for i in outlist:\n if os.path.isfile(os.path.join(path_dir,i)):\n os.remove(os.path.join(path_dir,i))\n if os.path.isfile(os.path.join(path_dir,str(catalog))):\n os.remove(os.path.join(path_dir,str(catalog)))\n\n print(\"csvs_scattered_to_grouped: Start processing files:\\n\")\n for i in range(0,len(filelist)):\n fi=pd.read_csv(filelist[i],usecols=scols)\n for j,ja in enumerate(aa_ed):\n wrtj=pd.merge(ja, fi, how='inner', on=gcols)\n append_to_csv(wrtj, os.path.join(path_dir,outlist[j]))\n print('csvs_scattered_to_grouped: '+str(i)+' file(s) finished.')\n\n if catalog:\n for i, d in enumerate(aa_ed):\n d['_@_FILE_']=outlist[i]\n append_to_csv(d, os.path.join(path_dir,str(catalog)))\n print('csvs_scattered_to_grouped: Catalog file created.')", "def excel2csv(excel_path: str) -> str:\n csv_path = excel_path.replace('xlsx', 'csv')\n\n # Create pandas Data Frame from an excel file and save it into csv.\n df_excel = pd.read_excel(excel_path)\n df_excel.to_csv(csv_path, index=False)\n\n os.remove(excel_path)\n\n excel_filename = excel_path.split('/')[-1]\n csv_filename = csv_path.split('/')[-1]\n\n print(f\"Extracting csv from xlsx... {excel_filename} Saving... {csv_filename}'\")\n print(f\"Deleting... {excel_filename}\")\n\n return csv_path", "def splitter(file_in,\n aantal_banen,\n afwijkings_waarde,\n totaal,\n aantal_rollen,\n ongeveer_per_baan,\n outgoing_posix_pad):\n # afwijkings_waarde = 0 deze komt nu uit def\n\n file_in = pd.read_csv(file_in, \";\")\n a = 0\n\n begin_eind_lijst = []\n be_LIJST = []\n\n for num in range(aantal_rollen):\n b = file_in.aantal.iloc[a:num+1].sum()\n # print(a, num)\n\n if num == (len(file_in) - 1):\n c = file_in.aantal.iloc[a:num].sum()\n begin_eind_lijst.append([c, a, num + 1])\n be_LIJST.append([a, num + 1])\n\n csv_naam = Path(f\"{outgoing_posix_pad}/{a:>{0}{5}}.csv\")\n print(csv_naam)\n file_in.iloc[a : (num + 1)].to_csv(csv_naam)\n print(\"splitter klaar\")\n\n elif b >= ongeveer_per_baan + afwijkings_waarde:\n\n csv_naam = Path(f\"{outgoing_posix_pad}/{a:>{0}{5}}.csv\")\n print(csv_naam)\n file_in.iloc[a : (num + 1)].to_csv(csv_naam) # num + 1 ??\n\n begin_eind_lijst.append([b, a, num])\n be_LIJST.append([a, num + 1])\n be_LIJST.append(f\"[{a}:{num}]\")\n a = num + 1\n\n continue\n\n return print(begin_eind_lijst), print(be_LIJST)", "def batch_delete_import_data(importTaskIds=None):\n pass", "def main(in_path, keep_path, out_path):\n\t# First open the input csv\n\tcsv_hndl = lambda x: np.array([np.array(r) for r in x])\n\tdata, headers = read_csv(in_path, csv_hndl, use_headers=True, delimiter=',')\n\n\t# Read headers to keep\n\tkeeps = []\n\n\t# Regex for ignoring comments\n\tcmnt_re = re.compile(\"^#\")\n\n\t# Open and read the file\n\twith open(keep_path) as f_obj:\n\t\tfor line in f_obj:\n\t\t\tline = line.strip()\n\t\t\t# If line is commented out, ignore\n\t\t\tif cmnt_re.match(line):\n\t\t\t\tcontinue\n\t\t\t# Otherwise add to list of keeps\n\t\t\tkeeps.append(line)\n\n\t# Prune the csv\n\tnew_data, new_headers = 
prune_csv(data,headers,keeps)\n\n\t# Write to output csv file\n\twrite_csv(\n\t\tout_path, \n\t\tnew_data, \n\t\tnew_headers, \n\t\tdelimiter=',', \n\t\tquotechar='\"',\n\t\tquoting=csv.QUOTE_MINIMAL\n\t)", "def clean_folder(self):\n # Remove the 1st output\n # Remove the 2nd output\n # Remove the calibrated output\n try:\n os.remove(\"output1.csv\")\n except:\n pass\n try: \n os.remove(\"output2.csv\")\n except:\n pass\n try:\n os.remove(self.__add_output_file_location(self._output_filename))\n except:\n pass\n \n list = os.listdir(\"edited\")\n for file in list:\n file = os.path.join(\"edited\", file)\n try:\n os.remove(file)\n except:\n pass\n \n list = os.listdir(\"extracted\")\n for file in list:\n file = os.path.join(\"extracted\", file)\n try:\n os.remove(file)\n except:\n pass", "def clean_data():\n for clean_file in clean_files:\n file_list = [f for f in os.listdir(\".\") if f.endswith(clean_file)]\n for f in file_list:\n os.remove(f)", "def truncate(self):\n for file_name in os.listdir(self.path):\n if file_name[0:4] == 'data':\n os.remove(self.path + '/' + file_name)\n self.current_row = 0", "def isolate_subreddit(csv_location, subreddit):\r\n\r\n individual_subreddit_csvs = csv_location + \"_\" + subreddit + '.*.csv'\r\n\r\n df = dd.read_csv(csv_location + \".csv\", header=0, sep='\\t')\r\n sub_df = df.loc[df['subreddit'] == subreddit]\r\n\r\n sub_df.to_csv(individual_subreddit_csvs)\r\n filenames = glob(individual_subreddit_csvs)\r\n with open(csv_location + \"_\" + subreddit + '.csv', 'w') as out:\r\n for fn in filenames:\r\n with open(fn) as f:\r\n out.write(f.read())\r\n os.remove(fn)", "def ListDataToExcel(listdata,filename):\n\n # file_backup=f = codecs.open(parent+info.QryPositionExchangeID+\"/\"+filename,'wb','utf-8')\n csvfile = file(filename.decode(\"utf-8\"), 'wb')\n csvfile.write(codecs.BOM_UTF8)\n writer=csv.writer(csvfile)\n writer.writerows(listdata)\n csvfile.close()\n df_new = pd.read_csv(filename, encoding='utf-8')\n writer = pd.ExcelWriter(filename.replace(\".csv\",\".xlsx\"))\n df_new.to_excel(writer, index=False)\n writer.save()\n os.remove(filename)", "def clean_retrosheet_files(self):\n # Get zipped and unzipped folder names\n zippedFileFolder = Filepath.get_retrosheet_folder(folder='zipped')\n unzippedFileFolder = Filepath.get_retrosheet_folder(folder='unzipped')\n\n # Clean out all files in both folders\n for folder in (zippedFileFolder, unzippedFileFolder):\n os.chdir(folder)\n for file in os.listdir(os.getcwd()): \n if os.path.isdir(file): \n shutil.rmtree(file)\n else: \n os.remove(file)", "def cleanSpreadsheet(filepath):\n # import dat\n data = pd.ExcelFile(filepath)\n # create dataframe\n df = data.parse(skiprows=5)\n # drop the following columns\n cleanedDF = df.drop(['Unnamed: 0', 'Unnamed: 1',\n 'Trans#', 'Record#', 'Unnamed: 3', 'Description/Job',\n 'Vendor/Employee/Equipment',\n 'Unnamed: 8'], axis=1)\n # drops row if any of the cells are blank\n cleanedDF = cleanedDF.dropna(axis=0, how='any')\n cleanedDF = convertIndex(cleanedDF)\n return cleanedDF", "def delete_piece_files(self):\n\n\t\tprint \"Removing piece files and cleaning up...\"\n\t\tpieces_dir = os.path.join(PATH, 'Pieces')\n\t\ttry:\n\t\t\tos.makedirs(pieces_dir)\n\t\texcept:\n\t\t\tpass\n\t\tfor i in range(self.torrent.no_of_subpieces):\n\t\t\tpiece_src = os.path.join(PATH, self.torrent.name+'.'+'00'+str(i))\n\t\t\tpiece_dest = os.path.join(pieces_dir, self.torrent.name+'.'+'00'+str(i))\n\t\t\tshutil.move(piece_src, piece_dest)\n\t\t\t# Or I just delete them:\n\t\t\t# 
os.remove(piece_src)" ]
[ "0.59048206", "0.58225185", "0.5787659", "0.57107836", "0.5645881", "0.5633174", "0.5561667", "0.5561204", "0.5554499", "0.55346644", "0.5507042", "0.5490549", "0.5482629", "0.54694206", "0.54608023", "0.545365", "0.544657", "0.5445508", "0.5395078", "0.53660935", "0.53414905", "0.5321918", "0.53008807", "0.528759", "0.52690554", "0.5267659", "0.5267584", "0.5250741", "0.5250296", "0.5238653" ]
0.7902608
0
Returns the diff header.
def header_diff(diff):
    return diff[:diff.find(clean_diff(diff))]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getHeader():\n return _HEADER", "def get_header(self):\n return self._header", "def getHeader(self):\n return self.data.header", "def header(self):\r\n return self.__header", "def header(self):\n return self._header", "def header(self):\n return self._header", "def header(self):\n return self._header", "def header(self):\n return self[0]", "def get_header_format(cls):\n return cls.newFormatHeader", "def mail_header(self):\n return self._hdr", "def getTableHeader(self, filename):\n hdr = \"\"\n with open(filename, \"r\") as f:\n for line in f:\n if line[0] == \">\":\n hdr += line\n else:\n return hdr", "def GetHeader(self) -> \"char const *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_GetHeader(self)", "def get_heading(self):\n return self.heading[0]", "def get_heading(self):\n return self.__heading", "def get_heading(self):\n return self.__heading", "def get_header_revision(filing) -> dict:\n _header = filing.json['filing']['header']\n return _header", "def tsv_header(self):\n return self.tsv_lines[0]", "def to_header(self):\n\n return self._header_block", "def get_header():\n title = \"\"\"\n ___ __\n | o _|_ _|_ _ ._ (_ _ ._ _|_ o ._ _ _ ._ _|_ /\\ ._ _. | _ o _\n | \\/\\/ | |_ |_ (/_ | __) (/_ | | |_ | | | | (/_ | | |_ /--\\ | | (_| | \\/ _> | _>\n /\"\"\"\n\n sub_title = \"Get sentiments from your tweets fast and easy!\"\n header = bcolors.HEADER + title + bcolors.ENDC + \"\\n\" + bcolors.WARNING + \"\\t\\t\" + sub_title + bcolors.ENDC + \"\\n\"\n return header", "def GetHeader(self) -> \"char const *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_GetHeader(self)", "def block_header(self):\n return self._current_block[0]", "def header(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"header\")", "def header(self) -> str:\n value = self.kind\n if self.options:\n value += '; ' + '; '.join(f'{k}={v}' for k, v in self.options.items())\n return value", "def GetHeader(self) -> \"char const *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_GetHeader(self)", "def GetHeader(self) -> \"char const *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_GetHeader(self)", "def header():\n return [\"adjustment type\", \"comparison method\"]", "def headerstring(self):\n sss = 'IVO LEGEND:\\n'\n sss += ' Created from 152 or 155\\n'\n sss += ' Pct number\\n'\n sss += ' Found in 152 (Y/N)\\n'\n sss += ' Found in 155 (Y/N)\\n'\n sss += ' Ivo serial number\\n'\n sss += ' PEB used for opening\\n'\n sss += ' Opening date/time\\n'\n sss += ' Date/time of first vote\\n'\n sss += ' PEB used for closing\\n'\n sss += ' Closing date/time\\n'\n sss += ' Date/time of last vote\\n'\n sss += ' Number of vote events 152\\n'\n sss += ' Number of vote events 155\\n'\n sss += ' Number of vote events 155 by precinct\\n'\n sss += ' Number of late vote events 152\\n'\n sss += ' Pct numbers\\n'\n sss += ' Ballot styles\\n'\n sss += ' Memory collection times\\n'\n return sss", "def get_header():\n try:\n yml_iter = cfg.yml_config[\"header\"]\n except:\n # Probably no \"comments\" section in the yml-file.\n return \"\"\n\n return (\"\\n\".join(yml_iter) + \"\\n\\n\") if yml_iter is not None else \"\\n\"", "def generate_header(self, header=None):\n if header is None:\n header = self.header\n\n lines = [self.PREFIX_HEAD + '!b']\n for k, v in header.items():\n if k in ('labels', 'categories'):\n v = ', '.join(v)\n elif k == 'draft':\n v = repr(v)\n lines.append(self.HEADER_FMT % (k, v))\n lines.append(self.PREFIX_END)\n return '\\n'.join([_f for 
_f in lines if _f]) + '\\n'", "def header(self):\r\n raise NotImplementedError" ]
[ "0.74034965", "0.731021", "0.7231756", "0.70983106", "0.7060388", "0.7060388", "0.7060388", "0.68315625", "0.6825599", "0.6802016", "0.67298836", "0.67139643", "0.6710982", "0.6694566", "0.6694566", "0.66496134", "0.6633935", "0.66212666", "0.6615713", "0.6597214", "0.6579236", "0.6559837", "0.65270126", "0.64800024", "0.64669514", "0.6448605", "0.64190567", "0.6418494", "0.64031756", "0.6400363" ]
0.7866826
0
Writes a patch tempfile.
def write_tmp_patch(diff, filename=None):
    if not filename:
        prefix = 'cugit-'
        suffix = '-patch'
        filename = mkstemp(suffix, prefix)[1]
    with open(filename, 'w') as f:
        f.write(diff)
    return filename
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_patch(self, patch_str:str, offset:int):\n self.cycle_stream_mode(True) #set io as read\n old_file = self.io_stream.read()\n\n old_file_front = old_file[:offset]\n old_file_back = old_file[offset+len(patch_str):]\n\n new_file = old_file_front + patch_str + old_file_back\n assert len(new_file) == len(old_file)\n\n self.cycle_stream_mode(False) #set io as write\n self.io_stream.write(new_file)", "def write_tmp_patches(diffs):\n files = []\n for idx, diff in enumerate(diffs):\n prefix = 'cugit-%s-' % str(idx).zfill(5)\n suffix = '-patch'\n filename = mkstemp(suffix, prefix)[1]\n write_tmp_patch(diff, filename)\n files.append(filename)\n return files", "def save_patch(fname, mesh='hemi'):\n verts, pts, edges = _get_pts_edges(mesh)\n write_patch(fname, pts, edges)", "def test_write_file():\n with tempfile.NamedTemporaryFile(delete=False) as t:\n temp_fn = t.name\n try:\n z = XPIManager(temp_fn, mode='w')\n f, d = 'install.rdf', '注目のコレクション'.decode('utf-8')\n z.write(f, d)\n assert z.read(f) == d.encode('utf-8')\n finally:\n os.unlink(temp_fn)", "def write_patch(filename, pts, edges=None):\n if edges is None:\n edges = set()\n\n with open(filename, 'wb') as fp:\n fp.write(struct.pack('>2i', -1, len(pts)))\n for i, pt in pts:\n if i in edges:\n fp.write(struct.pack('>i3f', -i-1, *pt))\n else:\n fp.write(struct.pack('>i3f', i+1, *pt))", "def dump_to_tmpfile(obj):\n\n import tempfile\n\n fname = tempfile.mktemp()\n with open(fname, \"w\") as txtfile:\n txtfile.write(str(obj))\n\n print(\"str(obj) was written to {}\".format(fname))\n\n return fname", "def makeTempFile(self,perms=0o600,keep=False):\n\n fd=os.open(self.temp,os.O_RDWR|os.O_CREAT|os.O_EXCL|os.O_TRUNC,perms)\n f=os.fdopen(fd,'w+') \n if not keep:\n atexit.register(os.remove,self.temp)\n return f", "def _generate_to_tempfile(self, generator):\r\n (output_fd, output_path) = tempfile.mkstemp()\r\n with os.fdopen(output_fd, 'w') as output:\r\n generator.write(output)\r\n return output_path", "def test_WriteAndFlushFile():\n\n try:\n # Create a directory. 
Make sure to remove it at the end.\n dirname = tempfile.mkdtemp()\n filename = 'filename.txt'\n text1 = 'The quick brown fox\\n'\n text2 = 'The lazy dog'\n full_path = os.path.join(dirname, filename)\n\n # Open a file and write using both changed methods\n f = prefork.WriteAndFlushFile(full_path, 'w')\n f.write(text1)\n f.writelines(text2)\n f.close()\n\n # Read everything back\n f = open(full_path, 'r')\n data = f.readlines()\n f.close()\n\n assert data[0] == text1\n assert data[1] == text2\n\n finally:\n # Always remove it\n shutil.rmtree(dirname)", "def test_write(self):\n temp_file = tempfile.mkstemp()[1]\n try:\n with open(temp_file, \"w+\") as fh:\n self.new_manifest.write(fh)\n tools.eq_(self.new_manifest, load_manifest(temp_file))\n finally:\n os.unlink(temp_file)", "def touch(path):\n with open(path, 'wt') as f:\n pass", "def WriteOnDiff(filename):\n\n class Writer(object):\n \"\"\"Wrapper around file which only covers the target if it differs.\"\"\"\n def __init__(self):\n # Pick temporary file.\n tmp_fd, self.tmp_path = tempfile.mkstemp(\n suffix='.tmp',\n prefix=os.path.split(filename)[1] + '.gyp.',\n dir=os.path.split(filename)[0]\n )\n try:\n self.tmp_file = os.fdopen(tmp_fd, 'w')\n except Exception:\n # Don't leave turds behind.\n os.unlink(self.tmp_path)\n raise\n\n def __getattr__(self, attrname):\n # Delegate everything else to self.tmp_file\n return getattr(self.tmp_file, attrname)\n\n def close(self):\n try:\n # Close tmp file.\n self.tmp_file.close()\n # Determine if different.\n same = False\n try:\n same = filecmp.cmp(self.tmp_path, filename, False)\n except OSError as e:\n if e.errno != errno.ENOENT:\n raise\n\n if same:\n # The new file is identical to the old one, just get rid of the new one.\n os.unlink(self.tmp_path)\n else:\n # The new file is different from the old one, or there is no old one.\n # Rename the new file to the permanent name.\n #\n # tempfile.mkstemp uses an overly restrictive mode, resulting in a\n # file that can only be read by the owner, regardless of the umask.\n # There's no reason to not respect the umask here, which means that\n # an extra hoop is required to fetch it and reset the new file's mode.\n #\n # No way to get the umask without setting a new one? Set a safe one\n # and then set it back to the old value.\n umask = os.umask(0o77)\n os.umask(umask)\n os.chmod(self.tmp_path, 0o666 & ~umask)\n if sys.platform == 'win32' and os.path.exists(filename):\n # NOTE: on windows (but not cygwin) rename will not replace an\n # existing file, so it must be preceded with a remove. 
Sadly there\n # is no way to make the switch atomic.\n os.remove(filename)\n os.rename(self.tmp_path, filename)\n except Exception:\n # Don't leave turds behind.\n os.unlink(self.tmp_path)\n raise\n\n return Writer()", "def wrap_in_tempfile(data):\n wrapped = tempfile.SpooledTemporaryFile()\n wrapped.write(data)\n wrapped.flush()\n wrapped.seek(0)\n return wrapped", "def makeTestFile(text):\n f = tempfile.NamedTemporaryFile()\n f.write(text)\n f.flush()\n return f", "def mock_write_test_path(tmp_path, monkeypatch):\n write_test_path = tmp_path / \"mock_write_test\"\n # Wrap the path so read-only methods can be replaced\n write_test_path = MagicMock(wraps=write_test_path)\n monkeypatch.setattr(Docker, \"_write_test_path\", lambda self: write_test_path)\n return write_test_path", "def _temp_file(self, val):\n fd, fn = tempfile.mkstemp()\n fp = os.fdopen(fd, \"wb\")\n if val:\n if not isinstance(val, bytes):\n fp.write(val.encode(\"utf-8\", \"surrogateescape\"))\n else:\n fp.write(val)\n fp.close()\n return fn", "def mspatchc_CreatePatchFile(jitter, get_str, set_str):\n ret_ad, args = jitter.func_args_stdcall([\"OldFileName\", \"NewFileName\", \"PatchFileName\", \"OptionFlags\", \"OptionData\"])\n raise RuntimeError('API not implemented')\n jitter.func_ret_stdcall(ret_ad, ret_value)", "def FileWrite(offset, buf):\r\n return _hiew.HiewGate_FileWrite(offset, buf)", "def _tempfile(self):\n fd, path = tempfile.mkstemp(dir = os.path.join(self.root, \"temporary\"))\n try:\n return os.fdopen(fd, \"wb\"), path\n except:\n os.unlink(path)\n os.close(fd)\n raise", "def _write(self, timeout_at, overwrite=True):\n\n if overwrite or not os.path.exists(self.filename):\n dirname = os.path.dirname(self.filename)\n filename = os.path.basename(self.filename)\n\n fs.mkdir_safe(dirname)\n with tempfile.NamedTemporaryFile(dir=dirname,\n prefix='.' 
+ filename,\n delete=False,\n mode='w') as tmpfile:\n os.chmod(tmpfile.name, 0o600)\n tmpfile.write(self.content)\n # We have to flush now to make sure utime is the last\n # operation we do on the file.\n tmpfile.flush()\n os.utime(tmpfile.name, (timeout_at, timeout_at))\n\n os.rename(tmpfile.name, self.filename)", "def _touch(path):\n open(path, \"w\").close()", "def write(self, fname):\n pass", "def touch(path):\n open(path, 'wb').close()", "def test_file(tmpdir):\n file_path = tmpdir / 'test.txt'\n file_path = file_path.write_binary(b'This is some test data!')\n return file_path", "def _write_file(template, localcontext, output_path, name, override):\r\n old_locale = locale.setlocale(locale.LC_ALL)\r\n locale.setlocale(locale.LC_ALL, str('C'))\r\n try:\r\n output = template.render(localcontext)\r\n finally:\r\n locale.setlocale(locale.LC_ALL, old_locale)\r\n path = os.path.join(output_path, name)\r\n try:\r\n os.makedirs(os.path.dirname(path))\r\n except Exception:\r\n pass\r\n\r\n with self._open_w(path, 'utf-8', override=override) as f:\r\n f.write(output)\r\n logger.info('writing {}'.format(path))\r\n\r\n # Send a signal to say we're writing a file with some specific\r\n # local context.\r\n signals.content_written.send(path, context=localcontext)", "def mspatchc_CreatePatchFileEx(jitter, get_str, set_str):\n ret_ad, args = jitter.func_args_stdcall([\"OldFileCount\", \"OldFileInfoArray\", \"NewFileName\", \"PatchFileName\", \"OptionFlags\", \"OptionData\", \"ProgressCallback\", \"CallbackContext\"])\n raise RuntimeError('API not implemented')\n jitter.func_ret_stdcall(ret_ad, ret_value)", "def mspatchc_CreatePatchFileByHandles(jitter):\n ret_ad, args = jitter.func_args_stdcall([\"OldFileHandle\", \"NewFileHandle\", \"PatchFileHandle\", \"OptionFlags\", \"OptionData\"])\n raise RuntimeError('API not implemented')\n jitter.func_ret_stdcall(ret_ad, ret_value)", "def make_temp_file(dir, data):\n fd, path = tempfile.mkstemp(dir=dir)\n if PY3:\n with os.fdopen(fd, 'w', encoding='utf-8') as f:\n f.write(data)\n else:\n with os.fdopen(fd, 'w') as f:\n f.write(data)\n return path", "def save_tmp_file(self, data):\n with open(self.tmp_file, 'wb') as f:\n f.write(data)", "def file_write(stuff, file_path):\n with open(file_path, \"wt\") as fo:\n fo.write(stuff)" ]
[ "0.61978513", "0.6094686", "0.6089081", "0.5811616", "0.570289", "0.56202227", "0.56129915", "0.5567952", "0.54997003", "0.54888415", "0.5467721", "0.5421477", "0.5417318", "0.5410641", "0.5394676", "0.53655595", "0.53639346", "0.5354897", "0.5350714", "0.5337507", "0.53264195", "0.5317737", "0.5311987", "0.5296932", "0.52888685", "0.5275394", "0.52661043", "0.5263606", "0.52520293", "0.52376103" ]
0.7572888
0
Returns a list with all the chunks in this diff.
def get_chunks(diff):
    diff = clean_diff(diff)
    chunk = []
    chunks = []
    for line in diff.split('\n'):
        if not line:
            continue
        if line.startswith('@@ '):
            if chunk:
                chunks.append('\n'.join(chunk) + '\n')
            chunk = [line]
        else:
            chunk.append(line)
    if chunk:
        chunks.append('\n'.join(chunk) + '\n')
    return chunks
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getChunks():", "def chunks(self) -> List[OctreeChunk]:\n return [tile_data.octree_chunk for tile_data in self._tiles.values()]", "def chunk_set(self) -> Set[OctreeChunk]:\n return self._chunks", "def smallfile_blocks(self):\n return self._smallfile_blocks", "def chunks(self, n):\n return _([self._[i:i+n] for i in range(0, self.size()._, n)])", "def split(self):\n overall_chunks = []\n for filename in self.get_all_files():\n file_chunks = self.split_single_file(filename)\n overall_chunks.extend(file_chunks)\n return overall_chunks", "def list_fragments(self):\n return list(self.data.fragments)", "def Chunks(l):\n return_list = [[]]\n counter = 0\n index = 0\n for i in l:\n # Size is split in half due to the max size being a sum of src and dst.\n if counter > (self._ADDRESS_LENGTH_LIMIT/2):\n counter = 0\n index += 1\n return_list.append([])\n if i.version == 6:\n counter += self._IPV6_SIZE\n else:\n counter += 1\n return_list[index].append(i)\n return return_list", "def get_chunks(self, l, n):\r\n for i in range(0, len(l), n):\r\n yield l[i:i+n]", "def get_blocks(fname):\n with open(fname, 'r') as fh:\n lines = iter(fh.readlines())\n parts = []\n line = next(lines)\n while True:\n if line.startswith('diff --git'):\n block = [line]\n for line in lines:\n if line.startswith('@@'):\n break\n block.append(line)\n parts.append(block)\n if line.startswith('@@'):\n block = [line]\n for line in lines:\n if line.startswith('@@') or line.startswith('diff --git'):\n break\n block.append(line)\n parts.append(block)\n if line.startswith('\\\\ No newline'):\n parts[-1].append(line)\n try:\n line = next(lines)\n except StopIteration:\n break\n if not lines:\n break\n return parts", "def getChunks(inp_list, chunk_size):\n return [inp_list[x:x + chunk_size] for x in range(0, len(inp_list), chunk_size)]", "def _get_chunks(l, n = 10):\n \n for i in range(0, len(l), n): yield l[i: i + n]", "def chunk(self, size: int) -> 'List':\n if size <= 0:\n raise ValueError('The chunk size should be a positive number')\n\n return List(self[idx:idx+size] for idx in range(0, len(self), size))", "def get_fragments_for_mdv_calculation(self):\n return list(self.fragments_for_mdv_calculation)", "def _calculate_chunk_offsets(self):\n offset = 0\n offsets = []\n for chunk in self.data.iterchunks():\n offsets.append(offset)\n offset += len(chunk)\n return np.array(offsets)", "def segments(self):\n return (self._subset((i,i+1)) for i in range(len(self)-1))", "def _chunk_data(self):\n for n in range(0, len(self.data) + 1, len(self.data) //\n self.num_of_chunks):\n yield self.data[0 + n:len(self.data) // self.num_of_chunks + n]", "def getModifiedBlockList(self):\n if self.binary:\n return []\n block_list = []\n for child in self.children:\n old_line_list = [line.strip() for line, color in child.getOldCodeList()\n if line is not None and color in (MODIFIED_DIFF_COLOR,\n DELETED_DIFF_COLOR)]\n new_line_list = [line.strip() for line, color in child.getNewCodeList()\n if line is not None and color in (MODIFIED_DIFF_COLOR,\n ADDITION_DIFF_COLOR)]\n if old_line_list or new_line_list:\n block_list.append((child,(old_line_list, new_line_list)))\n return block_list", "def GetBlocks(state):\n result = []\n last_pos = 0\n for entry in state:\n pos = entry['pos']\n # Calculate block start points from the beginning of individual lines.\n blocks = [(s[0]-last_pos, s[1]-s[0]) for s in entry['blocks']]\n # Add one end marker block.\n blocks.append((pos-last_pos, 0))\n result.append(blocks)\n last_pos = pos\n return result", "def 
chunk_list(exps):\n import numpy as np\n batch_idx = [exp['batch_id'] for exp in exps]\n unique_batch_idx = np.unique(batch_idx)\n exp_chunks = []\n for batch in unique_batch_idx:\n idx = [i for i, batch_id in enumerate(batch_idx)\n if batch_id == batch]\n size = exps[idx[0]]['slurm']['experiments_per_job']\n exp_chunks.extend(([exps[i] for i in idx[pos:pos + size]] for pos in range(0, len(idx), size)))\n return exp_chunks", "def changed_chunks(self) -> Generator[DimensionCoordinates, None, None]:\n changed_chunks = set()\n for chunk_location, chunk in self._chunk_cache.items():\n if chunk is None:\n # if the chunk is None and the saved record is not None, the chunk has changed.\n if chunk_location in self._chunk_index:\n _, save_chunk_index = self._chunk_index[chunk_location]\n chunk_storage = self._chunk_history[chunk_location]\n if chunk_storage[save_chunk_index] is not None:\n changed_chunks.add(chunk_location)\n yield chunk_location\n else:\n changed_chunks.add(chunk_location)\n yield chunk_location\n\n elif chunk.changed:\n changed_chunks.add(chunk_location)\n yield chunk_location\n for chunk_location, (index, save_index) in self._chunk_index.items():\n if index != save_index and chunk_location not in changed_chunks:\n yield chunk_location", "def __chunks(l, n):\n for i in range(0, len(l), n):\n yield l[i:i + n]", "def chunk(it, size):\n it = iter(it)\n return iter(lambda: list(islice(it, size)), [])", "def chunks(self, list_to_chunk, size):\n for i in range(0, len(list_to_chunk), size):\n yield list_to_chunk[i:i + size]", "def iter_fragments(self):\n if self.segment is None:\n return iter(list())\n return self.segment.iter_fragments()", "def iter_fragments(self):\n if self.segment is None:\n return iter(list())\n return self.segment.iter_fragments()", "def _split_on_chunks(self, iterable, size):\n return utils.split_on_chunks(iterable, size)", "def get_blocks(self):\n block_positions = product(range(self.block_size), range(self.block_size))\n return [self._get_block(i, j) for i, j in block_positions]", "def _chunklines(self):\r\n text = self.textwnd.toPlainText()\r\n lines_in_chunk = len(text.split(\"\\n\"))\r\n logger.debug(\"Lines in chunk: {}\".format(lines_in_chunk))\r\n return lines_in_chunk", "def portions(self) -> List[IngredientObjectPortions]:\n return self._portions" ]
[ "0.7277695", "0.70243096", "0.6752348", "0.66361326", "0.6562482", "0.64825803", "0.6463599", "0.6388702", "0.63127536", "0.62394726", "0.6157505", "0.6129312", "0.6112083", "0.61033", "0.60561705", "0.60495746", "0.6049434", "0.6047188", "0.6032273", "0.6024378", "0.60216916", "0.60207105", "0.60198367", "0.5990227", "0.5971981", "0.5971981", "0.5965622", "0.5964281", "0.5958649", "0.5942751" ]
0.76115656
0
Checks if chunk is inside text.
def chunk_in_text(chunk, text):
    chunk = clean_chunk(chunk)
    return text.find(chunk) >= 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hasRawText(self, text):\n r = re.compile(r'<(p|blockquote|div|form|table|ul|ol|dl|pre|h\\d)[^>]*?>.*</\\1>',\n re.S).sub('', text.strip()).strip()\n r = re.compile(r'<(hr|br)[^>]*?/>').sub('', r)\n return '' != r", "def has_text(self, page: fitz.Page) -> bool:\n return page.get_text(clip=page.trimbox).strip() != \"\"", "def is_text(content):\n if b\"\\0\" in content:\n return False\n if not content: # Empty files are considered text\n return True\n # Try to decode as UTF-8\n try:\n content.decode(\"utf8\")\n except UnicodeDecodeError:\n return False\n else:\n return True", "def is_text( self ):\n return self.get_main_type() == 'text'", "def text_exists(self, text: str)-> bool:\n result = self.__content.find(text)\n if result == -1:\n return False\n else:\n return True", "def hasContents():", "def is_tagged_text(text):\n return len(text) > len(strip_tags(text))", "def _is_text_tag(tag):\n return tag.name not in ['script', 'style']", "def is_plain_text(self):\n return self._tag == 'plain_text'", "def has_text(self):\n try:\n first = self.text_planets()[0]\n except IndexError:\n first = None\n\n return first is not None", "def is_text(line, start, end, line_number, code_blocks):\n if any(c[0] <= line_number <= c[1] for c in code_blocks):\n return False\n else:\n n = len(line)\n idx = -1\n last_block_was_text = False\n in_link = False\n in_url = False\n while idx < start:\n if in_link:\n link_idx = line[idx+1:].find(')')\n assert link_idx != -1\n code_idx = n\n url_idx = n\n elif in_url:\n url_idx = line[idx+1:].find('>')\n assert url_idx != -1\n code_idx = n\n link_idx = n\n else:\n code_idx = line[idx+1:].find('`')\n link_idx = line[idx+1:].find('](')\n url_idx = line[idx+1:].find('<')\n if code_idx == -1:\n code_idx = n\n if link_idx == -1:\n link_idx = n\n if url_idx == -1:\n url_idx = n\n\n nearest_match = min(code_idx, link_idx, url_idx)\n\n if nearest_match == url_idx:\n in_url = not in_url\n elif nearest_match == link_idx:\n in_link = not in_link\n idx += nearest_match+1\n last_block_was_text = not last_block_was_text\n\n return last_block_was_text", "def isText(self):\n return _libsbml.XMLToken_isText(self)", "def is_tagged_text(*a, **kw):\n return is_tagged_text(*a, **kw)", "def has_tags_in_content(self):\n\t\treturn self.get_content() and re_tag.search(self.get_content())", "def check(self, text):\n\n try:\n console.print(self.parser.parse(text)[\"result\"][1:], style=\"green\")\n return True\n\n except:\n console.print(\"An error has occurred while trying to parse the typo!\", style=\"red\")\n return False", "def maybe_text(instream):\n if instream.mode == 'w':\n # output binary streams *could* hold text\n # (this is not about the file type, but about the content)\n return True\n try:\n sample = instream.peek(_TEXT_SAMPLE_SIZE)\n except EnvironmentError:\n return None\n if set(sample) & set(_NON_TEXT_BYTES):\n logging.debug(\n 'Found unexpected bytes: identifying unknown input stream as binary.'\n )\n return False\n try:\n sample.decode('utf-8')\n except UnicodeDecodeError as err:\n # need to ensure we ignore errors due to clipping inside a utf-8 sequence\n if err.reason != 'unexpected end of data':\n logging.debug(\n 'Found non-UTF8: identifying unknown input stream as binary.'\n )\n return False\n logging.debug('Tentatively identifying unknown input stream as text.')\n return True", "def isText(self):\n ret = libxml2mod.xmlNodeIsText(self._o)\n return ret", "def text_search(self, text, stuff_to_cop):\n if any(ext in text for ext in stuff_to_cop):\n return(True)\n else:\n 
return(False)", "def is_content(cls, path_or_content):\n return any(path_or_content.lstrip().startswith(s) for s in cls.valid_content_start)", "def _ConsumeTextForPlugin(self):\n return (self._plugin_stack and\n self._plugin_stack[-1][\"id\"] in self._RAW_PLUGINS)", "def is_in_file(file_path, text):\n with open(file_path, 'r') as f:\n content = f.read()\n return text in content", "def verify_text(self, text):\n pass", "def check_contain(file_content, check_text):\n for line in file_content:\n if check_text in line:\n return True\n return False", "def has_contents(self):\n return len(self.byteruns())>0", "def testIsText(self):\n parser = text_parser.PyparsingSingleLineTextParser()\n\n bytes_in = b'this is My Weird ASCII and non whatever string.'\n self.assertTrue(parser._IsText(bytes_in))\n\n bytes_in = 'Plaso Síar Og Raðar Þessu'\n self.assertTrue(parser._IsText(bytes_in))\n\n bytes_in = b'\\x01\\\\62LSO\\xFF'\n self.assertFalse(parser._IsText(bytes_in))\n\n bytes_in = b'T\\x00h\\x00i\\x00s\\x00\\x20\\x00'\n self.assertTrue(parser._IsText(bytes_in))\n\n bytes_in = b'Ascii\\x00'\n self.assertTrue(parser._IsText(bytes_in))\n\n bytes_in = b'Ascii Open then...\\x00\\x99\\x23'\n self.assertFalse(parser._IsText(bytes_in))", "def has_content(line):\n return re.sub(r'\\W+', '', line)", "def check_partition(self, sectioned_text, full_text):\n\n restitched_text = self.restitch_text(sectioned_text)\n\n length_check = (len(restitched_text) == len(full_text))\n\n return length_check", "def is_text(self):\n return self.value_type in (str, unicode)", "def isHTML(content):\n\n return '<html' in content or 'html>' in content", "def contains_octree_chunk(self, octree_chunk: OctreeChunk) -> bool:\n return octree_chunk in self._chunks" ]
[ "0.6827184", "0.6780171", "0.6645555", "0.661451", "0.6532266", "0.64886785", "0.6441595", "0.6326669", "0.63196266", "0.63009065", "0.6285457", "0.61925936", "0.6188996", "0.6134521", "0.61328065", "0.6126684", "0.5998353", "0.59940386", "0.59071785", "0.5866151", "0.58565104", "0.5839467", "0.58366126", "0.5814165", "0.58047676", "0.5778417", "0.5767264", "0.57672423", "0.5752517", "0.5678636" ]
0.8283613
0
Checks if this file is binary with the diff command.
def is_binary(check_file):
    # compare against /dev/null; retcode=(0, 1, 2) accepts all of diff's exit codes
    res = diff[devnull, check_file](retcode=(0, 1, 2))
    return res.startswith('Binary files')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_binary_diff(self):\n diff = (\n b'diff --git a/pysvn-1.5.1.tar.gz b/pysvn-1.5.1.tar.gz\\n'\n b'new file mode 100644\\n'\n b'index 0000000..86b520c\\n'\n b'Binary files /dev/null and b/pysvn-1.5.1.tar.gz differ\\n'\n )\n\n parsed_files = self.tool.get_parser(diff).parse()\n self.assertEqual(len(parsed_files), 1)\n\n self.assert_parsed_diff_file(\n parsed_files[0],\n orig_filename=b'pysvn-1.5.1.tar.gz',\n orig_file_details=PRE_CREATION,\n modified_filename=b'pysvn-1.5.1.tar.gz',\n modified_file_details=b'86b520c',\n new_unix_mode='100644',\n binary=True,\n data=diff)", "def IsBinary(self, filename):\n mimetype = mimetypes.guess_type(filename)[0]\n if not mimetype:\n return False # e.g. README, \"real\" binaries usually have an extension\n # special case for text files which don't start with text/\n if mimetype in TEXT_MIMETYPES:\n return False\n return not mimetype.startswith(\"text/\")", "def check_binary(self):\n if shutil.which(self.binary):\n return True\n else:\n logging.warning(R+'The supplied binary or path does not exist... Exiting'+W)\n exit(1)", "def test_git_new_single_binary_diff(self):\n full_diff, diffs = self._read_diff_fixture(\n 'git_new_single_binary.diff',\n expected_num_diffs=2)\n\n parsed_files = self.tool.get_parser(full_diff).parse()\n self.assertEqual(len(parsed_files), 2)\n\n self.assert_parsed_diff_file(\n parsed_files[0],\n orig_filename=b'Checked.svg',\n orig_file_details=PRE_CREATION,\n modified_filename=b'Checked.svg',\n modified_file_details=b'',\n new_unix_mode='100644',\n insert_count=9,\n data=diffs[0])\n\n self.assert_parsed_diff_file(\n parsed_files[1],\n orig_filename=b'dialog.jpg',\n orig_file_details=b'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391',\n modified_filename=b'dialog.jpg',\n modified_file_details=b'5503573346e25878d57775ed7caf88f2eb7a7d98',\n new_unix_mode='100644',\n binary=True,\n data=diffs[1])", "def is_binary_format(content, maxline=20):\n for lc in content[:maxline]:\n if b'format' in lc:\n if b'binary' in lc:\n return True\n return False\n return False", "def is_binary(self):\n return self._vtype is self.BINARY", "def is_patch_binary_copy_modify_with_no_change(patch):\n diff_header = split_header(patch.text)[0]\n return patch.is_binary and is_copy_modify_with_no_change(diff_header)", "def isBinaryFormat(content, maxline=20):\n for lc in content[:maxline]:\n if b'format' in lc:\n if b'binary' in lc:\n return True\n return False\n return False", "def is_binary(filename):\n with open(filename, 'rb') as f:\n for block in f:\n if '\\0' in block:\n return True\n return False", "def _is_binary(bytes_data):\n # From: https://stackoverflow.com/a/7392391\n return bool(bytes_data.translate(None, _TEXTCHARS))", "def _is_binary(name):\n\n return name in unidata.unicode_binary or name in unidata.unicode_alias['binary']", "def CompareBinaries(ref_bin, dec_bin):\n with open(ref_bin, 'rb') as reff:\n ref_data = reff.read()\n\n with open(dec_bin, 'rb') as decf:\n dec_data = decf.read()\n\n if ref_data != dec_data:\n raise ConformanceTestError('Binary files mismatch: %s %s' %\n (ref_bin, dec_bin))", "def test_parse_diff_with_deleted_binary_files(self):\n diff1 = (\n b'diff --git a/foo.bin b/foo.bin\\n'\n b'deleted file mode 100644\\n'\n b'Binary file foo.bin has changed\\n'\n )\n diff2 = (\n b'diff --git a/bar.bin b/bar.bin\\n'\n b'deleted file mode 100644\\n'\n b'Binary file bar.bin has changed\\n'\n )\n diff = diff1 + diff2\n\n parsed_files = self.tool.get_parser(diff).parse()\n self.assertEqual(len(parsed_files), 2)\n\n 
self.assert_parsed_diff_file(\n parsed_files[0],\n orig_filename=b'foo.bin',\n orig_file_details=b'',\n modified_filename=b'foo.bin',\n modified_file_details=b'',\n old_unix_mode='100644',\n deleted=True,\n binary=True,\n data=diff1)\n\n self.assert_parsed_diff_file(\n parsed_files[1],\n orig_filename=b'bar.bin',\n orig_file_details=b'',\n modified_filename=b'bar.bin',\n modified_file_details=b'',\n old_unix_mode='100644',\n deleted=True,\n binary=True,\n data=diff2)", "def no_diff(file1, file2):\n # Using subprocess again. \n process = subprocess.run(['diff',file1,file2], capture_output=True)\n # Subprocess outputs bytes objects to stdout. If diff doesn't find\n # anything, it outputs an empty string. As a bytes object, that\n # is b''.\n if process.stdout==b'':\n return True\n return False", "def IsBinaryData(self, data):\r\n # Derived from how Mercurial's heuristic, see\r\n # http://selenic.com/hg/file/848a6658069e/mercurial/util.py#l229\r\n return bool(data and \"\\0\" in data)", "def DataIsBinaryData(self):\n return self.data_type == definitions.REG_BINARY", "def is_binary(*args):\n return _ida_hexrays.is_binary(*args)", "def compatible_file(file: Union[BinaryIO, str, Path]) -> bool:\n try:\n fd = open(file, 'rb') if isinstance(file, (str, Path)) else file\n\n offset = fd.tell()\n fd.seek(0)\n result = _bytes_match(fd, ELF_MAGIC) or _bytes_match(fd, ARCHIVE_MAGIC)\n fd.seek(offset)\n finally:\n if isinstance(file, (str, Path)):\n fd.close()\n\n return result", "def _bytes_match(fd: BinaryIO, expected: bytes) -> bool:\n try:\n offset = fd.tell()\n data = fd.read(len(expected))\n fd.seek(offset)\n return data == expected\n except IOError:\n return False", "def is_binary(filename):\n fin = open(filename, 'rb')\n try:\n CHUNKSIZE = 1024\n while 1:\n chunk = fin.read(CHUNKSIZE)\n if '\\0' in chunk: # found null byte\n return True\n if len(chunk) < CHUNKSIZE:\n break # done\n # A-wooo! Mira, python no necesita el \"except:\". Achis... 
Que listo es.\n finally:\n fin.close()\n\n return False", "def test_git_new_binaries_diff(self):\n full_diff, diffs = self._read_diff_fixture(\n 'git_new_binaries.diff',\n expected_num_diffs=3)\n\n parsed_files = self.tool.get_parser(full_diff).parse()\n self.assertEqual(len(parsed_files), 3)\n\n self.assert_parsed_diff_file(\n parsed_files[0],\n orig_filename=b'other.png',\n orig_file_details=b'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391',\n modified_filename=b'other.png',\n modified_file_details=b'fddeadc701ac6dd751b8fc70fe128bd29e54b9b0',\n new_unix_mode='100644',\n binary=True,\n data=diffs[0])\n\n self.assert_parsed_diff_file(\n parsed_files[1],\n orig_filename=b'initial.png',\n orig_file_details=b'fddeadc701ac6dd751b8fc70fe128bd29e54b9b0',\n modified_filename=b'initial.png',\n modified_file_details=b'532716ada15dc62ddf8c59618b926f34d4727d77',\n binary=True,\n data=diffs[1])\n\n self.assert_parsed_diff_file(\n parsed_files[2],\n orig_filename=b'xtxt.txt',\n orig_file_details=PRE_CREATION,\n modified_filename=b'xtxt.txt',\n modified_file_details=b'',\n new_unix_mode='100644',\n insert_count=1,\n data=diffs[2])", "def inbinary(self):\n from telnetlib3.telopt import BINARY\n return self.force_binary or self.stream.remote_option.enabled(BINARY)", "def test_file_bin_readwrite(self):\n FileWriter(self.binary_path).write_bin(self.binary_string)\n bin_data = FileReader(self.binary_path).read_bin()\n self.assertEqual(bin_data, self.binary_string)", "def _should_send_binary(self) -> bool:\n if not self.binary_support:\n return False\n\n content_type = self._get_content_type()\n if not content_type.startswith(self.non_binary_content_type_prefixes):\n return True\n\n content_encoding = self._get_content_encoding()\n # Content type is non-binary but the content encoding might be.\n return \"gzip\" in content_encoding.lower()", "def checkForBinary(binary):\n try:\n fullPath = subprocess.check_output(['which',binary])\n return True\n except subprocess.CalledProcessError as e:\n return False", "def binary_mode(self, filename: str) -> bool:\n raise NotImplementedError(SpectrumIOBase.binary_mode.__qualname__)", "def verify_binary_image(self, image_path):\n raise NotImplementedError", "def is_binary_payload(cls) -> bool:\n return True", "def test_get_file_binary_content(self):\n content = image_helper.get_file_binary_content(self.subject)\n\n self.assertGreater(len(content), 0)\n\n with open(self.subject, \"rb\") as f:\n original_content = f.read()\n\n self.assertEqual(content, original_content)", "def check_magic(self, target: str):\n\t\twith open(target, \"rb+\") as archive:\n\t\t\tmagic = archive.read(4)\n\t\t\tif magic == struct.pack(\"I\", self.magic):\n\t\t\t\treturn True\n\t\t\telse:\n\t\t\t\treturn False" ]
[ "0.7357386", "0.6626895", "0.66226614", "0.6603421", "0.6584796", "0.6577146", "0.65536094", "0.6459666", "0.64458364", "0.6394738", "0.63663214", "0.63508457", "0.6341169", "0.61732894", "0.61545825", "0.6121706", "0.61196023", "0.60302585", "0.59410125", "0.5940805", "0.5934761", "0.5870259", "0.5860144", "0.58407235", "0.58207804", "0.58150965", "0.5801911", "0.569694", "0.569083", "0.56873435" ]
0.7674245
0
Function that transforms measurement data with samples taken at any (possibly irregular) sample rate and outputs the same measurements evenly spaced according to a given step length.
def recreate_sampling_times( data: DataFrame, step_length: float, start_time: float, end_time: float, plot_col=None, ) -> DataFrame: first_time_in_df = data[DFKeys.TIME.value].iloc[0] if start_time < first_time_in_df: raise ValueError("start time cannot precede first time in df") get_shifted_time = lambda row: row[DFKeys.TIME.value] - start_time shifted_timestamps = data.apply(get_shifted_time, axis=1).rename( DFKeys.TIME.value, axis=1 ) duration = end_time - start_time timesteps = np.arange(0, duration, step_length) new_columns = [pd.Series(timesteps, name=DFKeys.TIME.value)] columns_except_time = data.columns.difference( [ DFKeys.TIME.value, "child_frame_id", "header.frame_id", "header.seq", "header.stamp.nsecs", "header.stamp.secs", "pose.covariance", "twist.covariance", "pins_0", "pins_1", "pins_2", "pins_3", "pins_4", "pins_5", "pins_6", "pins_7", ] ) for col_name in columns_except_time: f = interp1d(shifted_timestamps.values, data[col_name].values) new_columns.append(pd.Series(f(timesteps), name=col_name)) data_new = pd.concat(new_columns, axis=1) if plot_col in data.columns: SAVEDIR = Path("results/interpolation") sea.set_style("white") # plt.figure(figsize=(5, 2.5)) sea.lineplot(x=shifted_timestamps.values, y=data[plot_col], label="original") sea.lineplot( x=DFKeys.TIME.value, y=plot_col, data=data_new, label="interpolated" ) # plt.ylabel("Velocity") # plt.savefig(SAVEDIR.joinpath("%s.pdf" % plot_col)) plt.show() return data_new
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def smoothed(sequence, step=1, start=0):\n next_index = start + 1\n last = len(sequence) \n new_sequence = []\n if not step:\n return sequence\n ratio_step = step + 1\n for item in sequence:\n new_sequence.append(item)\n if next_index < last:\n next_item = sequence[next_index]\n ratio = (item + next_item) / (step + 1)\n ratio = int(ratio)\n for x in range(step):\n value = (ratio * x) + item\n new_sequence.append(int(value))\n next_index = next_index + 1\n return new_sequence", "def step(self, dt_usec):\n\n # If we have no listeners, don't waste time calculating samples\n # @todo: Maybe calculate self.next_step so that we can add sensors during sim, but only if it turns out to be necessary\n if len(self.step_listeners) == 0:\n return\n \n # If the start of our next sample is greater than 1 (step), skip creating samples for this step\n if self.next_start >= 1.0:\n self.next_start -= 1\n return\n \n samples_per_step = self.sampling_rate * dt_usec / 1000000.\n sample_pct_of_step = 1.0/samples_per_step + 0.00000001 # For lerping -- add a tiny amount to eliminate floating point errors (doesn't affect the sim at this scale)\n\n self.step_lerp_pcts = np.arange(self.next_start, 1.0, sample_pct_of_step)\n\n # Call get_step_samples() (implemented in subclasses) to get the samples and add them to the buffer\n samples = self.create_step_samples(dt_usec) # Format np.array([<sample time>, <sample data 1>, ...])\n\n # Send our data to any attached listeners\n #self.logger.debug(\"Sending samples to {} step listeners\".format(len(self.step_listeners)))\n for step_listener in self.step_listeners:\n step_listener.step_callback(self, samples)\n\n # Update or start pct for the next step\n # @TODO: If we don't add .0000001 (or any tiny number, really) here the number of samples taken will be off by quite a bit at smaller step sizes. 
Probably floating point error....\n #self.next_start = sample_pct_of_step - (1 - self.step_lerp_pcts[-1]) +.0000001 # Works, but moved this to sample_pct_of_step calculation\n self.next_start = sample_pct_of_step - (1 - self.step_lerp_pcts[-1])", "def apply_fourier_transform(chunked_audio):\n pass", "def create_step_samples(self):\n pass # Deferred to subclasses\n\n \"\"\" Example using pod height:\n start_value = self.sim.pod.last_height\n end_value = self.sim.pod.height\n\n # Lerp values to get samples\n samples = start_value + self.step_lerp_pcts * (end_value - start_value) # Or use self.lerp(start_value, end_value), but doing it directly is faster since no function call\n if self.noise_scale > 0:\n # Add gaussian noise if specified\n return samples + np.random.normal(0.0, noise_scale, len(samples))\n else:\n # No noise\n return samples \n \"\"\"", "def resample_tractogram(tractogram, step_size):\n lengths=list(length(tractogram))\n tractogram_res = []\n for i, f in enumerate(tractogram):\n\tnb_res_points = np.int(np.floor(lengths[i]/step_size))\n\ttmp = set_number_of_points(f, nb_res_points)\n\ttractogram_res.append(tmp)\n tractogram_res = nib.streamlines.array_sequence.ArraySequence(tractogram_res)\n return tractogram_res", "def steps(self, length):\n steps = max(1, round(self.length / length, 0))\n return 1.0 / steps, int(steps)", "def iter_time_series(ts, in_len, out_len, step):\n\n for i in reversed(range(len(ts) - out_len, in_len - 1, -step)):\n x = ts[i - in_len:i]\n y = ts[i:i + out_len]\n yield x, y", "def test_fold_along_delay_even_length_units_unchanged():\n delays = (np.arange(-10, 10) + 0.5) * units.s\n array = np.ones((1, 10, 20)) * units.mK**2 * units.Mpc**3\n errs = np.ones((1, 10, 20)) * units.mK**2 * units.Mpc**3\n axis = -1\n array_out, errs_out = utils.fold_along_delay(delays, array, errs, axis=axis)\n assert units.mK**2 * units.Mpc**3 == array_out.unit", "def _resample_vectors(signal, delay, fs, n=None):\n signal = signal.samples()\n if n is None:\n n = signal.peek()\n signal = _stream_split_iterators(signal, n)\n\n apply_delay = auraliser.realtime.apply_doppler\n\n signal = Stream(map(np.array, zip(*[apply_delay(s, d, fs) for s, d in zip(signal, delay.tee(n))])))\n return signal", "def test_make_sampled_format(self):\n for num_inputs in [1, 3]:\n for num_outputs in [1, 2, 4]:\n for num_time_steps in [4, 10, 12]:\n # Generate data\n # P=2 format [0, 1, 2, 3, ...]\n sample_interval = 2\n dt_system = np.random.random()\n dt_sample = sample_interval * dt_system\n outputs = np.random.random(\n (num_time_steps, num_outputs, num_inputs))\n time_steps = make_time_steps(\n num_time_steps, sample_interval)\n time_values = time_steps * dt_system\n\n # Compute using modred\n my_ERA = era.ERA()\n time_steps_computed, outputs_computed =\\\n era.make_sampled_format(time_values, outputs)\n #self.assertEqual(dt_system_computed, dt_system)\n\n # Reference values\n num_time_steps_true = (num_time_steps - 1) * 2\n time_steps_true = make_time_steps(num_time_steps_true, 1)\n outputs_true = np.zeros(\n (num_time_steps_true, num_outputs, num_inputs))\n outputs_true[::2] = outputs[:-1]\n outputs_true[1::2] = outputs[1:]\n\n # Compare values\n np.testing.assert_equal(\n time_steps_computed, time_steps_true)\n np.testing.assert_equal(outputs_computed, outputs_true)\n\n # Test that if there is a wrong time value, get an error\n time_values[num_time_steps // 2] = -1\n self.assertRaises(\n ValueError, era.make_sampled_format, time_values,\n outputs)", "def 
fake_data(sample_rate=512,psd_segment_length=60,nsegs=16):\n epoch = 1153742417.0\n ts_data = numpy.random.normal(0,1,sample_rate*psd_segment_length*nsegs)\n ts_data = types.TimeSeries(ts_data,delta_t=1.0/sample_rate,epoch=epoch)\n return ts_data", "def step(amplitude, t_stop):\n times = np.array([0, t_stop/10, t_stop])\n amps = np.array([0, amplitude, amplitude])\n return times, amps", "def impulse_data(sample_rate=512,psd_segment_length=60):\n epoch = 1153742417.0\n ts_data = numpy.zeros(sample_rate * psd_segment_length)\n ts_data = types.TimeSeries(ts_data, delta_t=1.0/sample_rate, epoch=epoch)\n return ts_data", "def resample_to_delta_t(timeseries, delta_t, method='butterworth'):\n\n if not isinstance(timeseries,TimeSeries):\n raise TypeError(\"Can only resample time series\")\n\n if timeseries.kind is not 'real':\n raise TypeError(\"Time series must be real\")\n\n if timeseries.delta_t == delta_t:\n return timeseries * 1\n\n if method == 'butterworth':\n lal_data = timeseries.lal()\n _resample_func[timeseries.dtype](lal_data, delta_t)\n data = lal_data.data.data \n \n elif method == 'ldas': \n factor = int(delta_t / timeseries.delta_t)\n \n if factor == 8:\n timeseries = resample_to_delta_t(timeseries, timeseries.delta_t * 4.0, method='ldas')\n factor = 2\n elif factor == 16:\n timeseries = resample_to_delta_t(timeseries, timeseries.delta_t * 4.0, method='ldas')\n factor = 4 \n elif factor == 32:\n timeseries = resample_to_delta_t(timeseries, timeseries.delta_t * 8.0, method='ldas')\n factor = 4 \n elif factor == 64:\n timeseries = resample_to_delta_t(timeseries, timeseries.delta_t * 16.0, method='ldas')\n factor = 4 \n\n try:\n filter_coefficients = LDAS_FIR_LP[factor]\n except:\n raise ValueError('Unsupported resample factor, %s, given' %factor)\n \n # apply the filter\n series = scipy.signal.lfilter(filter_coefficients, 1.0, \n timeseries.numpy())\n \n # reverse the time shift caused by the filter\n corruption_length = len(filter_coefficients)\n data = numpy.zeros(len(timeseries))\n data[:len(data)-corruption_length/2] = series[corruption_length/2:]\n \n # zero out corrupted region\n data[0:corruption_length/2] = 0\n data[len(data)-corruption_length/2:] = 0 \n\n # Decimate the time series\n data = data[::factor] * 1\n \n else:\n raise ValueError('Invalid resampling method: %s' % method)\n \n return TimeSeries(data, delta_t = delta_t,\n dtype=timeseries.dtype, \n epoch=timeseries._epoch)", "def slice(ds, timedelta_input, timedelta_output, to_predict, stepwidth, input_sampling, output_sampling):\n\n inputs = []\n outputs = []\n\n start_input_frame = ds.index[0]\n while start_input_frame + timedelta_input + timedelta_output <= ds.index[-1]:\n\n end_input_frame = start_input_frame + timedelta_input\n end_output_frame = end_input_frame+timedelta_output\n\n input_frame = ds[start_input_frame:end_input_frame]\n output_frame = ds[end_input_frame:end_output_frame]\n\n input_frame = input_frame.resample(input_sampling)\n output_frame = output_frame.resample(output_sampling)\n\n for k in output_frame.keys():\n if k not in to_predict:\n del output_frame[k]\n\n input_shape = input_frame.shape\n output_shape = output_frame.shape\n\n inputs.append(input_frame.as_matrix().flatten())\n outputs.append(output_frame.as_matrix().flatten())\n\n #Move forward\n start_input_frame = start_input_frame + stepwidth\n\n\n return (inputs, input_shape), (outputs, output_shape)", "def speed_perturb(data, speeds=None):\n if speeds is None:\n speeds = [0.9, 1.0, 1.1]\n for sample in data:\n assert \"sample_rate\" 
in sample\n assert \"wav\" in sample\n sample_rate = sample[\"sample_rate\"]\n waveform = sample[\"wav\"]\n speed = random.choice(speeds)\n if speed != 1.0:\n wav, _ = torchaudio.sox_effects.apply_effects_tensor(\n waveform, sample_rate, [[\"speed\", str(speed)], [\"rate\", str(sample_rate)]]\n )\n sample[\"wav\"] = wav\n\n yield sample", "def unchunkify(chunks):\n recreated_chunks = list(map(lambda x: np.fft.irfft(combine_phase_and_power(*x)), chunks))\n total_length = len(recreated_chunks) * CHUNK_SIZE // 2\n output = np.zeros(total_length)\n window = np.power(np.sin(np.linspace(0, np.pi, CHUNK_SIZE)), 2)\n \n for i, j in enumerate(xrange(0, total_length - CHUNK_SIZE, CHUNK_SIZE // 2)):\n o = window * recreated_chunks[i]\n \n output[j: j+CHUNK_SIZE] += o\n return output", "def test_speed_accuracy(self, factor, use_lengths):\n n_to_trim = 20\n\n sample_rate = 1000\n freq = 2\n times = torch.arange(0, 5, 1.0 / sample_rate)\n waveform = torch.cos(2 * math.pi * freq * times).unsqueeze(0).to(self.device, self.dtype)\n\n if use_lengths:\n lengths = torch.tensor([waveform.size(1)])\n else:\n lengths = None\n\n output, output_lengths = F.speed(waveform, orig_freq=sample_rate, factor=factor, lengths=lengths)\n\n if use_lengths:\n self.assertEqual(output.size(1), output_lengths[0])\n else:\n self.assertEqual(None, output_lengths)\n\n new_times = torch.arange(0, 5 / factor, 1.0 / sample_rate)\n expected_waveform = torch.cos(2 * math.pi * freq * factor * new_times).unsqueeze(0).to(self.device, self.dtype)\n\n self.assertEqual(\n expected_waveform[..., n_to_trim:-n_to_trim], output[..., n_to_trim:-n_to_trim], atol=1e-1, rtol=1e-4\n )", "def delay_times_linear(min_t, max_t, step_size):\n return np.flip(np.arange(max_t, min_t - step_size, -step_size))", "def eased_step_gen(track, t=None, srate=None):\n if t is None:\n t = time_gen(srate=srate)\n else:\n t = iter(t)\n t0 = next(t)\n t1 = t0\n old_value = None\n duration = 1\n ease_duration = 0\n for tple in track:\n if hasattr(tple,'__getitem__'):\n value = tple[0]\n if len(tple) > 1:\n duration = tple[1]\n if len(tple) > 2:\n ease_duration = tple[2]\n else:\n value = tple\n if old_value is None:\n old_value = value\n #print(value, duration, ease_duration)\n local_ease_duration = min(ease_duration, duration)\n while t0 + duration > t1:\n local_t = t1 - t0\n if local_t < local_ease_duration:\n mu = local_t / local_ease_duration\n yield old_value + mu * (value - old_value)\n else:\n yield value\n t1 = next(t)\n t0 = t1\n old_value = value", "def simulate(self, steps, measure_segregation = True):\n if measure_segregation:\n store = [self.measure_segregation(self.array)]\n for i in range(steps):\n store.append(self.step(self.array))\n return store\n else:\n for i in range(steps):\n self.step(self.array)\n return [1 for i in range(steps)]", "def regular(step, start=0.):\n\n def output(low, high):\n newstart = math.ceil((low - start)/step) * step + start\n return numpy.arange(newstart, high, step, dtype=numpy.float)\n output.func_name = \"regular(%g, start=%g)\" % (step, start)\n return output", "def test_fold_along_delay_odd_length_units_unchanged():\n delays = np.arange(-10, 11) * units.s\n array = np.ones((1, 10, 21)) * units.mK**2 * units.Mpc**3\n errs = np.ones((1, 10, 21)) * units.mK**2 * units.Mpc**3\n axis = -1\n array_out, errs_out = utils.fold_along_delay(delays, array, errs, axis=axis)\n assert units.mK**2 * units.Mpc**3 == array_out.unit", "def effectiveSampleSize(data, stepSize = 1) :\r\n samples = len(data)\r\n\r\n assert len(data) > 1,\"no 
stats for short sequences\"\r\n \r\n maxLag = min(samples//3, 1000)\r\n\r\n gammaStat = [0,]*maxLag\r\n #varGammaStat = [0,]*maxLag\r\n\r\n varStat = 0.0\r\n\r\n if type(data) != np.ndarray :\r\n data = np.array(data)\r\n\r\n normalizedData = data - data.mean()\r\n \r\n for lag in range(maxLag) :\r\n v1 = normalizedData[:samples-lag]\r\n v2 = normalizedData[lag:]\r\n v = v1 * v2\r\n gammaStat[lag] = sum(v) / len(v)\r\n #varGammaStat[lag] = sum(v*v) / len(v)\r\n #varGammaStat[lag] -= gammaStat[0] ** 2\r\n\r\n # print lag, gammaStat[lag], varGammaStat[lag]\r\n \r\n if lag == 0 :\r\n varStat = gammaStat[0]\r\n elif lag % 2 == 0 :\r\n s = gammaStat[lag-1] + gammaStat[lag]\r\n if s > 0 :\r\n varStat += 2.0*s\r\n else :\r\n break\r\n \r\n # standard error of mean\r\n # stdErrorOfMean = Math.sqrt(varStat/samples);\r\n\r\n # auto correlation time\r\n act = stepSize * varStat / gammaStat[0]\r\n\r\n # effective sample size\r\n ess = (stepSize * samples) / act\r\n\r\n return ess", "def _sample2sequences(sample, timesteps):\r\n x = list()\r\n y = list()\r\n sample_len = len(sample)\r\n for i in range(sample_len - timesteps):\r\n seq_in = sample[i:i + timesteps]\r\n seq_out = sample[i + timesteps]\r\n x.append(seq_in)\r\n y.append(seq_out)\r\n return x, y", "def turn_to_intermediate_data(data, data_point_size, batch_size, num_steps, hop_step):\n samples = 0 # Number of samples of interpolating\n \n #counters = [Counter() for _ in xrange(num_labels)]\n\n sample_counter = 0\n for session_data in data:\n \n # This should be the correct number of sample for each session\n # But it could be different with the number of events in the session\n # There is some difference in the way events in session is created\n # For example, when create and annotate a session having frame from 0 to 79\n # I actually create events [0,20] to [60,80] so the right hand side brace should be \n # [0,20) -> Excluding last frame \n correct_no_samples = ( len(session_data[SESSION_DATA]) - num_steps ) // hop_step + 1\n# print ('session name = %s' % session_data[SESSION_NAME])\n# print ('len %d ' % len(session_data[SESSION_DATA]))\n# print ('correct %d ' % correct_no_samples)\n \n if correct_no_samples != len(session_data[SESSION_EVENTS]):\n # A step to find session that has problem to fix\n print (session_data[SESSION_NAME])\n print (\"correct_no_samples \" + str(correct_no_samples))\n print (\"session_data_events \" + str(len(session_data[SESSION_EVENTS])))\n \n print (\"=========================PROBLEMATIC========================\")\n else:\n samples += len(session_data[SESSION_EVENTS])\n \n print('Total number of samples' + str(samples))\n \n interpolated_data = np.zeros([samples * num_steps, data_point_size], dtype=np.float32)\n interpolated_lbls = np.zeros([samples, num_labels], dtype=np.int32)\n # Use a string of maximum 16 characters to store some info about a data sample \n interpolated_info = np.zeros([samples], dtype='|S16')\n \n for session_data in data:\n session_data_vals = session_data[SESSION_DATA]\n session_data_events = session_data[SESSION_EVENTS]\n \n correct_no_samples = ( len(session_data_vals) - num_steps ) // hop_step + 1\n if correct_no_samples == len(session_data_events):\n for i in range(len(session_data_events)):\n for j in range(num_steps):\n interpolated_data[( ( sample_counter + i ) * num_steps + j)] =\\\n session_data_vals[i * hop_step + j]\n\n event_labels = session_data[SESSION_EVENTS][i]['label']\n \n interpolated_lbls[sample_counter + i] = list(event_labels)\n\n interpolated_info[sample_counter + i] 
= session_data[SESSION_NAME] + '_' + str(i)\n \n sample_counter += len(session_data_events)\n \n # Number of epoch, each epoch has a batch_size of data \n epoch_size = samples // batch_size\n \n # Divide the first dimension from samples * num_steps -> (samples, num_steps)\n rearranged_data = interpolated_data.reshape((samples, num_steps, data_point_size))\n # Divide first dimenstion from samples -> epoch_size * batch_size (remove remaining)\n rearranged_data = rearranged_data[:epoch_size * batch_size].\\\n reshape((epoch_size, batch_size, num_steps, data_point_size))\n \n rearranged_lbls = interpolated_lbls[:epoch_size * batch_size].\\\n reshape((epoch_size, batch_size, num_labels))\n\n rearranged_info = interpolated_info[:epoch_size * batch_size].\\\n reshape((epoch_size, batch_size))\n \n return (rearranged_data, rearranged_lbls, rearranged_info)", "def test_sampling2 () :\n delta = 2 * np.pi / 3\n r = Reward(partial(stepFunction, \n xRange=(-delta/2, delta/2), \n yRange=(-delta/2, delta/2)), \n (-1, 0))\n states = []\n xs = np.arange(-np.pi, np.pi, delta)\n ys = np.arange(-np.pi, np.pi, delta)\n for x, y in product(xs, ys) : \n states.append(\n toExternalStateRep([x + delta / 2, y + delta / 2, 0, 0]).astype(float)\n )\n agent = findOptimalAgent(r)\n vals = estimateValueFromAgent(states, agent, r)\n for s, v in zip(states, vals) : \n print(toInternalStateRep(s)[:2], v)", "def _discrete_fourier_transform(\n series,\n dft_length,\n norm,\n inverse_sqrt_win_size,\n lower_bounding,\n apply_normalising_factor=True,\n cut_start_if_norm=True,\n):\n start = 2 if norm else 0\n output_length = start + dft_length\n\n if cut_start_if_norm:\n c = int(start / 2)\n else:\n c = 0\n start = 0\n\n dft = np.zeros(output_length - start)\n for i in range(c, int(output_length / 2)):\n for n in range(len(series)):\n dft[(i - c) * 2] += series[n] * math.cos(2 * math.pi * n * i / len(series))\n dft[(i - c) * 2 + 1] += -series[n] * math.sin(\n 2 * math.pi * n * i / len(series)\n )\n\n if apply_normalising_factor:\n if lower_bounding:\n dft[1::2] = dft[1::2] * -1 # lower bounding\n\n std = np.std(series)\n if std == 0:\n std = 1\n dft *= inverse_sqrt_win_size / std\n\n return dft", "def create_signal_chunks(ds, length_ms, step_ms, max_pad_ms=0, deterministic_output_order=True, max_num_chunks_per_signal=int(1e6), avg_num_chunks_from_signals=100):\n logger.info(\"Dividing every signal in the dataset into new signals by creating signal chunks of length %d ms and offset %d ms. 
Maximum amount of padding allowed in the last chunk is %d ms.\", length_ms, step_ms, max_pad_ms)\n\n chunk_length_sec = tf.constant(1e-3 * length_ms, tf.float32)\n chunk_step_sec = tf.constant(1e-3 * step_ms, tf.float32)\n max_pad_sec = tf.constant(1e-3 * max_pad_ms, tf.float32)\n id_str_padding = tf.cast(tf.round(audio_features.log10(tf.cast(max_num_chunks_per_signal, tf.float32))), tf.int32)\n\n def chunks_to_elements(chunk, chunk_num, x):\n chunk_num_str = tf.strings.as_string(chunk_num, width=id_str_padding, fill='0')\n chunk_id = tf.strings.join((x[\"id\"], chunk_num_str), separator='-')\n s = tf.reshape(chunk, [-1])\n out = dict(x, signal=s, id=chunk_id)\n if \"duration\" in x:\n out = dict(out, duration=tf.cast(tf.size(s) / x[\"sample_rate\"], tf.float32))\n return out\n\n def chunk_signal_and_flatten(x):\n signal = x[\"signal\"]\n sample_rate = tf.cast(x[\"sample_rate\"], tf.float32)\n\n chunk_length = tf.cast(sample_rate * chunk_length_sec, tf.int32)\n chunk_step = tf.cast(sample_rate * chunk_step_sec, tf.int32)\n max_pad = tf.cast(sample_rate * max_pad_sec, tf.int32)\n num_full_chunks = tf.math.maximum(0, 1 + (tf.size(signal) - chunk_length) // chunk_step)\n tf.debugging.assert_less(num_full_chunks, max_num_chunks_per_signal, message=\"Too many chunks created from signal, cannot create unique utterance ids, raise the max_num_chunks_per_signal parameter\")\n\n last_chunk_length = tf.size(signal) - num_full_chunks * chunk_step\n if last_chunk_length < chunk_length and chunk_length <= last_chunk_length + max_pad:\n signal = tf.pad(signal, [[0, chunk_length - last_chunk_length]])\n\n chunks = tf.signal.frame(signal, chunk_length, chunk_step, axis=0)\n num_chunks = tf.cast(tf.shape(chunks)[0], tf.int64)\n chunk_ds = tf.data.Dataset.from_tensor_slices(chunks)\n chunk_nums_ds = tf.data.Dataset.range(1, num_chunks + 1)\n repeat_x_ds = tf.data.Dataset.from_tensors(x).repeat(num_chunks)\n\n return (tf.data.Dataset\n .zip((chunk_ds, chunk_nums_ds, repeat_x_ds))\n .map(chunks_to_elements))\n\n interleave_kwargs = {\n \"block_length\": avg_num_chunks_from_signals,\n \"num_parallel_calls\": TF_AUTOTUNE,\n \"deterministic\": deterministic_output_order}\n if TF_VERSION_MAJOR == 2 and TF_VERSION_MINOR < 2:\n del interleave_kwargs[\"deterministic\"]\n logger.warning(\"Deleted unsupported 'deterministic' kwarg from tf.data.Dataset.interleave call, TF version >= 2.2 is required.\")\n\n return ds.interleave(chunk_signal_and_flatten, **interleave_kwargs)", "def stepify(times, values):\n new_times = np.empty((2*times.size - 1,))\n new_values = np.empty_like(new_times)\n new_times[::2] = times\n new_times[1::2] = times[1:]\n new_values[::2] = values\n new_values[1::2] = values[:-1]\n return new_times, new_values" ]
[ "0.61137354", "0.5560548", "0.5529538", "0.544452", "0.5436568", "0.54274285", "0.54136103", "0.53967273", "0.53775096", "0.5371207", "0.5365279", "0.53076035", "0.5302743", "0.52841", "0.5283588", "0.5236762", "0.52195495", "0.52108365", "0.52093816", "0.52066445", "0.5199559", "0.51867336", "0.5184453", "0.5148394", "0.5147871", "0.51270795", "0.5124305", "0.50981295", "0.5089842", "0.50756615" ]
0.587361
1
Function for converting messages on topics in a bag into a single dataframe with equal timesteps. Uses 1d interpolation to synchronize topics.
def bag_to_dataframe( bagpath: Path, topics: List[str], step_length: float, plot_col=None ) -> DataFrame: # convert bag to dataframes dataframes = bag_to_dataframes(bagpath, topics) # find global start and end times start_times = list() end_times = list() for topic in topics: df = dataframes[topic] start_times.append(df[DFKeys.TIME.value].iloc[0]) end_times.append(df[DFKeys.TIME.value].iloc[-1]) start_time = max(start_times) end_time = min(end_times) # give all dataframes equal timesteps synchronized_dataframes = [] for topic in topics: df = recreate_sampling_times( dataframes[topic], step_length, start_time, end_time, plot_col=plot_col ) synchronized_dataframes.append(df) # merge dataframes df_merged = reduce( lambda left, right: pd.merge(left, right, on=[DFKeys.TIME.value], how="outer"), synchronized_dataframes, ) return df_merged
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_topic_history(data, bow_corpus, topic_ID, lda_model,\r\n timestep_code='1A', agg=np.mean):\r\n\r\n agg_results = {}\r\n for time_step, data in data.resample(timestep_code, on='year'):\r\n\r\n # Get indices for data in this range\r\n indices = data.index.tolist()\r\n \r\n # Get list of topic probs for docs in this timestep\r\n topic_probs = [\r\n get_topic_prob(bow_corpus[ix], topic_ID, lda_model) \r\n for ix in indices\r\n ]\r\n\r\n # Aggregate and add to results list\r\n agg_results[time_step.year] = agg(topic_probs)\r\n \r\n # Show Progress\r\n print(time_step.year)\r\n if time_step.year % 20 == 0:\r\n print(f'Mean prob: {agg(topic_probs)}')\r\n \r\n return pd.Series(agg_results)", "def _interpolate_single_values(A, topic):\n interp_dic = {}\n k, d = get_sorted_keys_and_data(A[topic])\n for i in range(0, len(k) - 1):\n for j in range(int(k[i] * 1000), int(k[i + 1] * 1000)):\n v = round((d[i + 1] - d[i]) / (k[i + 1] - k[i]) * (j / 1000. - k[i]) + d[i], 3)\n interp_dic[j / 1000.] = v\n return interp_dic", "def get_topic_data(bagFile, topic, return_t=False):\n\n all_msg = []\n if return_t:\n all_t = []\n\n\n # Initialize rosbag object\n bag = rosbag.Bag(bagFile)\n\n for topic, msg, t in bag.read_messages(topics=[topic]):\n all_msg = np.append(all_msg, msg)\n if return_t:\n all_t = np.append(all_t, t.to_sec())\n\n if return_t:\n return all_msg, all_t\n else:\n return all_msg", "def topic(df, num_topics=5):\r\n# X, y = df[df.columns[:-1]], df[df.columns[-1]]\r\n lda = LatentDirichletAllocation(n_topics=num_topics,\r\n max_iter=5,\r\n learning_method='online',\r\n learning_offset=50.,\r\n random_state=0)\r\n return lda.fit_transform(df)", "def reshape_data(tbl):\n\n \n # Indexes where the sentece starts\n sentStarters = tbl.loc[tbl['index'] == 'I1']\n\n # Add indicator for group and fill that forward for the group\n tbl.loc[tbl['index'] == 'I1', 'sent'] = range(sentStarters.shape[0])\n tbl['sent'] = tbl['sent'].fillna(method='ffill')\n\n def reshape_recipe(recipe):\n tokens = [token for token in recipe['token']]\n tags = [tag for tag in recipe['tag']]\n return pd.DataFrame({'sents': [tokens], 'tags': [tags]})\n\n return tbl.groupby('sent').apply(reshape_recipe)", "def topic(df, num_topics=5):\r\n\r\n lda = LatentDirichletAllocation(n_topics=num_topics,\r\n max_iter=5,\r\n learning_method='online',\r\n learning_offset=50.,\r\n random_state=0)\r\n return lda.fit_transform(df)", "def dt_comp(self,sampled_topics):\n\n\t\tsamples = sampled_topics.shape[0]\n\t\tdt = np.zeros((self.D,self.K,samples))\n\n\t\tfor s in xrange(samples):\n\t\t\tdt[:,:,s] = samplers_lda.dt_comp(self.docid, sampled_topics[s,:], self.N, self.K, self.D, self.alpha)\n\n\t\treturn dt", "def ptb_producer(raw_data, batch_size, num_steps, word_to_id):\n x = []\n y = []\n n_batches = len(raw_data) // batch_size\n for sentence in raw_data:\n mask_index = get_mask_index(sentence)\n current_label = sentence[mask_index]\n sentence[mask_index] = word_to_id['<mask>']\n y.append(current_label)\n x.append(sentence)\n x = np.array(x)\n x = x[:n_batches*batch_size]\n x = np.reshape(x, [n_batches, batch_size, num_steps])\n y = np.array(y)\n y = y[:n_batches * batch_size]\n y = np.reshape(y, [n_batches, batch_size])\n return x, y", "def transform(self):\n result = []\n for item in self.doc_topic_matrix:\n result.append(item / np.sum(item))\n result = np.array(result)\n return result", "def get_topic_prob_in_timestep(bow_subset, lda_model, topic_ID):\r\n # Progress\r\n print(multiprocessing.current_process().name)\r\n \r\n # Get list 
of topic probs for docs in this timestep\r\n topic_probs = [\r\n get_topic_prob(bow_doc, topic_ID, lda_model) \r\n for bow_doc in bow_subset\r\n ]\r\n \r\n return pd.Series(topic_probs)", "def print_topic_times(self, topic, top_terms=20):\n topics = []\n for time in range(self.num_time_slices):\n topics.append(self.print_topic(topic, time, top_terms))\n\n return topics", "def model_topics(df):\n\n data = df.text.values.tolist()\n data_words = list(sent_to_words(data))\n\n # Build the bigram and trigram models\n bigram = gensim.models.Phrases(data_words, min_count=5, threshold=100)\n trigram = gensim.models.Phrases(bigram[data_words], threshold=100) \n\n # Faster way to get a sentence clubbed as a trigram/bigram\n bigram_mod = gensim.models.phrases.Phraser(bigram)\n trigram_mod = gensim.models.phrases.Phraser(trigram)\n\n # Remove Stop Words\n data_words_nostops = remove_stopwords(data_words)\n\n # Form Bigrams\n data_words_bigrams = make_bigrams(data_words_nostops,bigram_mod)\n\n # Initialize spacy 'en' model, keeping only tagger component (for efficiency)\n nlp = spacy.load('en', disable=['parser', 'ner'])\n\n # Do lemmatization keeping only noun, adj, vb, adv\n data_lemmatized = lemmatization(data_words_bigrams, allowed_postags=['NOUN', 'ADJ', 'VERB', 'ADV'])\n\n # Create Dictionary\n id2word = corpora.Dictionary(data_lemmatized)\n\n # Create Corpus\n texts = data_lemmatized\n\n # Term Document Frequency\n corpus = [id2word.doc2bow(text) for text in texts]\n\n # Perform Topic Modeling for number of topics ranging from 5 to 50 in steps of 5\n model_list, coherence_values = compute_coherence_values(dictionary=id2word, corpus=corpus, texts=data_lemmatized, start=5, limit=50, step=5)\n\n return model_list,coherence_values,corpus,id2word", "def test_topic_reduction_edge_cases(base_bertopic):\n\n nr_topics = 5\n base_bertopic.nr_topics = 100\n old_documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n base_bertopic._update_topic_size(old_documents)\n c_tf_idf = base_bertopic._extract_topics(old_documents, topic_reduction=True)\n old_freq = base_bertopic.get_topics_freq()\n\n new_documents = base_bertopic._reduce_topics(old_documents, c_tf_idf)\n new_freq = base_bertopic.get_topics_freq()\n\n assert not set(old_documents.Topic).difference(set(new_documents.Topic))\n pd.testing.assert_frame_equal(old_documents, new_documents)\n pd.testing.assert_frame_equal(old_freq, new_freq)", "def group_topics(sent_topics_sorteddf):\n new_topics=pd.concat([sent_topics_sorteddf.groupby('Topic_Num').head()[['Keywords']],\n topic_contribution.sort_index(),\n pd.Series(['Economy','Immigration','Environment','Event',\n 'Civil Rights','Civil Rights','Healthcare',\n 'Defense','Trump','Community','Event','Event',\n 'Thanks','Legislation','Trump','Community',\n 'Community','Trump','Defense',\n 'Legislation','Thanks','Economy','Thanks','Healthcare',\n 'Legislation'])],axis=1).groupby(0).sum()\n plt.pie(new_topics,labels=new_topics.index,autopct='%.0f',pctdistance=.8)\n plt.title('Topic Share %');\n\n new_topic_words = pd.concat([sent_topics_sorteddf.groupby('Topic_Num').head()[['Keywords']],\n topic_contribution.sort_index(),\n pd.Series(['Economy','Immigration','Environment','Event',\n 'Civil Rights','Civil Rights','Healthcare',\n 'Defense','Trump','Community','Event','Event',\n 'Thanks','Legislation','Trump','Community',\n 'Community','Trump','Defense',\n 
'Legislation','Thanks','Economy','Thanks','Healthcare',\n 'Legislation'])],axis=1).groupby(0)['Keywords'].sum()\n [print(f'{topic}: ' + words) for topic,words in zip(new_topic_words.index,new_topic_words)]", "def transform_new_tweets(twitter_name, count, interval):\n while True:\n messages = twitter.fetch_tweets(twitter_name=twitter_name, count=count, interval=interval)\n\n if len(messages) > 0:\n for message in messages:\n translate_to_trump_version(message)\n else:\n print \"@%s has not tweeted in the last %d minutes.\" % (twitter_name, int(interval / MINUTES))\n\n time.sleep(interval)", "def test_topic_reduction_edge_cases():\n model = BERTopic()\n nr_topics = 5\n model.nr_topics = 100\n old_documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n model._update_topic_size(old_documents)\n model._extract_topics(old_documents)\n old_freq = model.get_topic_freq()\n\n new_documents = model._reduce_topics(old_documents)\n new_freq = model.get_topic_freq()\n\n assert not set(old_documents.Topic).difference(set(new_documents.Topic))\n pd.testing.assert_frame_equal(old_documents, new_documents)\n pd.testing.assert_frame_equal(old_freq, new_freq)", "def test_extract_topics(base_bertopic):\n nr_topics = 5\n documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n base_bertopic._update_topic_size(documents)\n c_tf_idf = base_bertopic._extract_topics(documents, topic_reduction=False)\n freq = base_bertopic.get_topics_freq()\n\n assert c_tf_idf.shape[0] == 5\n assert c_tf_idf.shape[1] > 100\n assert isinstance(freq, pd.DataFrame)\n assert nr_topics == len(freq.Topic.unique())\n assert freq.Count.sum() == len(documents)\n assert len(freq.Topic.unique()) == len(freq)", "def convert_ms_df(df):\n df_lst = []\n\n df.apply(lambda x: df_lst.append(create_ext_df(x, np.int64)), axis=1)\n\n ms_df = pd.concat(df_lst)\n\n sorted_df = ms_df.sort_values(by=['Time'])\n\n sorted_df['Time'] = pd.to_datetime(sorted_df['Time'], unit='ms')\n\n grouped_ms_src = sorted_df.groupby(['Time', 'pkt_src']).agg(\n {'pkt_size':'sum'}).reset_index()\n\n return grouped_ms_src", "def _aggregate_messages(\n self, messages: torch.Tensor, message_targets: torch.Tensor, num_nodes, aggregation_fn: str\n ):\n # Support AMP\n msg_dtype = messages.dtype\n return scatter(\n messages.to(torch.float32),\n index=message_targets,\n dim=0,\n dim_size=num_nodes,\n reduce=aggregation_fn,\n ).to(msg_dtype)", "def tt_comp(self,sampled_topics):\n\n\t\tsamples = sampled_topics.shape[0]\n\t\ttt = np.zeros((self.V,self.K,samples))\n\n\t\tfor s in xrange(samples):\n\t\t\ttt[:,:,s] = samplers_lda.tt_comp(self.tokens, sampled_topics[s,:], self.N, self.V, self.K, self.beta)\n\n\t\treturn tt", "def bitfinex2_on_message(caller, msg):\n msg = json.loads(msg)\n if caller.subbed_count == 7:\n if msg[1] == \"te\":\n chnl = msg[0]\n body = msg[2]\n df = pd.DataFrame.from_records(\n data=[{\n \"tid\": int(body[0]),\n \"price\": float(body[3]),\n \"volume\": float(body[2]),\n \"datetime\": pd.to_datetime(body[1], unit='ms')\n }],\n index=\"datetime\"\n )\n # print (df)\n df.index = df.index.tz_localize(\"GMT0\")\n caller.write(chnl, df)\n\n return chnl, df\n\n if type(msg) is dict and \"event\" in msg and msg[\"event\"] == \"subscribed\":\n caller.config[\"channel_symbol\"][msg[\"chanId\"]] = \"bitfinex2\" + \":\" + bdic[msg[\"symbol\"]]\n 
caller.subbed_count += 1\n return\n\n\n chnl = msg[0]\n body = msg[2]\n df = pd.DataFrame.from_records(\n data=[{\n \"tid\": int(body[0]),\n \"price\": float(body[3]),\n \"volume\": float(body[2]),\n \"datetime\": pd.to_datetime(body[1], unit='ms')\n }],\n index=\"datetime\"\n )\n df.index = df.index.tz_convert(\"GMT0\")\n caller.write(chnl, df)\n\n return chnl, df", "def variable_time_collate_fn2(batch, args, device = torch.device(\"cpu\"), data_type = \"train\", \n data_min = None, data_max = None):\n D = batch[0][2].shape[1]\n len_tt = [ex[1].size(0) for ex in batch]\n maxlen = np.max(len_tt)\n enc_combined_tt = torch.zeros([len(batch), maxlen]).to(device)\n enc_combined_vals = torch.zeros([len(batch), maxlen, D]).to(device)\n enc_combined_mask = torch.zeros([len(batch), maxlen, D]).to(device)\n for b, (record_id, tt, vals, mask, labels) in enumerate(batch):\n currlen = tt.size(0)\n enc_combined_tt[b, :currlen] = tt.to(device) \n enc_combined_vals[b, :currlen] = vals.to(device) \n enc_combined_mask[b, :currlen] = mask.to(device) \n \n combined_tt, inverse_indices = torch.unique(torch.cat([ex[1] for ex in batch]), sorted=True, return_inverse=True)\n combined_tt = combined_tt.to(device)\n\n offset = 0\n combined_vals = torch.zeros([len(batch), len(combined_tt), D]).to(device)\n combined_mask = torch.zeros([len(batch), len(combined_tt), D]).to(device)\n\n combined_labels = None\n N_labels = 1\n\n combined_labels = torch.zeros(len(batch), N_labels) + torch.tensor(float('nan'))\n combined_labels = combined_labels.to(device = device)\n\n for b, (record_id, tt, vals, mask, labels) in enumerate(batch):\n tt = tt.to(device)\n vals = vals.to(device)\n mask = mask.to(device)\n if labels is not None:\n labels = labels.to(device)\n\n indices = inverse_indices[offset:offset + len(tt)]\n offset += len(tt)\n\n combined_vals[b, indices] = vals\n combined_mask[b, indices] = mask\n\n if labels is not None:\n combined_labels[b] = labels\n\n combined_vals, _, _ = utils.normalize_masked_data(combined_vals, combined_mask, \n att_min = data_min, att_max = data_max)\n enc_combined_vals, _, _ = utils.normalize_masked_data(enc_combined_vals, enc_combined_mask, \n att_min = data_min, att_max = data_max)\n\n if torch.max(combined_tt) != 0.:\n combined_tt = combined_tt / torch.max(combined_tt)\n enc_combined_tt = enc_combined_tt / torch.max(enc_combined_tt)\n \n data_dict = {\n \"enc_data\":enc_combined_vals,\n \"enc_mask\":enc_combined_mask,\n \"enc_time_steps\":enc_combined_tt,\n \"data\": combined_vals, \n \"time_steps\": combined_tt,\n \"mask\": combined_mask,\n \"labels\": combined_labels}\n\n data_dict = utils.split_and_subsample_batch(data_dict, args, data_type = data_type)\n return data_dict", "def _expand_burst_msg(self, burst_msg):\n assert \"burst\" in burst_msg.meas_type\n burst_list = []\n\n num_msgs = int(burst_msg.data)\n avg_latency = ( burst_msg.data - int(burst_msg.data) )*10\n # burst_msg.meas_type = burst_msg.meas_type.split(\"_burst\")[0]\n msg_id = self._get_meas_identifier( burst_msg )\n # print(\"Reconstructed msg_id: {}\".format(msg_id))\n # print(\"Avg latency: {}\".format(avg_latency))\n # print(\"Num msgs: {}\".format(num_msgs))\n for i in range(num_msgs):\n new_msg = self._get_meas_identifier(msg_id, undo=True)\n new_msg.stamp = burst_msg.stamp + rospy.Duration( i * avg_latency )\n new_msg.meas_type += \"_implicit\"\n new_msg.variance = burst_msg.variance\n new_msg.et_delta = burst_msg.et_delta\n burst_list.append( new_msg )\n return burst_list, avg_latency", "def build_t(p, ind):\n with 
np.errstate(divide=\"ignore\"):\n k = len(ind)\n trans = pd.DataFrame(np.log(float(0)), ind, ind)\n lp = np.log(p)\n trans.loc['B']['B'] = np.log(1 - p)\n trans.loc['B']['M1'] = lp\n for i in range(1, k):\n if i == k - 1:\n trans.loc['M' + str(k - 1)]['B'] = np.log(float(1))\n else:\n trans.loc['M' + str(i)]['M' + str(i + 1)] = np.log(float(1))\n return trans", "def variable_time_collate_fn(batch, args, device = torch.device(\"cpu\"), data_type = \"train\", \n\tdata_min = None, data_max = None):\n\tD = batch[0][2].shape[1]\n\tcombined_tt, inverse_indices = torch.unique(torch.cat([ex[1] for ex in batch]), sorted=True, return_inverse=True)\n\tcombined_tt = combined_tt.to(device)\n\n\toffset = 0\n\tcombined_vals = torch.zeros([len(batch), len(combined_tt), D]).to(device)\n\tcombined_mask = torch.zeros([len(batch), len(combined_tt), D]).to(device)\n\t\n\tcombined_labels = None\n\tN_labels = 1\n\n\tcombined_labels = torch.zeros(len(batch), N_labels) + torch.tensor(float('nan'))\n\tcombined_labels = combined_labels.to(device = device)\n\t\n\tfor b, (record_id, tt, vals, mask, labels) in enumerate(batch):\n\t\ttt = tt.to(device)\n\t\tvals = vals.to(device)\n\t\tmask = mask.to(device)\n\t\tif labels is not None:\n\t\t\tlabels = labels.to(device)\n\n\t\tindices = inverse_indices[offset:offset + len(tt)]\n\t\toffset += len(tt)\n\n\t\tcombined_vals[b, indices] = vals\n\t\tcombined_mask[b, indices] = mask\n\n\t\tif labels is not None:\n\t\t\tcombined_labels[b] = labels\n\n\tcombined_vals, _, _ = utils.normalize_masked_data(combined_vals, combined_mask, \n\t\tatt_min = data_min, att_max = data_max)\n\n\tif torch.max(combined_tt) != 0.:\n\t\tcombined_tt = combined_tt / torch.max(combined_tt)\n\t\t\n\tdata_dict = {\n\t\t\"data\": combined_vals, \n\t\t\"time_steps\": combined_tt,\n\t\t\"mask\": combined_mask,\n\t\t\"labels\": combined_labels}\n\n\tdata_dict = utils.split_and_subsample_batch(data_dict, args, data_type = data_type)\n\treturn data_dict", "def turn_to_intermediate_data(data, data_point_size, batch_size, num_steps, hop_step):\n samples = 0 # Number of samples of interpolating\n \n #counters = [Counter() for _ in xrange(num_labels)]\n\n sample_counter = 0\n for session_data in data:\n \n # This should be the correct number of sample for each session\n # But it could be different with the number of events in the session\n # There is some difference in the way events in session is created\n # For example, when create and annotate a session having frame from 0 to 79\n # I actually create events [0,20] to [60,80] so the right hand side brace should be \n # [0,20) -> Excluding last frame \n correct_no_samples = ( len(session_data[SESSION_DATA]) - num_steps ) // hop_step + 1\n# print ('session name = %s' % session_data[SESSION_NAME])\n# print ('len %d ' % len(session_data[SESSION_DATA]))\n# print ('correct %d ' % correct_no_samples)\n \n if correct_no_samples != len(session_data[SESSION_EVENTS]):\n # A step to find session that has problem to fix\n print (session_data[SESSION_NAME])\n print (\"correct_no_samples \" + str(correct_no_samples))\n print (\"session_data_events \" + str(len(session_data[SESSION_EVENTS])))\n \n print (\"=========================PROBLEMATIC========================\")\n else:\n samples += len(session_data[SESSION_EVENTS])\n \n print('Total number of samples' + str(samples))\n \n interpolated_data = np.zeros([samples * num_steps, data_point_size], dtype=np.float32)\n interpolated_lbls = np.zeros([samples, num_labels], dtype=np.int32)\n # Use a string of maximum 16 characters 
to store some info about a data sample \n interpolated_info = np.zeros([samples], dtype='|S16')\n \n for session_data in data:\n session_data_vals = session_data[SESSION_DATA]\n session_data_events = session_data[SESSION_EVENTS]\n \n correct_no_samples = ( len(session_data_vals) - num_steps ) // hop_step + 1\n if correct_no_samples == len(session_data_events):\n for i in range(len(session_data_events)):\n for j in range(num_steps):\n interpolated_data[( ( sample_counter + i ) * num_steps + j)] =\\\n session_data_vals[i * hop_step + j]\n\n event_labels = session_data[SESSION_EVENTS][i]['label']\n \n interpolated_lbls[sample_counter + i] = list(event_labels)\n\n interpolated_info[sample_counter + i] = session_data[SESSION_NAME] + '_' + str(i)\n \n sample_counter += len(session_data_events)\n \n # Number of epoch, each epoch has a batch_size of data \n epoch_size = samples // batch_size\n \n # Divide the first dimension from samples * num_steps -> (samples, num_steps)\n rearranged_data = interpolated_data.reshape((samples, num_steps, data_point_size))\n # Divide first dimenstion from samples -> epoch_size * batch_size (remove remaining)\n rearranged_data = rearranged_data[:epoch_size * batch_size].\\\n reshape((epoch_size, batch_size, num_steps, data_point_size))\n \n rearranged_lbls = interpolated_lbls[:epoch_size * batch_size].\\\n reshape((epoch_size, batch_size, num_labels))\n\n rearranged_info = interpolated_info[:epoch_size * batch_size].\\\n reshape((epoch_size, batch_size))\n \n return (rearranged_data, rearranged_lbls, rearranged_info)", "def create_msgs():\n getcontext().prec = 3 # will round to 3 decimal places\n orig_times = sorted(dat)\n for n in range(len(dat) - 1):\n linfun = interp1d([orig_times[n], orig_times[n+1]], \\\n [dat[orig_times[n]], dat[orig_times[n+1]]])\n dt = orig_times[n+1] - orig_times[n] # current\n freq = 1/dt # current\n if dt < (1/desHz):\n print('found instance where Freq already at/above desired Freq')\n else:\n new_dt = dt*freq/desHz\n new_times = linspace(orig_times[n],orig_times[n+1],floor(dt/new_dt))\n # print(new_times)\n new_values = linfun(new_times)\n # rounded_values = [float(Decimal(\"%.3f\" % e)) for e in new_values]\n rounded_times = [float(Decimal(\"%.3f\" % e)) for e in new_times]\n for m in range(len(rounded_times)):\n # this_time = int(new_times[m]*100000)/100000 # 5 decimal places in timstamp\n self.outData[sens][meas][rounded_times[m]] = new_values[m]", "def tweetSent(df_key):\n print('1/3 sent_transformer, initializing.')\n\n df_sent = df_key.copy()\n root = 'proc_data'\n year = df_sent['date'][0].year\n month = df_sent['date'][0].month\n pos_list = []\n neu_list = []\n neg_list = []\n cmp_list = []\n analyzer = SentimentIntensityAnalyzer()\n\n print('2/3 sent_transformer, transforming tweets to sentiments.')\n\n for _,nrows in tqdm_notebook(df_sent.iterrows()):\n vs = analyzer.polarity_scores(nrows['cleantext'])\n pos_list.append(vs['pos'])\n neu_list.append(vs['neu'])\n neg_list.append(vs['neg'])\n cmp_list.append(vs['compound'])\n\n df_sent['vad_positive'] = pos_list\n df_sent['vad_neutral'] = neu_list\n df_sent['vad_negative'] = neg_list\n df_sent['vad_compound'] = cmp_list\n\n filename = 'proc_sent_{}_{}.pkl'.format(year,month)\n filestring = os.path.join(root, filename)\n\n # output to pickle\n with open(filestring, 'wb') as filehandle: \n # store the data\n pickle.dump(df_sent, filehandle)\n\n print('3/3 sent_transformer, process complete.')\n return df_sent", "def make_lda_seq_slice(self, lda, time):\n for k in 
range(self.num_topics):\n lda.topics[:, k] = self.topic_chains[k].e_log_prob[:, time]\n\n lda.alpha = np.copy(self.alphas)\n return lda", "def time_delay_embed(array, dimension, time_dif):\r\n emb = array.values # Converts the panda dataframe to an array\r\n emb = np.squeeze(np.asarray(emb)) # Make a 1-d array of all values\r\n i = len(emb) - 1 # sets up a counter\r\n new_vec = [] # target for each row\r\n embed = [] # target for full set\r\n while i >= dimension-1:\r\n a = 0 # the dimensional counter\r\n b = 0 # time_dif counter\r\n while a< dimension:\r\n new_vec.append(emb[i-b])\r\n a+=1\r\n b+= time_dif\r\n embed.append(new_vec)\r\n new_vec = []\r\n i -=1\r\n \r\n X = np.array(embed)\r\n \r\n return np.flipud(X)" ]
[ "0.57238615", "0.5574867", "0.53912854", "0.5151434", "0.5077015", "0.49671167", "0.49580783", "0.49085027", "0.48793778", "0.4872549", "0.486572", "0.48051518", "0.47753602", "0.47723496", "0.47499838", "0.47255534", "0.46917838", "0.4691524", "0.4651114", "0.46419352", "0.46174344", "0.45976877", "0.4587916", "0.45800844", "0.45576012", "0.4551544", "0.45504156", "0.45365024", "0.45297462", "0.45188993" ]
0.66636413
0
Gets a dictionary of all input files related to a problem
def get_problem_files(problem_name, base_data_dir=None): raw_problem_data_dir, _, __ = _get_base_dirs(base_data_dir) problem_file_templates = _get_problem_file_templates(raw_problem_data_dir) ensure_dir_exists(raw_problem_data_dir.format(problem_name=problem_name)) problem_files = problem_file_templates[problem_name] return {k: v.format(problem_name=problem_name) for k, v in problem_files.items()}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def inputFiles(self):\n inputfiles = set()\n for f in self:\n [ inputfiles.add(x) for x in f['input']]\n return list(inputfiles)", "def inputs(self):\n return {\n \"path_to_result_folder\": File_IO(self.node.inputs[0]),\n \"pathToLanguageMapping\": File_IO(self.node.inputs[1]),\n \"pathToToc\": File_IO(self.node.inputs[2])\n }", "def get_input_files(self, action):\n assert action == \"run\", \"Unsupported action\"\n return {\n \"bam\": \"work/{mapper_lib}/out/{mapper_lib}.bam\",\n \"bai\": \"work/{mapper_lib}/out/{mapper_lib}.bam.bai\",\n }", "def get_input_files(self, action):\n assert action == \"run\", \"Unsupported action\"\n return {\n \"bam\": \"work/{mapper_lib}/out/{mapper_lib}.bam\",\n \"bai\": \"work/{mapper_lib}/out/{mapper_lib}.bam.bai\",\n }", "def inputFiles(self):\n return (self.matrixFile,)", "def inputs(self):\n return {\n \"path_to_result_folder\": File_IO(self.node.inputs[0]),\n \"path_to_dtb_json_file\": File_IO(self.node.inputs[1]),\n \"path_to_reference_DTB\": File_IO(self.node.inputs[2])\n }", "def inputs(self):\n return {\n \"path_to_result_folder\": File_IO(self.node.inputs[0]),\n \"pathToToc\": File_IO(self.node.inputs[1]),\n }", "def read_input(self):\n self._pars = dict()\n for infile in f90_infiles:\n self._pars[infile] = io.read_f90_input(infile)\n for infile in str_infiles:\n self._pars[infile] = io.read_str_input(infile)", "def _create_input_file_dict(self, job_data):\n\n files_dict = {}\n\n for input_name, file_ids in job_data.get_input_file_ids_by_input().items():\n file_list = []\n file_names = set()\n for file_id in file_ids:\n scale_file_model = self._input_files[file_id]\n input_file = InputFile(scale_file_model)\n # Check for file name collision and use Scale file ID to ensure names are unique\n file_name = scale_file_model.file_name\n\n if file_name in file_names:\n file_name = '%d.%s' % (scale_file_model.id, file_name)\n input_file.local_file_name = file_name\n file_names.add(file_name)\n file_list.append(input_file)\n files_dict[input_name] = file_list\n\n return files_dict", "def get_input_contents(self):\n try:\n ret_files = []\n coll = self.collections[self._primary_input_collection]\n ret_file = {'coll_id': coll['coll_id'],\n 'scope': coll['scope'],\n 'name': coll['name'],\n 'bytes': coll.coll_metadata['bytes'],\n 'adler32': None,\n 'min_id': 0,\n 'max_id': coll.coll_metadata['total_files'],\n 'content_type': ContentType.File,\n 'content_metadata': {'total_files': coll['coll_metadata']['total_files']}\n }\n ret_files.append(ret_file)\n return ret_files\n except Exception as ex:\n self.logger.error(ex)\n self.logger.error(traceback.format_exc())\n raise exceptions.IDDSException('%s: %s' % (str(ex), traceback.format_exc()))", "def find_all_infilepaths(in_dir):\n workdir = os.getcwd()\n os.chdir(in_dir)\n\n infiles_paths = dict()\n for infilename in glob.glob(\"[0-9]*_[0-9]*_[0-9]*.hdf5\"):\n pos = infilename.split('_')\n pos[-1] = pos[-1].split('.')[0]\n pos = tuple(list(map(lambda s: int(s), pos)))\n num_pos = _3d_to_numeric\n infiles_paths[num_pos] = os.path.join(in_dir, infilename)\n\n os.chdir(workdir)\n return infiles_paths", "def input_info(path_input):\n #path for your data directory, path for your data save, and names for the lists\n #Import with yaml file: input path and prefix information for files\n input_file = glob.glob('ExoTRed_input.yaml')\n if input_file: #if exist the input file, the code will obtain all information We need to run all tasks\n if len(input_file) == 1: #if there is only one yaml file, obtain data and save paths, 
and return that with a dictionary with information\n print 'reading input file ... \\n'\n file = yaml.load(open(input_file[0])) #creating our dictionary of input variables\n data_path = file['data_path']\n save_path = file['save_path']\n print '.... done! \\n'\n if len(input_file) > 1: #if are more than one yaml file,. the code will ask to you remove the others.\n print 'reading input file ... \\n'\n print '.... there is more than 1 input_path*.yaml.\\n \\nPlease, remove the others files that you do not need. \\n'\n raise SystemExit\n else:\n #if aren't a yaml file, the code ask for you to put a valid yaml file path.\n print 'There is no input_path*.yaml. \\nPlease, create a input file describe in INPUT_PARAMETERS.'\n raise SystemExit\n input_file = file #creating a better name to our dictionary info\n return data_path, save_path, input_file", "def _load_files(self):\n files = {}\n for fn_ in self.opts[\"src\"]:\n if os.path.isfile(fn_):\n files.update(self._file_dict(fn_))\n elif os.path.isdir(fn_):\n salt.utils.stringutils.print_cli(\n \"{} is a directory, only files are supported \"\n 'in non-chunked mode. Use \"--chunked\" command '\n \"line argument.\".format(fn_)\n )\n sys.exit(1)\n return files", "def get_input_files():\n\n raw_list = abspath(get('input_files'))\n valid_types = ['image/jpeg', 'image/tiff']\n images = [x for x in raw_list if mimetypes.guess_type(x)[0] in valid_types]\n print('* Input images: {}'.format(len(images)))\n return images", "def _get_file_names():\n file_names = {}\n file_names['train'] = ['data_batch_%d' % i for i in xrange(1, 6)]\n file_names['test'] = ['test_batch']\n\n return file_names", "def get_input_files(dir_path):\n return [os.path.join(dir_path,f) for f in os.listdir(dir_path)\n if os.path.isfile(os.path.join(dir_path,f))]", "def collect_graphs(inputfiles):\n graphs = {}\n global _open_files\n for inf in inputfiles:\n logging.debug('Opening file {}'.format(inf))\n f = r.TFile.Open(inf)\n _open_files.append(f)\n\n graphs[inf] = collectGraphs(f)\n\n logging.debug('Collected {} graphs'.format(len(graphs[inf])))\n\n return graphs", "def input_models():\n return [\n PDBFile(\n Path(golden_data, \"protdna_complex_1.pdb\"),\n path=golden_data,\n score=42.0,\n restr_fname=Path(golden_data, \"example_ambig_1.tbl\")\n ),\n PDBFile(\n Path(golden_data, \"protdna_complex_2.pdb\"),\n path=golden_data,\n score=28.0,\n restr_fname=Path(golden_data, \"example_ambig_2.tbl\")\n )]", "def set_inputs(subj):\n inputs = []\n for seq in range(1, 5):\n infname = 'v8.%s_%s.Powered.cleanEPI.uncensored.nii.gz' % (subj, seq)\n infile = os.path.join(os.environ['avp'], 'nii', infname)\n inputs.append(infile)\n inputfiles = ' '.join(inputs)\n\n return inputfiles", "def inputs(self):\n return {\n \"DS_DATA_HOME\": File_IO(self.node.inputs[0]),\n \"pathTocredentials\": File_IO(self.node.inputs[1]),\n \"pathToPDF\": File_IO(self.node.inputs[2])\n }", "def filenames(self) -> dict[str, str]:\r\n ...", "def parse_infiles(self):\n\n errs = 0\n # check file existence first\n for ifile in self.infiles:\n if ifile in ['-', 'stdin']: pass\n elif not os.path.isfile(ifile):\n print('** input file not found: %s' % ifile)\n errs += 1\n if errs: return 1\n \n # check for existence separately\n for ifile in self.infiles:\n if self.verb > 2: print('++ processing %s ...' 
% ifile)\n\n # open, read, close\n if ifile in ['-', 'stdin']: fp = sys.stdin\n else:\n try: fp = open(ifile)\n except:\n print(\"** failed to open input file %s\" % ifile)\n return 1\n ilines = fp.readlines()\n if ifile != sys.stdin: fp.close()\n\n # empty should be a terminal failure\n if len(ilines) < 1:\n print('** empty input for file %s' % ifile)\n return 1\n\n if len(self.labels) == 0:\n rv, self.labels = self.make_labels(ilines)\n self.parents = [self.find_parent_label(lab) for lab in self.labels]\n if rv: return 1\n\n rv, ldict = self.make_dict(ilines)\n if rv: return 1\n\n self.ldict.append(ldict)\n\n return 0", "def get_input_metadata(self):\n return []", "def open_input_files(self):\n self.dictionaryFile = open(self.dictionaryFile, 'r', encoding=self.encoding)\n\n if self.annotationFile :\n self.annotationFile = open(self.annotationFile, 'r', encoding=self.encoding)\n elif self.annotationFile is None:\n try:\n self.annotationFile = open(os.path.join(self.dictionaryPath, self.dictionaryName + '.ann'), 'r', encoding=self.encoding)\n except FileNotFoundError:\n if self.verbose >= 2:\n sys.stdout.write (\"Warning: annotation file is not found.\\n\")\n\n if self.abbreviationsFile :\n self.abbreviationsFile = open(self.abbreviationsFile, 'r', encoding=self.encoding)\n elif self.abbreviationsFile is None:\n try:\n self.abbreviationsFile = open(os.path.join(self.dictionaryPath, self.dictionaryName + '_abrv.dsl'), 'r', encoding=self.encoding)\n except FileNotFoundError:\n if self.verbose >= 2:\n sys.stdout.write (\"Warning: abbreviations file is not found.\\n\")", "def load_files(self):\n print('Saving numpy mask arrays in {0}'.format(self.ProcDir))\n\n if not os.path.isdir(self.ProcDir): os.mkdir(self.ProcDir)\n if not os.path.isdir(self.OutDir): os.mkdir(self.OutDir)\n\n self.Files = {}\n for ig in self.Set:\n phase = roipy.tools.load_half(ig,2)\n # convert wavelength to displacements\n # NOTE: make attributes of commonly used values in rsc: float(ig.Rsc['WAVELENGTH'])\n disp = phase * (ig.Wavelength / (4*np.pi))\n igram = ma.array(disp, mask=ma.nomask)\n name = self.save_ma(ig, igram) #Mask_ array is just zeros at this point..\n self.Files[ig.ID] = name\n\n print('load_files() complete: {0} interferograms'.format(self.Set.Nig))", "def inputs(self):\n return {\n \"path_to_dtb_json_file\": File_IO(self.node.inputs[0]),\n \"pathToToc\": File_IO(self.node.inputs[1]),\n \"pathToActualDTB\": File_IO(self.node.inputs[2])\n }", "def process_input_files(list_input_files):\n global dict_models_results\n global list_spacy_docs\n \n for input_file in list_input_files:\n prefix = prefix_from_filename(input_file)\n \n with open(input_file) as f:\n list_cases = json.load(f)\n dict_models_results[prefix] = list_cases\n \n \n #extract list of questions from all vignettes and create a mapping page -> vignette question\n dict_questions = {}\n for prefix, list_cases in dict_models_results.items():\n for vignette in list_cases:\n dict_questions[vignette[\"book_page\"]] = vignette[\"question\"]\n \n \n for book_page,question in dict_questions.items():\n doc_q = load_bner_onto_tokens_extension(question, book_page)\n list_spacy_docs.append(doc_q)\n \n return", "def files(cls, pdb_object):\n return [pdb_object.flags_relax,\n pdb_object.constraints,\n pdb_object.minimized.hidden_complexes,\n pdb_object.minimized.complex.pdb,\n pdb_object.minimized.scores]", "def get_files_from_time(self, time_info):\n file_dict = super().get_files_from_time(time_info)\n\n input_files = self.find_input_files(time_info, 
fill_missing=True)\n if input_files is None:\n return file_dict\n\n for key, value in input_files.items():\n file_dict[key] = value\n\n return file_dict", "def get_train_files(self):\n train_dir = os.path.join(self.data_dir, \"train_{}\".format(self.patient_no))\n filenames = os.listdir(train_dir)\n interm = ((os.path.splitext(f)[0].split(\"_\"), os.path.join(train_dir, f)) for f in filenames)\n return [(int(p[0][0]), int(p[0][1]), int(p[0][2]), p[1]) for p in interm]" ]
[ "0.68964374", "0.6508628", "0.6470385", "0.6470385", "0.6335087", "0.6294588", "0.62819946", "0.61027294", "0.60853153", "0.6044708", "0.6009861", "0.5923437", "0.5883391", "0.58639646", "0.5841919", "0.58342904", "0.5831044", "0.57976127", "0.5794483", "0.5770861", "0.57563263", "0.56754565", "0.5673721", "0.5673274", "0.56686586", "0.566363", "0.5654314", "0.56189847", "0.5578362", "0.5571973" ]
0.66878194
1
Static method responsible for creating a new instance of Proxy. This implementation aims to create a single instance of Proxy on the first call and from then on only return that instance. The current implementation doesn't allow Proxy to be reassigned: once created, the instance will not take a new value unless one is set explicitly, e.g. via `instance_variable.urls = 'new value'`
def __new__(cls, urls=None):
    if Proxy.__instance is None:
        Proxy.__instance = object.__new__(cls)
        Proxy.__instance.urls = urls
    return Proxy.__instance
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create(self, upstream, name, listen=None, enabled=None):\n\n if name in self.proxies():\n raise_with_traceback(ProxyExists(\"This proxy already exists.\"))\n\n # Lets build a dictionary to send the data to the Toxiproxy server\n json = {\n \"upstream\": upstream,\n \"name\": name\n }\n\n if listen is not None:\n json[\"listen\"] = listen\n else:\n json[\"listen\"] = \"127.0.0.1:0\"\n if enabled is not None:\n json[\"enabled\"] = enabled\n\n proxy_info = APIConsumer.post(\"/proxies\", json=json).json()\n proxy_info[\"api_consumer\"] = APIConsumer\n\n # Lets create a Proxy object to hold all its data\n proxy = Proxy(**proxy_info)\n\n return proxy", "def new(configuration: Mapping[str, Any], loop: AbstractEventLoop) \\\n -> ProxyProtocol:\n return SocksProxy(loop)", "def __call__(self, proxy):\n return LocalProxy(self, proxy)", "def __call__(self, proxy):\n return LocalProxy(self, proxy)", "def proxy(self):\n\t\tif self.__proxy is None:\n\t\t\treturn PROXY_DEFAULT\n\t\telif isinstance(self.__proxy, basestring):\n\t\t\treturn self.__proxy\n\t\telse:\n\t\t\traise ValueError(\"Proxy!!\")", "def test_settings_proxies_creation() -> None:\n settings = Settings()\n\n settings_proxy = settings.create_proxy() # We have one proxy\n assert len(settings._proxies) == 1\n\n second_proxy = settings.create_proxy() # Now we have two proxies\n assert len(settings._proxies) == 2\n\n # We are creating the third one, but the original `settings_proxy` is dereferenced\n settings_proxy = settings.create_proxy()\n assert len(settings._proxies) == 2\n\n del second_proxy\n assert len(settings._proxies) == 1\n\n del settings_proxy\n assert len(settings._proxies) == 0", "def new(sock, info=None):\n return ProxyPlayer(sock, info)", "def __init__(self, username=None, password=None, proxy_url=None, proxy_username=None, proxy_password=None):\n\n if not self._INSTANCE_INIT:\n self._init_object(username, password, proxy_url, proxy_username, proxy_password)\n self._INSTANCE_INIT = True\n\n self._try_set_proxy(proxy_url, proxy_username, proxy_password)\n self._try_login(username, password)", "async def _create_proxy(self):\n self._proxy = await self._controller.fopen_tcp_proxy(\n Cellular._DRONE_WEB_API_PORT\n )\n\n self._drone_http_url = f\"http://{self._proxy.address}:{self._proxy.port}\"\n\n if self._autoconfigure and self._user_apc_token is None:\n self.logger.info(\"cellular auto pairing and configuration\")\n # generate a new anonymous user APC token and configure the cellular.\n self._fautoconfigure_with_new_token()", "def make_instance(cls):\r\n def get_value(name):\r\n if name in attributes:\r\n return attributes[name]\r\n else:\r\n value = cls['get'](name)\r\n return bind_method(value, instance)\r\n\r\n def set_value(name, value):\r\n attributes[name] = value\r\n\r\n attributes = {}\r\n instance = {'get': get_value, 'set': set_value}\r\n return instance", "def __new__(cls, obj, *args, **kwargs):\n try:\n cache = cls.__dict__[\"_class_proxy_cache\"]\n except KeyError:\n cls._class_proxy_cache = cache = {}\n try:\n theclass = cache[obj.__class__]\n except KeyError:\n cache[obj.__class__] = theclass = cls._create_class_proxy(\n obj.__class__)\n ins = object.__new__(theclass)\n theclass.__init__(ins, obj, *args, **kwargs)\n return ins", "def __new__(cls, obj, *args, **kwargs):\n try:\n cache = cls.__dict__[\"_class_proxy_cache\"]\n except KeyError:\n cls._class_proxy_cache = cache = {}\n try:\n theclass = cache[obj.__class__]\n except KeyError:\n cache[obj.__class__] = theclass = 
cls._create_class_proxy(obj.__class__)\n ins = object.__new__(theclass)\n theclass.__init__(ins, obj, *args, **kwargs)\n return ins", "def set_proxy(proxy_url=None, proxy_username=None, proxy_password=None):\n\n instance = Ceic._get_instance()\n\n instance._ceic_configuration.set_proxy(proxy_url, proxy_username, proxy_password)\n\n return instance", "def __new__(cls, *args, **kwargs):\n if cls._instance is None:\n cls._instance = object.__new__(cls)\n\n return cls._instance", "def pick_a_new_proxy_ip(self):\n\t\t\n\t\tdef __all_alt_ips(alt_ip):\n\t\t\tif alt_ip.startswith(\"[\") and alt_ip.endswith(\"]\"):\n\t\t\t\talt_ip = alt_ip[1:-1]\n\t\t\t\talt_ip_list = []\n\t\t\t\tfor chunk in alt_ip.split(\",\"):\n\t\t\t\t\tif '-' in chunk:\n\t\t\t\t\t\ta, b = chunk.split(\"-\")\n\t\t\t\t\t\talt_ip_list.extend(str(x) for x in xrange(int(a), int(b)+1))\n\t\t\t\t\telse:\n\t\t\t\t\t\talt_ip_list.append(chunk)\n\t\t\t\treturn alt_ip_list\n\t\t\telse:\n\t\t\t\treturn [ alt_ip ]\n\t\t\n\t\t# parse the self.proxy string\n\t\tif self.proxy is None:\n\t\t\tproxy, alt_ip = None, None\n\t\telif \"/\" in self.proxy:\n\t\t\tproxy, alt_ip_range = self.proxy.split(\"/\", 1)\n\t\t\talt_ip = random.choice(__all_alt_ips(alt_ip_range))\n\t\t\talt_ip = int(alt_ip) # 0 is default server - bool(0) == False\n\t\telse:\n\t\t\tproxy, alt_ip = self.proxy, None\n\t\t\n\t\tif proxy:\n\t\t\t# validate proxy name\n\t\t\tif '://' in proxy:\n\t\t\t\traise ValueError(\"Proxy value %r invalid (expected host[:port])\" % (proxy))\n\t\t\tif not ':' in proxy:\n\t\t\t\tproxy += \":3128\"\n\t\t\n\t\t# remember the current proxy string so we know if it's changed\n\t\tself.__current_proxy = self.proxy\n\t\t\n\t\t# set (proxy, alt_ip) as the proxy we want to use\n\t\tself.__use_this_proxy = (proxy, alt_ip)\n\t\t\n\t\t# run self.reset()\n\t\tself.reset()", "def _instantiate(cls, **kwargs):\n return cls(**kwargs)", "def clone(self):\n return type(self)(self.endpoint)", "def get_instance(cls):\n return cls.__new__(cls)", "def get_instance(cls):\n return cls.__new__(cls)", "def get_instance(cls):\n return cls.__new__(cls)", "def test_Proxy(self) -> None:\n\n class Subject:\n def foo(self) -> int:\n return 1\n\n def bar(self) -> int:\n return 2\n\n s = Subject()\n s.baz = 3\n\n class ProxyTest(Proxy):\n def bar(self) -> int:\n return 4\n\n p = ProxyTest(s)\n\n assert p.foo() == 1, p.foo()\n assert p.bar() == 4, p.bar()\n assert p.baz == 3, p.baz\n\n p.baz = 5\n s.baz = 6\n\n assert p.baz == 5, p.baz\n assert p.get() == s, p.get()", "def buildProtocol(self, addr):\n proto = portforward.ProxyFactory.buildProtocol(self, addr)\n self.clientFactoryInstance = TestableProxyClientFactory()\n # Force the use of this specific instance\n proto.clientProtocolFactory = lambda: self.clientFactoryInstance\n self.protoInstance = proto\n return proto", "def __new__(cls, *args: Any, **kwargs: Any) -> Any:\n if not cls._instance:\n cls._instance = super(Singleton, cls).__new__( # type: ignore\n cls, *args, **kwargs\n )\n return cls._instance", "def __init__(self, obj):\n obj.Proxy = self", "def __init__(self, obj):\n obj.Proxy = self", "def get_proxy(self):\n return self.proxy()", "def __init__(self, proxy_enabled=None, proxy_host=None, proxy_user=None, proxy_password=None, proxy_ntlm_host=None, proxy_ntlm_domain=None, proxy_exceptions=None): # noqa: E501 # noqa: E501\n\n self._proxy_enabled = None\n self._proxy_host = None\n self._proxy_user = None\n self._proxy_password = None\n self._proxy_ntlm_host = None\n self._proxy_ntlm_domain = None\n 
self._proxy_exceptions = None\n self.discriminator = None\n\n if proxy_enabled is not None:\n self.proxy_enabled = proxy_enabled\n if proxy_host is not None:\n self.proxy_host = proxy_host\n if proxy_user is not None:\n self.proxy_user = proxy_user\n if proxy_password is not None:\n self.proxy_password = proxy_password\n if proxy_ntlm_host is not None:\n self.proxy_ntlm_host = proxy_ntlm_host\n if proxy_ntlm_domain is not None:\n self.proxy_ntlm_domain = proxy_ntlm_domain\n if proxy_exceptions is not None:\n self.proxy_exceptions = proxy_exceptions", "def __new__(cls):\n if not cls.INSTANCE:\n cls.INSTANCE = super().__new__(cls)\n return cls.INSTANCE", "def CachedProxy(getter):\n container = LazyContainer(getter)\n return LocalProxy(container.get)", "def __init__(self, url, proxy=None, **kwargs):\n self.proxy = proxy\n self.query_params = urllib.parse.urlencode(kwargs)\n self.url = url if not self.query_params else f\"{url}?{self.query_params}\"\n logger.info(\"UrllibHandler initialized: url=%s, proxy=%s\", self.url, self.proxy)" ]
[ "0.6240453", "0.6203818", "0.59883887", "0.59883887", "0.59512824", "0.58887017", "0.5730821", "0.5691852", "0.55683607", "0.5517402", "0.54947203", "0.5489108", "0.54607826", "0.5457385", "0.5434269", "0.5401339", "0.5399806", "0.5329499", "0.5329499", "0.5329499", "0.53285885", "0.53272873", "0.53071547", "0.530066", "0.530066", "0.5298951", "0.52773523", "0.5264804", "0.52512336", "0.52467257" ]
0.7647028
0
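For readers scanning the rows, a runnable illustration of the positive document above may help. This is a hedged reconstruction: the class-level `__instance = None` attribute and the example URL lists are assumptions, since the dump only shows the `__new__` method itself.

class Proxy:
    __instance = None  # name-mangled to _Proxy__instance

    def __new__(cls, urls=None):
        # Only the first call allocates; later calls ignore their arguments.
        if Proxy.__instance is None:
            Proxy.__instance = object.__new__(cls)
            Proxy.__instance.urls = urls
        return Proxy.__instance

first = Proxy(['http://10.0.0.1:3128'])
second = Proxy(['http://10.0.0.2:3128'])  # argument ignored: instance exists
assert first is second
assert second.urls == ['http://10.0.0.1:3128']
second.urls = ['http://10.0.0.2:3128']    # explicit reassignment still works

The assertions show exactly the behavior the query describes: construction is idempotent, and only direct attribute assignment changes the stored value.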
Fill the draw pile with an ordered set of cards
def fill_draw(self):
    self.draw = [x + str(y) for x in COLOR for y in CARD_VALUE]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_a_card(cards):\n import random\n card_drawn = random.choices(card_deck)\n cards.append(card_drawn[0])\n return", "def draw_card(self,card):\n self.hand.append(card)", "def draw_card(self, card):\n self.current_hand.append(card)", "def draw(self, canvas, pos):\r\n # Draw every card in Hand\r\n for card in self.hand:\r\n card.draw(canvas, pos)\r\n # Hide the dealer's first card until the round ends\r\n if (self.tag.upper() == \"DEALER\" and\r\n self.hand.index(card) == 0 and in_play):\r\n canvas.draw_image(\r\n card_back,\r\n CARD_BACK_CENTER, CARD_BACK_SIZE,\r\n pos, CARD_SIZE)\r\n # Update the cards' coordinates to stack them appropriately\r\n pos[0] -= 15 * self.side", "def refresh(self):\n self.deck = []\n\n for _suit in Suit:\n for _face in Face:\n self.insert(Card(_suit, _face, self))", "def draw(self):\n c = self.cards[0]\n self.cards = self.cards[1:]\n self.discards.append(c)\n return(c)", "def initial_draw(self):\n self.player.take_card(self.deck)\n self.dealer.take_card(self.deck)\n self.player.take_card(self.deck)\n self.dealer.put_face_down(self.deck)", "def draw(self, canvas, yloc):\n \n for card in self.hand:\n card.draw(canvas, (xloc+(self.hand.index(card)*CARD_SIZE[0]), yloc))", "def draw(self, canvas, pos):\r\n for card in self.deck:\r\n canvas.draw_image(\r\n card_back, CARD_BACK_CENTER,\r\n CARD_BACK_SIZE, pos, CARD_BACK_SIZE)", "def fill_standard_deck(self):\n for name in [\"ace\", \"two\", \"three\", \"four\", \"five\", \"six\", \"seven\", \"eight\", \"nine\", \"ten\", \"jack\",\n \"queen\", \"king\"]:\n for suit in [\"hearts\", \"diamonds\", \"spades\", \"clubs\"]:\n self.cards.append(card.Card(name, suit, self.card_values[name]))", "def draw(self):\n if len(self.cards) > 0:\n return self.cards.pop(0)\n else:\n self.reshuffle()\n return self.cards.pop(0)", "def draw_card(dealer,player): \n depth = 100\n x0,y0 = 100,100\n x1,y1 = 100,300\n\n bj_board.clear()\n for i in range(len(dealer)):\n if dealer[i].state==True:\n bj_board.add(dealer[i].image)\n dealer[i].image.moveTo(x0+i*20,y0)\n dealer[i].image.setDepth(depth-10*i)\n elif dealer[i].state==False:\n img=Image(img_path+\"Back.png\")\n bj_board.add(img)\n img.moveTo(x0+i*20,y0)\n img.setDepth(depth-10*i)\n for i in range(len(player)):\n bj_board.add(player[i].image)\n player[i].image.moveTo(x1+i*20,y1)\n player[i].image.setDepth(depth-10*i) \n \n text=Text(\"Your Total: \" + str(hand_value(player)))\n text.moveTo(300,300)\n bj_board.add(text)\n \n if dealer[0].state==True:\n text=Text(\"Dealer Total: \" + str(hand_value(dealer)))\n text.moveTo(300,100)\n bj_board.add(text)", "def update_deck_display(self):\n self.deck_surface.fill(CLEARCOLOUR)\n self.deck_surface.blit(self.background, (0, 0))\n if not self.is_empty():\n cards_to_draw = self.cards\n if self.draw_from_last:\n cards_to_draw = reversed(cards_to_draw)\n\n for i, card in enumerate(cards_to_draw):\n selected = (i == self.selected_card)\n image_to_draw = card.image\n\n if self.deck_reveal == DeckReveal.HIDE_ALL:\n image_to_draw = card.backimage\n\n if self.flip:\n image_to_draw = pygame.transform.flip(image_to_draw, self.vert_orientation,\n not self.vert_orientation)\n\n self.deck_surface.blit(image_to_draw, (card.x - selected * card.x * 0.5 *\n (-1)**self.flip * self.vert_orientation,\n card.y - selected * card.y * 0.5 *\n (-1)**self.flip * (not self.vert_orientation)))", "def draw(deck, hand): \r\n # Remakes deck if it becomes empty.\r\n if len(deck) == 0:\r\n deck = create_deck()\r\n \r\n i = random.randint(0,len(deck)-1)\r\n card = 
deck.pop(i)\r\n hand.append(card)\r\n \r\n return deck, hand", "def testDraw(self):\n self.initDeck([1,2,3,4,5,6,7,8,9,10])\n\n card = self.testDeck.draw()\n assert len(self.testDeck.deck) == 9, \"Test Deck did not reduce\"\n assert len(self.testDeck.discardPile) == 1, \"card was not added to the discard pile\" \n assert self.testDeck.discardPile[0] == card, \"Card in the discardPile is not the same card that was drawned\"\n card = self.testDeck.draw()\n assert len(self.testDeck.deck) == 8, \"Test Deck did not reduce\"\n assert len(self.testDeck.discardPile) == 2, \"card was not added to the discard pile\"", "def redraw(self):\n offset = self.actual_row * self.row_size\n x = 5\n y = 5\n ind = 0\n self.scene.clear()\n for _ in range(self.column_size):\n for _ in range(self.row_size):\n if ind+offset < len(self.cards):\n self.draw_card(x, y, ind+offset)\n x += 90\n ind += 1\n x = 5\n y += 120", "def draw_card(dealer,player):\n # hidden_img = Image(img_path+\"back.png\")\n depth = 100\n x0,y0 = 100,100\n x1,y1 = 100,300\n ix = 30\n\n bj_board.clear()\n for card in dealer:\n if card.state:\n card.image.moveTo(x0, y0)\n card.image.setDepth(depth)\n bj_board.add(card.image)\n else:\n img = Image(img_path+\"Back.png\")\n img.moveTo(x0, y0)\n img.setDepth(depth)\n bj_board.add(img)\n x0 += ix\n \n for card in player:\n if card.state:\n card.image.moveTo(x1, y1)\n card.image.setDepth(depth)\n bj_board.add(card.image)\n else:\n img = Image(img_path+\"back.png\")\n img.moveTo(x1, y1)\n img.setDepth(depth)\n bj_board.add(img)\n x1 += ix", "def hit(self, deck):\n self.cards.append(deck.draw_card())", "def draw_by_insufficient(self):\n \n if self.cap_counter > 100:\n self.draw_loop(\"draw due to none in 50\")\n \n if self.board.count(self.empty) == 62:\n self.draw_loop(\"draw due to insufficient\")\n\n \n if self.board.count(self.empty) == 61:\n for i in self.board:\n if i != self.empty:\n if i.graphic == piece_class.PIECEDICT[WHITE][piece_class.Bishop] or i.graphic == piece_class.PIECEDICT[BLACK][piece_class.Bishop]:\n self.draw_loop(\"draw due to insufficient\")\n if i.graphic == piece_class.PIECEDICT[WHITE][piece_class.Knight] or i.graphic == piece_class.PIECEDICT[BLACK][piece_class.Knight]:\n self.draw_loop(\"draw due to insufficient\")", "def redraw(self, cards):\n for i in range(len(cards)): # Convert list of input strings to ints\n cards[i] = int(cards[i]) \n \n self.discard(cards)\n self.draw()\n self.redrew = True\n print(f\"{self.playerClass} has re-drew {len(cards)} cards.\")\n print(f\"Hand of {self.playerClass}: {self.updateHand()}\")", "def draw(self, card):\n self._cards.append(card) # add card to hand\n self.value = self._set_value() # determine value", "def draw(self):\r\n if len(self.matchPileManager.piles) > 0:\r\n print \"Match Piles\\r\"\r\n for matchPile in self.matchPileManager.piles:\r\n cardListView = CardListView(matchPile.cards)\r\n print \"{0}\\r\".format(cardListView.draw())", "def draw_card(self):\n if self.draw:\n return self.draw.pop()\n else:\n return NO_CARD", "def play(self):\r\n self.num_players = int(input(\"Welcome to card drawing game, Please enter number of players:\"))\r\n #contains all the draws from different players as list of draws per player.\r\n #with player number as key\r\n print(f\"Num players:{self.num_players}\")\r\n \r\n #initialize player points and draws\r\n self._initialize_player_stats()\r\n \r\n for y in range(DrawCardsGame.num_turns):\r\n for x in range(self.num_players):\r\n input(f\"Press enter for turn no {y+1} to draw for player 
{x+1}:\")\r\n card_drawn = self.cards.get_top_card()\r\n self.player_draws[f'{x}'].append(card_drawn)\r\n print(f\"card_drawn {card_drawn}\")\r\n self.player_points[f'{x}']+= DrawCardsGame.shades_points_dict[card_drawn[0]] * card_drawn[1]\r\n print(f\"player_points {self.player_points}\")\r\n \r\n print(repr(self.player_draws)) \r\n print(repr(self.player_points)) \r\n self.determine_winner(self.player_draws['0'],self.player_draws['1'])\r\n self.determine_winner1()", "def draw(self, deck):\n self.hand.append(deck.draw())\n return self", "def conclude_hand(self):\n for position in self.positions.keys():\n if position not in self.cards:\n self.cards[position] = (Card(), Card())", "def draw(self, canvas, pos):\n \n # Draw a \"Hand\" as a horizontal sequence of \"Cards\" \n # where the parameter \"pos\" is the position of the\n # upper left corner of the leftmost \"Card\". \n # Note: assume generally that only the first five \n # \"Cards\" of a player's \"Hand\" need to be visible \n # on the \"canvas\".\n for card in self.hand:\n card.draw(canvas, pos) \n pos[0] += CARD_SIZE[0] + X_MARGIN\n \n return None", "def draw_card(self, x, y, card_pos):\n card_id = self.cards[card_pos]\n item = CommonCardHandle(card_pos, self)\n pixmap = QPixmap()\n if not pixmap.loadFromData(self.parent.database.get_data(card_id, 'low_res')):\n # TODO: throw error or something\n pass\n item.setPixmap(pixmap)\n item.setPos(x, y)\n self.scene.addItem(item)\n\n if card_pos in self.get_selected_cards():\n self.draw_highlight(x, x + 85, y, y + 115, QColor(255, 0, 0))", "def draw(self):\n return self.cards.pop()", "def draw_card(person, deck, times=1):\n drawing_player = \"Dealer\" if person.dealer else \"User\"\n for _ in range(times):\n try:\n card = deck.top_draw()\n except IndexError:\n print(\"We've reached the bottom of the deck!\")\n else:\n if card.face == \"Ace\":\n person.aceCount += 1\n person.hand.append(card)\n person.total += card.value\n print(f\"\\n-----> {drawing_player} draws {card.__str__()}\\n\"\n f\"Their current total hand is {person.total}.\")\n time.sleep(1)" ]
[ "0.72226983", "0.6849346", "0.67760706", "0.6662149", "0.6661963", "0.6635878", "0.66047674", "0.6596281", "0.65947175", "0.656584", "0.6545951", "0.6444289", "0.64289516", "0.63977695", "0.63781214", "0.6375881", "0.6361973", "0.63425326", "0.6213734", "0.6210681", "0.61957484", "0.6108861", "0.61064094", "0.60956526", "0.6059943", "0.6046307", "0.5999884", "0.5996995", "0.5989364", "0.59552264" ]
0.7324256
0
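A minimal sketch of the document above in runnable form. COLOR and CARD_VALUE are not defined anywhere in this dump, so the Hanabi-style values below are assumptions chosen only to make the example execute.

COLOR = ['R', 'G', 'B', 'Y', 'W']            # assumed color codes
CARD_VALUE = [1, 1, 1, 2, 2, 3, 3, 4, 4, 5]  # assumed per-color card values

class Game:
    def fill_draw(self):
        # One card string per (color, value) pair, in deterministic order.
        self.draw = [x + str(y) for x in COLOR for y in CARD_VALUE]

game = Game()
game.fill_draw()
assert game.draw[0] == 'R1'
assert len(game.draw) == len(COLOR) * len(CARD_VALUE)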
Draw CARD_IN_HANDS cards and put them in the hand of each player
def draw_initial_hands(self):
    self.hands = [[] for i in range(self.num_players)]
    for i in range(self.num_players):
        for _ in range(CARD_IN_HANDS):
            self.hands[i].append(self.draw_card())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_hands(n_players=1):\n if n_players > 6:\n assert \"too many players. someone can't play.\"\n\n deck = make_deck()\n\n random.shuffle(deck)\n\n hands = []\n\n for i in range(n_players):\n hands.append(deck[15*i:15*(i+1)])\n\n bag = deck[n_players*15:]\n\n return hands, bag", "def deal_poker_hand(self, deck):\n\n for i in range(5):\n self.hand.append(deck.drawCard())", "def deal(self, num_cards=7):\n self.deck.shuffle()\n for player in self.players:\n for i in range(num_cards):\n self.hands[player].append(self.deck.draw())", "def draw_hand(self, deck, num_cards):\n\n self.hand = deck.deal_cards(num_cards)", "def draw(self, canvas, pos):\r\n # Draw every card in Hand\r\n for card in self.hand:\r\n card.draw(canvas, pos)\r\n # Hide the dealer's first card until the round ends\r\n if (self.tag.upper() == \"DEALER\" and\r\n self.hand.index(card) == 0 and in_play):\r\n canvas.draw_image(\r\n card_back,\r\n CARD_BACK_CENTER, CARD_BACK_SIZE,\r\n pos, CARD_SIZE)\r\n # Update the cards' coordinates to stack them appropriately\r\n pos[0] -= 15 * self.side", "def draw_card(self,card):\n self.hand.append(card)", "def draw(self):\n while len(self.hand) < 6: # While player doesn't have 6 cards in hand\n if self.HP == 0:\n print(\"You died.\")\n break\n else:\n self.hand.append(self.deck.pop(0)) # Draw from their deck\n self.HP = self.getHP() # Update HP", "def draw_card(self, card):\n self.current_hand.append(card)", "def draw(self, canvas, yloc):\n \n for card in self.hand:\n card.draw(canvas, (xloc+(self.hand.index(card)*CARD_SIZE[0]), yloc))", "def draw(self, canvas, pos):\n \n # Draw a \"Hand\" as a horizontal sequence of \"Cards\" \n # where the parameter \"pos\" is the position of the\n # upper left corner of the leftmost \"Card\". \n # Note: assume generally that only the first five \n # \"Cards\" of a player's \"Hand\" need to be visible \n # on the \"canvas\".\n for card in self.hand:\n card.draw(canvas, pos) \n pos[0] += CARD_SIZE[0] + X_MARGIN\n \n return None", "def draw(deck, hand): \r\n # Remakes deck if it becomes empty.\r\n if len(deck) == 0:\r\n deck = create_deck()\r\n \r\n i = random.randint(0,len(deck)-1)\r\n card = deck.pop(i)\r\n hand.append(card)\r\n \r\n return deck, hand", "def play(self):\r\n self.num_players = int(input(\"Welcome to card drawing game, Please enter number of players:\"))\r\n #contains all the draws from different players as list of draws per player.\r\n #with player number as key\r\n print(f\"Num players:{self.num_players}\")\r\n \r\n #initialize player points and draws\r\n self._initialize_player_stats()\r\n \r\n for y in range(DrawCardsGame.num_turns):\r\n for x in range(self.num_players):\r\n input(f\"Press enter for turn no {y+1} to draw for player {x+1}:\")\r\n card_drawn = self.cards.get_top_card()\r\n self.player_draws[f'{x}'].append(card_drawn)\r\n print(f\"card_drawn {card_drawn}\")\r\n self.player_points[f'{x}']+= DrawCardsGame.shades_points_dict[card_drawn[0]] * card_drawn[1]\r\n print(f\"player_points {self.player_points}\")\r\n \r\n print(repr(self.player_draws)) \r\n print(repr(self.player_points)) \r\n self.determine_winner(self.player_draws['0'],self.player_draws['1'])\r\n self.determine_winner1()", "def deal_cards(self):\r\n\t\tself.player.double = False\r\n\t\tif self.cardstack.reshuffle:\r\n\t\t\tself.cardstack.shuffle(self.decks)\r\n\t\t\tself.cardstack.reshuffle = False\r\n\t\tself.hands.append(Hand())\r\n\t\tfor i in 
range(2):\r\n\t\t\tself.hands[0].add_card(self.cardstack.draw())\r\n\t\t\tself.dealer.add_card(self.cardstack.draw())", "def draw_card(dealer,player):\n # hidden_img = Image(img_path+\"back.png\")\n depth = 100\n x0,y0 = 100,100\n x1,y1 = 100,300\n ix = 30\n\n bj_board.clear()\n for card in dealer:\n if card.state:\n card.image.moveTo(x0, y0)\n card.image.setDepth(depth)\n bj_board.add(card.image)\n else:\n img = Image(img_path+\"Back.png\")\n img.moveTo(x0, y0)\n img.setDepth(depth)\n bj_board.add(img)\n x0 += ix\n \n for card in player:\n if card.state:\n card.image.moveTo(x1, y1)\n card.image.setDepth(depth)\n bj_board.add(card.image)\n else:\n img = Image(img_path+\"back.png\")\n img.moveTo(x1, y1)\n img.setDepth(depth)\n bj_board.add(img)\n x1 += ix", "def deal(self, amount=7):\n for _ in range(amount):\n for hand in self.hands:\n hand.draw()", "def startGame(d_hand, p_hand, deck1):\n NUM_CARDS = 2\n\n for i in range(NUM_CARDS):\n d_hand.getCard(deck1.drawCard())\n p_hand.getCard(deck1.drawCard())", "def player_show_hand(self):\n for card in self.get_hand():\n print(card.get_card())", "def deal(deck): \r\n hand = []\r\n for n in range(2): \r\n deck, hand = draw(deck, hand)\r\n \r\n return deck, hand", "def conclude_hand(self):\n for position in self.positions.keys():\n if position not in self.cards:\n self.cards[position] = (Card(), Card())", "def setup_newgame(self):\n global chips\n self.bet = 100\n if chips < self.bet: \n self.game_over = True\n chips -= self.bet\n \n\n self.cards_list = arcade.SpriteList()\n\n #resets on newgame\n self.top_card_int = 0 ## this had to be moved here to make it so that you are not drawing over the 52 card limit\n self.player_hand = []\n self.dealer_hand = []\n self.player_value = 0\n self.dealer_value = 0\n self.player_ace_count = 0\n self.dealer_ace_count = 0\n self.player_almost_bust = 0\n self.dealer_almost_bust = 0\n self.blackjack = False\n self.victory = False\n self.defeat = False\n \n #creates deck\n for card_suit in CARD_SUITS:\n for card_value in CARD_VALUES:\n card = Card(card_suit, card_value, CARD_SCALE)\n self.cards_list.append(card)\n #shuffles deck\n for pos1 in range(len(self.cards_list)):\n pos2 = random.randrange(len(self.cards_list))\n self.cards_list.swap(pos1, pos2)\n \n #Current way to add cards to player and dealer hands since using .pop() on self.cards_list deletes the card itself even in the other hands\n \n #self.dealer_hand.append(self.top_card_int)\n self.hit(\"dealer\")\n self.dealer_hand[0].face_down()\n #first_card = self.dealer_hand[0]\n #first_card.face_down()\n #self.dealer_hand[0].face_down()\n self.hit(\"player\")\n self.player_hand[0].face_down()\n self.hit(\"dealer\")\n self.dealer_hand[1].face_down()\n self.hit(\"player\")\n self.player_hand[1].face_down()\n self.update_card_positions()", "def draw_a_card(cards):\n import random\n card_drawn = random.choices(card_deck)\n cards.append(card_drawn[0])\n return", "def show_hand(self):\n for card in self.hand:\n print(card)", "def draw_card(dealer,player): \n depth = 100\n x0,y0 = 100,100\n x1,y1 = 100,300\n\n bj_board.clear()\n for i in range(len(dealer)):\n if dealer[i].state==True:\n bj_board.add(dealer[i].image)\n dealer[i].image.moveTo(x0+i*20,y0)\n dealer[i].image.setDepth(depth-10*i)\n elif dealer[i].state==False:\n img=Image(img_path+\"Back.png\")\n bj_board.add(img)\n img.moveTo(x0+i*20,y0)\n img.setDepth(depth-10*i)\n for i in range(len(player)):\n bj_board.add(player[i].image)\n player[i].image.moveTo(x1+i*20,y1)\n player[i].image.setDepth(depth-10*i) \n 
\n text=Text(\"Your Total: \" + str(hand_value(player)))\n text.moveTo(300,300)\n bj_board.add(text)\n \n if dealer[0].state==True:\n text=Text(\"Dealer Total: \" + str(hand_value(dealer)))\n text.moveTo(300,100)\n bj_board.add(text)", "def draw(self, deck):\n self.hand.append(deck.draw())\n return self", "def battle_screen_my_hand_card_display(screen,buttons, screen_status, button_status, card_database_filter, user):\n rect_position_x = 100\n rect_position_y = 610\n row_number = 1\n if screen_status.battle_screen_action_indicator == 'stage-0':\n pass\n else :\n\n if screen_status.battle_screen_my_hand_page_id <= 0:\n screen_status.battle_screen_my_hand_page_id = 1\n # Edge cases when len() = 6,12,18....\n if len(user.hand_list) % 7 == 0 and len(user.hand_list) != 0:\n if screen_status.battle_screen_my_hand_page_id >= (len(user.hand_list))//7 + 1:\n screen_status.battle_screen_my_hand_page_id = (len(user.hand_list))//7 + 0\n\n else:\n if screen_status.battle_screen_my_hand_page_id >= (len(user.hand_list))//7 + 2:\n screen_status.battle_screen_my_hand_page_id = (len(user.hand_list))//7 + 1\n # Algorithm to draw all cards in local_store_list, 6 card per page.\n for card in user.hand_list[7*(screen_status.battle_screen_my_hand_page_id - 1):7 * screen_status.battle_screen_my_hand_page_id]:\n if row_number <= 7:\n card.rect.x = rect_position_x\n card.rect.y = rect_position_y\n screen.blit(card.image, card.rect)\n rect_position_x += 145\n row_number += 1\n if row_number >= 8:\n row_number = 1", "async def draw_start(self):\n for i, player in enumerate(self.players):\n def bet_check(m):\n \"\"\"If the value can be converted to a float and is within the bounds return true, else false\"\"\"\n try:\n value = float(m.content)\n if 0 <= value <= player.coins:\n return True\n else:\n return False\n except:\n return False\n\n if not player.out:\n await self.ctx.send(f\"{self.users[i].name}, How much would you like to bet? You have {player.coins} in the bank: \")\n try:\n bet = await self.client.wait_for('message', timeout=120.0, check=bet_check)\n bet = float(bet.content)\n if bet == 0:\n player.out = True\n self.total_players_out += 1\n else:\n player.debit(bet)\n player.bet = bet\n except:\n await self.ctx.send(\"Timed Out!\")\n player.out = True\n self.total_players_out += 1\n # shuffle cards and dealer draws one, send the dealers hand to the channel, loop through all players that aren't out and show their hand\n # if all players arent out\n if self.total_players_out < len(self.players):\n self.deck.shuffle()\n self.dealer.clear()\n self.deck.move_cards(self.dealer, 1)\n\n embed_dealer = discord.Embed(title='Dealer', color=0x00ff00)\n embed_dealer.add_field(\n name=\"Hand\", value=self.dealer, inline=False)\n self.dealer_msg = await self.ctx.send(embed=embed_dealer)\n\n embed_players = discord.Embed(title='Players', color=0x0000fd)\n for i, player in enumerate(self.players):\n if not player.out:\n player.clear()\n self.deck.move_cards(player, 2)\n # name=their discord name and value = their hand\n embed_players.add_field(\n name=self.users[i].name, value=player, inline=True)\n if player.get_value() == 21:\n player.has_bj = True\n self.players_msg = await self.ctx.send(embed=embed_players)", "def deal(self, hands, card_per_hand=1):\n for rounds in range(card_per_hand):\n for hand in hands:\n if self.cards:\n top_card = self.cards[0]\n self.give(top_card, hand)\n else:\n print(\"Can't continue deal. 
Out of cards!\")", "def find_flush(self, hands_list, cards_list):\n card_color_counts = Counter(map(lambda c: c.color, cards_list))\n for value in card_color_counts.values():\n if value == 5:\n hands_list.append(HandDescription('Flush', None, None))", "def Deal():\r\n cardsout = []\r\n cardoptions = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23]\r\n topcardoptions = [0,2,3,4,5,6]\r\n topcard = topcardoptions[random.randint(0,5)]\r\n cardoptions.pop(cardoptions.index(topcard))\r\n cardsout.append(topcard)\r\n\r\n if SHOWHAPPENINGS == True:\r\n disp = card_dict[topcard]\r\n print(\"Topcard is: {}\".format(disp)) \r\n\r\n for i in range(4):\r\n numcards = 0\r\n while numcards < 5:\r\n possiblerange = len(cardoptions) - 1\r\n cardindex = random.randint(0,possiblerange)\r\n card = cardoptions[cardindex]\r\n cardsout.append(card)\r\n cardoptions.pop(cardoptions.index(card))\r\n PlayerHands[i].append(card)\r\n numcards += 1\r\n PlayerHands[i] = sorted(PlayerHands[i]) #putting into ascending order\r\n if i == 0 or i == 2:\r\n PlayerHands[i].append(\"RedTeam\")\r\n else: \r\n PlayerHands[i].append(\"BlackTeam\")\r\n \r\n PlayerHands[0].append(PLAYER1)\r\n PlayerHands[1].append(PLAYER2)\r\n PlayerHands[2].append(PLAYER3)\r\n PlayerHands[3].append(PLAYER4)\r\n #PlayerHand format = [card1,card2,card3,card4,card5,Team,Name]\r\n\r\n return topcard", "def hand_over_cards(round_winner, players):\n\n # go through each of the players\n for player in players:\n\n # hand over all their cards\n while True:\n try:\n round_winner.deck.append(\n player.cards_in_play.popleft())\n except IndexError:\n # no more cards\n break" ]
[ "0.7210847", "0.7161282", "0.7158964", "0.7154756", "0.71415424", "0.7067022", "0.7014355", "0.6951996", "0.6947632", "0.6837969", "0.6819214", "0.68042874", "0.6789673", "0.67616475", "0.67540085", "0.6737133", "0.66947967", "0.6661551", "0.6657943", "0.66329676", "0.6595771", "0.65815234", "0.6578196", "0.6560693", "0.6538027", "0.6448613", "0.64479744", "0.64364326", "0.64293206", "0.6425411" ]
0.77261215
0
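To show the dealing loop above end to end, here is a hedged, self-contained sketch; CARD_IN_HANDS, the constructor, and `draw_card` popping from the front of the pile are all assumptions not shown in the row.

CARD_IN_HANDS = 5  # assumed hand size

class Game:
    def __init__(self, num_players, deck):
        self.num_players = num_players
        self.draw = list(deck)

    def draw_card(self):
        return self.draw.pop(0)  # assumed implementation

    def draw_initial_hands(self):
        self.hands = [[] for i in range(self.num_players)]
        for i in range(self.num_players):
            for _ in range(CARD_IN_HANDS):
                self.hands[i].append(self.draw_card())

game = Game(3, ['W%d' % v for v in range(1, 51)])
game.draw_initial_hands()
assert all(len(hand) == CARD_IN_HANDS for hand in game.hands)
assert len(game.draw) == 50 - 3 * CARD_IN_HANDS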
Discard the card at position card_index in player_index's hand
def discard_card(self, player_index, card_index):
    self.discard.append(self.hands[player_index][card_index])
    self.nb_blue_stone = min(self.nb_blue_stone + 1, MAX_BLUE_STONE)
    self.hands[player_index][card_index] = self.draw_card()
    return self.hands[player_index][card_index]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_card(self, card_idx: int) -> None:", "def discard_card(self, card_index):\n if self.game.discard_card(self.index, card_index) == NO_CARD:\n self.know[card_index] = NO_CARD\n else:\n self.know[card_index] = \"??\"", "def discard(self, card: Card) -> None:\n\n success = False\n for i in range(len(self._hand)):\n if self._hand[i] == card:\n self._hand[i] = Card.EMPTY\n success = True\n break\n\n if not success:\n raise ValueError(f\"Player hand does not contain {card.name}\")", "def discard(self, index=0):\n try:\n card = self._cards.pop(index)\n self.value = self._set_value()\n return card\n\n except IndexError:\n raise IndexError", "def hit(hand=bj.player1.hand):\r\n hand.append(bj.deck.remove_card())", "def update_hand(self, card_num):\n self.hand.pop(card_num)", "def removeCardFromHand(self, card):\r\n self.hand.remove(card)", "def choose_card_to_discard(self):\n random.choice(self.hand.card_list).use()", "def end_turn(self):\n for _ in range(self._hand.size()):\n card = self._hand.pop()\n self._discard.push(card)\n\n for _ in range(self._active.size()):\n card = self._active.pop()\n self._discard.push(card)\n\n for _ in range(self._handsize):\n if self._deck.size() == 0:\n self._discard.shuffle_collection()\n self._deck.replace(self._discard)\n self._discard.clear_collection()\n card = self._deck.pop()\n self._hand.push(card)\n self._money = 0\n self._attack = 0", "def remove_card(deck, card):\n card_index = deck.index(card)\n del deck[card_index]\n return deck", "def play_card(self, index):\n if index < self._hand.size():\n card = self._hand.pop(index)\n self._active.push(card)\n self._money = self._money + card.money\n self._attack = self._attack + card.attack\n print '\\nCard played:\\n%s' % card\n else:\n print \"\\nInvalid index number! 
Please type a valid number!\"", "def remove_card(self):\n return self.hand.pop()", "def discard(self):\n # Would need more logic...\n # Should be specified, won't use in 'war'\n return self.hand.pop()", "def discard(self, cards_to_discard: Sequence[int]):\r\n self._discard.extend(cards_to_discard)\r\n self._num_cards_available += len(cards_to_discard)", "def conclude_hand(self):\n for position in self.positions.keys():\n if position not in self.cards:\n self.cards[position] = (Card(), Card())", "def deal_cards(self, player: TarotPlayer, num: int) -> None:\n player.hand = []\n for _ in range(num):\n player.hand.append(self.deck.pop())", "def discard(self, card: Card):\n if self.discard_amount == 0:\n raise CanNotDiscardError()\n self.cards.remove(card)", "def remove_from_hand(self):\n pass", "def pop_card(self, i=-1):\n\treturn self.deckcards.pop(i)", "def deal_cards(self):\n aux = random.randint(0, len(self.deck))\n card = self.deck[aux]\n self.deck.pop(aux)\n print(f\"Received: {card}\")\n return card", "def player_discard(self, inpt):\n \n if inpt.isdigit() == False:\n return 0\n if int(inpt) > len(self.player_hand):\n print(\"\\nNumber of card entered is greater than number of cards\")\n print(\"Please try again \\n\")\n return 0\n if self.player_hand[int(inpt)-1][1] == '8':\n self.discard_pile = self.player_hand.pop(int(inpt)-1)\n self.new_suit = ''\n while self.new_suit not in ['h','d','s','c']:\n self.new_suit = input(\"Please enter new suit: h, d, s, c\\n\")\n print(\"\\nNew suit is: \", self.new_suit)\n return 1\n if self.new_suit != '':\n if self.player_hand[int(inpt)-1][0] == self.new_suit:\n self.discard_pile = self.player_hand.pop(int(inpt)-1)\n self.new_suit = ''\n return 1\n else:\n print(\"\\nYou need to match new suit\")\n print(\"Please try again\\n\")\n return 0\n if self.new_suit == '':\n if self.player_hand[int(inpt)-1][0] == self.discard_pile[0] or \\\n self.player_hand[int(inpt)-1][1] == self.discard_pile[1]:\n self.discard_pile = self.player_hand.pop(int(inpt)-1)\n return 1\n else:\n print(\"\\nYou need to match discard pile card suit or rank\")\n print(\"Please try again\\n\")\n return 0", "def pop(self, index=-1):\n return self._cards.pop(index)", "def deal_cards(self):\n for i in range(0, self.num_players):\n # start at first player index\n player = self.players[self.index_wrap(i)]\n for c in range(Evolution.BASE_CARD_DRAW + player.num_species):\n self.check_for_empty_deck()\n # actually pops last element so pretend deck is flipped\n player.hand.add_card(self.deck.pop())\n # Don't actually set cards left until all cards have been handed out\n if self.continue_game:\n self.cards_left.set(len(self.deck))\n else:\n self.cards_left.set(0)", "def deal_card(self):\n return self.deck.pop()", "def player_card(self):\n card = random.choice(self.hand.cards)\n self.hand.cards.remove(card)\n print(f\"{self.name}:{card!r:<3} \", end=\"\")\n return card", "def pop(self, index: int = 0) -> Card:\n return self.cards.pop(index)", "def deal_card(self):\n \n return self.deck.pop()", "def play(self, player, game):\n player.get_hand().get_cards().remove(self)\n card = game.pick_card()\n player.get_hand().add_cards(card)\n game.set_action(\"NO_ACTION\")", "def pop_card(self, i=-1):\n return self.cards.pop(i)", "def pop_card(self, i=-1):\n return self.cards.pop(i)" ]
[ "0.7953479", "0.7897812", "0.7351607", "0.735034", "0.71947783", "0.7015518", "0.69786286", "0.6937379", "0.6872043", "0.68690753", "0.6841711", "0.68187994", "0.68112737", "0.67882866", "0.6767954", "0.67522264", "0.6749074", "0.6715408", "0.668885", "0.66758704", "0.6662474", "0.6657043", "0.66421366", "0.65660423", "0.65597427", "0.6511504", "0.6473574", "0.6467672", "0.64399296", "0.64399296" ]
0.8206749
0
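The discard rule above refunds a clue token and immediately replaces the card. A small sketch under stated assumptions (MAX_BLUE_STONE, the starting state, and `draw_card` are invented for the demo):

MAX_BLUE_STONE = 8  # assumed token cap

class Game:
    def __init__(self):
        self.draw = ['W1', 'W2']
        self.discard = []
        self.nb_blue_stone = 7
        self.hands = [['R1']]

    def draw_card(self):
        return self.draw.pop(0)

    def discard_card(self, player_index, card_index):
        self.discard.append(self.hands[player_index][card_index])
        self.nb_blue_stone = min(self.nb_blue_stone + 1, MAX_BLUE_STONE)
        self.hands[player_index][card_index] = self.draw_card()
        return self.hands[player_index][card_index]

game = Game()
new_card = game.discard_card(0, 0)
assert game.discard == ['R1'] and new_card == 'W1'
assert game.nb_blue_stone == 8  # refunded, and never above the cap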
Try to play the card at position card_index in player_index's hand to complete one of the fireworks
def play_card(self, player_index, card_index):
    card = self.hands[player_index][card_index]
    color_index = COLOR.index(card[0])
    if self.is_card_playable(card):
        # the color and the number match, add the card
        self.firework[color_index].append(card)
        # if we complete the firework for a color, we get an extra
        # blue stone
        if len(self.firework[color_index]) == 5:
            self.nb_blue_stone = min(self.nb_blue_stone + 1, MAX_BLUE_STONE)
    else:
        # error, the card cannot be played, remove a red_stone
        if self.nb_red_stone == 0:
            raise GameOverError("The card " + card + " cannot be\
            played and there is no red stone anymore")
        self.nb_red_stone = self.nb_red_stone - 1
    self.hands[player_index][card_index] = self.draw_card()
    return self.hands[player_index][card_index]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hit(self, hand_index: int) -> None:\n\n self._hands[hand_index].deal_card()", "def hit(self, hand):\n if hand == \"player\":\n self.player_hand.append(self.cards_list[self.top_card_int])\n self.calculate_value(\"player\")\n elif hand == \"dealer\":\n self.dealer_hand.append(self.cards_list[self.top_card_int])\n self.calculate_value(\"dealer\")\n self.top_card_int += 1\n self.update_card_positions()", "def play_card(self, index):\n if index < self._hand.size():\n card = self._hand.pop(index)\n self._active.push(card)\n self._money = self._money + card.money\n self._attack = self._attack + card.attack\n print '\\nCard played:\\n%s' % card\n else:\n print \"\\nInvalid index number! Please type a valid number!\"", "def hit(self, hand_idx=0):\n player = self.players[hand_idx]\n if player['active']:\n player['hand'].append(self._pick_card())\n if self.dealer_hand_value() < self.dealer_min:\n self.dealer_hand.append(self._pick_card())\n if self.is_bust(hand_idx):\n self.stand(hand_idx) # Force Stand and compute game result\n # Turn Off Split and Double Down after the first hit\n if player['allow_dd']: # Don't allow double down after the first hit\n player['allow_dd'] = False\n if self.allow_split: # Don't allow split after the first hit\n self.allow_split = False", "def FullHouse(handIn, indx):", "def hit(hand=bj.player1.hand):\r\n hand.append(bj.deck.remove_card())", "def play_card(self, card_index):\n if self.game.play_card(self.index, card_index) == NO_CARD:\n self.know[card_index] = NO_CARD\n else:\n self.know[card_index] = \"??\"", "def play_minion(num_cards_in_hand, card_idx):\n click_on_card(num_cards_in_hand, card_idx)\n mouseclick(510, 472)", "def deal(list_of_Player, Multideck,dealer_index):\n for i in range(9):\n [player.hand.cards.append(Multideck.draw_top()) for player in list_of_Player]\n dealer_index = (dealer_index+1) % len(list_of_Player)\n return dealer_index", "def hit(\n self,\n card: List[Tuple[int, str, str]],\n card_index: int = 0\n ) -> None:\n self._cards[card_index].extend(card)", "def additionalMatch(handIn, indx):", "def comp10001bo_play(player_no, hand, stockpiles, discard_piles, build_piles,\n play_history):", "def hit(player):\n deal_random_card(player)", "def play_card_by_index(self, index, **kwargs):\n target = kwargs.get('target', None)\n self.hand[index].play(target=target)", "def handle_card_click_event(index, sid, methods=[\"GET\", \"POST\"]):\n if game.state == \"playing\":\n print(index)\n active_player = game.ordered_players[game.active_player_index]\n print(active_player.hand)\n if active_player.sid == sid:\n game.play_card(int(index), active_player)\n if game.card_played == True:\n game.card_played = False\n for player, card in game.trick.items():\n card_list = [card.value, card.suit]\n game.trick_obj[player.name]=card_list\n hand_dict = {}\n for i, card in enumerate(player.hand):\n hand_dict[i]=[card.value, card.suit]\n print(active_player.hand)\n print(active_player.sid)\n active_player.can_follow_suit = False\n socketio.emit(\"update hand\", hand_dict, room=active_player.sid)\n socketio.emit(\"show trick\", game.trick_obj)\n print(f\"{game.trick_obj}\")\n game.active_player_index += 1\n if game.active_player_index == len(game.players):\n trick_table_dict = {}\n for player in game.ordered_players:\n trick_table_dict[player.name.title()]=player.tricks\n socketio.emit(\"end trick\", game.winner_message)\n socketio.emit(\"update trick table\", trick_table_dict)\n else:\n next_player = game.ordered_players[game.active_player_index]\n 
socketio.emit(\"your turn\", room=next_player.sid)\n else:(\"Its not your turn\")\n else:\n print(\"its not time for that\")", "def deal_cards(self, players):\n hand_num = (len(self.deck)) // len(players)\n for index, player in enumerate(players):\n current_hand = self.deck[index * hand_num:(index + 1) * hand_num]\n current_hand.sort(key=functools.cmp_to_key(self.round.sort_card))\n player.set_current_hand(current_hand)\n player.initial_hand = cards2str(player.current_hand)", "def deal_cards(self):\n for i in range(0, self.num_players):\n # start at first player index\n player = self.players[self.index_wrap(i)]\n for c in range(Evolution.BASE_CARD_DRAW + player.num_species):\n self.check_for_empty_deck()\n # actually pops last element so pretend deck is flipped\n player.hand.add_card(self.deck.pop())\n # Don't actually set cards left until all cards have been handed out\n if self.continue_game:\n self.cards_left.set(len(self.deck))\n else:\n self.cards_left.set(0)", "def BestPlay(cardsout, trickcards,hand): #FIXME\r\n\r\n trump = []\r\n ss = []\r\n os1 = []\r\n os2 = []\r\n\r\n for i in range(len(hand) - 2):\r\n if hand[i] < 7:\r\n trump.append(hand[i])\r\n elif hand[i] < 12:\r\n ss.append(hand[i])\r\n elif hand[i] < 18:\r\n os1.append(hand[i]) \r\n else: \r\n os2.append(hand[i])\r\n\r\n if len(trickcards) == 0: #Lead\r\n return 0 #FIXME-----------------------------------------------------------\r\n else: #Not lead\r\n if trickcards[0] < 7: #Trump led\r\n if len(trump) == 1:\r\n return hand.index(trump[0])\r\n #Led trump decision play FIXME---------------------------------\r\n elif trickcards[0] < 12: #SS led\r\n if len(ss) == 1:\r\n return hand.index(ss[0])\r\n #Led ss decision FIXME-------------------------------------------------\r\n elif trickcards[0] < 18: #OS1 led\r\n if len(os1) == 1:\r\n return hand.index(os1[0])\r\n #Led os1 decision FIXME--------------------------------------------\r\n else: #OS2 led\r\n if len(os2) == 1:\r\n return hand.index(os2[0]) \r\n #led os2 decision FIXME--------------------------------------------\r\n \r\n return 0", "def deal_opening_cards(self) -> None:\r\n for i in range(self.num_of_players):\r\n self.dealer.deal_cards_to(self.players[i].cards_stack, PokerRules.CARDS_PER_PLAYER)", "def click_on_card(num_in_hand, card_idx):\n card_idx -= 1\n logger.debug(\"Clicking on hand card index {} with {} cards in hand\".\\\n format(card_idx, num_in_hand))\n coords = card_coords[num_in_hand]\n game_click(coords[card_idx])", "def update_card_positions(self):\n dealer_y = 350\n player_y = 150\n x_position = 100\n for i in self.dealer_hand:\n i.position = (x_position, dealer_y)\n x_position += 100\n x_position = 100\n for j in self.player_hand:\n j.position = (x_position, player_y)\n x_position += 100", "def setup_newgame(self):\n global chips\n self.bet = 100\n if chips < self.bet: \n self.game_over = True\n chips -= self.bet\n \n\n self.cards_list = arcade.SpriteList()\n\n #resets on newgame\n self.top_card_int = 0 ## this had to be moved here to make it so that you are not drawing over the 52 card limit\n self.player_hand = []\n self.dealer_hand = []\n self.player_value = 0\n self.dealer_value = 0\n self.player_ace_count = 0\n self.dealer_ace_count = 0\n self.player_almost_bust = 0\n self.dealer_almost_bust = 0\n self.blackjack = False\n self.victory = False\n self.defeat = False\n \n #creates deck\n for card_suit in CARD_SUITS:\n for card_value in CARD_VALUES:\n card = Card(card_suit, card_value, CARD_SCALE)\n self.cards_list.append(card)\n #shuffles deck\n for 
pos1 in range(len(self.cards_list)):\n pos2 = random.randrange(len(self.cards_list))\n self.cards_list.swap(pos1, pos2)\n \n #Current way to add cards to player and dealer hands since using .pop() on self.cards_list deletes the card itself even in the other hands\n \n #self.dealer_hand.append(self.top_card_int)\n self.hit(\"dealer\")\n self.dealer_hand[0].face_down()\n #first_card = self.dealer_hand[0]\n #first_card.face_down()\n #self.dealer_hand[0].face_down()\n self.hit(\"player\")\n self.player_hand[0].face_down()\n self.hit(\"dealer\")\n self.dealer_hand[1].face_down()\n self.hit(\"player\")\n self.player_hand[1].face_down()\n self.update_card_positions()", "def play(self):\n\n start_player = random.choice(self.names)\n turn_order = self.player_order(start=start_player)\n\n \"\"\"Play card from player hand when it is empty\"\"\"\n while self.hands[start_player].hand.cards:\n for name in turn_order:\n self.hands[name].player_card()\n print()", "def conclude_hand(self):\n for position in self.positions.keys():\n if position not in self.cards:\n self.cards[position] = (Card(), Card())", "def discard_card(self, player_index, card_index):\n self.discard.append(self.hands[player_index][card_index])\n self.nb_blue_stone = min(self.nb_blue_stone + 1, MAX_BLUE_STONE)\n self.hands[player_index][card_index] = self.draw_card()\n return self.hands[player_index][card_index]", "def step(self, action):\n assert self.completed_rounds < self.num_rounds\n\n player = self.players[self.current_player_id]\n card = action\n\n if card not in player.hand:\n raise ValueError(\"Action not allowed because the card is not in the player's hand\")\n\n player.hand.remove(card)\n player.played.add(card)\n # print(f\"Player {self.current_player_id} with hand {[c.id for c in player.hand]} played the card {card.id}\")\n best_combination_on_the_table = self._get_best_combination(card)\n if best_combination_on_the_table:\n self.last_player_capturing_id = self.current_player_id\n player.captured.add(card)\n for c in best_combination_on_the_table:\n self.table.remove(c)\n player.captured.add(c)\n if not self.table and not (self._is_last_round and self._is_round_over()):\n player.scope += 1\n else:\n self.table.add(card)\n # print(f\"Cards on the table after play: {[c.id for c in self.table]}\")\n\n if self._is_round_over():\n self.completed_rounds += 1\n # print(f\"=========== Round {self.current_round} completed ============\")\n self.current_player_id = (self.current_player_id + 1) % self.num_players\n\n if self.is_over():\n last_player_capturing = self.players[self.last_player_capturing_id]\n # print(f\"Giving the remaining cards to player {last_player_capturing.player_id}\")\n for card in self.table:\n last_player_capturing.captured.add(card)\n self.table = set()\n assert all([len(p.played) == 10 for p in self.players])\n assert all([len(p.hand) == 0 for p in self.players])\n return self.get_state(), self.current_player_id", "def bomb(self, index):\n\n coords = self.canvas.coords(self.squares[index])\n x, y = coords[0] + 10, coords[1] + 10\n tag = self.hit[index]\n\n # Count moves for player (used for scoring)\n if self.players.winCondition == 1:\n self.players.moves[self.playerNumber][0] += 1\n\n # Hit\n if tag != 0:\n self.tracker[tag] -= 1\n\n # Count moves for player (used in scoring)\n if self.players.winCondition == 1:\n self.players.moves[self.playerNumber].append(\\\n self.players.moves[self.playerNumber][0])\n self.players.moves[self.playerNumber][0] = 0\n\n # Ship was sunk\n if self.tracker[tag] == 0:\n text = 
[]\n tagname = 'tag%s' % tag\n\n # Bonus points equal to the size of ship\n # awarded for sinking entire ship\n if self.players.winCondition == 0:\n self.players.score[self.playerNumber] += \\\n self.counter_copy[tag]\n\n # Show bombed location with black & orange flashing bar\n for i in range(5):\n text.append(self.canvas.create_text(\\\n x, y, text='O', fill='red'))\n self.canvas.addtag_withtag('text', text[-1])\n self.canvas.tag_raise(tagname, 'square')\n for i in range(3): # Flashing bar\n self.canvas.itemconfig(tagname, {'fill': 'black'})\n self.canvas.update()\n self.myframe.after(100)\n self.canvas.itemconfig(tagname, {'fill': 'orange'})\n self.canvas.update()\n self.myframe.after(100)\n\n self.hit[index] = 5\n self.players.message[not self.playerNumber] = \\\n '%s,\\nYour ship of size %s was sunk by enemy' % \\\n (self.players.usernames[not self.playerNumber], \\\n self.counter_copy[tag])\n self.players.endOfTurn(self.tracker)\n return\n\n # Hit, but not sunk. Player gets only 1 point\n if self.players.winCondition == 0:\n self.players.score[self.playerNumber] += 1\n\n # Show hit location with flashing black & red circle\n text = []\n for i in range(3):\n del text[:]\n for i in range(5): # flash black circle\n text.append(self.canvas.create_text(\\\n x, y, text='O', fill='black'))\n self.canvas.addtag_withtag('text', text[-1])\n self.canvas.update()\n self.myframe.after(100)\n del text[:]\n for i in range(5): # flash red circle\n text.append(self.canvas.create_text(\\\n x, y, text='O', fill='red'))\n self.canvas.addtag_withtag('text', text[-1])\n self.canvas.update()\n self.myframe.after(100)\n\n # Complete miss. Draw 'X'\n else:\n for i in range(5):\n text = self.canvas.create_text(x, y, text='X', fill='yellow')\n self.canvas.addtag_withtag('text', text)\n self.canvas.update()\n self.myframe.after(250)\n self.hit[index] = 5\n self.players.endOfTurn(self.tracker)", "def deal_cards(self):\r\n\t\tself.player.double = False\r\n\t\tif self.cardstack.reshuffle:\r\n\t\t\tself.cardstack.shuffle(self.decks)\r\n\t\t\tself.cardstack.reshuffle = False\r\n\t\tself.hands.append(Hand())\r\n\t\tfor i in range(2):\r\n\t\t\tself.hands[0].add_card(self.cardstack.draw())\r\n\t\t\tself.dealer.add_card(self.cardstack.draw())", "def hit(self, card):\n self.cards.hit(card)\n if self.cards.hand ==-1:\n self.state ='burst'", "def deal(self, hands, card_per_hand=1):\n for rounds in range(card_per_hand):\n for hand in hands:\n if self.cards:\n top_card = self.cards[0]\n self.give(top_card, hand)\n else:\n print(\"Can't continue deal. Out of cards!\")" ]
[ "0.6926654", "0.66886723", "0.6646441", "0.66020334", "0.6499319", "0.6453483", "0.64293414", "0.6423775", "0.63456625", "0.6319759", "0.6314192", "0.6274448", "0.62138325", "0.61962587", "0.61802703", "0.61195254", "0.6112211", "0.60624474", "0.6038025", "0.6034104", "0.60151833", "0.6013806", "0.5987455", "0.59347713", "0.5933948", "0.5930936", "0.5921329", "0.58946514", "0.58927953", "0.5891542" ]
0.7138669
0
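To tie the play rule above together, here is a hedged end-to-end sketch. It reuses the `is_card_playable` helper that appears among this dump's negatives; COLOR, the token counts, GameOverError, and the two-card deck are assumptions made for the demo.

COLOR = ['R', 'G', 'B', 'Y', 'W']
MAX_BLUE_STONE = 8

class GameOverError(Exception):
    pass

class Game:
    def __init__(self):
        self.firework = [[] for _ in COLOR]
        self.hands = [['R1', 'R3']]
        self.draw = ['W1', 'W2']
        self.nb_blue_stone = 8
        self.nb_red_stone = 3

    def draw_card(self):
        return self.draw.pop(0)

    def is_card_playable(self, card):
        color_index = COLOR.index(card[0])
        return len(self.firework[color_index]) == int(card[1]) - 1

    def play_card(self, player_index, card_index):
        card = self.hands[player_index][card_index]
        color_index = COLOR.index(card[0])
        if self.is_card_playable(card):
            self.firework[color_index].append(card)
            if len(self.firework[color_index]) == 5:
                self.nb_blue_stone = min(self.nb_blue_stone + 1, MAX_BLUE_STONE)
        else:
            if self.nb_red_stone == 0:
                raise GameOverError('no red stone left')
            self.nb_red_stone = self.nb_red_stone - 1
        self.hands[player_index][card_index] = self.draw_card()
        return self.hands[player_index][card_index]

game = Game()
game.play_card(0, 0)           # 'R1' starts the red firework
assert game.firework[0] == ['R1']
game.play_card(0, 1)           # 'R3' is not playable yet: a fuse burns
assert game.nb_red_stone == 2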
Check if the card is in the other players' hands
def is_card_in_other_hands(self, own_hand_index, card):
    for i, hand in enumerate(self.hands):
        if i == own_hand_index:
            continue
        if card in hand:
            return True
    return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def still_in_hand(self):\n return len(self.hand.cards)!=0", "def does_player_have_card(self, player, card):\n return card in self.hands[player]", "def hitMe(hand, deck):\n if deck.cardsLeft == 0:\n return False\n hand.getCard(deck.drawCard())\n return True", "def hasBlackjack(self):\n return len(self.cards) == 2 and self.getPoints() == 21", "def isGameOver(self):\n for i in range(self.rows):\n for j in range(self.columns):\n if self.grid[i][j].face == 'down':\n return False\n #if here then all cards must be face up\n return True", "def is_blackjack(self):\n if self.hand == 21 and len(list(self)) ==2:\n print '%s = Blackjack'%self\n return True", "def highCard(p1name, p2name, p1hand, p2hand):\n\tplayer1 = list(p1hand)\n\tplayer2 = list(p2hand)\n\n\tif player1[0] == \"A\" or player1[1] == \"A\":\n\t\tprint(\"%s wins!\" % p1name)", "def is_card_playable(self, card):\n color_index = COLOR.index(card[0])\n return len(self.firework[color_index]) == int(card[1]) - 1", "def is_miss_deal(hand: list, mighty: Card) -> bool:\n point_card_count = 0\n for card in hand:\n if card.is_pointcard() and card != mighty:\n point_card_count += 1\n\n if point_card_count <= 1:\n return True\n else:\n return False", "def is_match(self, card):\n\t\treturn self.suit == card.suit or self.value == card.value", "def is_game_win(self):\n return not self.deck and not self.hand", "def is_pair(hand):\n\tis_a_pair = False\n\ti = 0\n\twhile i < 13:\n\t\tif hand[i] == 2:\n\t\t\tis_a_pair = True\n\t\ti += 1 \n\thigh_card = 0\n\tj = 0\n\twhile j < 13 and is_a_pair == True:\n\t\tif hand[j] == 2 and j >= high_card:\n\t\t\thigh_card = j\n\t\tj += 1\n\tif is_a_pair:\n\t\treturn True, high_card\n\telse:\n\t\treturn False", "def has_cards(self):\n return self.hand.len() > 0", "def is_high_card(hand):\n\tis_a_high_card = True\n\ti = 0\n\twhile i < 13:\n\t\tif hand[i] > 1:\n\t\t\tis_high_card = False\n\t\ti += 1\n\t\t\n\thigh_card = 0\n\tj = 0\n\twhile j < 13 and is_a_high_card == True:\n\t\tif hand[j] == 1 and j >= high_card:\n\t\t\thigh_card = j\n\t\tj += 1\n\tif is_a_high_card:\n\t\treturn True, high_card\n\telse:\n\t\treturn False", "def check_for_blackjack(self):\n if (self.dealer.hand.value + self.dealer.face_down.value) == 21:\n if self.player.hand.blackjack:\n return self.blackjack_push()\n else:\n return self.blackjack_dealer_win()\n\n if self.player.hand.blackjack():\n return self.blackjack_player_win()\n lost_insurance_bet(self.side_bet)\n return False", "def cardPlayable(self, card):\n return self.field[Suit.toInt(card.getSuit()) - 1] == card.getValue() - 1", "def can_play(self) -> bool:\n purple_card = self.game.board.purple\n return (\n self.game.current_player != self\n and purple_card is not None\n and purple_card.space > len(self.game.board.yellow[self])\n )", "def _find_joker(hand, joker):\n\n if joker in hand:\n return True\n else:\n return False", "def is_blackjack(self) -> bool:\n if self.score == 21 and len(self.cards) == 2:\n return True\n else:\n return False", "def won_game(self):\n for player in self.players:\n if len(player.cards) == 0:\n\n return True\n return False", "def check_winner(self):\r\n if all(heap == 0 for heap in self.heaps):\r\n if self.misere:\r\n self.winner = self.other_player\r\n self.loser = self.current_player\r\n else:\r\n self.winner = self.current_player\r\n self.loser = self.other_player", "def is_round_over(whose_turn,players):\n if ((len(players[whose_turn].hand.cards) == 0) and (players[whose_turn].has_discarded == True)):\n round_over = True\n else:\n round_over = False\n return 
round_over", "def check_selected_card(_player1, _player2):\n if _player1.selected_card and _player2.selected_card:\n color = _player1.selected_card.suit\n if _player2.selected_card.suit != color and check_color_card(_player2, color):\n _player2.selected_card = None", "def _ischeckopponent(self, from_, to_):\n opp_color = 'W' if self.to_move == 'B' else 'B'\n opp_king_pos = self.board.get_occupants(color=opp_color, notation='K')[0]\n\n diff = (\n opp_king_pos[0] - to_cartesian(to_)[0],\n opp_king_pos[1] - to_cartesian(to_)[1]\n )\n\n if diff in self.board[from_].occupant.get_captures():\n if self.board[from_].occupant.hopping:\n return True\n\n if not self.board.isblocked(to_, opp_king_pos):\n return True\n\n return False", "def __eq__(self, other_card):\n if self.rank == other_card.rank or self.suit == other_card.suit:\n return True\n else:\n return False", "def check_winner(self):\n if self.player1.chips <= BIG_BLIND_BET:\n return 2\n elif self.player2.chips <= BIG_BLIND_BET:\n return 1\n else:\n return 0", "def is_same_sign(self, cards):\n\n jokers = 0\n w_o_jokers = []\n for card in cards:\n if self.num_to_card(int(card)) == 0:\n jokers += 1\n else:\n w_o_jokers.append(int(card))\n\n w_o_jokers = sorted(w_o_jokers)\n print(\"whitout jokers: \", w_o_jokers)\n if w_o_jokers[0] <= 12: # if the cards are CLUBS\n if w_o_jokers[-1] > 12:\n return False\n if w_o_jokers[0] <= 25: # if the cards are DIAMONDS\n if w_o_jokers[-1] > 25:\n return False\n if w_o_jokers[0] <= 38: # HEARTS\n if w_o_jokers[-1] > 38:\n return False\n if w_o_jokers[0] <= 51:\n if w_o_jokers[-1] > 51:\n return False\n return True", "def checkif_suitpile(self, from_col, from_row) -> bool:\n card_to_move = self.solitaire[from_col, from_row]\n\n if self.m_suit_pile.can_move_to_pile(card_to_move):\n return True\n else:\n return False", "def can_be_moved(self, card):\n return is_one_rank_apart(self.waste_card, card)", "def get_game_ending_hands(self) -> bool:\n end = False\n if 10 in self.house.hand.value: # Check if house's first card is a 10\n if self.action_peek_cards() == 1: # Peek the card to check for and ace. CardValue.ACE has a value of 1\n self.event_house_blackjack()\n end = True\n elif 11 in self.house.hand.value: # Check if house's first card is an ace\n if self.action_peek_cards() in (10, 11, 12, 13): # TEN, JACK, QUEEN, KING in respective order\n self.event_house_blackjack()\n end = True\n elif min(self.house.hand.value) > 21: # Check if house has gone bust\n self.event_house_bust()\n end = True\n elif max(self.user.hand.value) == 21: # Check for player blackjack\n self.event_player_blackjack()\n end = True\n elif min(self.user.hand.value) > 21: # Check if player has gone bust\n self.event_player_bust()\n end = True\n self.has_game_ending_hand = end\n return end" ]
[ "0.7485425", "0.731124", "0.69145197", "0.6886143", "0.6801055", "0.6795613", "0.6710412", "0.6705385", "0.6697481", "0.66499996", "0.6609032", "0.6579931", "0.6578903", "0.655311", "0.65114313", "0.64970493", "0.6496383", "0.64959776", "0.64737856", "0.6472519", "0.64617413", "0.6422633", "0.6420375", "0.6411018", "0.64002776", "0.6382672", "0.6371568", "0.63668704", "0.63438404", "0.6340929" ]
0.7958578
0
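
A minimal, self-contained sketch of how the positive document above could be exercised. The Game wrapper class and the hand contents here are hypothetical stand-ins, since the dataset row only provides the method itself:

class Game:
    def __init__(self, hands):
        self.hands = hands  # list of hands, each hand a list of card labels

    def is_card_in_other_hands(self, own_hand_index, card):
        # scan every hand except the player's own for the given card
        for i, hand in enumerate(self.hands):
            if i == own_hand_index:
                continue
            if card in hand:
                return True
        return False

game = Game([["AS", "KD"], ["QH", "2C"], ["7S"]])
print(game.is_card_in_other_hands(0, "QH"))  # True: another player holds QH
print(game.is_card_in_other_hands(1, "QH"))  # False: only player 1 holds it
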
Function that creates a sudoku matrix
def make_sudoku(size):
    def mutate_list_1(lst, size):
        """Helper function for removing part of a list from the beginning and adding it to the end."""
        count = 0
        while count < size:
            elem = lst[0]
            lst.remove(elem)
            lst.append(elem)
            count += 1
        return lst

    def mutate_list_2(lst):
        """Helper function for removing an element from the beginning of a list and adding it to the end."""
        elem = lst[0]
        lst.remove(elem)
        lst.append(elem)
        return lst

    count = 0
    matrix_length = size ** 2  # define the size of the matrix
    matrix = [[] * matrix_length]  # create an empty matrix
    matrix[0] = list(range(1, matrix_length + 1))  # set the first row to the numbers 1 to size ** 2
    while count < matrix_length - 1:
        l = matrix[count][:]  # create a new list object that is a copy of the previous row in the matrix
        if (count + 1) % size == 0:  # check if the row starts a new inner square of the matrix
            l = matrix[count - (size - 1)][:]  # if it does, l is set to the first row of the previous square
            matrix.append(mutate_list_2(l))
        else:
            matrix.append(mutate_list_1(l, size))  # mutate l and add it to the matrix
        count += 1
    return matrix
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_sudoku(self)->list:\n grid = [[None for x in range(9)] for row in range(9)]\n for row in range(0,9):\n for column in range(0,9):\n if row <= 2 and column <=2:\n grid[row][column] = cell.Cell(0)\n elif row <= 2 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(1)\n elif row <= 2 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(2)\n elif 3 <= row <= 5 and column <= 2:\n grid[row][column] = cell.Cell(3)\n elif 3 <= row <= 5 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(4)\n elif 3 <= row <= 5 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(5)\n elif 6 <= row <= 8 and column <= 2:\n grid[row][column] = cell.Cell(6)\n elif 6 <= row <= 8 and 3 <= column <= 5:\n grid[row][column] = cell.Cell(7)\n elif 6 <= row <= 8 and 6 <= column <= 8:\n grid[row][column] = cell.Cell(8)\n return grid", "def set_up_matrix():\n matrix= []\n row= \"1 9 3 4 5\"\n row= to_int(row)\n matrix.append(row)\n row= \"2 30 4 5 6\"\n row= to_int(row)\n matrix.append(row)\n row= \"3 8 5 6 7\"\n row= to_int(row)\n matrix.append(row)\n row= \"4 5 6 7 8\"\n row= to_int(row)\n matrix.append(row)\n row= \"5 6 7 8 9\"\n row= to_int(row)\n matrix.append(row)\n return matrix", "def makeMatrix():\n listOfChars = []\n for ascii in range(32, 128):\n listOfChars.append(chr(ascii))\n random.shuffle(listOfChars)\n matrix = Grid(8, 12)\n i = 0\n for row in range(matrix.getHeight()):\n for column in range(matrix.getWidth()):\n matrix[row][column] = listOfChars[i]\n i += 1\n return matrix", "def generate_s_matrix(number: int):\n matrix_zero = np.ones((number, number))\n matrix_zero[1:-1, 1:-1] = 0\n return matrix_zero", "def make_board():\n return [[0 for i in range(8)] for i in range(8)]", "def get_sudoku_matrix(self):\n return self.sudoku_matrix", "def make_matrix():\n row, col = [int(x) for x in input().split()]\n island = [[int(x) for x in input().split()] for _ in range(row)]\n return row, col, island", "def build(xaxis, yaxis, zaxis):\n matrix = []\n for floor in range(zaxis):\n roomnum = 1\n matrix.append([])\n for row in range(yaxis):\n matrix[floor].append([])\n for column in range(xaxis):\n matrix[floor][row].append(str(roomnum))\n roomnum += 1\n return matrix", "def sudoku_solver(m):\n square_sides = int(sqrt(len(m)))\n dicts = initialize_dicts(m, square_sides)\n dicts, square_coords = populate_dicts(m, square_sides, dicts)\n dicts = get_missing(dicts)\n candidates = get_candidates(m, dicts, square_coords)\n m, candidates = scan_sudoku(m, dicts, square_coords, candidates)\n single_candidates = single_candidate(candidates, square_coords, dicts)\n m, candidates = fill_fit(m, dicts, square_coords, single_candidates=single_candidates)\n candidates = get_candidates(m, dicts, square_coords)\n naked_sets_fields_row, naked_sets_fields_cols = find_naked_sets(candidates, dicts, setlength=2)\n candidates, naked_sets = remove_naked_sets_from_candidates(candidates, naked_sets_fields_row, naked_sets_fields_cols)\n candidates = get_candidates(m, dicts, square_coords, naked_sets)\n naked_sets_fields_row, naked_sets_fields_cols = find_naked_sets(candidates, dicts, setlength=3)\n return m", "def make_matrix(rows, columns):\n\tmatrix = []\n\tfor row in range(rows):\n\t\tmatrix += [[0] * columns]\n\t\t\n\treturn matrix", "def createboard(rows,columns):\n row_size = ''\n for rows in range(rows):\n if rows == 0:\n row_size = row_size + '0'\n else:\n row_size = row_size + ',0'\n fullmatrix = ''\n for cols in range(columns):\n if cols == 0:\n fullmatrix = fullmatrix + row_size\n else:\n fullmatrix = fullmatrix + '; ' + 
row_size\n return fullmatrix", "def create_matrix(self):\n import random\n random.seed()\n #pecas_disponiveis = [\"escudo\", \"espada\", \"espada_dupla\", \"machadinha\", \"adaga\", \"punhais\"]\n pecas_disponiveis = [\"escudo\", \"punhais\", \"espada_dupla\", \"machadinha\", \"adaga\"]\n x_start, y_start = 10, self.running.top_bar\n x, y = x_start, y_start\n anterior_esq = [None] * self.running.linhas\n anterior_acima = None\n\n for i in range(self.running.colunas):\n coluna = []\n for j in range(self.running.linhas):\n possiveis_escolhas = pecas_disponiveis.copy()\n if possiveis_escolhas.count(anterior_esq[j]) > 0: possiveis_escolhas.remove(anterior_esq[j])\n if possiveis_escolhas.count(anterior_acima) > 0 : possiveis_escolhas.remove(anterior_acima)\n \n e_type = random.choice(possiveis_escolhas)\n tile = Tile(self.game, x, y, e_type)\n coluna.append(tile)\n self.running.game_images.append(tile.game_image)\n y += self.running.y_space\n anterior_esq[j] = e_type\n anterior_acima = e_type\n self.running.tabuleiro.append(coluna)\n x += self.running.x_space\n y = y_start\n return", "def get_table(self):\n \n # During testing, this'll speed the process update\n \n row = 0\n while row < 9:\n sudoku_row = input(\"Please enter the contents of row {}, using 0 to represent blanks:\".format(row+1))\n if len(sudoku_row) == 9:\n column = 0\n while column < 9:\n number_in_box = int(sudoku_row[column])\n self.table[row][column] = number_in_box\n column += 1\n row += 1\n else:\n print(\"You can only enter 9 numbers. Not letters. Not more. Not fewer. 9 numbers.\")", "def create_board(rows, columns):\n res = [[0 for i in range(columns)] for j in range(rows)]\n return res", "def solve_sudoku(sudoku):\n # Define the solution matrix that represents the sudoku puzzle\n solution = Matrix(9, 9, 1, 9)\n\n # Set up the model\n model = Model()\n\n # Set the constraints for the filled in cells\n for i in xrange(0, 9):\n for j in xrange(0, 9):\n if sudoku[i, j] > 0:\n model.add(solution[i, j] == int(sudoku[i, j]))\n\n # Add the constraint that all rows need to be different\n model.add([AllDiff(x) for x in solution.row])\n # Add the constraint that all columns need to be different\n model.add([AllDiff(y) for y in solution.col])\n \n # Add the constraint that all cells need to be different\n for i in xrange(0, 3):\n for j in xrange(0, 3):\n # Generate the constraint for each cell\n # x goes over the rows in each cell\n # y goes over the columns in each cell\n model.add(AllDiff(\n [solution[x, y] for x in xrange(i*3, (i+1)*3) for y in xrange(j*3, (j+1)*3)]))\n\n # Load a solver and solve the problem\n solver = model.load('MiniSat')\n solver.solve()\n return solution", "def make_matrix():\n print(\"Create n*m Matrix\")\n print(\"Enter matrix parameters:\")\n print(\"n = \")\n n = int(input())\n print(\"m = \")\n m = int(input())\n matrix = create_matrix(n, m)\n matrix = fill_matr(matrix)\n return matrix", "def crear_matrix(nxn):\n matrix =[]\n for i in range(nxn):\n matrix.append([])\n for e in range(nxn):\n matrix[i].append(\"\")\n return matrix", "def make_matrix(rows, cols, value=0):\n return Matrix([[value for i in range(cols)] for j in range(rows)])", "def generate_sudoku(self):\n\n # randomly generate the first row \n random_order_number = [x for x in range(1, 10)]\n random.shuffle(random_order_number)\n for x in range(9):\n value = random_order_number[x]\n this_cell = self.grid[0][x]\n this_cell.value = value\n self.remove_value(this_cell, 0, x, value)\n\n row = 1\n column = 0\n while row <9 and column < 9:\n 
time.sleep(0.05)\n # search for options\n # should only be done once for each cell\n this_cell = self.grid[row][column]\n if this_cell.options == None:\n this_cell.options = self.find_options(row, column, this_cell.grid)\n\n if not this_cell.options:\n # backtrace should only happen when there is no options for this cell\n row, column = self.backtrace(this_cell, row, column)\n\n else:\n # case 3: the number has options and the number returned from the cell is valid\n if this_cell.value != None:\n self.add_value(this_cell, row, column)\n this_cell.get_value_from_options()\n # when you switch the value for a value from the option, put the current value back into the row\n self.remove_value(this_cell, row, column, this_cell.value)\n if column == 8:\n row += 1\n column = 0\n else:\n column += 1\n try:\n self.print_detail(this_cell, row, column)\n except IndexError:\n pass", "def solveSudoku(board):\n # represents all numbers in a specific row, col, box\n # format: if (5,9) is in rows, that means row 5 contains digit 9\n\t\t# format: if (3, 2) is in cols, that means col 3 contains digit 2\n\t\t# format: if (0,2,8) is in boxes, that means box (0,2) contains 8\n\t\t# cellsToFill is a stack that holds all the (i,j) cells we need to fill\n rows, cols, boxes = set(), set(), set()\n cellsToFill = []\n m, n = len(board), len(board[0])\n \n def initDataSets():\n for i in range(m):\n for j in range(n):\n char = board[i][j]\n if char == '.':\n cellsToFill.append((i,j))\n else:\n addToDataSets((i, char), (j, char), (i//3, j//3, char))\n\n def addToDataSets(curRow, curCol, curBox):\n rows.add(curRow)\n cols.add(curCol)\n boxes.add(curBox)\n \n def removeFromDataSets(curRow, curCol, curBox):\n rows.remove(curRow)\n cols.remove(curCol)\n boxes.remove(curBox)\n \n def backtrack():\n if not cellsToFill:\n return True\n \n i, j = cellsToFill.pop()\n for char in '123456789':\n # check if the number is already in a row/col/box, if it is then skip to the next number\n curRow, curCol, curBox = (i, char), (j, char), (i//3, j//3, char)\n if curRow in rows or curCol in cols or curBox in boxes: continue\n \n # if not, add the number to the row/col/box\n addToDataSets(curRow, curCol, curBox)\n board[i][j] = char\n \n # start the recursive call for inserting the next number\n if (backtrack()):\n return True\n \n # backtrack wasn't successful, remove the number from the row/col/box\n removeFromDataSets(curRow, curCol, curBox)\n board[i][j] = '.'\n \n cellsToFill.append((i,j))\n return False\n \n initDataSets()\n print(board)\n backtrack()", "def create_sudoku_csp(sudoku):\n print sudoku\n constraints = []\n variables = []\n tupel_list = np.empty((9,9), dtype = csp.Variable)\n domain = range(1,10)#['1','2','3','4','5','6','7','8','9']\n for i,row in enumerate(sudoku):\n for j,field in enumerate(row):\n if field == 0:\n field = None\n \"\"\" do = domain\n else:\n do = [field]\"\"\"\n var = csp.Variable((i,j),domain)\n var.set_value(field)\n variables.append(var)\n tupel_list[i][j]=var \n \n for i in range(0,9):\n for j in range(0,9):\n for r in range(i+1,9):\n constraints.append(csp.UnequalConstraint(tupel_list[i][j],tupel_list[r][j]))\n for l in range(j+1,9):\n constraints.append(csp.UnequalConstraint(tupel_list[i][j],tupel_list[i][l]))\n for outer in range(0,3):\n for inner in range(0,3):\n if outer is not i % 3 and inner is not j % 3:\n constraints.append(csp.UnequalConstraint(tupel_list[i][j],tupel_list[3*(i/3)+outer][3*(j/3)+inner]))\n return csp.ConstrainedSatisfactionProblem(variables,constraints)", "def 
solveSudoku(self, board: List[List[str]]) -> None:\n def dfs(idx):\n if idx == len(blankIdx):\n return True\n else:\n i, j = blankIdx[idx]\n for num in rg:\n num += 1\n if (num not in rows[i] and\n num not in cols[j] and\n num not in boxs[i//3][j//3]):\n board[i][j]=str(num)\n rows[i].add(num)\n cols[j].add(num)\n boxs[i//3][j//3].add(num)\n if dfs(idx+1):\n return True\n board[i][j] = blank\n rows[i].remove(num)\n cols[j].remove(num)\n boxs[i//3][j//3].remove(num)\n \n rg,blank = range(9), \".\"\n rows = [set() for _ in rg]\n cols = [set() for _ in rg]\n boxs = [[set() for _ in range(3)] for j in range(3)]\n blankIdx = list()\n for i in rg:\n for j in rg:\n if board[i][j]!=blank:\n ele = int(board[i][j])\n rows[i].add(ele)\n cols[j].add(ele)\n boxs[i//3][j//3].add(ele)\n else:\n blankIdx.append((i,j))\n dfs(0)", "def solveSudoku(self, board: List[List[str]]) -> None:\n def getLocs(board):#初始化,获取需要填充的位置,记录为一个栈\n locs = []\n for row in range(9):\n for col in range(9):\n if board[row][col] == '.':\n locs.append((row, col))\n return locs\n\n def getMaps(board):#定义三个字典,跟踪9行、9列和9块的已填充数字,采用数据结构为defaultdict\n from collections import defaultdict as dd\n rowMap = [dd(int) for _ in range(9)]\n colMap = [dd(int) for _ in range(9)]\n blockMap = [dd(int) for _ in range(9)]\n for row in range(9):\n for col in range(9):\n if board[row][col] != '.':\n num = int(board[row][col])\n rowMap[row][num] += 1\n colMap[col][num] += 1\n bolckIndex = int(row/3)*3+int(col/3)\n blockMap[bolckIndex][num] += 1\n return rowMap, colMap, blockMap\n\n def fillBoard(board, locs):#递归填充剩余的数独空位置\n if not locs:\n return True\n row, col = locs.pop()#弹出一个待填充位置\n bolckIndex = int(row/3)*3+int(col/3)\n found = False\n for num in range(1, 10):\n if found:\n break\n if not rowMap[row][num] and not colMap[col][num] and not blockMap[bolckIndex][num]:\n ##如果当前行、当前列和当前块均不存在该数字,则将数字更新到相应行、列、块,并尝试填充\n rowMap[row][num] = 1\n colMap[col][num] = 1\n blockMap[bolckIndex][num] = 1\n board[row][col] = str(num)\n found = fillBoard(board, locs)#递归到下一层填充\n rowMap[row][num] = 0##状态回溯,将填充的位置清空\n colMap[col][num] = 0\n blockMap[bolckIndex][num] = 0\n if not found:##如果本轮都无法求解,则回溯到初始状态,继续从前面再填充\n locs.append((row, col))\n board[row][col] = '.'\n return found\n\n rowMap, colMap, blockMap = getMaps(board)\n locs = getLocs(board)\n fillBoard(board, locs)", "def new_game(n):\n matrix = []\n\n for i in range(n):\n matrix.append([0] * n)\n return matrix", "def create_grid(grid):\r\n for i in range (4):\r\n grid.append ([])\r\n for j in range (4):\r\n grid[i].append (0)", "def create_grid(grid):\r\n for i in range(4):\r\n grid.append([0]*4)\r\n return grid", "def create_board_matrix(self, height, width):\n return [[' '] * width for _ in xrange(height)]", "def create_grid(size):\n grid = []\n for i in range(size):\n row = ['0']*size\n grid.append(row)\n\n return grid", "def print_sudoku_matrix(self):\n row_list = 'ABCDEFGHI'\n\n print \" 1 2 3 4 5 6 7 8 9 \"\n for i in range(9):\n if i % 3 == 0:\n print \" +-------+-------+-------+\"\n var = row_list[i] + \" \"\n for j in range(9):\n if j % 3 == 0:\n var += \"| \"\n if self.sudoku_matrix[i][j].get_cell_value() == 0:\n var += \".\"\n else:\n var += str(self.sudoku_matrix[i][j].get_cell_value())\n var += \" \"\n print var + \"|\"\n print \" +-------+-------+-------+ \\n\"", "def make_matrix(sizex, sizey):\n return [[0] * sizey for i in range(sizex)]" ]
[ "0.77155757", "0.70223457", "0.68659484", "0.67353135", "0.6707213", "0.66946906", "0.6685877", "0.66664875", "0.6648201", "0.6629207", "0.662782", "0.66213465", "0.6606546", "0.6600456", "0.6567484", "0.65639156", "0.65273124", "0.6484139", "0.6478473", "0.64574647", "0.6436354", "0.6433187", "0.6421283", "0.6391149", "0.63904184", "0.6373052", "0.6356406", "0.63328075", "0.63302994", "0.63200927" ]
0.79173344
0
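
As a quick sanity check on the positive document above (a hypothetical snippet, not part of the dataset row), the generated grid can be verified to be a valid Latin square, i.e. every row and every column contains each of 1..size**2 exactly once:

grid = make_sudoku(3)  # 9x9 grid built by the function above
n = len(grid)
# every row is a permutation of 1..9
assert all(sorted(row) == list(range(1, n + 1)) for row in grid)
# every column is a permutation of 1..9
assert all(sorted(col) == list(range(1, n + 1)) for col in zip(*grid))
print("valid %dx%d grid" % (n, n))
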