Dataset columns (name, type, observed string lengths / class counts):

repo              string, lengths 7 to 55
path              string, lengths 4 to 127
func_name         string, lengths 1 to 88
original_string   string, lengths 75 to 19.8k
language          string, 1 class
code              string, lengths 75 to 19.8k
code_tokens       list
docstring         string, lengths 3 to 17.3k
docstring_tokens  list
sha               string, lengths 40 to 40
url               string, lengths 87 to 242
partition         string, 1 class
linode/linode_api4-python
linode_api4/objects/nodebalancer.py
NodeBalancerConfig.nodes
def nodes(self):
    """
    This is a special derived_class relationship because NodeBalancerNode is the
    only api object that requires two parent_ids
    """
    if not hasattr(self, '_nodes'):
        base_url = "{}/{}".format(NodeBalancerConfig.api_endpoint,
                                  NodeBalancerNode.derived_url_path)
        result = self._client._get_objects(base_url, NodeBalancerNode, model=self,
                                           parent_id=(self.id, self.nodebalancer_id))
        self._set('_nodes', result)

    return self._nodes
python
This is a special derived_class relationship because NodeBalancerNode is the only api object that requires two parent_ids
[ "This", "is", "a", "special", "derived_class", "relationship", "because", "NodeBalancerNode", "is", "the", "only", "api", "object", "that", "requires", "two", "parent_ids" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/nodebalancer.py#L73-L84
train
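Several of the accessors in this dump (nodes above, _get_raw_objects and ips further down) share the same lazy-caching shape: compute on first access, stash the result in a private attribute, and serve the cached value afterwards. A minimal self-contained sketch of that pattern, with all names hypothetical:

    class LazyNodes:
        """Illustrates the hasattr-based caching used by the accessors in this dump."""

        def _fetch_nodes(self):
            # stand-in for the API round trip performed by _get_objects
            return ["node-a", "node-b"]

        @property
        def nodes(self):
            # only hit the (pretend) API the first time the attribute is read
            if not hasattr(self, "_nodes"):
                self._nodes = self._fetch_nodes()
            return self._nodes

    lazy = LazyNodes()
    print(lazy.nodes)  # triggers the fetch
    print(lazy.nodes)  # served from the cached attribute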
linode/linode_api4-python
linode_api4/objects/volume.py
Volume.attach
def attach(self, to_linode, config=None):
    """
    Attaches this Volume to the given Linode
    """
    result = self._client.post('{}/attach'.format(Volume.api_endpoint), model=self,
        data={
            "linode_id": to_linode.id if issubclass(type(to_linode), Base) else to_linode,
            "config": None if not config else config.id if issubclass(type(config), Base) else config,
        })

    if not 'id' in result:
        raise UnexpectedResponseError('Unexpected response when attaching volume!', json=result)

    self._populate(result)
    return True
python
Attaches this Volume to the given Linode
[ "Attaches", "this", "Volume", "to", "the", "given", "Linode" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/volume.py#L22-L36
train
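A hedged usage sketch for the attach/detach pair: it assumes a LinodeClient authenticated with a personal access token, that these classes are importable from the package top level, and pre-existing volume and instance IDs (all hypothetical). Per the issubclass check above, attach accepts either an Instance object or a bare ID.

    from linode_api4 import LinodeClient, Volume, Instance

    client = LinodeClient("my-personal-access-token")   # hypothetical token

    volume = client.load(Volume, 12345)                 # hypothetical volume ID
    instance = client.load(Instance, 67890)             # hypothetical instance ID

    # attach accepts an Instance object or a plain integer ID
    volume.attach(instance)

    # ... later, detach it again (see the following record)
    volume.detach()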
linode/linode_api4-python
linode_api4/objects/volume.py
Volume.detach
def detach(self):
    """
    Detaches this Volume if it is attached
    """
    self._client.post('{}/detach'.format(Volume.api_endpoint), model=self)

    return True
python
Detaches this Volume if it is attached
[ "Detaches", "this", "Volume", "if", "it", "is", "attached" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/volume.py#L38-L44
train
linode/linode_api4-python
linode_api4/objects/volume.py
Volume.resize
def resize(self, size):
    """
    Resizes this Volume
    """
    result = self._client.post('{}/resize'.format(Volume.api_endpoint,
        model=self, data={ "size": size }))

    self._populate(result.json)

    return True
python
Resizes this Volume
[ "Resizes", "this", "Volume" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/volume.py#L46-L55
train
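In the resize body above, the closing parenthesis of str.format encloses model=self and data={...}, so those keyword arguments appear to be consumed by format rather than passed to post. A small standalone demonstration of why that does not raise, plus the corrected call shape as a sketch (not the library's code):

    # str.format accepts and ignores unused keyword arguments, which is why the misplaced
    # parenthesis above does not raise; model= and data= simply never reach post().
    url = '{}/resize'.format('volumes/{id}', model=None, data={"size": 2048})
    print(url)   # volumes/{id}/resize

    # corrected call shape (a sketch, assuming post returns a parsed dict as in attach):
    # result = self._client.post('{}/resize'.format(Volume.api_endpoint),
    #                            model=self, data={"size": size})
    # self._populate(result)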
linode/linode_api4-python
linode_api4/objects/volume.py
Volume.clone
def clone(self, label):
    """
    Clones this volume to a new volume in the same region with the given label

    :param label: The label for the new volume.

    :returns: The new volume object.
    """
    result = self._client.post('{}/clone'.format(Volume.api_endpoint),
        model=self, data={'label': label})

    if not 'id' in result:
        raise UnexpectedResponseError('Unexpected response cloning volume!')

    return Volume(self._client, result['id'], result)
python
Clones this volume to a new volume in the same region with the given label :param label: The label for the new volume. :returns: The new volume object.
[ "Clones", "this", "volume", "to", "a", "new", "volume", "in", "the", "same", "region", "with", "the", "given", "label" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/volume.py#L57-L71
train
linode/linode_api4-python
linode_api4/objects/tag.py
Tag._get_raw_objects
def _get_raw_objects(self):
    """
    Helper function to populate the first page of raw objects for this tag.
    This has the side effect of creating the ``_raw_objects`` attribute of
    this object.
    """
    if not hasattr(self, '_raw_objects'):
        result = self._client.get(type(self).api_endpoint, model=self)

        # I want to cache this to avoid making duplicate requests, but I don't
        # want it in the __init__
        self._raw_objects = result # pylint: disable=attribute-defined-outside-init

    return self._raw_objects
python
Helper function to populate the first page of raw objects for this tag. This has the side effect of creating the ``_raw_objects`` attribute of this object.
[ "Helper", "function", "to", "populate", "the", "first", "page", "of", "raw", "objects", "for", "this", "tag", ".", "This", "has", "the", "side", "effect", "of", "creating", "the", "_raw_objects", "attribute", "of", "this", "object", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/tag.py#L32-L45
train
linode/linode_api4-python
linode_api4/objects/tag.py
Tag.objects
def objects(self):
    """
    Returns a list of objects with this Tag.  This list may contain any
    taggable object type.
    """
    data = self._get_raw_objects()

    return PaginatedList.make_paginated_list(data, self._client, TaggedObjectProxy,
                                             page_url=type(self).api_endpoint.format(**vars(self)))
python
Returns a list of objects with this Tag. This list may contain any taggable object type.
[ "Returns", "a", "list", "of", "objects", "with", "this", "Tag", ".", "This", "list", "may", "contain", "any", "taggable", "object", "type", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/tag.py#L60-L68
train
linode/linode_api4-python
linode_api4/objects/tag.py
TaggedObjectProxy.make_instance
def make_instance(cls, id, client, parent_id=None, json=None):
    """
    Overrides Base's ``make_instance`` to allow dynamic creation of objects
    based on the defined type in the response json.

    :param cls: The class this was called on
    :param id: The id of the instance to create
    :param client: The client to use for this instance
    :param parent_id: The parent id for derived classes
    :param json: The JSON to populate the instance with

    :returns: A new instance of this type, populated with json
    """
    make_cls = CLASS_MAP.get(id) # in this case, ID is coming in as the type

    if make_cls is None:
        # we don't recognize this entity type - do nothing?
        return None

    # discard the envelope
    real_json = json['data']
    real_id = real_json['id']

    # make the real object type
    return Base.make(real_id, client, make_cls, parent_id=None, json=real_json)
python
Overrides Base's ``make_instance`` to allow dynamic creation of objects based on the defined type in the response json. :param cls: The class this was called on :param id: The id of the instance to create :param client: The client to use for this instance :param parent_id: The parent id for derived classes :param json: The JSON to populate the instance with :returns: A new instance of this type, populated with json
[ "Overrides", "Base", "s", "make_instance", "to", "allow", "dynamic", "creation", "of", "objects", "based", "on", "the", "defined", "type", "in", "the", "response", "json", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/tag.py#L89-L113
train
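The proxy above dispatches on the entity type string carried in the id argument via a CLASS_MAP lookup, then unwraps the envelope. A self-contained sketch of that dispatch idea, with all classes and the map hypothetical:

    class LinodeRecord:
        def __init__(self, id, json):
            self.id, self.json = id, json

    class VolumeRecord(LinodeRecord):
        pass

    # maps the "type" discriminator in a tagged-object envelope to a concrete class
    CLASS_MAP = {"linode": LinodeRecord, "volume": VolumeRecord}

    def make_instance(entity_type, json):
        make_cls = CLASS_MAP.get(entity_type)
        if make_cls is None:
            return None              # unrecognized entity type
        real_json = json["data"]     # discard the envelope
        return make_cls(real_json["id"], real_json)

    obj = make_instance("volume", {"type": "volume", "data": {"id": 99, "label": "backups"}})
    print(type(obj).__name__, obj.id)  # VolumeRecord 99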
linode/linode_api4-python
linode_api4/objects/linode.py
Disk.resize
def resize(self, new_size):
    """
    Resizes this disk.  The Linode Instance this disk belongs to must have
    sufficient space available to accommodate the new size, and must be
    offline.

    **NOTE** If resizing a disk down, the filesystem on the disk must still
    fit on the new disk size.  You may need to resize the filesystem on the
    disk first before performing this action.

    :param new_size: The intended new size of the disk, in MB
    :type new_size: int

    :returns: True if the resize was initiated successfully.
    :rtype: bool
    """
    self._client.post('{}/resize'.format(Disk.api_endpoint), model=self, data={"size": new_size})

    return True
python
Resizes this disk. The Linode Instance this disk belongs to must have sufficient space available to accommodate the new size, and must be offline. **NOTE** If resizing a disk down, the filesystem on the disk must still fit on the new disk size. You may need to resize the filesystem on the disk first before performing this action. :param new_size: The intended new size of the disk, in MB :type new_size: int :returns: True if the resize was initiated successfully. :rtype: bool
[ "Resizes", "this", "disk", ".", "The", "Linode", "Instance", "this", "disk", "belongs", "to", "must", "have", "sufficient", "space", "available", "to", "accommodate", "the", "new", "size", "and", "must", "be", "offline", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/linode.py#L98-L116
train
linode/linode_api4-python
linode_api4/objects/linode.py
Config._populate
def _populate(self, json):
    """
    Map devices more nicely while populating.
    """
    from .volume import Volume

    DerivedBase._populate(self, json)

    devices = {}
    for device_index, device in json['devices'].items():
        if not device:
            devices[device_index] = None
            continue

        dev = None
        if 'disk_id' in device and device['disk_id']: # this is a disk
            dev = Disk.make_instance(device['disk_id'], self._client,
                    parent_id=self.linode_id)
        else:
            dev = Volume.make_instance(device['volume_id'], self._client,
                    parent_id=self.linode_id)
        devices[device_index] = dev

    self._set('devices', MappedObject(**devices))
python
Map devices more nicely while populating.
[ "Map", "devices", "more", "nicely", "while", "populating", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/linode.py#L172-L195
train
linode/linode_api4-python
linode_api4/objects/linode.py
Instance.ips
def ips(self):
    """
    The ips related collection is not normalized like the others, so we have
    to make an ad-hoc object to return for its response
    """
    if not hasattr(self, '_ips'):
        result = self._client.get("{}/ips".format(Instance.api_endpoint), model=self)

        if not "ipv4" in result:
            raise UnexpectedResponseError('Unexpected response loading IPs', json=result)

        v4pub = []
        for c in result['ipv4']['public']:
            i = IPAddress(self._client, c['address'], c)
            v4pub.append(i)

        v4pri = []
        for c in result['ipv4']['private']:
            i = IPAddress(self._client, c['address'], c)
            v4pri.append(i)

        shared_ips = []
        for c in result['ipv4']['shared']:
            i = IPAddress(self._client, c['address'], c)
            shared_ips.append(i)

        slaac = IPAddress(self._client, result['ipv6']['slaac']['address'],
                          result['ipv6']['slaac'])
        link_local = IPAddress(self._client, result['ipv6']['link_local']['address'],
                               result['ipv6']['link_local'])

        pools = []
        for p in result['ipv6']['global']:
            pools.append(IPv6Pool(self._client, p['range']))

        ips = MappedObject(**{
            "ipv4": {
                "public": v4pub,
                "private": v4pri,
                "shared": shared_ips,
            },
            "ipv6": {
                "slaac": slaac,
                "link_local": link_local,
                "pools": pools,
            },
        })

        self._set('_ips', ips)

    return self._ips
python
The ips related collection is not normalized like the others, so we have to make an ad-hoc object to return for its response
[ "The", "ips", "related", "collection", "is", "not", "normalized", "like", "the", "others", "so", "we", "have", "to", "make", "an", "ad", "-", "hoc", "object", "to", "return", "for", "its", "response" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/linode.py#L222-L272
train
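Based on the MappedObject built above, the cached value groups addresses under ipv4 and ipv6 attributes. A hedged usage sketch; the client, instance ID, property-style access, and IPAddress.address attribute are assumptions drawn from the code shown:

    from linode_api4 import LinodeClient, Instance

    client = LinodeClient("my-personal-access-token")   # hypothetical token
    instance = client.load(Instance, 67890)             # hypothetical instance ID

    ips = instance.ips
    for addr in ips.ipv4.public:
        print(addr.address)

    print(ips.ipv6.slaac.address)
    print(len(ips.ipv6.pools))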
linode/linode_api4-python
linode_api4/objects/linode.py
Instance.available_backups
def available_backups(self):
    """
    The backups response contains what backups are available to be restored.
    """
    if not hasattr(self, '_avail_backups'):
        result = self._client.get("{}/backups".format(Instance.api_endpoint), model=self)

        if not 'automatic' in result:
            raise UnexpectedResponseError('Unexpected response loading available backups!',
                                          json=result)

        automatic = []
        for a in result['automatic']:
            cur = Backup(self._client, a['id'], self.id, a)
            automatic.append(cur)

        snap = None
        if result['snapshot']['current']:
            snap = Backup(self._client, result['snapshot']['current']['id'], self.id,
                          result['snapshot']['current'])

        psnap = None
        if result['snapshot']['in_progress']:
            psnap = Backup(self._client, result['snapshot']['in_progress']['id'], self.id,
                           result['snapshot']['in_progress'])

        self._set('_avail_backups', MappedObject(**{
            "automatic": automatic,
            "snapshot": {
                "current": snap,
                "in_progress": psnap,
            }
        }))

    return self._avail_backups
python
The backups response contains what backups are available to be restored.
[ "The", "backups", "response", "contains", "what", "backups", "are", "available", "to", "be", "restored", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/linode.py#L275-L308
train
linode/linode_api4-python
linode_api4/objects/linode.py
Instance.invalidate
def invalidate(self):
    """ Clear out cached properties """
    if hasattr(self, '_avail_backups'):
        del self._avail_backups
    if hasattr(self, '_ips'):
        del self._ips

    Base.invalidate(self)
python
Clear out cached properties
[ "Clear", "out", "cached", "properties" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/linode.py#L321-L328
train
linode/linode_api4-python
linode_api4/objects/linode.py
Instance.config_create
def config_create(self, kernel=None, label=None, devices=[], disks=[],
        volumes=[], **kwargs):
    """
    Creates a Linode Config with the given attributes.

    :param kernel: The kernel to boot with.
    :param label: The config label
    :param disks: The list of disks, starting at sda, to map to this config.
    :param volumes: The volumes, starting after the last disk, to map to this
                    config
    :param devices: A list of devices to assign to this config, in device
                    index order.  Values must be of type Disk or Volume. If this is
                    given, you may not include disks or volumes.
    :param **kwargs: Any other arguments accepted by the api.

    :returns: A new Linode Config
    """
    from .volume import Volume

    hypervisor_prefix = 'sd' if self.hypervisor == 'kvm' else 'xvd'
    device_names = [hypervisor_prefix + string.ascii_lowercase[i] for i in range(0, 8)]
    device_map = {device_names[i]: None for i in range(0, len(device_names))}

    if devices and (disks or volumes):
        raise ValueError('You may not call config_create with "devices" and '
                         'either of "disks" or "volumes" specified!')

    if not devices:
        if not isinstance(disks, list):
            disks = [disks]
        if not isinstance(volumes, list):
            volumes = [volumes]

        devices = []

        for d in disks:
            if d is None:
                devices.append(None)
            elif isinstance(d, Disk):
                devices.append(d)
            else:
                devices.append(Disk(self._client, int(d), self.id))

        for v in volumes:
            if v is None:
                devices.append(None)
            elif isinstance(v, Volume):
                devices.append(v)
            else:
                devices.append(Volume(self._client, int(v)))

    if not devices:
        raise ValueError('Must include at least one disk or volume!')

    for i, d in enumerate(devices):
        if d is None:
            pass
        elif isinstance(d, Disk):
            device_map[device_names[i]] = {'disk_id': d.id }
        elif isinstance(d, Volume):
            device_map[device_names[i]] = {'volume_id': d.id }
        else:
            raise TypeError('Disk or Volume expected!')

    params = {
        'kernel': kernel.id if issubclass(type(kernel), Base) else kernel,
        'label': label if label else "{}_config_{}".format(self.label, len(self.configs)),
        'devices': device_map,
    }
    params.update(kwargs)

    result = self._client.post("{}/configs".format(Instance.api_endpoint), model=self, data=params)
    self.invalidate()

    if not 'id' in result:
        raise UnexpectedResponseError('Unexpected response creating config!', json=result)

    c = Config(self._client, result['id'], self.id, result)
    return c
python
Creates a Linode Config with the given attributes. :param kernel: The kernel to boot with. :param label: The config label :param disks: The list of disks, starting at sda, to map to this config. :param volumes: The volumes, starting after the last disk, to map to this config :param devices: A list of devices to assign to this config, in device index order. Values must be of type Disk or Volume. If this is given, you may not include disks or volumes. :param **kwargs: Any other arguments accepted by the api. :returns: A new Linode Config
[ "Creates", "a", "Linode", "Config", "with", "the", "given", "attributes", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/linode.py#L378-L456
train
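A hedged sketch of calling config_create above: device slots are filled in order (sda through sdh under KVM), disks first and then volumes, so two disk IDs and one volume map to sda, sdb, and sdc. Plain integer IDs are accepted per the int(d)/int(v) branches; the client, IDs, and kernel slug below are assumptions:

    from linode_api4 import LinodeClient, Instance, Volume

    client = LinodeClient("my-personal-access-token")    # hypothetical token
    instance = client.load(Instance, 67890)              # hypothetical instance ID
    swap_volume = client.load(Volume, 12345)             # hypothetical volume ID

    # two disks (sda, sdb) followed by one volume (sdc)
    config = instance.config_create(
        kernel="linode/latest-64bit",                    # kernel id passed as a plain string
        label="boot-config",
        disks=[111, 222],                                # hypothetical disk IDs
        volumes=[swap_volume],
    )
    print(config.id)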
linode/linode_api4-python
linode_api4/objects/linode.py
Instance.enable_backups
def enable_backups(self):
    """
    Enable Backups for this Instance.  When enabled, we will automatically
    backup your Instance's data so that it can be restored at a later date.
    For more information on Instance's Backups service and pricing, see our
    `Backups Page`_

    .. _Backups Page: https://www.linode.com/backups
    """
    self._client.post("{}/backups/enable".format(Instance.api_endpoint), model=self)
    self.invalidate()
    return True
python
Enable Backups for this Instance. When enabled, we will automatically backup your Instance's data so that it can be restored at a later date. For more information on Instance's Backups service and pricing, see our `Backups Page`_ .. _Backups Page: https://www.linode.com/backups
[ "Enable", "Backups", "for", "this", "Instance", ".", "When", "enabled", "we", "will", "automatically", "backup", "your", "Instance", "s", "data", "so", "that", "it", "can", "be", "restored", "at", "a", "later", "date", ".", "For", "more", "information", "on", "Instance", "s", "Backups", "service", "and", "pricing", "see", "our", "Backups", "Page", "_" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/linode.py#L502-L513
train
linode/linode_api4-python
linode_api4/objects/linode.py
Instance.mutate
def mutate(self):
    """
    Upgrades this Instance to the latest generation type
    """
    self._client.post('{}/mutate'.format(Instance.api_endpoint), model=self)
    return True
python
Upgrades this Instance to the latest generation type
[ "Upgrades", "this", "Instance", "to", "the", "latest", "generation", "type" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/linode.py#L635-L641
train
linode/linode_api4-python
linode_api4/objects/linode.py
Instance.initiate_migration
def initiate_migration(self):
    """
    Initiates a pending migration that is already scheduled for this Linode
    Instance
    """
    self._client.post('{}/migrate'.format(Instance.api_endpoint), model=self)
python
Initiates a pending migration that is already scheduled for this Linode Instance
[ "Initiates", "a", "pending", "migration", "that", "is", "already", "scheduled", "for", "this", "Linode", "Instance" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/linode.py#L643-L648
train
linode/linode_api4-python
linode_api4/objects/linode.py
Instance.clone
def clone(self, to_linode=None, region=None, service=None, configs=[], disks=[],
        label=None, group=None, with_backups=None):
    """
    Clones this linode into a new linode or into a new linode in the given region
    """
    if to_linode and region:
        raise ValueError('You may only specify one of "to_linode" and "region"')

    if region and not service:
        raise ValueError('Specifying a region requires a "service" as well')

    if not isinstance(configs, list) and not isinstance(configs, PaginatedList):
        configs = [configs]
    if not isinstance(disks, list) and not isinstance(disks, PaginatedList):
        disks = [disks]

    cids = [ c.id if issubclass(type(c), Base) else c for c in configs ]
    dids = [ d.id if issubclass(type(d), Base) else d for d in disks ]

    params = {
        "linode_id": to_linode.id if issubclass(type(to_linode), Base) else to_linode,
        "region": region.id if issubclass(type(region), Base) else region,
        "type": service.id if issubclass(type(service), Base) else service,
        "configs": cids if cids else None,
        "disks": dids if dids else None,
        "label": label,
        "group": group,
        "with_backups": with_backups,
    }

    result = self._client.post('{}/clone'.format(Instance.api_endpoint), model=self, data=params)

    if not 'id' in result:
        raise UnexpectedResponseError('Unexpected response cloning Instance!', json=result)

    l = Instance(self._client, result['id'], result)
    return l
python
Clones this linode into a new linode or into a new linode in the given region
[ "Clones", "this", "linode", "into", "a", "new", "linode", "or", "into", "a", "new", "linode", "in", "the", "given", "region" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/linode.py#L650-L684
train
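Per the validation at the top of clone, to_linode and region are mutually exclusive, and a region clone also needs a service (the plan type). A hedged sketch; the client, instance ID, and region/plan slugs are assumptions:

    from linode_api4 import LinodeClient, Instance

    client = LinodeClient("my-personal-access-token")   # hypothetical token
    instance = client.load(Instance, 67890)             # hypothetical instance ID

    # clone into a brand-new linode in another region; the plan slug is passed as "service"
    new_instance = instance.clone(
        region="us-east",                                # hypothetical region id
        service="g6-standard-2",                         # hypothetical type/plan id
        label="cloned-web-01",
    )
    print(new_instance.id)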
linode/linode_api4-python
linode_api4/objects/linode.py
Instance.stats
def stats(self):
    """
    Returns the JSON stats for this Instance
    """
    # TODO - this would be nicer if we formatted the stats
    return self._client.get('{}/stats'.format(Instance.api_endpoint), model=self)
python
Returns the JSON stats for this Instance
[ "Returns", "the", "JSON", "stats", "for", "this", "Instance" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/linode.py#L687-L692
train
linode/linode_api4-python
linode_api4/objects/linode.py
Instance.stats_for
def stats_for(self, dt):
    """
    Returns stats for the month containing the given datetime
    """
    # TODO - this would be nicer if we formatted the stats
    if not isinstance(dt, datetime):
        raise TypeError('stats_for requires a datetime object!')

    return self._client.get('{}/stats/'.format(dt.strftime('%Y/%m')))
python
Returns stats for the month containing the given datetime
[ "Returns", "stats", "for", "the", "month", "containing", "the", "given", "datetime" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/linode.py#L694-L701
train
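stats_for above requires a datetime and only uses its year and month; note that the URL is built from dt.strftime alone, without the instance endpoint, which looks like it may be an oversight in the source. A small sketch of the date handling:

    from datetime import datetime

    when = datetime(2018, 6, 15)
    print(when.strftime('%Y/%m'))   # "2018/06" - the fragment interpolated into the stats URL

    # with an Instance loaded as in the earlier sketches:
    # stats = instance.stats_for(when)
    # instance.stats_for("2018-06") would raise TypeError per the isinstance check above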
linode/linode_api4-python
linode_api4/objects/linode.py
StackScript._populate
def _populate(self, json):
    """
    Override the populate method to map user_defined_fields to
    fancy values
    """
    Base._populate(self, json)

    mapped_udfs = []
    for udf in self.user_defined_fields:
        t = UserDefinedFieldType.text
        choices = None
        if hasattr(udf, 'oneof'):
            t = UserDefinedFieldType.select_one
            choices = udf.oneof.split(',')
        elif hasattr(udf, 'manyof'):
            t = UserDefinedFieldType.select_many
            choices = udf.manyof.split(',')

        mapped_udfs.append(UserDefinedField(udf.name,
                           udf.label if hasattr(udf, 'label') else None,
                           udf.example if hasattr(udf, 'example') else None,
                           t, choices=choices))

    self._set('user_defined_fields', mapped_udfs)
    ndist = [ Image(self._client, d) for d in self.images ]
    self._set('images', ndist)
python
Override the populate method to map user_defined_fields to fancy values
[ "Override", "the", "populate", "method", "to", "map", "user_defined_fields", "to", "fancy", "values" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/linode.py#L738-L763
train
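The _populate override above decides a field's type from which attributes the API returned: oneof becomes a single-select with comma-separated choices, manyof a multi-select, and anything else free text. A self-contained sketch of that decision with a made-up field payload:

    udf = {"name": "web_server", "label": "Web server", "oneof": "nginx,apache,caddy"}

    if "oneof" in udf:
        field_type, choices = "select_one", udf["oneof"].split(",")
    elif "manyof" in udf:
        field_type, choices = "select_many", udf["manyof"].split(",")
    else:
        field_type, choices = "text", None

    print(field_type, choices)  # select_one ['nginx', 'apache', 'caddy']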
linode/linode_api4-python
linode_api4/objects/account.py
InvoiceItem._populate
def _populate(self, json):
    """
    Allows population of "from_date" from the returned "from" attribute which
    is a reserved word in python.  Also populates "to_date" to be complete.
    """
    super(InvoiceItem, self)._populate(json)

    self.from_date = datetime.strptime(json['from'], DATE_FORMAT)
    self.to_date = datetime.strptime(json['to'], DATE_FORMAT)
python
Allows population of "from_date" from the returned "from" attribute which is a reserved word in python. Also populates "to_date" to be complete.
[ "Allows", "population", "of", "from_date", "from", "the", "returned", "from", "attribute", "which", "is", "a", "reserved", "word", "in", "python", ".", "Also", "populates", "to_date", "to", "be", "complete", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/account.py#L128-L136
train
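The rename in _populate above is needed because "from" is a Python keyword and cannot be used as a normal attribute name. A small sketch of the parsing step; the DATE_FORMAT value below is an assumption, the real constant lives in the library:

    from datetime import datetime

    DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"   # assumed format

    payload = {"from": "2018-01-01T00:00:00", "to": "2018-01-31T23:59:59"}
    from_date = datetime.strptime(payload["from"], DATE_FORMAT)
    to_date = datetime.strptime(payload["to"], DATE_FORMAT)
    print(from_date, to_date)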
linode/linode_api4-python
linode_api4/objects/account.py
OAuthClient.reset_secret
def reset_secret(self):
    """
    Resets the client secret for this client.
    """
    result = self._client.post("{}/reset_secret".format(OAuthClient.api_endpoint), model=self)

    if not 'id' in result:
        raise UnexpectedResponseError('Unexpected response when resetting secret!', json=result)

    self._populate(result)
    return self.secret
python
Resets the client secret for this client.
[ "Resets", "the", "client", "secret", "for", "this", "client", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/account.py#L163-L173
train
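A short usage sketch: the regenerated secret is only returned by the API at reset (or creation) time, so capture the returned value right away. The top-level OAuthClient import, the token, and the client ID are assumptions/placeholders.

from linode_api4 import LinodeClient, OAuthClient

client = LinodeClient("my-api-token")                 # placeholder token
oauth_client = client.load(OAuthClient, "abc123")     # placeholder client ID

new_secret = oauth_client.reset_secret()
print("store this now - it will not be shown again:", new_secret)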
linode/linode_api4-python
linode_api4/objects/account.py
OAuthClient.thumbnail
def thumbnail(self, dump_to=None): """ This returns binary data that represents a 128x128 image. If dump_to is given, attempts to write the image to a file at the given location. """ headers = { "Authorization": "token {}".format(self._client.token) } result = requests.get('{}/{}/thumbnail'.format(self._client.base_url, OAuthClient.api_endpoint.format(id=self.id)), headers=headers) if not result.status_code == 200: raise ApiError('No thumbnail found for OAuthClient {}'.format(self.id)) if dump_to: with open(dump_to, 'wb+') as f: f.write(result.content) return result.content
python
def thumbnail(self, dump_to=None): """ This returns binary data that represents a 128x128 image. If dump_to is given, attempts to write the image to a file at the given location. """ headers = { "Authorization": "token {}".format(self._client.token) } result = requests.get('{}/{}/thumbnail'.format(self._client.base_url, OAuthClient.api_endpoint.format(id=self.id)), headers=headers) if not result.status_code == 200: raise ApiError('No thumbnail found for OAuthClient {}'.format(self.id)) if dump_to: with open(dump_to, 'wb+') as f: f.write(result.content) return result.content
[ "def", "thumbnail", "(", "self", ",", "dump_to", "=", "None", ")", ":", "headers", "=", "{", "\"Authorization\"", ":", "\"token {}\"", ".", "format", "(", "self", ".", "_client", ".", "token", ")", "}", "result", "=", "requests", ".", "get", "(", "'{}/{}/thumbnail'", ".", "format", "(", "self", ".", "_client", ".", "base_url", ",", "OAuthClient", ".", "api_endpoint", ".", "format", "(", "id", "=", "self", ".", "id", ")", ")", ",", "headers", "=", "headers", ")", "if", "not", "result", ".", "status_code", "==", "200", ":", "raise", "ApiError", "(", "'No thumbnail found for OAuthClient {}'", ".", "format", "(", "self", ".", "id", ")", ")", "if", "dump_to", ":", "with", "open", "(", "dump_to", ",", "'wb+'", ")", "as", "f", ":", "f", ".", "write", "(", "result", ".", "content", ")", "return", "result", ".", "content" ]
This returns binary data that represents a 128x128 image. If dump_to is given, attempts to write the image to a file at the given location.
[ "This", "returns", "binary", "data", "that", "represents", "a", "128x128", "image", ".", "If", "dump_to", "is", "given", "attempts", "to", "write", "the", "image", "to", "a", "file", "at", "the", "given", "location", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/account.py#L175-L195
train
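Fetching the thumbnail with dump_to writes the PNG to disk and still returns the raw bytes, per the method above. Token, client ID, and output path are placeholders.

from linode_api4 import LinodeClient, OAuthClient

client = LinodeClient("my-api-token")
oauth_client = client.load(OAuthClient, "abc123")

png_bytes = oauth_client.thumbnail(dump_to="client-thumb.png")
print(len(png_bytes), "bytes, also written to client-thumb.png")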
linode/linode_api4-python
linode_api4/objects/account.py
OAuthClient.set_thumbnail
def set_thumbnail(self, thumbnail): """ Sets the thumbnail for this OAuth Client. If thumbnail is bytes, uploads it as a png. Otherwise, assumes thumbnail is a path to the thumbnail and reads it in as bytes before uploading. """ headers = { "Authorization": "token {}".format(self._client.token), "Content-type": "image/png", } # TODO this check needs to be smarter - python2 doesn't do it right if not isinstance(thumbnail, bytes): with open(thumbnail, 'rb') as f: thumbnail = f.read() result = requests.put('{}/{}/thumbnail'.format(self._client.base_url, OAuthClient.api_endpoint.format(id=self.id)), headers=headers, data=thumbnail) if not result.status_code == 200: errors = [] j = result.json() if 'errors' in j: errors = [ e['reason'] for e in j['errors'] ] raise ApiError('{}: {}'.format(result.status_code, errors), json=j) return True
python
def set_thumbnail(self, thumbnail): """ Sets the thumbnail for this OAuth Client. If thumbnail is bytes, uploads it as a png. Otherwise, assumes thumbnail is a path to the thumbnail and reads it in as bytes before uploading. """ headers = { "Authorization": "token {}".format(self._client.token), "Content-type": "image/png", } # TODO this check needs to be smarter - python2 doesn't do it right if not isinstance(thumbnail, bytes): with open(thumbnail, 'rb') as f: thumbnail = f.read() result = requests.put('{}/{}/thumbnail'.format(self._client.base_url, OAuthClient.api_endpoint.format(id=self.id)), headers=headers, data=thumbnail) if not result.status_code == 200: errors = [] j = result.json() if 'errors' in j: errors = [ e['reason'] for e in j['errors'] ] raise ApiError('{}: {}'.format(result.status_code, errors), json=j) return True
[ "def", "set_thumbnail", "(", "self", ",", "thumbnail", ")", ":", "headers", "=", "{", "\"Authorization\"", ":", "\"token {}\"", ".", "format", "(", "self", ".", "_client", ".", "token", ")", ",", "\"Content-type\"", ":", "\"image/png\"", ",", "}", "# TODO this check needs to be smarter - python2 doesn't do it right", "if", "not", "isinstance", "(", "thumbnail", ",", "bytes", ")", ":", "with", "open", "(", "thumbnail", ",", "'rb'", ")", "as", "f", ":", "thumbnail", "=", "f", ".", "read", "(", ")", "result", "=", "requests", ".", "put", "(", "'{}/{}/thumbnail'", ".", "format", "(", "self", ".", "_client", ".", "base_url", ",", "OAuthClient", ".", "api_endpoint", ".", "format", "(", "id", "=", "self", ".", "id", ")", ")", ",", "headers", "=", "headers", ",", "data", "=", "thumbnail", ")", "if", "not", "result", ".", "status_code", "==", "200", ":", "errors", "=", "[", "]", "j", "=", "result", ".", "json", "(", ")", "if", "'errors'", "in", "j", ":", "errors", "=", "[", "e", "[", "'reason'", "]", "for", "e", "in", "j", "[", "'errors'", "]", "]", "raise", "ApiError", "(", "'{}: {}'", ".", "format", "(", "result", ".", "status_code", ",", "errors", ")", ",", "json", "=", "j", ")", "return", "True" ]
Sets the thumbnail for this OAuth Client. If thumbnail is bytes, uploads it as a png. Otherwise, assumes thumbnail is a path to the thumbnail and reads it in as bytes before uploading.
[ "Sets", "the", "thumbnail", "for", "this", "OAuth", "Client", ".", "If", "thumbnail", "is", "bytes", "uploads", "it", "as", "a", "png", ".", "Otherwise", "assumes", "thumbnail", "is", "a", "path", "to", "the", "thumbnail", "and", "reads", "it", "in", "as", "bytes", "before", "uploading", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/account.py#L197-L224
train
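set_thumbnail accepts either raw PNG bytes or a path to the file, as the type check above shows. Both forms, with placeholder paths and IDs.

from linode_api4 import LinodeClient, OAuthClient

client = LinodeClient("my-api-token")
oauth_client = client.load(OAuthClient, "abc123")

oauth_client.set_thumbnail("logo.png")       # path form - the file is read for you

with open("logo.png", "rb") as f:            # bytes form
    oauth_client.set_thumbnail(f.read())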
linode/linode_api4-python
linode_api4/objects/account.py
User.grants
def grants(self): """ Retrieves the grants for this user. If the user is unrestricted, this will result in an ApiError. This is smart, and will only fetch from the api once unless the object is invalidated. :returns: The grants for this user. :rtype: linode.objects.account.UserGrants """ from linode_api4.objects.account import UserGrants if not hasattr(self, '_grants'): resp = self._client.get(UserGrants.api_endpoint.format(username=self.username)) grants = UserGrants(self._client, self.username, resp) self._set('_grants', grants) return self._grants
python
def grants(self): """ Retrieves the grants for this user. If the user is unrestricted, this will result in an ApiError. This is smart, and will only fetch from the api once unless the object is invalidated. :returns: The grants for this user. :rtype: linode.objects.account.UserGrants """ from linode_api4.objects.account import UserGrants if not hasattr(self, '_grants'): resp = self._client.get(UserGrants.api_endpoint.format(username=self.username)) grants = UserGrants(self._client, self.username, resp) self._set('_grants', grants) return self._grants
[ "def", "grants", "(", "self", ")", ":", "from", "linode_api4", ".", "objects", ".", "account", "import", "UserGrants", "if", "not", "hasattr", "(", "self", ",", "'_grants'", ")", ":", "resp", "=", "self", ".", "_client", ".", "get", "(", "UserGrants", ".", "api_endpoint", ".", "format", "(", "username", "=", "self", ".", "username", ")", ")", "grants", "=", "UserGrants", "(", "self", ".", "_client", ",", "self", ".", "username", ",", "resp", ")", "self", ".", "_set", "(", "'_grants'", ",", "grants", ")", "return", "self", ".", "_grants" ]
Retrieves the grants for this user. If the user is unrestricted, this will result in an ApiError. This is smart, and will only fetch from the api once unless the object is invalidated. :returns: The grants for this user. :rtype: linode.objects.account.UserGrants
[ "Retrieves", "the", "grants", "for", "this", "user", ".", "If", "the", "user", "is", "unrestricted", "this", "will", "result", "in", "an", "ApiError", ".", "This", "is", "smart", "and", "will", "only", "fetch", "from", "the", "api", "once", "unless", "the", "object", "is", "invalidated", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/account.py#L248-L264
train
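Since unrestricted users make this property raise an ApiError (per the docstring), wrapping the access is the simplest pattern. The client.account.users() listing and the ApiError import path are assumptions; the token is a placeholder.

from linode_api4 import LinodeClient, ApiError

client = LinodeClient("my-api-token")

for user in client.account.users():          # assumed listing call
    try:
        grants = user.grants                 # fetched once, then cached
        print(user.username, "is restricted; grants loaded")
    except ApiError:
        print(user.username, "is unrestricted; no grants object")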
linode/linode_api4-python
linode_api4/objects/base.py
Base.save
def save(self): """ Send this object's mutable values to the server in a PUT request """ resp = self._client.put(type(self).api_endpoint, model=self, data=self._serialize()) if 'error' in resp: return False return True
python
def save(self): """ Send this object's mutable values to the server in a PUT request """ resp = self._client.put(type(self).api_endpoint, model=self, data=self._serialize()) if 'error' in resp: return False return True
[ "def", "save", "(", "self", ")", ":", "resp", "=", "self", ".", "_client", ".", "put", "(", "type", "(", "self", ")", ".", "api_endpoint", ",", "model", "=", "self", ",", "data", "=", "self", ".", "_serialize", "(", ")", ")", "if", "'error'", "in", "resp", ":", "return", "False", "return", "True" ]
Send this object's mutable values to the server in a PUT request
[ "Send", "this", "object", "s", "mutable", "values", "to", "the", "server", "in", "a", "PUT", "request" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/base.py#L150-L159
train
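save() sends every mutable property in one PUT, so the usual flow is to stage changes on the object and then call it once. A sketch with a placeholder token and ID, assuming the top-level Volume export.

from linode_api4 import LinodeClient, Volume

client = LinodeClient("my-api-token")
volume = client.load(Volume, 12345)       # placeholder Volume ID

volume.label = "nightly-backups"          # staged locally until save()
if not volume.save():                     # PUT happens here
    raise RuntimeError("update was rejected by the API")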
linode/linode_api4-python
linode_api4/objects/base.py
Base.delete
def delete(self): """ Sends a DELETE request for this object """ resp = self._client.delete(type(self).api_endpoint, model=self) if 'error' in resp: return False self.invalidate() return True
python
def delete(self): """ Sends a DELETE request for this object """ resp = self._client.delete(type(self).api_endpoint, model=self) if 'error' in resp: return False self.invalidate() return True
[ "def", "delete", "(", "self", ")", ":", "resp", "=", "self", ".", "_client", ".", "delete", "(", "type", "(", "self", ")", ".", "api_endpoint", ",", "model", "=", "self", ")", "if", "'error'", "in", "resp", ":", "return", "False", "self", ".", "invalidate", "(", ")", "return", "True" ]
Sends a DELETE request for this object
[ "Sends", "a", "DELETE", "request", "for", "this", "object" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/base.py#L161-L170
train
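delete() issues the DELETE and then invalidates the local object, so it should not be reused afterwards. Placeholder token and ID; the top-level Volume export is assumed.

from linode_api4 import LinodeClient, Volume

client = LinodeClient("my-api-token")
volume = client.load(Volume, 12345)       # placeholder Volume ID

if volume.delete():
    print("deleted; the local object has been invalidated")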
linode/linode_api4-python
linode_api4/objects/base.py
Base.invalidate
def invalidate(self): """ Invalidates all non-identifier Properties this object has locally, causing the next access to re-fetch them from the server """ for key in [k for k in type(self).properties.keys() if not type(self).properties[k].identifier]: self._set(key, None) self._set('_populated', False)
python
def invalidate(self): """ Invalidates all non-identifier Properties this object has locally, causing the next access to re-fetch them from the server """ for key in [k for k in type(self).properties.keys() if not type(self).properties[k].identifier]: self._set(key, None) self._set('_populated', False)
[ "def", "invalidate", "(", "self", ")", ":", "for", "key", "in", "[", "k", "for", "k", "in", "type", "(", "self", ")", ".", "properties", ".", "keys", "(", ")", "if", "not", "type", "(", "self", ")", ".", "properties", "[", "k", "]", ".", "identifier", "]", ":", "self", ".", "_set", "(", "key", ",", "None", ")", "self", ".", "_set", "(", "'_populated'", ",", "False", ")" ]
Invalidates all non-identifier Properties this object has locally, causing the next access to re-fetch them from the server
[ "Invalidates", "all", "non", "-", "identifier", "Properties", "this", "object", "has", "locally", "causing", "the", "next", "access", "to", "re", "-", "fetch", "them", "from", "the", "server" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/base.py#L172-L181
train
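invalidate() drops every cached non-identifier property; per the docstring, the next attribute access re-fetches the object from the server. A sketch with placeholder values, assuming the top-level Volume export and its status field.

from linode_api4 import LinodeClient, Volume

client = LinodeClient("my-api-token")
volume = client.load(Volume, 12345)       # placeholder Volume ID

print(volume.status)      # value from the initial GET
volume.invalidate()       # forget everything but the identifier
print(volume.status)      # lazy-loading triggers a fresh GET here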
linode/linode_api4-python
linode_api4/objects/base.py
Base._serialize
def _serialize(self): """ A helper method to build a dict of all mutable Properties of this object """ result = { a: getattr(self, a) for a in type(self).properties if type(self).properties[a].mutable } for k, v in result.items(): if isinstance(v, Base): result[k] = v.id return result
python
def _serialize(self): """ A helper method to build a dict of all mutable Properties of this object """ result = { a: getattr(self, a) for a in type(self).properties if type(self).properties[a].mutable } for k, v in result.items(): if isinstance(v, Base): result[k] = v.id return result
[ "def", "_serialize", "(", "self", ")", ":", "result", "=", "{", "a", ":", "getattr", "(", "self", ",", "a", ")", "for", "a", "in", "type", "(", "self", ")", ".", "properties", "if", "type", "(", "self", ")", ".", "properties", "[", "a", "]", ".", "mutable", "}", "for", "k", ",", "v", "in", "result", ".", "items", "(", ")", ":", "if", "isinstance", "(", "v", ",", "Base", ")", ":", "result", "[", "k", "]", "=", "v", ".", "id", "return", "result" ]
A helper method to build a dict of all mutable Properties of this object
[ "A", "helper", "method", "to", "build", "a", "dict", "of", "all", "mutable", "Properties", "of", "this", "object" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/base.py#L183-L195
train
linode/linode_api4-python
linode_api4/objects/base.py
Base._api_get
def _api_get(self): """ A helper method to GET this object from the server """ json = self._client.get(type(self).api_endpoint, model=self) self._populate(json)
python
def _api_get(self): """ A helper method to GET this object from the server """ json = self._client.get(type(self).api_endpoint, model=self) self._populate(json)
[ "def", "_api_get", "(", "self", ")", ":", "json", "=", "self", ".", "_client", ".", "get", "(", "type", "(", "self", ")", ".", "api_endpoint", ",", "model", "=", "self", ")", "self", ".", "_populate", "(", "json", ")" ]
A helper method to GET this object from the server
[ "A", "helper", "method", "to", "GET", "this", "object", "from", "the", "server" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/base.py#L197-L202
train
linode/linode_api4-python
linode_api4/objects/base.py
Base._populate
def _populate(self, json): """ A helper method that, given a JSON object representing this object, assigns values based on the properties dict and the attributes of its Properties. """ if not json: return # hide the raw JSON away in case someone needs it self._set('_raw_json', json) for key in json: if key in (k for k in type(self).properties.keys() if not type(self).properties[k].identifier): if type(self).properties[key].relationship \ and not json[key] is None: if isinstance(json[key], list): objs = [] for d in json[key]: if not 'id' in d: continue new_class = type(self).properties[key].relationship obj = new_class.make_instance(d['id'], getattr(self,'_client')) if obj: obj._populate(d) objs.append(obj) self._set(key, objs) else: if isinstance(json[key], dict): related_id = json[key]['id'] else: related_id = json[key] new_class = type(self).properties[key].relationship obj = new_class.make_instance(related_id, getattr(self,'_client')) if obj and isinstance(json[key], dict): obj._populate(json[key]) self._set(key, obj) elif type(self).properties[key].slug_relationship \ and not json[key] is None: # create an object of the expected type with the given slug self._set(key, type(self).properties[key].slug_relationship(self._client, json[key])) elif type(json[key]) is dict: self._set(key, MappedObject(**json[key])) elif type(json[key]) is list: # we're going to use MappedObject's behavior with lists to # expand these, then grab the resulting value to set mapping = MappedObject(_list=json[key]) self._set(key, mapping._list) # pylint: disable=no-member elif type(self).properties[key].is_datetime: try: t = time.strptime(json[key], DATE_FORMAT) self._set(key, datetime.fromtimestamp(time.mktime(t))) except: #TODO - handle this better (or log it?) self._set(key, json[key]) else: self._set(key, json[key]) self._set('_populated', True) self._set('_last_updated', datetime.now())
python
def _populate(self, json): """ A helper method that, given a JSON object representing this object, assigns values based on the properties dict and the attributes of its Properties. """ if not json: return # hide the raw JSON away in case someone needs it self._set('_raw_json', json) for key in json: if key in (k for k in type(self).properties.keys() if not type(self).properties[k].identifier): if type(self).properties[key].relationship \ and not json[key] is None: if isinstance(json[key], list): objs = [] for d in json[key]: if not 'id' in d: continue new_class = type(self).properties[key].relationship obj = new_class.make_instance(d['id'], getattr(self,'_client')) if obj: obj._populate(d) objs.append(obj) self._set(key, objs) else: if isinstance(json[key], dict): related_id = json[key]['id'] else: related_id = json[key] new_class = type(self).properties[key].relationship obj = new_class.make_instance(related_id, getattr(self,'_client')) if obj and isinstance(json[key], dict): obj._populate(json[key]) self._set(key, obj) elif type(self).properties[key].slug_relationship \ and not json[key] is None: # create an object of the expected type with the given slug self._set(key, type(self).properties[key].slug_relationship(self._client, json[key])) elif type(json[key]) is dict: self._set(key, MappedObject(**json[key])) elif type(json[key]) is list: # we're going to use MappedObject's behavior with lists to # expand these, then grab the resulting value to set mapping = MappedObject(_list=json[key]) self._set(key, mapping._list) # pylint: disable=no-member elif type(self).properties[key].is_datetime: try: t = time.strptime(json[key], DATE_FORMAT) self._set(key, datetime.fromtimestamp(time.mktime(t))) except: #TODO - handle this better (or log it?) self._set(key, json[key]) else: self._set(key, json[key]) self._set('_populated', True) self._set('_last_updated', datetime.now())
[ "def", "_populate", "(", "self", ",", "json", ")", ":", "if", "not", "json", ":", "return", "# hide the raw JSON away in case someone needs it", "self", ".", "_set", "(", "'_raw_json'", ",", "json", ")", "for", "key", "in", "json", ":", "if", "key", "in", "(", "k", "for", "k", "in", "type", "(", "self", ")", ".", "properties", ".", "keys", "(", ")", "if", "not", "type", "(", "self", ")", ".", "properties", "[", "k", "]", ".", "identifier", ")", ":", "if", "type", "(", "self", ")", ".", "properties", "[", "key", "]", ".", "relationship", "and", "not", "json", "[", "key", "]", "is", "None", ":", "if", "isinstance", "(", "json", "[", "key", "]", ",", "list", ")", ":", "objs", "=", "[", "]", "for", "d", "in", "json", "[", "key", "]", ":", "if", "not", "'id'", "in", "d", ":", "continue", "new_class", "=", "type", "(", "self", ")", ".", "properties", "[", "key", "]", ".", "relationship", "obj", "=", "new_class", ".", "make_instance", "(", "d", "[", "'id'", "]", ",", "getattr", "(", "self", ",", "'_client'", ")", ")", "if", "obj", ":", "obj", ".", "_populate", "(", "d", ")", "objs", ".", "append", "(", "obj", ")", "self", ".", "_set", "(", "key", ",", "objs", ")", "else", ":", "if", "isinstance", "(", "json", "[", "key", "]", ",", "dict", ")", ":", "related_id", "=", "json", "[", "key", "]", "[", "'id'", "]", "else", ":", "related_id", "=", "json", "[", "key", "]", "new_class", "=", "type", "(", "self", ")", ".", "properties", "[", "key", "]", ".", "relationship", "obj", "=", "new_class", ".", "make_instance", "(", "related_id", ",", "getattr", "(", "self", ",", "'_client'", ")", ")", "if", "obj", "and", "isinstance", "(", "json", "[", "key", "]", ",", "dict", ")", ":", "obj", ".", "_populate", "(", "json", "[", "key", "]", ")", "self", ".", "_set", "(", "key", ",", "obj", ")", "elif", "type", "(", "self", ")", ".", "properties", "[", "key", "]", ".", "slug_relationship", "and", "not", "json", "[", "key", "]", "is", "None", ":", "# create an object of the expected type with the given slug", "self", ".", "_set", "(", "key", ",", "type", "(", "self", ")", ".", "properties", "[", "key", "]", ".", "slug_relationship", "(", "self", ".", "_client", ",", "json", "[", "key", "]", ")", ")", "elif", "type", "(", "json", "[", "key", "]", ")", "is", "dict", ":", "self", ".", "_set", "(", "key", ",", "MappedObject", "(", "*", "*", "json", "[", "key", "]", ")", ")", "elif", "type", "(", "json", "[", "key", "]", ")", "is", "list", ":", "# we're going to use MappedObject's behavior with lists to", "# expand these, then grab the resulting value to set", "mapping", "=", "MappedObject", "(", "_list", "=", "json", "[", "key", "]", ")", "self", ".", "_set", "(", "key", ",", "mapping", ".", "_list", ")", "# pylint: disable=no-member", "elif", "type", "(", "self", ")", ".", "properties", "[", "key", "]", ".", "is_datetime", ":", "try", ":", "t", "=", "time", ".", "strptime", "(", "json", "[", "key", "]", ",", "DATE_FORMAT", ")", "self", ".", "_set", "(", "key", ",", "datetime", ".", "fromtimestamp", "(", "time", ".", "mktime", "(", "t", ")", ")", ")", "except", ":", "#TODO - handle this better (or log it?)", "self", ".", "_set", "(", "key", ",", "json", "[", "key", "]", ")", "else", ":", "self", ".", "_set", "(", "key", ",", "json", "[", "key", "]", ")", "self", ".", "_set", "(", "'_populated'", ",", "True", ")", "self", ".", "_set", "(", "'_last_updated'", ",", "datetime", ".", "now", "(", ")", ")" ]
A helper method that, given a JSON object representing this object, assigns values based on the properties dict and the attributes of its Properties.
[ "A", "helper", "method", "that", "given", "a", "JSON", "object", "representing", "this", "object", "assigns", "values", "based", "on", "the", "properties", "dict", "and", "the", "attributes", "of", "its", "Properties", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/base.py#L204-L265
train
linode/linode_api4-python
linode_api4/objects/base.py
Base.make
def make(id, client, cls, parent_id=None, json=None): """ Makes an api object based on an id and class. :param id: The id of the object to create :param client: The LinodeClient to give the new object :param cls: The class type to instantiate :param parent_id: The parent id for derived classes :param json: The JSON to use to populate the new class :returns: An instance of cls with the given id """ from .dbase import DerivedBase if issubclass(cls, DerivedBase): return cls(client, id, parent_id, json) else: return cls(client, id, json)
python
def make(id, client, cls, parent_id=None, json=None): """ Makes an api object based on an id and class. :param id: The id of the object to create :param client: The LinodeClient to give the new object :param cls: The class type to instantiate :param parent_id: The parent id for derived classes :param json: The JSON to use to populate the new class :returns: An instance of cls with the given id """ from .dbase import DerivedBase if issubclass(cls, DerivedBase): return cls(client, id, parent_id, json) else: return cls(client, id, json)
[ "def", "make", "(", "id", ",", "client", ",", "cls", ",", "parent_id", "=", "None", ",", "json", "=", "None", ")", ":", "from", ".", "dbase", "import", "DerivedBase", "if", "issubclass", "(", "cls", ",", "DerivedBase", ")", ":", "return", "cls", "(", "client", ",", "id", ",", "parent_id", ",", "json", ")", "else", ":", "return", "cls", "(", "client", ",", "id", ",", "json", ")" ]
Makes an api object based on an id and class. :param id: The id of the object to create :param client: The LinodeClient to give the new object :param cls: The class type to instantiate :param parent_id: The parent id for derived classes :param json: The JSON to use to populate the new class :returns: An instance of cls with the given id
[ "Makes", "an", "api", "object", "based", "on", "an", "id", "and", "class", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/base.py#L283-L300
train
linode/linode_api4-python
linode_api4/objects/base.py
Base.make_instance
def make_instance(cls, id, client, parent_id=None, json=None): """ Makes an instance of the class this is called on and returns it. The intended usage is: instance = Linode.make_instance(123, client, json=response) :param cls: The class this was called on. :param id: The id of the instance to create :param client: The client to use for this instance :param parent_id: The parent id for derived classes :param json: The JSON to populate the instance with :returns: A new instance of this type, populated with json """ return Base.make(id, client, cls, parent_id=parent_id, json=json)
python
def make_instance(cls, id, client, parent_id=None, json=None): """ Makes an instance of the class this is called on and returns it. The intended usage is: instance = Linode.make_instance(123, client, json=response) :param cls: The class this was called on. :param id: The id of the instance to create :param client: The client to use for this instance :param parent_id: The parent id for derived classes :param json: The JSON to populate the instance with :returns: A new instance of this type, populated with json """ return Base.make(id, client, cls, parent_id=parent_id, json=json)
[ "def", "make_instance", "(", "cls", ",", "id", ",", "client", ",", "parent_id", "=", "None", ",", "json", "=", "None", ")", ":", "return", "Base", ".", "make", "(", "id", ",", "client", ",", "cls", ",", "parent_id", "=", "parent_id", ",", "json", "=", "json", ")" ]
Makes an instance of the class this is called on and returns it. The intended usage is: instance = Linode.make_instance(123, client, json=response) :param cls: The class this was called on. :param id: The id of the instance to create :param client: The client to use for this instance :param parent_id: The parent id for derived classes :param json: The JSON to populate the instance with :returns: A new instance of this type, populated with json
[ "Makes", "an", "instance", "of", "the", "class", "this", "is", "called", "on", "and", "returns", "it", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/base.py#L303-L318
train
linode/linode_api4-python
linode_api4/objects/networking.py
IPAddress.to
def to(self, linode): """ This is a helper method for ip-assign, and should not be used outside of that context. It's used to cleanly build an IP Assign request with pretty python syntax. """ from .linode import Instance if not isinstance(linode, Instance): raise ValueError("IP Address can only be assigned to a Linode!") return { "address": self.address, "linode_id": linode.id }
python
def to(self, linode): """ This is a helper method for ip-assign, and should not be used outside of that context. It's used to cleanly build an IP Assign request with pretty python syntax. """ from .linode import Instance if not isinstance(linode, Instance): raise ValueError("IP Address can only be assigned to a Linode!") return { "address": self.address, "linode_id": linode.id }
[ "def", "to", "(", "self", ",", "linode", ")", ":", "from", ".", "linode", "import", "Instance", "if", "not", "isinstance", "(", "linode", ",", "Instance", ")", ":", "raise", "ValueError", "(", "\"IP Address can only be assigned to a Linode!\"", ")", "return", "{", "\"address\"", ":", "self", ".", "address", ",", "\"linode_id\"", ":", "linode", ".", "id", "}" ]
This is a helper method for ip-assign, and should not be used outside of that context. It's used to cleanly build an IP Assign request with pretty python syntax.
[ "This", "is", "a", "helper", "method", "for", "ip", "-", "assign", "and", "should", "not", "be", "used", "outside", "of", "that", "context", ".", "It", "s", "used", "to", "cleanly", "build", "an", "IP", "Assign", "request", "with", "pretty", "python", "syntax", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/networking.py#L49-L58
train
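to() only builds the {"address", "linode_id"} dict that an IP-assign request body expects. A sketch of what it produces; loading an IPAddress by its address string, the top-level imports, and all IDs/addresses are assumptions or placeholders.

from linode_api4 import LinodeClient, Instance, IPAddress

client = LinodeClient("my-api-token")
target = client.load(Instance, 222)              # placeholder Linode ID
ip = client.load(IPAddress, "203.0.113.10")      # placeholder address

payload = ip.to(target)
print(payload)    # {"address": "203.0.113.10", "linode_id": 222}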
linode/linode_api4-python
linode_api4/linode_client.py
ProfileGroup.token_create
def token_create(self, label=None, expiry=None, scopes=None, **kwargs): """ Creates and returns a new Personal Access Token """ if label: kwargs['label'] = label if expiry: if isinstance(expiry, datetime): expiry = datetime.strftime(expiry, "%Y-%m-%dT%H:%M:%S") kwargs['expiry'] = expiry if scopes: kwargs['scopes'] = scopes result = self.client.post('/profile/tokens', data=kwargs) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating Personal Access ' 'Token!', json=result) token = PersonalAccessToken(self.client, result['id'], result) return token
python
def token_create(self, label=None, expiry=None, scopes=None, **kwargs): """ Creates and returns a new Personal Access Token """ if label: kwargs['label'] = label if expiry: if isinstance(expiry, datetime): expiry = datetime.strftime(expiry, "%Y-%m-%dT%H:%M:%S") kwargs['expiry'] = expiry if scopes: kwargs['scopes'] = scopes result = self.client.post('/profile/tokens', data=kwargs) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating Personal Access ' 'Token!', json=result) token = PersonalAccessToken(self.client, result['id'], result) return token
[ "def", "token_create", "(", "self", ",", "label", "=", "None", ",", "expiry", "=", "None", ",", "scopes", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "label", ":", "kwargs", "[", "'label'", "]", "=", "label", "if", "expiry", ":", "if", "isinstance", "(", "expiry", ",", "datetime", ")", ":", "expiry", "=", "datetime", ".", "strftime", "(", "expiry", ",", "\"%Y-%m-%dT%H:%M:%S\"", ")", "kwargs", "[", "'expiry'", "]", "=", "expiry", "if", "scopes", ":", "kwargs", "[", "'scopes'", "]", "=", "scopes", "result", "=", "self", ".", "client", ".", "post", "(", "'/profile/tokens'", ",", "data", "=", "kwargs", ")", "if", "not", "'id'", "in", "result", ":", "raise", "UnexpectedResponseError", "(", "'Unexpected response when creating Personal Access '", "'Token!'", ",", "json", "=", "result", ")", "token", "=", "PersonalAccessToken", "(", "self", ".", "client", ",", "result", "[", "'id'", "]", ",", "result", ")", "return", "token" ]
Creates and returns a new Personal Access Token
[ "Creates", "and", "returns", "a", "new", "Personal", "Access", "Token" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L358-L378
train
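Creating a Personal Access Token: a datetime expiry is formatted for you (see above) and scopes takes an OAuth-style scope string. The client.profile group attribute and the token attribute on the result are assumptions; the token string is a placeholder.

from datetime import datetime, timedelta
from linode_api4 import LinodeClient

client = LinodeClient("my-api-token")             # placeholder token

pat = client.profile.token_create(                # assumed group attribute
    label="ci-deploys",
    expiry=datetime.utcnow() + timedelta(days=30),
    scopes="linodes:read_write",
)
print(pat.token)    # assumed field; the secret is only visible on creation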
linode/linode_api4-python
linode_api4/linode_client.py
ProfileGroup.ssh_key_upload
def ssh_key_upload(self, key, label): """ Uploads a new SSH Public Key to your profile This key can be used in later Linode deployments. :param key: The ssh key, or a path to the ssh key. If a path is provided, the file at the path must exist and be readable or an exception will be thrown. :type key: str :param label: The name to give this key. This is purely aesthetic. :type label: str :returns: The newly uploaded SSH Key :rtype: SSHKey :raises ValueError: If the key provided does not appear to be valid, and does not appear to be a path to a valid key. """ if not key.startswith(SSH_KEY_TYPES): # this might be a file path - look for it path = os.path.expanduser(key) if os.path.isfile(path): with open(path) as f: key = f.read().strip() if not key.startswith(SSH_KEY_TYPES): raise ValueError('Invalid SSH Public Key') params = { 'ssh_key': key, 'label': label, } result = self.client.post('/profile/sshkeys', data=params) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when uploading SSH Key!', json=result) ssh_key = SSHKey(self.client, result['id'], result) return ssh_key
python
def ssh_key_upload(self, key, label): """ Uploads a new SSH Public Key to your profile This key can be used in later Linode deployments. :param key: The ssh key, or a path to the ssh key. If a path is provided, the file at the path must exist and be readable or an exception will be thrown. :type key: str :param label: The name to give this key. This is purely aesthetic. :type label: str :returns: The newly uploaded SSH Key :rtype: SSHKey :raises ValueError: If the key provided does not appear to be valid, and does not appear to be a path to a valid key. """ if not key.startswith(SSH_KEY_TYPES): # this might be a file path - look for it path = os.path.expanduser(key) if os.path.isfile(path): with open(path) as f: key = f.read().strip() if not key.startswith(SSH_KEY_TYPES): raise ValueError('Invalid SSH Public Key') params = { 'ssh_key': key, 'label': label, } result = self.client.post('/profile/sshkeys', data=params) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when uploading SSH Key!', json=result) ssh_key = SSHKey(self.client, result['id'], result) return ssh_key
[ "def", "ssh_key_upload", "(", "self", ",", "key", ",", "label", ")", ":", "if", "not", "key", ".", "startswith", "(", "SSH_KEY_TYPES", ")", ":", "# this might be a file path - look for it", "path", "=", "os", ".", "path", ".", "expanduser", "(", "key", ")", "if", "os", ".", "path", ".", "isfile", "(", "path", ")", ":", "with", "open", "(", "path", ")", "as", "f", ":", "key", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "if", "not", "key", ".", "startswith", "(", "SSH_KEY_TYPES", ")", ":", "raise", "ValueError", "(", "'Invalid SSH Public Key'", ")", "params", "=", "{", "'ssh_key'", ":", "key", ",", "'label'", ":", "label", ",", "}", "result", "=", "self", ".", "client", ".", "post", "(", "'/profile/sshkeys'", ",", "data", "=", "params", ")", "if", "not", "'id'", "in", "result", ":", "raise", "UnexpectedResponseError", "(", "'Unexpected response when uploading SSH Key!'", ",", "json", "=", "result", ")", "ssh_key", "=", "SSHKey", "(", "self", ".", "client", ",", "result", "[", "'id'", "]", ",", "result", ")", "return", "ssh_key" ]
Uploads a new SSH Public Key to your profile This key can be used in later Linode deployments. :param key: The ssh key, or a path to the ssh key. If a path is provided, the file at the path must exist and be readable or an exception will be thrown. :type key: str :param label: The name to give this key. This is purely aesthetic. :type label: str :returns: The newly uploaded SSH Key :rtype: SSHKey :raises ValueError: If the key provided does not appear to be valid, and does not appear to be a path to a valid key.
[ "Uploads", "a", "new", "SSH", "Public", "Key", "to", "your", "profile", "This", "key", "can", "be", "used", "in", "later", "Linode", "deployments", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L392-L430
train
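Uploading a public key: a filesystem path works because the method expands and reads it when the value does not look like key material. The client.profile group attribute is an assumption; the path and label are placeholders.

from linode_api4 import LinodeClient

client = LinodeClient("my-api-token")

key = client.profile.ssh_key_upload("~/.ssh/id_rsa.pub", "work-laptop")
print(key.id, key.label)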
linode/linode_api4-python
linode_api4/linode_client.py
LongviewGroup.client_create
def client_create(self, label=None): """ Creates a new LongviewClient, optionally with a given label. :param label: The label for the new client. If None, a default label based on the new client's ID will be used. :returns: A new LongviewClient :raises ApiError: If a non-200 status code is returned :raises UnexpectedResponseError: If the returned data from the api does not look as expected. """ result = self.client.post('/longview/clients', data={ "label": label }) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating Longview ' 'Client!', json=result) c = LongviewClient(self.client, result['id'], result) return c
python
def client_create(self, label=None): """ Creates a new LongviewClient, optionally with a given label. :param label: The label for the new client. If None, a default label based on the new client's ID will be used. :returns: A new LongviewClient :raises ApiError: If a non-200 status code is returned :raises UnexpectedResponseError: If the returned data from the api does not look as expected. """ result = self.client.post('/longview/clients', data={ "label": label }) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating Longview ' 'Client!', json=result) c = LongviewClient(self.client, result['id'], result) return c
[ "def", "client_create", "(", "self", ",", "label", "=", "None", ")", ":", "result", "=", "self", ".", "client", ".", "post", "(", "'/longview/clients'", ",", "data", "=", "{", "\"label\"", ":", "label", "}", ")", "if", "not", "'id'", "in", "result", ":", "raise", "UnexpectedResponseError", "(", "'Unexpected response when creating Longivew '", "'Client!'", ",", "json", "=", "result", ")", "c", "=", "LongviewClient", "(", "self", ".", "client", ",", "result", "[", "'id'", "]", ",", "result", ")", "return", "c" ]
Creates a new LongviewClient, optionally with a given label. :param label: The label for the new client. If None, a default label based on the new client's ID will be used. :returns: A new LongviewClient :raises ApiError: If a non-200 status code is returned :raises UnexpectedResponseError: If the returned data from the api does not look as expected.
[ "Creates", "a", "new", "LongviewClient", "optionally", "with", "a", "given", "label", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L441-L463
train
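Creating a Longview client: the client.longview group attribute and the install_code/api_key fields printed below are assumptions, and the label is a placeholder.

from linode_api4 import LinodeClient

client = LinodeClient("my-api-token")

lv = client.longview.client_create(label="web-01")   # assumed group attribute
print(lv.id, lv.label)
print(lv.install_code, lv.api_key)                    # assumed fields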
linode/linode_api4-python
linode_api4/linode_client.py
AccountGroup.events_mark_seen
def events_mark_seen(self, event): """ Marks event as the last event we have seen. If event is an int, it is treated as an event_id, otherwise it should be an event object whose id will be used. """ last_seen = event if isinstance(event, int) else event.id self.client.post('{}/seen'.format(Event.api_endpoint), model=Event(self.client, last_seen))
python
def events_mark_seen(self, event): """ Marks event as the last event we have seen. If event is an int, it is treated as an event_id, otherwise it should be an event object whose id will be used. """ last_seen = event if isinstance(event, int) else event.id self.client.post('{}/seen'.format(Event.api_endpoint), model=Event(self.client, last_seen))
[ "def", "events_mark_seen", "(", "self", ",", "event", ")", ":", "last_seen", "=", "event", "if", "isinstance", "(", "event", ",", "int", ")", "else", "event", ".", "id", "self", ".", "client", ".", "post", "(", "'{}/seen'", ".", "format", "(", "Event", ".", "api_endpoint", ")", ",", "model", "=", "Event", "(", "self", ".", "client", ",", "last_seen", ")", ")" ]
Marks event as the last event we have seen. If event is an int, it is treated as an event_id, otherwise it should be an event object whose id will be used.
[ "Marks", "event", "as", "the", "last", "event", "we", "have", "seen", ".", "If", "event", "is", "an", "int", "it", "is", "treated", "as", "an", "event_id", "otherwise", "it", "should", "be", "an", "event", "object", "whose", "id", "will", "be", "used", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L495-L501
train
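Marking the newest event (and everything before it) as seen; an Event object or a bare ID both work, per the isinstance check above. The client.account.events() listing is an assumption.

from linode_api4 import LinodeClient

client = LinodeClient("my-api-token")

latest = next(iter(client.account.events()), None)   # assumed listing call
if latest is not None:
    client.account.events_mark_seen(latest)          # or pass latest.id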
linode/linode_api4-python
linode_api4/linode_client.py
AccountGroup.settings
def settings(self): """ Returns the account settings data for this account. This is not a listing endpoint. """ result = self.client.get('/account/settings') if not 'managed' in result: raise UnexpectedResponseError('Unexpected response when getting account settings!', json=result) s = AccountSettings(self.client, result['managed'], result) return s
python
def settings(self): """ Returns the account settings data for this account. This is not a listing endpoint. """ result = self.client.get('/account/settings') if not 'managed' in result: raise UnexpectedResponseError('Unexpected response when getting account settings!', json=result) s = AccountSettings(self.client, result['managed'], result) return s
[ "def", "settings", "(", "self", ")", ":", "result", "=", "self", ".", "client", ".", "get", "(", "'/account/settings'", ")", "if", "not", "'managed'", "in", "result", ":", "raise", "UnexpectedResponseError", "(", "'Unexpected response when getting account settings!'", ",", "json", "=", "result", ")", "s", "=", "AccountSettings", "(", "self", ".", "client", ",", "result", "[", "'managed'", "]", ",", "result", ")", "return", "s" ]
Returns the account settings data for this account. This is not a listing endpoint.
[ "Resturns", "the", "account", "settings", "data", "for", "this", "acocunt", ".", "This", "is", "not", "a", "listing", "endpoint", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L503-L515
train
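Account settings come back as a single object rather than a paginated list. The client.account group attribute and the network_helper field are assumptions; the token is a placeholder.

from linode_api4 import LinodeClient

client = LinodeClient("my-api-token")

settings = client.account.settings()
print("managed:", settings.managed)
print("network helper:", settings.network_helper)    # assumed field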
linode/linode_api4-python
linode_api4/linode_client.py
AccountGroup.oauth_client_create
def oauth_client_create(self, name, redirect_uri, **kwargs): """ Make a new OAuth Client and return it """ params = { "label": name, "redirect_uri": redirect_uri, } params.update(kwargs) result = self.client.post('/account/oauth-clients', data=params) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating OAuth Client!', json=result) c = OAuthClient(self.client, result['id'], result) return c
python
def oauth_client_create(self, name, redirect_uri, **kwargs): """ Make a new OAuth Client and return it """ params = { "label": name, "redirect_uri": redirect_uri, } params.update(kwargs) result = self.client.post('/account/oauth-clients', data=params) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating OAuth Client!', json=result) c = OAuthClient(self.client, result['id'], result) return c
[ "def", "oauth_client_create", "(", "self", ",", "name", ",", "redirect_uri", ",", "*", "*", "kwargs", ")", ":", "params", "=", "{", "\"label\"", ":", "name", ",", "\"redirect_uri\"", ":", "redirect_uri", ",", "}", "params", ".", "update", "(", "kwargs", ")", "result", "=", "self", ".", "client", ".", "post", "(", "'/account/oauth-clients'", ",", "data", "=", "params", ")", "if", "not", "'id'", "in", "result", ":", "raise", "UnexpectedResponseError", "(", "'Unexpected response when creating OAuth Client!'", ",", "json", "=", "result", ")", "c", "=", "OAuthClient", "(", "self", ".", "client", ",", "result", "[", "'id'", "]", ",", "result", ")", "return", "c" ]
Make a new OAuth Client and return it
[ "Make", "a", "new", "OAuth", "Client", "and", "return", "it" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L535-L552
train
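Registering an OAuth client: as with token creation, the generated secret is only exposed on the creating response. The client.account group attribute is an assumption and the label/redirect URI are placeholders.

from linode_api4 import LinodeClient

client = LinodeClient("my-api-token")

oauth_client = client.account.oauth_client_create(
    "my-dashboard", "https://example.com/oauth/callback"
)
print(oauth_client.id, oauth_client.secret)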
linode/linode_api4-python
linode_api4/linode_client.py
AccountGroup.transfer
def transfer(self): """ Returns a MappedObject containing the account's transfer pool data """ result = self.client.get('/account/transfer') if not 'used' in result: raise UnexpectedResponseError('Unexpected response when getting Transfer Pool!') return MappedObject(**result)
python
def transfer(self): """ Returns a MappedObject containing the account's transfer pool data """ result = self.client.get('/account/transfer') if not 'used' in result: raise UnexpectedResponseError('Unexpected response when getting Transfer Pool!') return MappedObject(**result)
[ "def", "transfer", "(", "self", ")", ":", "result", "=", "self", ".", "client", ".", "get", "(", "'/account/transfer'", ")", "if", "not", "'used'", "in", "result", ":", "raise", "UnexpectedResponseError", "(", "'Unexpected response when getting Transfer Pool!'", ")", "return", "MappedObject", "(", "*", "*", "result", ")" ]
Returns a MappedObject containing the account's transfer pool data
[ "Returns", "a", "MappedObject", "containing", "the", "account", "s", "transfer", "pool", "data" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L560-L569
train
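The transfer pool is returned as a plain MappedObject; "used" is guaranteed by the check above, while the client.account group attribute and the quota field are assumptions.

from linode_api4 import LinodeClient

client = LinodeClient("my-api-token")

pool = client.account.transfer()
print("GB used this cycle:", pool.used)
print("GB in pool:", pool.quota)      # assumed field name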
linode/linode_api4-python
linode_api4/linode_client.py
NetworkingGroup.ip_allocate
def ip_allocate(self, linode, public=True): """ Allocates an IP to an Instance you own. Additional IPs must be requested by opening a support ticket first. :param linode: The Instance to allocate the new IP for. :type linode: Instance or int :param public: If True, allocate a public IP address. Defaults to True. :type public: bool :returns: The new IPAddress :rtype: IPAddress """ result = self.client.post('/networking/ipv4/', data={ "linode_id": linode.id if isinstance(linode, Base) else linode, "type": "ipv4", "public": public, }) if not 'address' in result: raise UnexpectedResponseError('Unexpected response when adding IPv4 address!', json=result) ip = IPAddress(self.client, result['address'], result) return ip
python
def ip_allocate(self, linode, public=True): """ Allocates an IP to an Instance you own. Additional IPs must be requested by opening a support ticket first. :param linode: The Instance to allocate the new IP for. :type linode: Instance or int :param public: If True, allocate a public IP address. Defaults to True. :type public: bool :returns: The new IPAddress :rtype: IPAddress """ result = self.client.post('/networking/ipv4/', data={ "linode_id": linode.id if isinstance(linode, Base) else linode, "type": "ipv4", "public": public, }) if not 'address' in result: raise UnexpectedResponseError('Unexpected response when adding IPv4 address!', json=result) ip = IPAddress(self.client, result['address'], result) return ip
[ "def", "ip_allocate", "(", "self", ",", "linode", ",", "public", "=", "True", ")", ":", "result", "=", "self", ".", "client", ".", "post", "(", "'/networking/ipv4/'", ",", "data", "=", "{", "\"linode_id\"", ":", "linode", ".", "id", "if", "isinstance", "(", "linode", ",", "Base", ")", "else", "linode", ",", "\"type\"", ":", "\"ipv4\"", ",", "\"public\"", ":", "public", ",", "}", ")", "if", "not", "'address'", "in", "result", ":", "raise", "UnexpectedResponseError", "(", "'Unexpected response when adding IPv4 address!'", ",", "json", "=", "result", ")", "ip", "=", "IPAddress", "(", "self", ".", "client", ",", "result", "[", "'address'", "]", ",", "result", ")", "return", "ip" ]
Allocates an IP to an Instance you own. Additional IPs must be requested by opening a support ticket first. :param linode: The Instance to allocate the new IP for. :type linode: Instance or int :param public: If True, allocate a public IP address. Defaults to True. :type public: bool :returns: The new IPAddress :rtype: IPAddress
[ "Allocates", "an", "IP", "to", "a", "Instance", "you", "own", ".", "Additional", "IPs", "must", "be", "requested", "by", "opening", "a", "support", "ticket", "first", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L658-L682
train
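Allocating an extra public IPv4 address for an existing Linode (the docstring notes additional public IPs normally need a support ticket first). The client.networking group attribute, the top-level Instance export, and the ID are assumptions or placeholders.

from linode_api4 import LinodeClient, Instance

client = LinodeClient("my-api-token")
linode = client.load(Instance, 12345)     # placeholder Linode ID

ip = client.networking.ip_allocate(linode, public=True)
print("new address:", ip.address)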
linode/linode_api4-python
linode_api4/linode_client.py
LinodeClient.load
def load(self, target_type, target_id, target_parent_id=None): """ Constructs and immediately loads the object, circumventing the lazy-loading scheme by immediately making an API request. Does not load related objects. For example, if you wanted to load an :any:`Instance` object with ID 123, you could do this:: loaded_linode = client.load(Instance, 123) Similarly, if you instead wanted to load a :any:`NodeBalancerConfig`, you could do so like this:: loaded_nodebalancer_config = client.load(NodeBalancerConfig, 456, 432) :param target_type: The type of object to create. :type target_type: type :param target_id: The ID of the object to create. :type target_id: int or str :param target_parent_id: The parent ID of the object to create, if applicable. :type target_parent_id: int, str, or None :returns: The resulting object, fully loaded. :rtype: target_type :raise ApiError: if the requested object could not be loaded. """ result = target_type.make_instance(target_id, self, parent_id=target_parent_id) result._api_get() return result
python
def load(self, target_type, target_id, target_parent_id=None): """ Constructs and immediately loads the object, circumventing the lazy-loading scheme by immediately making an API request. Does not load related objects. For example, if you wanted to load an :any:`Instance` object with ID 123, you could do this:: loaded_linode = client.load(Instance, 123) Similarly, if you instead wanted to load a :any:`NodeBalancerConfig`, you could do so like this:: loaded_nodebalancer_config = client.load(NodeBalancerConfig, 456, 432) :param target_type: The type of object to create. :type target_type: type :param target_id: The ID of the object to create. :type target_id: int or str :param target_parent_id: The parent ID of the object to create, if applicable. :type target_parent_id: int, str, or None :returns: The resulting object, fully loaded. :rtype: target_type :raise ApiError: if the requested object could not be loaded. """ result = target_type.make_instance(target_id, self, parent_id=target_parent_id) result._api_get() return result
[ "def", "load", "(", "self", ",", "target_type", ",", "target_id", ",", "target_parent_id", "=", "None", ")", ":", "result", "=", "target_type", ".", "make_instance", "(", "target_id", ",", "self", ",", "parent_id", "=", "target_parent_id", ")", "result", ".", "_api_get", "(", ")", "return", "result" ]
Constructs and immediately loads the object, circumventing the lazy-loading scheme by immediately making an API request. Does not load related objects. For example, if you wanted to load an :any:`Instance` object with ID 123, you could do this:: loaded_linode = client.load(Instance, 123) Similarly, if you instead wanted to load a :any:`NodeBalancerConfig`, you could do so like this:: loaded_nodebalancer_config = client.load(NodeBalancerConfig, 456, 432) :param target_type: The type of object to create. :type target_type: type :param target_id: The ID of the object to create. :type target_id: int or str :param target_parent_id: The parent ID of the object to create, if applicable. :type target_parent_id: int, str, or None :returns: The resulting object, fully loaded. :rtype: target_type :raise ApiError: if the requested object could not be loaded.
[ "Constructs", "and", "immediately", "loads", "the", "object", "circumventing", "the", "lazy", "-", "loading", "scheme", "by", "immediately", "making", "an", "API", "request", ".", "Does", "not", "load", "related", "objects", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L808-L839
train
linode/linode_api4-python
linode_api4/linode_client.py
LinodeClient._api_call
def _api_call(self, endpoint, model=None, method=None, data=None, filters=None): """ Makes a call to the linode api. Data should only be given if the method is POST or PUT, and should be a dictionary """ if not self.token: raise RuntimeError("You do not have an API token!") if not method: raise ValueError("Method is required for API calls!") if model: endpoint = endpoint.format(**vars(model)) url = '{}{}'.format(self.base_url, endpoint) headers = { 'Authorization': "Bearer {}".format(self.token), 'Content-Type': 'application/json', 'User-Agent': self._user_agent, } if filters: headers['X-Filter'] = json.dumps(filters) body = None if data is not None: body = json.dumps(data) response = method(url, headers=headers, data=body) warning = response.headers.get('Warning', None) if warning: logger.warning('Received warning from server: {}'.format(warning)) if 399 < response.status_code < 600: j = None error_msg = '{}: '.format(response.status_code) try: j = response.json() if 'errors' in j.keys(): for e in j['errors']: error_msg += '{}; '.format(e['reason']) \ if 'reason' in e.keys() else '' except: pass raise ApiError(error_msg, status=response.status_code, json=j) if response.status_code != 204: j = response.json() else: j = None # handle no response body return j
python
def _api_call(self, endpoint, model=None, method=None, data=None, filters=None): """ Makes a call to the linode api. Data should only be given if the method is POST or PUT, and should be a dictionary """ if not self.token: raise RuntimeError("You do not have an API token!") if not method: raise ValueError("Method is required for API calls!") if model: endpoint = endpoint.format(**vars(model)) url = '{}{}'.format(self.base_url, endpoint) headers = { 'Authorization': "Bearer {}".format(self.token), 'Content-Type': 'application/json', 'User-Agent': self._user_agent, } if filters: headers['X-Filter'] = json.dumps(filters) body = None if data is not None: body = json.dumps(data) response = method(url, headers=headers, data=body) warning = response.headers.get('Warning', None) if warning: logger.warning('Received warning from server: {}'.format(warning)) if 399 < response.status_code < 600: j = None error_msg = '{}: '.format(response.status_code) try: j = response.json() if 'errors' in j.keys(): for e in j['errors']: error_msg += '{}; '.format(e['reason']) \ if 'reason' in e.keys() else '' except: pass raise ApiError(error_msg, status=response.status_code, json=j) if response.status_code != 204: j = response.json() else: j = None # handle no response body return j
[ "def", "_api_call", "(", "self", ",", "endpoint", ",", "model", "=", "None", ",", "method", "=", "None", ",", "data", "=", "None", ",", "filters", "=", "None", ")", ":", "if", "not", "self", ".", "token", ":", "raise", "RuntimeError", "(", "\"You do not have an API token!\"", ")", "if", "not", "method", ":", "raise", "ValueError", "(", "\"Method is required for API calls!\"", ")", "if", "model", ":", "endpoint", "=", "endpoint", ".", "format", "(", "*", "*", "vars", "(", "model", ")", ")", "url", "=", "'{}{}'", ".", "format", "(", "self", ".", "base_url", ",", "endpoint", ")", "headers", "=", "{", "'Authorization'", ":", "\"Bearer {}\"", ".", "format", "(", "self", ".", "token", ")", ",", "'Content-Type'", ":", "'application/json'", ",", "'User-Agent'", ":", "self", ".", "_user_agent", ",", "}", "if", "filters", ":", "headers", "[", "'X-Filter'", "]", "=", "json", ".", "dumps", "(", "filters", ")", "body", "=", "None", "if", "data", "is", "not", "None", ":", "body", "=", "json", ".", "dumps", "(", "data", ")", "response", "=", "method", "(", "url", ",", "headers", "=", "headers", ",", "data", "=", "body", ")", "warning", "=", "response", ".", "headers", ".", "get", "(", "'Warning'", ",", "None", ")", "if", "warning", ":", "logger", ".", "warning", "(", "'Received warning from server: {}'", ".", "format", "(", "warning", ")", ")", "if", "399", "<", "response", ".", "status_code", "<", "600", ":", "j", "=", "None", "error_msg", "=", "'{}: '", ".", "format", "(", "response", ".", "status_code", ")", "try", ":", "j", "=", "response", ".", "json", "(", ")", "if", "'errors'", "in", "j", ".", "keys", "(", ")", ":", "for", "e", "in", "j", "[", "'errors'", "]", ":", "error_msg", "+=", "'{}; '", ".", "format", "(", "e", "[", "'reason'", "]", ")", "if", "'reason'", "in", "e", ".", "keys", "(", ")", "else", "''", "except", ":", "pass", "raise", "ApiError", "(", "error_msg", ",", "status", "=", "response", ".", "status_code", ",", "json", "=", "j", ")", "if", "response", ".", "status_code", "!=", "204", ":", "j", "=", "response", ".", "json", "(", ")", "else", ":", "j", "=", "None", "# handle no response body", "return", "j" ]
Makes a call to the linode api. Data should only be given if the method is POST or PUT, and should be a dictionary
[ "Makes", "a", "call", "to", "the", "linode", "api", ".", "Data", "should", "only", "be", "given", "if", "the", "method", "is", "POST", "or", "PUT", "and", "should", "be", "a", "dictionary" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L841-L892
train
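The record above shows that filters travel to the API in an X-Filter header (a JSON-encoded document) and that error bodies are folded into a single message before raising. Below is a minimal standalone sketch of that same request/error-handling pattern using requests directly; the base URL, token, and filter values are placeholders for illustration, not real credentials.

    # Sketch of the request pattern _api_call implements, outside the client.
    import json
    import requests

    BASE_URL = "https://api.linode.com/v4"   # assumed endpoint, for illustration only
    TOKEN = "example-token"                  # placeholder personal access token

    def api_get(endpoint, filters=None):
        headers = {
            "Authorization": "Bearer {}".format(TOKEN),
            "Content-Type": "application/json",
        }
        if filters:
            # filters are sent as a JSON document in the X-Filter header
            headers["X-Filter"] = json.dumps(filters)
        response = requests.get(BASE_URL + endpoint, headers=headers)
        if 399 < response.status_code < 600:
            # fold any error reasons from the JSON body into one message, as _api_call does
            reasons = "; ".join(e.get("reason", "") for e in response.json().get("errors", []))
            raise RuntimeError("{}: {}".format(response.status_code, reasons))
        return None if response.status_code == 204 else response.json()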
linode/linode_api4-python
linode_api4/linode_client.py
LinodeClient.image_create
def image_create(self, disk, label=None, description=None): """ Creates a new Image from a disk you own. :param disk: The Disk to imagize. :type disk: Disk or int :param label: The label for the resulting Image (defaults to the disk's label. :type label: str :param description: The description for the new Image. :type description: str :returns: The new Image. :rtype: Image """ params = { "disk_id": disk.id if issubclass(type(disk), Base) else disk, } if label is not None: params["label"] = label if description is not None: params["description"] = description result = self.post('/images', data=params) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating an ' 'Image from disk {}'.format(disk)) return Image(self, result['id'], result)
python
def image_create(self, disk, label=None, description=None): """ Creates a new Image from a disk you own. :param disk: The Disk to imagize. :type disk: Disk or int :param label: The label for the resulting Image (defaults to the disk's label. :type label: str :param description: The description for the new Image. :type description: str :returns: The new Image. :rtype: Image """ params = { "disk_id": disk.id if issubclass(type(disk), Base) else disk, } if label is not None: params["label"] = label if description is not None: params["description"] = description result = self.post('/images', data=params) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating an ' 'Image from disk {}'.format(disk)) return Image(self, result['id'], result)
[ "def", "image_create", "(", "self", ",", "disk", ",", "label", "=", "None", ",", "description", "=", "None", ")", ":", "params", "=", "{", "\"disk_id\"", ":", "disk", ".", "id", "if", "issubclass", "(", "type", "(", "disk", ")", ",", "Base", ")", "else", "disk", ",", "}", "if", "label", "is", "not", "None", ":", "params", "[", "\"label\"", "]", "=", "label", "if", "description", "is", "not", "None", ":", "params", "[", "\"description\"", "]", "=", "description", "result", "=", "self", ".", "post", "(", "'/images'", ",", "data", "=", "params", ")", "if", "not", "'id'", "in", "result", ":", "raise", "UnexpectedResponseError", "(", "'Unexpected response when creating an '", "'Image from disk {}'", ".", "format", "(", "disk", ")", ")", "return", "Image", "(", "self", ",", "result", "[", "'id'", "]", ",", "result", ")" ]
Creates a new Image from a disk you own. :param disk: The Disk to imagize. :type disk: Disk or int :param label: The label for the resulting Image (defaults to the disk's label). :type label: str :param description: The description for the new Image. :type description: str :returns: The new Image. :rtype: Image
[ "Creates", "a", "new", "Image", "from", "a", "disk", "you", "own", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L950-L981
train
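A brief usage sketch for the image_create method above; the access token and disk ID are placeholders, and the disk may equally be passed as a Disk object.

    # Usage sketch for LinodeClient.image_create; values below are illustrative only.
    from linode_api4 import LinodeClient

    client = LinodeClient("example-token")    # placeholder personal access token
    image = client.image_create(12345,        # existing disk ID (or a Disk object)
                                label="golden-image",
                                description="Base image built from an existing disk")
    print(image.id)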
linode/linode_api4-python
linode_api4/linode_client.py
LinodeClient.nodebalancer_create
def nodebalancer_create(self, region, **kwargs): """ Creates a new NodeBalancer in the given Region. :param region: The Region in which to create the NodeBalancer. :type region: Region or str :returns: The new NodeBalancer :rtype: NodeBalancer """ params = { "region": region.id if isinstance(region, Base) else region, } params.update(kwargs) result = self.post('/nodebalancers', data=params) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating Nodebalaner!', json=result) n = NodeBalancer(self, result['id'], result) return n
python
def nodebalancer_create(self, region, **kwargs): """ Creates a new NodeBalancer in the given Region. :param region: The Region in which to create the NodeBalancer. :type region: Region or str :returns: The new NodeBalancer :rtype: NodeBalancer """ params = { "region": region.id if isinstance(region, Base) else region, } params.update(kwargs) result = self.post('/nodebalancers', data=params) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating Nodebalaner!', json=result) n = NodeBalancer(self, result['id'], result) return n
[ "def", "nodebalancer_create", "(", "self", ",", "region", ",", "*", "*", "kwargs", ")", ":", "params", "=", "{", "\"region\"", ":", "region", ".", "id", "if", "isinstance", "(", "region", ",", "Base", ")", "else", "region", ",", "}", "params", ".", "update", "(", "kwargs", ")", "result", "=", "self", ".", "post", "(", "'/nodebalancers'", ",", "data", "=", "params", ")", "if", "not", "'id'", "in", "result", ":", "raise", "UnexpectedResponseError", "(", "'Unexpected response when creating Nodebalaner!'", ",", "json", "=", "result", ")", "n", "=", "NodeBalancer", "(", "self", ",", "result", "[", "'id'", "]", ",", "result", ")", "return", "n" ]
Creates a new NodeBalancer in the given Region. :param region: The Region in which to create the NodeBalancer. :type region: Region or str :returns: The new NodeBalancer :rtype: NodeBalancer
[ "Creates", "a", "new", "NodeBalancer", "in", "the", "given", "Region", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L1005-L1026
train
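A brief usage sketch for nodebalancer_create; the token and region ID are illustrative, and any extra keyword arguments (such as label) pass straight through to the API.

    # Usage sketch for LinodeClient.nodebalancer_create; values are placeholders.
    from linode_api4 import LinodeClient

    client = LinodeClient("example-token")              # placeholder token
    # region may be a Region object or its string ID
    nb = client.nodebalancer_create("us-east", label="web-balancer")
    print(nb.id)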
linode/linode_api4-python
linode_api4/linode_client.py
LinodeClient.domain_create
def domain_create(self, domain, master=True, **kwargs): """ Registers a new Domain on the acting user's account. Make sure to point your registrar to Linode's nameservers so that Linode's DNS manager will correctly serve your domain. :param domain: The domain to register to Linode's DNS manager. :type domain: str :param master: Whether this is a master (defaults to true) :type master: bool :returns: The new Domain object. :rtype: Domain """ params = { 'domain': domain, 'type': 'master' if master else 'slave', } params.update(kwargs) result = self.post('/domains', data=params) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating Domain!', json=result) d = Domain(self, result['id'], result) return d
python
def domain_create(self, domain, master=True, **kwargs): """ Registers a new Domain on the acting user's account. Make sure to point your registrar to Linode's nameservers so that Linode's DNS manager will correctly serve your domain. :param domain: The domain to register to Linode's DNS manager. :type domain: str :param master: Whether this is a master (defaults to true) :type master: bool :returns: The new Domain object. :rtype: Domain """ params = { 'domain': domain, 'type': 'master' if master else 'slave', } params.update(kwargs) result = self.post('/domains', data=params) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating Domain!', json=result) d = Domain(self, result['id'], result) return d
[ "def", "domain_create", "(", "self", ",", "domain", ",", "master", "=", "True", ",", "*", "*", "kwargs", ")", ":", "params", "=", "{", "'domain'", ":", "domain", ",", "'type'", ":", "'master'", "if", "master", "else", "'slave'", ",", "}", "params", ".", "update", "(", "kwargs", ")", "result", "=", "self", ".", "post", "(", "'/domains'", ",", "data", "=", "params", ")", "if", "not", "'id'", "in", "result", ":", "raise", "UnexpectedResponseError", "(", "'Unexpected response when creating Domain!'", ",", "json", "=", "result", ")", "d", "=", "Domain", "(", "self", ",", "result", "[", "'id'", "]", ",", "result", ")", "return", "d" ]
Registers a new Domain on the acting user's account. Make sure to point your registrar to Linode's nameservers so that Linode's DNS manager will correctly serve your domain. :param domain: The domain to register to Linode's DNS manager. :type domain: str :param master: Whether this is a master (defaults to true) :type master: bool :returns: The new Domain object. :rtype: Domain
[ "Registers", "a", "new", "Domain", "on", "the", "acting", "user", "s", "account", ".", "Make", "sure", "to", "point", "your", "registrar", "to", "Linode", "s", "nameservers", "so", "that", "Linode", "s", "DNS", "manager", "will", "correctly", "serve", "your", "domain", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L1028-L1054
train
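A brief usage sketch for domain_create; the domain name and contact address are placeholders, and soa_email is forwarded to the API through the method's **kwargs.

    # Usage sketch for LinodeClient.domain_create; values are placeholders.
    from linode_api4 import LinodeClient

    client = LinodeClient("example-token")    # placeholder token
    # master=True (the default) registers a master zone
    domain = client.domain_create("example.com", soa_email="admin@example.com")
    print(domain.id)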
linode/linode_api4-python
linode_api4/linode_client.py
LinodeClient.tag_create
def tag_create(self, label, instances=None, domains=None, nodebalancers=None, volumes=None, entities=[]): """ Creates a new Tag and optionally applies it to the given entities. :param label: The label for the new Tag :type label: str :param entities: A list of objects to apply this Tag to upon creation. May only be taggable types (Linode Instances, Domains, NodeBalancers, or Volumes). These are applied *in addition to* any IDs specified with ``instances``, ``domains``, ``nodebalancers``, or ``volumes``, and is a convenience for sending multiple entity types without sorting them yourself. :type entities: list of Instance, Domain, NodeBalancer, and/or Volume :param instances: A list of Linode Instances to apply this Tag to upon creation :type instances: list of Instance or list of int :param domains: A list of Domains to apply this Tag to upon creation :type domains: list of Domain or list of int :param nodebalancers: A list of NodeBalancers to apply this Tag to upon creation :type nodebalancers: list of NodeBalancer or list of int :param volumes: A list of Volumes to apply this Tag to upon creation :type volumes: list of Volumes or list of int :returns: The new Tag :rtype: Tag """ linode_ids, nodebalancer_ids, domain_ids, volume_ids = [], [], [], [] # filter input into lists of ids sorter = zip((linode_ids, nodebalancer_ids, domain_ids, volume_ids), (instances, nodebalancers, domains, volumes)) for id_list, input_list in sorter: # if we got something, we need to find its ID if input_list is not None: for cur in input_list: if isinstance(cur, int): id_list.append(cur) else: id_list.append(cur.id) # filter entities into id lists too type_map = { Instance: linode_ids, NodeBalancer: nodebalancer_ids, Domain: domain_ids, Volume: volume_ids, } for e in entities: if type(e) in type_map: type_map[type(e)].append(e.id) else: raise ValueError('Unsupported entity type {}'.format(type(e))) # finally, omit all id lists that are empty params = { 'label': label, 'linodes': linode_ids or None, 'nodebalancers': nodebalancer_ids or None, 'domains': domain_ids or None, 'volumes': volume_ids or None, } result = self.post('/tags', data=params) if not 'label' in result: raise UnexpectedResponseError('Unexpected response when creating Tag!', json=result) t = Tag(self, result['label'], result) return t
python
def tag_create(self, label, instances=None, domains=None, nodebalancers=None, volumes=None, entities=[]): """ Creates a new Tag and optionally applies it to the given entities. :param label: The label for the new Tag :type label: str :param entities: A list of objects to apply this Tag to upon creation. May only be taggable types (Linode Instances, Domains, NodeBalancers, or Volumes). These are applied *in addition to* any IDs specified with ``instances``, ``domains``, ``nodebalancers``, or ``volumes``, and is a convenience for sending multiple entity types without sorting them yourself. :type entities: list of Instance, Domain, NodeBalancer, and/or Volume :param instances: A list of Linode Instances to apply this Tag to upon creation :type instances: list of Instance or list of int :param domains: A list of Domains to apply this Tag to upon creation :type domains: list of Domain or list of int :param nodebalancers: A list of NodeBalancers to apply this Tag to upon creation :type nodebalancers: list of NodeBalancer or list of int :param volumes: A list of Volumes to apply this Tag to upon creation :type volumes: list of Volumes or list of int :returns: The new Tag :rtype: Tag """ linode_ids, nodebalancer_ids, domain_ids, volume_ids = [], [], [], [] # filter input into lists of ids sorter = zip((linode_ids, nodebalancer_ids, domain_ids, volume_ids), (instances, nodebalancers, domains, volumes)) for id_list, input_list in sorter: # if we got something, we need to find its ID if input_list is not None: for cur in input_list: if isinstance(cur, int): id_list.append(cur) else: id_list.append(cur.id) # filter entities into id lists too type_map = { Instance: linode_ids, NodeBalancer: nodebalancer_ids, Domain: domain_ids, Volume: volume_ids, } for e in entities: if type(e) in type_map: type_map[type(e)].append(e.id) else: raise ValueError('Unsupported entity type {}'.format(type(e))) # finally, omit all id lists that are empty params = { 'label': label, 'linodes': linode_ids or None, 'nodebalancers': nodebalancer_ids or None, 'domains': domain_ids or None, 'volumes': volume_ids or None, } result = self.post('/tags', data=params) if not 'label' in result: raise UnexpectedResponseError('Unexpected response when creating Tag!', json=result) t = Tag(self, result['label'], result) return t
[ "def", "tag_create", "(", "self", ",", "label", ",", "instances", "=", "None", ",", "domains", "=", "None", ",", "nodebalancers", "=", "None", ",", "volumes", "=", "None", ",", "entities", "=", "[", "]", ")", ":", "linode_ids", ",", "nodebalancer_ids", ",", "domain_ids", ",", "volume_ids", "=", "[", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", "# filter input into lists of ids", "sorter", "=", "zip", "(", "(", "linode_ids", ",", "nodebalancer_ids", ",", "domain_ids", ",", "volume_ids", ")", ",", "(", "instances", ",", "nodebalancers", ",", "domains", ",", "volumes", ")", ")", "for", "id_list", ",", "input_list", "in", "sorter", ":", "# if we got something, we need to find its ID", "if", "input_list", "is", "not", "None", ":", "for", "cur", "in", "input_list", ":", "if", "isinstance", "(", "cur", ",", "int", ")", ":", "id_list", ".", "append", "(", "cur", ")", "else", ":", "id_list", ".", "append", "(", "cur", ".", "id", ")", "# filter entities into id lists too", "type_map", "=", "{", "Instance", ":", "linode_ids", ",", "NodeBalancer", ":", "nodebalancer_ids", ",", "Domain", ":", "domain_ids", ",", "Volume", ":", "volume_ids", ",", "}", "for", "e", "in", "entities", ":", "if", "type", "(", "e", ")", "in", "type_map", ":", "type_map", "[", "type", "(", "e", ")", "]", ".", "append", "(", "e", ".", "id", ")", "else", ":", "raise", "ValueError", "(", "'Unsupported entity type {}'", ".", "format", "(", "type", "(", "e", ")", ")", ")", "# finally, omit all id lists that are empty", "params", "=", "{", "'label'", ":", "label", ",", "'linodes'", ":", "linode_ids", "or", "None", ",", "'nodebalancers'", ":", "nodebalancer_ids", "or", "None", ",", "'domains'", ":", "domain_ids", "or", "None", ",", "'volumes'", ":", "volume_ids", "or", "None", ",", "}", "result", "=", "self", ".", "post", "(", "'/tags'", ",", "data", "=", "params", ")", "if", "not", "'label'", "in", "result", ":", "raise", "UnexpectedResponseError", "(", "'Unexpected response when creating Tag!'", ",", "json", "=", "result", ")", "t", "=", "Tag", "(", "self", ",", "result", "[", "'label'", "]", ",", "result", ")", "return", "t" ]
Creates a new Tag and optionally applies it to the given entities. :param label: The label for the new Tag :type label: str :param entities: A list of objects to apply this Tag to upon creation. May only be taggable types (Linode Instances, Domains, NodeBalancers, or Volumes). These are applied *in addition to* any IDs specified with ``instances``, ``domains``, ``nodebalancers``, or ``volumes``, and is a convenience for sending multiple entity types without sorting them yourself. :type entities: list of Instance, Domain, NodeBalancer, and/or Volume :param instances: A list of Linode Instances to apply this Tag to upon creation :type instances: list of Instance or list of int :param domains: A list of Domains to apply this Tag to upon creation :type domains: list of Domain or list of int :param nodebalancers: A list of NodeBalancers to apply this Tag to upon creation :type nodebalancers: list of NodeBalancer or list of int :param volumes: A list of Volumes to apply this Tag to upon creation :type volumes: list of Volumes or list of int :returns: The new Tag :rtype: Tag
[ "Creates", "a", "new", "Tag", "and", "optionally", "applies", "it", "to", "the", "given", "entities", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L1068-L1143
train
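A brief usage sketch for tag_create; the instance and volume IDs are placeholders. As the code above shows, entities may be given as objects or plain integer IDs, and empty groups are omitted from the request body.

    # Usage sketch for LinodeClient.tag_create; IDs below are placeholders.
    from linode_api4 import LinodeClient

    client = LinodeClient("example-token")    # placeholder token
    tag = client.tag_create("production", instances=[12345], volumes=[678])
    print(tag.label)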
linode/linode_api4-python
linode_api4/linode_client.py
LinodeClient.volume_create
def volume_create(self, label, region=None, linode=None, size=20, **kwargs): """ Creates a new Block Storage Volume, either in the given Region or attached to the given Instance. :param label: The label for the new Volume. :type label: str :param region: The Region to create this Volume in. Not required if `linode` is provided. :type region: Region or str :param linode: The Instance to attach this Volume to. If not given, the new Volume will not be attached to anything. :type linode: Instance or int :param size: The size, in GB, of the new Volume. Defaults to 20. :type size: int :returns: The new Volume. :rtype: Volume """ if not (region or linode): raise ValueError('region or linode required!') params = { "label": label, "size": size, "region": region.id if issubclass(type(region), Base) else region, "linode_id": linode.id if issubclass(type(linode), Base) else linode, } params.update(kwargs) result = self.post('/volumes', data=params) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating volume!', json=result) v = Volume(self, result['id'], result) return v
python
def volume_create(self, label, region=None, linode=None, size=20, **kwargs): """ Creates a new Block Storage Volume, either in the given Region or attached to the given Instance. :param label: The label for the new Volume. :type label: str :param region: The Region to create this Volume in. Not required if `linode` is provided. :type region: Region or str :param linode: The Instance to attach this Volume to. If not given, the new Volume will not be attached to anything. :type linode: Instance or int :param size: The size, in GB, of the new Volume. Defaults to 20. :type size: int :returns: The new Volume. :rtype: Volume """ if not (region or linode): raise ValueError('region or linode required!') params = { "label": label, "size": size, "region": region.id if issubclass(type(region), Base) else region, "linode_id": linode.id if issubclass(type(linode), Base) else linode, } params.update(kwargs) result = self.post('/volumes', data=params) if not 'id' in result: raise UnexpectedResponseError('Unexpected response when creating volume!', json=result) v = Volume(self, result['id'], result) return v
[ "def", "volume_create", "(", "self", ",", "label", ",", "region", "=", "None", ",", "linode", "=", "None", ",", "size", "=", "20", ",", "*", "*", "kwargs", ")", ":", "if", "not", "(", "region", "or", "linode", ")", ":", "raise", "ValueError", "(", "'region or linode required!'", ")", "params", "=", "{", "\"label\"", ":", "label", ",", "\"size\"", ":", "size", ",", "\"region\"", ":", "region", ".", "id", "if", "issubclass", "(", "type", "(", "region", ")", ",", "Base", ")", "else", "region", ",", "\"linode_id\"", ":", "linode", ".", "id", "if", "issubclass", "(", "type", "(", "linode", ")", ",", "Base", ")", "else", "linode", ",", "}", "params", ".", "update", "(", "kwargs", ")", "result", "=", "self", ".", "post", "(", "'/volumes'", ",", "data", "=", "params", ")", "if", "not", "'id'", "in", "result", ":", "raise", "UnexpectedResponseError", "(", "'Unexpected response when creating volume!'", ",", "json", "=", "result", ")", "v", "=", "Volume", "(", "self", ",", "result", "[", "'id'", "]", ",", "result", ")", "return", "v" ]
Creates a new Block Storage Volume, either in the given Region or attached to the given Instance. :param label: The label for the new Volume. :type label: str :param region: The Region to create this Volume in. Not required if `linode` is provided. :type region: Region or str :param linode: The Instance to attach this Volume to. If not given, the new Volume will not be attached to anything. :type linode: Instance or int :param size: The size, in GB, of the new Volume. Defaults to 20. :type size: int :returns: The new Volume. :rtype: Volume
[ "Creates", "a", "new", "Block", "Storage", "Volume", "either", "in", "the", "given", "Region", "or", "attached", "to", "the", "given", "Instance", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/linode_client.py#L1156-L1192
train
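A brief usage sketch for volume_create; the token and region ID are placeholders. Either a region or a Linode must be supplied, so this example creates an unattached volume in a region.

    # Usage sketch for LinodeClient.volume_create; values are placeholders.
    from linode_api4 import LinodeClient

    client = LinodeClient("example-token")    # placeholder token
    volume = client.volume_create("backups", region="us-east", size=40)
    print(volume.id)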
linode/linode_api4-python
linode_api4/login_client.py
LinodeLoginClient.expire_token
def expire_token(self, token): """ Given a token, makes a request to the authentication server to expire it immediately. This is considered a responsible way to log out a user. If you simply remove the session your application has for the user without expiring their token, the user is not _really_ logged out. :param token: The OAuth token you wish to expire :type token: str :returns: If the expiration attempt succeeded. :rtype: bool :raises ApiError: If the expiration attempt failed. """ r = requests.post(self._login_uri("/oauth/token/expire"), data={ "client_id": self.client_id, "client_secret": self.client_secret, "token": token, }) if r.status_code != 200: raise ApiError("Failed to expire token!", r) return True
python
def expire_token(self, token): """ Given a token, makes a request to the authentication server to expire it immediately. This is considered a responsible way to log out a user. If you simply remove the session your application has for the user without expiring their token, the user is not _really_ logged out. :param token: The OAuth token you wish to expire :type token: str :returns: If the expiration attempt succeeded. :rtype: bool :raises ApiError: If the expiration attempt failed. """ r = requests.post(self._login_uri("/oauth/token/expire"), data={ "client_id": self.client_id, "client_secret": self.client_secret, "token": token, }) if r.status_code != 200: raise ApiError("Failed to expire token!", r) return True
[ "def", "expire_token", "(", "self", ",", "token", ")", ":", "r", "=", "requests", ".", "post", "(", "self", ".", "_login_uri", "(", "\"/oauth/token/expire\"", ")", ",", "data", "=", "{", "\"client_id\"", ":", "self", ".", "client_id", ",", "\"client_secret\"", ":", "self", ".", "client_secret", ",", "\"token\"", ":", "token", ",", "}", ")", "if", "r", ".", "status_code", "!=", "200", ":", "raise", "ApiError", "(", "\"Failed to expire token!\"", ",", "r", ")", "return", "True" ]
Given a token, makes a request to the authentication server to expire it immediately. This is considered a responsible way to log out a user. If you simply remove the session your application has for the user without expiring their token, the user is not _really_ logged out. :param token: The OAuth token you wish to expire :type token: str :returns: If the expiration attempt succeeded. :rtype: bool :raises ApiError: If the expiration attempt failed.
[ "Given", "a", "token", "makes", "a", "request", "to", "the", "authentication", "server", "to", "expire", "it", "immediately", ".", "This", "is", "considered", "a", "responsible", "way", "to", "log", "out", "a", "user", ".", "If", "you", "simply", "remove", "the", "session", "your", "application", "has", "for", "the", "user", "without", "expiring", "their", "token", "the", "user", "is", "not", "_really_", "logged", "out", "." ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/login_client.py#L417-L441
train
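A brief sketch of expiring a user's OAuth token at logout, following the record above. The client ID, secret, and token are placeholders, and the constructor arguments are assumed from the client_id/client_secret attributes the method relies on.

    # Sketch of revoking a token on logout; all values below are placeholders.
    from linode_api4.login_client import LinodeLoginClient

    login_client = LinodeLoginClient("my-client-id", "my-client-secret")
    token_to_revoke = "users-oauth-token"     # placeholder OAuth token

    if login_client.expire_token(token_to_revoke):
        # only now is it safe to drop the user's session server-side
        print("token expired; user fully logged out")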
linode/linode_api4-python
linode_api4/objects/profile.py
Profile.grants
def grants(self): """ Returns grants for the current user """ from linode_api4.objects.account import UserGrants resp = self._client.get('/profile/grants') # use special endpoint for restricted users grants = None if resp is not None: # if resp is None, we're unrestricted and do not have grants grants = UserGrants(self._client, self.username, resp) return grants
python
def grants(self): """ Returns grants for the current user """ from linode_api4.objects.account import UserGrants resp = self._client.get('/profile/grants') # use special endpoint for restricted users grants = None if resp is not None: # if resp is None, we're unrestricted and do not have grants grants = UserGrants(self._client, self.username, resp) return grants
[ "def", "grants", "(", "self", ")", ":", "from", "linode_api4", ".", "objects", ".", "account", "import", "UserGrants", "resp", "=", "self", ".", "_client", ".", "get", "(", "'/profile/grants'", ")", "# use special endpoint for restricted users", "grants", "=", "None", "if", "resp", "is", "not", "None", ":", "# if resp is None, we're unrestricted and do not have grants", "grants", "=", "UserGrants", "(", "self", ".", "_client", ",", "self", ".", "username", ",", "resp", ")", "return", "grants" ]
Returns grants for the current user
[ "Returns", "grants", "for", "the", "current", "user" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/profile.py#L91-L103
train
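A brief sketch of checking grants for the acting user. The profile() accessor on the client is an assumption for illustration; as the code above shows, an unrestricted user gets None back rather than a UserGrants object.

    # Sketch of inspecting the acting user's grants; token is a placeholder.
    from linode_api4 import LinodeClient

    client = LinodeClient("example-token")    # placeholder token
    profile = client.profile()                # assumed accessor for the current Profile
    grants = profile.grants()
    if grants is None:
        print("unrestricted user - no grant limits apply")
    else:
        print("restricted user with explicit grants")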
linode/linode_api4-python
linode_api4/objects/profile.py
Profile.add_whitelist_entry
def add_whitelist_entry(self, address, netmask, note=None): """ Adds a new entry to this user's IP whitelist, if enabled """ result = self._client.post("{}/whitelist".format(Profile.api_endpoint), data={ "address": address, "netmask": netmask, "note": note, }) if not 'id' in result: raise UnexpectedResponseError("Unexpected response creating whitelist entry!") return WhitelistEntry(result['id'], self._client, json=result)
python
def add_whitelist_entry(self, address, netmask, note=None): """ Adds a new entry to this user's IP whitelist, if enabled """ result = self._client.post("{}/whitelist".format(Profile.api_endpoint), data={ "address": address, "netmask": netmask, "note": note, }) if not 'id' in result: raise UnexpectedResponseError("Unexpected response creating whitelist entry!") return WhitelistEntry(result['id'], self._client, json=result)
[ "def", "add_whitelist_entry", "(", "self", ",", "address", ",", "netmask", ",", "note", "=", "None", ")", ":", "result", "=", "self", ".", "_client", ".", "post", "(", "\"{}/whitelist\"", ".", "format", "(", "Profile", ".", "api_endpoint", ")", ",", "data", "=", "{", "\"address\"", ":", "address", ",", "\"netmask\"", ":", "netmask", ",", "\"note\"", ":", "note", ",", "}", ")", "if", "not", "'id'", "in", "result", ":", "raise", "UnexpectedResponseError", "(", "\"Unexpected response creating whitelist entry!\"", ")", "return", "WhitelistEntry", "(", "result", "[", "'id'", "]", ",", "self", ".", "_client", ",", "json", "=", "result", ")" ]
Adds a new entry to this user's IP whitelist, if enabled
[ "Adds", "a", "new", "entry", "to", "this", "user", "s", "IP", "whitelist", "if", "enabled" ]
1dd7318d2aed014c746d48c7957464c57af883ca
https://github.com/linode/linode_api4-python/blob/1dd7318d2aed014c746d48c7957464c57af883ca/linode_api4/objects/profile.py#L112-L126
train
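A brief sketch of adding an IP whitelist entry to the acting user's profile. The profile() accessor and the address values are illustrative only, and the whitelist feature must already be enabled on the account.

    # Sketch of adding a whitelist entry; all values below are placeholders.
    from linode_api4 import LinodeClient

    client = LinodeClient("example-token")    # placeholder token
    profile = client.profile()                # assumed accessor for the current Profile
    entry = profile.add_whitelist_entry("203.0.113.10", "255.255.255.255",
                                        note="office workstation")
    print(entry.id)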
tmm/django-username-email
cuser/forms.py
AuthenticationForm.confirm_login_allowed
def confirm_login_allowed(self, user): """ Controls whether the given User may log in. This is a policy setting, independent of end-user authentication. This default behavior is to allow login by active users, and reject login by inactive users. If the given user cannot log in, this method should raise a ``forms.ValidationError``. If the given user may log in, this method should return None. """ if not user.is_active: raise forms.ValidationError( self.error_messages['inactive'], code='inactive', )
python
def confirm_login_allowed(self, user): """ Controls whether the given User may log in. This is a policy setting, independent of end-user authentication. This default behavior is to allow login by active users, and reject login by inactive users. If the given user cannot log in, this method should raise a ``forms.ValidationError``. If the given user may log in, this method should return None. """ if not user.is_active: raise forms.ValidationError( self.error_messages['inactive'], code='inactive', )
[ "def", "confirm_login_allowed", "(", "self", ",", "user", ")", ":", "if", "not", "user", ".", "is_active", ":", "raise", "forms", ".", "ValidationError", "(", "self", ".", "error_messages", "[", "'inactive'", "]", ",", "code", "=", "'inactive'", ",", ")" ]
Controls whether the given User may log in. This is a policy setting, independent of end-user authentication. This default behavior is to allow login by active users, and reject login by inactive users. If the given user cannot log in, this method should raise a ``forms.ValidationError``. If the given user may log in, this method should return None.
[ "Controls", "whether", "the", "given", "User", "may", "log", "in", ".", "This", "is", "a", "policy", "setting", "independent", "of", "end", "-", "user", "authentication", ".", "This", "default", "behavior", "is", "to", "allow", "login", "by", "active", "users", "and", "reject", "login", "by", "inactive", "users", "." ]
36e56bcbf79d46af101ba4c8f4bd848856306329
https://github.com/tmm/django-username-email/blob/36e56bcbf79d46af101ba4c8f4bd848856306329/cuser/forms.py#L68-L83
train
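Because confirm_login_allowed is a policy hook, the usual way to change login policy is to subclass the form and override it. Below is a hedged sketch: PickyAuthenticationForm is a hypothetical subclass, and the "banned" attribute is an assumed custom flag on the user model, not part of cuser or Django.

    # Sketch of layering an extra login policy on top of the default active-user check.
    from django import forms
    from cuser.forms import AuthenticationForm

    class PickyAuthenticationForm(AuthenticationForm):
        def confirm_login_allowed(self, user):
            # keep the default behavior (reject inactive users) first
            super().confirm_login_allowed(user)
            if getattr(user, "banned", False):   # assumed custom flag, for illustration
                raise forms.ValidationError("This account has been banned.", code="banned")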
dwavesystems/dwave-system
dwave/embedding/chain_breaks.py
broken_chains
def broken_chains(samples, chains): """Find the broken chains. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: :obj:`numpy.ndarray`: A nS x nC boolean array. If i, j is True, then chain j in sample i is broken. Examples: >>> samples = np.array([[-1, +1, -1, +1], [-1, -1, +1, +1]], dtype=np.int8) >>> chains = [[0, 1], [2, 3]] >>> dwave.embedding.broken_chains(samples, chains) array([[True, True], [ False, False]]) >>> samples = np.array([[-1, +1, -1, +1], [-1, -1, +1, +1]], dtype=np.int8) >>> chains = [[0, 2], [1, 3]] >>> dwave.embedding.broken_chains(samples, chains) array([[False, False], [ True, True]]) """ samples = np.asarray(samples) if samples.ndim != 2: raise ValueError("expected samples to be a numpy 2D array") num_samples, num_variables = samples.shape num_chains = len(chains) broken = np.zeros((num_samples, num_chains), dtype=bool, order='F') for cidx, chain in enumerate(chains): if isinstance(chain, set): chain = list(chain) chain = np.asarray(chain) if chain.ndim > 1: raise ValueError("chains should be 1D array_like objects") # chains of length 1, or 0 cannot be broken if len(chain) <= 1: continue all_ = (samples[:, chain] == 1).all(axis=1) any_ = (samples[:, chain] == 1).any(axis=1) broken[:, cidx] = np.bitwise_xor(all_, any_) return broken
python
def broken_chains(samples, chains): """Find the broken chains. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: :obj:`numpy.ndarray`: A nS x nC boolean array. If i, j is True, then chain j in sample i is broken. Examples: >>> samples = np.array([[-1, +1, -1, +1], [-1, -1, +1, +1]], dtype=np.int8) >>> chains = [[0, 1], [2, 3]] >>> dwave.embedding.broken_chains(samples, chains) array([[True, True], [ False, False]]) >>> samples = np.array([[-1, +1, -1, +1], [-1, -1, +1, +1]], dtype=np.int8) >>> chains = [[0, 2], [1, 3]] >>> dwave.embedding.broken_chains(samples, chains) array([[False, False], [ True, True]]) """ samples = np.asarray(samples) if samples.ndim != 2: raise ValueError("expected samples to be a numpy 2D array") num_samples, num_variables = samples.shape num_chains = len(chains) broken = np.zeros((num_samples, num_chains), dtype=bool, order='F') for cidx, chain in enumerate(chains): if isinstance(chain, set): chain = list(chain) chain = np.asarray(chain) if chain.ndim > 1: raise ValueError("chains should be 1D array_like objects") # chains of length 1, or 0 cannot be broken if len(chain) <= 1: continue all_ = (samples[:, chain] == 1).all(axis=1) any_ = (samples[:, chain] == 1).any(axis=1) broken[:, cidx] = np.bitwise_xor(all_, any_) return broken
[ "def", "broken_chains", "(", "samples", ",", "chains", ")", ":", "samples", "=", "np", ".", "asarray", "(", "samples", ")", "if", "samples", ".", "ndim", "!=", "2", ":", "raise", "ValueError", "(", "\"expected samples to be a numpy 2D array\"", ")", "num_samples", ",", "num_variables", "=", "samples", ".", "shape", "num_chains", "=", "len", "(", "chains", ")", "broken", "=", "np", ".", "zeros", "(", "(", "num_samples", ",", "num_chains", ")", ",", "dtype", "=", "bool", ",", "order", "=", "'F'", ")", "for", "cidx", ",", "chain", "in", "enumerate", "(", "chains", ")", ":", "if", "isinstance", "(", "chain", ",", "set", ")", ":", "chain", "=", "list", "(", "chain", ")", "chain", "=", "np", ".", "asarray", "(", "chain", ")", "if", "chain", ".", "ndim", ">", "1", ":", "raise", "ValueError", "(", "\"chains should be 1D array_like objects\"", ")", "# chains of length 1, or 0 cannot be broken", "if", "len", "(", "chain", ")", "<=", "1", ":", "continue", "all_", "=", "(", "samples", "[", ":", ",", "chain", "]", "==", "1", ")", ".", "all", "(", "axis", "=", "1", ")", "any_", "=", "(", "samples", "[", ":", ",", "chain", "]", "==", "1", ")", ".", "any", "(", "axis", "=", "1", ")", "broken", "[", ":", ",", "cidx", "]", "=", "np", ".", "bitwise_xor", "(", "all_", ",", "any_", ")", "return", "broken" ]
Find the broken chains. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: :obj:`numpy.ndarray`: A nS x nC boolean array. If i, j is True, then chain j in sample i is broken. Examples: >>> samples = np.array([[-1, +1, -1, +1], [-1, -1, +1, +1]], dtype=np.int8) >>> chains = [[0, 1], [2, 3]] >>> dwave.embedding.broken_chains(samples, chains) array([[True, True], [ False, False]]) >>> samples = np.array([[-1, +1, -1, +1], [-1, -1, +1, +1]], dtype=np.int8) >>> chains = [[0, 2], [1, 3]] >>> dwave.embedding.broken_chains(samples, chains) array([[False, False], [ True, True]])
[ "Find", "the", "broken", "chains", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/chain_breaks.py#L33-L88
train
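The core of broken_chains is the all/any XOR trick: a chain is broken exactly when some, but not all, of its qubits read +1. Below is a small self-contained numpy illustration of that trick on one chain, using the same sample values as the docstring's first example.

    # Standalone illustration of the all/any XOR trick used by broken_chains.
    import numpy as np

    samples = np.array([[-1, +1, -1, +1],
                        [-1, -1, +1, +1]], dtype=np.int8)
    chain = [0, 1]                                   # column indices forming one chain

    all_ones = (samples[:, chain] == 1).all(axis=1)  # every qubit in the chain is +1
    any_ones = (samples[:, chain] == 1).any(axis=1)  # at least one qubit is +1
    print(np.bitwise_xor(all_ones, any_ones))        # [ True False]: broken only in row 0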
dwavesystems/dwave-system
dwave/embedding/chain_breaks.py
discard
def discard(samples, chains): """Discard broken chains. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: tuple: A 2-tuple containing: :obj:`numpy.ndarray`: An array of unembedded samples. Broken chains are discarded. The array has dtype 'int8'. :obj:`numpy.ndarray`: The indicies of the rows with unbroken chains. Examples: This example unembeds two samples that chains nodes 0 and 1 to represent a single source node. The first sample has an unbroken chain, the second a broken chain. >>> import dimod >>> import numpy as np ... >>> chains = [(0, 1), (2,)] >>> samples = np.array([[1, 1, 0], [1, 0, 0]], dtype=np.int8) >>> unembedded, idx = dwave.embedding.discard(samples, chains) >>> unembedded array([[1, 0]], dtype=int8) >>> idx array([0]) """ samples = np.asarray(samples) if samples.ndim != 2: raise ValueError("expected samples to be a numpy 2D array") num_samples, num_variables = samples.shape num_chains = len(chains) broken = broken_chains(samples, chains) unbroken_idxs, = np.where(~broken.any(axis=1)) chain_variables = np.fromiter((np.asarray(tuple(chain))[0] if isinstance(chain, set) else np.asarray(chain)[0] for chain in chains), count=num_chains, dtype=int) return samples[np.ix_(unbroken_idxs, chain_variables)], unbroken_idxs
python
def discard(samples, chains): """Discard broken chains. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: tuple: A 2-tuple containing: :obj:`numpy.ndarray`: An array of unembedded samples. Broken chains are discarded. The array has dtype 'int8'. :obj:`numpy.ndarray`: The indicies of the rows with unbroken chains. Examples: This example unembeds two samples that chains nodes 0 and 1 to represent a single source node. The first sample has an unbroken chain, the second a broken chain. >>> import dimod >>> import numpy as np ... >>> chains = [(0, 1), (2,)] >>> samples = np.array([[1, 1, 0], [1, 0, 0]], dtype=np.int8) >>> unembedded, idx = dwave.embedding.discard(samples, chains) >>> unembedded array([[1, 0]], dtype=int8) >>> idx array([0]) """ samples = np.asarray(samples) if samples.ndim != 2: raise ValueError("expected samples to be a numpy 2D array") num_samples, num_variables = samples.shape num_chains = len(chains) broken = broken_chains(samples, chains) unbroken_idxs, = np.where(~broken.any(axis=1)) chain_variables = np.fromiter((np.asarray(tuple(chain))[0] if isinstance(chain, set) else np.asarray(chain)[0] for chain in chains), count=num_chains, dtype=int) return samples[np.ix_(unbroken_idxs, chain_variables)], unbroken_idxs
[ "def", "discard", "(", "samples", ",", "chains", ")", ":", "samples", "=", "np", ".", "asarray", "(", "samples", ")", "if", "samples", ".", "ndim", "!=", "2", ":", "raise", "ValueError", "(", "\"expected samples to be a numpy 2D array\"", ")", "num_samples", ",", "num_variables", "=", "samples", ".", "shape", "num_chains", "=", "len", "(", "chains", ")", "broken", "=", "broken_chains", "(", "samples", ",", "chains", ")", "unbroken_idxs", ",", "=", "np", ".", "where", "(", "~", "broken", ".", "any", "(", "axis", "=", "1", ")", ")", "chain_variables", "=", "np", ".", "fromiter", "(", "(", "np", ".", "asarray", "(", "tuple", "(", "chain", ")", ")", "[", "0", "]", "if", "isinstance", "(", "chain", ",", "set", ")", "else", "np", ".", "asarray", "(", "chain", ")", "[", "0", "]", "for", "chain", "in", "chains", ")", ",", "count", "=", "num_chains", ",", "dtype", "=", "int", ")", "return", "samples", "[", "np", ".", "ix_", "(", "unbroken_idxs", ",", "chain_variables", ")", "]", ",", "unbroken_idxs" ]
Discard broken chains. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: tuple: A 2-tuple containing: :obj:`numpy.ndarray`: An array of unembedded samples. Broken chains are discarded. The array has dtype 'int8'. :obj:`numpy.ndarray`: The indices of the rows with unbroken chains. Examples: This example unembeds two samples that chains nodes 0 and 1 to represent a single source node. The first sample has an unbroken chain, the second a broken chain. >>> import dimod >>> import numpy as np ... >>> chains = [(0, 1), (2,)] >>> samples = np.array([[1, 1, 0], [1, 0, 0]], dtype=np.int8) >>> unembedded, idx = dwave.embedding.discard(samples, chains) >>> unembedded array([[1, 0]], dtype=int8) >>> idx array([0])
[ "Discard", "broken", "chains", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/chain_breaks.py#L91-L142
train
dwavesystems/dwave-system
dwave/embedding/chain_breaks.py
majority_vote
def majority_vote(samples, chains): """Use the most common element in broken chains. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: tuple: A 2-tuple containing: :obj:`numpy.ndarray`: A nS x nC array of unembedded samples. The array has dtype 'int8'. Where there is a chain break, the value is chosen to match the most common value in the chain. For broken chains without a majority, the value is chosen arbitrarily. :obj:`numpy.ndarray`: Equivalent to :code:`np.arange(nS)` because all samples are kept and no samples are added. Examples: This example unembeds samples from a target graph that chains nodes 0 and 1 to represent one source node and nodes 2, 3, and 4 to represent another. Both samples have one broken chain, with different majority values. >>> import dimod >>> import numpy as np ... >>> chains = [(0, 1), (2, 3, 4)] >>> samples = np.array([[1, 1, 0, 0, 1], [1, 1, 1, 0, 1]], dtype=np.int8) >>> unembedded, idx = dwave.embedding.majority_vote(samples, chains) >>> unembedded array([[1, 0], [1, 1]], dtype=int8) >>> idx array([0, 1]) """ samples = np.asarray(samples) if samples.ndim != 2: raise ValueError("expected samples to be a numpy 2D array") num_samples, num_variables = samples.shape num_chains = len(chains) unembedded = np.empty((num_samples, num_chains), dtype='int8', order='F') # determine if spin or binary. If samples are all 1, then either method works, so we use spin # because it is faster if samples.all(): # spin-valued for cidx, chain in enumerate(chains): # we just need the sign for spin. We don't use np.sign because in that can return 0 # and fixing the 0s is slow. unembedded[:, cidx] = 2*(samples[:, chain].sum(axis=1) >= 0) - 1 else: # binary-valued for cidx, chain in enumerate(chains): mid = len(chain) / 2 unembedded[:, cidx] = (samples[:, chain].sum(axis=1) >= mid) return unembedded, np.arange(num_samples)
python
def majority_vote(samples, chains): """Use the most common element in broken chains. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: tuple: A 2-tuple containing: :obj:`numpy.ndarray`: A nS x nC array of unembedded samples. The array has dtype 'int8'. Where there is a chain break, the value is chosen to match the most common value in the chain. For broken chains without a majority, the value is chosen arbitrarily. :obj:`numpy.ndarray`: Equivalent to :code:`np.arange(nS)` because all samples are kept and no samples are added. Examples: This example unembeds samples from a target graph that chains nodes 0 and 1 to represent one source node and nodes 2, 3, and 4 to represent another. Both samples have one broken chain, with different majority values. >>> import dimod >>> import numpy as np ... >>> chains = [(0, 1), (2, 3, 4)] >>> samples = np.array([[1, 1, 0, 0, 1], [1, 1, 1, 0, 1]], dtype=np.int8) >>> unembedded, idx = dwave.embedding.majority_vote(samples, chains) >>> unembedded array([[1, 0], [1, 1]], dtype=int8) >>> idx array([0, 1]) """ samples = np.asarray(samples) if samples.ndim != 2: raise ValueError("expected samples to be a numpy 2D array") num_samples, num_variables = samples.shape num_chains = len(chains) unembedded = np.empty((num_samples, num_chains), dtype='int8', order='F') # determine if spin or binary. If samples are all 1, then either method works, so we use spin # because it is faster if samples.all(): # spin-valued for cidx, chain in enumerate(chains): # we just need the sign for spin. We don't use np.sign because in that can return 0 # and fixing the 0s is slow. unembedded[:, cidx] = 2*(samples[:, chain].sum(axis=1) >= 0) - 1 else: # binary-valued for cidx, chain in enumerate(chains): mid = len(chain) / 2 unembedded[:, cidx] = (samples[:, chain].sum(axis=1) >= mid) return unembedded, np.arange(num_samples)
[ "def", "majority_vote", "(", "samples", ",", "chains", ")", ":", "samples", "=", "np", ".", "asarray", "(", "samples", ")", "if", "samples", ".", "ndim", "!=", "2", ":", "raise", "ValueError", "(", "\"expected samples to be a numpy 2D array\"", ")", "num_samples", ",", "num_variables", "=", "samples", ".", "shape", "num_chains", "=", "len", "(", "chains", ")", "unembedded", "=", "np", ".", "empty", "(", "(", "num_samples", ",", "num_chains", ")", ",", "dtype", "=", "'int8'", ",", "order", "=", "'F'", ")", "# determine if spin or binary. If samples are all 1, then either method works, so we use spin", "# because it is faster", "if", "samples", ".", "all", "(", ")", ":", "# spin-valued", "for", "cidx", ",", "chain", "in", "enumerate", "(", "chains", ")", ":", "# we just need the sign for spin. We don't use np.sign because in that can return 0", "# and fixing the 0s is slow.", "unembedded", "[", ":", ",", "cidx", "]", "=", "2", "*", "(", "samples", "[", ":", ",", "chain", "]", ".", "sum", "(", "axis", "=", "1", ")", ">=", "0", ")", "-", "1", "else", ":", "# binary-valued", "for", "cidx", ",", "chain", "in", "enumerate", "(", "chains", ")", ":", "mid", "=", "len", "(", "chain", ")", "/", "2", "unembedded", "[", ":", ",", "cidx", "]", "=", "(", "samples", "[", ":", ",", "chain", "]", ".", "sum", "(", "axis", "=", "1", ")", ">=", "mid", ")", "return", "unembedded", ",", "np", ".", "arange", "(", "num_samples", ")" ]
Use the most common element in broken chains. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: tuple: A 2-tuple containing: :obj:`numpy.ndarray`: A nS x nC array of unembedded samples. The array has dtype 'int8'. Where there is a chain break, the value is chosen to match the most common value in the chain. For broken chains without a majority, the value is chosen arbitrarily. :obj:`numpy.ndarray`: Equivalent to :code:`np.arange(nS)` because all samples are kept and no samples are added. Examples: This example unembeds samples from a target graph that chains nodes 0 and 1 to represent one source node and nodes 2, 3, and 4 to represent another. Both samples have one broken chain, with different majority values. >>> import dimod >>> import numpy as np ... >>> chains = [(0, 1), (2, 3, 4)] >>> samples = np.array([[1, 1, 0, 0, 1], [1, 1, 1, 0, 1]], dtype=np.int8) >>> unembedded, idx = dwave.embedding.majority_vote(samples, chains) >>> unembedded array([[1, 0], [1, 1]], dtype=int8) >>> idx array([0, 1])
[ "Use", "the", "most", "common", "element", "in", "broken", "chains", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/chain_breaks.py#L145-L206
train
dwavesystems/dwave-system
dwave/embedding/chain_breaks.py
weighted_random
def weighted_random(samples, chains): """Determine the sample values of chains by weighed random choice. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: tuple: A 2-tuple containing: :obj:`numpy.ndarray`: A nS x nC array of unembedded samples. The array has dtype 'int8'. Where there is a chain break, the value is chosen randomly, weighted by frequency of the chain's value. :obj:`numpy.ndarray`: Equivalent to :code:`np.arange(nS)` because all samples are kept and no samples are added. Examples: This example unembeds samples from a target graph that chains nodes 0 and 1 to represent one source node and nodes 2, 3, and 4 to represent another. The sample has broken chains for both source nodes. >>> import dimod >>> import numpy as np ... >>> chains = [(0, 1), (2, 3, 4)] >>> samples = np.array([[1, 0, 1, 0, 1]], dtype=np.int8) >>> unembedded, idx = dwave.embedding.weighted_random(samples, chains) # doctest: +SKIP >>> unembedded # doctest: +SKIP array([[1, 1]], dtype=int8) >>> idx # doctest: +SKIP array([0, 1]) """ samples = np.asarray(samples) if samples.ndim != 2: raise ValueError("expected samples to be a numpy 2D array") # it sufficies to choose a random index from each chain and use that to construct the matrix idx = [np.random.choice(chain) for chain in chains] num_samples, num_variables = samples.shape return samples[:, idx], np.arange(num_samples)
python
def weighted_random(samples, chains): """Determine the sample values of chains by weighed random choice. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: tuple: A 2-tuple containing: :obj:`numpy.ndarray`: A nS x nC array of unembedded samples. The array has dtype 'int8'. Where there is a chain break, the value is chosen randomly, weighted by frequency of the chain's value. :obj:`numpy.ndarray`: Equivalent to :code:`np.arange(nS)` because all samples are kept and no samples are added. Examples: This example unembeds samples from a target graph that chains nodes 0 and 1 to represent one source node and nodes 2, 3, and 4 to represent another. The sample has broken chains for both source nodes. >>> import dimod >>> import numpy as np ... >>> chains = [(0, 1), (2, 3, 4)] >>> samples = np.array([[1, 0, 1, 0, 1]], dtype=np.int8) >>> unembedded, idx = dwave.embedding.weighted_random(samples, chains) # doctest: +SKIP >>> unembedded # doctest: +SKIP array([[1, 1]], dtype=int8) >>> idx # doctest: +SKIP array([0, 1]) """ samples = np.asarray(samples) if samples.ndim != 2: raise ValueError("expected samples to be a numpy 2D array") # it sufficies to choose a random index from each chain and use that to construct the matrix idx = [np.random.choice(chain) for chain in chains] num_samples, num_variables = samples.shape return samples[:, idx], np.arange(num_samples)
[ "def", "weighted_random", "(", "samples", ",", "chains", ")", ":", "samples", "=", "np", ".", "asarray", "(", "samples", ")", "if", "samples", ".", "ndim", "!=", "2", ":", "raise", "ValueError", "(", "\"expected samples to be a numpy 2D array\"", ")", "# it sufficies to choose a random index from each chain and use that to construct the matrix", "idx", "=", "[", "np", ".", "random", ".", "choice", "(", "chain", ")", "for", "chain", "in", "chains", "]", "num_samples", ",", "num_variables", "=", "samples", ".", "shape", "return", "samples", "[", ":", ",", "idx", "]", ",", "np", ".", "arange", "(", "num_samples", ")" ]
Determine the sample values of chains by weighted random choice. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: tuple: A 2-tuple containing: :obj:`numpy.ndarray`: A nS x nC array of unembedded samples. The array has dtype 'int8'. Where there is a chain break, the value is chosen randomly, weighted by frequency of the chain's value. :obj:`numpy.ndarray`: Equivalent to :code:`np.arange(nS)` because all samples are kept and no samples are added. Examples: This example unembeds samples from a target graph that chains nodes 0 and 1 to represent one source node and nodes 2, 3, and 4 to represent another. The sample has broken chains for both source nodes. >>> import dimod >>> import numpy as np ... >>> chains = [(0, 1), (2, 3, 4)] >>> samples = np.array([[1, 0, 1, 0, 1]], dtype=np.int8) >>> unembedded, idx = dwave.embedding.weighted_random(samples, chains) # doctest: +SKIP >>> unembedded # doctest: +SKIP array([[1, 1]], dtype=int8) >>> idx # doctest: +SKIP array([0, 1])
[ "Determine", "the", "sample", "values", "of", "chains", "by", "weighted", "random", "choice", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/chain_breaks.py#L209-L256
train
dwavesystems/dwave-system
dwave/system/samplers/dwave_sampler.py
DWaveSampler.validate_anneal_schedule
def validate_anneal_schedule(self, anneal_schedule): """Raise an exception if the specified schedule is invalid for the sampler. Args: anneal_schedule (list): An anneal schedule variation is defined by a series of pairs of floating-point numbers identifying points in the schedule at which to change slope. The first element in the pair is time t in microseconds; the second, normalized persistent current s in the range [0,1]. The resulting schedule is the piecewise-linear curve that connects the provided points. Raises: ValueError: If the schedule violates any of the conditions listed below. RuntimeError: If the sampler does not accept the `anneal_schedule` parameter or if it does not have `annealing_time_range` or `max_anneal_schedule_points` properties. An anneal schedule must satisfy the following conditions: * Time t must increase for all points in the schedule. * For forward annealing, the first point must be (0,0) and the anneal fraction s must increase monotonically. * For reverse annealing, the anneal fraction s must start and end at s=1. * In the final point, anneal fraction s must equal 1 and time t must not exceed the maximum value in the `annealing_time_range` property. * The number of points must be >=2. * The upper bound is system-dependent; check the `max_anneal_schedule_points` property. For reverse annealing, the maximum number of points allowed is one more than the number given by this property. Examples: This example sets a quench schedule on a D-Wave system selected by the user's default :std:doc:`D-Wave Cloud Client configuration file <cloud-client:intro>`. >>> from dwave.system.samplers import DWaveSampler >>> sampler = DWaveSampler() >>> quench_schedule=[[0.0, 0.0], [12.0, 0.6], [12.8, 1.0]] >>> DWaveSampler().validate_anneal_schedule(quench_schedule) # doctest: +SKIP >>> """ if 'anneal_schedule' not in self.parameters: raise RuntimeError("anneal_schedule is not an accepted parameter for this sampler") properties = self.properties try: min_anneal_time, max_anneal_time = properties['annealing_time_range'] max_anneal_schedule_points = properties['max_anneal_schedule_points'] except KeyError: raise RuntimeError("annealing_time_range and max_anneal_schedule_points are not properties of this solver") # The number of points must be >= 2. # The upper bound is system-dependent; check the max_anneal_schedule_points property if not isinstance(anneal_schedule, list): raise TypeError("anneal_schedule should be a list") elif len(anneal_schedule) < 2 or len(anneal_schedule) > max_anneal_schedule_points: msg = ("anneal_schedule must contain between 2 and {} points (contains {})" ).format(max_anneal_schedule_points, len(anneal_schedule)) raise ValueError(msg) try: t_list, s_list = zip(*anneal_schedule) except ValueError: raise ValueError("anneal_schedule should be a list of 2-tuples") # Time t must increase for all points in the schedule. if not all(tail_t < lead_t for tail_t, lead_t in zip(t_list, t_list[1:])): raise ValueError("Time t must increase for all points in the schedule") # max t cannot exceed max_anneal_time if t_list[-1] > max_anneal_time: raise ValueError("schedule cannot be longer than the maximum anneal time of {}".format(max_anneal_time)) start_s, end_s = s_list[0], s_list[-1] if end_s != 1: raise ValueError("In the final point, anneal fraction s must equal 1.") if start_s == 1: # reverse annealing pass elif start_s == 0: # forward annealing, s must monotonically increase. 
if not all(tail_s <= lead_s for tail_s, lead_s in zip(s_list, s_list[1:])): raise ValueError("For forward anneals, anneal fraction s must monotonically increase") else: msg = ("In the first point, anneal fraction s must equal 0 for forward annealing or " "1 for reverse annealing") raise ValueError(msg) # finally check the slope abs(slope) < 1/min_anneal_time max_slope = 1.0 / min_anneal_time for (t0, s0), (t1, s1) in zip(anneal_schedule, anneal_schedule[1:]): if abs((s0 - s1) / (t0 - t1)) > max_slope: raise ValueError("the maximum slope cannot exceed {}".format(max_slope))
python
def validate_anneal_schedule(self, anneal_schedule): """Raise an exception if the specified schedule is invalid for the sampler. Args: anneal_schedule (list): An anneal schedule variation is defined by a series of pairs of floating-point numbers identifying points in the schedule at which to change slope. The first element in the pair is time t in microseconds; the second, normalized persistent current s in the range [0,1]. The resulting schedule is the piecewise-linear curve that connects the provided points. Raises: ValueError: If the schedule violates any of the conditions listed below. RuntimeError: If the sampler does not accept the `anneal_schedule` parameter or if it does not have `annealing_time_range` or `max_anneal_schedule_points` properties. An anneal schedule must satisfy the following conditions: * Time t must increase for all points in the schedule. * For forward annealing, the first point must be (0,0) and the anneal fraction s must increase monotonically. * For reverse annealing, the anneal fraction s must start and end at s=1. * In the final point, anneal fraction s must equal 1 and time t must not exceed the maximum value in the `annealing_time_range` property. * The number of points must be >=2. * The upper bound is system-dependent; check the `max_anneal_schedule_points` property. For reverse annealing, the maximum number of points allowed is one more than the number given by this property. Examples: This example sets a quench schedule on a D-Wave system selected by the user's default :std:doc:`D-Wave Cloud Client configuration file <cloud-client:intro>`. >>> from dwave.system.samplers import DWaveSampler >>> sampler = DWaveSampler() >>> quench_schedule=[[0.0, 0.0], [12.0, 0.6], [12.8, 1.0]] >>> DWaveSampler().validate_anneal_schedule(quench_schedule) # doctest: +SKIP >>> """ if 'anneal_schedule' not in self.parameters: raise RuntimeError("anneal_schedule is not an accepted parameter for this sampler") properties = self.properties try: min_anneal_time, max_anneal_time = properties['annealing_time_range'] max_anneal_schedule_points = properties['max_anneal_schedule_points'] except KeyError: raise RuntimeError("annealing_time_range and max_anneal_schedule_points are not properties of this solver") # The number of points must be >= 2. # The upper bound is system-dependent; check the max_anneal_schedule_points property if not isinstance(anneal_schedule, list): raise TypeError("anneal_schedule should be a list") elif len(anneal_schedule) < 2 or len(anneal_schedule) > max_anneal_schedule_points: msg = ("anneal_schedule must contain between 2 and {} points (contains {})" ).format(max_anneal_schedule_points, len(anneal_schedule)) raise ValueError(msg) try: t_list, s_list = zip(*anneal_schedule) except ValueError: raise ValueError("anneal_schedule should be a list of 2-tuples") # Time t must increase for all points in the schedule. if not all(tail_t < lead_t for tail_t, lead_t in zip(t_list, t_list[1:])): raise ValueError("Time t must increase for all points in the schedule") # max t cannot exceed max_anneal_time if t_list[-1] > max_anneal_time: raise ValueError("schedule cannot be longer than the maximum anneal time of {}".format(max_anneal_time)) start_s, end_s = s_list[0], s_list[-1] if end_s != 1: raise ValueError("In the final point, anneal fraction s must equal 1.") if start_s == 1: # reverse annealing pass elif start_s == 0: # forward annealing, s must monotonically increase. 
if not all(tail_s <= lead_s for tail_s, lead_s in zip(s_list, s_list[1:])): raise ValueError("For forward anneals, anneal fraction s must monotonically increase") else: msg = ("In the first point, anneal fraction s must equal 0 for forward annealing or " "1 for reverse annealing") raise ValueError(msg) # finally check the slope abs(slope) < 1/min_anneal_time max_slope = 1.0 / min_anneal_time for (t0, s0), (t1, s1) in zip(anneal_schedule, anneal_schedule[1:]): if abs((s0 - s1) / (t0 - t1)) > max_slope: raise ValueError("the maximum slope cannot exceed {}".format(max_slope))
[ "def", "validate_anneal_schedule", "(", "self", ",", "anneal_schedule", ")", ":", "if", "'anneal_schedule'", "not", "in", "self", ".", "parameters", ":", "raise", "RuntimeError", "(", "\"anneal_schedule is not an accepted parameter for this sampler\"", ")", "properties", "=", "self", ".", "properties", "try", ":", "min_anneal_time", ",", "max_anneal_time", "=", "properties", "[", "'annealing_time_range'", "]", "max_anneal_schedule_points", "=", "properties", "[", "'max_anneal_schedule_points'", "]", "except", "KeyError", ":", "raise", "RuntimeError", "(", "\"annealing_time_range and max_anneal_schedule_points are not properties of this solver\"", ")", "# The number of points must be >= 2.", "# The upper bound is system-dependent; check the max_anneal_schedule_points property", "if", "not", "isinstance", "(", "anneal_schedule", ",", "list", ")", ":", "raise", "TypeError", "(", "\"anneal_schedule should be a list\"", ")", "elif", "len", "(", "anneal_schedule", ")", "<", "2", "or", "len", "(", "anneal_schedule", ")", ">", "max_anneal_schedule_points", ":", "msg", "=", "(", "\"anneal_schedule must contain between 2 and {} points (contains {})\"", ")", ".", "format", "(", "max_anneal_schedule_points", ",", "len", "(", "anneal_schedule", ")", ")", "raise", "ValueError", "(", "msg", ")", "try", ":", "t_list", ",", "s_list", "=", "zip", "(", "*", "anneal_schedule", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "\"anneal_schedule should be a list of 2-tuples\"", ")", "# Time t must increase for all points in the schedule.", "if", "not", "all", "(", "tail_t", "<", "lead_t", "for", "tail_t", ",", "lead_t", "in", "zip", "(", "t_list", ",", "t_list", "[", "1", ":", "]", ")", ")", ":", "raise", "ValueError", "(", "\"Time t must increase for all points in the schedule\"", ")", "# max t cannot exceed max_anneal_time", "if", "t_list", "[", "-", "1", "]", ">", "max_anneal_time", ":", "raise", "ValueError", "(", "\"schedule cannot be longer than the maximum anneal time of {}\"", ".", "format", "(", "max_anneal_time", ")", ")", "start_s", ",", "end_s", "=", "s_list", "[", "0", "]", ",", "s_list", "[", "-", "1", "]", "if", "end_s", "!=", "1", ":", "raise", "ValueError", "(", "\"In the final point, anneal fraction s must equal 1.\"", ")", "if", "start_s", "==", "1", ":", "# reverse annealing", "pass", "elif", "start_s", "==", "0", ":", "# forward annealing, s must monotonically increase.", "if", "not", "all", "(", "tail_s", "<=", "lead_s", "for", "tail_s", ",", "lead_s", "in", "zip", "(", "s_list", ",", "s_list", "[", "1", ":", "]", ")", ")", ":", "raise", "ValueError", "(", "\"For forward anneals, anneal fraction s must monotonically increase\"", ")", "else", ":", "msg", "=", "(", "\"In the first point, anneal fraction s must equal 0 for forward annealing or \"", "\"1 for reverse annealing\"", ")", "raise", "ValueError", "(", "msg", ")", "# finally check the slope abs(slope) < 1/min_anneal_time", "max_slope", "=", "1.0", "/", "min_anneal_time", "for", "(", "t0", ",", "s0", ")", ",", "(", "t1", ",", "s1", ")", "in", "zip", "(", "anneal_schedule", ",", "anneal_schedule", "[", "1", ":", "]", ")", ":", "if", "abs", "(", "(", "s0", "-", "s1", ")", "/", "(", "t0", "-", "t1", ")", ")", ">", "max_slope", ":", "raise", "ValueError", "(", "\"the maximum slope cannot exceed {}\"", ".", "format", "(", "max_slope", ")", ")" ]
Raise an exception if the specified schedule is invalid for the sampler. Args: anneal_schedule (list): An anneal schedule variation is defined by a series of pairs of floating-point numbers identifying points in the schedule at which to change slope. The first element in the pair is time t in microseconds; the second, normalized persistent current s in the range [0,1]. The resulting schedule is the piecewise-linear curve that connects the provided points. Raises: ValueError: If the schedule violates any of the conditions listed below. RuntimeError: If the sampler does not accept the `anneal_schedule` parameter or if it does not have `annealing_time_range` or `max_anneal_schedule_points` properties. An anneal schedule must satisfy the following conditions: * Time t must increase for all points in the schedule. * For forward annealing, the first point must be (0,0) and the anneal fraction s must increase monotonically. * For reverse annealing, the anneal fraction s must start and end at s=1. * In the final point, anneal fraction s must equal 1 and time t must not exceed the maximum value in the `annealing_time_range` property. * The number of points must be >=2. * The upper bound is system-dependent; check the `max_anneal_schedule_points` property. For reverse annealing, the maximum number of points allowed is one more than the number given by this property. Examples: This example sets a quench schedule on a D-Wave system selected by the user's default :std:doc:`D-Wave Cloud Client configuration file <cloud-client:intro>`. >>> from dwave.system.samplers import DWaveSampler >>> sampler = DWaveSampler() >>> quench_schedule=[[0.0, 0.0], [12.0, 0.6], [12.8, 1.0]] >>> DWaveSampler().validate_anneal_schedule(quench_schedule) # doctest: +SKIP >>>
[ "Raise", "an", "exception", "if", "the", "specified", "schedule", "is", "invalid", "for", "the", "sampler", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/samplers/dwave_sampler.py#L318-L412
train
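A short usage sketch for the schedule validation documented above. It assumes a configured D-Wave Cloud Client profile with solver access (DWaveSampler() fails without one), and the two schedules are illustrative values only, not recommendations.

from dwave.system.samplers import DWaveSampler

sampler = DWaveSampler()  # needs a configured cloud-client profile and solver access

# Forward anneal with a quench: starts at (0, 0), s rises monotonically, ends at s = 1.
quench = [[0.0, 0.0], [12.0, 0.6], [12.8, 1.0]]
sampler.validate_anneal_schedule(quench)  # returns None if valid, raises otherwise

# Reverse anneal: the fraction s starts and ends at 1.
reverse = [[0.0, 1.0], [5.0, 0.45], [10.0, 1.0]]
sampler.validate_anneal_schedule(reverse)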
dwavesystems/dwave-system
dwave/embedding/utils.py
target_to_source
def target_to_source(target_adjacency, embedding): """Derive the source adjacency from an embedding and target adjacency. Args: target_adjacency (dict/:class:`networkx.Graph`): A dict of the form {v: Nv, ...} where v is a node in the target graph and Nv is the neighbors of v as an iterable. This can also be a networkx graph. embedding (dict): A mapping from a source graph to a target graph. Returns: dict: The adjacency of the source graph. Raises: ValueError: If any node in the target_adjacency is assigned more than one node in the source graph by embedding. Examples: >>> target_adjacency = {0: {1, 3}, 1: {0, 2}, 2: {1, 3}, 3: {0, 2}} # a square graph >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> source_adjacency = dimod.embedding.target_to_source(target_adjacency, embedding) >>> source_adjacency # triangle {'a': {'b', 'c'}, 'b': {'a', 'c'}, 'c': {'a', 'b'}} This function also works with networkx graphs. >>> import networkx as nx >>> target_graph = nx.complete_graph(5) >>> embedding = {'a': {0, 1, 2}, 'b': {3, 4}} >>> dimod.embedding.target_to_source(target_graph, embedding) """ # the nodes in the source adjacency are just the keys of the embedding source_adjacency = {v: set() for v in embedding} # we need the mapping from each node in the target to its source node reverse_embedding = {} for v, chain in iteritems(embedding): for u in chain: if u in reverse_embedding: raise ValueError("target node {} assigned to more than one source node".format(u)) reverse_embedding[u] = v # v is node in target, n node in source for v, n in iteritems(reverse_embedding): neighbors = target_adjacency[v] # u is node in target for u in neighbors: # some nodes might not be assigned to chains if u not in reverse_embedding: continue # m is node in source m = reverse_embedding[u] if m == n: continue source_adjacency[n].add(m) source_adjacency[m].add(n) return source_adjacency
python
def target_to_source(target_adjacency, embedding): """Derive the source adjacency from an embedding and target adjacency. Args: target_adjacency (dict/:class:`networkx.Graph`): A dict of the form {v: Nv, ...} where v is a node in the target graph and Nv is the neighbors of v as an iterable. This can also be a networkx graph. embedding (dict): A mapping from a source graph to a target graph. Returns: dict: The adjacency of the source graph. Raises: ValueError: If any node in the target_adjacency is assigned more than one node in the source graph by embedding. Examples: >>> target_adjacency = {0: {1, 3}, 1: {0, 2}, 2: {1, 3}, 3: {0, 2}} # a square graph >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> source_adjacency = dimod.embedding.target_to_source(target_adjacency, embedding) >>> source_adjacency # triangle {'a': {'b', 'c'}, 'b': {'a', 'c'}, 'c': {'a', 'b'}} This function also works with networkx graphs. >>> import networkx as nx >>> target_graph = nx.complete_graph(5) >>> embedding = {'a': {0, 1, 2}, 'b': {3, 4}} >>> dimod.embedding.target_to_source(target_graph, embedding) """ # the nodes in the source adjacency are just the keys of the embedding source_adjacency = {v: set() for v in embedding} # we need the mapping from each node in the target to its source node reverse_embedding = {} for v, chain in iteritems(embedding): for u in chain: if u in reverse_embedding: raise ValueError("target node {} assigned to more than one source node".format(u)) reverse_embedding[u] = v # v is node in target, n node in source for v, n in iteritems(reverse_embedding): neighbors = target_adjacency[v] # u is node in target for u in neighbors: # some nodes might not be assigned to chains if u not in reverse_embedding: continue # m is node in source m = reverse_embedding[u] if m == n: continue source_adjacency[n].add(m) source_adjacency[m].add(n) return source_adjacency
[ "def", "target_to_source", "(", "target_adjacency", ",", "embedding", ")", ":", "# the nodes in the source adjacency are just the keys of the embedding", "source_adjacency", "=", "{", "v", ":", "set", "(", ")", "for", "v", "in", "embedding", "}", "# we need the mapping from each node in the target to its source node", "reverse_embedding", "=", "{", "}", "for", "v", ",", "chain", "in", "iteritems", "(", "embedding", ")", ":", "for", "u", "in", "chain", ":", "if", "u", "in", "reverse_embedding", ":", "raise", "ValueError", "(", "\"target node {} assigned to more than one source node\"", ".", "format", "(", "u", ")", ")", "reverse_embedding", "[", "u", "]", "=", "v", "# v is node in target, n node in source", "for", "v", ",", "n", "in", "iteritems", "(", "reverse_embedding", ")", ":", "neighbors", "=", "target_adjacency", "[", "v", "]", "# u is node in target", "for", "u", "in", "neighbors", ":", "# some nodes might not be assigned to chains", "if", "u", "not", "in", "reverse_embedding", ":", "continue", "# m is node in source", "m", "=", "reverse_embedding", "[", "u", "]", "if", "m", "==", "n", ":", "continue", "source_adjacency", "[", "n", "]", ".", "add", "(", "m", ")", "source_adjacency", "[", "m", "]", ".", "add", "(", "n", ")", "return", "source_adjacency" ]
Derive the source adjacency from an embedding and target adjacency. Args: target_adjacency (dict/:class:`networkx.Graph`): A dict of the form {v: Nv, ...} where v is a node in the target graph and Nv is the neighbors of v as an iterable. This can also be a networkx graph. embedding (dict): A mapping from a source graph to a target graph. Returns: dict: The adjacency of the source graph. Raises: ValueError: If any node in the target_adjacency is assigned more than one node in the source graph by embedding. Examples: >>> target_adjacency = {0: {1, 3}, 1: {0, 2}, 2: {1, 3}, 3: {0, 2}} # a square graph >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> source_adjacency = dimod.embedding.target_to_source(target_adjacency, embedding) >>> source_adjacency # triangle {'a': {'b', 'c'}, 'b': {'a', 'c'}, 'c': {'a', 'b'}} This function also works with networkx graphs. >>> import networkx as nx >>> target_graph = nx.complete_graph(5) >>> embedding = {'a': {0, 1, 2}, 'b': {3, 4}} >>> dimod.embedding.target_to_source(target_graph, embedding)
[ "Derive", "the", "source", "adjacency", "from", "an", "embedding", "and", "target", "adjacency", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/utils.py#L30-L95
train
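A minimal sketch of the adjacency derivation above; it only manipulates dicts, so no solver is needed. The import path follows the module path listed in this record (dwave/embedding/utils.py).

from dwave.embedding.utils import target_to_source

target_adjacency = {0: {1, 3}, 1: {0, 2}, 2: {1, 3}, 3: {0, 2}}  # a 4-cycle
embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}}                    # 'c' is a two-node chain

print(target_to_source(target_adjacency, embedding))
# {'a': {'b', 'c'}, 'b': {'a', 'c'}, 'c': {'a', 'b'}} -- the source graph is a triangle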
dwavesystems/dwave-system
dwave/embedding/utils.py
chain_to_quadratic
def chain_to_quadratic(chain, target_adjacency, chain_strength): """Determine the quadratic biases that induce the given chain. Args: chain (iterable): The variables that make up a chain. target_adjacency (dict/:class:`networkx.Graph`): Should be a dict of the form {s: Ns, ...} where s is a variable in the target graph and Ns is the set of neighbours of s. chain_strength (float): The magnitude of the quadratic bias that should be used to create chains. Returns: dict[edge, float]: The quadratic biases that induce the given chain. Raises: ValueError: If the variables in chain do not form a connected subgraph of target. Examples: >>> chain = {1, 2} >>> target_adjacency = {0: {1, 2}, 1: {0, 2}, 2: {0, 1}} >>> dimod.embedding.chain_to_quadratic(chain, target_adjacency, 1) {(1, 2): -1} """ quadratic = {} # we will be adding the edges that make the chain here # do a breadth first search seen = set() try: next_level = {next(iter(chain))} except StopIteration: raise ValueError("chain must have at least one variable") while next_level: this_level = next_level next_level = set() for v in this_level: if v not in seen: seen.add(v) for u in target_adjacency[v]: if u not in chain: continue next_level.add(u) if u != v and (u, v) not in quadratic: quadratic[(v, u)] = -chain_strength if len(chain) != len(seen): raise ValueError('{} is not a connected chain'.format(chain)) return quadratic
python
def chain_to_quadratic(chain, target_adjacency, chain_strength): """Determine the quadratic biases that induce the given chain. Args: chain (iterable): The variables that make up a chain. target_adjacency (dict/:class:`networkx.Graph`): Should be a dict of the form {s: Ns, ...} where s is a variable in the target graph and Ns is the set of neighbours of s. chain_strength (float): The magnitude of the quadratic bias that should be used to create chains. Returns: dict[edge, float]: The quadratic biases that induce the given chain. Raises: ValueError: If the variables in chain do not form a connected subgraph of target. Examples: >>> chain = {1, 2} >>> target_adjacency = {0: {1, 2}, 1: {0, 2}, 2: {0, 1}} >>> dimod.embedding.chain_to_quadratic(chain, target_adjacency, 1) {(1, 2): -1} """ quadratic = {} # we will be adding the edges that make the chain here # do a breadth first search seen = set() try: next_level = {next(iter(chain))} except StopIteration: raise ValueError("chain must have at least one variable") while next_level: this_level = next_level next_level = set() for v in this_level: if v not in seen: seen.add(v) for u in target_adjacency[v]: if u not in chain: continue next_level.add(u) if u != v and (u, v) not in quadratic: quadratic[(v, u)] = -chain_strength if len(chain) != len(seen): raise ValueError('{} is not a connected chain'.format(chain)) return quadratic
[ "def", "chain_to_quadratic", "(", "chain", ",", "target_adjacency", ",", "chain_strength", ")", ":", "quadratic", "=", "{", "}", "# we will be adding the edges that make the chain here", "# do a breadth first search", "seen", "=", "set", "(", ")", "try", ":", "next_level", "=", "{", "next", "(", "iter", "(", "chain", ")", ")", "}", "except", "StopIteration", ":", "raise", "ValueError", "(", "\"chain must have at least one variable\"", ")", "while", "next_level", ":", "this_level", "=", "next_level", "next_level", "=", "set", "(", ")", "for", "v", "in", "this_level", ":", "if", "v", "not", "in", "seen", ":", "seen", ".", "add", "(", "v", ")", "for", "u", "in", "target_adjacency", "[", "v", "]", ":", "if", "u", "not", "in", "chain", ":", "continue", "next_level", ".", "add", "(", "u", ")", "if", "u", "!=", "v", "and", "(", "u", ",", "v", ")", "not", "in", "quadratic", ":", "quadratic", "[", "(", "v", ",", "u", ")", "]", "=", "-", "chain_strength", "if", "len", "(", "chain", ")", "!=", "len", "(", "seen", ")", ":", "raise", "ValueError", "(", "'{} is not a connected chain'", ".", "format", "(", "chain", ")", ")", "return", "quadratic" ]
Determine the quadratic biases that induce the given chain. Args: chain (iterable): The variables that make up a chain. target_adjacency (dict/:class:`networkx.Graph`): Should be a dict of the form {s: Ns, ...} where s is a variable in the target graph and Ns is the set of neighbours of s. chain_strength (float): The magnitude of the quadratic bias that should be used to create chains. Returns: dict[edge, float]: The quadratic biases that induce the given chain. Raises: ValueError: If the variables in chain do not form a connected subgraph of target. Examples: >>> chain = {1, 2} >>> target_adjacency = {0: {1, 2}, 1: {0, 2}, 2: {0, 1}} >>> dimod.embedding.chain_to_quadratic(chain, target_adjacency, 1) {(1, 2): -1}
[ "Determine", "the", "quadratic", "biases", "that", "induce", "the", "given", "chain", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/utils.py#L98-L150
train
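A minimal sketch of chain_to_quadratic on a three-node target graph, mirroring the docstring example; the orientation of the returned edge key ((1, 2) versus (2, 1)) depends on set iteration order.

from dwave.embedding.utils import chain_to_quadratic

chain = {1, 2}
target_adjacency = {0: {1, 2}, 1: {0, 2}, 2: {0, 1}}  # a triangle

print(chain_to_quadratic(chain, target_adjacency, 1.0))  # {(1, 2): -1.0}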
dwavesystems/dwave-system
dwave/embedding/utils.py
chain_break_frequency
def chain_break_frequency(samples_like, embedding): """Determine the frequency of chain breaks in the given samples. Args: samples_like (samples_like/:obj:`dimod.SampleSet`): A collection of raw samples. 'samples_like' is an extension of NumPy's array_like. See :func:`dimod.as_samples`. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source-model variable and t is a target-model variable. Returns: dict: Frequency of chain breaks as a dict in the form {s: f, ...}, where s is a variable in the source graph, and frequency, a float, is the fraction of broken chains. Examples: This example embeds a single source node, 'a', as a chain of two target nodes (0, 1) and uses :func:`.chain_break_frequency` to show that out of two synthetic samples, one ([-1, +1]) represents a broken chain. >>> import dimod >>> import numpy as np >>> samples = np.array([[-1, +1], [+1, +1]]) >>> embedding = {'a': {0, 1}} >>> print(dimod.chain_break_frequency(samples, embedding)['a']) 0.5 This example embeds a single source node (0) as a chain of two target nodes (a, b) and uses :func:`.chain_break_frequency` to show that out of two samples in a dimod response, one ({'a': 1, 'b': 0}) represents a broken chain. >>> import dimod ... >>> response = dimod.SampleSet.from_samples([{'a': 1, 'b': 0}, {'a': 0, 'b': 0}], ... {'energy': [1, 0]}, {}, dimod.BINARY) >>> embedding = {0: {'a', 'b'}} >>> print(dimod.chain_break_frequency(response, embedding)[0]) 0.5 """ if isinstance(samples_like, dimod.SampleSet): labels = samples_like.variables samples = samples_like.record.sample num_occurrences = samples_like.record.num_occurrences else: samples, labels = dimod.as_samples(samples_like) num_occurrences = np.ones(samples.shape[0]) if not all(v == idx for idx, v in enumerate(labels)): labels_to_idx = {v: idx for idx, v in enumerate(labels)} embedding = {v: {labels_to_idx[u] for u in chain} for v, chain in embedding.items()} if not embedding: return {} variables, chains = zip(*embedding.items()) broken = broken_chains(samples, chains) return {v: float(np.average(broken[:, cidx], weights=num_occurrences)) for cidx, v in enumerate(variables)}
python
def chain_break_frequency(samples_like, embedding): """Determine the frequency of chain breaks in the given samples. Args: samples_like (samples_like/:obj:`dimod.SampleSet`): A collection of raw samples. 'samples_like' is an extension of NumPy's array_like. See :func:`dimod.as_samples`. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source-model variable and t is a target-model variable. Returns: dict: Frequency of chain breaks as a dict in the form {s: f, ...}, where s is a variable in the source graph, and frequency, a float, is the fraction of broken chains. Examples: This example embeds a single source node, 'a', as a chain of two target nodes (0, 1) and uses :func:`.chain_break_frequency` to show that out of two synthetic samples, one ([-1, +1]) represents a broken chain. >>> import dimod >>> import numpy as np >>> samples = np.array([[-1, +1], [+1, +1]]) >>> embedding = {'a': {0, 1}} >>> print(dimod.chain_break_frequency(samples, embedding)['a']) 0.5 This example embeds a single source node (0) as a chain of two target nodes (a, b) and uses :func:`.chain_break_frequency` to show that out of two samples in a dimod response, one ({'a': 1, 'b': 0}) represents a broken chain. >>> import dimod ... >>> response = dimod.SampleSet.from_samples([{'a': 1, 'b': 0}, {'a': 0, 'b': 0}], ... {'energy': [1, 0]}, {}, dimod.BINARY) >>> embedding = {0: {'a', 'b'}} >>> print(dimod.chain_break_frequency(response, embedding)[0]) 0.5 """ if isinstance(samples_like, dimod.SampleSet): labels = samples_like.variables samples = samples_like.record.sample num_occurrences = samples_like.record.num_occurrences else: samples, labels = dimod.as_samples(samples_like) num_occurrences = np.ones(samples.shape[0]) if not all(v == idx for idx, v in enumerate(labels)): labels_to_idx = {v: idx for idx, v in enumerate(labels)} embedding = {v: {labels_to_idx[u] for u in chain} for v, chain in embedding.items()} if not embedding: return {} variables, chains = zip(*embedding.items()) broken = broken_chains(samples, chains) return {v: float(np.average(broken[:, cidx], weights=num_occurrences)) for cidx, v in enumerate(variables)}
[ "def", "chain_break_frequency", "(", "samples_like", ",", "embedding", ")", ":", "if", "isinstance", "(", "samples_like", ",", "dimod", ".", "SampleSet", ")", ":", "labels", "=", "samples_like", ".", "variables", "samples", "=", "samples_like", ".", "record", ".", "sample", "num_occurrences", "=", "samples_like", ".", "record", ".", "num_occurrences", "else", ":", "samples", ",", "labels", "=", "dimod", ".", "as_samples", "(", "samples_like", ")", "num_occurrences", "=", "np", ".", "ones", "(", "samples", ".", "shape", "[", "0", "]", ")", "if", "not", "all", "(", "v", "==", "idx", "for", "idx", ",", "v", "in", "enumerate", "(", "labels", ")", ")", ":", "labels_to_idx", "=", "{", "v", ":", "idx", "for", "idx", ",", "v", "in", "enumerate", "(", "labels", ")", "}", "embedding", "=", "{", "v", ":", "{", "labels_to_idx", "[", "u", "]", "for", "u", "in", "chain", "}", "for", "v", ",", "chain", "in", "embedding", ".", "items", "(", ")", "}", "if", "not", "embedding", ":", "return", "{", "}", "variables", ",", "chains", "=", "zip", "(", "*", "embedding", ".", "items", "(", ")", ")", "broken", "=", "broken_chains", "(", "samples", ",", "chains", ")", "return", "{", "v", ":", "float", "(", "np", ".", "average", "(", "broken", "[", ":", ",", "cidx", "]", ",", "weights", "=", "num_occurrences", ")", ")", "for", "cidx", ",", "v", "in", "enumerate", "(", "variables", ")", "}" ]
Determine the frequency of chain breaks in the given samples. Args: samples_like (samples_like/:obj:`dimod.SampleSet`): A collection of raw samples. 'samples_like' is an extension of NumPy's array_like. See :func:`dimod.as_samples`. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source-model variable and t is a target-model variable. Returns: dict: Frequency of chain breaks as a dict in the form {s: f, ...}, where s is a variable in the source graph, and frequency, a float, is the fraction of broken chains. Examples: This example embeds a single source node, 'a', as a chain of two target nodes (0, 1) and uses :func:`.chain_break_frequency` to show that out of two synthetic samples, one ([-1, +1]) represents a broken chain. >>> import dimod >>> import numpy as np >>> samples = np.array([[-1, +1], [+1, +1]]) >>> embedding = {'a': {0, 1}} >>> print(dimod.chain_break_frequency(samples, embedding)['a']) 0.5 This example embeds a single source node (0) as a chain of two target nodes (a, b) and uses :func:`.chain_break_frequency` to show that out of two samples in a dimod response, one ({'a': 1, 'b': 0}) represents a broken chain. >>> import dimod ... >>> response = dimod.SampleSet.from_samples([{'a': 1, 'b': 0}, {'a': 0, 'b': 0}], ... {'energy': [1, 0]}, {}, dimod.BINARY) >>> embedding = {0: {'a', 'b'}} >>> print(dimod.chain_break_frequency(response, embedding)[0]) 0.5
[ "Determine", "the", "frequency", "of", "chain", "breaks", "in", "the", "given", "samples", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/utils.py#L153-L216
train
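A small sketch of the chain-break statistic above, using a plain NumPy array of spin samples (numpy and dimod must be installed; no solver is needed).

import numpy as np
from dwave.embedding.utils import chain_break_frequency

samples = np.array([[-1, +1], [+1, +1]])  # two spin samples over target variables 0 and 1
embedding = {'a': {0, 1}}                 # source variable 'a' embeds as the chain {0, 1}

print(chain_break_frequency(samples, embedding))  # {'a': 0.5} -- one of the two chains is broken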
dwavesystems/dwave-system
dwave/embedding/utils.py
edgelist_to_adjacency
def edgelist_to_adjacency(edgelist): """Converts an iterator of edges to an adjacency dict. Args: edgelist (iterable): An iterator over 2-tuples where each 2-tuple is an edge. Returns: dict: The adjacency dict. A dict of the form {v: Nv, ...} where v is a node in a graph and Nv is the neighbors of v as a set. """ adjacency = dict() for u, v in edgelist: if u in adjacency: adjacency[u].add(v) else: adjacency[u] = {v} if v in adjacency: adjacency[v].add(u) else: adjacency[v] = {u} return adjacency
python
def edgelist_to_adjacency(edgelist): """Converts an iterator of edges to an adjacency dict. Args: edgelist (iterable): An iterator over 2-tuples where each 2-tuple is an edge. Returns: dict: The adjacency dict. A dict of the form {v: Nv, ...} where v is a node in a graph and Nv is the neighbors of v as a set. """ adjacency = dict() for u, v in edgelist: if u in adjacency: adjacency[u].add(v) else: adjacency[u] = {v} if v in adjacency: adjacency[v].add(u) else: adjacency[v] = {u} return adjacency
[ "def", "edgelist_to_adjacency", "(", "edgelist", ")", ":", "adjacency", "=", "dict", "(", ")", "for", "u", ",", "v", "in", "edgelist", ":", "if", "u", "in", "adjacency", ":", "adjacency", "[", "u", "]", ".", "add", "(", "v", ")", "else", ":", "adjacency", "[", "u", "]", "=", "{", "v", "}", "if", "v", "in", "adjacency", ":", "adjacency", "[", "v", "]", ".", "add", "(", "u", ")", "else", ":", "adjacency", "[", "v", "]", "=", "{", "u", "}", "return", "adjacency" ]
Converts an iterator of edges to an adjacency dict. Args: edgelist (iterable): An iterator over 2-tuples where each 2-tuple is an edge. Returns: dict: The adjacency dict. A dict of the form {v: Nv, ...} where v is a node in a graph and Nv is the neighbors of v as a set.
[ "Converts", "an", "iterator", "of", "edges", "to", "an", "adjacency", "dict", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/utils.py#L219-L241
train
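A minimal sketch of the edge-list conversion above; it is a pure function over Python builtins.

from dwave.embedding.utils import edgelist_to_adjacency

print(edgelist_to_adjacency([(0, 1), (1, 2)]))  # {0: {1}, 1: {0, 2}, 2: {1}}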
dwavesystems/dwave-system
dwave/system/composites/tiling.py
TilingComposite.sample
def sample(self, bqm, **kwargs): """Sample from the specified binary quadratic model. Args: bqm (:obj:`dimod.BinaryQuadraticModel`): Binary quadratic model to be sampled from. **kwargs: Optional keyword arguments for the sampling method, specified per solver. Returns: :class:`dimod.SampleSet` Examples: This example submits a simple Ising problem of just two variables on a D-Wave system selected by the user's default :std:doc:`D-Wave Cloud Client configuration file <cloud-client:intro>`. Because the problem fits in a single :term:`Chimera` unit cell, it is tiled across the solver's entire Chimera graph, resulting in multiple samples (the exact number depends on the working Chimera graph of the D-Wave system). >>> from dwave.system.samplers import DWaveSampler >>> from dwave.system.composites import EmbeddingComposite >>> from dwave.system.composites import EmbeddingComposite, TilingComposite ... >>> sampler = EmbeddingComposite(TilingComposite(DWaveSampler(), 1, 1, 4)) >>> response = sampler.sample_ising({},{('a', 'b'): 1}) >>> len(response) # doctest: +SKIP 246 See `Ocean Glossary <https://docs.ocean.dwavesys.com/en/latest/glossary.html>`_ for explanations of technical terms in descriptions of Ocean tools. """ # apply the embeddings to the given problem to tile it across the child sampler embedded_bqm = dimod.BinaryQuadraticModel.empty(bqm.vartype) __, __, target_adjacency = self.child.structure for embedding in self.embeddings: embedded_bqm.update(dwave.embedding.embed_bqm(bqm, embedding, target_adjacency)) # solve the problem on the child system tiled_response = self.child.sample(embedded_bqm, **kwargs) responses = [] for embedding in self.embeddings: embedding = {v: chain for v, chain in embedding.items() if v in bqm.variables} responses.append(dwave.embedding.unembed_sampleset(tiled_response, embedding, bqm)) return dimod.concatenate(responses)
python
def sample(self, bqm, **kwargs): """Sample from the specified binary quadratic model. Args: bqm (:obj:`dimod.BinaryQuadraticModel`): Binary quadratic model to be sampled from. **kwargs: Optional keyword arguments for the sampling method, specified per solver. Returns: :class:`dimod.SampleSet` Examples: This example submits a simple Ising problem of just two variables on a D-Wave system selected by the user's default :std:doc:`D-Wave Cloud Client configuration file <cloud-client:intro>`. Because the problem fits in a single :term:`Chimera` unit cell, it is tiled across the solver's entire Chimera graph, resulting in multiple samples (the exact number depends on the working Chimera graph of the D-Wave system). >>> from dwave.system.samplers import DWaveSampler >>> from dwave.system.composites import EmbeddingComposite >>> from dwave.system.composites import EmbeddingComposite, TilingComposite ... >>> sampler = EmbeddingComposite(TilingComposite(DWaveSampler(), 1, 1, 4)) >>> response = sampler.sample_ising({},{('a', 'b'): 1}) >>> len(response) # doctest: +SKIP 246 See `Ocean Glossary <https://docs.ocean.dwavesys.com/en/latest/glossary.html>`_ for explanations of technical terms in descriptions of Ocean tools. """ # apply the embeddings to the given problem to tile it across the child sampler embedded_bqm = dimod.BinaryQuadraticModel.empty(bqm.vartype) __, __, target_adjacency = self.child.structure for embedding in self.embeddings: embedded_bqm.update(dwave.embedding.embed_bqm(bqm, embedding, target_adjacency)) # solve the problem on the child system tiled_response = self.child.sample(embedded_bqm, **kwargs) responses = [] for embedding in self.embeddings: embedding = {v: chain for v, chain in embedding.items() if v in bqm.variables} responses.append(dwave.embedding.unembed_sampleset(tiled_response, embedding, bqm)) return dimod.concatenate(responses)
[ "def", "sample", "(", "self", ",", "bqm", ",", "*", "*", "kwargs", ")", ":", "# apply the embeddings to the given problem to tile it across the child sampler", "embedded_bqm", "=", "dimod", ".", "BinaryQuadraticModel", ".", "empty", "(", "bqm", ".", "vartype", ")", "__", ",", "__", ",", "target_adjacency", "=", "self", ".", "child", ".", "structure", "for", "embedding", "in", "self", ".", "embeddings", ":", "embedded_bqm", ".", "update", "(", "dwave", ".", "embedding", ".", "embed_bqm", "(", "bqm", ",", "embedding", ",", "target_adjacency", ")", ")", "# solve the problem on the child system", "tiled_response", "=", "self", ".", "child", ".", "sample", "(", "embedded_bqm", ",", "*", "*", "kwargs", ")", "responses", "=", "[", "]", "for", "embedding", "in", "self", ".", "embeddings", ":", "embedding", "=", "{", "v", ":", "chain", "for", "v", ",", "chain", "in", "embedding", ".", "items", "(", ")", "if", "v", "in", "bqm", ".", "variables", "}", "responses", ".", "append", "(", "dwave", ".", "embedding", ".", "unembed_sampleset", "(", "tiled_response", ",", "embedding", ",", "bqm", ")", ")", "return", "dimod", ".", "concatenate", "(", "responses", ")" ]
Sample from the specified binary quadratic model. Args: bqm (:obj:`dimod.BinaryQuadraticModel`): Binary quadratic model to be sampled from. **kwargs: Optional keyword arguments for the sampling method, specified per solver. Returns: :class:`dimod.SampleSet` Examples: This example submits a simple Ising problem of just two variables on a D-Wave system selected by the user's default :std:doc:`D-Wave Cloud Client configuration file <cloud-client:intro>`. Because the problem fits in a single :term:`Chimera` unit cell, it is tiled across the solver's entire Chimera graph, resulting in multiple samples (the exact number depends on the working Chimera graph of the D-Wave system). >>> from dwave.system.samplers import DWaveSampler >>> from dwave.system.composites import EmbeddingComposite >>> from dwave.system.composites import EmbeddingComposite, TilingComposite ... >>> sampler = EmbeddingComposite(TilingComposite(DWaveSampler(), 1, 1, 4)) >>> response = sampler.sample_ising({},{('a', 'b'): 1}) >>> len(response) # doctest: +SKIP 246 See `Ocean Glossary <https://docs.ocean.dwavesys.com/en/latest/glossary.html>`_ for explanations of technical terms in descriptions of Ocean tools.
[ "Sample", "from", "the", "specified", "binary", "quadratic", "model", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/composites/tiling.py#L199-L250
train
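A usage sketch of tiled sampling; it needs real solver access, and the number of returned samples depends on the solver's working graph, so the printed length is illustrative only.

from dwave.system.samplers import DWaveSampler
from dwave.system.composites import EmbeddingComposite, TilingComposite

# Tile 1x1 blocks of 4-variable Chimera cells across the solver's working graph.
sampler = EmbeddingComposite(TilingComposite(DWaveSampler(), 1, 1, 4))
sampleset = sampler.sample_ising({}, {('a', 'b'): 1})
print(len(sampleset))  # roughly one sample per usable tile; hardware-dependent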
dwavesystems/dwave-system
dwave/system/cache/database_manager.py
cache_connect
def cache_connect(database=None): """Returns a connection object to a sqlite database. Args: database (str, optional): The path to the database the user wishes to connect to. If not specified, a default is chosen using :func:`.cache_file`. If the special database name ':memory:' is given, then a temporary database is created in memory. Returns: :class:`sqlite3.Connection` """ if database is None: database = cache_file() if os.path.isfile(database): # just connect to the database as-is conn = sqlite3.connect(database) else: # we need to populate the database conn = sqlite3.connect(database) conn.executescript(schema) with conn as cur: # turn on foreign keys, allows deletes to cascade. cur.execute("PRAGMA foreign_keys = ON;") conn.row_factory = sqlite3.Row return conn
python
def cache_connect(database=None): """Returns a connection object to a sqlite database. Args: database (str, optional): The path to the database the user wishes to connect to. If not specified, a default is chosen using :func:`.cache_file`. If the special database name ':memory:' is given, then a temporary database is created in memory. Returns: :class:`sqlite3.Connection` """ if database is None: database = cache_file() if os.path.isfile(database): # just connect to the database as-is conn = sqlite3.connect(database) else: # we need to populate the database conn = sqlite3.connect(database) conn.executescript(schema) with conn as cur: # turn on foreign keys, allows deletes to cascade. cur.execute("PRAGMA foreign_keys = ON;") conn.row_factory = sqlite3.Row return conn
[ "def", "cache_connect", "(", "database", "=", "None", ")", ":", "if", "database", "is", "None", ":", "database", "=", "cache_file", "(", ")", "if", "os", ".", "path", ".", "isfile", "(", "database", ")", ":", "# just connect to the database as-is", "conn", "=", "sqlite3", ".", "connect", "(", "database", ")", "else", ":", "# we need to populate the database", "conn", "=", "sqlite3", ".", "connect", "(", "database", ")", "conn", ".", "executescript", "(", "schema", ")", "with", "conn", "as", "cur", ":", "# turn on foreign keys, allows deletes to cascade.", "cur", ".", "execute", "(", "\"PRAGMA foreign_keys = ON;\"", ")", "conn", ".", "row_factory", "=", "sqlite3", ".", "Row", "return", "conn" ]
Returns a connection object to a sqlite database. Args: database (str, optional): The path to the database the user wishes to connect to. If not specified, a default is chosen using :func:`.cache_file`. If the special database name ':memory:' is given, then a temporary database is created in memory. Returns: :class:`sqlite3.Connection`
[ "Returns", "a", "connection", "object", "to", "a", "sqlite", "database", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/cache/database_manager.py#L37-L67
train
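A sketch of opening the embedding cache in memory; database_manager is an internal module, so its import path may change between releases, and the listed table names are whatever the cache schema defines.

from dwave.system.cache.database_manager import cache_connect

conn = cache_connect(':memory:')  # temporary database with the cache schema applied
with conn as cur:
    tables = [name for name, in cur.execute(
        "SELECT name FROM sqlite_master WHERE type = 'table';")]
print(tables)  # table names created by the schema, e.g. 'chain', 'system', 'flux_bias', ...
conn.close()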
dwavesystems/dwave-system
dwave/system/cache/database_manager.py
insert_chain
def insert_chain(cur, chain, encoded_data=None): """Insert a chain into the cache. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. chain (iterable): A collection of nodes. Chains in embedding act as one node. encoded_data (dict, optional): If a dictionary is provided, it will be populated with the serialized data. This is useful for preventing encoding the same information many times. Notes: This function assumes that the nodes in chain are index-labeled. """ if encoded_data is None: encoded_data = {} if 'nodes' not in encoded_data: encoded_data['nodes'] = json.dumps(sorted(chain), separators=(',', ':')) if 'chain_length' not in encoded_data: encoded_data['chain_length'] = len(chain) insert = "INSERT OR IGNORE INTO chain(chain_length, nodes) VALUES (:chain_length, :nodes);" cur.execute(insert, encoded_data)
python
def insert_chain(cur, chain, encoded_data=None): """Insert a chain into the cache. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. chain (iterable): A collection of nodes. Chains in embedding act as one node. encoded_data (dict, optional): If a dictionary is provided, it will be populated with the serialized data. This is useful for preventing encoding the same information many times. Notes: This function assumes that the nodes in chain are index-labeled. """ if encoded_data is None: encoded_data = {} if 'nodes' not in encoded_data: encoded_data['nodes'] = json.dumps(sorted(chain), separators=(',', ':')) if 'chain_length' not in encoded_data: encoded_data['chain_length'] = len(chain) insert = "INSERT OR IGNORE INTO chain(chain_length, nodes) VALUES (:chain_length, :nodes);" cur.execute(insert, encoded_data)
[ "def", "insert_chain", "(", "cur", ",", "chain", ",", "encoded_data", "=", "None", ")", ":", "if", "encoded_data", "is", "None", ":", "encoded_data", "=", "{", "}", "if", "'nodes'", "not", "in", "encoded_data", ":", "encoded_data", "[", "'nodes'", "]", "=", "json", ".", "dumps", "(", "sorted", "(", "chain", ")", ",", "separators", "=", "(", "','", ",", "':'", ")", ")", "if", "'chain_length'", "not", "in", "encoded_data", ":", "encoded_data", "[", "'chain_length'", "]", "=", "len", "(", "chain", ")", "insert", "=", "\"INSERT OR IGNORE INTO chain(chain_length, nodes) VALUES (:chain_length, :nodes);\"", "cur", ".", "execute", "(", "insert", ",", "encoded_data", ")" ]
Insert a chain into the cache. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. chain (iterable): A collection of nodes. Chains in embedding act as one node. encoded_data (dict, optional): If a dictionary is provided, it will be populated with the serialized data. This is useful for preventing encoding the same information many times. Notes: This function assumes that the nodes in chain are index-labeled.
[ "Insert", "a", "chain", "into", "the", "cache", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/cache/database_manager.py#L70-L98
train
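A sketch of caching a single chain in an in-memory database; the count query assumes the 'chain' table targeted by the INSERT statement above.

from dwave.system.cache.database_manager import cache_connect, insert_chain

conn = cache_connect(':memory:')
with conn as cur:
    insert_chain(cur, [0, 1, 2])
    count, = cur.execute("SELECT COUNT(*) FROM chain;").fetchone()
print(count)  # 1
conn.close()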
dwavesystems/dwave-system
dwave/system/cache/database_manager.py
iter_chain
def iter_chain(cur): """Iterate over all of the chains in the database. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. Yields: list: The chain. """ select = "SELECT nodes FROM chain" for nodes, in cur.execute(select): yield json.loads(nodes)
python
def iter_chain(cur): """Iterate over all of the chains in the database. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. Yields: list: The chain. """ select = "SELECT nodes FROM chain" for nodes, in cur.execute(select): yield json.loads(nodes)
[ "def", "iter_chain", "(", "cur", ")", ":", "select", "=", "\"SELECT nodes FROM chain\"", "for", "nodes", ",", "in", "cur", ".", "execute", "(", "select", ")", ":", "yield", "json", ".", "loads", "(", "nodes", ")" ]
Iterate over all of the chains in the database. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. Yields: list: The chain.
[ "Iterate", "over", "all", "of", "the", "chains", "in", "the", "database", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/cache/database_manager.py#L101-L114
train
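A sketch pairing insert_chain with iter_chain in an in-memory cache; chains come back as JSON-decoded lists of sorted nodes.

from dwave.system.cache.database_manager import cache_connect, insert_chain, iter_chain

conn = cache_connect(':memory:')
with conn as cur:
    insert_chain(cur, [3, 4])
    insert_chain(cur, [0, 1, 2])
    print(sorted(iter_chain(cur)))  # [[0, 1, 2], [3, 4]]
conn.close()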
dwavesystems/dwave-system
dwave/system/cache/database_manager.py
insert_system
def insert_system(cur, system_name, encoded_data=None): """Insert a system name into the cache. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. system_name (str): The unique name of a system encoded_data (dict, optional): If a dictionary is provided, it will be populated with the serialized data. This is useful for preventing encoding the same information many times. """ if encoded_data is None: encoded_data = {} if 'system_name' not in encoded_data: encoded_data['system_name'] = system_name insert = "INSERT OR IGNORE INTO system(system_name) VALUES (:system_name);" cur.execute(insert, encoded_data)
python
def insert_system(cur, system_name, encoded_data=None): """Insert a system name into the cache. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. system_name (str): The unique name of a system encoded_data (dict, optional): If a dictionary is provided, it will be populated with the serialized data. This is useful for preventing encoding the same information many times. """ if encoded_data is None: encoded_data = {} if 'system_name' not in encoded_data: encoded_data['system_name'] = system_name insert = "INSERT OR IGNORE INTO system(system_name) VALUES (:system_name);" cur.execute(insert, encoded_data)
[ "def", "insert_system", "(", "cur", ",", "system_name", ",", "encoded_data", "=", "None", ")", ":", "if", "encoded_data", "is", "None", ":", "encoded_data", "=", "{", "}", "if", "'system_name'", "not", "in", "encoded_data", ":", "encoded_data", "[", "'system_name'", "]", "=", "system_name", "insert", "=", "\"INSERT OR IGNORE INTO system(system_name) VALUES (:system_name);\"", "cur", ".", "execute", "(", "insert", ",", "encoded_data", ")" ]
Insert a system name into the cache. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. system_name (str): The unique name of a system encoded_data (dict, optional): If a dictionary is provided, it will be populated with the serialized data. This is useful for preventing encoding the same information many times.
[ "Insert", "a", "system", "name", "into", "the", "cache", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/cache/database_manager.py#L117-L139
train
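A sketch of recording a system name; 'Hypothetical_Solver_1' is a made-up placeholder, and the select assumes the 'system' table targeted by the INSERT above.

from dwave.system.cache.database_manager import cache_connect, insert_system

conn = cache_connect(':memory:')
with conn as cur:
    insert_system(cur, 'Hypothetical_Solver_1')
    names = [name for name, in cur.execute("SELECT system_name FROM system;")]
print(names)  # ['Hypothetical_Solver_1']
conn.close()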
dwavesystems/dwave-system
dwave/system/cache/database_manager.py
insert_flux_bias
def insert_flux_bias(cur, chain, system, flux_bias, chain_strength, encoded_data=None): """Insert a flux bias offset into the cache. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. chain (iterable): A collection of nodes. Chains in embedding act as one node. system (str): The unique name of a system. flux_bias (float): The flux bias offset associated with the given chain. chain_strength (float): The magnitude of the negative quadratic bias that induces the given chain in an Ising problem. encoded_data (dict, optional): If a dictionary is provided, it will be populated with the serialized data. This is useful for preventing encoding the same information many times. """ if encoded_data is None: encoded_data = {} insert_chain(cur, chain, encoded_data) insert_system(cur, system, encoded_data) if 'flux_bias' not in encoded_data: encoded_data['flux_bias'] = _encode_real(flux_bias) if 'chain_strength' not in encoded_data: encoded_data['chain_strength'] = _encode_real(chain_strength) if 'insert_time' not in encoded_data: encoded_data['insert_time'] = datetime.datetime.now() insert = \ """ INSERT OR REPLACE INTO flux_bias(chain_id, system_id, insert_time, flux_bias, chain_strength) SELECT chain.id, system.id, :insert_time, :flux_bias, :chain_strength FROM chain, system WHERE chain.chain_length = :chain_length AND chain.nodes = :nodes AND system.system_name = :system_name; """ cur.execute(insert, encoded_data)
python
def insert_flux_bias(cur, chain, system, flux_bias, chain_strength, encoded_data=None): """Insert a flux bias offset into the cache. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. chain (iterable): A collection of nodes. Chains in embedding act as one node. system (str): The unique name of a system. flux_bias (float): The flux bias offset associated with the given chain. chain_strength (float): The magnitude of the negative quadratic bias that induces the given chain in an Ising problem. encoded_data (dict, optional): If a dictionary is provided, it will be populated with the serialized data. This is useful for preventing encoding the same information many times. """ if encoded_data is None: encoded_data = {} insert_chain(cur, chain, encoded_data) insert_system(cur, system, encoded_data) if 'flux_bias' not in encoded_data: encoded_data['flux_bias'] = _encode_real(flux_bias) if 'chain_strength' not in encoded_data: encoded_data['chain_strength'] = _encode_real(chain_strength) if 'insert_time' not in encoded_data: encoded_data['insert_time'] = datetime.datetime.now() insert = \ """ INSERT OR REPLACE INTO flux_bias(chain_id, system_id, insert_time, flux_bias, chain_strength) SELECT chain.id, system.id, :insert_time, :flux_bias, :chain_strength FROM chain, system WHERE chain.chain_length = :chain_length AND chain.nodes = :nodes AND system.system_name = :system_name; """ cur.execute(insert, encoded_data)
[ "def", "insert_flux_bias", "(", "cur", ",", "chain", ",", "system", ",", "flux_bias", ",", "chain_strength", ",", "encoded_data", "=", "None", ")", ":", "if", "encoded_data", "is", "None", ":", "encoded_data", "=", "{", "}", "insert_chain", "(", "cur", ",", "chain", ",", "encoded_data", ")", "insert_system", "(", "cur", ",", "system", ",", "encoded_data", ")", "if", "'flux_bias'", "not", "in", "encoded_data", ":", "encoded_data", "[", "'flux_bias'", "]", "=", "_encode_real", "(", "flux_bias", ")", "if", "'chain_strength'", "not", "in", "encoded_data", ":", "encoded_data", "[", "'chain_strength'", "]", "=", "_encode_real", "(", "chain_strength", ")", "if", "'insert_time'", "not", "in", "encoded_data", ":", "encoded_data", "[", "'insert_time'", "]", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "insert", "=", "\"\"\"\n INSERT OR REPLACE INTO flux_bias(chain_id, system_id, insert_time, flux_bias, chain_strength)\n SELECT\n chain.id,\n system.id,\n :insert_time,\n :flux_bias,\n :chain_strength\n FROM chain, system\n WHERE\n chain.chain_length = :chain_length AND\n chain.nodes = :nodes AND\n system.system_name = :system_name;\n \"\"\"", "cur", ".", "execute", "(", "insert", ",", "encoded_data", ")" ]
Insert a flux bias offset into the cache. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. chain (iterable): A collection of nodes. Chains in embedding act as one node. system (str): The unique name of a system. flux_bias (float): The flux bias offset associated with the given chain. chain_strength (float): The magnitude of the negative quadratic bias that induces the given chain in an Ising problem. encoded_data (dict, optional): If a dictionary is provided, it will be populated with the serialized data. This is useful for preventing encoding the same information many times.
[ "Insert", "a", "flux", "bias", "offset", "into", "the", "cache", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/cache/database_manager.py#L158-L212
train
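A sketch of storing one flux-bias offset; the solver name and the 2e-5 offset are placeholders, and the count query assumes the 'flux_bias' table named in the INSERT above.

from dwave.system.cache.database_manager import cache_connect, insert_flux_bias

conn = cache_connect(':memory:')
with conn as cur:
    insert_flux_bias(cur, [0, 1], 'Hypothetical_Solver_1', 2e-5, chain_strength=1.0)
    count, = cur.execute("SELECT COUNT(*) FROM flux_bias;").fetchone()
print(count)  # 1
conn.close()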
dwavesystems/dwave-system
dwave/system/cache/database_manager.py
get_flux_biases_from_cache
def get_flux_biases_from_cache(cur, chains, system_name, chain_strength, max_age=3600): """Determine the flux biases for all of the the given chains, system and chain strength. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. chains (iterable): An iterable of chains. Each chain is a collection of nodes. Chains in embedding act as one node. system_name (str): The unique name of a system. chain_strength (float): The magnitude of the negative quadratic bias that induces the given chain in an Ising problem. max_age (int, optional, default=3600): The maximum age (in seconds) for the flux_bias offsets. Returns: dict: A dict where the keys are the nodes in the chains and the values are the flux biases. """ select = \ """ SELECT flux_bias FROM flux_bias_view WHERE chain_length = :chain_length AND nodes = :nodes AND chain_strength = :chain_strength AND system_name = :system_name AND insert_time >= :time_limit; """ encoded_data = {'chain_strength': _encode_real(chain_strength), 'system_name': system_name, 'time_limit': datetime.datetime.now() + datetime.timedelta(seconds=-max_age)} flux_biases = {} for chain in chains: encoded_data['chain_length'] = len(chain) encoded_data['nodes'] = json.dumps(sorted(chain), separators=(',', ':')) row = cur.execute(select, encoded_data).fetchone() if row is None: raise MissingFluxBias flux_bias = _decode_real(*row) if flux_bias == 0: continue flux_biases.update({v: flux_bias for v in chain}) return flux_biases
python
def get_flux_biases_from_cache(cur, chains, system_name, chain_strength, max_age=3600): """Determine the flux biases for all of the the given chains, system and chain strength. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. chains (iterable): An iterable of chains. Each chain is a collection of nodes. Chains in embedding act as one node. system_name (str): The unique name of a system. chain_strength (float): The magnitude of the negative quadratic bias that induces the given chain in an Ising problem. max_age (int, optional, default=3600): The maximum age (in seconds) for the flux_bias offsets. Returns: dict: A dict where the keys are the nodes in the chains and the values are the flux biases. """ select = \ """ SELECT flux_bias FROM flux_bias_view WHERE chain_length = :chain_length AND nodes = :nodes AND chain_strength = :chain_strength AND system_name = :system_name AND insert_time >= :time_limit; """ encoded_data = {'chain_strength': _encode_real(chain_strength), 'system_name': system_name, 'time_limit': datetime.datetime.now() + datetime.timedelta(seconds=-max_age)} flux_biases = {} for chain in chains: encoded_data['chain_length'] = len(chain) encoded_data['nodes'] = json.dumps(sorted(chain), separators=(',', ':')) row = cur.execute(select, encoded_data).fetchone() if row is None: raise MissingFluxBias flux_bias = _decode_real(*row) if flux_bias == 0: continue flux_biases.update({v: flux_bias for v in chain}) return flux_biases
[ "def", "get_flux_biases_from_cache", "(", "cur", ",", "chains", ",", "system_name", ",", "chain_strength", ",", "max_age", "=", "3600", ")", ":", "select", "=", "\"\"\"\n SELECT\n flux_bias\n FROM flux_bias_view WHERE\n chain_length = :chain_length AND\n nodes = :nodes AND\n chain_strength = :chain_strength AND\n system_name = :system_name AND\n insert_time >= :time_limit;\n \"\"\"", "encoded_data", "=", "{", "'chain_strength'", ":", "_encode_real", "(", "chain_strength", ")", ",", "'system_name'", ":", "system_name", ",", "'time_limit'", ":", "datetime", ".", "datetime", ".", "now", "(", ")", "+", "datetime", ".", "timedelta", "(", "seconds", "=", "-", "max_age", ")", "}", "flux_biases", "=", "{", "}", "for", "chain", "in", "chains", ":", "encoded_data", "[", "'chain_length'", "]", "=", "len", "(", "chain", ")", "encoded_data", "[", "'nodes'", "]", "=", "json", ".", "dumps", "(", "sorted", "(", "chain", ")", ",", "separators", "=", "(", "','", ",", "':'", ")", ")", "row", "=", "cur", ".", "execute", "(", "select", ",", "encoded_data", ")", ".", "fetchone", "(", ")", "if", "row", "is", "None", ":", "raise", "MissingFluxBias", "flux_bias", "=", "_decode_real", "(", "*", "row", ")", "if", "flux_bias", "==", "0", ":", "continue", "flux_biases", ".", "update", "(", "{", "v", ":", "flux_bias", "for", "v", "in", "chain", "}", ")", "return", "flux_biases" ]
Determine the flux biases for all of the given chains, system and chain strength. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. chains (iterable): An iterable of chains. Each chain is a collection of nodes. Chains in an embedding act as one node. system_name (str): The unique name of a system. chain_strength (float): The magnitude of the negative quadratic bias that induces the given chain in an Ising problem. max_age (int, optional, default=3600): The maximum age (in seconds) for the flux_bias offsets. Returns: dict: A dict where the keys are the nodes in the chains and the values are the flux biases.
[ "Determine", "the", "flux", "biases", "for", "all", "of", "the", "the", "given", "chains", "system", "and", "chain", "strength", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/cache/database_manager.py#L255-L312
train
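A minimal usage sketch for the record above (not part of the cached data): it shows one way the stored flux-bias offsets might be read back for two chains. The module path follows this record's path field; cache_connect is assumed to be the cursor-producing context manager used in the neighbouring insert_graph doctest, and the system name, chains, and in-memory database are placeholder values.

from dwave.system.cache import database_manager as dbm   # module named in this record's path field

chains = [[0, 4], [1, 5]]                  # each chain is a collection of target nodes
try:
    with dbm.cache_connect(':memory:') as cur:    # assumed helper, see lead-in
        flux_biases = dbm.get_flux_biases_from_cache(
            cur, chains, system_name='EXAMPLE_SOLVER', chain_strength=2.0, max_age=3600)
except dbm.MissingFluxBias:                # raised on a cache miss, as in the code above
    flux_biases = {}                       # fall back to no offsets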
dwavesystems/dwave-system
dwave/system/cache/database_manager.py
insert_graph
def insert_graph(cur, nodelist, edgelist, encoded_data=None): """Insert a graph into the cache. A graph is stored by number of nodes, number of edges and a json-encoded list of edges. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. nodelist (list): The nodes in the graph. edgelist (list): The edges in the graph. encoded_data (dict, optional): If a dictionary is provided, it will be populated with the serialized data. This is useful for preventing encoding the same information many times. Notes: This function assumes that the nodes are index-labeled and range from 0 to num_nodes - 1. In order to minimize the total size of the cache, it is a good idea to sort the nodelist and edgelist before inserting. Examples: >>> nodelist = [0, 1, 2] >>> edgelist = [(0, 1), (1, 2)] >>> with pmc.cache_connect(':memory:') as cur: ... pmc.insert_graph(cur, nodelist, edgelist) >>> nodelist = [0, 1, 2] >>> edgelist = [(0, 1), (1, 2)] >>> encoded_data = {} >>> with pmc.cache_connect(':memory:') as cur: ... pmc.insert_graph(cur, nodelist, edgelist, encoded_data) >>> encoded_data['num_nodes'] 3 >>> encoded_data['num_edges'] 2 >>> encoded_data['edges'] '[[0,1],[1,2]]' """ if encoded_data is None: encoded_data = {} if 'num_nodes' not in encoded_data: encoded_data['num_nodes'] = len(nodelist) if 'num_edges' not in encoded_data: encoded_data['num_edges'] = len(edgelist) if 'edges' not in encoded_data: encoded_data['edges'] = json.dumps(edgelist, separators=(',', ':')) insert = \ """ INSERT OR IGNORE INTO graph(num_nodes, num_edges, edges) VALUES (:num_nodes, :num_edges, :edges); """ cur.execute(insert, encoded_data)
python
def insert_graph(cur, nodelist, edgelist, encoded_data=None): """Insert a graph into the cache. A graph is stored by number of nodes, number of edges and a json-encoded list of edges. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. nodelist (list): The nodes in the graph. edgelist (list): The edges in the graph. encoded_data (dict, optional): If a dictionary is provided, it will be populated with the serialized data. This is useful for preventing encoding the same information many times. Notes: This function assumes that the nodes are index-labeled and range from 0 to num_nodes - 1. In order to minimize the total size of the cache, it is a good idea to sort the nodelist and edgelist before inserting. Examples: >>> nodelist = [0, 1, 2] >>> edgelist = [(0, 1), (1, 2)] >>> with pmc.cache_connect(':memory:') as cur: ... pmc.insert_graph(cur, nodelist, edgelist) >>> nodelist = [0, 1, 2] >>> edgelist = [(0, 1), (1, 2)] >>> encoded_data = {} >>> with pmc.cache_connect(':memory:') as cur: ... pmc.insert_graph(cur, nodelist, edgelist, encoded_data) >>> encoded_data['num_nodes'] 3 >>> encoded_data['num_edges'] 2 >>> encoded_data['edges'] '[[0,1],[1,2]]' """ if encoded_data is None: encoded_data = {} if 'num_nodes' not in encoded_data: encoded_data['num_nodes'] = len(nodelist) if 'num_edges' not in encoded_data: encoded_data['num_edges'] = len(edgelist) if 'edges' not in encoded_data: encoded_data['edges'] = json.dumps(edgelist, separators=(',', ':')) insert = \ """ INSERT OR IGNORE INTO graph(num_nodes, num_edges, edges) VALUES (:num_nodes, :num_edges, :edges); """ cur.execute(insert, encoded_data)
[ "def", "insert_graph", "(", "cur", ",", "nodelist", ",", "edgelist", ",", "encoded_data", "=", "None", ")", ":", "if", "encoded_data", "is", "None", ":", "encoded_data", "=", "{", "}", "if", "'num_nodes'", "not", "in", "encoded_data", ":", "encoded_data", "[", "'num_nodes'", "]", "=", "len", "(", "nodelist", ")", "if", "'num_edges'", "not", "in", "encoded_data", ":", "encoded_data", "[", "'num_edges'", "]", "=", "len", "(", "edgelist", ")", "if", "'edges'", "not", "in", "encoded_data", ":", "encoded_data", "[", "'edges'", "]", "=", "json", ".", "dumps", "(", "edgelist", ",", "separators", "=", "(", "','", ",", "':'", ")", ")", "insert", "=", "\"\"\"\n INSERT OR IGNORE INTO graph(num_nodes, num_edges, edges)\n VALUES (:num_nodes, :num_edges, :edges);\n \"\"\"", "cur", ".", "execute", "(", "insert", ",", "encoded_data", ")" ]
Insert a graph into the cache. A graph is stored by number of nodes, number of edges and a json-encoded list of edges. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. nodelist (list): The nodes in the graph. edgelist (list): The edges in the graph. encoded_data (dict, optional): If a dictionary is provided, it will be populated with the serialized data. This is useful for preventing encoding the same information many times. Notes: This function assumes that the nodes are index-labeled and range from 0 to num_nodes - 1. In order to minimize the total size of the cache, it is a good idea to sort the nodelist and edgelist before inserting. Examples: >>> nodelist = [0, 1, 2] >>> edgelist = [(0, 1), (1, 2)] >>> with pmc.cache_connect(':memory:') as cur: ... pmc.insert_graph(cur, nodelist, edgelist) >>> nodelist = [0, 1, 2] >>> edgelist = [(0, 1), (1, 2)] >>> encoded_data = {} >>> with pmc.cache_connect(':memory:') as cur: ... pmc.insert_graph(cur, nodelist, edgelist, encoded_data) >>> encoded_data['num_nodes'] 3 >>> encoded_data['num_edges'] 2 >>> encoded_data['edges'] '[[0,1],[1,2]]'
[ "Insert", "a", "graph", "into", "the", "cache", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/cache/database_manager.py#L315-L372
train
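A small companion sketch (not from the record): it follows the Notes above by sorting the node and edge lists before caching and reuses encoded_data so the same graph is serialized only once. cache_connect is again assumed to be the context manager from the record's own doctest; the tiny graph is a placeholder.

from dwave.system.cache import database_manager as dbm

nodelist = sorted([2, 0, 1])                                   # -> [0, 1, 2]
edgelist = sorted(sorted(edge) for edge in [(1, 2), (1, 0)])   # -> [[0, 1], [1, 2]]

encoded = {}                    # populated by the first insert, reusable for later calls
with dbm.cache_connect(':memory:') as cur:
    dbm.insert_graph(cur, nodelist, edgelist, encoded_data=encoded)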
dwavesystems/dwave-system
dwave/system/cache/database_manager.py
select_embedding_from_tag
def select_embedding_from_tag(cur, embedding_tag, target_nodelist, target_edgelist): """Select an embedding from the given tag and target graph. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. source_nodelist (list): The nodes in the source graph. Should be integer valued. source_edgelist (list): The edges in the source graph. target_nodelist (list): The nodes in the target graph. Should be integer valued. target_edgelist (list): The edges in the target graph. Returns: dict: The mapping from the source graph to the target graph. In the form {v: {s, ...}, ...} where v is a variable in the source model and s is a variable in the target model. """ encoded_data = {'num_nodes': len(target_nodelist), 'num_edges': len(target_edgelist), 'edges': json.dumps(target_edgelist, separators=(',', ':')), 'tag': embedding_tag} select = \ """ SELECT source_node, chain FROM embedding_component_view WHERE embedding_tag = :tag AND target_edges = :edges AND target_num_nodes = :num_nodes AND target_num_edges = :num_edges """ embedding = {v: json.loads(chain) for v, chain in cur.execute(select, encoded_data)} return embedding
python
def select_embedding_from_tag(cur, embedding_tag, target_nodelist, target_edgelist): """Select an embedding from the given tag and target graph. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. source_nodelist (list): The nodes in the source graph. Should be integer valued. source_edgelist (list): The edges in the source graph. target_nodelist (list): The nodes in the target graph. Should be integer valued. target_edgelist (list): The edges in the target graph. Returns: dict: The mapping from the source graph to the target graph. In the form {v: {s, ...}, ...} where v is a variable in the source model and s is a variable in the target model. """ encoded_data = {'num_nodes': len(target_nodelist), 'num_edges': len(target_edgelist), 'edges': json.dumps(target_edgelist, separators=(',', ':')), 'tag': embedding_tag} select = \ """ SELECT source_node, chain FROM embedding_component_view WHERE embedding_tag = :tag AND target_edges = :edges AND target_num_nodes = :num_nodes AND target_num_edges = :num_edges """ embedding = {v: json.loads(chain) for v, chain in cur.execute(select, encoded_data)} return embedding
[ "def", "select_embedding_from_tag", "(", "cur", ",", "embedding_tag", ",", "target_nodelist", ",", "target_edgelist", ")", ":", "encoded_data", "=", "{", "'num_nodes'", ":", "len", "(", "target_nodelist", ")", ",", "'num_edges'", ":", "len", "(", "target_edgelist", ")", ",", "'edges'", ":", "json", ".", "dumps", "(", "target_edgelist", ",", "separators", "=", "(", "','", ",", "':'", ")", ")", ",", "'tag'", ":", "embedding_tag", "}", "select", "=", "\"\"\"\n SELECT\n source_node,\n chain\n FROM\n embedding_component_view\n WHERE\n embedding_tag = :tag AND\n target_edges = :edges AND\n target_num_nodes = :num_nodes AND\n target_num_edges = :num_edges\n \"\"\"", "embedding", "=", "{", "v", ":", "json", ".", "loads", "(", "chain", ")", "for", "v", ",", "chain", "in", "cur", ".", "execute", "(", "select", ",", "encoded_data", ")", "}", "return", "embedding" ]
Select an embedding from the given tag and target graph. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. embedding_tag (str): A string tag associated with the cached embedding. target_nodelist (list): The nodes in the target graph. Should be integer valued. target_edgelist (list): The edges in the target graph. Returns: dict: The mapping from the source graph to the target graph. In the form {v: {s, ...}, ...} where v is a variable in the source model and s is a variable in the target model.
[ "Select", "an", "embedding", "from", "the", "given", "tag", "and", "target", "graph", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/cache/database_manager.py#L512-L557
train
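An illustrative sketch (not part of the record) of looking a cached embedding back up by its tag. The tag string, the square target graph, and the in-memory database are placeholders; cache_connect is assumed as above. On a fresh cache the query simply returns an empty dict.

import networkx as nx
from dwave.system.cache import database_manager as dbm

target = nx.cycle_graph(4)                                    # square target graph
target_nodelist = sorted(target.nodes)
target_edgelist = sorted(sorted(edge) for edge in target.edges)

with dbm.cache_connect(':memory:') as cur:
    embedding = dbm.select_embedding_from_tag(cur, 'k3-on-square',
                                              target_nodelist, target_edgelist)
# -> {} here; a stored embedding would come back as e.g. {0: [0], 1: [1], 2: [2, 3]}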
dwavesystems/dwave-system
dwave/system/cache/database_manager.py
select_embedding_from_source
def select_embedding_from_source(cur, source_nodelist, source_edgelist, target_nodelist, target_edgelist): """Select an embedding from the source graph and target graph. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. target_nodelist (list): The nodes in the target graph. Should be integer valued. target_edgelist (list): The edges in the target graph. embedding_tag (str): A string tag to associate with the embedding. Returns: dict: The mapping from the source graph to the target graph. In the form {v: {s, ...}, ...} where v is a variable in the source model and s is a variable in the target model. """ encoded_data = {'target_num_nodes': len(target_nodelist), 'target_num_edges': len(target_edgelist), 'target_edges': json.dumps(target_edgelist, separators=(',', ':')), 'source_num_nodes': len(source_nodelist), 'source_num_edges': len(source_edgelist), 'source_edges': json.dumps(source_edgelist, separators=(',', ':'))} select = \ """ SELECT source_node, chain FROM embedding_component_view WHERE source_num_edges = :source_num_edges AND source_edges = :source_edges AND source_num_nodes = :source_num_nodes AND target_num_edges = :target_num_edges AND target_edges = :target_edges AND target_num_nodes = :target_num_nodes """ embedding = {v: json.loads(chain) for v, chain in cur.execute(select, encoded_data)} return embedding
python
def select_embedding_from_source(cur, source_nodelist, source_edgelist, target_nodelist, target_edgelist): """Select an embedding from the source graph and target graph. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. target_nodelist (list): The nodes in the target graph. Should be integer valued. target_edgelist (list): The edges in the target graph. embedding_tag (str): A string tag to associate with the embedding. Returns: dict: The mapping from the source graph to the target graph. In the form {v: {s, ...}, ...} where v is a variable in the source model and s is a variable in the target model. """ encoded_data = {'target_num_nodes': len(target_nodelist), 'target_num_edges': len(target_edgelist), 'target_edges': json.dumps(target_edgelist, separators=(',', ':')), 'source_num_nodes': len(source_nodelist), 'source_num_edges': len(source_edgelist), 'source_edges': json.dumps(source_edgelist, separators=(',', ':'))} select = \ """ SELECT source_node, chain FROM embedding_component_view WHERE source_num_edges = :source_num_edges AND source_edges = :source_edges AND source_num_nodes = :source_num_nodes AND target_num_edges = :target_num_edges AND target_edges = :target_edges AND target_num_nodes = :target_num_nodes """ embedding = {v: json.loads(chain) for v, chain in cur.execute(select, encoded_data)} return embedding
[ "def", "select_embedding_from_source", "(", "cur", ",", "source_nodelist", ",", "source_edgelist", ",", "target_nodelist", ",", "target_edgelist", ")", ":", "encoded_data", "=", "{", "'target_num_nodes'", ":", "len", "(", "target_nodelist", ")", ",", "'target_num_edges'", ":", "len", "(", "target_edgelist", ")", ",", "'target_edges'", ":", "json", ".", "dumps", "(", "target_edgelist", ",", "separators", "=", "(", "','", ",", "':'", ")", ")", ",", "'source_num_nodes'", ":", "len", "(", "source_nodelist", ")", ",", "'source_num_edges'", ":", "len", "(", "source_edgelist", ")", ",", "'source_edges'", ":", "json", ".", "dumps", "(", "source_edgelist", ",", "separators", "=", "(", "','", ",", "':'", ")", ")", "}", "select", "=", "\"\"\"\n SELECT\n source_node,\n chain\n FROM\n embedding_component_view\n WHERE\n source_num_edges = :source_num_edges AND\n source_edges = :source_edges AND\n source_num_nodes = :source_num_nodes AND\n\n target_num_edges = :target_num_edges AND\n target_edges = :target_edges AND\n target_num_nodes = :target_num_nodes\n \"\"\"", "embedding", "=", "{", "v", ":", "json", ".", "loads", "(", "chain", ")", "for", "v", ",", "chain", "in", "cur", ".", "execute", "(", "select", ",", "encoded_data", ")", "}", "return", "embedding" ]
Select an embedding from the source graph and target graph. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. source_nodelist (list): The nodes in the source graph. Should be integer valued. source_edgelist (list): The edges in the source graph. target_nodelist (list): The nodes in the target graph. Should be integer valued. target_edgelist (list): The edges in the target graph. Returns: dict: The mapping from the source graph to the target graph. In the form {v: {s, ...}, ...} where v is a variable in the source model and s is a variable in the target model.
[ "Select", "an", "embedding", "from", "the", "source", "graph", "and", "target", "graph", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/cache/database_manager.py#L560-L608
train
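An illustrative sketch (not part of the record) of the tag-free lookup: here the cache is keyed on the exact source and target graphs. Both graphs and the in-memory database are placeholders, and cache_connect is assumed as in the earlier records.

from dwave.system.cache import database_manager as dbm

source_nodelist = [0, 1, 2]
source_edgelist = [[0, 1], [0, 2], [1, 2]]            # triangle source graph
target_nodelist = [0, 1, 2, 3]
target_edgelist = [[0, 1], [0, 3], [1, 2], [2, 3]]    # square target graph

with dbm.cache_connect(':memory:') as cur:
    embedding = dbm.select_embedding_from_source(
        cur, source_nodelist, source_edgelist, target_nodelist, target_edgelist)
# -> {} until a matching embedding has been cached for this exact graph pair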
dwavesystems/dwave-system
dwave/embedding/drawing.py
draw_chimera_bqm
def draw_chimera_bqm(bqm, width=None, height=None): """Draws a Chimera Graph representation of a Binary Quadratic Model. If cell width and height not provided assumes square cell dimensions. Throws an error if drawing onto a Chimera graph of the given dimensions fails. Args: bqm (:obj:`dimod.BinaryQuadraticModel`): Should be equivalent to a Chimera graph or a subgraph of a Chimera graph produced by dnx.chimera_graph. The nodes and edges should have integer variables as in the dnx.chimera_graph. width (int, optional): An integer representing the number of cells of the Chimera graph will be in width. height (int, optional): An integer representing the number of cells of the Chimera graph will be in height. Examples: >>> from dwave.embedding.drawing import draw_chimera_bqm >>> from dimod import BinaryQuadraticModel >>> Q={(0, 0): 2, (1, 1): 1, (2, 2): 0, (3, 3): -1, (4, 4): -2, (5, 5): -2, (6, 6): -2, (7, 7): -2, ... (0, 4): 2, (0, 4): -1, (1, 7): 1, (1, 5): 0, (2, 5): -2, (2, 6): -2, (3, 4): -2, (3, 7): -2} >>> draw_chimera_bqm(BinaryQuadraticModel.from_qubo(Q), width=1, height=1) """ linear = bqm.linear.keys() quadratic = bqm.quadratic.keys() if width is None and height is None: # Create a graph large enough to fit the input networkx graph. graph_size = ceil(sqrt((max(linear) + 1) / 8.0)) width = graph_size height = graph_size if not width or not height: raise Exception("Both dimensions must be defined, not just one.") # A background image of the same size is created to show the complete graph. G0 = chimera_graph(height, width, 4) G = chimera_graph(height, width, 4) # Check if input graph is chimera graph shaped, by making sure that no edges are invalid. # Invalid edges can also appear if the size of the chimera graph is incompatible with the input graph in cell dimensions. non_chimera_nodes = [] non_chimera_edges = [] for node in linear: if not node in G.nodes: non_chimera_nodes.append(node) for edge in quadratic: if not edge in G.edges: non_chimera_edges.append(edge) linear_set = set(linear) g_node_set = set(G.nodes) quadratic_set = set(map(frozenset, quadratic)) g_edge_set = set(map(frozenset, G.edges)) non_chimera_nodes = linear_set - g_node_set non_chimera_edges = quadratic_set - g_edge_set if non_chimera_nodes or non_chimera_edges: raise Exception("Input graph is not a chimera graph: Nodes: %s Edges: %s" % (non_chimera_nodes, non_chimera_edges)) # Get lists of nodes and edges to remove from the complete graph to turn the complete graph into your graph. remove_nodes = list(g_node_set - linear_set) remove_edges = list(g_edge_set - quadratic_set) # Remove the nodes and edges from the graph. for edge in remove_edges: G.remove_edge(*edge) for node in remove_nodes: G.remove_node(node) node_size = 100 # Draw the complete chimera graph as the background. draw_chimera(G0, node_size=node_size*0.5, node_color='black', edge_color='black') # Draw your graph over the complete graph to show the connectivity. draw_chimera(G, node_size=node_size, linear_biases=bqm.linear, quadratic_biases=bqm.quadratic, width=3) return
python
def draw_chimera_bqm(bqm, width=None, height=None): """Draws a Chimera Graph representation of a Binary Quadratic Model. If cell width and height not provided assumes square cell dimensions. Throws an error if drawing onto a Chimera graph of the given dimensions fails. Args: bqm (:obj:`dimod.BinaryQuadraticModel`): Should be equivalent to a Chimera graph or a subgraph of a Chimera graph produced by dnx.chimera_graph. The nodes and edges should have integer variables as in the dnx.chimera_graph. width (int, optional): An integer representing the number of cells of the Chimera graph will be in width. height (int, optional): An integer representing the number of cells of the Chimera graph will be in height. Examples: >>> from dwave.embedding.drawing import draw_chimera_bqm >>> from dimod import BinaryQuadraticModel >>> Q={(0, 0): 2, (1, 1): 1, (2, 2): 0, (3, 3): -1, (4, 4): -2, (5, 5): -2, (6, 6): -2, (7, 7): -2, ... (0, 4): 2, (0, 4): -1, (1, 7): 1, (1, 5): 0, (2, 5): -2, (2, 6): -2, (3, 4): -2, (3, 7): -2} >>> draw_chimera_bqm(BinaryQuadraticModel.from_qubo(Q), width=1, height=1) """ linear = bqm.linear.keys() quadratic = bqm.quadratic.keys() if width is None and height is None: # Create a graph large enough to fit the input networkx graph. graph_size = ceil(sqrt((max(linear) + 1) / 8.0)) width = graph_size height = graph_size if not width or not height: raise Exception("Both dimensions must be defined, not just one.") # A background image of the same size is created to show the complete graph. G0 = chimera_graph(height, width, 4) G = chimera_graph(height, width, 4) # Check if input graph is chimera graph shaped, by making sure that no edges are invalid. # Invalid edges can also appear if the size of the chimera graph is incompatible with the input graph in cell dimensions. non_chimera_nodes = [] non_chimera_edges = [] for node in linear: if not node in G.nodes: non_chimera_nodes.append(node) for edge in quadratic: if not edge in G.edges: non_chimera_edges.append(edge) linear_set = set(linear) g_node_set = set(G.nodes) quadratic_set = set(map(frozenset, quadratic)) g_edge_set = set(map(frozenset, G.edges)) non_chimera_nodes = linear_set - g_node_set non_chimera_edges = quadratic_set - g_edge_set if non_chimera_nodes or non_chimera_edges: raise Exception("Input graph is not a chimera graph: Nodes: %s Edges: %s" % (non_chimera_nodes, non_chimera_edges)) # Get lists of nodes and edges to remove from the complete graph to turn the complete graph into your graph. remove_nodes = list(g_node_set - linear_set) remove_edges = list(g_edge_set - quadratic_set) # Remove the nodes and edges from the graph. for edge in remove_edges: G.remove_edge(*edge) for node in remove_nodes: G.remove_node(node) node_size = 100 # Draw the complete chimera graph as the background. draw_chimera(G0, node_size=node_size*0.5, node_color='black', edge_color='black') # Draw your graph over the complete graph to show the connectivity. draw_chimera(G, node_size=node_size, linear_biases=bqm.linear, quadratic_biases=bqm.quadratic, width=3) return
[ "def", "draw_chimera_bqm", "(", "bqm", ",", "width", "=", "None", ",", "height", "=", "None", ")", ":", "linear", "=", "bqm", ".", "linear", ".", "keys", "(", ")", "quadratic", "=", "bqm", ".", "quadratic", ".", "keys", "(", ")", "if", "width", "is", "None", "and", "height", "is", "None", ":", "# Create a graph large enough to fit the input networkx graph.", "graph_size", "=", "ceil", "(", "sqrt", "(", "(", "max", "(", "linear", ")", "+", "1", ")", "/", "8.0", ")", ")", "width", "=", "graph_size", "height", "=", "graph_size", "if", "not", "width", "or", "not", "height", ":", "raise", "Exception", "(", "\"Both dimensions must be defined, not just one.\"", ")", "# A background image of the same size is created to show the complete graph.", "G0", "=", "chimera_graph", "(", "height", ",", "width", ",", "4", ")", "G", "=", "chimera_graph", "(", "height", ",", "width", ",", "4", ")", "# Check if input graph is chimera graph shaped, by making sure that no edges are invalid.", "# Invalid edges can also appear if the size of the chimera graph is incompatible with the input graph in cell dimensions.", "non_chimera_nodes", "=", "[", "]", "non_chimera_edges", "=", "[", "]", "for", "node", "in", "linear", ":", "if", "not", "node", "in", "G", ".", "nodes", ":", "non_chimera_nodes", ".", "append", "(", "node", ")", "for", "edge", "in", "quadratic", ":", "if", "not", "edge", "in", "G", ".", "edges", ":", "non_chimera_edges", ".", "append", "(", "edge", ")", "linear_set", "=", "set", "(", "linear", ")", "g_node_set", "=", "set", "(", "G", ".", "nodes", ")", "quadratic_set", "=", "set", "(", "map", "(", "frozenset", ",", "quadratic", ")", ")", "g_edge_set", "=", "set", "(", "map", "(", "frozenset", ",", "G", ".", "edges", ")", ")", "non_chimera_nodes", "=", "linear_set", "-", "g_node_set", "non_chimera_edges", "=", "quadratic_set", "-", "g_edge_set", "if", "non_chimera_nodes", "or", "non_chimera_edges", ":", "raise", "Exception", "(", "\"Input graph is not a chimera graph: Nodes: %s Edges: %s\"", "%", "(", "non_chimera_nodes", ",", "non_chimera_edges", ")", ")", "# Get lists of nodes and edges to remove from the complete graph to turn the complete graph into your graph.", "remove_nodes", "=", "list", "(", "g_node_set", "-", "linear_set", ")", "remove_edges", "=", "list", "(", "g_edge_set", "-", "quadratic_set", ")", "# Remove the nodes and edges from the graph.", "for", "edge", "in", "remove_edges", ":", "G", ".", "remove_edge", "(", "*", "edge", ")", "for", "node", "in", "remove_nodes", ":", "G", ".", "remove_node", "(", "node", ")", "node_size", "=", "100", "# Draw the complete chimera graph as the background.", "draw_chimera", "(", "G0", ",", "node_size", "=", "node_size", "*", "0.5", ",", "node_color", "=", "'black'", ",", "edge_color", "=", "'black'", ")", "# Draw your graph over the complete graph to show the connectivity.", "draw_chimera", "(", "G", ",", "node_size", "=", "node_size", ",", "linear_biases", "=", "bqm", ".", "linear", ",", "quadratic_biases", "=", "bqm", ".", "quadratic", ",", "width", "=", "3", ")", "return" ]
Draws a Chimera graph representation of a binary quadratic model. If the cell width and height are not provided, square cell dimensions are assumed. Raises an error if the model cannot be drawn onto a Chimera graph of the given dimensions. Args: bqm (:obj:`dimod.BinaryQuadraticModel`): Should be equivalent to a Chimera graph or a subgraph of a Chimera graph produced by dnx.chimera_graph. The nodes and edges should use integer labels, as in dnx.chimera_graph. width (int, optional): The number of cells across the width of the Chimera graph. height (int, optional): The number of cells across the height of the Chimera graph. Examples: >>> from dwave.embedding.drawing import draw_chimera_bqm >>> from dimod import BinaryQuadraticModel >>> Q={(0, 0): 2, (1, 1): 1, (2, 2): 0, (3, 3): -1, (4, 4): -2, (5, 5): -2, (6, 6): -2, (7, 7): -2, ... (0, 4): 2, (0, 4): -1, (1, 7): 1, (1, 5): 0, (2, 5): -2, (2, 6): -2, (3, 4): -2, (3, 7): -2} >>> draw_chimera_bqm(BinaryQuadraticModel.from_qubo(Q), width=1, height=1)
[ "Draws", "a", "Chimera", "Graph", "representation", "of", "a", "Binary", "Quadratic", "Model", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/drawing.py#L23-L104
train
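A short sketch (not from the record) showing the auto-sizing branch: with width and height omitted, the cell count is inferred from the largest node index. It assumes the usual matplotlib backend that the dwave_networkx drawing helpers render to; the QUBO is a placeholder that fits one Chimera cell.

import matplotlib.pyplot as plt
from dimod import BinaryQuadraticModel
from dwave.embedding.drawing import draw_chimera_bqm

Q = {(0, 0): 1, (1, 1): -1, (4, 4): -2, (0, 4): -2, (1, 5): 1}   # nodes 0-5, one cell
draw_chimera_bqm(BinaryQuadraticModel.from_qubo(Q))               # 1 x 1 cells inferred
plt.show()                                                        # render the figure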
dwavesystems/dwave-system
dwave/embedding/transforms.py
embed_bqm
def embed_bqm(source_bqm, embedding, target_adjacency, chain_strength=1.0, smear_vartype=None): """Embed a binary quadratic model onto a target graph. Args: source_bqm (:obj:`.BinaryQuadraticModel`): Binary quadratic model to embed. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source-model variable and t is a target-model variable. target_adjacency (dict/:class:`networkx.Graph`): Adjacency of the target graph as a dict of form {t: Nt, ...}, where t is a variable in the target graph and Nt is its set of neighbours. chain_strength (float, optional): Magnitude of the quadratic bias (in SPIN-space) applied between variables to create chains. Note that the energy penalty of chain breaks is 2 * `chain_strength`. smear_vartype (:class:`.Vartype`, optional, default=None): When a single variable is embedded, it's linear bias is 'smeared' evenly over the chain. This parameter determines whether the variable is smeared in SPIN or BINARY space. By default the embedding is done according to the given source_bqm. Returns: :obj:`.BinaryQuadraticModel`: Target binary quadratic model. Examples: This example embeds a fully connected :math:`K_3` graph onto a square target graph. Embedding is accomplished by an edge contraction operation on the target graph: target-nodes 2 and 3 are chained to represent source-node c. >>> import dimod >>> import networkx as nx >>> # Binary quadratic model for a triangular source graph >>> bqm = dimod.BinaryQuadraticModel.from_ising({}, {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1}) >>> # Target graph is a graph >>> target = nx.cycle_graph(4) >>> # Embedding from source to target graphs >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> # Embed the BQM >>> target_bqm = dimod.embed_bqm(bqm, embedding, target) >>> target_bqm.quadratic[(0, 1)] == bqm.quadratic[('a', 'b')] True >>> target_bqm.quadratic # doctest: +SKIP {(0, 1): 1.0, (0, 3): 1.0, (1, 2): 1.0, (2, 3): -1.0} This example embeds a fully connected :math:`K_3` graph onto the target graph of a dimod reference structured sampler, `StructureComposite`, using the dimod reference `ExactSolver` sampler with a square graph specified. Target-nodes 2 and 3 are chained to represent source-node c. 
>>> import dimod >>> # Binary quadratic model for a triangular source graph >>> bqm = dimod.BinaryQuadraticModel.from_ising({}, {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1}) >>> # Structured dimod sampler with a structure defined by a square graph >>> sampler = dimod.StructureComposite(dimod.ExactSolver(), [0, 1, 2, 3], [(0, 1), (1, 2), (2, 3), (0, 3)]) >>> # Embedding from source to target graph >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> # Embed the BQM >>> target_bqm = dimod.embed_bqm(bqm, embedding, sampler.adjacency) >>> # Sample >>> samples = sampler.sample(target_bqm) >>> samples.record.sample # doctest: +SKIP array([[-1, -1, -1, -1], [ 1, -1, -1, -1], [ 1, 1, -1, -1], [-1, 1, -1, -1], [-1, 1, 1, -1], >>> # Snipped above samples for brevity """ if smear_vartype is dimod.SPIN and source_bqm.vartype is dimod.BINARY: return embed_bqm(source_bqm.spin, embedding, target_adjacency, chain_strength=chain_strength, smear_vartype=None).binary elif smear_vartype is dimod.BINARY and source_bqm.vartype is dimod.SPIN: return embed_bqm(source_bqm.binary, embedding, target_adjacency, chain_strength=chain_strength, smear_vartype=None).spin # create a new empty binary quadratic model with the same class as source_bqm target_bqm = source_bqm.empty(source_bqm.vartype) # add the offset target_bqm.add_offset(source_bqm.offset) # start with the linear biases, spreading the source bias equally over the target variables in # the chain for v, bias in iteritems(source_bqm.linear): if v in embedding: chain = embedding[v] else: raise MissingChainError(v) if any(u not in target_adjacency for u in chain): raise InvalidNodeError(v, next(u not in target_adjacency for u in chain)) b = bias / len(chain) target_bqm.add_variables_from({u: b for u in chain}) # next up the quadratic biases, spread the quadratic biases evenly over the available # interactions for (u, v), bias in iteritems(source_bqm.quadratic): available_interactions = {(s, t) for s in embedding[u] for t in embedding[v] if s in target_adjacency[t]} if not available_interactions: raise MissingEdgeError(u, v) b = bias / len(available_interactions) target_bqm.add_interactions_from((u, v, b) for u, v in available_interactions) for chain in itervalues(embedding): # in the case where the chain has length 1, there are no chain quadratic biases, but we # none-the-less want the chain variables to appear in the target_bqm if len(chain) == 1: v, = chain target_bqm.add_variable(v, 0.0) continue quadratic_chain_biases = chain_to_quadratic(chain, target_adjacency, chain_strength) target_bqm.add_interactions_from(quadratic_chain_biases, vartype=dimod.SPIN) # these are spin # add the energy for satisfied chains to the offset energy_diff = -sum(itervalues(quadratic_chain_biases)) target_bqm.add_offset(energy_diff) return target_bqm
python
def embed_bqm(source_bqm, embedding, target_adjacency, chain_strength=1.0, smear_vartype=None): """Embed a binary quadratic model onto a target graph. Args: source_bqm (:obj:`.BinaryQuadraticModel`): Binary quadratic model to embed. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source-model variable and t is a target-model variable. target_adjacency (dict/:class:`networkx.Graph`): Adjacency of the target graph as a dict of form {t: Nt, ...}, where t is a variable in the target graph and Nt is its set of neighbours. chain_strength (float, optional): Magnitude of the quadratic bias (in SPIN-space) applied between variables to create chains. Note that the energy penalty of chain breaks is 2 * `chain_strength`. smear_vartype (:class:`.Vartype`, optional, default=None): When a single variable is embedded, it's linear bias is 'smeared' evenly over the chain. This parameter determines whether the variable is smeared in SPIN or BINARY space. By default the embedding is done according to the given source_bqm. Returns: :obj:`.BinaryQuadraticModel`: Target binary quadratic model. Examples: This example embeds a fully connected :math:`K_3` graph onto a square target graph. Embedding is accomplished by an edge contraction operation on the target graph: target-nodes 2 and 3 are chained to represent source-node c. >>> import dimod >>> import networkx as nx >>> # Binary quadratic model for a triangular source graph >>> bqm = dimod.BinaryQuadraticModel.from_ising({}, {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1}) >>> # Target graph is a graph >>> target = nx.cycle_graph(4) >>> # Embedding from source to target graphs >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> # Embed the BQM >>> target_bqm = dimod.embed_bqm(bqm, embedding, target) >>> target_bqm.quadratic[(0, 1)] == bqm.quadratic[('a', 'b')] True >>> target_bqm.quadratic # doctest: +SKIP {(0, 1): 1.0, (0, 3): 1.0, (1, 2): 1.0, (2, 3): -1.0} This example embeds a fully connected :math:`K_3` graph onto the target graph of a dimod reference structured sampler, `StructureComposite`, using the dimod reference `ExactSolver` sampler with a square graph specified. Target-nodes 2 and 3 are chained to represent source-node c. 
>>> import dimod >>> # Binary quadratic model for a triangular source graph >>> bqm = dimod.BinaryQuadraticModel.from_ising({}, {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1}) >>> # Structured dimod sampler with a structure defined by a square graph >>> sampler = dimod.StructureComposite(dimod.ExactSolver(), [0, 1, 2, 3], [(0, 1), (1, 2), (2, 3), (0, 3)]) >>> # Embedding from source to target graph >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> # Embed the BQM >>> target_bqm = dimod.embed_bqm(bqm, embedding, sampler.adjacency) >>> # Sample >>> samples = sampler.sample(target_bqm) >>> samples.record.sample # doctest: +SKIP array([[-1, -1, -1, -1], [ 1, -1, -1, -1], [ 1, 1, -1, -1], [-1, 1, -1, -1], [-1, 1, 1, -1], >>> # Snipped above samples for brevity """ if smear_vartype is dimod.SPIN and source_bqm.vartype is dimod.BINARY: return embed_bqm(source_bqm.spin, embedding, target_adjacency, chain_strength=chain_strength, smear_vartype=None).binary elif smear_vartype is dimod.BINARY and source_bqm.vartype is dimod.SPIN: return embed_bqm(source_bqm.binary, embedding, target_adjacency, chain_strength=chain_strength, smear_vartype=None).spin # create a new empty binary quadratic model with the same class as source_bqm target_bqm = source_bqm.empty(source_bqm.vartype) # add the offset target_bqm.add_offset(source_bqm.offset) # start with the linear biases, spreading the source bias equally over the target variables in # the chain for v, bias in iteritems(source_bqm.linear): if v in embedding: chain = embedding[v] else: raise MissingChainError(v) if any(u not in target_adjacency for u in chain): raise InvalidNodeError(v, next(u not in target_adjacency for u in chain)) b = bias / len(chain) target_bqm.add_variables_from({u: b for u in chain}) # next up the quadratic biases, spread the quadratic biases evenly over the available # interactions for (u, v), bias in iteritems(source_bqm.quadratic): available_interactions = {(s, t) for s in embedding[u] for t in embedding[v] if s in target_adjacency[t]} if not available_interactions: raise MissingEdgeError(u, v) b = bias / len(available_interactions) target_bqm.add_interactions_from((u, v, b) for u, v in available_interactions) for chain in itervalues(embedding): # in the case where the chain has length 1, there are no chain quadratic biases, but we # none-the-less want the chain variables to appear in the target_bqm if len(chain) == 1: v, = chain target_bqm.add_variable(v, 0.0) continue quadratic_chain_biases = chain_to_quadratic(chain, target_adjacency, chain_strength) target_bqm.add_interactions_from(quadratic_chain_biases, vartype=dimod.SPIN) # these are spin # add the energy for satisfied chains to the offset energy_diff = -sum(itervalues(quadratic_chain_biases)) target_bqm.add_offset(energy_diff) return target_bqm
[ "def", "embed_bqm", "(", "source_bqm", ",", "embedding", ",", "target_adjacency", ",", "chain_strength", "=", "1.0", ",", "smear_vartype", "=", "None", ")", ":", "if", "smear_vartype", "is", "dimod", ".", "SPIN", "and", "source_bqm", ".", "vartype", "is", "dimod", ".", "BINARY", ":", "return", "embed_bqm", "(", "source_bqm", ".", "spin", ",", "embedding", ",", "target_adjacency", ",", "chain_strength", "=", "chain_strength", ",", "smear_vartype", "=", "None", ")", ".", "binary", "elif", "smear_vartype", "is", "dimod", ".", "BINARY", "and", "source_bqm", ".", "vartype", "is", "dimod", ".", "SPIN", ":", "return", "embed_bqm", "(", "source_bqm", ".", "binary", ",", "embedding", ",", "target_adjacency", ",", "chain_strength", "=", "chain_strength", ",", "smear_vartype", "=", "None", ")", ".", "spin", "# create a new empty binary quadratic model with the same class as source_bqm", "target_bqm", "=", "source_bqm", ".", "empty", "(", "source_bqm", ".", "vartype", ")", "# add the offset", "target_bqm", ".", "add_offset", "(", "source_bqm", ".", "offset", ")", "# start with the linear biases, spreading the source bias equally over the target variables in", "# the chain", "for", "v", ",", "bias", "in", "iteritems", "(", "source_bqm", ".", "linear", ")", ":", "if", "v", "in", "embedding", ":", "chain", "=", "embedding", "[", "v", "]", "else", ":", "raise", "MissingChainError", "(", "v", ")", "if", "any", "(", "u", "not", "in", "target_adjacency", "for", "u", "in", "chain", ")", ":", "raise", "InvalidNodeError", "(", "v", ",", "next", "(", "u", "not", "in", "target_adjacency", "for", "u", "in", "chain", ")", ")", "b", "=", "bias", "/", "len", "(", "chain", ")", "target_bqm", ".", "add_variables_from", "(", "{", "u", ":", "b", "for", "u", "in", "chain", "}", ")", "# next up the quadratic biases, spread the quadratic biases evenly over the available", "# interactions", "for", "(", "u", ",", "v", ")", ",", "bias", "in", "iteritems", "(", "source_bqm", ".", "quadratic", ")", ":", "available_interactions", "=", "{", "(", "s", ",", "t", ")", "for", "s", "in", "embedding", "[", "u", "]", "for", "t", "in", "embedding", "[", "v", "]", "if", "s", "in", "target_adjacency", "[", "t", "]", "}", "if", "not", "available_interactions", ":", "raise", "MissingEdgeError", "(", "u", ",", "v", ")", "b", "=", "bias", "/", "len", "(", "available_interactions", ")", "target_bqm", ".", "add_interactions_from", "(", "(", "u", ",", "v", ",", "b", ")", "for", "u", ",", "v", "in", "available_interactions", ")", "for", "chain", "in", "itervalues", "(", "embedding", ")", ":", "# in the case where the chain has length 1, there are no chain quadratic biases, but we", "# none-the-less want the chain variables to appear in the target_bqm", "if", "len", "(", "chain", ")", "==", "1", ":", "v", ",", "=", "chain", "target_bqm", ".", "add_variable", "(", "v", ",", "0.0", ")", "continue", "quadratic_chain_biases", "=", "chain_to_quadratic", "(", "chain", ",", "target_adjacency", ",", "chain_strength", ")", "target_bqm", ".", "add_interactions_from", "(", "quadratic_chain_biases", ",", "vartype", "=", "dimod", ".", "SPIN", ")", "# these are spin", "# add the energy for satisfied chains to the offset", "energy_diff", "=", "-", "sum", "(", "itervalues", "(", "quadratic_chain_biases", ")", ")", "target_bqm", ".", "add_offset", "(", "energy_diff", ")", "return", "target_bqm" ]
Embed a binary quadratic model onto a target graph. Args: source_bqm (:obj:`.BinaryQuadraticModel`): Binary quadratic model to embed. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source-model variable and t is a target-model variable. target_adjacency (dict/:class:`networkx.Graph`): Adjacency of the target graph as a dict of form {t: Nt, ...}, where t is a variable in the target graph and Nt is its set of neighbours. chain_strength (float, optional): Magnitude of the quadratic bias (in SPIN-space) applied between variables to create chains. Note that the energy penalty of chain breaks is 2 * `chain_strength`. smear_vartype (:class:`.Vartype`, optional, default=None): When a single variable is embedded, its linear bias is 'smeared' evenly over the chain. This parameter determines whether the variable is smeared in SPIN or BINARY space. By default the embedding is done according to the given source_bqm. Returns: :obj:`.BinaryQuadraticModel`: Target binary quadratic model. Examples: This example embeds a fully connected :math:`K_3` graph onto a square target graph. Embedding is accomplished by an edge contraction operation on the target graph: target-nodes 2 and 3 are chained to represent source-node c. >>> import dimod >>> import networkx as nx >>> # Binary quadratic model for a triangular source graph >>> bqm = dimod.BinaryQuadraticModel.from_ising({}, {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1}) >>> # Target graph is a square graph >>> target = nx.cycle_graph(4) >>> # Embedding from source to target graphs >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> # Embed the BQM >>> target_bqm = dimod.embed_bqm(bqm, embedding, target) >>> target_bqm.quadratic[(0, 1)] == bqm.quadratic[('a', 'b')] True >>> target_bqm.quadratic # doctest: +SKIP {(0, 1): 1.0, (0, 3): 1.0, (1, 2): 1.0, (2, 3): -1.0} This example embeds a fully connected :math:`K_3` graph onto the target graph of a dimod reference structured sampler, `StructureComposite`, using the dimod reference `ExactSolver` sampler with a square graph specified. Target-nodes 2 and 3 are chained to represent source-node c. >>> import dimod >>> # Binary quadratic model for a triangular source graph >>> bqm = dimod.BinaryQuadraticModel.from_ising({}, {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1}) >>> # Structured dimod sampler with a structure defined by a square graph >>> sampler = dimod.StructureComposite(dimod.ExactSolver(), [0, 1, 2, 3], [(0, 1), (1, 2), (2, 3), (0, 3)]) >>> # Embedding from source to target graph >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> # Embed the BQM >>> target_bqm = dimod.embed_bqm(bqm, embedding, sampler.adjacency) >>> # Sample >>> samples = sampler.sample(target_bqm) >>> samples.record.sample # doctest: +SKIP array([[-1, -1, -1, -1], [ 1, -1, -1, -1], [ 1, 1, -1, -1], [-1, 1, -1, -1], [-1, 1, 1, -1], >>> # Snipped above samples for brevity
[ "Embed", "a", "binary", "quadratic", "model", "onto", "a", "target", "graph", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/transforms.py#L38-L168
train
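A small sketch (not from the record's doctests) exercising the smear_vartype parameter, which the examples above do not cover. It imports from the module named in this record's path field; the one-variable QUBO, the two-qubit chain, and the bias values are placeholders.

import dimod
from dwave.embedding.transforms import embed_bqm

bqm = dimod.BinaryQuadraticModel.from_qubo({('x', 'x'): -1.0})   # single source variable
embedding = {'x': {0, 1}}                                        # chain of length 2
target_adjacency = {0: {1}, 1: {0}}

target_bqm = embed_bqm(bqm, embedding, target_adjacency,
                       chain_strength=2.0, smear_vartype=dimod.SPIN)
# The lone BINARY bias is smeared over qubits 0 and 1 in SPIN space, and the chain adds
# a -chain_strength coupling between them before the result converts back to BINARY.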
dwavesystems/dwave-system
dwave/embedding/transforms.py
embed_ising
def embed_ising(source_h, source_J, embedding, target_adjacency, chain_strength=1.0): """Embed an Ising problem onto a target graph. Args: source_h (dict[variable, bias]/list[bias]): Linear biases of the Ising problem. If a list, the list's indices are used as variable labels. source_J (dict[(variable, variable), bias]): Quadratic biases of the Ising problem. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source-model variable and t is a target-model variable. target_adjacency (dict/:class:`networkx.Graph`): Adjacency of the target graph as a dict of form {t: Nt, ...}, where t is a target-graph variable and Nt is its set of neighbours. chain_strength (float, optional): Magnitude of the quadratic bias (in SPIN-space) applied between variables to form a chain. Note that the energy penalty of chain breaks is 2 * `chain_strength`. Returns: tuple: A 2-tuple: dict[variable, bias]: Linear biases of the target Ising problem. dict[(variable, variable), bias]: Quadratic biases of the target Ising problem. Examples: This example embeds a fully connected :math:`K_3` graph onto a square target graph. Embedding is accomplished by an edge contraction operation on the target graph: target-nodes 2 and 3 are chained to represent source-node c. >>> import dimod >>> import networkx as nx >>> # Ising problem for a triangular source graph >>> h = {} >>> J = {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1} >>> # Target graph is a square graph >>> target = nx.cycle_graph(4) >>> # Embedding from source to target graph >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> # Embed the Ising problem >>> target_h, target_J = dimod.embed_ising(h, J, embedding, target) >>> target_J[(0, 1)] == J[('a', 'b')] True >>> target_J # doctest: +SKIP {(0, 1): 1.0, (0, 3): 1.0, (1, 2): 1.0, (2, 3): -1.0} This example embeds a fully connected :math:`K_3` graph onto the target graph of a dimod reference structured sampler, `StructureComposite`, using the dimod reference `ExactSolver` sampler with a square graph specified. Target-nodes 2 and 3 are chained to represent source-node c. >>> import dimod >>> # Ising problem for a triangular source graph >>> h = {} >>> J = {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1} >>> # Structured dimod sampler with a structure defined by a square graph >>> sampler = dimod.StructureComposite(dimod.ExactSolver(), [0, 1, 2, 3], [(0, 1), (1, 2), (2, 3), (0, 3)]) >>> # Embedding from source to target graph >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> # Embed the Ising problem >>> target_h, target_J = dimod.embed_ising(h, J, embedding, sampler.adjacency) >>> # Sample >>> samples = sampler.sample_ising(target_h, target_J) >>> for sample in samples.samples(n=3, sorted_by='energy'): # doctest: +SKIP ... print(sample) ... {0: 1, 1: -1, 2: -1, 3: -1} {0: 1, 1: 1, 2: -1, 3: -1} {0: -1, 1: 1, 2: -1, 3: -1} """ source_bqm = dimod.BinaryQuadraticModel.from_ising(source_h, source_J) target_bqm = embed_bqm(source_bqm, embedding, target_adjacency, chain_strength=chain_strength) target_h, target_J, __ = target_bqm.to_ising() return target_h, target_J
python
def embed_ising(source_h, source_J, embedding, target_adjacency, chain_strength=1.0): """Embed an Ising problem onto a target graph. Args: source_h (dict[variable, bias]/list[bias]): Linear biases of the Ising problem. If a list, the list's indices are used as variable labels. source_J (dict[(variable, variable), bias]): Quadratic biases of the Ising problem. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source-model variable and t is a target-model variable. target_adjacency (dict/:class:`networkx.Graph`): Adjacency of the target graph as a dict of form {t: Nt, ...}, where t is a target-graph variable and Nt is its set of neighbours. chain_strength (float, optional): Magnitude of the quadratic bias (in SPIN-space) applied between variables to form a chain. Note that the energy penalty of chain breaks is 2 * `chain_strength`. Returns: tuple: A 2-tuple: dict[variable, bias]: Linear biases of the target Ising problem. dict[(variable, variable), bias]: Quadratic biases of the target Ising problem. Examples: This example embeds a fully connected :math:`K_3` graph onto a square target graph. Embedding is accomplished by an edge contraction operation on the target graph: target-nodes 2 and 3 are chained to represent source-node c. >>> import dimod >>> import networkx as nx >>> # Ising problem for a triangular source graph >>> h = {} >>> J = {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1} >>> # Target graph is a square graph >>> target = nx.cycle_graph(4) >>> # Embedding from source to target graph >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> # Embed the Ising problem >>> target_h, target_J = dimod.embed_ising(h, J, embedding, target) >>> target_J[(0, 1)] == J[('a', 'b')] True >>> target_J # doctest: +SKIP {(0, 1): 1.0, (0, 3): 1.0, (1, 2): 1.0, (2, 3): -1.0} This example embeds a fully connected :math:`K_3` graph onto the target graph of a dimod reference structured sampler, `StructureComposite`, using the dimod reference `ExactSolver` sampler with a square graph specified. Target-nodes 2 and 3 are chained to represent source-node c. >>> import dimod >>> # Ising problem for a triangular source graph >>> h = {} >>> J = {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1} >>> # Structured dimod sampler with a structure defined by a square graph >>> sampler = dimod.StructureComposite(dimod.ExactSolver(), [0, 1, 2, 3], [(0, 1), (1, 2), (2, 3), (0, 3)]) >>> # Embedding from source to target graph >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> # Embed the Ising problem >>> target_h, target_J = dimod.embed_ising(h, J, embedding, sampler.adjacency) >>> # Sample >>> samples = sampler.sample_ising(target_h, target_J) >>> for sample in samples.samples(n=3, sorted_by='energy'): # doctest: +SKIP ... print(sample) ... {0: 1, 1: -1, 2: -1, 3: -1} {0: 1, 1: 1, 2: -1, 3: -1} {0: -1, 1: 1, 2: -1, 3: -1} """ source_bqm = dimod.BinaryQuadraticModel.from_ising(source_h, source_J) target_bqm = embed_bqm(source_bqm, embedding, target_adjacency, chain_strength=chain_strength) target_h, target_J, __ = target_bqm.to_ising() return target_h, target_J
[ "def", "embed_ising", "(", "source_h", ",", "source_J", ",", "embedding", ",", "target_adjacency", ",", "chain_strength", "=", "1.0", ")", ":", "source_bqm", "=", "dimod", ".", "BinaryQuadraticModel", ".", "from_ising", "(", "source_h", ",", "source_J", ")", "target_bqm", "=", "embed_bqm", "(", "source_bqm", ",", "embedding", ",", "target_adjacency", ",", "chain_strength", "=", "chain_strength", ")", "target_h", ",", "target_J", ",", "__", "=", "target_bqm", ".", "to_ising", "(", ")", "return", "target_h", ",", "target_J" ]
Embed an Ising problem onto a target graph. Args: source_h (dict[variable, bias]/list[bias]): Linear biases of the Ising problem. If a list, the list's indices are used as variable labels. source_J (dict[(variable, variable), bias]): Quadratic biases of the Ising problem. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source-model variable and t is a target-model variable. target_adjacency (dict/:class:`networkx.Graph`): Adjacency of the target graph as a dict of form {t: Nt, ...}, where t is a target-graph variable and Nt is its set of neighbours. chain_strength (float, optional): Magnitude of the quadratic bias (in SPIN-space) applied between variables to form a chain. Note that the energy penalty of chain breaks is 2 * `chain_strength`. Returns: tuple: A 2-tuple: dict[variable, bias]: Linear biases of the target Ising problem. dict[(variable, variable), bias]: Quadratic biases of the target Ising problem. Examples: This example embeds a fully connected :math:`K_3` graph onto a square target graph. Embedding is accomplished by an edge contraction operation on the target graph: target-nodes 2 and 3 are chained to represent source-node c. >>> import dimod >>> import networkx as nx >>> # Ising problem for a triangular source graph >>> h = {} >>> J = {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1} >>> # Target graph is a square graph >>> target = nx.cycle_graph(4) >>> # Embedding from source to target graph >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> # Embed the Ising problem >>> target_h, target_J = dimod.embed_ising(h, J, embedding, target) >>> target_J[(0, 1)] == J[('a', 'b')] True >>> target_J # doctest: +SKIP {(0, 1): 1.0, (0, 3): 1.0, (1, 2): 1.0, (2, 3): -1.0} This example embeds a fully connected :math:`K_3` graph onto the target graph of a dimod reference structured sampler, `StructureComposite`, using the dimod reference `ExactSolver` sampler with a square graph specified. Target-nodes 2 and 3 are chained to represent source-node c. >>> import dimod >>> # Ising problem for a triangular source graph >>> h = {} >>> J = {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1} >>> # Structured dimod sampler with a structure defined by a square graph >>> sampler = dimod.StructureComposite(dimod.ExactSolver(), [0, 1, 2, 3], [(0, 1), (1, 2), (2, 3), (0, 3)]) >>> # Embedding from source to target graph >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}} >>> # Embed the Ising problem >>> target_h, target_J = dimod.embed_ising(h, J, embedding, sampler.adjacency) >>> # Sample >>> samples = sampler.sample_ising(target_h, target_J) >>> for sample in samples.samples(n=3, sorted_by='energy'): # doctest: +SKIP ... print(sample) ... {0: 1, 1: -1, 2: -1, 3: -1} {0: 1, 1: 1, 2: -1, 3: -1} {0: -1, 1: 1, 2: -1, 3: -1}
[ "Embed", "an", "Ising", "problem", "onto", "a", "target", "graph", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/transforms.py#L171-L250
train
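A brief sketch (not from the record) making the wrapper relationship explicit: embed_ising builds a BinaryQuadraticModel, calls embed_bqm, and returns the resulting linear and quadratic biases while discarding the offset. The square target graph and triangle problem mirror the doctest above; imports follow this record's path field.

import dimod
import networkx as nx
from dwave.embedding.transforms import embed_bqm, embed_ising

h = {}
J = {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1}
embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}}
target = nx.cycle_graph(4)

target_h, target_J = embed_ising(h, J, embedding, target)
target_bqm = embed_bqm(dimod.BinaryQuadraticModel.from_ising(h, J), embedding, target)
# target_h / target_J correspond to target_bqm.linear / target_bqm.quadratic; the offset
# that to_ising() also reports is dropped by embed_ising.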
dwavesystems/dwave-system
dwave/embedding/transforms.py
embed_qubo
def embed_qubo(source_Q, embedding, target_adjacency, chain_strength=1.0): """Embed a QUBO onto a target graph. Args: source_Q (dict[(variable, variable), bias]): Coefficients of a quadratic unconstrained binary optimization (QUBO) model. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source-model variable and t is a target-model variable. target_adjacency (dict/:class:`networkx.Graph`): Adjacency of the target graph as a dict of form {t: Nt, ...}, where t is a target-graph variable and Nt is its set of neighbours. chain_strength (float, optional): Magnitude of the quadratic bias (in SPIN-space) applied between variables to form a chain. Note that the energy penalty of chain breaks is 2 * `chain_strength`. Returns: dict[(variable, variable), bias]: Quadratic biases of the target QUBO. Examples: This example embeds a square source graph onto fully connected :math:`K_5` graph. Embedding is accomplished by an edge deletion operation on the target graph: target-node 0 is not used. >>> import dimod >>> import networkx as nx >>> # QUBO problem for a square graph >>> Q = {(1, 1): -4.0, (1, 2): 4.0, (2, 2): -4.0, (2, 3): 4.0, ... (3, 3): -4.0, (3, 4): 4.0, (4, 1): 4.0, (4, 4): -4.0} >>> # Target graph is a fully connected k5 graph >>> K_5 = nx.complete_graph(5) >>> 0 in K_5 True >>> # Embedding from source to target graph >>> embedding = {1: {4}, 2: {3}, 3: {1}, 4: {2}} >>> # Embed the QUBO >>> target_Q = dimod.embed_qubo(Q, embedding, K_5) >>> (0, 0) in target_Q False >>> target_Q # doctest: +SKIP {(1, 1): -4.0, (1, 2): 4.0, (2, 2): -4.0, (2, 4): 4.0, (3, 1): 4.0, (3, 3): -4.0, (4, 3): 4.0, (4, 4): -4.0} This example embeds a square graph onto the target graph of a dimod reference structured sampler, `StructureComposite`, using the dimod reference `ExactSolver` sampler with a fully connected :math:`K_5` graph specified. >>> import dimod >>> import networkx as nx >>> # QUBO problem for a square graph >>> Q = {(1, 1): -4.0, (1, 2): 4.0, (2, 2): -4.0, (2, 3): 4.0, ... (3, 3): -4.0, (3, 4): 4.0, (4, 1): 4.0, (4, 4): -4.0} >>> # Structured dimod sampler with a structure defined by a K5 graph >>> sampler = dimod.StructureComposite(dimod.ExactSolver(), list(K_5.nodes), list(K_5.edges)) >>> sampler.adjacency # doctest: +SKIP {0: {1, 2, 3, 4}, 1: {0, 2, 3, 4}, 2: {0, 1, 3, 4}, 3: {0, 1, 2, 4}, 4: {0, 1, 2, 3}} >>> # Embedding from source to target graph >>> embedding = {0: [4], 1: [3], 2: [1], 3: [2], 4: [0]} >>> # Embed the QUBO >>> target_Q = dimod.embed_qubo(Q, embedding, sampler.adjacency) >>> # Sample >>> samples = sampler.sample_qubo(target_Q) >>> for datum in samples.data(): # doctest: +SKIP ... print(datum) ... Sample(sample={1: 0, 2: 1, 3: 1, 4: 0}, energy=-8.0) Sample(sample={1: 1, 2: 0, 3: 0, 4: 1}, energy=-8.0) Sample(sample={1: 1, 2: 0, 3: 0, 4: 0}, energy=-4.0) Sample(sample={1: 1, 2: 1, 3: 0, 4: 0}, energy=-4.0) Sample(sample={1: 0, 2: 1, 3: 0, 4: 0}, energy=-4.0) Sample(sample={1: 1, 2: 1, 3: 1, 4: 0}, energy=-4.0) >>> # Snipped above samples for brevity """ source_bqm = dimod.BinaryQuadraticModel.from_qubo(source_Q) target_bqm = embed_bqm(source_bqm, embedding, target_adjacency, chain_strength=chain_strength) target_Q, __ = target_bqm.to_qubo() return target_Q
python
def embed_qubo(source_Q, embedding, target_adjacency, chain_strength=1.0): """Embed a QUBO onto a target graph. Args: source_Q (dict[(variable, variable), bias]): Coefficients of a quadratic unconstrained binary optimization (QUBO) model. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source-model variable and t is a target-model variable. target_adjacency (dict/:class:`networkx.Graph`): Adjacency of the target graph as a dict of form {t: Nt, ...}, where t is a target-graph variable and Nt is its set of neighbours. chain_strength (float, optional): Magnitude of the quadratic bias (in SPIN-space) applied between variables to form a chain. Note that the energy penalty of chain breaks is 2 * `chain_strength`. Returns: dict[(variable, variable), bias]: Quadratic biases of the target QUBO. Examples: This example embeds a square source graph onto fully connected :math:`K_5` graph. Embedding is accomplished by an edge deletion operation on the target graph: target-node 0 is not used. >>> import dimod >>> import networkx as nx >>> # QUBO problem for a square graph >>> Q = {(1, 1): -4.0, (1, 2): 4.0, (2, 2): -4.0, (2, 3): 4.0, ... (3, 3): -4.0, (3, 4): 4.0, (4, 1): 4.0, (4, 4): -4.0} >>> # Target graph is a fully connected k5 graph >>> K_5 = nx.complete_graph(5) >>> 0 in K_5 True >>> # Embedding from source to target graph >>> embedding = {1: {4}, 2: {3}, 3: {1}, 4: {2}} >>> # Embed the QUBO >>> target_Q = dimod.embed_qubo(Q, embedding, K_5) >>> (0, 0) in target_Q False >>> target_Q # doctest: +SKIP {(1, 1): -4.0, (1, 2): 4.0, (2, 2): -4.0, (2, 4): 4.0, (3, 1): 4.0, (3, 3): -4.0, (4, 3): 4.0, (4, 4): -4.0} This example embeds a square graph onto the target graph of a dimod reference structured sampler, `StructureComposite`, using the dimod reference `ExactSolver` sampler with a fully connected :math:`K_5` graph specified. >>> import dimod >>> import networkx as nx >>> # QUBO problem for a square graph >>> Q = {(1, 1): -4.0, (1, 2): 4.0, (2, 2): -4.0, (2, 3): 4.0, ... (3, 3): -4.0, (3, 4): 4.0, (4, 1): 4.0, (4, 4): -4.0} >>> # Structured dimod sampler with a structure defined by a K5 graph >>> sampler = dimod.StructureComposite(dimod.ExactSolver(), list(K_5.nodes), list(K_5.edges)) >>> sampler.adjacency # doctest: +SKIP {0: {1, 2, 3, 4}, 1: {0, 2, 3, 4}, 2: {0, 1, 3, 4}, 3: {0, 1, 2, 4}, 4: {0, 1, 2, 3}} >>> # Embedding from source to target graph >>> embedding = {0: [4], 1: [3], 2: [1], 3: [2], 4: [0]} >>> # Embed the QUBO >>> target_Q = dimod.embed_qubo(Q, embedding, sampler.adjacency) >>> # Sample >>> samples = sampler.sample_qubo(target_Q) >>> for datum in samples.data(): # doctest: +SKIP ... print(datum) ... Sample(sample={1: 0, 2: 1, 3: 1, 4: 0}, energy=-8.0) Sample(sample={1: 1, 2: 0, 3: 0, 4: 1}, energy=-8.0) Sample(sample={1: 1, 2: 0, 3: 0, 4: 0}, energy=-4.0) Sample(sample={1: 1, 2: 1, 3: 0, 4: 0}, energy=-4.0) Sample(sample={1: 0, 2: 1, 3: 0, 4: 0}, energy=-4.0) Sample(sample={1: 1, 2: 1, 3: 1, 4: 0}, energy=-4.0) >>> # Snipped above samples for brevity """ source_bqm = dimod.BinaryQuadraticModel.from_qubo(source_Q) target_bqm = embed_bqm(source_bqm, embedding, target_adjacency, chain_strength=chain_strength) target_Q, __ = target_bqm.to_qubo() return target_Q
[ "def", "embed_qubo", "(", "source_Q", ",", "embedding", ",", "target_adjacency", ",", "chain_strength", "=", "1.0", ")", ":", "source_bqm", "=", "dimod", ".", "BinaryQuadraticModel", ".", "from_qubo", "(", "source_Q", ")", "target_bqm", "=", "embed_bqm", "(", "source_bqm", ",", "embedding", ",", "target_adjacency", ",", "chain_strength", "=", "chain_strength", ")", "target_Q", ",", "__", "=", "target_bqm", ".", "to_qubo", "(", ")", "return", "target_Q" ]
Embed a QUBO onto a target graph. Args: source_Q (dict[(variable, variable), bias]): Coefficients of a quadratic unconstrained binary optimization (QUBO) model. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source-model variable and t is a target-model variable. target_adjacency (dict/:class:`networkx.Graph`): Adjacency of the target graph as a dict of form {t: Nt, ...}, where t is a target-graph variable and Nt is its set of neighbours. chain_strength (float, optional): Magnitude of the quadratic bias (in SPIN-space) applied between variables to form a chain. Note that the energy penalty of chain breaks is 2 * `chain_strength`. Returns: dict[(variable, variable), bias]: Quadratic biases of the target QUBO. Examples: This example embeds a square source graph onto fully connected :math:`K_5` graph. Embedding is accomplished by an edge deletion operation on the target graph: target-node 0 is not used. >>> import dimod >>> import networkx as nx >>> # QUBO problem for a square graph >>> Q = {(1, 1): -4.0, (1, 2): 4.0, (2, 2): -4.0, (2, 3): 4.0, ... (3, 3): -4.0, (3, 4): 4.0, (4, 1): 4.0, (4, 4): -4.0} >>> # Target graph is a fully connected k5 graph >>> K_5 = nx.complete_graph(5) >>> 0 in K_5 True >>> # Embedding from source to target graph >>> embedding = {1: {4}, 2: {3}, 3: {1}, 4: {2}} >>> # Embed the QUBO >>> target_Q = dimod.embed_qubo(Q, embedding, K_5) >>> (0, 0) in target_Q False >>> target_Q # doctest: +SKIP {(1, 1): -4.0, (1, 2): 4.0, (2, 2): -4.0, (2, 4): 4.0, (3, 1): 4.0, (3, 3): -4.0, (4, 3): 4.0, (4, 4): -4.0} This example embeds a square graph onto the target graph of a dimod reference structured sampler, `StructureComposite`, using the dimod reference `ExactSolver` sampler with a fully connected :math:`K_5` graph specified. >>> import dimod >>> import networkx as nx >>> # QUBO problem for a square graph >>> Q = {(1, 1): -4.0, (1, 2): 4.0, (2, 2): -4.0, (2, 3): 4.0, ... (3, 3): -4.0, (3, 4): 4.0, (4, 1): 4.0, (4, 4): -4.0} >>> # Structured dimod sampler with a structure defined by a K5 graph >>> sampler = dimod.StructureComposite(dimod.ExactSolver(), list(K_5.nodes), list(K_5.edges)) >>> sampler.adjacency # doctest: +SKIP {0: {1, 2, 3, 4}, 1: {0, 2, 3, 4}, 2: {0, 1, 3, 4}, 3: {0, 1, 2, 4}, 4: {0, 1, 2, 3}} >>> # Embedding from source to target graph >>> embedding = {0: [4], 1: [3], 2: [1], 3: [2], 4: [0]} >>> # Embed the QUBO >>> target_Q = dimod.embed_qubo(Q, embedding, sampler.adjacency) >>> # Sample >>> samples = sampler.sample_qubo(target_Q) >>> for datum in samples.data(): # doctest: +SKIP ... print(datum) ... Sample(sample={1: 0, 2: 1, 3: 1, 4: 0}, energy=-8.0) Sample(sample={1: 1, 2: 0, 3: 0, 4: 1}, energy=-8.0) Sample(sample={1: 1, 2: 0, 3: 0, 4: 0}, energy=-4.0) Sample(sample={1: 1, 2: 1, 3: 0, 4: 0}, energy=-4.0) Sample(sample={1: 0, 2: 1, 3: 0, 4: 0}, energy=-4.0) Sample(sample={1: 1, 2: 1, 3: 1, 4: 0}, energy=-4.0) >>> # Snipped above samples for brevity
[ "Embed", "a", "QUBO", "onto", "a", "target", "graph", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/transforms.py#L253-L343
train
dwavesystems/dwave-system
dwave/embedding/transforms.py
unembed_sampleset
def unembed_sampleset(target_sampleset, embedding, source_bqm, chain_break_method=None, chain_break_fraction=False): """Unembed the samples set. Construct a sample set for the source binary quadratic model (BQM) by unembedding the given samples from the target BQM. Args: target_sampleset (:obj:`dimod.SampleSet`): SampleSet from the target BQM. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source variable and t is a target variable. source_bqm (:obj:`dimod.BinaryQuadraticModel`): Source binary quadratic model. chain_break_method (function, optional): Method used to resolve chain breaks. See :mod:`dwave.embedding.chain_breaks`. chain_break_fraction (bool, optional, default=False): If True, a 'chain_break_fraction' field is added to the unembedded samples which report what fraction of the chains were broken before unembedding. Returns: :obj:`.SampleSet`: Examples: >>> import dimod ... >>> # say we have a bqm on a triangle and an embedding >>> J = {('a', 'b'): -1, ('b', 'c'): -1, ('a', 'c'): -1} >>> bqm = dimod.BinaryQuadraticModel.from_ising({}, J) >>> embedding = {'a': [0, 1], 'b': [2], 'c': [3]} ... >>> # and some samples from the embedding >>> samples = [{0: -1, 1: -1, 2: -1, 3: -1}, # [0, 1] is unbroken {0: -1, 1: +1, 2: +1, 3: +1}] # [0, 1] is broken >>> energies = [-3, 1] >>> embedded = dimod.SampleSet.from_samples(samples, dimod.SPIN, energies) ... >>> # unembed >>> samples = dwave.embedding.unembed_sampleset(embedded, embedding, bqm) >>> samples.record.sample # doctest: +SKIP array([[-1, -1, -1], [ 1, 1, 1]], dtype=int8) """ if chain_break_method is None: chain_break_method = majority_vote variables = list(source_bqm) try: chains = [embedding[v] for v in variables] except KeyError: raise ValueError("given bqm does not match the embedding") chain_idxs = [[target_sampleset.variables.index[v] for v in chain] for chain in chains] record = target_sampleset.record unembedded, idxs = chain_break_method(record.sample, chain_idxs) # dev note: this is a bug in dimod that empty unembedded is not handled, # in the future this try-except can be removed try: energies = source_bqm.energies((unembedded, variables)) except ValueError: datatypes = [('sample', np.dtype(np.int8), (len(variables),)), ('energy', np.float)] datatypes.extend((name, record[name].dtype, record[name].shape[1:]) for name in record.dtype.names if name not in {'sample', 'energy'}) if chain_break_fraction: datatypes.append(('chain_break_fraction', np.float64)) # there are no samples so everything is empty data = np.rec.array(np.empty(0, dtype=datatypes)) return dimod.SampleSet(data, variables, target_sampleset.info.copy(), target_sampleset.vartype) reserved = {'sample', 'energy'} vectors = {name: record[name][idxs] for name in record.dtype.names if name not in reserved} if chain_break_fraction: vectors['chain_break_fraction'] = broken_chains(record.sample, chain_idxs).mean(axis=1)[idxs] return dimod.SampleSet.from_samples((unembedded, variables), target_sampleset.vartype, energy=energies, info=target_sampleset.info.copy(), **vectors)
python
def unembed_sampleset(target_sampleset, embedding, source_bqm, chain_break_method=None, chain_break_fraction=False): """Unembed the samples set. Construct a sample set for the source binary quadratic model (BQM) by unembedding the given samples from the target BQM. Args: target_sampleset (:obj:`dimod.SampleSet`): SampleSet from the target BQM. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source variable and t is a target variable. source_bqm (:obj:`dimod.BinaryQuadraticModel`): Source binary quadratic model. chain_break_method (function, optional): Method used to resolve chain breaks. See :mod:`dwave.embedding.chain_breaks`. chain_break_fraction (bool, optional, default=False): If True, a 'chain_break_fraction' field is added to the unembedded samples which report what fraction of the chains were broken before unembedding. Returns: :obj:`.SampleSet`: Examples: >>> import dimod ... >>> # say we have a bqm on a triangle and an embedding >>> J = {('a', 'b'): -1, ('b', 'c'): -1, ('a', 'c'): -1} >>> bqm = dimod.BinaryQuadraticModel.from_ising({}, J) >>> embedding = {'a': [0, 1], 'b': [2], 'c': [3]} ... >>> # and some samples from the embedding >>> samples = [{0: -1, 1: -1, 2: -1, 3: -1}, # [0, 1] is unbroken {0: -1, 1: +1, 2: +1, 3: +1}] # [0, 1] is broken >>> energies = [-3, 1] >>> embedded = dimod.SampleSet.from_samples(samples, dimod.SPIN, energies) ... >>> # unembed >>> samples = dwave.embedding.unembed_sampleset(embedded, embedding, bqm) >>> samples.record.sample # doctest: +SKIP array([[-1, -1, -1], [ 1, 1, 1]], dtype=int8) """ if chain_break_method is None: chain_break_method = majority_vote variables = list(source_bqm) try: chains = [embedding[v] for v in variables] except KeyError: raise ValueError("given bqm does not match the embedding") chain_idxs = [[target_sampleset.variables.index[v] for v in chain] for chain in chains] record = target_sampleset.record unembedded, idxs = chain_break_method(record.sample, chain_idxs) # dev note: this is a bug in dimod that empty unembedded is not handled, # in the future this try-except can be removed try: energies = source_bqm.energies((unembedded, variables)) except ValueError: datatypes = [('sample', np.dtype(np.int8), (len(variables),)), ('energy', np.float)] datatypes.extend((name, record[name].dtype, record[name].shape[1:]) for name in record.dtype.names if name not in {'sample', 'energy'}) if chain_break_fraction: datatypes.append(('chain_break_fraction', np.float64)) # there are no samples so everything is empty data = np.rec.array(np.empty(0, dtype=datatypes)) return dimod.SampleSet(data, variables, target_sampleset.info.copy(), target_sampleset.vartype) reserved = {'sample', 'energy'} vectors = {name: record[name][idxs] for name in record.dtype.names if name not in reserved} if chain_break_fraction: vectors['chain_break_fraction'] = broken_chains(record.sample, chain_idxs).mean(axis=1)[idxs] return dimod.SampleSet.from_samples((unembedded, variables), target_sampleset.vartype, energy=energies, info=target_sampleset.info.copy(), **vectors)
[ "def", "unembed_sampleset", "(", "target_sampleset", ",", "embedding", ",", "source_bqm", ",", "chain_break_method", "=", "None", ",", "chain_break_fraction", "=", "False", ")", ":", "if", "chain_break_method", "is", "None", ":", "chain_break_method", "=", "majority_vote", "variables", "=", "list", "(", "source_bqm", ")", "try", ":", "chains", "=", "[", "embedding", "[", "v", "]", "for", "v", "in", "variables", "]", "except", "KeyError", ":", "raise", "ValueError", "(", "\"given bqm does not match the embedding\"", ")", "chain_idxs", "=", "[", "[", "target_sampleset", ".", "variables", ".", "index", "[", "v", "]", "for", "v", "in", "chain", "]", "for", "chain", "in", "chains", "]", "record", "=", "target_sampleset", ".", "record", "unembedded", ",", "idxs", "=", "chain_break_method", "(", "record", ".", "sample", ",", "chain_idxs", ")", "# dev note: this is a bug in dimod that empty unembedded is not handled,", "# in the future this try-except can be removed", "try", ":", "energies", "=", "source_bqm", ".", "energies", "(", "(", "unembedded", ",", "variables", ")", ")", "except", "ValueError", ":", "datatypes", "=", "[", "(", "'sample'", ",", "np", ".", "dtype", "(", "np", ".", "int8", ")", ",", "(", "len", "(", "variables", ")", ",", ")", ")", ",", "(", "'energy'", ",", "np", ".", "float", ")", "]", "datatypes", ".", "extend", "(", "(", "name", ",", "record", "[", "name", "]", ".", "dtype", ",", "record", "[", "name", "]", ".", "shape", "[", "1", ":", "]", ")", "for", "name", "in", "record", ".", "dtype", ".", "names", "if", "name", "not", "in", "{", "'sample'", ",", "'energy'", "}", ")", "if", "chain_break_fraction", ":", "datatypes", ".", "append", "(", "(", "'chain_break_fraction'", ",", "np", ".", "float64", ")", ")", "# there are no samples so everything is empty", "data", "=", "np", ".", "rec", ".", "array", "(", "np", ".", "empty", "(", "0", ",", "dtype", "=", "datatypes", ")", ")", "return", "dimod", ".", "SampleSet", "(", "data", ",", "variables", ",", "target_sampleset", ".", "info", ".", "copy", "(", ")", ",", "target_sampleset", ".", "vartype", ")", "reserved", "=", "{", "'sample'", ",", "'energy'", "}", "vectors", "=", "{", "name", ":", "record", "[", "name", "]", "[", "idxs", "]", "for", "name", "in", "record", ".", "dtype", ".", "names", "if", "name", "not", "in", "reserved", "}", "if", "chain_break_fraction", ":", "vectors", "[", "'chain_break_fraction'", "]", "=", "broken_chains", "(", "record", ".", "sample", ",", "chain_idxs", ")", ".", "mean", "(", "axis", "=", "1", ")", "[", "idxs", "]", "return", "dimod", ".", "SampleSet", ".", "from_samples", "(", "(", "unembedded", ",", "variables", ")", ",", "target_sampleset", ".", "vartype", ",", "energy", "=", "energies", ",", "info", "=", "target_sampleset", ".", "info", ".", "copy", "(", ")", ",", "*", "*", "vectors", ")" ]
Unembed the samples set. Construct a sample set for the source binary quadratic model (BQM) by unembedding the given samples from the target BQM. Args: target_sampleset (:obj:`dimod.SampleSet`): SampleSet from the target BQM. embedding (dict): Mapping from source graph to target graph as a dict of form {s: {t, ...}, ...}, where s is a source variable and t is a target variable. source_bqm (:obj:`dimod.BinaryQuadraticModel`): Source binary quadratic model. chain_break_method (function, optional): Method used to resolve chain breaks. See :mod:`dwave.embedding.chain_breaks`. chain_break_fraction (bool, optional, default=False): If True, a 'chain_break_fraction' field is added to the unembedded samples which report what fraction of the chains were broken before unembedding. Returns: :obj:`.SampleSet`: Examples: >>> import dimod ... >>> # say we have a bqm on a triangle and an embedding >>> J = {('a', 'b'): -1, ('b', 'c'): -1, ('a', 'c'): -1} >>> bqm = dimod.BinaryQuadraticModel.from_ising({}, J) >>> embedding = {'a': [0, 1], 'b': [2], 'c': [3]} ... >>> # and some samples from the embedding >>> samples = [{0: -1, 1: -1, 2: -1, 3: -1}, # [0, 1] is unbroken {0: -1, 1: +1, 2: +1, 3: +1}] # [0, 1] is broken >>> energies = [-3, 1] >>> embedded = dimod.SampleSet.from_samples(samples, dimod.SPIN, energies) ... >>> # unembed >>> samples = dwave.embedding.unembed_sampleset(embedded, embedding, bqm) >>> samples.record.sample # doctest: +SKIP array([[-1, -1, -1], [ 1, 1, 1]], dtype=int8)
[ "Unembed", "the", "samples", "set", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/transforms.py#L346-L441
train
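The two transform records above are complementary: embed_qubo/embed_bqm map a source problem onto a target graph, and unembed_sampleset maps target samples back to source variables. A minimal end-to-end sketch follows; the triangle problem, the hand-written embedding onto a fully connected 4-qubit target, and the chain_strength value are illustrative assumptions, not taken from the records.

import dimod
from dwave.embedding import embed_bqm, unembed_sampleset

# Source problem: an antiferromagnetic triangle on variables a, b, c.
bqm = dimod.BinaryQuadraticModel.from_ising(
    {}, {('a', 'b'): -1, ('b', 'c'): -1, ('a', 'c'): -1})

# Hand-written embedding onto a fully connected 4-qubit target; 'a' is a 2-qubit chain.
embedding = {'a': [0, 1], 'b': [2], 'c': [3]}
target_adjacency = {0: {1, 2, 3}, 1: {0, 2, 3}, 2: {0, 1, 3}, 3: {0, 1, 2}}

# Embed, solve on the target graph, then map the samples back to a, b, c.
target_bqm = embed_bqm(bqm, embedding, target_adjacency, chain_strength=2.0)
target_sampleset = dimod.ExactSolver().sample(target_bqm)
sampleset = unembed_sampleset(target_sampleset, embedding, bqm,
                              chain_break_fraction=True)
print(sampleset)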
dwavesystems/dwave-system
dwave/system/composites/embedding.py
LazyFixedEmbeddingComposite.sample
def sample(self, bqm, chain_strength=1.0, chain_break_fraction=True, **parameters): """Sample the binary quadratic model. Note: At the initial sample(..) call, it will find a suitable embedding and initialize the remaining attributes before sampling the bqm. All following sample(..) calls will reuse that initial embedding. Args: bqm (:obj:`dimod.BinaryQuadraticModel`): Binary quadratic model to be sampled from. chain_strength (float, optional, default=1.0): Magnitude of the quadratic bias (in SPIN-space) applied between variables to create chains. Note that the energy penalty of chain breaks is 2 * `chain_strength`. chain_break_fraction (bool, optional, default=True): If True, a ‘chain_break_fraction’ field is added to the unembedded response which report what fraction of the chains were broken before unembedding. **parameters: Parameters for the sampling method, specified by the child sampler. Returns: :class:`dimod.SampleSet` """ if self.embedding is None: # Find embedding child = self.child # Solve the problem on the child system __, target_edgelist, target_adjacency = child.structure source_edgelist = list(bqm.quadratic) + [(v, v) for v in bqm.linear] # Add self-loops for single variables embedding = minorminer.find_embedding(source_edgelist, target_edgelist) # Initialize properties that need embedding super(LazyFixedEmbeddingComposite, self)._set_graph_related_init(embedding=embedding) return super(LazyFixedEmbeddingComposite, self).sample(bqm, chain_strength=chain_strength, chain_break_fraction=chain_break_fraction, **parameters)
python
def sample(self, bqm, chain_strength=1.0, chain_break_fraction=True, **parameters): """Sample the binary quadratic model. Note: At the initial sample(..) call, it will find a suitable embedding and initialize the remaining attributes before sampling the bqm. All following sample(..) calls will reuse that initial embedding. Args: bqm (:obj:`dimod.BinaryQuadraticModel`): Binary quadratic model to be sampled from. chain_strength (float, optional, default=1.0): Magnitude of the quadratic bias (in SPIN-space) applied between variables to create chains. Note that the energy penalty of chain breaks is 2 * `chain_strength`. chain_break_fraction (bool, optional, default=True): If True, a ‘chain_break_fraction’ field is added to the unembedded response which report what fraction of the chains were broken before unembedding. **parameters: Parameters for the sampling method, specified by the child sampler. Returns: :class:`dimod.SampleSet` """ if self.embedding is None: # Find embedding child = self.child # Solve the problem on the child system __, target_edgelist, target_adjacency = child.structure source_edgelist = list(bqm.quadratic) + [(v, v) for v in bqm.linear] # Add self-loops for single variables embedding = minorminer.find_embedding(source_edgelist, target_edgelist) # Initialize properties that need embedding super(LazyFixedEmbeddingComposite, self)._set_graph_related_init(embedding=embedding) return super(LazyFixedEmbeddingComposite, self).sample(bqm, chain_strength=chain_strength, chain_break_fraction=chain_break_fraction, **parameters)
[ "def", "sample", "(", "self", ",", "bqm", ",", "chain_strength", "=", "1.0", ",", "chain_break_fraction", "=", "True", ",", "*", "*", "parameters", ")", ":", "if", "self", ".", "embedding", "is", "None", ":", "# Find embedding", "child", "=", "self", ".", "child", "# Solve the problem on the child system", "__", ",", "target_edgelist", ",", "target_adjacency", "=", "child", ".", "structure", "source_edgelist", "=", "list", "(", "bqm", ".", "quadratic", ")", "+", "[", "(", "v", ",", "v", ")", "for", "v", "in", "bqm", ".", "linear", "]", "# Add self-loops for single variables", "embedding", "=", "minorminer", ".", "find_embedding", "(", "source_edgelist", ",", "target_edgelist", ")", "# Initialize properties that need embedding", "super", "(", "LazyFixedEmbeddingComposite", ",", "self", ")", ".", "_set_graph_related_init", "(", "embedding", "=", "embedding", ")", "return", "super", "(", "LazyFixedEmbeddingComposite", ",", "self", ")", ".", "sample", "(", "bqm", ",", "chain_strength", "=", "chain_strength", ",", "chain_break_fraction", "=", "chain_break_fraction", ",", "*", "*", "parameters", ")" ]
Sample the binary quadratic model. Note: At the initial sample(..) call, it will find a suitable embedding and initialize the remaining attributes before sampling the bqm. All following sample(..) calls will reuse that initial embedding. Args: bqm (:obj:`dimod.BinaryQuadraticModel`): Binary quadratic model to be sampled from. chain_strength (float, optional, default=1.0): Magnitude of the quadratic bias (in SPIN-space) applied between variables to create chains. Note that the energy penalty of chain breaks is 2 * `chain_strength`. chain_break_fraction (bool, optional, default=True): If True, a ‘chain_break_fraction’ field is added to the unembedded response which report what fraction of the chains were broken before unembedding. **parameters: Parameters for the sampling method, specified by the child sampler. Returns: :class:`dimod.SampleSet`
[ "Sample", "the", "binary", "quadratic", "model", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/composites/embedding.py#L461-L495
train
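A sketch of the lazy behaviour documented above: the composite's embedding attribute stays None until the first sample() call, and the embedding found there is reused on later calls. The K5-structured ExactSolver child is an assumption chosen so the example runs without QPU access; it presumes dimod, networkx and minorminer are installed and that LazyFixedEmbeddingComposite is importable from dwave.system, as in recent dwave-system releases.

import dimod
import networkx as nx
from dwave.system import LazyFixedEmbeddingComposite

# Structured child sampler whose target graph is the complete graph K5.
K_5 = nx.complete_graph(5)
child = dimod.StructureComposite(dimod.ExactSolver(), list(K_5.nodes), list(K_5.edges))
sampler = LazyFixedEmbeddingComposite(child)

bqm = dimod.BinaryQuadraticModel.from_ising(
    {}, {('a', 'b'): -1, ('b', 'c'): -1, ('a', 'c'): -1})
print(sampler.embedding)          # None: no embedding computed yet
sampleset = sampler.sample(bqm)
print(sampler.embedding)          # embedding found on the first call, reused afterwards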
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
_accumulate_random
def _accumulate_random(count, found, oldthing, newthing): """This performs on-line random selection. We have a stream of objects o_1,c_1; o_2,c_2; ... where there are c_i equivalent objects like o_1. We'd like to pick a random object o uniformly at random from the list [o_1]*c_1 + [o_2]*c_2 + ... (actually, this algorithm allows arbitrary positive weights, not necessarily integers) without spending the time&space to actually create that list. Luckily, the following works: thing = None c_tot for o_n, c_n in things: c_tot += c_n if randint(1,c_tot) <= c_n: thing = o_n This function is written in an accumulator format, so it can be used one call at a time: EXAMPLE: > thing = None > count = 0 > for i in range(10): > c = 10-i > count, thing = accumulate_random(count,c,thing,i) INPUTS: count: integer, sum of weights found before newthing found: integer, weight for newthing oldthing: previously selected object (will never be selected if count == 0) newthing: incoming object OUTPUT: (newcount, pick): newcount is count+found, pick is the newly selected object. """ if randint(1, count + found) <= found: return count + found, newthing else: return count + found, oldthing
python
def _accumulate_random(count, found, oldthing, newthing): """This performs on-line random selection. We have a stream of objects o_1,c_1; o_2,c_2; ... where there are c_i equivalent objects like o_1. We'd like to pick a random object o uniformly at random from the list [o_1]*c_1 + [o_2]*c_2 + ... (actually, this algorithm allows arbitrary positive weights, not necessarily integers) without spending the time&space to actually create that list. Luckily, the following works: thing = None c_tot for o_n, c_n in things: c_tot += c_n if randint(1,c_tot) <= c_n: thing = o_n This function is written in an accumulator format, so it can be used one call at a time: EXAMPLE: > thing = None > count = 0 > for i in range(10): > c = 10-i > count, thing = accumulate_random(count,c,thing,i) INPUTS: count: integer, sum of weights found before newthing found: integer, weight for newthing oldthing: previously selected object (will never be selected if count == 0) newthing: incoming object OUTPUT: (newcount, pick): newcount is count+found, pick is the newly selected object. """ if randint(1, count + found) <= found: return count + found, newthing else: return count + found, oldthing
[ "def", "_accumulate_random", "(", "count", ",", "found", ",", "oldthing", ",", "newthing", ")", ":", "if", "randint", "(", "1", ",", "count", "+", "found", ")", "<=", "found", ":", "return", "count", "+", "found", ",", "newthing", "else", ":", "return", "count", "+", "found", ",", "oldthing" ]
This performs on-line random selection. We have a stream of objects o_1,c_1; o_2,c_2; ... where there are c_i equivalent objects like o_1. We'd like to pick a random object o uniformly at random from the list [o_1]*c_1 + [o_2]*c_2 + ... (actually, this algorithm allows arbitrary positive weights, not necessarily integers) without spending the time&space to actually create that list. Luckily, the following works: thing = None c_tot for o_n, c_n in things: c_tot += c_n if randint(1,c_tot) <= c_n: thing = o_n This function is written in an accumulator format, so it can be used one call at a time: EXAMPLE: > thing = None > count = 0 > for i in range(10): > c = 10-i > count, thing = accumulate_random(count,c,thing,i) INPUTS: count: integer, sum of weights found before newthing found: integer, weight for newthing oldthing: previously selected object (will never be selected if count == 0) newthing: incoming object OUTPUT: (newcount, pick): newcount is count+found, pick is the newly selected object.
[ "This", "performs", "on", "-", "line", "random", "selection", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L60-L108
train
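The docstring above describes an on-line weighted selection pattern. The standalone sketch below (hypothetical names, not taken from the module) spells the accumulator loop out, including the count = 0 initialisation that the docstring's pseudocode leaves implicit.

from random import randint

def accumulate_random(count, found, oldthing, newthing):
    # Keep newthing with probability found / (count + found); otherwise keep the
    # previously selected object. count accumulates the total weight seen so far.
    if randint(1, count + found) <= found:
        return count + found, newthing
    return count + found, oldthing

# Pick one index from range(10) with weights 10, 9, ..., 1 without ever building
# the weighted list [0]*10 + [1]*9 + ... explicitly.
thing, count = None, 0
for i in range(10):
    count, thing = accumulate_random(count, 10 - i, thing, i)
print(thing)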
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
_bulk_to_linear
def _bulk_to_linear(M, N, L, qubits): "Converts a list of chimera coordinates to linear indices." return [2 * L * N * x + 2 * L * y + L * u + k for x, y, u, k in qubits]
python
def _bulk_to_linear(M, N, L, qubits): "Converts a list of chimera coordinates to linear indices." return [2 * L * N * x + 2 * L * y + L * u + k for x, y, u, k in qubits]
[ "def", "_bulk_to_linear", "(", "M", ",", "N", ",", "L", ",", "qubits", ")", ":", "return", "[", "2", "*", "L", "*", "N", "*", "x", "+", "2", "*", "L", "*", "y", "+", "L", "*", "u", "+", "k", "for", "x", ",", "y", ",", "u", ",", "k", "in", "qubits", "]" ]
Converts a list of chimera coordinates to linear indices.
[ "Converts", "a", "list", "of", "chimera", "coordinates", "to", "linear", "indices", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L1161-L1163
train
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
_to_linear
def _to_linear(M, N, L, q): "Converts a qubit in chimera coordinates to its linear index." (x, y, u, k) = q return 2 * L * N * x + 2 * L * y + L * u + k
python
def _to_linear(M, N, L, q): "Converts a qubit in chimera coordinates to its linear index." (x, y, u, k) = q return 2 * L * N * x + 2 * L * y + L * u + k
[ "def", "_to_linear", "(", "M", ",", "N", ",", "L", ",", "q", ")", ":", "(", "x", ",", "y", ",", "u", ",", "k", ")", "=", "q", "return", "2", "*", "L", "*", "N", "*", "x", "+", "2", "*", "L", "*", "y", "+", "L", "*", "u", "+", "k" ]
Converts a qubit in chimera coordinates to its linear index.
[ "Converts", "a", "qubit", "in", "chimera", "coordinates", "to", "its", "linear", "index", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L1166-L1169
train
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
_bulk_to_chimera
def _bulk_to_chimera(M, N, L, qubits): "Converts a list of linear indices to chimera coordinates." return [(q // N // L // 2, (q // L // 2) % N, (q // L) % 2, q % L) for q in qubits]
python
def _bulk_to_chimera(M, N, L, qubits): "Converts a list of linear indices to chimera coordinates." return [(q // N // L // 2, (q // L // 2) % N, (q // L) % 2, q % L) for q in qubits]
[ "def", "_bulk_to_chimera", "(", "M", ",", "N", ",", "L", ",", "qubits", ")", ":", "return", "[", "(", "q", "//", "N", "//", "L", "//", "2", ",", "(", "q", "//", "L", "//", "2", ")", "%", "N", ",", "(", "q", "//", "L", ")", "%", "2", ",", "q", "%", "L", ")", "for", "q", "in", "qubits", "]" ]
Converts a list of linear indices to chimera coordinates.
[ "Converts", "a", "list", "of", "linear", "indices", "to", "chimera", "coordinates", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L1172-L1174
train
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
_to_chimera
def _to_chimera(M, N, L, q): "Converts a qubit's linear index to chimera coordinates." return (q // N // L // 2, (q // L // 2) % N, (q // L) % 2, q % L)
python
def _to_chimera(M, N, L, q): "Converts a qubit's linear index to chimera coordinates." return (q // N // L // 2, (q // L // 2) % N, (q // L) % 2, q % L)
[ "def", "_to_chimera", "(", "M", ",", "N", ",", "L", ",", "q", ")", ":", "return", "(", "q", "//", "N", "//", "L", "//", "2", ",", "(", "q", "//", "L", "//", "2", ")", "%", "N", ",", "(", "q", "//", "L", ")", "%", "2", ",", "q", "%", "L", ")" ]
Converts a qubit's linear index to chimera coordinates.
[ "Converts", "a", "qubit", "s", "linear", "index", "to", "chimera", "coordinates", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L1177-L1179
train
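The four converter records above share one piece of index arithmetic. The standalone sketch below (hypothetical names, mirroring _to_linear and _to_chimera) checks that the two directions are mutually inverse on a small Chimera graph with an M x N grid of K_{L,L} cells.

def to_linear(M, N, L, q):
    # (x, y, u, k) chimera coordinates -> linear index.
    x, y, u, k = q
    return 2 * L * N * x + 2 * L * y + L * u + k

def to_chimera(M, N, L, q):
    # linear index -> (x, y, u, k) chimera coordinates.
    return (q // N // L // 2, (q // L // 2) % N, (q // L) % 2, q % L)

M, N, L = 3, 4, 4                   # a 3 x 4 grid of K_{4,4} cells
for q in range(2 * L * M * N):      # every qubit in the graph
    assert to_linear(M, N, L, to_chimera(M, N, L, q)) == q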
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
eden_processor._compute_vline_scores
def _compute_vline_scores(self): """Does the hard work to prepare ``vline_score``. """ M, N, L = self.M, self.N, self.L vline_score = {} for x in range(M): laststart = [0 if (x, 0, 1, k) in self else None for k in range(L)] for y in range(N): block = [0] * (y + 1) for k in range(L): if (x, y, 1, k) not in self: laststart[k] = None elif laststart[k] is None: laststart[k] = y block[y] += 1 elif y and (x, y, 1, k) not in self[x, y - 1, 1, k]: laststart[k] = y else: for y1 in range(laststart[k], y + 1): block[y1] += 1 for y1 in range(y + 1): vline_score[x, y1, y] = block[y1] self._vline_score = vline_score
python
def _compute_vline_scores(self): """Does the hard work to prepare ``vline_score``. """ M, N, L = self.M, self.N, self.L vline_score = {} for x in range(M): laststart = [0 if (x, 0, 1, k) in self else None for k in range(L)] for y in range(N): block = [0] * (y + 1) for k in range(L): if (x, y, 1, k) not in self: laststart[k] = None elif laststart[k] is None: laststart[k] = y block[y] += 1 elif y and (x, y, 1, k) not in self[x, y - 1, 1, k]: laststart[k] = y else: for y1 in range(laststart[k], y + 1): block[y1] += 1 for y1 in range(y + 1): vline_score[x, y1, y] = block[y1] self._vline_score = vline_score
[ "def", "_compute_vline_scores", "(", "self", ")", ":", "M", ",", "N", ",", "L", "=", "self", ".", "M", ",", "self", ".", "N", ",", "self", ".", "L", "vline_score", "=", "{", "}", "for", "x", "in", "range", "(", "M", ")", ":", "laststart", "=", "[", "0", "if", "(", "x", ",", "0", ",", "1", ",", "k", ")", "in", "self", "else", "None", "for", "k", "in", "range", "(", "L", ")", "]", "for", "y", "in", "range", "(", "N", ")", ":", "block", "=", "[", "0", "]", "*", "(", "y", "+", "1", ")", "for", "k", "in", "range", "(", "L", ")", ":", "if", "(", "x", ",", "y", ",", "1", ",", "k", ")", "not", "in", "self", ":", "laststart", "[", "k", "]", "=", "None", "elif", "laststart", "[", "k", "]", "is", "None", ":", "laststart", "[", "k", "]", "=", "y", "block", "[", "y", "]", "+=", "1", "elif", "y", "and", "(", "x", ",", "y", ",", "1", ",", "k", ")", "not", "in", "self", "[", "x", ",", "y", "-", "1", ",", "1", ",", "k", "]", ":", "laststart", "[", "k", "]", "=", "y", "else", ":", "for", "y1", "in", "range", "(", "laststart", "[", "k", "]", ",", "y", "+", "1", ")", ":", "block", "[", "y1", "]", "+=", "1", "for", "y1", "in", "range", "(", "y", "+", "1", ")", ":", "vline_score", "[", "x", ",", "y1", ",", "y", "]", "=", "block", "[", "y1", "]", "self", ".", "_vline_score", "=", "vline_score" ]
Does the hard work to prepare ``vline_score``.
[ "Does", "the", "hard", "work", "to", "prepare", "vline_score", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L188-L210
train
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
eden_processor._compute_hline_scores
def _compute_hline_scores(self): """Does the hard work to prepare ``hline_score``. """ M, N, L = self.M, self.N, self.L hline_score = {} for y in range(N): laststart = [0 if (0, y, 0, k) in self else None for k in range(L)] for x in range(M): block = [0] * (x + 1) for k in range(L): if (x, y, 0, k) not in self: laststart[k] = None elif laststart[k] is None: laststart[k] = x block[x] += 1 elif x and (x, y, 0, k) not in self[x - 1, y, 0, k]: laststart[k] = x else: for x1 in range(laststart[k], x + 1): block[x1] += 1 for x1 in range(x + 1): hline_score[y, x1, x] = block[x1] self._hline_score = hline_score
python
def _compute_hline_scores(self): """Does the hard work to prepare ``hline_score``. """ M, N, L = self.M, self.N, self.L hline_score = {} for y in range(N): laststart = [0 if (0, y, 0, k) in self else None for k in range(L)] for x in range(M): block = [0] * (x + 1) for k in range(L): if (x, y, 0, k) not in self: laststart[k] = None elif laststart[k] is None: laststart[k] = x block[x] += 1 elif x and (x, y, 0, k) not in self[x - 1, y, 0, k]: laststart[k] = x else: for x1 in range(laststart[k], x + 1): block[x1] += 1 for x1 in range(x + 1): hline_score[y, x1, x] = block[x1] self._hline_score = hline_score
[ "def", "_compute_hline_scores", "(", "self", ")", ":", "M", ",", "N", ",", "L", "=", "self", ".", "M", ",", "self", ".", "N", ",", "self", ".", "L", "hline_score", "=", "{", "}", "for", "y", "in", "range", "(", "N", ")", ":", "laststart", "=", "[", "0", "if", "(", "0", ",", "y", ",", "0", ",", "k", ")", "in", "self", "else", "None", "for", "k", "in", "range", "(", "L", ")", "]", "for", "x", "in", "range", "(", "M", ")", ":", "block", "=", "[", "0", "]", "*", "(", "x", "+", "1", ")", "for", "k", "in", "range", "(", "L", ")", ":", "if", "(", "x", ",", "y", ",", "0", ",", "k", ")", "not", "in", "self", ":", "laststart", "[", "k", "]", "=", "None", "elif", "laststart", "[", "k", "]", "is", "None", ":", "laststart", "[", "k", "]", "=", "x", "block", "[", "x", "]", "+=", "1", "elif", "x", "and", "(", "x", ",", "y", ",", "0", ",", "k", ")", "not", "in", "self", "[", "x", "-", "1", ",", "y", ",", "0", ",", "k", "]", ":", "laststart", "[", "k", "]", "=", "x", "else", ":", "for", "x1", "in", "range", "(", "laststart", "[", "k", "]", ",", "x", "+", "1", ")", ":", "block", "[", "x1", "]", "+=", "1", "for", "x1", "in", "range", "(", "x", "+", "1", ")", ":", "hline_score", "[", "y", ",", "x1", ",", "x", "]", "=", "block", "[", "x1", "]", "self", ".", "_hline_score", "=", "hline_score" ]
Does the hard work to prepare ``hline_score``.
[ "Does", "the", "hard", "work", "to", "prepare", "hline_score", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L212-L234
train
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
eden_processor.biclique
def biclique(self, xmin, xmax, ymin, ymax): """Compute a maximum-sized complete bipartite graph contained in the rectangle defined by ``xmin, xmax, ymin, ymax`` where each chain of qubits is either a vertical line or a horizontal line. INPUTS: xmin,xmax,ymin,ymax: integers defining the bounds of a rectangle where we look for unbroken chains. These ranges include both endpoints. OUTPUT: (A_side, B_side): a tuple of two lists containing lists of qubits. the lists found in ``A_side`` and ``B_side`` are chains of qubits. These lists of qubits are arranged so that >>> [zip(chain,chain[1:]) for chain in A_side] and >>> [zip(chain,chain[1:]) for chain in B_side] are lists of valid couplers. """ Aside = sum((self.maximum_hline_bundle(y, xmin, xmax) for y in range(ymin, ymax + 1)), []) Bside = sum((self.maximum_vline_bundle(x, ymin, ymax) for x in range(xmin, xmax + 1)), []) return Aside, Bside
python
def biclique(self, xmin, xmax, ymin, ymax): """Compute a maximum-sized complete bipartite graph contained in the rectangle defined by ``xmin, xmax, ymin, ymax`` where each chain of qubits is either a vertical line or a horizontal line. INPUTS: xmin,xmax,ymin,ymax: integers defining the bounds of a rectangle where we look for unbroken chains. These ranges include both endpoints. OUTPUT: (A_side, B_side): a tuple of two lists containing lists of qubits. the lists found in ``A_side`` and ``B_side`` are chains of qubits. These lists of qubits are arranged so that >>> [zip(chain,chain[1:]) for chain in A_side] and >>> [zip(chain,chain[1:]) for chain in B_side] are lists of valid couplers. """ Aside = sum((self.maximum_hline_bundle(y, xmin, xmax) for y in range(ymin, ymax + 1)), []) Bside = sum((self.maximum_vline_bundle(x, ymin, ymax) for x in range(xmin, xmax + 1)), []) return Aside, Bside
[ "def", "biclique", "(", "self", ",", "xmin", ",", "xmax", ",", "ymin", ",", "ymax", ")", ":", "Aside", "=", "sum", "(", "(", "self", ".", "maximum_hline_bundle", "(", "y", ",", "xmin", ",", "xmax", ")", "for", "y", "in", "range", "(", "ymin", ",", "ymax", "+", "1", ")", ")", ",", "[", "]", ")", "Bside", "=", "sum", "(", "(", "self", ".", "maximum_vline_bundle", "(", "x", ",", "ymin", ",", "ymax", ")", "for", "x", "in", "range", "(", "xmin", ",", "xmax", "+", "1", ")", ")", ",", "[", "]", ")", "return", "Aside", ",", "Bside" ]
Compute a maximum-sized complete bipartite graph contained in the rectangle defined by ``xmin, xmax, ymin, ymax`` where each chain of qubits is either a vertical line or a horizontal line. INPUTS: xmin,xmax,ymin,ymax: integers defining the bounds of a rectangle where we look for unbroken chains. These ranges include both endpoints. OUTPUT: (A_side, B_side): a tuple of two lists containing lists of qubits. the lists found in ``A_side`` and ``B_side`` are chains of qubits. These lists of qubits are arranged so that >>> [zip(chain,chain[1:]) for chain in A_side] and >>> [zip(chain,chain[1:]) for chain in B_side] are lists of valid couplers.
[ "Compute", "a", "maximum", "-", "sized", "complete", "bipartite", "graph", "contained", "in", "the", "rectangle", "defined", "by", "xmin", "xmax", "ymin", "ymax", "where", "each", "chain", "of", "qubits", "is", "either", "a", "vertical", "line", "or", "a", "horizontal", "line", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L291-L319
train
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
eden_processor._contains_line
def _contains_line(self, line): """Test if a chain of qubits is completely contained in ``self``. In particular, test if all qubits are present and the couplers connecting those qubits are also connected. NOTE: this function assumes that ``line`` is a list or tuple of qubits which satisfies the precondition that ``(line[i],line[i+1])`` is supposed to be a coupler for all ``i``. INPUTS: line: a list of qubits satisfying the above precondition OUTPUT: boolean """ return all(v in self for v in line) and all(u in self[v] for u, v in zip(line, line[1::]))
python
def _contains_line(self, line): """Test if a chain of qubits is completely contained in ``self``. In particular, test if all qubits are present and the couplers connecting those qubits are also connected. NOTE: this function assumes that ``line`` is a list or tuple of qubits which satisfies the precondition that ``(line[i],line[i+1])`` is supposed to be a coupler for all ``i``. INPUTS: line: a list of qubits satisfying the above precondition OUTPUT: boolean """ return all(v in self for v in line) and all(u in self[v] for u, v in zip(line, line[1::]))
[ "def", "_contains_line", "(", "self", ",", "line", ")", ":", "return", "all", "(", "v", "in", "self", "for", "v", "in", "line", ")", "and", "all", "(", "u", "in", "self", "[", "v", "]", "for", "u", ",", "v", "in", "zip", "(", "line", ",", "line", "[", "1", ":", ":", "]", ")", ")" ]
Test if a chain of qubits is completely contained in ``self``. In particular, test if all qubits are present and the couplers connecting those qubits are also connected. NOTE: this function assumes that ``line`` is a list or tuple of qubits which satisfies the precondition that ``(line[i],line[i+1])`` is supposed to be a coupler for all ``i``. INPUTS: line: a list of qubits satisfying the above precondition OUTPUT: boolean
[ "Test", "if", "a", "chain", "of", "qubits", "is", "completely", "contained", "in", "self", ".", "In", "particular", "test", "if", "all", "qubits", "are", "present", "and", "the", "couplers", "connecting", "those", "qubits", "are", "also", "connected", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L322-L337
train
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
eden_processor.maximum_ell_bundle
def maximum_ell_bundle(self, ell): """Return a maximum ell bundle in the rectangle bounded by :math:`\{x0,x1\} \\times \{y0,y1\}` with vertical component :math:`(x0,y0) ... (x0,y1) = {x0} \\times \{y0,...,y1\}` and horizontal component :math:`(x0,y0) ... (x1,y0) = \{x0,...,x1\} \\times \{y0\}`. Note that we don't require :math:`x0 \leq x1` or :math:`y0 \leq y1`. We go through some shenanigans so that the qubits we return are all in a path. A nice side-effect of this is that >>> chains = maximum_ell_bundle(...) >>> edges = [zip(path,path[:-1]) for path in chains] where ``edges`` will be a list of lists of chain edges. INPUTS:: ell: a tuple of 4 integers defining the ell, ``(x0, x1, y0, y1)`` OUTPUT:: chains: list of lists of qubits Note: this function only to be called to construct a native clique embedding *after* the block embedding has been constructed. Using this to evaluate the goodness of an ell block will be slow. """ (x0, x1, y0, y1) = ell hlines = self.maximum_hline_bundle(y0, x0, x1) vlines = self.maximum_vline_bundle(x0, y0, y1) if self.random_bundles: shuffle(hlines) shuffle(vlines) return [v + h for h, v in zip(hlines, vlines)]
python
def maximum_ell_bundle(self, ell): """Return a maximum ell bundle in the rectangle bounded by :math:`\{x0,x1\} \\times \{y0,y1\}` with vertical component :math:`(x0,y0) ... (x0,y1) = {x0} \\times \{y0,...,y1\}` and horizontal component :math:`(x0,y0) ... (x1,y0) = \{x0,...,x1\} \\times \{y0\}`. Note that we don't require :math:`x0 \leq x1` or :math:`y0 \leq y1`. We go through some shenanigans so that the qubits we return are all in a path. A nice side-effect of this is that >>> chains = maximum_ell_bundle(...) >>> edges = [zip(path,path[:-1]) for path in chains] where ``edges`` will be a list of lists of chain edges. INPUTS:: ell: a tuple of 4 integers defining the ell, ``(x0, x1, y0, y1)`` OUTPUT:: chains: list of lists of qubits Note: this function only to be called to construct a native clique embedding *after* the block embedding has been constructed. Using this to evaluate the goodness of an ell block will be slow. """ (x0, x1, y0, y1) = ell hlines = self.maximum_hline_bundle(y0, x0, x1) vlines = self.maximum_vline_bundle(x0, y0, y1) if self.random_bundles: shuffle(hlines) shuffle(vlines) return [v + h for h, v in zip(hlines, vlines)]
[ "def", "maximum_ell_bundle", "(", "self", ",", "ell", ")", ":", "(", "x0", ",", "x1", ",", "y0", ",", "y1", ")", "=", "ell", "hlines", "=", "self", ".", "maximum_hline_bundle", "(", "y0", ",", "x0", ",", "x1", ")", "vlines", "=", "self", ".", "maximum_vline_bundle", "(", "x0", ",", "y0", ",", "y1", ")", "if", "self", ".", "random_bundles", ":", "shuffle", "(", "hlines", ")", "shuffle", "(", "vlines", ")", "return", "[", "v", "+", "h", "for", "h", ",", "v", "in", "zip", "(", "hlines", ",", "vlines", ")", "]" ]
Return a maximum ell bundle in the rectangle bounded by :math:`\{x0,x1\} \\times \{y0,y1\}` with vertical component :math:`(x0,y0) ... (x0,y1) = {x0} \\times \{y0,...,y1\}` and horizontal component :math:`(x0,y0) ... (x1,y0) = \{x0,...,x1\} \\times \{y0\}`. Note that we don't require :math:`x0 \leq x1` or :math:`y0 \leq y1`. We go through some shenanigans so that the qubits we return are all in a path. A nice side-effect of this is that >>> chains = maximum_ell_bundle(...) >>> edges = [zip(path,path[:-1]) for path in chains] where ``edges`` will be a list of lists of chain edges. INPUTS:: ell: a tuple of 4 integers defining the ell, ``(x0, x1, y0, y1)`` OUTPUT:: chains: list of lists of qubits Note: this function only to be called to construct a native clique embedding *after* the block embedding has been constructed. Using this to evaluate the goodness of an ell block will be slow.
[ "Return", "a", "maximum", "ell", "bundle", "in", "the", "rectangle", "bounded", "by" ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L368-L409
train
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
eden_processor.nativeCliqueEmbed
def nativeCliqueEmbed(self, width): """Compute a maximum-sized native clique embedding in an induced subgraph of chimera with all chainlengths ``width+1``. INPUTS: width: width of the squares to search, also `chainlength`-1 OUTPUT: score: the score for the returned clique (just ``len(clique)`` in the class :class:`eden_processor`; may differ in subclasses) clique: a list containing lists of qubits, each associated to a chain. These lists of qubits are carefully arranged so that >>> [zip(chain,chain[1:]) for chain in clique] is a list of valid couplers. """ maxCWR = {} M, N = self.M, self.N maxscore = None count = 0 key = None for w in range(width + 2): h = width - w - 2 for ymin in range(N - h): ymax = ymin + h for xmin in range(M - w): xmax = xmin + w R = (xmin, xmax, ymin, ymax) score, best = self.maxCliqueWithRectangle(R, maxCWR) maxCWR[R] = best if maxscore is None or (score is not None and maxscore < score): maxscore = score key = None # this gets overwritten immediately count = 0 # this gets overwritten immediately if maxscore == score: count, key = _accumulate_random(count, best[3], key, R) clique = [] while key in maxCWR: score, ell, key, num = maxCWR[key] if ell is not None: meb = self.maximum_ell_bundle(ell) clique.extend(meb) return maxscore, clique
python
def nativeCliqueEmbed(self, width): """Compute a maximum-sized native clique embedding in an induced subgraph of chimera with all chainlengths ``width+1``. INPUTS: width: width of the squares to search, also `chainlength`-1 OUTPUT: score: the score for the returned clique (just ``len(clique)`` in the class :class:`eden_processor`; may differ in subclasses) clique: a list containing lists of qubits, each associated to a chain. These lists of qubits are carefully arranged so that >>> [zip(chain,chain[1:]) for chain in clique] is a list of valid couplers. """ maxCWR = {} M, N = self.M, self.N maxscore = None count = 0 key = None for w in range(width + 2): h = width - w - 2 for ymin in range(N - h): ymax = ymin + h for xmin in range(M - w): xmax = xmin + w R = (xmin, xmax, ymin, ymax) score, best = self.maxCliqueWithRectangle(R, maxCWR) maxCWR[R] = best if maxscore is None or (score is not None and maxscore < score): maxscore = score key = None # this gets overwritten immediately count = 0 # this gets overwritten immediately if maxscore == score: count, key = _accumulate_random(count, best[3], key, R) clique = [] while key in maxCWR: score, ell, key, num = maxCWR[key] if ell is not None: meb = self.maximum_ell_bundle(ell) clique.extend(meb) return maxscore, clique
[ "def", "nativeCliqueEmbed", "(", "self", ",", "width", ")", ":", "maxCWR", "=", "{", "}", "M", ",", "N", "=", "self", ".", "M", ",", "self", ".", "N", "maxscore", "=", "None", "count", "=", "0", "key", "=", "None", "for", "w", "in", "range", "(", "width", "+", "2", ")", ":", "h", "=", "width", "-", "w", "-", "2", "for", "ymin", "in", "range", "(", "N", "-", "h", ")", ":", "ymax", "=", "ymin", "+", "h", "for", "xmin", "in", "range", "(", "M", "-", "w", ")", ":", "xmax", "=", "xmin", "+", "w", "R", "=", "(", "xmin", ",", "xmax", ",", "ymin", ",", "ymax", ")", "score", ",", "best", "=", "self", ".", "maxCliqueWithRectangle", "(", "R", ",", "maxCWR", ")", "maxCWR", "[", "R", "]", "=", "best", "if", "maxscore", "is", "None", "or", "(", "score", "is", "not", "None", "and", "maxscore", "<", "score", ")", ":", "maxscore", "=", "score", "key", "=", "None", "# this gets overwritten immediately", "count", "=", "0", "# this gets overwritten immediately", "if", "maxscore", "==", "score", ":", "count", ",", "key", "=", "_accumulate_random", "(", "count", ",", "best", "[", "3", "]", ",", "key", ",", "R", ")", "clique", "=", "[", "]", "while", "key", "in", "maxCWR", ":", "score", ",", "ell", ",", "key", ",", "num", "=", "maxCWR", "[", "key", "]", "if", "ell", "is", "not", "None", ":", "meb", "=", "self", ".", "maximum_ell_bundle", "(", "ell", ")", "clique", ".", "extend", "(", "meb", ")", "return", "maxscore", ",", "clique" ]
Compute a maximum-sized native clique embedding in an induced subgraph of chimera with all chainlengths ``width+1``. INPUTS: width: width of the squares to search, also `chainlength`-1 OUTPUT: score: the score for the returned clique (just ``len(clique)`` in the class :class:`eden_processor`; may differ in subclasses) clique: a list containing lists of qubits, each associated to a chain. These lists of qubits are carefully arranged so that >>> [zip(chain,chain[1:]) for chain in clique] is a list of valid couplers.
[ "Compute", "a", "maximum", "-", "sized", "native", "clique", "embedding", "in", "an", "induced", "subgraph", "of", "chimera", "with", "all", "chainlengths", "width", "+", "1", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L496-L544
train
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
processor._compute_all_deletions
def _compute_all_deletions(self): """Returns all minimal edge covers of the set of evil edges. """ minimum_evil = [] for disabled_qubits in map(set, product(*self._evil)): newmin = [] for s in minimum_evil: if s < disabled_qubits: break elif disabled_qubits < s: continue newmin.append(s) else: minimum_evil = newmin + [disabled_qubits] return minimum_evil
python
def _compute_all_deletions(self): """Returns all minimal edge covers of the set of evil edges. """ minimum_evil = [] for disabled_qubits in map(set, product(*self._evil)): newmin = [] for s in minimum_evil: if s < disabled_qubits: break elif disabled_qubits < s: continue newmin.append(s) else: minimum_evil = newmin + [disabled_qubits] return minimum_evil
[ "def", "_compute_all_deletions", "(", "self", ")", ":", "minimum_evil", "=", "[", "]", "for", "disabled_qubits", "in", "map", "(", "set", ",", "product", "(", "*", "self", ".", "_evil", ")", ")", ":", "newmin", "=", "[", "]", "for", "s", "in", "minimum_evil", ":", "if", "s", "<", "disabled_qubits", ":", "break", "elif", "disabled_qubits", "<", "s", ":", "continue", "newmin", ".", "append", "(", "s", ")", "else", ":", "minimum_evil", "=", "newmin", "+", "[", "disabled_qubits", "]", "return", "minimum_evil" ]
Returns all minimal edge covers of the set of evil edges.
[ "Returns", "all", "minimal", "edge", "covers", "of", "the", "set", "of", "evil", "edges", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L810-L824
train
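A standalone sketch (hypothetical names) of the minimal-cover enumeration used by _compute_all_deletions above: one endpoint is chosen from every "evil" edge, and only the covers that are minimal under set inclusion are kept.

from itertools import product

def minimal_covers(evil_edges):
    minimum = []
    for cover in map(set, product(*evil_edges)):
        keep = []
        for s in minimum:
            if s < cover:        # an already-found cover is strictly smaller: discard this one
                break
            elif cover < s:      # the new cover strictly improves on s: drop s
                continue
            keep.append(s)
        else:
            minimum = keep + [cover]
    return minimum

# Two evil edges sharing qubit 1: delete qubit 1 alone, or delete qubits 0 and 2.
print(minimal_covers([(0, 1), (1, 2)]))    # [{0, 2}, {1}]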
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
processor._compute_deletions
def _compute_deletions(self): """If there are fewer than self._proc_limit possible deletion sets, compute all subprocessors obtained by deleting a minimal subset of qubits. """ M, N, L, edgelist = self.M, self.N, self.L, self._edgelist if 2**len(self._evil) <= self._proc_limit: deletions = self._compute_all_deletions() self._processors = [self._subprocessor(d) for d in deletions] else: self._processors = None
python
def _compute_deletions(self): """If there are fewer than self._proc_limit possible deletion sets, compute all subprocessors obtained by deleting a minimal subset of qubits. """ M, N, L, edgelist = self.M, self.N, self.L, self._edgelist if 2**len(self._evil) <= self._proc_limit: deletions = self._compute_all_deletions() self._processors = [self._subprocessor(d) for d in deletions] else: self._processors = None
[ "def", "_compute_deletions", "(", "self", ")", ":", "M", ",", "N", ",", "L", ",", "edgelist", "=", "self", ".", "M", ",", "self", ".", "N", ",", "self", ".", "L", ",", "self", ".", "_edgelist", "if", "2", "**", "len", "(", "self", ".", "_evil", ")", "<=", "self", ".", "_proc_limit", ":", "deletions", "=", "self", ".", "_compute_all_deletions", "(", ")", "self", ".", "_processors", "=", "[", "self", ".", "_subprocessor", "(", "d", ")", "for", "d", "in", "deletions", "]", "else", ":", "self", ".", "_processors", "=", "None" ]
If there are fewer than self._proc_limit possible deletion sets, compute all subprocessors obtained by deleting a minimal subset of qubits.
[ "If", "there", "are", "fewer", "than", "self", ".", "_proc_limit", "possible", "deletion", "sets", "compute", "all", "subprocessors", "obtained", "by", "deleting", "a", "minimal", "subset", "of", "qubits", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L836-L846
train
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
processor._random_subprocessor
def _random_subprocessor(self): """Creates a random subprocessor where there is a coupler between every pair of working qubits on opposite sides of the same cell. This is guaranteed to be minimal in that adding a qubit back in will reintroduce a bad coupler, but not to have minimum size. OUTPUT: an :class:`eden_processor` instance """ deletion = set() for e in self._evil: if e[0] in deletion or e[1] in deletion: continue deletion.add(choice(e)) return self._subprocessor(deletion)
python
def _random_subprocessor(self): """Creates a random subprocessor where there is a coupler between every pair of working qubits on opposite sides of the same cell. This is guaranteed to be minimal in that adding a qubit back in will reintroduce a bad coupler, but not to have minimum size. OUTPUT: an :class:`eden_processor` instance """ deletion = set() for e in self._evil: if e[0] in deletion or e[1] in deletion: continue deletion.add(choice(e)) return self._subprocessor(deletion)
[ "def", "_random_subprocessor", "(", "self", ")", ":", "deletion", "=", "set", "(", ")", "for", "e", "in", "self", ".", "_evil", ":", "if", "e", "[", "0", "]", "in", "deletion", "or", "e", "[", "1", "]", "in", "deletion", ":", "continue", "deletion", ".", "add", "(", "choice", "(", "e", ")", ")", "return", "self", ".", "_subprocessor", "(", "deletion", ")" ]
Creates a random subprocessor where there is a coupler between every pair of working qubits on opposite sides of the same cell. This is guaranteed to be minimal in that adding a qubit back in will reintroduce a bad coupler, but not to have minimum size. OUTPUT: an :class:`eden_processor` instance
[ "Creates", "a", "random", "subprocessor", "where", "there", "is", "a", "coupler", "between", "every", "pair", "of", "working", "qubits", "on", "opposite", "sides", "of", "the", "same", "cell", ".", "This", "is", "guaranteed", "to", "be", "minimal", "in", "that", "adding", "a", "qubit", "back", "in", "will", "reintroduce", "a", "bad", "coupler", "but", "not", "to", "have", "minimum", "size", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L848-L862
train
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
processor._objective_bestscore
def _objective_bestscore(self, old, new): """An objective function that returns True if new has a better score than old, and ``False`` otherwise. INPUTS: old (tuple): a tuple (score, embedding) new (tuple): a tuple (score, embedding) """ (oldscore, oldthing) = old (newscore, newthing) = new if oldscore is None: return True if newscore is None: return False return oldscore < newscore
python
def _objective_bestscore(self, old, new): """An objective function that returns True if new has a better score than old, and ``False`` otherwise. INPUTS: old (tuple): a tuple (score, embedding) new (tuple): a tuple (score, embedding) """ (oldscore, oldthing) = old (newscore, newthing) = new if oldscore is None: return True if newscore is None: return False return oldscore < newscore
[ "def", "_objective_bestscore", "(", "self", ",", "old", ",", "new", ")", ":", "(", "oldscore", ",", "oldthing", ")", "=", "old", "(", "newscore", ",", "newthing", ")", "=", "new", "if", "oldscore", "is", "None", ":", "return", "True", "if", "newscore", "is", "None", ":", "return", "False", "return", "oldscore", "<", "newscore" ]
An objective function that returns True if new has a better score than old, and ``False`` otherwise. INPUTS: old (tuple): a tuple (score, embedding) new (tuple): a tuple (score, embedding)
[ "An", "objective", "function", "that", "returns", "True", "if", "new", "has", "a", "better", "score", "than", "old", "and", "False", "otherwise", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L913-L929
train
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
processor.nativeCliqueEmbed
def nativeCliqueEmbed(self, width): """Compute a maximum-sized native clique embedding in an induced subgraph of chimera with chainsize ``width+1``. If possible, returns a uniform choice among all largest cliques. INPUTS: width: width of the squares to search, also `chainlength-1` OUTPUT: clique: a list containing lists of qubits, each associated to a chain. These lists of qubits are carefully arranged so that >>> [zip(chain,chain[1:]) for chain in clique] is a list of valid couplers. Note: this fails to return a uniform choice if there are broken intra-cell couplers between working qubits. (the choice is uniform on a particular subprocessor) """ def f(x): return x.nativeCliqueEmbed(width) objective = self._objective_bestscore return self._translate(self._map_to_processors(f, objective))
python
def nativeCliqueEmbed(self, width): """Compute a maximum-sized native clique embedding in an induced subgraph of chimera with chainsize ``width+1``. If possible, returns a uniform choice among all largest cliques. INPUTS: width: width of the squares to search, also `chainlength-1` OUTPUT: clique: a list containing lists of qubits, each associated to a chain. These lists of qubits are carefully arranged so that >>> [zip(chain,chain[1:]) for chain in clique] is a list of valid couplers. Note: this fails to return a uniform choice if there are broken intra-cell couplers between working qubits. (the choice is uniform on a particular subprocessor) """ def f(x): return x.nativeCliqueEmbed(width) objective = self._objective_bestscore return self._translate(self._map_to_processors(f, objective))
[ "def", "nativeCliqueEmbed", "(", "self", ",", "width", ")", ":", "def", "f", "(", "x", ")", ":", "return", "x", ".", "nativeCliqueEmbed", "(", "width", ")", "objective", "=", "self", ".", "_objective_bestscore", "return", "self", ".", "_translate", "(", "self", ".", "_map_to_processors", "(", "f", ",", "objective", ")", ")" ]
Compute a maximum-sized native clique embedding in an induced subgraph of chimera with chainsize ``width+1``. If possible, returns a uniform choice among all largest cliques. INPUTS: width: width of the squares to search, also `chainlength-1` OUTPUT: clique: a list containing lists of qubits, each associated to a chain. These lists of qubits are carefully arranged so that >>> [zip(chain,chain[1:]) for chain in clique] is a list of valid couplers. Note: this fails to return a uniform choice if there are broken intra-cell couplers between working qubits. (the choice is uniform on a particular subprocessor)
[ "Compute", "a", "maximum", "-", "sized", "native", "clique", "embedding", "in", "an", "induced", "subgraph", "of", "chimera", "with", "chainsize", "width", "+", "1", ".", "If", "possible", "returns", "a", "uniform", "choice", "among", "all", "largest", "cliques", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L1038-L1062
train
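The docstring above promises that each returned chain is ordered so that consecutive qubits are coupled, i.e. zip(chain, chain[1:]) yields valid couplers. The short helper below (an editor's sketch, not part of the record) shows how a caller might rely on that; the processor instance and the example chain values are hypothetical.

def chain_couplers(clique):
    # For every chain in the embedding, list the intra-chain couplers that
    # the chain ordering guarantees are present in the target graph.
    return [list(zip(chain, chain[1:])) for chain in clique]

# Hypothetical usage, assuming `proc` was built from a Chimera edge list:
#   clique = proc.nativeCliqueEmbed(width=2)   # chains of length width + 1
#   couplers = chain_couplers(clique)
print(chain_couplers([[0, 4, 12], [1, 5, 13]]))
# -> [[(0, 4), (4, 12)], [(1, 5), (5, 13)]]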
dwavesystems/dwave-system
dwave/embedding/polynomialembedder.py
processor._translate
def _translate(self, embedding): "Translates an embedding back to linear coordinates if necessary." if embedding is None: return None if not self._linear: return embedding return [_bulk_to_linear(self.M, self.N, self.L, chain) for chain in embedding]
python
def _translate(self, embedding): "Translates an embedding back to linear coordinates if necessary." if embedding is None: return None if not self._linear: return embedding return [_bulk_to_linear(self.M, self.N, self.L, chain) for chain in embedding]
[ "def", "_translate", "(", "self", ",", "embedding", ")", ":", "if", "embedding", "is", "None", ":", "return", "None", "if", "not", "self", ".", "_linear", ":", "return", "embedding", "return", "[", "_bulk_to_linear", "(", "self", ".", "M", ",", "self", ".", "N", ",", "self", ".", "L", ",", "chain", ")", "for", "chain", "in", "embedding", "]" ]
Translates an embedding back to linear coordinates if necessary.
[ "Translates", "an", "embedding", "back", "to", "linear", "coordinates", "if", "necessary", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/polynomialembedder.py#L1152-L1158
train
dwavesystems/dwave-system
dwave/system/composites/virtual_graph.py
_validate_chain_strength
def _validate_chain_strength(sampler, chain_strength): """Validate the provided chain strength, checking J-ranges of the sampler's children. Args: chain_strength (float) The provided chain strength. Use None to use J-range. Returns (float): A valid chain strength, either provided or based on available J-range. Positive finite float. """ properties = sampler.properties if 'extended_j_range' in properties: max_chain_strength = - min(properties['extended_j_range']) elif 'j_range' in properties: max_chain_strength = - min(properties['j_range']) else: raise ValueError("input sampler should have 'j_range' and/or 'extended_j_range' property.") if chain_strength is None: chain_strength = max_chain_strength elif chain_strength > max_chain_strength: raise ValueError("Provided chain strength exceeds the allowed range.") return chain_strength
python
def _validate_chain_strength(sampler, chain_strength): """Validate the provided chain strength, checking J-ranges of the sampler's children. Args: chain_strength (float) The provided chain strength. Use None to use J-range. Returns (float): A valid chain strength, either provided or based on available J-range. Positive finite float. """ properties = sampler.properties if 'extended_j_range' in properties: max_chain_strength = - min(properties['extended_j_range']) elif 'j_range' in properties: max_chain_strength = - min(properties['j_range']) else: raise ValueError("input sampler should have 'j_range' and/or 'extended_j_range' property.") if chain_strength is None: chain_strength = max_chain_strength elif chain_strength > max_chain_strength: raise ValueError("Provided chain strength exceeds the allowed range.") return chain_strength
[ "def", "_validate_chain_strength", "(", "sampler", ",", "chain_strength", ")", ":", "properties", "=", "sampler", ".", "properties", "if", "'extended_j_range'", "in", "properties", ":", "max_chain_strength", "=", "-", "min", "(", "properties", "[", "'extended_j_range'", "]", ")", "elif", "'j_range'", "in", "properties", ":", "max_chain_strength", "=", "-", "min", "(", "properties", "[", "'j_range'", "]", ")", "else", ":", "raise", "ValueError", "(", "\"input sampler should have 'j_range' and/or 'extended_j_range' property.\"", ")", "if", "chain_strength", "is", "None", ":", "chain_strength", "=", "max_chain_strength", "elif", "chain_strength", ">", "max_chain_strength", ":", "raise", "ValueError", "(", "\"Provided chain strength exceedds the allowed range.\"", ")", "return", "chain_strength" ]
Validate the provided chain strength, checking J-ranges of the sampler's children. Args: chain_strength (float) The provided chain strength. Use None to use J-range. Returns (float): A valid chain strength, either provided or based on available J-range. Positive finite float.
[ "Validate", "the", "provided", "chain", "strength", "checking", "J", "-", "ranges", "of", "the", "sampler", "s", "children", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/composites/virtual_graph.py#L368-L392
train
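To make the J-range rule in the record above concrete, the following sketch (an editor's addition with made-up property values) computes the maximum allowed chain strength the same way: prefer extended_j_range, fall back to j_range, and take the negated minimum of whichever range is available.

class FakeSampler:
    # Stand-in for a structured sampler; real solvers report their own ranges.
    properties = {'j_range': [-1.0, 1.0], 'extended_j_range': [-2.0, 1.0]}

def max_allowed_chain_strength(sampler):
    props = sampler.properties
    if 'extended_j_range' in props:
        return -min(props['extended_j_range'])   # e.g. -(-2.0) == 2.0
    if 'j_range' in props:
        return -min(props['j_range'])
    raise ValueError("sampler reports neither 'j_range' nor 'extended_j_range'")

limit = max_allowed_chain_strength(FakeSampler())
print(limit)          # 2.0; a chain_strength of None would default to this
print(1.5 <= limit)   # True: a user-supplied 1.5 would be accepted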
dwavesystems/dwave-system
dwave/system/composites/virtual_graph.py
VirtualGraphComposite.sample
def sample(self, bqm, apply_flux_bias_offsets=True, **kwargs): """Sample from the given Ising model. Args: h (list/dict): Linear biases of the Ising model. If a list, the list's indices are used as variable labels. J (dict of (int, int):float): Quadratic biases of the Ising model. apply_flux_bias_offsets (bool, optional): If True, use the calculated flux_bias offsets (if available). **kwargs: Optional keyword arguments for the sampling method, specified per solver. Examples: This example uses :class:`.VirtualGraphComposite` to instantiate a composed sampler that submits an Ising problem to a D-Wave solver selected by the user's default :std:doc:`D-Wave Cloud Client configuration file <cloud-client:intro>`. The problem represents a logical NOT gate using penalty function :math:`P = xy`, where variable x is the gate's input and y the output. This simple two-variable problem is manually minor-embedded to a single :std:doc:`Chimera <system:intro>` unit cell: each variable is represented by a chain of half the cell's qubits, x as qubits 0, 1, 4, 5, and y as qubits 2, 3, 6, 7. The chain strength is set to half the maximum allowed found from querying the solver's extended J range. In this example, the ten returned samples all represent valid states of the NOT gate. >>> from dwave.system.samplers import DWaveSampler >>> from dwave.system.composites import VirtualGraphComposite >>> embedding = {'x': {0, 4, 1, 5}, 'y': {2, 6, 3, 7}} >>> DWaveSampler().properties['extended_j_range'] # doctest: +SKIP [-2.0, 1.0] >>> sampler = VirtualGraphComposite(DWaveSampler(), embedding, chain_strength=1) # doctest: +SKIP >>> h = {} >>> J = {('x', 'y'): 1} >>> response = sampler.sample_ising(h, J, num_reads=10) # doctest: +SKIP >>> for sample in response.samples(): # doctest: +SKIP ... print(sample) ... {'y': -1, 'x': 1} {'y': 1, 'x': -1} {'y': -1, 'x': 1} {'y': -1, 'x': 1} {'y': -1, 'x': 1} {'y': 1, 'x': -1} {'y': 1, 'x': -1} {'y': 1, 'x': -1} {'y': -1, 'x': 1} {'y': 1, 'x': -1} See `Ocean Glossary <https://docs.ocean.dwavesys.com/en/latest/glossary.html>`_ for explanations of technical terms in descriptions of Ocean tools. """ child = self.child if apply_flux_bias_offsets: if self.flux_biases is not None: kwargs[FLUX_BIAS_KWARG] = self.flux_biases return child.sample(bqm, **kwargs)
python
def sample(self, bqm, apply_flux_bias_offsets=True, **kwargs): """Sample from the given Ising model. Args: h (list/dict): Linear biases of the Ising model. If a list, the list's indices are used as variable labels. J (dict of (int, int):float): Quadratic biases of the Ising model. apply_flux_bias_offsets (bool, optional): If True, use the calculated flux_bias offsets (if available). **kwargs: Optional keyword arguments for the sampling method, specified per solver. Examples: This example uses :class:`.VirtualGraphComposite` to instantiate a composed sampler that submits an Ising problem to a D-Wave solver selected by the user's default :std:doc:`D-Wave Cloud Client configuration file <cloud-client:intro>`. The problem represents a logical NOT gate using penalty function :math:`P = xy`, where variable x is the gate's input and y the output. This simple two-variable problem is manually minor-embedded to a single :std:doc:`Chimera <system:intro>` unit cell: each variable is represented by a chain of half the cell's qubits, x as qubits 0, 1, 4, 5, and y as qubits 2, 3, 6, 7. The chain strength is set to half the maximum allowed found from querying the solver's extended J range. In this example, the ten returned samples all represent valid states of the NOT gate. >>> from dwave.system.samplers import DWaveSampler >>> from dwave.system.composites import VirtualGraphComposite >>> embedding = {'x': {0, 4, 1, 5}, 'y': {2, 6, 3, 7}} >>> DWaveSampler().properties['extended_j_range'] # doctest: +SKIP [-2.0, 1.0] >>> sampler = VirtualGraphComposite(DWaveSampler(), embedding, chain_strength=1) # doctest: +SKIP >>> h = {} >>> J = {('x', 'y'): 1} >>> response = sampler.sample_ising(h, J, num_reads=10) # doctest: +SKIP >>> for sample in response.samples(): # doctest: +SKIP ... print(sample) ... {'y': -1, 'x': 1} {'y': 1, 'x': -1} {'y': -1, 'x': 1} {'y': -1, 'x': 1} {'y': -1, 'x': 1} {'y': 1, 'x': -1} {'y': 1, 'x': -1} {'y': 1, 'x': -1} {'y': -1, 'x': 1} {'y': 1, 'x': -1} See `Ocean Glossary <https://docs.ocean.dwavesys.com/en/latest/glossary.html>`_ for explanations of technical terms in descriptions of Ocean tools. """ child = self.child if apply_flux_bias_offsets: if self.flux_biases is not None: kwargs[FLUX_BIAS_KWARG] = self.flux_biases return child.sample(bqm, **kwargs)
[ "def", "sample", "(", "self", ",", "bqm", ",", "apply_flux_bias_offsets", "=", "True", ",", "*", "*", "kwargs", ")", ":", "child", "=", "self", ".", "child", "if", "apply_flux_bias_offsets", ":", "if", "self", ".", "flux_biases", "is", "not", "None", ":", "kwargs", "[", "FLUX_BIAS_KWARG", "]", "=", "self", ".", "flux_biases", "return", "child", ".", "sample", "(", "bqm", ",", "*", "*", "kwargs", ")" ]
Sample from the given Ising model. Args: h (list/dict): Linear biases of the Ising model. If a list, the list's indices are used as variable labels. J (dict of (int, int):float): Quadratic biases of the Ising model. apply_flux_bias_offsets (bool, optional): If True, use the calculated flux_bias offsets (if available). **kwargs: Optional keyword arguments for the sampling method, specified per solver. Examples: This example uses :class:`.VirtualGraphComposite` to instantiate a composed sampler that submits an Ising problem to a D-Wave solver selected by the user's default :std:doc:`D-Wave Cloud Client configuration file <cloud-client:intro>`. The problem represents a logical NOT gate using penalty function :math:`P = xy`, where variable x is the gate's input and y the output. This simple two-variable problem is manually minor-embedded to a single :std:doc:`Chimera <system:intro>` unit cell: each variable is represented by a chain of half the cell's qubits, x as qubits 0, 1, 4, 5, and y as qubits 2, 3, 6, 7. The chain strength is set to half the maximum allowed found from querying the solver's extended J range. In this example, the ten returned samples all represent valid states of the NOT gate. >>> from dwave.system.samplers import DWaveSampler >>> from dwave.system.composites import VirtualGraphComposite >>> embedding = {'x': {0, 4, 1, 5}, 'y': {2, 6, 3, 7}} >>> DWaveSampler().properties['extended_j_range'] # doctest: +SKIP [-2.0, 1.0] >>> sampler = VirtualGraphComposite(DWaveSampler(), embedding, chain_strength=1) # doctest: +SKIP >>> h = {} >>> J = {('x', 'y'): 1} >>> response = sampler.sample_ising(h, J, num_reads=10) # doctest: +SKIP >>> for sample in response.samples(): # doctest: +SKIP ... print(sample) ... {'y': -1, 'x': 1} {'y': 1, 'x': -1} {'y': -1, 'x': 1} {'y': -1, 'x': 1} {'y': -1, 'x': 1} {'y': 1, 'x': -1} {'y': 1, 'x': -1} {'y': 1, 'x': -1} {'y': -1, 'x': 1} {'y': 1, 'x': -1} See `Ocean Glossary <https://docs.ocean.dwavesys.com/en/latest/glossary.html>`_ for explanations of technical terms in descriptions of Ocean tools.
[ "Sample", "from", "the", "given", "Ising", "model", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/composites/virtual_graph.py#L299-L365
train
dwavesystems/dwave-system
dwave/system/flux_bias_offsets.py
get_flux_biases
def get_flux_biases(sampler, embedding, chain_strength, num_reads=1000, max_age=3600): """Get the flux bias offsets for sampler and embedding. Args: sampler (:obj:`.DWaveSampler`): A D-Wave sampler. embedding (dict[hashable, iterable]): Mapping from a source graph to the specified sampler’s graph (the target graph). The keys of embedding should be nodes in the source graph, the values should be an iterable of nodes in the target graph. chain_strength (number): Desired chain coupling strength. This is the magnitude of couplings between qubits in a chain. num_reads (int, optional, default=1000): The number of reads per system call if new flux biases need to be calculated. max_age (int, optional, default=3600): The maximum age (in seconds) allowed for previously calculated flux bias offsets. Returns: dict: A dict where the keys are the nodes in the chains and the values are the flux biases. """ if not isinstance(sampler, dimod.Sampler): raise TypeError("input sampler should be DWaveSampler") # try to read the chip_id, otherwise get the name system_name = sampler.properties.get('chip_id', str(sampler.__class__)) try: with cache_connect() as cur: fbo = get_flux_biases_from_cache(cur, embedding.values(), system_name, chain_strength=chain_strength, max_age=max_age) return fbo except MissingFluxBias: pass # if dwave-drivers is not available, then we can't calculate the biases try: import dwave.drivers as drivers except ImportError: msg = ("dwave-drivers not found, cannot calculate flux biases. dwave-drivers can be " "installed with " "'pip install dwave-drivers --extra-index-url https://pypi.dwavesys.com/simple'. " "See documentation for dwave-drivers license.") raise RuntimeError(msg) fbo = drivers.oneshot_flux_bias(sampler, embedding.values(), num_reads=num_reads, chain_strength=chain_strength) # store them in the cache with cache_connect() as cur: for chain in embedding.values(): v = next(iter(chain)) flux_bias = fbo.get(v, 0.0) insert_flux_bias(cur, chain, system_name, flux_bias, chain_strength) return fbo
python
def get_flux_biases(sampler, embedding, chain_strength, num_reads=1000, max_age=3600): """Get the flux bias offsets for sampler and embedding. Args: sampler (:obj:`.DWaveSampler`): A D-Wave sampler. embedding (dict[hashable, iterable]): Mapping from a source graph to the specified sampler’s graph (the target graph). The keys of embedding should be nodes in the source graph, the values should be an iterable of nodes in the target graph. chain_strength (number): Desired chain coupling strength. This is the magnitude of couplings between qubits in a chain. num_reads (int, optional, default=1000): The number of reads per system call if new flux biases need to be calculated. max_age (int, optional, default=3600): The maximum age (in seconds) allowed for previously calculated flux bias offsets. Returns: dict: A dict where the keys are the nodes in the chains and the values are the flux biases. """ if not isinstance(sampler, dimod.Sampler): raise TypeError("input sampler should be DWaveSampler") # try to read the chip_id, otherwise get the name system_name = sampler.properties.get('chip_id', str(sampler.__class__)) try: with cache_connect() as cur: fbo = get_flux_biases_from_cache(cur, embedding.values(), system_name, chain_strength=chain_strength, max_age=max_age) return fbo except MissingFluxBias: pass # if dwave-drivers is not available, then we can't calculate the biases try: import dwave.drivers as drivers except ImportError: msg = ("dwave-drivers not found, cannot calculate flux biases. dwave-drivers can be " "installed with " "'pip install dwave-drivers --extra-index-url https://pypi.dwavesys.com/simple'. " "See documentation for dwave-drivers license.") raise RuntimeError(msg) fbo = drivers.oneshot_flux_bias(sampler, embedding.values(), num_reads=num_reads, chain_strength=chain_strength) # store them in the cache with cache_connect() as cur: for chain in embedding.values(): v = next(iter(chain)) flux_bias = fbo.get(v, 0.0) insert_flux_bias(cur, chain, system_name, flux_bias, chain_strength) return fbo
[ "def", "get_flux_biases", "(", "sampler", ",", "embedding", ",", "chain_strength", ",", "num_reads", "=", "1000", ",", "max_age", "=", "3600", ")", ":", "if", "not", "isinstance", "(", "sampler", ",", "dimod", ".", "Sampler", ")", ":", "raise", "TypeError", "(", "\"input sampler should be DWaveSampler\"", ")", "# try to read the chip_id, otherwise get the name", "system_name", "=", "sampler", ".", "properties", ".", "get", "(", "'chip_id'", ",", "str", "(", "sampler", ".", "__class__", ")", ")", "try", ":", "with", "cache_connect", "(", ")", "as", "cur", ":", "fbo", "=", "get_flux_biases_from_cache", "(", "cur", ",", "embedding", ".", "values", "(", ")", ",", "system_name", ",", "chain_strength", "=", "chain_strength", ",", "max_age", "=", "max_age", ")", "return", "fbo", "except", "MissingFluxBias", ":", "pass", "# if dwave-drivers is not available, then we can't calculate the biases", "try", ":", "import", "dwave", ".", "drivers", "as", "drivers", "except", "ImportError", ":", "msg", "=", "(", "\"dwave-drivers not found, cannot calculate flux biases. dwave-drivers can be \"", "\"installed with \"", "\"'pip install dwave-drivers --extra-index-url https://pypi.dwavesys.com/simple'. \"", "\"See documentation for dwave-drivers license.\"", ")", "raise", "RuntimeError", "(", "msg", ")", "fbo", "=", "drivers", ".", "oneshot_flux_bias", "(", "sampler", ",", "embedding", ".", "values", "(", ")", ",", "num_reads", "=", "num_reads", ",", "chain_strength", "=", "chain_strength", ")", "# store them in the cache", "with", "cache_connect", "(", ")", "as", "cur", ":", "for", "chain", "in", "embedding", ".", "values", "(", ")", ":", "v", "=", "next", "(", "iter", "(", "chain", ")", ")", "flux_bias", "=", "fbo", ".", "get", "(", "v", ",", "0.0", ")", "insert_flux_bias", "(", "cur", ",", "chain", ",", "system_name", ",", "flux_bias", ",", "chain_strength", ")", "return", "fbo" ]
Get the flux bias offsets for sampler and embedding. Args: sampler (:obj:`.DWaveSampler`): A D-Wave sampler. embedding (dict[hashable, iterable]): Mapping from a source graph to the specified sampler’s graph (the target graph). The keys of embedding should be nodes in the source graph, the values should be an iterable of nodes in the target graph. chain_strength (number): Desired chain coupling strength. This is the magnitude of couplings between qubits in a chain. num_reads (int, optional, default=1000): The number of reads per system call if new flux biases need to be calculated. max_age (int, optional, default=3600): The maximum age (in seconds) allowed for previously calculated flux bias offsets. Returns: dict: A dict where the keys are the nodes in the chains and the values are the flux biases.
[ "Get", "the", "flux", "bias", "offsets", "for", "sampler", "and", "embedding", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/system/flux_bias_offsets.py#L26-L88
train
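The function above follows a cache-first pattern: look up previously computed flux-bias offsets for this system, chain set, and chain strength, and only fall back to the dwave-drivers calculation (storing the fresh result) on a miss. The generic sketch below is an editor's illustration of that pattern with an in-memory dict; the real implementation uses a SQLite-backed cache, honours max_age, and computes fresh offsets with drivers.oneshot_flux_bias.

_cache = {}

def cached_flux_biases(system_name, chains, chain_strength, compute):
    # Key on everything that changes the answer; frozensets make chain order irrelevant.
    key = (system_name, frozenset(frozenset(c) for c in chains), chain_strength)
    if key in _cache:            # cache hit: reuse the stored offsets
        return _cache[key]
    fbo = compute(chains)        # cache miss: run the expensive calculation
    _cache[key] = fbo            # remember it for the next caller
    return fbo

# Example with a dummy compute function that assigns a zero offset everywhere:
chains = [[0, 4], [1, 5]]
print(cached_flux_biases('chip_a', chains, 1.0,
                         lambda cs: {q: 0.0 for c in cs for q in c}))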
dwavesystems/dwave-system
dwave/embedding/chimera.py
find_clique_embedding
def find_clique_embedding(k, m, n=None, t=None, target_edges=None): """Find an embedding for a clique in a Chimera graph. Given a target :term:`Chimera` graph size, and a clique (fully connected graph), attempts to find an embedding. Args: k (int/iterable): Clique to embed. If k is an integer, generates an embedding for a clique of size k labelled [0,k-1]. If k is an iterable, generates an embedding for a clique of size len(k), where iterable k is the variable labels. m (int): Number of rows in the Chimera lattice. n (int, optional, default=m): Number of columns in the Chimera lattice. t (int, optional, default 4): Size of the shore within each Chimera tile. target_edges (iterable[edge]): A list of edges in the target Chimera graph. Nodes are labelled as returned by :func:`~dwave_networkx.generators.chimera_graph`. Returns: dict: An embedding mapping a clique to the Chimera lattice. Examples: The first example finds an embedding for a :math:`K_4` complete graph in a single Chimera unit cell. The second for an alphanumerically labeled :math:`K_3` graph in 4 unit cells. >>> from dwave.embedding.chimera import find_clique_embedding ... >>> embedding = find_clique_embedding(4, 1, 1) >>> embedding # doctest: +SKIP {0: [4, 0], 1: [5, 1], 2: [6, 2], 3: [7, 3]} >>> from dwave.embedding.chimera import find_clique_embedding ... >>> embedding = find_clique_embedding(['a', 'b', 'c'], m=2, n=2, t=4) >>> embedding # doctest: +SKIP {'a': [20, 16], 'b': [21, 17], 'c': [22, 18]} """ import random _, nodes = k m, n, t, target_edges = _chimera_input(m, n, t, target_edges) # Special cases to return optimal embeddings for small k. The general clique embedder uses chains of length # at least 2, whereas cliques of size 1 and 2 can be embedded with single-qubit chains. if len(nodes) == 1: # If k == 1 we simply return a single chain consisting of a randomly sampled qubit. qubits = set().union(*target_edges) qubit = random.choice(tuple(qubits)) embedding = [[qubit]] elif len(nodes) == 2: # If k == 2 we simply return two one-qubit chains that are the endpoints of a randomly sampled coupler. if not isinstance(target_edges, list): edges = list(target_edges) edge = edges[random.randrange(len(edges))] embedding = [[edge[0]], [edge[1]]] else: # General case for k > 2. embedding = processor(target_edges, M=m, N=n, L=t).tightestNativeClique(len(nodes)) if not embedding: raise ValueError("cannot find a K{} embedding for given Chimera lattice".format(k)) return dict(zip(nodes, embedding))
python
def find_clique_embedding(k, m, n=None, t=None, target_edges=None): """Find an embedding for a clique in a Chimera graph. Given a target :term:`Chimera` graph size, and a clique (fully connected graph), attempts to find an embedding. Args: k (int/iterable): Clique to embed. If k is an integer, generates an embedding for a clique of size k labelled [0,k-1]. If k is an iterable, generates an embedding for a clique of size len(k), where iterable k is the variable labels. m (int): Number of rows in the Chimera lattice. n (int, optional, default=m): Number of columns in the Chimera lattice. t (int, optional, default 4): Size of the shore within each Chimera tile. target_edges (iterable[edge]): A list of edges in the target Chimera graph. Nodes are labelled as returned by :func:`~dwave_networkx.generators.chimera_graph`. Returns: dict: An embedding mapping a clique to the Chimera lattice. Examples: The first example finds an embedding for a :math:`K_4` complete graph in a single Chimera unit cell. The second for an alphanumerically labeled :math:`K_3` graph in 4 unit cells. >>> from dwave.embedding.chimera import find_clique_embedding ... >>> embedding = find_clique_embedding(4, 1, 1) >>> embedding # doctest: +SKIP {0: [4, 0], 1: [5, 1], 2: [6, 2], 3: [7, 3]} >>> from dwave.embedding.chimera import find_clique_embedding ... >>> embedding = find_clique_embedding(['a', 'b', 'c'], m=2, n=2, t=4) >>> embedding # doctest: +SKIP {'a': [20, 16], 'b': [21, 17], 'c': [22, 18]} """ import random _, nodes = k m, n, t, target_edges = _chimera_input(m, n, t, target_edges) # Special cases to return optimal embeddings for small k. The general clique embedder uses chains of length # at least 2, whereas cliques of size 1 and 2 can be embedded with single-qubit chains. if len(nodes) == 1: # If k == 1 we simply return a single chain consisting of a randomly sampled qubit. qubits = set().union(*target_edges) qubit = random.choice(tuple(qubits)) embedding = [[qubit]] elif len(nodes) == 2: # If k == 2 we simply return two one-qubit chains that are the endpoints of a randomly sampled coupler. if not isinstance(target_edges, list): edges = list(target_edges) edge = edges[random.randrange(len(edges))] embedding = [[edge[0]], [edge[1]]] else: # General case for k > 2. embedding = processor(target_edges, M=m, N=n, L=t).tightestNativeClique(len(nodes)) if not embedding: raise ValueError("cannot find a K{} embedding for given Chimera lattice".format(k)) return dict(zip(nodes, embedding))
[ "def", "find_clique_embedding", "(", "k", ",", "m", ",", "n", "=", "None", ",", "t", "=", "None", ",", "target_edges", "=", "None", ")", ":", "import", "random", "_", ",", "nodes", "=", "k", "m", ",", "n", ",", "t", ",", "target_edges", "=", "_chimera_input", "(", "m", ",", "n", ",", "t", ",", "target_edges", ")", "# Special cases to return optimal embeddings for small k. The general clique embedder uses chains of length", "# at least 2, whereas cliques of size 1 and 2 can be embedded with single-qubit chains.", "if", "len", "(", "nodes", ")", "==", "1", ":", "# If k == 1 we simply return a single chain consisting of a randomly sampled qubit.", "qubits", "=", "set", "(", ")", ".", "union", "(", "*", "target_edges", ")", "qubit", "=", "random", ".", "choice", "(", "tuple", "(", "qubits", ")", ")", "embedding", "=", "[", "[", "qubit", "]", "]", "elif", "len", "(", "nodes", ")", "==", "2", ":", "# If k == 2 we simply return two one-qubit chains that are the endpoints of a randomly sampled coupler.", "if", "not", "isinstance", "(", "target_edges", ",", "list", ")", ":", "edges", "=", "list", "(", "target_edges", ")", "edge", "=", "edges", "[", "random", ".", "randrange", "(", "len", "(", "edges", ")", ")", "]", "embedding", "=", "[", "[", "edge", "[", "0", "]", "]", ",", "[", "edge", "[", "1", "]", "]", "]", "else", ":", "# General case for k > 2.", "embedding", "=", "processor", "(", "target_edges", ",", "M", "=", "m", ",", "N", "=", "n", ",", "L", "=", "t", ")", ".", "tightestNativeClique", "(", "len", "(", "nodes", ")", ")", "if", "not", "embedding", ":", "raise", "ValueError", "(", "\"cannot find a K{} embedding for given Chimera lattice\"", ".", "format", "(", "k", ")", ")", "return", "dict", "(", "zip", "(", "nodes", ",", "embedding", ")", ")" ]
Find an embedding for a clique in a Chimera graph. Given a target :term:`Chimera` graph size, and a clique (fully connected graph), attempts to find an embedding. Args: k (int/iterable): Clique to embed. If k is an integer, generates an embedding for a clique of size k labelled [0,k-1]. If k is an iterable, generates an embedding for a clique of size len(k), where iterable k is the variable labels. m (int): Number of rows in the Chimera lattice. n (int, optional, default=m): Number of columns in the Chimera lattice. t (int, optional, default 4): Size of the shore within each Chimera tile. target_edges (iterable[edge]): A list of edges in the target Chimera graph. Nodes are labelled as returned by :func:`~dwave_networkx.generators.chimera_graph`. Returns: dict: An embedding mapping a clique to the Chimera lattice. Examples: The first example finds an embedding for a :math:`K_4` complete graph in a single Chimera unit cell. The second for an alphanumerically labeled :math:`K_3` graph in 4 unit cells. >>> from dwave.embedding.chimera import find_clique_embedding ... >>> embedding = find_clique_embedding(4, 1, 1) >>> embedding # doctest: +SKIP {0: [4, 0], 1: [5, 1], 2: [6, 2], 3: [7, 3]} >>> from dwave.embedding.chimera import find_clique_embedding ... >>> embedding = find_clique_embedding(['a', 'b', 'c'], m=2, n=2, t=4) >>> embedding # doctest: +SKIP {'a': [20, 16], 'b': [21, 17], 'c': [22, 18]}
[ "Find", "an", "embedding", "for", "a", "clique", "in", "a", "Chimera", "graph", "." ]
86a1698f15ccd8b0ece0ed868ee49292d3f67f5b
https://github.com/dwavesystems/dwave-system/blob/86a1698f15ccd8b0ece0ed868ee49292d3f67f5b/dwave/embedding/chimera.py#L27-L106
train