Code
stringlengths 103
85.9k
| Summary
listlengths 0
94
|
---|---|
def line_plot(df, x='year', y='value', ax=None, legend=None, title=True,
              color=None, marker=None, linestyle=None, cmap=None,
              fill_between=None, final_ranges=None,
              rm_legend_label=[], **kwargs):
    """Plot data as lines with or without markers.

    Parameters
    ----------
    df : pd.DataFrame
        Data to plot as a long-form data frame
    x : string, optional
        The column to use for x-axis values (default: 'year')
    y : string, optional
        The column to use for y-axis values (default: 'value')
    ax : matplotlib.Axes, optional
        Axes to draw on; a new figure/axes pair is created when None
    legend : bool or dictionary, optional
        Add a legend. If a dictionary is provided, it is used as keyword
        arguments when creating the legend. default: None
    title : bool or string, optional
        Display a default or custom title.
    color : string, optional
        A valid matplotlib color or a column name; when a column name,
        rows sharing a value share the same color. default: None
    marker : string, optional
        A valid matplotlib marker or a column name (as for color).
    linestyle : string, optional
        A valid matplotlib linestyle or a column name (as for color).
    cmap : string, optional
        A colormap to use. default: None
    fill_between : boolean or dict, optional
        Fill between minima/maxima of the 'color' argument; requires the
        `color` kwarg. True uses default `ax.fill_between()` arguments, a
        dict overrides them. default: None
    final_ranges : boolean or dict, optional
        Add vertical lines between minima/maxima of the 'color' argument in
        the last period plotted; requires the `color` kwarg. True uses
        default `ax.axvline()` arguments, a dict overrides them.
        default: None
    rm_legend_label : string, list, optional
        Remove the color, marker, or linestyle label in the legend.
        default: []
    kwargs : Additional arguments passed to pd.DataFrame.plot()
    """
    if ax is None:
        fig, ax = plt.subplots()

    # assign styling properties
    props = assign_style_props(df, color=color, marker=marker,
                               linestyle=linestyle, cmap=cmap)

    if fill_between and 'color' not in props:
        raise ValueError('Must use `color` kwarg if using `fill_between`')
    if final_ranges and 'color' not in props:
        raise ValueError('Must use `color` kwarg if using `final_ranges`')

    # reshape data for use in line_plot
    df = reshape_line_plot(df, x, y)  # long form to one column per line

    # determine index of column name in reshaped dataframe
    prop_idx = {}
    for kind, var in [('color', color), ('marker', marker),
                      ('linestyle', linestyle)]:
        if var is not None and var in df.columns.names:
            prop_idx[kind] = df.columns.names.index(var)

    # plot data, keeping track of which legend labels to apply
    no_label = [rm_legend_label] if isstr(rm_legend_label) else rm_legend_label

    # NOTE(review): `iteritems` and `min/max(level=0)` below were removed in
    # pandas >= 2.0 - confirm the project pins a compatible pandas version
    for col, data in df.iteritems():
        pargs = {}
        labels = []
        # build plotting args and line legend labels
        for key, kind, var in [('c', 'color', color),
                               ('marker', 'marker', marker),
                               ('linestyle', 'linestyle', linestyle)]:
            if kind in props:
                label = col[prop_idx[kind]]
                pargs[key] = props[kind][label]
                if kind not in no_label:
                    labels.append(repr(label).lstrip("u'").strip("'"))
            else:
                pargs[key] = var
        kwargs.update(pargs)
        data = data.dropna()
        data.plot(ax=ax, **kwargs)
        if labels:
            ax.lines[-1].set_label(' '.join(labels))

    if fill_between:
        _kwargs = {'alpha': 0.25} if fill_between in [True, None] \
            else fill_between
        data = df.T
        columns = data.columns
        # get outer boundary mins and maxes
        allmins = data.groupby(color).min()
        intermins = (
            data.dropna(axis=1).groupby(color).min()  # nonan data
            .reindex(columns=columns)  # refill with nans
            .T.interpolate(method='index').T  # interpolate
        )
        mins = pd.concat([allmins, intermins]).min(level=0)
        allmaxs = data.groupby(color).max()
        intermaxs = (
            data.dropna(axis=1).groupby(color).max()  # nonan data
            .reindex(columns=columns)  # refill with nans
            .T.interpolate(method='index').T  # interpolate
        )
        maxs = pd.concat([allmaxs, intermaxs]).max(level=0)
        # do the fill
        for idx in mins.index:
            ymin = mins.loc[idx]
            ymax = maxs.loc[idx]
            ax.fill_between(ymin.index, ymin, ymax,
                            facecolor=props['color'][idx], **_kwargs)

    # add bars to the end of the plot showing range
    if final_ranges:
        # have to explicitly draw it to get the tick labels (these change once
        # you add the vlines)
        plt.gcf().canvas.draw()
        _kwargs = {'linewidth': 2} if final_ranges in [True, None] \
            else final_ranges
        first = df.index[0]
        final = df.index[-1]
        mins = df.T.groupby(color).min()[final]
        maxs = df.T.groupby(color).max()[final]
        ymin, ymax = ax.get_ylim()
        ydiff = ymax - ymin
        xmin, xmax = ax.get_xlim()
        xdiff = xmax - xmin
        xticks = ax.get_xticks()
        xlabels = ax.get_xticklabels()
        # 1.5% increase seems to be ok per extra line
        extra_space = 0.015
        for i, idx in enumerate(mins.index):
            xpos = final + xdiff * extra_space * (i + 1)
            _ymin = (mins[idx] - ymin) / ydiff
            _ymax = (maxs[idx] - ymin) / ydiff
            ax.axvline(xpos, ymin=_ymin, ymax=_ymax,
                       color=props['color'][idx], **_kwargs)
        # for equal spacing between xmin and first datapoint and xmax and last
        # line
        ax.set_xlim(xmin, xpos + first - xmin)
        ax.set_xticks(xticks)
        ax.set_xticklabels(xlabels)

    # build unique legend handles and labels
    handles, labels = ax.get_legend_handles_labels()
    handles, labels = np.array(handles), np.array(labels)
    _, idx = np.unique(labels, return_index=True)
    handles, labels = handles[idx], labels[idx]
    if legend is not False:
        _add_legend(ax, handles, labels, legend)

    # add default labels if possible
    ax.set_xlabel(x.title())
    units = df.columns.get_level_values('unit').unique()
    units_for_ylabel = len(units) == 1 and x == 'year' and y == 'value'
    ylabel = units[0] if units_for_ylabel else y.title()
    ax.set_ylabel(ylabel)

    # build a default title if possible
    if title:
        default_title = []
        for var in ['model', 'scenario', 'region', 'variable']:
            if var in df.columns.names:
                values = df.columns.get_level_values(var).unique()
                if len(values) == 1:
                    default_title.append('{}: {}'.format(var, values[0]))
        title = ' '.join(default_title) if title is True else title
        ax.set_title(title)

    return ax, handles, labels
"Plot data as lines with or without markers.\n\n Parameters\n ----------\n df : pd.DataFrame\n Data to plot as a long-form data frame\n x : string, optional\n The column to use for x-axis values\n default: year\n y : string, optional\n The column to use for y-axis values\n default: value\n ax : matplotlib.Axes, optional\n legend : bool or dictionary, optional\n Add a legend. If a dictionary is provided, it will be used as keyword\n arguments in creating the legend.\n default: None (displays legend only if less than 13 entries)\n title : bool or string, optional\n Display a default or custom title.\n color : string, optional\n A valid matplotlib color or column name. If a column name, common\n values will be provided the same color.\n default: None\n marker : string, optional\n A valid matplotlib marker or column name. If a column name, common\n values will be provided the same marker.\n default: None\n linestyle : string, optional\n A valid matplotlib linestyle or column name. If a column name, common\n values will be provided the same linestyle.\n default: None\n cmap : string, optional\n A colormap to use.\n default: None\n fill_between : boolean or dict, optional\n Fill lines between minima/maxima of the 'color' argument. This can only\n be used if also providing a 'color' argument. If this is True, then\n default arguments will be provided to `ax.fill_between()`. If this is a\n dictionary, those arguments will be provided instead of defaults.\n default: None\n final_ranges : boolean or dict, optional\n Add vertical line between minima/maxima of the 'color' argument in the\n last period plotted. This can only be used if also providing a 'color'\n argument. If this is True, then default arguments will be provided to\n `ax.axvline()`. 
If this is a dictionary, those arguments will be\n provided instead of defaults.\n default: None\n rm_legend_label : string, list, optional\n Remove the color, marker, or linestyle label in the legend.\n default: []\n kwargs : Additional arguments to pass to the pd.DataFrame.plot() function\n "
]
|
def set_panel_label(label, ax=None, x=0.05, y=0.9):
    """
    Add a panel label to the figure/axes, by default in the top-left corner.

    :param label: text to be added as the panel label
    :param ax: matplotlib Axes to annotate; uses the current figure if None
    :param x: relative location of the label along the x-axis (default 0.05)
    :param y: relative location of the label along the y-axis (default 0.9)
    """
    def _absolute(limits, fraction):
        # map a 0-1 fraction onto the axis' data limits
        low, high = limits
        return low + (high - low) * fraction

    if ax is None:
        plt.text(_absolute(plt.xlim(), x), _absolute(plt.ylim(), y), label)
    else:
        ax.text(_absolute(ax.get_xlim(), x), _absolute(ax.get_ylim(), y), label)
"Add a panel label to the figure/axes, by default in the top-left corner\n\n Parameters\n ----------\n label : str\n text to be added as panel label\n ax : matplotlib.Axes, optional\n panel to which to add the panel label\n x : number, default 0.05\n relative location of label to x-axis\n y : number, default 0.9\n relative location of label to y-axis\n "
]
|
def logger():
    """
    Access the global logger, lazily configuring basic logging at INFO level
    on first use.
    """
    global _LOGGER
    if _LOGGER is None:
        logging.basicConfig()
        root = logging.getLogger()
        root.setLevel('INFO')
        _LOGGER = root
    return _LOGGER
"Access global logger"
]
|
def nodes(self):
    """
    The nodes of this NodeBalancerConfig.

    This is a special derived_class relationship because NodeBalancerNode is
    the only api object that requires two parent_ids.  The result is cached
    on ``self._nodes`` so the API is only queried once.
    """
    if not hasattr(self, '_nodes'):
        base_url = "{}/{}".format(NodeBalancerConfig.api_endpoint, NodeBalancerNode.derived_url_path)
        result = self._client._get_objects(base_url, NodeBalancerNode, model=self, parent_id=(self.id, self.nodebalancer_id))

        self._set('_nodes', result)

    return self._nodes
"\n This is a special derived_class relationship because NodeBalancerNode is the\n only api object that requires two parent_ids\n "
]
|
def load_ssl_data(self, cert_file, key_file):
    """
    A convenience method that loads a cert and a key from files and sets them
    on this object.  This does *not* change protocol/port for you, or save
    anything - call `save()` afterwards for the changes to take effect.

    :param cert_file: A path to the file containing the public certificate
    :type cert_file: str
    :param key_file: A path to the file containing the unpassphrased private key
    :type key_file: str
    """
    # ssl_cert / ssl_key are defined dynamically through linode.objects.Base,
    # so pylint isn't privy
    # pylint: disable=attribute-defined-outside-init
    cert_path = os.path.expanduser(cert_file)
    if os.path.isfile(cert_path):
        with open(cert_path) as cert:
            self.ssl_cert = cert.read()

    key_path = os.path.expanduser(key_file)
    if os.path.isfile(key_path):
        with open(key_path) as key:
            self.ssl_key = key.read()
"\n A convenience method that loads a cert and a key from files and sets them\n on this object. This can make enabling ssl easier (instead of you needing\n to load the files yourself).\n\n This does *not* change protocol/port for you, or save anything. Once this\n is called, you must still call `save()` on this object for the changes to\n take effect.\n\n :param cert_file: A path to the file containing the public certificate\n :type cert_file: str\n :param key_file: A path to the file containing the unpassphrased private key\n :type key_file: str\n "
]
|
def load_and_validate_keys(authorized_keys):
    """
    Loads authorized_keys as taken by :any:`instance_create`,
    :any:`disk_create` or :any:`rebuild`, and loads in any keys from any files
    provided.

    :param authorized_keys: A list of keys or paths to keys, or a single key

    :returns: A list of raw keys, or None if no keys were given
    :raises ValueError: if an entry doesn't appear to be a raw key and can't
        be opened as a file
    """
    if not authorized_keys:
        return None

    if not isinstance(authorized_keys, list):
        authorized_keys = [authorized_keys]

    # hoisted out of the loop - the accepted key-type prefixes never change
    accepted_types = ('ssh-dss', 'ssh-rsa', 'ecdsa-sha2-nistp', 'ssh-ed25519')

    ret = []
    for k in authorized_keys:
        # str.startswith accepts a tuple of prefixes, replacing the previous
        # any([...]) list comprehension
        if k.startswith(accepted_types):
            # this looks like a key, cool
            ret.append(k)
        else:
            # it doesn't appear to be a key.. is it a path to the key?
            k = os.path.expanduser(k)
            if os.path.isfile(k):
                with open(k) as f:
                    ret.append(f.read().rstrip())
            else:
                raise ValueError("authorized_keys must either be paths "
                                 "to the key files or a list of raw "
                                 "public key of one of these types: {}".format(accepted_types))

    return ret
"\n Loads authorized_keys as taken by :any:`instance_create`,\n :any:`disk_create` or :any:`rebuild`, and loads in any keys from any files\n provided.\n\n :param authorized_keys: A list of keys or paths to keys, or a single key\n\n :returns: A list of raw keys\n :raises: ValueError if keys in authorized_keys don't appear to be a raw\n key and can't be opened.\n "
]
|
def attach(self, to_linode, config=None):
    """
    Attaches this Volume to the given Linode.

    :param to_linode: The Linode (object or id) to attach this Volume to
    :param config: Optional Config (object or id) to attach under

    :raises UnexpectedResponseError: if the response contains no id
    :returns: True
    """
    result = self._client.post('{}/attach'.format(Volume.api_endpoint), model=self,
            data={
                "linode_id": to_linode.id if issubclass(type(to_linode), Base) else to_linode,
                "config": None if not config else config.id if issubclass(type(config), Base) else config,
            })

    if not 'id' in result:
        raise UnexpectedResponseError('Unexpected response when attaching volume!', json=result)

    self._populate(result)
    return True
"\n Attaches this Volume to the given Linode\n "
]
|
def detach(self):
    """
    Detaches this Volume if it is attached.

    :returns: True
    """
    detach_url = '{}/detach'.format(Volume.api_endpoint)
    self._client.post(detach_url, model=self)
    return True
"\n Detaches this Volume if it is attached\n "
]
|
def resize(self, size):
    """
    Resizes this Volume.

    :param size: The new size for this Volume

    :returns: True
    """
    # bug fix: model= and data= were previously passed to str.format()
    # instead of _client.post(), so the request carried no model or body
    result = self._client.post('{}/resize'.format(Volume.api_endpoint),
                               model=self, data={"size": size})

    # _populate takes the decoded response dict, consistent with
    # attach()/clone() (previously this accessed `result.json`)
    self._populate(result)
    return True
"\n Resizes this Volume\n "
]
|
def clone(self, label):
    """
    Clones this volume to a new volume in the same region with the given label.

    :param label: The label for the new volume.

    :raises UnexpectedResponseError: if the response contains no id
    :returns: The new volume object.
    """
    result = self._client.post('{}/clone'.format(Volume.api_endpoint),
                               model=self, data={'label': label})

    if not 'id' in result:
        # include the response body for debugging, consistent with attach()
        raise UnexpectedResponseError('Unexpected response cloning volume!',
                                      json=result)

    return Volume(self._client, result['id'], result)
"\n Clones this volume to a new volume in the same region with the given label\n\n :param label: The label for the new volume.\n\n :returns: The new volume object.\n "
]
|
def _get_raw_objects(self):
    """
    Fetches and caches the first page of raw objects for this tag.  As a side
    effect this creates the ``_raw_objects`` attribute, so repeat calls do
    not make duplicate requests.
    """
    if not hasattr(self, '_raw_objects'):
        response = self._client.get(type(self).api_endpoint, model=self)
        # cached here instead of __init__ to avoid a request on construction
        # pylint: disable=attribute-defined-outside-init
        self._raw_objects = response

    return self._raw_objects
"\n Helper function to populate the first page of raw objects for this tag.\n This has the side effect of creating the ``_raw_objects`` attribute of\n this object.\n "
]
|
def objects(self):
    """
    Returns a list of objects with this Tag, as a PaginatedList.  This list
    may contain any taggable object type.
    """
    data = self._get_raw_objects()

    return PaginatedList.make_paginated_list(data, self._client, TaggedObjectProxy,
                                             page_url=type(self).api_endpoint.format(**vars(self)))
"\n Returns a list of objects with this Tag. This list may contain any\n taggable object type.\n "
]
|
def make_instance(cls, id, client, parent_id=None, json=None):
    """
    Overrides Base's ``make_instance`` to allow dynamic creation of objects
    based on the type defined in the response json.

    :param cls: The class this was called on
    :param id: Here, the *type* of the tagged entity (looked up in CLASS_MAP),
               not a numeric id
    :param client: The client to use for this instance
    :param parent_id: The parent id for derived classes.  NOTE(review): this
                      argument is ignored and None is always passed on -
                      presumably tagged objects are never derived; confirm
    :param json: The JSON (envelope) to populate the instance with

    :returns: A new instance of the mapped type, or None for unrecognized types
    """
    make_cls = CLASS_MAP.get(id)  # in this case, ID is coming in as the type

    if make_cls is None:
        # we don't recognize this entity type - do nothing?
        return None

    # discard the envelope
    real_json = json['data']
    real_id = real_json['id']

    # make the real object type
    return Base.make(real_id, client, make_cls, parent_id=None, json=real_json)
"\n Overrides Base's ``make_instance`` to allow dynamic creation of objects\n based on the defined type in the response json.\n\n :param cls: The class this was called on\n :param id: The id of the instance to create\n :param client: The client to use for this instance\n :param parent_id: The parent id for derived classes\n :param json: The JSON to populate the instance with\n\n :returns: A new instance of this type, populated with json\n "
]
|
def resize(self, new_size):
    """
    Resizes this disk.  The Linode Instance this disk belongs to must have
    sufficient space available to accommodate the new size, and must be
    offline.

    **NOTE** If resizing a disk down, the filesystem on the disk must still
    fit on the new disk size.  You may need to resize the filesystem on the
    disk first before performing this action.

    :param new_size: The intended new size of the disk, in MB
    :type new_size: int

    :returns: True if the resize was initiated successfully.
    :rtype: bool
    """
    resize_url = '{}/resize'.format(Disk.api_endpoint)
    self._client.post(resize_url, model=self, data={"size": new_size})
    return True
"\n Resizes this disk. The Linode Instance this disk belongs to must have\n sufficient space available to accommodate the new size, and must be\n offline.\n\n **NOTE** If resizing a disk down, the filesystem on the disk must still\n fit on the new disk size. You may need to resize the filesystem on the\n disk first before performing this action.\n\n :param new_size: The intended new size of the disk, in MB\n :type new_size: int\n\n :returns: True if the resize was initiated successfully.\n :rtype: bool\n "
]
|
def _populate(self, json):
    """
    Map devices more nicely while populating: each slot of ``json['devices']``
    becomes a Disk or Volume instance (or None for empty slots), exposed as a
    MappedObject on ``self.devices``.
    """
    # local import to avoid a circular import at module load time
    from .volume import Volume

    DerivedBase._populate(self, json)

    devices = {}
    for device_index, device in json['devices'].items():
        if not device:
            devices[device_index] = None
            continue

        dev = None
        if 'disk_id' in device and device['disk_id']:  # this is a disk
            dev = Disk.make_instance(device['disk_id'], self._client,
                    parent_id=self.linode_id)
        else:
            dev = Volume.make_instance(device['volume_id'], self._client,
                    parent_id=self.linode_id)
        devices[device_index] = dev

    self._set('devices', MappedObject(**devices))
"\n Map devices more nicely while populating.\n "
]
|
def ips(self):
    """
    The IPs of this Instance, grouped by family and visibility.

    The ips related collection is not normalized like the others, so we have
    to make an ad-hoc MappedObject to return for its response:
      ips.ipv4.public / .private / .shared - lists of IPAddress
      ips.ipv6.slaac / .link_local         - IPAddress
      ips.ipv6.pools                       - list of IPv6Pool
    The result is cached on ``self._ips`` until invalidated.
    """
    if not hasattr(self, '_ips'):
        result = self._client.get("{}/ips".format(Instance.api_endpoint), model=self)

        if not "ipv4" in result:
            raise UnexpectedResponseError('Unexpected response loading IPs', json=result)

        v4pub = []
        for c in result['ipv4']['public']:
            i = IPAddress(self._client, c['address'], c)
            v4pub.append(i)

        v4pri = []
        for c in result['ipv4']['private']:
            i = IPAddress(self._client, c['address'], c)
            v4pri.append(i)

        shared_ips = []
        for c in result['ipv4']['shared']:
            i = IPAddress(self._client, c['address'], c)
            shared_ips.append(i)

        slaac = IPAddress(self._client, result['ipv6']['slaac']['address'],
                          result['ipv6']['slaac'])
        link_local = IPAddress(self._client, result['ipv6']['link_local']['address'],
                               result['ipv6']['link_local'])

        pools = []
        for p in result['ipv6']['global']:
            pools.append(IPv6Pool(self._client, p['range']))

        ips = MappedObject(**{
            "ipv4": {
                "public": v4pub,
                "private": v4pri,
                "shared": shared_ips,
            },
            "ipv6": {
                "slaac": slaac,
                "link_local": link_local,
                "pools": pools,
            },
        })

        self._set('_ips', ips)

    return self._ips
"\n The ips related collection is not normalized like the others, so we have to\n make an ad-hoc object to return for its response\n "
]
|
def available_backups(self):
    """
    The backups response contains what backups are available to be restored:
    the automatic backups plus the current and in-progress snapshots.  The
    result is cached on ``self._avail_backups`` until invalidated.
    """
    if not hasattr(self, '_avail_backups'):
        result = self._client.get("{}/backups".format(Instance.api_endpoint), model=self)

        if not 'automatic' in result:
            raise UnexpectedResponseError('Unexpected response loading available backups!', json=result)

        automatic = []
        for a in result['automatic']:
            cur = Backup(self._client, a['id'], self.id, a)
            automatic.append(cur)

        snap = None
        if result['snapshot']['current']:
            snap = Backup(self._client, result['snapshot']['current']['id'], self.id,
                          result['snapshot']['current'])

        psnap = None
        if result['snapshot']['in_progress']:
            psnap = Backup(self._client, result['snapshot']['in_progress']['id'], self.id,
                           result['snapshot']['in_progress'])

        self._set('_avail_backups', MappedObject(**{
            "automatic": automatic,
            "snapshot": {
                "current": snap,
                "in_progress": psnap,
            }
        }))

    return self._avail_backups
"\n The backups response contains what backups are available to be restored.\n "
]
|
def invalidate(self):
    """Clear out cached properties before delegating to Base.invalidate."""
    for cached in ('_avail_backups', '_ips'):
        if hasattr(self, cached):
            delattr(self, cached)

    Base.invalidate(self)
" Clear out cached properties "
]
|
def config_create(self, kernel=None, label=None, devices=[], disks=[],
                  volumes=[], **kwargs):
    """
    Creates a Linode Config with the given attributes.

    :param kernel: The kernel to boot with.
    :param label: The config label
    :param devices: A list of devices to assign to this config, in device
        index order.  Values must be of type Disk or Volume.  If this is
        given, you may not include disks or volumes.
    :param disks: The list of disks (or disk ids), starting at sda, to map
        to this config.
    :param volumes: The volumes (or volume ids), starting after the last
        disk, to map to this config.
    :param kwargs: Any other arguments accepted by the api.

    :raises ValueError: if both devices and disks/volumes are given, or if
        no devices are given at all
    :raises TypeError: if a device is neither a Disk nor a Volume
    :raises UnexpectedResponseError: if the response contains no id
    :returns: A new Linode Config
    """
    # local import to avoid a circular import at module load time
    from .volume import Volume

    # device slots are sda..sdh on kvm, xvda..xvdh otherwise
    hypervisor_prefix = 'sd' if self.hypervisor == 'kvm' else 'xvd'
    device_names = [hypervisor_prefix + string.ascii_lowercase[i] for i in range(0, 8)]
    device_map = {device_names[i]: None for i in range(0, len(device_names))}

    if devices and (disks or volumes):
        raise ValueError('You may not call config_create with "devices" and '
                         'either of "disks" or "volumes" specified!')

    if not devices:
        if not isinstance(disks, list):
            disks = [disks]
        if not isinstance(volumes, list):
            volumes = [volumes]

        devices = []

        # ids are coerced into unpopulated model objects
        for d in disks:
            if d is None:
                devices.append(None)
            elif isinstance(d, Disk):
                devices.append(d)
            else:
                devices.append(Disk(self._client, int(d), self.id))

        for v in volumes:
            if v is None:
                devices.append(None)
            elif isinstance(v, Volume):
                devices.append(v)
            else:
                devices.append(Volume(self._client, int(v)))

    if not devices:
        raise ValueError('Must include at least one disk or volume!')

    for i, d in enumerate(devices):
        if d is None:
            pass
        elif isinstance(d, Disk):
            device_map[device_names[i]] = {'disk_id': d.id }
        elif isinstance(d, Volume):
            device_map[device_names[i]] = {'volume_id': d.id }
        else:
            raise TypeError('Disk or Volume expected!')

    params = {
        'kernel': kernel.id if issubclass(type(kernel), Base) else kernel,
        'label': label if label else "{}_config_{}".format(self.label, len(self.configs)),
        'devices': device_map,
    }
    params.update(kwargs)

    result = self._client.post("{}/configs".format(Instance.api_endpoint), model=self, data=params)
    self.invalidate()

    if not 'id' in result:
        raise UnexpectedResponseError('Unexpected response creating config!', json=result)

    c = Config(self._client, result['id'], self.id, result)
    return c
"\n Creates a Linode Config with the given attributes.\n\n :param kernel: The kernel to boot with.\n :param label: The config label\n :param disks: The list of disks, starting at sda, to map to this config.\n :param volumes: The volumes, starting after the last disk, to map to this\n config\n :param devices: A list of devices to assign to this config, in device\n index order. Values must be of type Disk or Volume. If this is\n given, you may not include disks or volumes.\n :param **kwargs: Any other arguments accepted by the api.\n\n :returns: A new Linode Config\n "
]
|
def enable_backups(self):
    """
    Enable Backups for this Instance.  When enabled, we will automatically
    backup your Instance's data so that it can be restored at a later date.
    For more information on Instance's Backups service and pricing, see our
    `Backups Page`_

    .. _Backups Page: https://www.linode.com/backups

    :returns: True
    """
    enable_url = "{}/backups/enable".format(Instance.api_endpoint)
    self._client.post(enable_url, model=self)
    # cached state is now stale
    self.invalidate()
    return True
"\n Enable Backups for this Instance. When enabled, we will automatically\n backup your Instance's data so that it can be restored at a later date.\n For more information on Instance's Backups service and pricing, see our\n `Backups Page`_\n\n .. _Backups Page: https://www.linode.com/backups\n "
]
|
def ip_allocate(self, public=False):
    """
    Allocates a new :any:`IPAddress` for this Instance.  Additional public
    IPs require justification, and you may need to open a
    :any:`SupportTicket` before you can add one.  You may only have, at
    most, one private IP per Instance.

    :param public: If the new IP should be public or private.  Defaults to
                   private.
    :type public: bool

    :returns: The new IPAddress
    :rtype: IPAddress
    """
    result = self._client.post(
        "{}/ips".format(Instance.api_endpoint),
        model=self,
        data={"type": "ipv4", "public": public},
    )

    if 'address' not in result:
        raise UnexpectedResponseError('Unexpected response allocating IP!',
                                      json=result)

    return IPAddress(self._client, result['address'], result)
"\n Allocates a new :any:`IPAddress` for this Instance. Additional public\n IPs require justification, and you may need to open a :any:`SupportTicket`\n before you can add one. You may only have, at most, one private IP per\n Instance.\n\n :param public: If the new IP should be public or private. Defaults to\n private.\n :type public: bool\n\n :returns: The new IPAddress\n :rtype: IPAddress\n "
]
|
def rebuild(self, image, root_pass=None, authorized_keys=None, **kwargs):
    """
    Rebuilding an Instance deletes all existing Disks and Configs and deploys
    a new :any:`Image` to it.  This can be used to reset an existing
    Instance or to install an Image on an empty Instance.

    :param image: The Image to deploy to this Instance
    :type image: str or Image
    :param root_pass: The root password for the newly rebuilt Instance.  If
                      omitted, a password will be generated and returned.
    :type root_pass: str
    :param authorized_keys: The ssh public keys to install in the linode's
                            /root/.ssh/authorized_keys file.  Each entry may
                            be a single key, or a path to a file containing
                            the key.
    :type authorized_keys: list or str

    :returns: The newly generated password, if one was not provided
              (otherwise True)
    :rtype: str or bool
    """
    ret_pass = None
    if not root_pass:
        ret_pass = Instance.generate_root_password()
        root_pass = ret_pass

    authorized_keys = load_and_validate_keys(authorized_keys)

    params = {
        'image': image.id if issubclass(type(image), Base) else image,
        'root_pass': root_pass,
        'authorized_keys': authorized_keys,
    }
    params.update(kwargs)

    result = self._client.post('{}/rebuild'.format(Instance.api_endpoint), model=self, data=params)

    if not 'id' in result:
        raise UnexpectedResponseError('Unexpected response issuing rebuild!', json=result)

    # update ourself with the newly-returned information
    self._populate(result)

    if not ret_pass:
        return True
    else:
        return ret_pass
"\n Rebuilding an Instance deletes all existing Disks and Configs and deploys\n a new :any:`Image` to it. This can be used to reset an existing\n Instance or to install an Image on an empty Instance.\n\n :param image: The Image to deploy to this Instance\n :type image: str or Image\n :param root_pass: The root password for the newly rebuilt Instance. If\n omitted, a password will be generated and returned.\n :type root_pass: str\n :param authorized_keys: The ssh public keys to install in the linode's\n /root/.ssh/authorized_keys file. Each entry may\n be a single key, or a path to a file containing\n the key.\n :type authorized_keys: list or str\n\n :returns: The newly generated password, if one was not provided\n (otherwise True)\n :rtype: str or bool\n "
]
|
def mutate(self):
    """
    Upgrades this Instance to the latest generation type.

    :returns: True
    """
    mutate_url = '{}/mutate'.format(Instance.api_endpoint)
    self._client.post(mutate_url, model=self)
    return True
"\n Upgrades this Instance to the latest generation type\n "
]
|
def initiate_migration(self):
    """
    Initiates a pending migration that is already scheduled for this Linode
    Instance.
    """
    migrate_url = '{}/migrate'.format(Instance.api_endpoint)
    self._client.post(migrate_url, model=self)
"\n Initiates a pending migration that is already scheduled for this Linode\n Instance\n "
]
|
def clone(self, to_linode=None, region=None, service=None, configs=[], disks=[],
          label=None, group=None, with_backups=None):
    """
    Clones this linode into a new linode or into a new linode in the given
    region.

    :param to_linode: An existing Instance (or id) to clone into; mutually
                      exclusive with region
    :param region: The region (or id) to create the clone in; requires
                   ``service``
    :param service: The type/plan for the cloned Instance
    :param configs: Configs (or ids) to include in the clone
    :param disks: Disks (or ids) to include in the clone
    :param label: The label for the new Instance
    :param group: The group for the new Instance
    :param with_backups: Whether to clone with the backups service

    :raises ValueError: if both to_linode and region are given, or region
        is given without service
    :raises UnexpectedResponseError: if the response contains no id
    :returns: The new Instance
    """
    if to_linode and region:
        raise ValueError('You may only specify one of "to_linode" and "region"')

    if region and not service:
        raise ValueError('Specifying a region requires a "service" as well')

    if not isinstance(configs, list) and not isinstance(configs, PaginatedList):
        configs = [configs]
    if not isinstance(disks, list) and not isinstance(disks, PaginatedList):
        disks = [disks]

    # flatten model objects down to their ids
    cids = [ c.id if issubclass(type(c), Base) else c for c in configs ]
    dids = [ d.id if issubclass(type(d), Base) else d for d in disks ]

    params = {
        "linode_id": to_linode.id if issubclass(type(to_linode), Base) else to_linode,
        "region": region.id if issubclass(type(region), Base) else region,
        "type": service.id if issubclass(type(service), Base) else service,
        "configs": cids if cids else None,
        "disks": dids if dids else None,
        "label": label,
        "group": group,
        "with_backups": with_backups,
    }

    result = self._client.post('{}/clone'.format(Instance.api_endpoint), model=self, data=params)

    if not 'id' in result:
        raise UnexpectedResponseError('Unexpected response cloning Instance!', json=result)

    l = Instance(self._client, result['id'], result)
    return l
" Clones this linode into a new linode or into a new linode in the given region "
]
|
def stats(self):
    """
    Returns the JSON stats for this Instance.
    """
    # TODO - this would be nicer if we formatted the stats
    stats_url = '{}/stats'.format(Instance.api_endpoint)
    return self._client.get(stats_url, model=self)
"\n Returns the JSON stats for this Instance\n "
]
|
def stats_for(self, dt):
    """
    Returns the JSON stats for this Instance for the month containing the
    given datetime.

    :param dt: A datetime within the month to fetch stats for
    :type dt: datetime

    :raises TypeError: if dt is not a datetime object
    """
    # TODO - this would be nicer if we formatted the stats
    if not isinstance(dt, datetime):
        raise TypeError('stats_for requires a datetime object!')
    # bug fix: the URL previously omitted the instance endpoint and model
    # entirely, producing a request like "2019/05/stats/" instead of
    # "{api_endpoint}/stats/2019/05" (compare the sibling stats() method)
    return self._client.get('{}/stats/{}'.format(Instance.api_endpoint,
                                                 dt.strftime('%Y/%m')),
                            model=self)
"\n Returns stats for the month containing the given datetime\n "
]
|
def _populate(self, json):
    """
    Override the populate method to map user_defined_fields to fancy
    UserDefinedField values (text / select_one / select_many) and to wrap
    images in Image objects.
    """
    Base._populate(self, json)

    mapped_udfs = []
    for udf in self.user_defined_fields:
        t = UserDefinedFieldType.text
        choices = None
        # "oneof"/"manyof" carry a comma-separated list of allowed choices
        if hasattr(udf, 'oneof'):
            t = UserDefinedFieldType.select_one
            choices = udf.oneof.split(',')
        elif hasattr(udf, 'manyof'):
            t = UserDefinedFieldType.select_many
            choices = udf.manyof.split(',')

        mapped_udfs.append(UserDefinedField(udf.name,
                udf.label if hasattr(udf, 'label') else None,
                udf.example if hasattr(udf, 'example') else None,
                t, choices=choices))

    self._set('user_defined_fields', mapped_udfs)
    ndist = [ Image(self._client, d) for d in self.images ]
    self._set('images', ndist)
"\n Override the populate method to map user_defined_fields to\n fancy values\n "
]
|
def _populate(self, json):
    """
    Allows population of "from_date" from the returned "from" attribute,
    which is a reserved word in python.  Also populates "to_date" to be
    complete.
    """
    super(InvoiceItem, self)._populate(json)

    self.from_date = datetime.strptime(json['from'], DATE_FORMAT)
    self.to_date = datetime.strptime(json['to'], DATE_FORMAT)
"\n Allows population of \"from_date\" from the returned \"from\" attribute which\n is a reserved word in python. Also populates \"to_date\" to be complete.\n "
]
|
def reset_secret(self):
    """
    Resets the client secret for this client and returns the new value.

    :raises UnexpectedResponseError: if the response contains no id
    :returns: The new client secret
    """
    reset_url = "{}/reset_secret".format(OAuthClient.api_endpoint)
    result = self._client.post(reset_url, model=self)

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when resetting secret!', json=result)

    self._populate(result)
    return self.secret
"\n Resets the client secret for this client.\n "
]
|
def thumbnail(self, dump_to=None):
    """
    This returns binary data that represents a 128x128 image.  If dump_to is
    given, attempts to write the image to a file at the given location.

    :param dump_to: Optional path to write the image to

    :raises ApiError: if the response status is not 200
    :returns: The raw image bytes
    """
    headers = {
        "Authorization": "token {}".format(self._client.token)
    }

    result = requests.get('{}/{}/thumbnail'.format(self._client.base_url,
                          OAuthClient.api_endpoint.format(id=self.id)),
                          headers=headers)

    if not result.status_code == 200:
        raise ApiError('No thumbnail found for OAuthClient {}'.format(self.id))

    if dump_to:
        with open(dump_to, 'wb+') as f:
            f.write(result.content)
    return result.content
"\n This returns binary data that represents a 128x128 image.\n If dump_to is given, attempts to write the image to a file\n at the given location.\n "
]
|
def set_thumbnail(self, thumbnail):
    """
    Sets the thumbnail for this OAuth Client.  If thumbnail is bytes,
    uploads it as a png.  Otherwise, assumes thumbnail is a path to the
    thumbnail and reads it in as bytes before uploading.

    :param thumbnail: Raw image bytes, or a path to the image file

    :raises ApiError: if the response status is not 200
    :returns: True
    """
    headers = {
        "Authorization": "token {}".format(self._client.token),
        "Content-type": "image/png",
    }

    # TODO this check needs to be smarter - python2 doesn't do it right
    if not isinstance(thumbnail, bytes):
        with open(thumbnail, 'rb') as f:
            thumbnail = f.read()

    result = requests.put('{}/{}/thumbnail'.format(self._client.base_url,
                          OAuthClient.api_endpoint.format(id=self.id)),
                          headers=headers, data=thumbnail)

    if not result.status_code == 200:
        errors = []
        j = result.json()
        if 'errors' in j:
            errors = [ e['reason'] for e in j['errors'] ]
        raise ApiError('{}: {}'.format(result.status_code, errors), json=j)

    return True
"\n Sets the thumbnail for this OAuth Client. If thumbnail is bytes,\n uploads it as a png. Otherwise, assumes thumbnail is a path to the\n thumbnail and reads it in as bytes before uploading.\n "
]
|
def grants(self):
    """
    Retrieves the grants for this user.  If the user is unrestricted, this
    will result in an ApiError.  This is smart, and will only fetch from the
    api once unless the object is invalidated (cached on ``self._grants``).

    :returns: The grants for this user.
    :rtype: linode.objects.account.UserGrants
    """
    # local import to avoid a circular import at module load time
    from linode_api4.objects.account import UserGrants
    if not hasattr(self, '_grants'):
        resp = self._client.get(UserGrants.api_endpoint.format(username=self.username))

        grants = UserGrants(self._client, self.username, resp)
        self._set('_grants', grants)

    return self._grants
"\n Retrieves the grants for this user. If the user is unrestricted, this\n will result in an ApiError. This is smart, and will only fetch from the\n api once unless the object is invalidated.\n\n :returns: The grants for this user.\n :rtype: linode.objects.account.UserGrants\n "
]
|
def entity(self):
    """
    Returns the object this grant is for.  The object's type depends on the
    type of object this grant is applied to, and the object returned is not
    populated (accessing its attributes will trigger an api request).

    :raises ValueError: if this grant's class is not a non-derived subclass
        of Base
    :returns: This grant's entity
    :rtype: Linode, NodeBalancer, Domain, StackScript, Volume, or Longview
    """
    # there are no grants for derived types, so this shouldn't happen
    if not issubclass(self.cls, Base) or issubclass(self.cls, DerivedBase):
        raise ValueError("Cannot get entity for non-base-class {}".format(self.cls))

    return self.cls(self._client, self.id)
"\n Returns the object this grant is for. The objects type depends on the\n type of object this grant is applied to, and the object returned is\n not populated (accessing its attributes will trigger an api request).\n\n :returns: This grant's entity\n :rtype: Linode, NodeBalancer, Domain, StackScript, Volume, or Longview\n "
]
|
def make_list(json_arr, client, cls, parent_id=None):
    """
    Returns a list of Populated objects of the given class type.  This
    should not be called outside of the :any:`LinodeClient` class.

    :param json_arr: The array of JSON data to make into a list
    :param client: The LinodeClient to pass to new objects
    :param cls: The model class to instantiate
    :param parent_id: The parent id for derived objects

    :returns: A list of models from the JSON
    """
    alt_id = getattr(cls, 'id_attribute', None)

    objects = []
    for entry in json_arr:
        # prefer the canonical "id" key, fall back to the class's declared
        # id_attribute, and skip entries with no usable identifier
        if 'id' in entry:
            obj_id = entry['id']
        elif alt_id is not None and alt_id in entry:
            obj_id = entry[alt_id]
        else:
            continue

        objects.append(cls.make_instance(obj_id, client,
                                         parent_id=parent_id, json=entry))
    return objects
"\n Returns a list of Populated objects of the given class type. This\n should not be called outside of the :any:`LinodeClient` class.\n\n :param json_arr: The array of JSON data to make into a list\n :param client: The LinodeClient to pass to new objects\n :param parent_id: The parent id for derived objects\n\n :returns: A list of models from the JSON\n "
]
|
def make_paginated_list(json, client, cls, parent_id=None, page_url=None,
                        filters=None):
    """
    Returns a PaginatedList populated with the first page of data provided,
    and the ability to load additional pages.  This should not be called
    outside of the :any:`LinodeClient` class.

    :param json: The JSON list to use as the first page
    :param client: A LinodeClient to use to load additional pages
    :param cls: The class to instantiate for objects
    :param parent_id: The parent ID for derived objects
    :param page_url: The URL to use when loading more pages
    :param filters: The filters used when making the call that generated
                    this list.  If not provided, this will fail when loading
                    additional pages.

    :returns: An instance of PaginatedList representing the collection whose
              first page is json
    """
    first_page = PaginatedList.make_list(json["data"], client, cls,
                                         parent_id=parent_id)
    return PaginatedList(client, page_url, page=first_page,
                         max_pages=json['pages'],
                         total_items=json['results'],
                         parent_id=parent_id, filters=filters)
"\n Returns a PaginatedList populated with the first page of data provided,\n and the ability to load additional pages. This should not be called\n outside of the :any:`LinodeClient` class.\n\n :param json: The JSON list to use as the first page\n :param client: A LinodeClient to use to load additional pages\n :param parent_id: The parent ID for derived objects\n :param page_url: The URL to use when loading more pages\n :param cls: The class to instantiate for objects\n :param filters: The filters used when making the call that generated\n this list. If not provided, this will fail when\n loading additional pages.\n\n :returns: An instance of PaginatedList that will represent the entire\n collection whose first page is json\n "
]
|
def save(self):
    """
    Send this object's mutable values to the server in a PUT request.

    :returns: True on success, False if the response contained an error
    """
    response = self._client.put(type(self).api_endpoint, model=self,
                                data=self._serialize())
    return 'error' not in response
"\n Send this object's mutable values to the server in a PUT request\n "
]
|
def delete(self):
    """
    Sends a DELETE request for this object, invalidating local state on
    success.

    :returns: True on success, False if the response contained an error
    """
    response = self._client.delete(type(self).api_endpoint, model=self)
    if 'error' in response:
        return False
    # drop cached values so stale data cannot be read after deletion
    self.invalidate()
    return True
"\n Sends a DELETE request for this object\n "
]
|
def invalidate(self):
    """
    Invalidates all non-identifier Properties this object has locally,
    causing the next access to re-fetch them from the server.
    """
    props = type(self).properties
    for name, prop in props.items():
        if not prop.identifier:
            self._set(name, None)

    self._set('_populated', False)
"\n Invalidates all non-identifier Properties this object has locally,\n causing the next access to re-fetch them from the server\n "
]
|
Please provide a description of the function:def _serialize(self):
result = { a: getattr(self, a) for a in type(self).properties
if type(self).properties[a].mutable }
for k, v in result.items():
if isinstance(v, Base):
result[k] = v.id
return result | [
"\n A helper method to build a dict of all mutable Properties of\n this object\n "
]
|
Please provide a description of the function:def _api_get(self):
json = self._client.get(type(self).api_endpoint, model=self)
self._populate(json) | [
"\n A helper method to GET this object from the server\n "
]
|
def _populate(self, json):
    """
    A helper method that, given a JSON object representing this object,
    assigns values based on the properties dict and the attributes of its
    Properties: relationship properties become model instances, plain
    dicts/lists become MappedObjects, and datetime properties are parsed
    per DATE_FORMAT.  Sets ``_populated`` and ``_last_updated`` when done.
    """
    if not json:
        return

    # hide the raw JSON away in case someone needs it
    self._set('_raw_json', json)

    for key in json:
        # only non-identifier declared properties are populated
        if key in (k for k in type(self).properties.keys()
                   if not type(self).properties[k].identifier):
            if type(self).properties[key].relationship \
                and not json[key] is None:
                if isinstance(json[key], list):
                    objs = []
                    for d in json[key]:
                        if not 'id' in d:
                            continue
                        new_class = type(self).properties[key].relationship
                        obj = new_class.make_instance(d['id'],
                                getattr(self,'_client'))
                        if obj:
                            obj._populate(d)
                        objs.append(obj)
                    self._set(key, objs)
                else:
                    # a related object may come back as a nested dict or as
                    # a bare id
                    if isinstance(json[key], dict):
                        related_id = json[key]['id']
                    else:
                        related_id = json[key]
                    new_class = type(self).properties[key].relationship
                    obj = new_class.make_instance(related_id, getattr(self,'_client'))
                    if obj and isinstance(json[key], dict):
                        obj._populate(json[key])
                    self._set(key, obj)
            elif type(self).properties[key].slug_relationship \
                and not json[key] is None:
                # create an object of the expected type with the given slug
                self._set(key, type(self).properties[key].slug_relationship(self._client, json[key]))
            elif type(json[key]) is dict:
                self._set(key, MappedObject(**json[key]))
            elif type(json[key]) is list:
                # we're going to use MappedObject's behavior with lists to
                # expand these, then grab the resulting value to set
                mapping = MappedObject(_list=json[key])
                self._set(key, mapping._list)  # pylint: disable=no-member
            elif type(self).properties[key].is_datetime:
                try:
                    t = time.strptime(json[key], DATE_FORMAT)
                    self._set(key, datetime.fromtimestamp(time.mktime(t)))
                except:
                    #TODO - handle this better (or log it?)
                    self._set(key, json[key])
            else:
                self._set(key, json[key])

    self._set('_populated', True)
    self._set('_last_updated', datetime.now())
"\n A helper method that, given a JSON object representing this object,\n assigns values based on the properties dict and the attributes of\n its Properties.\n "
]
|
def make(id, client, cls, parent_id=None, json=None):
    """
    Instantiates ``cls`` with the given id and client, passing the parent
    id through only for derived types.

    :param id: The id of the object to create
    :param client: The LinodeClient to give the new object
    :param cls: The class type to instantiate
    :param parent_id: The parent id, used only for DerivedBase subclasses
    :param json: Optional JSON to populate the new object with

    :returns: An instance of cls with the given id
    """
    from .dbase import DerivedBase

    if issubclass(cls, DerivedBase):
        return cls(client, id, parent_id, json)
    return cls(client, id, json)
"\n Makes an api object based on an id and class.\n\n :param id: The id of the object to create\n :param client: The LinodeClient to give the new object\n :param cls: The class type to instantiate\n :param parent_id: The parent id for derived classes\n :param json: The JSON to use to populate the new class\n\n :returns: An instance of cls with the given id\n "
]
|
def make_instance(cls, id, client, parent_id=None, json=None):
    """
    Makes and returns an instance of the class this is called on,
    delegating to :any:`Base.make`.

    :param id: The id of the instance to create
    :param client: The client to use for this instance
    :param parent_id: The parent id for derived classes
    :param json: The JSON to populate the instance with

    :returns: A new, populated instance of this type
    """
    return Base.make(id, client, cls, parent_id=parent_id, json=json)
"\n Makes an instance of the class this is called on and returns it.\n\n The intended usage is:\n instance = Linode.make_instance(123, client, json=response)\n\n :param cls: The class this was called on.\n :param id: The id of the instance to create\n :param client: The client to use for this instance\n :param parent_id: The parent id for derived classes\n :param json: The JSON to populate the instance with\n\n :returns: A new instance of this type, populated with json\n "
]
|
def or_(a, b):
    """
    Combines two :any:`Filters<Filter>` with an "or" operation, matching
    results that match either of the given filters.

    :param a: The first filter to consider.
    :type a: Filter
    :param b: The second filter to consider.
    :type b: Filter

    :returns: A filter that matches either a or b
    :rtype: Filter
    :raises TypeError: If either argument is not a Filter.
    """
    if not isinstance(a, Filter) or not isinstance(b, Filter):
        # give the caller an actionable message instead of a bare TypeError
        raise TypeError('Can only combine Filter objects with or_!')
    return a.__or__(b)
"\n Combines two :any:`Filters<Filter>` with an \"or\" operation, matching\n any results that match any of the given filters.\n\n :param a: The first filter to consider.\n :type a: Filter\n :param b: The second filter to consider.\n :type b: Filter\n\n :returns: A filter that matches either a or b\n :rtype: Filter\n "
]
|
def to(self, linode):
    """
    Helper for building an ip-assign request; intended only for use with
    ips_assign. Returns the assignment dict pairing this address with the
    given Instance.
    """
    from .linode import Instance

    if isinstance(linode, Instance):
        return {"address": self.address, "linode_id": linode.id}
    raise ValueError("IP Address can only be assigned to a Linode!")
"\n This is a helper method for ip-assign, and should not be used outside\n of that context. It's used to cleanly build an IP Assign request with\n pretty python syntax.\n "
]
|
def stackscripts(self, *filters, **kwargs):
    """
    Returns a list of StackScripts, both public and private, optionally
    filtered. Pass ``mine_only=True`` to return only your own private
    StackScripts.

    :param filters: Any number of filters to apply to this query.
    :param mine_only: If True, returns only private StackScripts
    :type mine_only: bool

    :returns: A PaginatedList of StackScripts matching the query.
    """
    # python2 can't handle *args and a single keyword argument, so accept
    # mine_only through kwargs and pop it off
    mine_only = kwargs.pop('mine_only', False)
    if mine_only:
        mine_filter = Filter({"mine": True})
        filters = list(filters)
        if filters:
            filters[0] = filters[0] & mine_filter
        else:
            filters = [mine_filter]

    if kwargs:
        raise TypeError("stackscripts() got unexpected keyword argument '{}'".format(kwargs.popitem()[0]))

    return self.client._get_and_filter(StackScript, *filters)
"\n Returns a list of :any:`StackScripts<StackScript>`, both public and\n private. You may filter this query to return only\n :any:`StackScripts<StackScript>` that match certain criteria. You may\n also request only your own private :any:`StackScripts<StackScript>`::\n\n my_stackscripts = client.linode.stackscripts(mine_only=True)\n\n :param filters: Any number of filters to apply to this query.\n :param mine_only: If True, returns only private StackScripts\n :type mine_only: bool\n\n :returns: A list of StackScripts matching the query.\n :rtype: PaginatedList of StackScript\n "
]
|
def instance_create(self, ltype, region, image=None,
                    authorized_keys=None, **kwargs):
    """
    Creates a new Linode Instance of the given type in the given region,
    optionally deployed from an Image, a StackScript (via the
    ``stackscript`` kwarg), or a Backup (via the ``backup`` kwarg).

    :param ltype: The Instance Type to create (object or id)
    :param region: The Region to create the Instance in (object or id)
    :param image: The Image to deploy, if any. If given without a
                  ``root_pass`` kwarg, a root password is generated and
                  returned alongside the new Instance.
    :param authorized_keys: ssh public keys, or paths to key files, to
                            install on the new Instance
    :param kwargs: Additional creation fields; ``stackscript`` and
                   ``backup`` are translated to their ``*_id`` API forms.

    :returns: The new Instance, or a (Instance, password) tuple when a
              root password was generated.
    :raises UnexpectedResponseError: If the API response lacks an "id".
    """
    ret_pass = None
    # generate a root password only when deploying an image without one
    if image and not 'root_pass' in kwargs:
        ret_pass = Instance.generate_root_password()
        kwargs['root_pass'] = ret_pass

    authorized_keys = load_and_validate_keys(authorized_keys)

    if "stackscript" in kwargs:
        # translate stackscripts
        kwargs["stackscript_id"] = (kwargs["stackscript"].id if issubclass(type(kwargs["stackscript"]), Base)
                                    else kwargs["stackscript"])
        del kwargs["stackscript"]

    if "backup" in kwargs:
        # translate backups
        kwargs["backup_id"] = (kwargs["backup"].id if issubclass(type(kwargs["backup"]), Base)
                               else kwargs["backup"])
        del kwargs["backup"]

    params = {
        'type': ltype.id if issubclass(type(ltype), Base) else ltype,
        'region': region.id if issubclass(type(region), Base) else region,
        'image': (image.id if issubclass(type(image), Base) else image) if image else None,
        'authorized_keys': authorized_keys,
    }
    params.update(kwargs)

    result = self.client.post('/linode/instances', data=params)

    if not 'id' in result:
        raise UnexpectedResponseError('Unexpected response when creating linode!', json=result)

    l = Instance(self.client, result['id'], result)
    if not ret_pass:
        return l
    return l, ret_pass
"\n Creates a new Linode Instance. This function has several modes of operation:\n\n **Create an Instance from an Image**\n\n To create an Instance from an :any:`Image`, call `instance_create` with\n a :any:`Type`, a :any:`Region`, and an :any:`Image`. All three of\n these fields may be provided as either the ID or the appropriate object.\n In this mode, a root password will be generated and returned with the\n new Instance object. For example::\n\n new_linode, password = client.linode.instance_create(\n \"g6-standard-2\",\n \"us-east\",\n image=\"linode/debian9\")\n\n ltype = client.linode.types().first()\n region = client.regions().first()\n image = client.images().first()\n\n another_linode, password = client.linode.instance_create(\n ltype,\n region,\n image=image)\n\n **Create an Instance from StackScript**\n\n When creating an Instance from a :any:`StackScript`, an :any:`Image` that\n the StackScript support must be provided.. You must also provide any\n required StackScript data for the script's User Defined Fields.. For\n example, if deploying `StackScript 10079`_ (which deploys a new Instance\n with a user created from keys on `github`_::\n\n stackscript = StackScript(client, 10079)\n\n new_linode, password = client.linode.instance_create(\n \"g6-standard-2\",\n \"us-east\",\n image=\"linode/debian9\",\n stackscript=stackscript,\n stackscript_data={\"gh_username\": \"example\"})\n\n In the above example, \"gh_username\" is the name of a User Defined Field\n in the chosen StackScript. For more information on StackScripts, see\n the `StackScript guide`_.\n\n .. _`StackScript 10079`: https://www.linode.com/stackscripts/view/10079\n .. _`github`: https://github.com\n .. _`StackScript guide`: https://www.linode.com/docs/platform/stackscripts/\n\n **Create an Instance from a Backup**\n\n To create a new Instance by restoring a :any:`Backup` to it, provide a\n :any:`Type`, a :any:`Region`, and the :any:`Backup` to restore. 
You\n may provide either IDs or objects for all of these fields::\n\n existing_linode = Instance(client, 123)\n snapshot = existing_linode.available_backups.snapshot.current\n\n new_linode = client.linode.instance_create(\n \"g6-standard-2\",\n \"us-east\",\n backup=snapshot)\n\n **Create an empty Instance**\n\n If you want to create an empty Instance that you will configure manually,\n simply call `instance_create` with a :any:`Type` and a :any:`Region`::\n\n empty_linode = client.linode.instance_create(\"g6-standard-2\", \"us-east\")\n\n When created this way, the Instance will not be booted and cannot boot\n successfully until disks and configs are created, or it is otherwise\n configured.\n\n :param ltype: The Instance Type we are creating\n :type ltype: str or Type\n :param region: The Region in which we are creating the Instance\n :type region: str or Region\n :param image: The Image to deploy to this Instance. If this is provided\n and no root_pass is given, a password will be generated\n and returned along with the new Instance.\n :type image: str or Image\n :param stackscript: The StackScript to deploy to the new Instance. If\n provided, \"image\" is required and must be compatible\n with the chosen StackScript.\n :type stackscript: int or StackScript\n :param stackscript_data: Values for the User Defined Fields defined in\n the chosen StackScript. Does nothing if\n StackScript is not provided.\n :type stackscript_data: dict\n :param backup: The Backup to restore to the new Instance. May not be\n provided if \"image\" is given.\n :type backup: int of Backup\n :param authorized_keys: The ssh public keys to install in the linode's\n /root/.ssh/authorized_keys file. 
Each entry may\n be a single key, or a path to a file containing\n the key.\n :type authorized_keys: list or str\n :param label: The display label for the new Instance\n :type label: str\n :param group: The display group for the new Instance\n :type group: str\n :param booted: Whether the new Instance should be booted. This will\n default to True if the Instance is deployed from an Image\n or Backup.\n :type booted: bool\n\n :returns: A new Instance object, or a tuple containing the new Instance and\n the generated password.\n :rtype: Instance or tuple(Instance, str)\n :raises ApiError: If contacting the API fails\n :raises UnexpectedResponseError: If the API resposne is somehow malformed.\n This usually indicates that you are using\n an outdated library.\n "
]
|
def stackscript_create(self, label, script, images, desc=None, public=False, **kwargs):
    """
    Creates a new :any:`StackScript` on your account.

    :param label: The label for this StackScript.
    :type label: str
    :param script: The script text, or a path to a file containing it.
                   Script bodies must begin with a shebang (#!).
    :type script: str
    :param images: The Image(s) this StackScript supports - a list of
                   Images (or ids), a PaginatedList of Images, a single
                   Image, or a single image id.
    :param desc: A description for this StackScript.
    :type desc: str
    :param public: Whether this StackScript is public. Defaults to False.
    :type public: bool

    :returns: The new StackScript
    :rtype: StackScript
    :raises ValueError: If images or script are not usable.
    :raises UnexpectedResponseError: If the API response lacks an "id".
    """
    # use isinstance instead of `type(x) is` so subclasses are accepted too
    if isinstance(images, (list, PaginatedList)):
        image_list = [d.id if issubclass(type(d), Base) else d for d in images]
    elif isinstance(images, Image):
        image_list = [images.id]
    elif isinstance(images, str):
        image_list = [images]
    else:
        raise ValueError('images must be a list of Images or a single Image')

    script_body = script
    if not script.startswith("#!"):
        # it doesn't look like a stackscript body, let's see if it's a file
        import os
        if os.path.isfile(script):
            with open(script) as f:
                script_body = f.read()
        else:
            raise ValueError("script must be the script text or a path to a file")

    params = {
        "label": label,
        "images": image_list,
        "is_public": public,
        "script": script_body,
        "description": desc if desc else '',
    }
    params.update(kwargs)

    result = self.client.post('/linode/stackscripts', data=params)

    if not 'id' in result:
        raise UnexpectedResponseError('Unexpected response when creating StackScript!', json=result)

    return StackScript(self.client, result['id'], result)
"\n Creates a new :any:`StackScript` on your account.\n\n :param label: The label for this StackScript.\n :type label: str\n :param script: The script to run when an :any:`Instance` is deployed with\n this StackScript. Must begin with a shebang (#!).\n :type script: str\n :param images: A list of :any:`Images<Image>` that this StackScript\n supports. Instances will not be deployed from this\n StackScript unless deployed from one of these Images.\n :type images: list of Image\n :param desc: A description for this StackScript.\n :type desc: str\n :param public: Whether this StackScript is public. Defaults to False.\n Once a StackScript is made public, it may not be set\n back to private.\n :type public: bool\n\n :returns: The new StackScript\n :rtype: StackScript\n "
]
|
def token_create(self, label=None, expiry=None, scopes=None, **kwargs):
    """
    Creates and returns a new Personal Access Token.

    :param label: An optional label for the token.
    :param expiry: When the token expires; a datetime is serialized to
                   the ISO-like format the API expects.
    :param scopes: The OAuth scopes to grant the token.

    :returns: The new PersonalAccessToken.
    :raises UnexpectedResponseError: If the API response lacks an "id".
    """
    if label:
        kwargs['label'] = label
    if expiry:
        if isinstance(expiry, datetime):
            # the API wants a formatted string, not a datetime
            expiry = datetime.strftime(expiry, "%Y-%m-%dT%H:%M:%S")
        kwargs['expiry'] = expiry
    if scopes:
        kwargs['scopes'] = scopes

    result = self.client.post('/profile/tokens', data=kwargs)

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when creating Personal Access '
                                      'Token!', json=result)

    return PersonalAccessToken(self.client, result['id'], result)
"\n Creates and returns a new Personal Access Token\n "
]
|
def ssh_key_upload(self, key, label):
    """
    Uploads a new SSH Public Key to your profile for use in later Linode
    deployments.

    :param key: The ssh key text, or a path to a readable key file.
    :type key: str
    :param label: The name to give this key; purely aesthetic.
    :type label: str

    :returns: The newly uploaded SSHKey.
    :raises ValueError: If the value is neither a valid key nor a path to one.
    :raises UnexpectedResponseError: If the API response lacks an "id".
    """
    if not key.startswith(SSH_KEY_TYPES):
        # not raw key material - maybe it's a path to a key file
        candidate = os.path.expanduser(key)
        if os.path.isfile(candidate):
            with open(candidate) as keyfile:
                key = keyfile.read().strip()
        if not key.startswith(SSH_KEY_TYPES):
            raise ValueError('Invalid SSH Public Key')

    result = self.client.post('/profile/sshkeys', data={
        'ssh_key': key,
        'label': label,
    })

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when uploading SSH Key!',
                                      json=result)

    return SSHKey(self.client, result['id'], result)
"\n Uploads a new SSH Public Key to your profile This key can be used in\n later Linode deployments.\n\n :param key: The ssh key, or a path to the ssh key. If a path is provided,\n the file at the path must exist and be readable or an exception\n will be thrown.\n :type key: str\n :param label: The name to give this key. This is purely aesthetic.\n :type label: str\n\n :returns: The newly uploaded SSH Key\n :rtype: SSHKey\n :raises ValueError: If the key provided does not appear to be valid, and\n does not appear to be a path to a valid key.\n "
]
|
def client_create(self, label=None):
    """
    Creates a new LongviewClient, optionally with a given label.

    :param label: The label for the new client. If None, a default label
                  based on the new client's ID will be used.

    :returns: A new LongviewClient
    :raises ApiError: If a non-200 status code is returned
    :raises UnexpectedResponseError: If the returned data from the api does
                                     not look as expected.
    """
    result = self.client.post('/longview/clients', data={
        "label": label
    })

    if not 'id' in result:
        # error message typo fixed ("Longivew" -> "Longview")
        raise UnexpectedResponseError('Unexpected response when creating Longview '
                                      'Client!', json=result)

    return LongviewClient(self.client, result['id'], result)
"\n Creates a new LongviewClient, optionally with a given label.\n\n :param label: The label for the new client. If None, a default label based\n on the new client's ID will be used.\n\n :returns: A new LongviewClient\n\n :raises ApiError: If a non-200 status code is returned\n :raises UnexpectedResponseError: If the returned data from the api does\n not look as expected.\n "
]
|
def events_mark_seen(self, event):
    """
    Marks the given event as the last one seen. Accepts either an event id
    or an Event object, whose id will be used.
    """
    if isinstance(event, int):
        last_seen = event
    else:
        last_seen = event.id
    self.client.post('{}/seen'.format(Event.api_endpoint),
                     model=Event(self.client, last_seen))
"\n Marks event as the last event we have seen. If event is an int, it is treated\n as an event_id, otherwise it should be an event object whose id will be used.\n "
]
|
def settings(self):
    """
    Returns the account settings data for this account. This is not a
    listing endpoint.

    :returns: The AccountSettings for this account.
    :raises UnexpectedResponseError: If the response lacks "managed".
    """
    result = self.client.get('/account/settings')

    if 'managed' not in result:
        raise UnexpectedResponseError('Unexpected response when getting account settings!',
                                      json=result)

    return AccountSettings(self.client, result['managed'], result)
"\n Returns the account settings data for this account. This is not a\n listing endpoint.\n "
]
|
def oauth_client_create(self, name, redirect_uri, **kwargs):
    """
    Creates a new OAuth Client on this account and returns it.

    :param name: The label for the new client.
    :param redirect_uri: The redirect URI registered for the client.

    :returns: The new OAuthClient.
    :raises UnexpectedResponseError: If the API response lacks an "id".
    """
    data = {
        "label": name,
        "redirect_uri": redirect_uri,
    }
    data.update(kwargs)

    result = self.client.post('/account/oauth-clients', data=data)

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when creating OAuth Client!',
                                      json=result)

    return OAuthClient(self.client, result['id'], result)
"\n Make a new OAuth Client and return it\n "
]
|
def transfer(self):
    """
    Returns a MappedObject containing the account's transfer pool data.

    :raises UnexpectedResponseError: If the response does not look like
                                     transfer pool data.
    """
    result = self.client.get('/account/transfer')

    if not 'used' in result:
        # include the response body for debugging, matching sibling methods
        raise UnexpectedResponseError('Unexpected response when getting Transfer Pool!',
                                      json=result)

    return MappedObject(**result)
"\n Returns a MappedObject containing the account's transfer pool data\n "
]
|
def user_create(self, email, username, restricted=True):
    """
    Creates a new user on your account. Restricted users must be granted
    access to parts of the account before they can do anything; unrestricted
    users can immediately manage everything. The new user receives an email
    inviting them to set up their password.

    :param email: The new user's email address.
    :type email: str
    :param username: The new user's unique username, used to log in.
    :type username: str
    :param restricted: Whether the new user starts with no grants.
                       Defaults to True.
    :type restricted: bool

    :returns: The new User.
    :rtype: User
    :raises UnexpectedResponseError: If the response is missing expected fields.
    """
    params = {
        "email": email,
        "username": username,
        "restricted": restricted,
    }
    result = self.client.post('/account/users', data=params)

    # the original check (`not 'email' and 'restricted' and 'username' in result`)
    # was always False due to operator precedence, so malformed responses were
    # never caught; verify every expected field is actually present
    if not all(field in result for field in ('email', 'restricted', 'username')):
        raise UnexpectedResponseError('Unexpected response when creating user!', json=result)

    return User(self.client, result['username'], result)
"\n Creates a new user on your account. If you create an unrestricted user,\n they will immediately be able to access everything on your account. If\n you create a restricted user, you must grant them access to parts of your\n account that you want to allow them to manage (see :any:`User.grants` for\n details).\n\n The new user will receive an email inviting them to set up their password.\n This must be completed before they can log in.\n\n :param email: The new user's email address. This is used to finish setting\n up their user account.\n :type email: str\n :param username: The new user's unique username. They will use this username\n to log in.\n :type username: str\n :param restricted: If True, the new user must be granted access to parts of\n the account before they can do anything. If False, the\n new user will immediately be able to manage the entire\n account. Defaults to True.\n :type restricted: True\n\n :returns The new User.\n :rtype: User\n "
]
|
def ips_assign(self, region, *assignments):
    """
    Redistributes IPAddresses within a single region. Each assignment must
    be a dict with "address" and "linode_id" keys (see :any:`IPAddress.to`).

    :param region: The Region in which the assignments take place (object or id).
    :param assignments: Any number of assignment dicts to apply.
    :raises ValueError: If an assignment is missing a required key.
    """
    for assignment in assignments:
        if 'address' not in assignment or 'linode_id' not in assignment:
            raise ValueError("Invalid assignment: {}".format(assignment))

    if isinstance(region, Region):
        region = region.id

    self.client.post('/networking/ipv4/assign', data={
        "region": region,
        "assignments": list(assignments),
    })
"\n Redistributes :any:`IP Addressees<IPAddress>` within a single region.\n This function takes a :any:`Region` and a list of assignments to make,\n then requests that the assignments take place. If any :any:`Instance`\n ends up without a public IP, or with more than one private IP, all of\n the assignments will fail.\n\n Example usage::\n\n linode1 = Instance(client, 123)\n linode2 = Instance(client, 456)\n\n # swap IPs between linodes 1 and 2\n client.networking.assign_ips(linode1.region,\n linode1.ips.ipv4.public[0].to(linode2),\n linode2.ips.ipv4.public[0].to(linode1))\n\n :param region: The Region in which the assignments should take place.\n All Instances and IPAddresses involved in the assignment\n must be within this region.\n :type region: str or Region\n :param assignments: Any number of assignments to make. See\n :any:`IPAddress.to` for details on how to construct\n assignments.\n :type assignments: dct\n "
]
|
def ip_allocate(self, linode, public=True):
    """
    Allocates a new IPv4 address to an Instance you own. Additional IPs must
    be requested by opening a support ticket first.

    :param linode: The Instance to allocate the new IP for (object or id).
    :param public: If True, allocate a public IP address. Defaults to True.

    :returns: The new IPAddress.
    :raises UnexpectedResponseError: If the response lacks an "address".
    """
    linode_id = linode.id if isinstance(linode, Base) else linode

    result = self.client.post('/networking/ipv4/', data={
        "linode_id": linode_id,
        "type": "ipv4",
        "public": public,
    })

    if 'address' not in result:
        raise UnexpectedResponseError('Unexpected response when adding IPv4 address!',
                                      json=result)

    return IPAddress(self.client, result['address'], result)
"\n Allocates an IP to a Instance you own. Additional IPs must be requested\n by opening a support ticket first.\n\n :param linode: The Instance to allocate the new IP for.\n :type linode: Instance or int\n :param public: If True, allocate a public IP address. Defaults to True.\n :type public: bool\n\n :returns: The new IPAddress\n :rtype: IPAddress\n "
]
|
def shared_ips(self, linode, *ips):
    """
    Shares the given IPAddresses with the provided Instance, letting it
    bring up addresses it does not own.

    :param linode: The Instance to share the addresses with (object or id).
    :param ips: Any number of addresses to share (strings or IPAddress objects).
    """
    if not isinstance(linode, Instance):
        # promote a bare id to an Instance object
        linode = Instance(self.client, linode)

    addresses = []
    for ip in ips:
        if isinstance(ip, str):
            addresses.append(ip)
        elif isinstance(ip, IPAddress):
            addresses.append(ip.address)
        else:
            addresses.append(str(ip))  # and hope that works

    self.client.post('{}/networking/ipv4/share'.format(Instance.api_endpoint),
                     model=linode, data={"ips": addresses})

    # refresh the instance so its cached IP data is re-fetched
    linode.invalidate()
"\n Shares the given list of :any:`IPAddresses<IPAddress>` with the provided\n :any:`Instance`. This will enable the provided Instance to bring up the\n shared IP Addresses even though it does not own them.\n\n :param linode: The Instance to share the IPAddresses with. This Instance\n will be able to bring up the given addresses.\n :type: linode: int or Instance\n :param ips: Any number of IPAddresses to share to the Instance.\n :type ips: str or IPAddress\n "
]
|
def load(self, target_type, target_id, target_parent_id=None):
    """
    Constructs the requested object and immediately loads it from the API,
    bypassing the lazy-loading scheme. Related objects are not loaded.

    :param target_type: The type of object to create.
    :param target_id: The ID of the object to create.
    :param target_parent_id: The parent ID, for derived types.

    :returns: The fully loaded object.
    :raises ApiError: If the object could not be loaded.
    """
    instance = target_type.make_instance(target_id, self,
                                         parent_id=target_parent_id)
    instance._api_get()
    return instance
"\n Constructs and immediately loads the object, circumventing the\n lazy-loading scheme by immediately making an API request. Does not\n load related objects.\n\n For example, if you wanted to load an :any:`Instance` object with ID 123,\n you could do this::\n\n loaded_linode = client.load(Instance, 123)\n\n Similarly, if you instead wanted to load a :any:`NodeBalancerConfig`,\n you could do so like this::\n\n loaded_nodebalancer_config = client.load(NodeBalancerConfig, 456, 432)\n\n :param target_type: The type of object to create.\n :type target_type: type\n :param target_id: The ID of the object to create.\n :type target_id: int or str\n :param target_parent_id: The parent ID of the object to create, if\n applicable.\n :type target_parent_id: int, str, or None\n\n :returns: The resulting object, fully loaded.\n :rtype: target_type\n :raise ApiError: if the requested object could not be loaded.\n "
]
|
def _api_call(self, endpoint, model=None, method=None, data=None, filters=None):
    """
    Makes a call to the Linode API. ``data`` should only be given for POST
    or PUT methods, and must be a dict.

    :param endpoint: The endpoint, possibly with {placeholders} filled from
                     the model's attributes.
    :param model: Optional model whose attributes fill endpoint placeholders.
    :param method: The bound requests method to call (required).
    :param data: Optional request body, JSON-serialized before sending.
    :param filters: Optional filter dict, sent as the X-Filter header.

    :returns: The parsed JSON response body, or None for 204 responses.
    :raises RuntimeError: If no API token is configured.
    :raises ValueError: If no method was given.
    :raises ApiError: If the server returns a 4xx/5xx status.
    """
    if not self.token:
        raise RuntimeError("You do not have an API token!")

    if not method:
        raise ValueError("Method is required for API calls!")

    if model:
        # endpoints may contain placeholders filled from the model
        endpoint = endpoint.format(**vars(model))

    url = '{}{}'.format(self.base_url, endpoint)
    headers = {
        'Authorization': "Bearer {}".format(self.token),
        'Content-Type': 'application/json',
        'User-Agent': self._user_agent,
    }

    if filters:
        headers['X-Filter'] = json.dumps(filters)

    body = None
    if data is not None:
        body = json.dumps(data)

    response = method(url, headers=headers, data=body)

    warning = response.headers.get('Warning', None)
    if warning:
        logger.warning('Received warning from server: {}'.format(warning))

    if 399 < response.status_code < 600:
        j = None
        error_msg = '{}: '.format(response.status_code)
        try:
            j = response.json()
            if 'errors' in j.keys():
                for e in j['errors']:
                    error_msg += '{}; '.format(e['reason']) \
                        if 'reason' in e.keys() else ''
        except Exception:
            # error body wasn't valid JSON (or was malformed) - raise with
            # what we have; narrowed from a bare `except:` so that
            # KeyboardInterrupt/SystemExit are no longer swallowed
            pass
        raise ApiError(error_msg, status=response.status_code, json=j)

    if response.status_code != 204:
        j = response.json()
    else:
        j = None  # handle no response body

    return j
"\n Makes a call to the linode api. Data should only be given if the method is\n POST or PUT, and should be a dictionary\n "
]
|
def image_create(self, disk, label=None, description=None):
    """
    Creates a new Image from a disk you own.

    :param disk: The Disk to imagize (object or id).
    :type disk: Disk or int
    :param label: The label for the resulting Image; defaults to the
                  disk's label.
    :type label: str
    :param description: The description for the new Image.
    :type description: str

    :returns: The new Image.
    :rtype: Image
    :raises UnexpectedResponseError: If the API response lacks an "id".
    """
    params = {
        "disk_id": disk.id if issubclass(type(disk), Base) else disk,
    }

    if label is not None:
        params["label"] = label

    if description is not None:
        params["description"] = description

    result = self.post('/images', data=params)

    if not 'id' in result:
        # attach the raw response for debugging, matching sibling methods
        raise UnexpectedResponseError('Unexpected response when creating an '
                                      'Image from disk {}'.format(disk),
                                      json=result)

    return Image(self, result['id'], result)
"\n Creates a new Image from a disk you own.\n\n :param disk: The Disk to imagize.\n :type disk: Disk or int\n :param label: The label for the resulting Image (defaults to the disk's\n label.\n :type label: str\n :param description: The description for the new Image.\n :type description: str\n\n :returns: The new Image.\n :rtype: Image\n "
]
|
def nodebalancer_create(self, region, **kwargs):
    """
    Creates a new NodeBalancer in the given Region.

    :param region: The Region in which to create the NodeBalancer.
    :type region: Region or str

    :returns: The new NodeBalancer
    :rtype: NodeBalancer
    :raises UnexpectedResponseError: If the API response lacks an "id".
    """
    params = {
        "region": region.id if isinstance(region, Base) else region,
    }
    params.update(kwargs)

    result = self.post('/nodebalancers', data=params)

    if not 'id' in result:
        # error message typo fixed ("Nodebalaner" -> "NodeBalancer")
        raise UnexpectedResponseError('Unexpected response when creating NodeBalancer!', json=result)

    return NodeBalancer(self, result['id'], result)
"\n Creates a new NodeBalancer in the given Region.\n\n :param region: The Region in which to create the NodeBalancer.\n :type region: Region or str\n\n :returns: The new NodeBalancer\n :rtype: NodeBalancer\n "
]
|
def domain_create(self, domain, master=True, **kwargs):
    """
    Registers a new Domain in Linode's DNS manager. Point your registrar at
    Linode's nameservers so the DNS manager serves the domain.

    :param domain: The domain to register.
    :type domain: str
    :param master: Whether this is a master zone (defaults to True).
    :type master: bool

    :returns: The new Domain object.
    :raises UnexpectedResponseError: If the API response lacks an "id".
    """
    domain_type = 'master' if master else 'slave'
    params = {
        'domain': domain,
        'type': domain_type,
    }
    params.update(kwargs)

    result = self.post('/domains', data=params)

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when creating Domain!', json=result)

    return Domain(self, result['id'], result)
"\n Registers a new Domain on the acting user's account. Make sure to point\n your registrar to Linode's nameservers so that Linode's DNS manager will\n correctly serve your domain.\n\n :param domain: The domain to register to Linode's DNS manager.\n :type domain: str\n :param master: Whether this is a master (defaults to true)\n :type master: bool\n\n :returns: The new Domain object.\n :rtype: Domain\n "
]
|
def tag_create(self, label, instances=None, domains=None, nodebalancers=None,
               volumes=None, entities=None):
    """
    Creates a new Tag and optionally applies it to the given entities.

    :param label: The label for the new Tag
    :type label: str
    :param instances: Instances (objects or ids) to tag on creation
    :param domains: Domains (objects or ids) to tag on creation
    :param nodebalancers: NodeBalancers (objects or ids) to tag on creation
    :param volumes: Volumes (objects or ids) to tag on creation
    :param entities: A mixed list of taggable objects (Instance, Domain,
                     NodeBalancer, Volume) applied *in addition to* the
                     typed lists above.

    :returns: The new Tag
    :rtype: Tag
    :raises ValueError: If an entity of an unsupported type is given.
    :raises UnexpectedResponseError: If the API response lacks a "label".
    """
    # default was a mutable `[]`; use None and normalize to avoid the
    # shared-mutable-default-argument pitfall
    entities = entities if entities is not None else []

    linode_ids, nodebalancer_ids, domain_ids, volume_ids = [], [], [], []

    # filter input into lists of ids
    sorter = zip((linode_ids, nodebalancer_ids, domain_ids, volume_ids),
                 (instances, nodebalancers, domains, volumes))

    for id_list, input_list in sorter:
        # if we got something, we need to find its ID
        if input_list is not None:
            for cur in input_list:
                if isinstance(cur, int):
                    id_list.append(cur)
                else:
                    id_list.append(cur.id)

    # filter entities into id lists too
    type_map = {
        Instance: linode_ids,
        NodeBalancer: nodebalancer_ids,
        Domain: domain_ids,
        Volume: volume_ids,
    }

    for e in entities:
        if type(e) in type_map:
            type_map[type(e)].append(e.id)
        else:
            raise ValueError('Unsupported entity type {}'.format(type(e)))

    # finally, omit all id lists that are empty
    params = {
        'label': label,
        'linodes': linode_ids or None,
        'nodebalancers': nodebalancer_ids or None,
        'domains': domain_ids or None,
        'volumes': volume_ids or None,
    }

    result = self.post('/tags', data=params)

    if not 'label' in result:
        raise UnexpectedResponseError('Unexpected response when creating Tag!', json=result)

    return Tag(self, result['label'], result)
"\n Creates a new Tag and optionally applies it to the given entities.\n\n :param label: The label for the new Tag\n :type label: str\n :param entities: A list of objects to apply this Tag to upon creation.\n May only be taggable types (Linode Instances, Domains,\n NodeBalancers, or Volumes). These are applied *in addition\n to* any IDs specified with ``instances``, ``domains``,\n ``nodebalancers``, or ``volumes``, and is a convenience\n for sending multiple entity types without sorting them\n yourself.\n :type entities: list of Instance, Domain, NodeBalancer, and/or Volume\n :param instances: A list of Linode Instances to apply this Tag to upon\n creation\n :type instances: list of Instance or list of int\n :param domains: A list of Domains to apply this Tag to upon\n creation\n :type domains: list of Domain or list of int\n :param nodebalancers: A list of NodeBalancers to apply this Tag to upon\n creation\n :type nodebalancers: list of NodeBalancer or list of int\n :param volumes: A list of Volumes to apply this Tag to upon\n creation\n :type volumes: list of Volumes or list of int\n\n :returns: The new Tag\n :rtype: Tag\n "
]
|
def volume_create(self, label, region=None, linode=None, size=20, **kwargs):
    """
    Creates a new Block Storage Volume, either in the given Region or
    attached to the given Instance.

    :param label: The label for the new Volume.
    :param region: The Region to create this Volume in (not required if
                   ``linode`` is given).
    :param linode: The Instance to attach this Volume to, if any.
    :param size: The size, in GB, of the new Volume. Defaults to 20.

    :returns: The new Volume.
    :raises ValueError: If neither region nor linode was given.
    :raises UnexpectedResponseError: If the API response lacks an "id".
    """
    if not (region or linode):
        raise ValueError('region or linode required!')

    region_id = region.id if issubclass(type(region), Base) else region
    linode_id = linode.id if issubclass(type(linode), Base) else linode

    params = {
        "label": label,
        "size": size,
        "region": region_id,
        "linode_id": linode_id,
    }
    params.update(kwargs)

    result = self.post('/volumes', data=params)

    if 'id' not in result:
        raise UnexpectedResponseError('Unexpected response when creating volume!', json=result)

    return Volume(self, result['id'], result)
"\n Creates a new Block Storage Volume, either in the given Region or\n attached to the given Instance.\n\n :param label: The label for the new Volume.\n :type label: str\n :param region: The Region to create this Volume in. Not required if\n `linode` is provided.\n :type region: Region or str\n :param linode: The Instance to attach this Volume to. If not given, the\n new Volume will not be attached to anything.\n :type linode: Instance or int\n :param size: The size, in GB, of the new Volume. Defaults to 20.\n :type size: int\n\n :returns: The new Volume.\n :rtype: Volume\n "
]
|
def generate_login_url(self, scopes=None, redirect_uri=None):
    """
    Generates a URL to redirect users to so they may authorize this
    application against the login service.

    :param scopes: The OAuth scopes to request for this login.
    :type scopes: list
    :param redirect_uri: The requested redirect uri; the login service
                         enforces that it is under the registered path.
    :type redirect_uri: str

    :returns: The uri to send users to for this login attempt.
    :rtype: str
    """
    parts = list(urlparse(self.base_url + "/oauth/authorize"))

    query = {
        "client_id": self.client_id,
        "response_type": "code",  # needed for all logins
    }
    if scopes:
        query["scopes"] = OAuthScopes.serialize(scopes)
    if redirect_uri:
        query["redirect_uri"] = redirect_uri

    parts[4] = urlencode(query)  # index 4 is the query component
    return urlunparse(parts)
"\n Generates a url to send users so that they may authenticate to this\n application. This url is suitable for redirecting a user to. For\n example, in `Flask`_, a login route might be implemented like this::\n\n @app.route(\"/login\")\n def begin_oauth_login():\n login_client = LinodeLoginClient(client_id, client_secret)\n return redirect(login_client.generate_login_url())\n\n .. _Flask:: http://flask.pocoo.org\n\n :param scopes: The OAuth scopes to request for this login.\n :type scopes: list\n :param redirect_uri: The requested redirect uri. The login service\n enforces that this is under the registered redirect\n path.\n :type redirect_uri: str\n\n :returns: The uri to send users to for this login attempt.\n :rtype: str\n "
]
|
def finish_oauth(self, code):
    """
    Completes the OAuth exchange with the authentication server, given the
    exchange code returned to the redirect uri.

    :param code: The OAuth Exchange Code from the query string.
    :type code: str

    :returns: (access token, scopes, expiry datetime, refresh token).
    :raises ApiError: If the OAuth exchange fails.
    """
    r = requests.post(self._login_uri("/oauth/token"), data={
        "code": code,
        "client_id": self.client_id,
        "client_secret": self.client_secret
    })

    if r.status_code != 200:
        raise ApiError("OAuth token exchange failed", status=r.status_code, json=r.json())

    payload = r.json()
    token = payload["access_token"]
    scopes = OAuthScopes.parse(payload["scopes"])
    expiry = datetime.now() + timedelta(seconds=payload['expires_in'])
    refresh_token = payload['refresh_token']

    return token, scopes, expiry, refresh_token
"\n Given an OAuth Exchange Code, completes the OAuth exchange with the\n authentication server. This should be called once the user has already\n been directed to the login_uri, and has been sent back after successfully\n authenticating. For example, in `Flask`_, this might be implemented as\n a route like this::\n\n @app.route(\"/oauth-redirect\")\n def oauth_redirect():\n exchange_code = request.args.get(\"code\")\n login_client = LinodeLoginClient(client_id, client_secret)\n\n token, scopes = login_client.finish_oauth(exchange_code)\n\n # store the user's OAuth token in their session for later use\n # and mark that they are logged in.\n\n return redirect(\"/\")\n\n .. _Flask: http://flask.pocoo.org\n\n :param code: The OAuth Exchange Code returned from the authentication\n server in the query string.\n :type code: str\n\n :returns: The new OAuth token, and a list of scopes the token has, when\n the token expires, and a refresh token that can generate a new\n valid token when this one is expired.\n :rtype: tuple(str, list)\n\n :raise ApiError: If the OAuth exchange fails.\n "
]
|
def expire_token(self, token):
    """
    Asks the authentication server to expire the given OAuth token
    immediately - the responsible way to log a user out.

    :param token: The OAuth token to expire.
    :type token: str

    :returns: True if the expiration attempt succeeded.
    :rtype: bool
    :raises ApiError: If the expiration attempt failed.
    """
    r = requests.post(self._login_uri("/oauth/token/expire"),
                      data={
                          "client_id": self.client_id,
                          "client_secret": self.client_secret,
                          "token": token,
                      })

    if r.status_code != 200:
        # pass the status as the keyword ApiError expects, rather than
        # the raw response object positionally (inconsistent with every
        # other ApiError call site)
        raise ApiError("Failed to expire token!", status=r.status_code)

    return True
"\n Given a token, makes a request to the authentication server to expire\n it immediately. This is considered a responsible way to log out a\n user. If you simply remove the session your application has for the\n user without expiring their token, the user is not _really_ logged out.\n\n :param token: The OAuth token you wish to expire\n :type token: str\n\n :returns: If the expiration attempt succeeded.\n :rtype: bool\n\n :raises ApiError: If the expiration attempt failed.\n "
]
|
def grants(self):
    """
    Returns grants for the current user, or None if the user is
    unrestricted (unrestricted users have no grants).
    """
    from linode_api4.objects.account import UserGrants

    # restricted users fetch their grants from this special endpoint
    resp = self._client.get('/profile/grants')

    if resp is None:
        # unrestricted - no grants to report
        return None
    return UserGrants(self._client, self.username, resp)
"\n Returns grants for the current user\n "
]
|
def add_whitelist_entry(self, address, netmask, note=None):
    """Add a new entry to this user's IP whitelist, if enabled.

    :param address: The IP address to whitelist.
    :param netmask: The netmask to apply to the address.
    :param note: An optional note describing the entry.

    :returns: The newly-created whitelist entry.
    :rtype: WhitelistEntry

    :raises UnexpectedResponseError: If the API response contains no id.
    """
    payload = {
        "address": address,
        "netmask": netmask,
        "note": note,
    }
    result = self._client.post("{}/whitelist".format(Profile.api_endpoint),
                               data=payload)

    if 'id' not in result:
        raise UnexpectedResponseError("Unexpected response creating whitelist entry!")

    return WhitelistEntry(result['id'], self._client, json=result)
def confirm_login_allowed(self, user):
    """Policy hook deciding whether *user* may log in.

    Independent of authentication itself: active users are allowed
    (method returns None), inactive users are rejected.

    :raises forms.ValidationError: If the user is inactive.
    """
    if user.is_active:
        return

    raise forms.ValidationError(
        self.error_messages['inactive'],
        code='inactive',
    )
def broken_chains(samples, chains):
    """Flag which chains are broken in each sample.

    Args:
        samples (array_like):
            nS x nV array_like where nS is the number of samples and nV
            the number of variables; values are 0/1 or -1/+1.

        chains (list[array_like]):
            nC chains, each a collection of column indices into samples.

    Returns:
        :obj:`numpy.ndarray`: nS x nC boolean array; entry (i, j) is True
        when chain j is broken in sample i.
    """
    samples = np.asarray(samples)
    if samples.ndim != 2:
        raise ValueError("expected samples to be a numpy 2D array")

    num_samples = samples.shape[0]
    broken = np.zeros((num_samples, len(chains)), dtype=bool, order='F')

    for k, chain in enumerate(chains):
        chain = np.asarray(list(chain) if isinstance(chain, set) else chain)

        if chain.ndim > 1:
            raise ValueError("chains should be 1D array_like objects")

        # chains over zero or one variables can never be broken
        if len(chain) <= 1:
            continue

        is_one = samples[:, chain] == 1
        # broken exactly when some, but not all, members equal 1
        broken[:, k] = is_one.any(axis=1) & ~is_one.all(axis=1)

    return broken
def discard(samples, chains):
    """Unembed samples, discarding every sample that has a broken chain.

    Args:
        samples (array_like):
            nS x nV array_like of 0/1 or -1/+1 sample values.

        chains (list[array_like]):
            nC chains, each a collection of column indices into samples.

    Returns:
        tuple: A 2-tuple containing:

            :obj:`numpy.ndarray`: The unembedded samples with broken-chain
            rows removed.

            :obj:`numpy.ndarray`: Indices of the rows that were kept.
    """
    samples = np.asarray(samples)
    if samples.ndim != 2:
        raise ValueError("expected samples to be a numpy 2D array")

    num_chains = len(chains)

    # rows in which every chain is intact
    broken = broken_chains(samples, chains)
    unbroken_idxs, = np.where(~broken.any(axis=1))

    # in the surviving rows each chain agrees everywhere, so its first
    # member can represent the whole chain
    def _representative(chain):
        return np.asarray(tuple(chain) if isinstance(chain, set) else chain)[0]

    chain_variables = np.fromiter((_representative(chain) for chain in chains),
                                  count=num_chains, dtype=int)

    return samples[np.ix_(unbroken_idxs, chain_variables)], unbroken_idxs
def majority_vote(samples, chains):
    """Unembed samples, resolving broken chains by majority vote.

    Args:
        samples (array_like):
            nS x nV array_like of 0/1 or -1/+1 sample values.

        chains (list[array_like]):
            nC chains, each a collection of column indices into samples.

    Returns:
        tuple: A 2-tuple containing:

            :obj:`numpy.ndarray`: nS x nC int8 array of unembedded samples;
            broken chains take their most common value (ties resolved
            arbitrarily).

            :obj:`numpy.ndarray`: ``np.arange(nS)`` — all samples are kept.
    """
    samples = np.asarray(samples)
    if samples.ndim != 2:
        raise ValueError("expected samples to be a numpy 2D array")

    num_samples = samples.shape[0]
    unembedded = np.empty((num_samples, len(chains)), dtype='int8', order='F')

    # If every value is nonzero the data is spin-valued (all-ones binary
    # data also lands here, where both branches agree), and the sign of the
    # chain sum is the majority; np.sign is avoided because fixing its 0s
    # is slow.
    if samples.all():
        for k, chain in enumerate(chains):
            unembedded[:, k] = 2 * (samples[:, chain].sum(axis=1) >= 0) - 1
    else:
        # binary-valued: majority means at least half the chain is 1
        for k, chain in enumerate(chains):
            unembedded[:, k] = samples[:, chain].sum(axis=1) >= len(chain) / 2

    return unembedded, np.arange(num_samples)
def weighted_random(samples, chains):
    """Unembed samples, resolving each chain by weighted random choice.

    Choosing one uniformly random column per chain makes each chain's
    value a random draw weighted by the frequency of values in the chain.

    Args:
        samples (array_like):
            nS x nV array_like of 0/1 or -1/+1 sample values.

        chains (list[array_like]):
            nC chains, each a collection of column indices into samples.

    Returns:
        tuple: A 2-tuple containing:

            :obj:`numpy.ndarray`: nS x nC array of unembedded samples.

            :obj:`numpy.ndarray`: ``np.arange(nS)`` — all samples are kept.
    """
    samples = np.asarray(samples)
    if samples.ndim != 2:
        raise ValueError("expected samples to be a numpy 2D array")

    # one random representative column per chain suffices
    representatives = [np.random.choice(chain) for chain in chains]

    num_samples = samples.shape[0]
    return samples[:, representatives], np.arange(num_samples)
def sample_ising(self, h, J, **kwargs):
    """Sample from the specified Ising model.

    Args:
        h (list/dict):
            Linear biases. A list is interpreted with its indices as
            variable labels.

        J (dict[(int, int): float]):
            Quadratic biases.

        **kwargs:
            Optional solver-specific sampling parameters (see
            :attr:`.DWaveSampler.parameters`).

    Returns:
        :class:`dimod.SampleSet`
    """
    if isinstance(h, list):
        h = dict(enumerate(h))

    # the active variables are those carrying a linear or quadratic bias
    variables = set(h).union(*J)
    try:
        active_variables = sorted(variables)
    except TypeError:
        # unorderable (mixed-type) labels; keep arbitrary order
        active_variables = list(variables)

    future = self.solver.sample_ising(h, J, **kwargs)

    return dimod.SampleSet.from_future(future, _result_to_response_hook(active_variables, dimod.SPIN))
def sample_qubo(self, Q, **kwargs):
    """Sample from the specified QUBO.

    Args:
        Q (dict):
            Coefficients of a quadratic unconstrained binary optimization
            (QUBO) model, keyed on variable pairs.

        **kwargs:
            Optional solver-specific sampling parameters (see
            :attr:`.DWaveSampler.parameters`).

    Returns:
        :class:`dimod.SampleSet`
    """
    # every variable appearing in any interaction key is active
    variables = set().union(*Q)
    try:
        active_variables = sorted(variables)
    except TypeError:
        # unorderable (mixed-type) labels; keep arbitrary order
        active_variables = list(variables)

    future = self.solver.sample_qubo(Q, **kwargs)

    return dimod.SampleSet.from_future(future, _result_to_response_hook(active_variables, dimod.BINARY))
def validate_anneal_schedule(self, anneal_schedule):
    """Raise an exception if *anneal_schedule* is invalid for this sampler.

    An anneal schedule is a list of (time t in microseconds, anneal
    fraction s in [0, 1]) pairs defining a piecewise-linear curve.

    Raises:
        RuntimeError: If the sampler does not accept the `anneal_schedule`
            parameter, or lacks the `annealing_time_range` /
            `max_anneal_schedule_points` properties needed for validation.
        TypeError: If the schedule is not a list.
        ValueError: If the schedule violates a solver constraint: point
            count, strictly increasing time, final s == 1, initial s of
            0 (forward) or 1 (reverse), monotonic s for forward anneals,
            maximum anneal time, or maximum slope.
    """
    if 'anneal_schedule' not in self.parameters:
        raise RuntimeError("anneal_schedule is not an accepted parameter for this sampler")

    properties = self.properties
    try:
        min_anneal_time, max_anneal_time = properties['annealing_time_range']
        max_anneal_schedule_points = properties['max_anneal_schedule_points']
    except KeyError:
        raise RuntimeError("annealing_time_range and max_anneal_schedule_points are not properties of this solver")

    if not isinstance(anneal_schedule, list):
        raise TypeError("anneal_schedule should be a list")

    # the point count is bounded below by 2 and above by a solver property
    num_points = len(anneal_schedule)
    if num_points < 2 or num_points > max_anneal_schedule_points:
        msg = ("anneal_schedule must contain between 2 and {} points (contains {})"
               ).format(max_anneal_schedule_points, num_points)
        raise ValueError(msg)

    try:
        times, fractions = zip(*anneal_schedule)
    except ValueError:
        raise ValueError("anneal_schedule should be a list of 2-tuples")

    # time must strictly increase between consecutive points
    if any(earlier >= later for earlier, later in zip(times, times[1:])):
        raise ValueError("Time t must increase for all points in the schedule")

    # the schedule cannot run past the solver's maximum anneal time
    if times[-1] > max_anneal_time:
        raise ValueError("schedule cannot be longer than the maximum anneal time of {}".format(max_anneal_time))

    first_s, last_s = fractions[0], fractions[-1]
    if last_s != 1:
        raise ValueError("In the final point, anneal fraction s must equal 1.")

    if first_s == 0:
        # forward annealing: s must monotonically increase
        if any(earlier > later for earlier, later in zip(fractions, fractions[1:])):
            raise ValueError("For forward anneals, anneal fraction s must monotonically increase")
    elif first_s != 1:
        # first_s == 1 would be reverse annealing, which needs no extra check
        msg = ("In the first point, anneal fraction s must equal 0 for forward annealing or "
               "1 for reverse annealing")
        raise ValueError(msg)

    # finally, |slope| between consecutive points must not exceed 1/min_anneal_time
    max_slope = 1.0 / min_anneal_time
    for (t0, s0), (t1, s1) in zip(anneal_schedule, anneal_schedule[1:]):
        if abs((s1 - s0) / (t1 - t0)) > max_slope:
            raise ValueError("the maximum slope cannot exceed {}".format(max_slope))
def target_to_source(target_adjacency, embedding):
    """Derive the source-graph adjacency from an embedding and a target adjacency.

    Args:
        target_adjacency (dict/:class:`networkx.Graph`):
            A dict {v: Nv, ...} where v is a target-graph node and Nv an
            iterable of its neighbors. A networkx graph also works.

        embedding (dict):
            Mapping from source nodes to collections of target nodes.

    Returns:
        dict: The adjacency of the source graph, {source: set of sources}.

    Raises:
        ValueError: If any target node is assigned to more than one source
            node by the embedding.
    """
    # the nodes of the source graph are exactly the keys of the embedding
    source_adjacency = {v: set() for v in embedding}

    # invert the embedding: target node -> its (unique) source node.
    # dict.items() replaces the six/py2 iteritems helper; identical behavior.
    reverse_embedding = {}
    for v, chain in embedding.items():
        for u in chain:
            if u in reverse_embedding:
                raise ValueError("target node {} assigned to more than one source node".format(u))
            reverse_embedding[u] = v

    # v is node in target, n node in source
    for v, n in reverse_embedding.items():
        # u is node in target
        for u in target_adjacency[v]:
            # some target nodes might not be assigned to any chain
            if u not in reverse_embedding:
                continue

            # m is node in source
            m = reverse_embedding[u]
            if m == n:
                continue

            source_adjacency[n].add(m)
            source_adjacency[m].add(n)

    return source_adjacency
def chain_to_quadratic(chain, target_adjacency, chain_strength):
    """Determine the quadratic biases that induce the given chain.

    Args:
        chain (iterable):
            The variables that make up the chain.

        target_adjacency (dict/:class:`networkx.Graph`):
            A dict {s: Ns, ...} where s is a target-graph variable and Ns
            the set of its neighbours.

        chain_strength (float):
            Magnitude of the coupling used to bind the chain; each induced
            edge gets bias ``-chain_strength``.

    Returns:
        dict[edge, float]: The quadratic biases that induce the chain.

    Raises:
        ValueError: If chain is empty or its variables do not form a
            connected subgraph of the target graph.
    """
    quadratic = {}  # edges binding the chain, collected during the search

    # breadth-first search across the chain's variables
    seen = set()
    try:
        next_level = {next(iter(chain))}
    except StopIteration:
        raise ValueError("chain must have at least one variable")

    while next_level:
        this_level, next_level = next_level, set()
        for v in this_level:
            if v in seen:
                continue
            seen.add(v)

            for u in target_adjacency[v]:
                if u not in chain:
                    continue
                next_level.add(u)
                if u != v and (u, v) not in quadratic:
                    quadratic[(v, u)] = -chain_strength

    # every chain variable must have been reached by the search
    if len(chain) != len(seen):
        raise ValueError('{} is not a connected chain'.format(chain))

    return quadratic
def chain_break_frequency(samples_like, embedding):
    """Determine the frequency of chain breaks in the given samples.

    Args:
        samples_like (samples_like/:obj:`dimod.SampleSet`):
            Raw samples or a dimod sample set (see :func:`dimod.as_samples`).

        embedding (dict):
            Mapping {source_variable: {target_variable, ...}, ...}.

    Returns:
        dict: {source_variable: fraction of samples whose chain is broken}.
    """
    if isinstance(samples_like, dimod.SampleSet):
        labels = samples_like.variables
        samples = samples_like.record.sample
        num_occurrences = samples_like.record.num_occurrences
    else:
        samples, labels = dimod.as_samples(samples_like)
        num_occurrences = np.ones(samples.shape[0])

    # map chain members onto sample columns when labels are not already 0..n-1
    if not all(v == idx for idx, v in enumerate(labels)):
        column_of = {v: idx for idx, v in enumerate(labels)}
        embedding = {v: {column_of[u] for u in chain}
                     for v, chain in embedding.items()}

    if not embedding:
        return {}

    variables, chains = zip(*embedding.items())
    broken = broken_chains(samples, chains)

    # occurrence-weighted fraction of broken rows, per chain
    return {v: float(np.average(broken[:, k], weights=num_occurrences))
            for k, v in enumerate(variables)}
def edgelist_to_adjacency(edgelist):
    """Convert an iterable of edges to an adjacency dict.

    Args:
        edgelist (iterable):
            An iterable of 2-tuples, each an edge.

    Returns:
        dict: {v: Nv, ...} where v is a node and Nv the set of its
        neighbors.
    """
    adjacency = dict()
    for u, v in edgelist:
        # record the edge in both directions
        adjacency.setdefault(u, set()).add(v)
        adjacency.setdefault(v, set()).add(u)
    return adjacency
def sample(self, bqm, **kwargs):
    """Sample from the given binary quadratic model, tiled across the child sampler.

    Args:
        bqm (:obj:`dimod.BinaryQuadraticModel`):
            Binary quadratic model to sample from.

        **kwargs:
            Optional solver-specific sampling parameters.

    Returns:
        :class:`dimod.SampleSet`
    """
    # tile the problem: embed one copy of bqm per stored embedding
    embedded_bqm = dimod.BinaryQuadraticModel.empty(bqm.vartype)
    __, __, target_adjacency = self.child.structure
    for embedding in self.embeddings:
        embedded_bqm.update(dwave.embedding.embed_bqm(bqm, embedding, target_adjacency))

    # one call to the child sampler answers every tile at once
    tiled_response = self.child.sample(embedded_bqm, **kwargs)

    # unembed each tile independently and merge the results
    responses = []
    for embedding in self.embeddings:
        active = {v: chain for v, chain in embedding.items() if v in bqm.variables}
        responses.append(dwave.embedding.unembed_sampleset(tiled_response, active, bqm))

    return dimod.concatenate(responses)
def cache_connect(database=None):
    """Return a connection object to a sqlite database.

    Args:
        database (str, optional): Path of the database to connect to. If
            not given, a default is chosen using :func:`.cache_file`. The
            special name ':memory:' creates a temporary in-memory database.

    Returns:
        :class:`sqlite3.Connection`
    """
    if database is None:
        database = cache_file()

    is_new = not os.path.isfile(database)

    conn = sqlite3.connect(database)

    if is_new:
        # populate the fresh database with the cache schema
        conn.executescript(schema)

    with conn as cur:
        # foreign_keys is a per-connection setting in sqlite, so it must be
        # enabled on every connection — not only when the database is first
        # created — for deletes to cascade
        cur.execute("PRAGMA foreign_keys = ON;")

    conn.row_factory = sqlite3.Row

    return conn
def insert_chain(cur, chain, encoded_data=None):
    """Insert a chain into the cache (no-op if it is already present).

    Args:
        cur (:class:`sqlite3.Cursor`):
            Cursor, typically used inside a ``with`` statement.

        chain (iterable):
            A collection of nodes; a chain acts as one node in an
            embedding. Nodes are assumed to be index-labeled.

        encoded_data (dict, optional):
            Populated with the serialized values so callers can reuse them
            without re-encoding.
    """
    if encoded_data is None:
        encoded_data = {}

    # canonical serialization: sorted nodes, compact json
    if 'nodes' not in encoded_data:
        encoded_data['nodes'] = json.dumps(sorted(chain), separators=(',', ':'))
    if 'chain_length' not in encoded_data:
        encoded_data['chain_length'] = len(chain)

    cur.execute(
        "INSERT OR IGNORE INTO chain(chain_length, nodes) VALUES (:chain_length, :nodes);",
        encoded_data,
    )
def iter_chain(cur):
    """Iterate over all chains in the cache.

    Args:
        cur (:class:`sqlite3.Cursor`):
            Cursor, typically used inside a ``with`` statement.

    Yields:
        list: The nodes of each stored chain.
    """
    for row in cur.execute("SELECT nodes FROM chain"):
        yield json.loads(row[0])
def insert_system(cur, system_name, encoded_data=None):
    """Insert a system name into the cache (no-op if already present).

    Args:
        cur (:class:`sqlite3.Cursor`):
            Cursor, typically used inside a ``with`` statement.

        system_name (str):
            The unique name of a system.

        encoded_data (dict, optional):
            Populated with the serialized values so callers can reuse them.
    """
    if encoded_data is None:
        encoded_data = {}

    if 'system_name' not in encoded_data:
        encoded_data['system_name'] = system_name

    cur.execute(
        "INSERT OR IGNORE INTO system(system_name) VALUES (:system_name);",
        encoded_data,
    )
def insert_flux_bias(cur, chain, system, flux_bias, chain_strength, encoded_data=None):
    """Insert a flux bias offset into the cache.

    Args:
        cur (:class:`sqlite3.Cursor`):
            Cursor, typically used inside a ``with`` statement.

        chain (iterable):
            A collection of nodes acting as one logical variable.

        system (str):
            The unique name of a system.

        flux_bias (float):
            The flux bias offset associated with the chain.

        chain_strength (float):
            Magnitude of the negative quadratic bias inducing the chain.

        encoded_data (dict, optional):
            Populated with the serialized values so callers can reuse them.
    """
    if encoded_data is None:
        encoded_data = {}

    # make sure the referenced chain and system rows exist; these calls also
    # fill encoded_data with the shared serialized keys
    insert_chain(cur, chain, encoded_data)
    insert_system(cur, system, encoded_data)

    if 'flux_bias' not in encoded_data:
        encoded_data['flux_bias'] = _encode_real(flux_bias)
    if 'chain_strength' not in encoded_data:
        encoded_data['chain_strength'] = _encode_real(chain_strength)
    if 'insert_time' not in encoded_data:
        encoded_data['insert_time'] = datetime.datetime.now()

    insert = """
        INSERT OR REPLACE INTO flux_bias(chain_id, system_id, insert_time, flux_bias, chain_strength)
        SELECT
            chain.id,
            system.id,
            :insert_time,
            :flux_bias,
            :chain_strength
        FROM chain, system
        WHERE
            chain.chain_length = :chain_length AND
            chain.nodes = :nodes AND
            system.system_name = :system_name;
        """
    cur.execute(insert, encoded_data)
def iter_flux_bias(cur):
    """Iterate over all flux biases in the cache.

    Args:
        cur (:class:`sqlite3.Cursor`):
            Cursor, typically used inside a ``with`` statement.

    Yields:
        tuple: (chain as a list, system name, flux bias, chain strength).
    """
    select = "SELECT nodes, system_name, flux_bias, chain_strength FROM flux_bias_view;"
    for nodes, system, flux_bias, chain_strength in cur.execute(select):
        yield json.loads(nodes), system, _decode_real(flux_bias), _decode_real(chain_strength)
def get_flux_biases_from_cache(cur, chains, system_name, chain_strength, max_age=3600):
    """Look up cached flux biases for the given chains, system and chain strength.

    Args:
        cur (:class:`sqlite3.Cursor`):
            Cursor, typically used inside a ``with`` statement.

        chains (iterable):
            Chains, each a collection of nodes acting as one logical
            variable.

        system_name (str):
            The unique name of a system.

        chain_strength (float):
            Magnitude of the negative quadratic bias inducing the chains.

        max_age (int, optional, default=3600):
            Maximum age (in seconds) of usable cache entries.

    Returns:
        dict: {node: flux_bias} covering every node of every chain;
        zero-valued biases are omitted.

    Raises:
        MissingFluxBias: If any chain lacks a fresh-enough cache entry.
    """
    select = """
        SELECT
            flux_bias
        FROM flux_bias_view WHERE
            chain_length = :chain_length AND
            nodes = :nodes AND
            chain_strength = :chain_strength AND
            system_name = :system_name AND
            insert_time >= :time_limit;
        """

    encoded_data = {'chain_strength': _encode_real(chain_strength),
                    'system_name': system_name,
                    'time_limit': datetime.datetime.now() - datetime.timedelta(seconds=max_age)}

    flux_biases = {}
    for chain in chains:
        encoded_data['chain_length'] = len(chain)
        encoded_data['nodes'] = json.dumps(sorted(chain), separators=(',', ':'))

        row = cur.execute(select, encoded_data).fetchone()
        if row is None:
            raise MissingFluxBias
        flux_bias = _decode_real(*row)

        # zero offsets carry no information; leave them out of the result
        if flux_bias == 0:
            continue

        flux_biases.update({v: flux_bias for v in chain})

    return flux_biases
def insert_graph(cur, nodelist, edgelist, encoded_data=None):
    """Insert a graph into the cache (no-op if it is already present).

    A graph is stored by number of nodes, number of edges and a
    json-encoded list of edges.

    Args:
        cur (:class:`sqlite3.Cursor`):
            Cursor, typically used inside a ``with`` statement.

        nodelist (list):
            The graph's nodes; assumed index-labeled 0 .. num_nodes - 1.

        edgelist (list):
            The graph's edges. Sorting nodelist and edgelist before
            insertion keeps the cache compact.

        encoded_data (dict, optional):
            Populated with the serialized values ('num_nodes', 'num_edges',
            'edges') so callers can reuse them without re-encoding.
    """
    if encoded_data is None:
        encoded_data = {}

    if 'num_nodes' not in encoded_data:
        encoded_data['num_nodes'] = len(nodelist)
    if 'num_edges' not in encoded_data:
        encoded_data['num_edges'] = len(edgelist)
    if 'edges' not in encoded_data:
        encoded_data['edges'] = json.dumps(edgelist, separators=(',', ':'))

    insert = """
        INSERT OR IGNORE INTO graph(num_nodes, num_edges, edges)
        VALUES (:num_nodes, :num_edges, :edges);
        """
    cur.execute(insert, encoded_data)
def iter_graph(cur):
    """Iterate over all graphs in the cache.

    Args:
        cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function
            is meant to be run within a :obj:`with` statement.

    Yields:
        tuple: A 2-tuple containing:

            list: The nodelist for a graph in the cache.

            list: The edgelist for a graph in the cache.

    Examples:
        >>> nodelist = [0, 1, 2]
        >>> edgelist = [(0, 1), (1, 2)]
        >>> with pmc.cache_connect(':memory:') as cur:
        ...     pmc.insert_graph(cur, nodelist, edgelist)
        ...     list(pmc.iter_graph(cur))
        [([0, 1, 2], [[0, 1], [1, 2]])]

    """
    select = "SELECT num_nodes, num_edges, edges from graph;"
    for num_nodes, num_edges, edges in cur.execute(select):
        # nodes are index-labeled, so the nodelist is implicit in num_nodes
        yield list(range(num_nodes)), json.loads(edges)
|
def insert_embedding(cur, source_nodelist, source_edgelist, target_nodelist, target_edgelist,
                     embedding, embedding_tag):
    """Insert an embedding into the cache.

    Args:
        cur (:class:`sqlite3.Cursor`):
            An sqlite3 cursor. This function is meant to be run within a
            :obj:`with` statement.

        source_nodelist (list):
            The nodes in the source graph. Should be integer valued.

        source_edgelist (list):
            The edges in the source graph.

        target_nodelist (list):
            The nodes in the target graph. Should be integer valued.

        target_edgelist (list):
            The edges in the target graph.

        embedding (dict):
            The mapping from the source graph to the target graph.
            Should be of the form {v: {s, ...}, ...} where v is a variable
            in the source model and s is a variable in the target model.

        embedding_tag (str):
            A string tag to associate with the embedding.

    """
    encoded_data = {}

    # first we need to encode the graphs and create the embedding id;
    # insert_graph also populates the *_data dicts with the serialized form
    source_data = {}
    insert_graph(cur, source_nodelist, source_edgelist, source_data)
    encoded_data['source_edges'] = source_data['edges']
    encoded_data['source_num_nodes'] = source_data['num_nodes']
    encoded_data['source_num_edges'] = source_data['num_edges']

    target_data = {}
    insert_graph(cur, target_nodelist, target_edgelist, target_data)
    encoded_data['target_edges'] = target_data['edges']
    encoded_data['target_num_nodes'] = target_data['num_nodes']
    encoded_data['target_num_edges'] = target_data['num_edges']

    encoded_data['tag'] = embedding_tag

    insert_embedding = \
        """
        INSERT OR REPLACE INTO embedding(
            source_id,
            target_id,
            tag)
        SELECT
            source_graph.id,
            target_graph.id,
            :tag
        FROM
            graph 'source_graph',
            graph 'target_graph'
        WHERE
            source_graph.edges = :source_edges AND
            source_graph.num_nodes = :source_num_nodes AND
            source_graph.num_edges = :source_num_edges AND
            target_graph.edges = :target_edges AND
            target_graph.num_nodes = :target_num_nodes AND
            target_graph.num_edges = :target_num_edges
        """
    cur.execute(insert_embedding, encoded_data)

    # now each chain needs to be inserted
    insert_embedding_component = \
        """
        INSERT OR REPLACE INTO embedding_component(
            source_node,
            chain_id,
            embedding_id)
        SELECT
            :source_node,
            chain.id,
            embedding.id
        FROM
            graph 'source_graph',
            graph 'target_graph',
            chain,
            embedding
        WHERE
            source_graph.edges = :source_edges AND
            source_graph.num_nodes = :source_num_nodes AND
            target_graph.edges = :target_edges AND
            target_graph.num_nodes = :target_num_nodes AND
            embedding.source_id = source_graph.id AND
            embedding.target_id = target_graph.id AND
            embedding.tag = :tag AND
            chain.nodes = :nodes AND
            chain.chain_length = :chain_length
        """

    for v, chain in iteritems(embedding):
        chain_data = {'source_node': v}

        # insert_chain populates chain_data with :nodes and :chain_length
        insert_chain(cur, chain, chain_data)

        encoded_data.update(chain_data)

        cur.execute(insert_embedding_component, encoded_data)
|
def select_embedding_from_tag(cur, embedding_tag, target_nodelist, target_edgelist):
    """Select an embedding from the given tag and target graph.

    Args:
        cur (:class:`sqlite3.Cursor`):
            An sqlite3 cursor. This function is meant to be run within a
            :obj:`with` statement.

        embedding_tag (str):
            The string tag associated with the embedding when it was cached.

        target_nodelist (list):
            The nodes in the target graph. Should be integer valued.

        target_edgelist (list):
            The edges in the target graph.

    Returns:
        dict: The mapping from the source graph to the target graph.
        In the form {v: {s, ...}, ...} where v is a variable in the
        source model and s is a variable in the target model. Empty if
        no matching embedding is cached.

    """
    # encode the target graph the same way insert_graph does, so the
    # serialized edges match what is stored in the cache
    encoded_data = {'num_nodes': len(target_nodelist),
                    'num_edges': len(target_edgelist),
                    'edges': json.dumps(target_edgelist, separators=(',', ':')),
                    'tag': embedding_tag}

    select = \
        """
        SELECT
            source_node,
            chain
        FROM
            embedding_component_view
        WHERE
            embedding_tag = :tag AND
            target_edges = :edges AND
            target_num_nodes = :num_nodes AND
            target_num_edges = :num_edges
        """

    embedding = {v: json.loads(chain) for v, chain in cur.execute(select, encoded_data)}

    return embedding
|
def select_embedding_from_source(cur, source_nodelist, source_edgelist,
                                 target_nodelist, target_edgelist):
    """Select an embedding from the source graph and target graph.

    Args:
        cur (:class:`sqlite3.Cursor`):
            An sqlite3 cursor. This function is meant to be run within a
            :obj:`with` statement.

        source_nodelist (list):
            The nodes in the source graph. Should be integer valued.

        source_edgelist (list):
            The edges in the source graph.

        target_nodelist (list):
            The nodes in the target graph. Should be integer valued.

        target_edgelist (list):
            The edges in the target graph.

    Returns:
        dict: The mapping from the source graph to the target graph.
        In the form {v: {s, ...}, ...} where v is a variable in the
        source model and s is a variable in the target model. Empty if
        no matching embedding is cached.

    """
    # encode both graphs the same way insert_graph does, so the serialized
    # edge lists match what is stored in the cache
    encoded_data = {'target_num_nodes': len(target_nodelist),
                    'target_num_edges': len(target_edgelist),
                    'target_edges': json.dumps(target_edgelist, separators=(',', ':')),
                    'source_num_nodes': len(source_nodelist),
                    'source_num_edges': len(source_edgelist),
                    'source_edges': json.dumps(source_edgelist, separators=(',', ':'))}

    select = \
        """
        SELECT
            source_node,
            chain
        FROM
            embedding_component_view
        WHERE
            source_num_edges = :source_num_edges AND
            source_edges = :source_edges AND
            source_num_nodes = :source_num_nodes AND

            target_num_edges = :target_num_edges AND
            target_edges = :target_edges AND
            target_num_nodes = :target_num_nodes
        """

    embedding = {v: json.loads(chain) for v, chain in cur.execute(select, encoded_data)}

    return embedding
|
def find_clique_embedding(k, m=None, target_graph=None):
    """Find an embedding of a k-sized clique on a Pegasus graph (target_graph).

    This clique is found by transforming the Pegasus graph into a K2,2 Chimera
    graph and then applying a Chimera clique finding algorithm. The results are
    then converted back in terms of Pegasus coordinates.

    Note: If target_graph is None, m will be used to generate a m-by-m Pegasus
    graph. Hence m and target_graph cannot both be None.

    Args:
        k (int/iterable/:obj:`networkx.Graph`): Number of members in the
            requested clique; list of nodes; a complete graph that you want
            to embed onto the target_graph
        m (int): Number of tiles in a row of a square Pegasus graph
        target_graph (:obj:`networkx.Graph`): A Pegasus graph

    Returns:
        dict: A dictionary representing target_graph's clique embedding. Each
        dictionary key represents a node in said clique. Each corresponding
        dictionary value is a list of pegasus coordinates that should be
        chained together to represent said node.

    """
    # Organize parameter values
    if target_graph is None:
        if m is None:
            raise TypeError("m and target_graph cannot both be None.")
        target_graph = pegasus_graph(m)

    m = target_graph.graph['rows']    # We only support square Pegasus graphs

    # NOTE(review): this unpacking expects k to already be a (number, nodes)
    # 2-tuple, as produced by networkx's nodes_or_number decorator -- confirm
    # that the decorator was not lost from this definition.
    _, nodes = k

    # Deal with differences in ints vs coordinate target_graphs
    if target_graph.graph['labels'] == 'nice':
        fwd_converter = get_nice_to_pegasus_fn(m = m)
        back_converter = get_pegasus_to_nice_fn(m = m)
        pegasus_coords = [fwd_converter(*p) for p in target_graph.nodes]
        back_translate = lambda embedding: {key: [back_converter(*p) for p in chain]
                                        for key, chain in embedding.items()}
    elif target_graph.graph['labels'] == 'int':
        # Convert nodes in terms of Pegasus coordinates
        coord_converter = pegasus_coordinates(m)
        pegasus_coords = map(coord_converter.tuple, target_graph.nodes)

        # A function to convert our final coordinate embedding to an ints embedding
        back_translate = lambda embedding: {key: list(coord_converter.ints(chain))
                                        for key, chain in embedding.items()}
    else:
        pegasus_coords = target_graph.nodes
        back_translate = lambda embedding: embedding

    # Break each Pegasus qubits into six Chimera fragments
    # Note: By breaking the graph in this way, you end up with a K2,2 Chimera graph
    fragment_tuple = get_tuple_fragmentation_fn(target_graph)
    fragments = fragment_tuple(pegasus_coords)

    # Create a K2,2 Chimera graph
    # Note: 6 * m because Pegasus qubits split into six pieces, so the number of rows
    # and columns get multiplied by six
    chim_m = 6 * m
    chim_graph = chimera_graph(chim_m, t=2, coordinates=True)

    # Determine valid fragment couplers in a K2,2 Chimera graph
    edges = chim_graph.subgraph(fragments).edges()

    # Find clique embedding in K2,2 Chimera graph
    embedding_processor = processor(edges, M=chim_m, N=chim_m, L=2, linear=False)
    chimera_clique_embedding = embedding_processor.tightestNativeClique(len(nodes))

    # Convert chimera fragment embedding in terms of Pegasus coordinates
    defragment_tuple = get_tuple_defragmentation_fn(target_graph)
    pegasus_clique_embedding = map(defragment_tuple, chimera_clique_embedding)
    pegasus_clique_embedding = dict(zip(nodes, pegasus_clique_embedding))
    pegasus_clique_embedding = back_translate(pegasus_clique_embedding)

    if len(pegasus_clique_embedding) != len(nodes):
        raise ValueError("No clique embedding found")

    return pegasus_clique_embedding
|
def draw_chimera_bqm(bqm, width=None, height=None):
    """Draws a Chimera Graph representation of a Binary Quadratic Model.

    If cell width and height not provided assumes square cell dimensions.
    Throws an error if drawing onto a Chimera graph of the given dimensions fails.

    Args:
        bqm (:obj:`dimod.BinaryQuadraticModel`):
            Should be equivalent to a Chimera graph or a subgraph of a Chimera
            graph produced by dnx.chimera_graph. The nodes and edges should
            have integer variables as in the dnx.chimera_graph.
        width (int, optional):
            An integer representing the number of cells of the Chimera graph
            will be in width.
        height (int, optional):
            An integer representing the number of cells of the Chimera graph
            will be in height.

    Examples:
        >>> from dwave.embedding.drawing import draw_chimera_bqm
        >>> from dimod import BinaryQuadraticModel
        >>> Q={(0, 0): 2, (1, 1): 1, (2, 2): 0, (3, 3): -1, (4, 4): -2, (5, 5): -2, (6, 6): -2, (7, 7): -2,
        ...    (0, 4): 2, (0, 4): -1, (1, 7): 1, (1, 5): 0, (2, 5): -2, (2, 6): -2, (3, 4): -2, (3, 7): -2}
        >>> draw_chimera_bqm(BinaryQuadraticModel.from_qubo(Q), width=1, height=1)

    """
    linear = bqm.linear.keys()
    quadratic = bqm.quadratic.keys()

    if width is None and height is None:
        # Create a graph large enough to fit the input networkx graph.
        graph_size = ceil(sqrt((max(linear) + 1) / 8.0))
        width = graph_size
        height = graph_size

    if not width or not height:
        raise Exception("Both dimensions must be defined, not just one.")

    # A background image of the same size is created to show the complete graph.
    G0 = chimera_graph(height, width, 4)
    G = chimera_graph(height, width, 4)

    # Check if input graph is chimera graph shaped, by making sure that no
    # nodes or edges fall outside the Chimera graph of the chosen dimensions.
    # (Set differences give the offending elements directly.)
    linear_set = set(linear)
    g_node_set = set(G.nodes)

    quadratic_set = set(map(frozenset, quadratic))
    g_edge_set = set(map(frozenset, G.edges))

    non_chimera_nodes = linear_set - g_node_set
    non_chimera_edges = quadratic_set - g_edge_set

    if non_chimera_nodes or non_chimera_edges:
        raise Exception("Input graph is not a chimera graph: Nodes: %s Edges: %s" % (non_chimera_nodes, non_chimera_edges))

    # Get lists of nodes and edges to remove from the complete graph to turn
    # the complete graph into your graph.
    remove_nodes = list(g_node_set - linear_set)
    remove_edges = list(g_edge_set - quadratic_set)

    # Remove the nodes and edges from the graph.
    for edge in remove_edges:
        G.remove_edge(*edge)
    for node in remove_nodes:
        G.remove_node(node)

    node_size = 100

    # Draw the complete chimera graph as the background.
    draw_chimera(G0, node_size=node_size*0.5, node_color='black', edge_color='black')
    # Draw your graph over the complete graph to show the connectivity.
    draw_chimera(G, node_size=node_size, linear_biases=bqm.linear, quadratic_biases=bqm.quadratic,
                 width=3)
    return
|
def embed_bqm(source_bqm, embedding, target_adjacency, chain_strength=1.0,
              smear_vartype=None):
    """Embed a binary quadratic model onto a target graph.

    Args:
        source_bqm (:obj:`.BinaryQuadraticModel`):
            Binary quadratic model to embed.

        embedding (dict):
            Mapping from source graph to target graph as a dict of form
            {s: {t, ...}, ...}, where s is a source-model variable and t is
            a target-model variable.

        target_adjacency (dict/:class:`networkx.Graph`):
            Adjacency of the target graph as a dict of form {t: Nt, ...},
            where t is a variable in the target graph and Nt is its set of
            neighbours.

        chain_strength (float, optional):
            Magnitude of the quadratic bias (in SPIN-space) applied between
            variables to create chains. Note that the energy penalty of chain
            breaks is 2 * `chain_strength`.

        smear_vartype (:class:`.Vartype`, optional, default=None):
            When a single variable is embedded, its linear bias is 'smeared'
            evenly over the chain. This parameter determines whether the
            variable is smeared in SPIN or BINARY space. By default the
            embedding is done according to the given source_bqm.

    Returns:
        :obj:`.BinaryQuadraticModel`: Target binary quadratic model.

    Examples:
        This example embeds a fully connected :math:`K_3` graph onto a square
        target graph by chaining target-nodes 2 and 3 to represent
        source-node c:

        >>> import dimod
        >>> import networkx as nx
        >>> bqm = dimod.BinaryQuadraticModel.from_ising({}, {('a', 'b'): 1, ('b', 'c'): 1, ('a', 'c'): 1})
        >>> target = nx.cycle_graph(4)
        >>> embedding = {'a': {0}, 'b': {1}, 'c': {2, 3}}
        >>> target_bqm = dimod.embed_bqm(bqm, embedding, target)
        >>> target_bqm.quadratic[(0, 1)] == bqm.quadratic[('a', 'b')]
        True

    """
    # If a smear vartype is requested that differs from the source's vartype,
    # convert, embed in the requested space, then convert back.
    if smear_vartype is dimod.SPIN and source_bqm.vartype is dimod.BINARY:
        return embed_bqm(source_bqm.spin, embedding, target_adjacency,
                         chain_strength=chain_strength, smear_vartype=None).binary
    elif smear_vartype is dimod.BINARY and source_bqm.vartype is dimod.SPIN:
        return embed_bqm(source_bqm.binary, embedding, target_adjacency,
                         chain_strength=chain_strength, smear_vartype=None).spin

    # create a new empty binary quadratic model with the same class as source_bqm
    target_bqm = source_bqm.empty(source_bqm.vartype)

    # add the offset
    target_bqm.add_offset(source_bqm.offset)

    # start with the linear biases, spreading the source bias equally over the
    # target variables in the chain
    for v, bias in iteritems(source_bqm.linear):

        if v in embedding:
            chain = embedding[v]
        else:
            raise MissingChainError(v)

        if any(u not in target_adjacency for u in chain):
            raise InvalidNodeError(v, next(u not in target_adjacency for u in chain))

        b = bias / len(chain)

        target_bqm.add_variables_from({u: b for u in chain})

    # next up the quadratic biases, spread the quadratic biases evenly over the
    # available interactions
    for (u, v), bias in iteritems(source_bqm.quadratic):
        available_interactions = {(s, t) for s in embedding[u] for t in embedding[v] if s in target_adjacency[t]}

        if not available_interactions:
            raise MissingEdgeError(u, v)

        b = bias / len(available_interactions)

        target_bqm.add_interactions_from((u, v, b) for u, v in available_interactions)

    for chain in itervalues(embedding):

        # in the case where the chain has length 1, there are no chain quadratic
        # biases, but we none-the-less want the chain variables to appear in the
        # target_bqm
        if len(chain) == 1:
            v, = chain
            target_bqm.add_variable(v, 0.0)
            continue

        quadratic_chain_biases = chain_to_quadratic(chain, target_adjacency, chain_strength)
        target_bqm.add_interactions_from(quadratic_chain_biases, vartype=dimod.SPIN)  # these are spin

        # add the energy for satisfied chains to the offset
        energy_diff = -sum(itervalues(quadratic_chain_biases))
        target_bqm.add_offset(energy_diff)

    return target_bqm
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.