sentence1
stringlengths 52
3.87M
| sentence2
stringlengths 1
47.2k
| label
stringclasses 1
value |
---|---|---|
def random(cls, origin=None, radius=1):
    '''
    :origin: - optional Point subclass
    :radius: - optional float
    :return: Triangle

    Creates a triangle with random coordinates inside the circle
    described by (origin, radius).  Defaults: origin (0, 0), radius 1.0.

    NOTE(review): there is no collinearity check -- three random points
    can (rarely) be collinear and describe a degenerate triangle.
    '''
    # A set guarantees the three chosen points are distinct.
    vertices = set()
    while len(vertices) < 3:
        vertices.add(Point.random(origin, radius))
    return cls(vertices)
def equilateral(cls, origin=None, side=1):
    '''
    :origin: optional Point
    :side: optional float describing triangle side length
    :return: Triangle initialized with points comprising an
             equilateral triangle.

    The base runs from origin to origin + (side, 0); the apex sits
    above the midpoint of the base at height (sqrt(3)/2) * side.
    '''
    o = Point(origin)
    h = 0.5 * Sqrt_3 * side + o.y
    # BUGFIX: the apex x used to be (o.x + side) / 2, which equals the
    # base midpoint only when o.x == 0; for any other origin the
    # resulting triangle was not equilateral.
    return cls(o, [o.x + side, o.y], [o.x + side / 2, h])
def isosceles(cls, origin=None, base=1, alpha=90):
    '''
    :origin: optional Point
    :base: optional float describing triangle base length
    :alpha: optional float -- currently unused; TODO(review): confirm
            whether the apex angle was ever meant to be honored.
    :return: Triangle initialized with points comprising an
             isosceles triangle.

    The base runs from origin to origin + (base, 0); the apex sits
    above the base midpoint at height `base`.
    '''
    o = Point(origin)
    # BUGFIX: `base` used to be rebound to o.x + base, so both the apex
    # x (base / 2) and the apex height (o.y + base) silently depended
    # on the origin's x coordinate.  Build the vertices from the
    # original side length instead.
    return cls(o, [o.x + base, o.y], [o.x + base / 2, o.y + base])
def C(self):
    '''
    Third vertex of triangle, Point subclass.
    '''
    # Lazily created and cached on first access.
    if not hasattr(self, '_C'):
        self._C = Point(0, 1)
    return self._C
def ABC(self):
    '''
    A list of the triangle's vertices, list.
    '''
    # Built once from A, B, C and cached.
    if not hasattr(self, '_ABC'):
        self._ABC = [self.A, self.B, self.C]
    return self._ABC
def BA(self):
    '''
    Vertices B and A, list.
    '''
    # Cached pair in B-then-A order.
    if not hasattr(self, '_BA'):
        self._BA = [self.B, self.A]
    return self._BA
def AC(self):
    '''
    Vertices A and C, list.
    '''
    # Cached pair in A-then-C order.
    if not hasattr(self, '_AC'):
        self._AC = [self.A, self.C]
    return self._AC
def CA(self):
    '''
    Vertices C and A, list.
    '''
    # Cached pair in C-then-A order.
    if not hasattr(self, '_CA'):
        self._CA = [self.C, self.A]
    return self._CA
def BC(self):
    '''
    Vertices B and C, list.
    '''
    # Cached pair in B-then-C order.
    if not hasattr(self, '_BC'):
        self._BC = [self.B, self.C]
    return self._BC
def CB(self):
    '''
    Vertices C and B, list.
    '''
    # Cached pair in C-then-B order.
    if not hasattr(self, '_CB'):
        self._CB = [self.C, self.B]
    return self._CB
def segments(self):
    '''
    A list of the Triangle's line segments [AB, BC, AC], list.
    '''
    sides = (self.AB, self.BC, self.AC)
    return [Segment(endpoints) for endpoints in sides]
def circumcenter(self):
    '''
    The intersection of the median perpendicular bisectors, Point.

    The center of the circumscribed circle, which is the circle that
    passes through all vertices of the triangle.

    https://en.wikipedia.org/wiki/Circumscribed_circle#Cartesian_coordinates_2

    BUG: only finds the circumcenter in the XY plane

    :raises ValueError: if the triangle cannot be translated so that
        vertex A sits at the origin.
    '''
    # Special case: for a right triangle the circumcenter is simply the
    # midpoint of the hypotenuse.
    if self.isRight:
        return self.hypotenuse.midpoint

    if self.A.isOrigin:
        t = self
    else:
        # translate triangle to origin
        t = Triangle(self.A - self.A, self.B - self.A, self.C - self.A)
    # XXX translation would be easier by defining add and sub for points
    # t = self - self.A
    if not t.A.isOrigin:
        raise ValueError('failed to translate {} to origin'.format(t))

    # Cartesian formula with A at the origin (see link above):
    #   d  = 2 * (Bx*Cy - By*Cx)
    #   Ux = (Cy*(Bx^2 + By^2) - By*(Cx^2 + Cy^2)) / d
    #   Uy = (Bx*(Cx^2 + Cy^2) - Cx*(Bx^2 + By^2)) / d
    BmulC = t.B * t.C.yx
    d = 2 * (BmulC.x - BmulC.y)
    bSqSum = sum((t.B ** 2).xy)
    cSqSum = sum((t.C ** 2).xy)
    # Translate the result back by adding A's coordinates.
    x = (((t.C.y * bSqSum) - (t.B.y * cSqSum)) / d) + self.A.x
    y = (((t.B.x * cSqSum) - (t.C.x * bSqSum)) / d) + self.A.y
    return Point(x, y)
def altitudes(self):
    '''
    A list of the altitudes of each vertex [AltA, AltB, AltC], list of
    floats.

    An altitude is the shortest distance from a vertex to the side
    opposite of it.
    '''
    # altitude = 2 * area / length of the opposite side
    doubled_area = self.area * 2
    return [doubled_area / side for side in (self.a, self.b, self.c)]
def isEquilateral(self):
    '''
    True if all sides of the triangle are the same length.

    All equilateral triangles are also isosceles.
    All equilateral triangles are also acute.
    '''
    return (nearly_eq(self.a, self.b)
            and nearly_eq(self.b, self.c)
            and nearly_eq(self.a, self.c))
def swap(self, side='AB', inplace=False):
    '''
    :side: - optional string, one of 'AB', 'BC' or 'AC'
    :inplace: - optional boolean
    :return: Triangle with flipped side.

    Changes the order of the triangle's points, swapping the
    specified points. Doing so will change the results of isCCW
    and ccw.

    :raises KeyError: if side is not 'AB', 'BC' or 'AC'.
    '''
    flips = {'AB': (self.B.xyz, self.A.xyz, self.C.xyz),
             'BC': (self.A.xyz, self.C.xyz, self.B.xyz),
             'AC': (self.C.xyz, self.B.xyz, self.A.xyz)}
    try:
        flipset = flips[side]
    except KeyError:
        # FIX: the original `raise KeyError(str(e))` just echoed the bad
        # key back; raise the same exception type with a message that
        # tells the caller what the valid values are.
        raise KeyError(
            "side must be one of 'AB', 'BC' or 'AC', got {!r}".format(side))
    if inplace:
        self.ABC = flipset
        return self
    return Triangle(flipset)
def doesIntersect(self, other):
    '''
    :param: other - Triangle or Line subclass
    :return: boolean

    Returns True iff:
       Any segment in self intersects any segment in other.

    :raises TypeError: if other is neither a Triangle nor a Line
                       subclass.
    '''
    otherType = type(other)
    if issubclass(otherType, Triangle):
        # BUGFIX: `segments` returns a list, not a dict -- the original
        # `.values()` calls raised AttributeError at runtime.
        return any(mine.doesIntersect(theirs)
                   for mine in self.segments
                   for theirs in other.segments)
    if issubclass(otherType, Line):
        return any(mine.doesIntersect(other) for mine in self.segments)
    msg = "expecting Line or Triangle subclasses, got '{}'"
    raise TypeError(msg.format(otherType))
def perimeter(self):
    '''
    Sum of the length of all sides, float.
    '''
    total = 0
    for start, end in self.pairs():
        total += start.distance(end)
    return total
def vl_dsift(data, fast=False, norm=False, bounds=None, size=3, step=1,
             window_size=None, float_descriptors=False,
             verbose=False, matlab_style=True):
    '''
    Dense sift descriptors from an image.

    :param data: 2d image array; converted to float32 internally
    :param fast: use a flat window instead of a Gaussian one
    :param norm: include each frame's norm as an extra frames column
    :param bounds: optional 4-tuple restricting extraction; read as
        (y0, x0, y1, x1) when matlab_style, else (x0, y0, x1, y1)
    :param size: bin size passed to vl_dsift_new_basic
    :param step: sampling step passed to vl_dsift_new_basic
    :param window_size: optional non-negative scalar window size
    :param float_descriptors: return float descriptors instead of uint8
    :param verbose: print the computed dsift parameters
    :param matlab_style: reproduce MATLAB vl_dsift memory layout,
        column order and descriptor transposition

    Returns:
        frames: num_frames x (2 or 3) matrix of x, y, (norm)
        descrs: num_frames x 128 matrix of descriptors
    '''
    if not matlab_style:
        import warnings
        warnings.warn("matlab_style=False gets different results than matlab, "
                      "not sure why or how incorrect they are.")
    # MATLAB is column-major; mirror its memory layout when requested.
    order = 'F' if matlab_style else 'C'
    data = as_float_image(data, dtype=np.float32, order=order)
    if data.ndim != 2:
        raise TypeError("data should be a 2d array")

    if window_size is not None:
        assert np.isscalar(window_size) and window_size >= 0

    # construct the dsift object
    M, N = data.shape
    dsift_p = vl_dsift_new_basic(M, N, step, size)
    try:
        dsift = dsift_p.contents

        # set parameters
        if bounds is not None:
            if matlab_style:
                y0, x0, y1, x1 = bounds  # transposed
            else:
                x0, y0, x1, y1 = bounds
            # clamp requested bounds to the image extent
            dsift.boundMinX = int(max(x0, 0))
            dsift.boundMinY = int(max(y0, 0))
            dsift.boundMaxX = int(min(x1, M - 1))
            dsift.boundMaxY = int(min(y1, N - 1))
            _vl_dsift_update_buffers(dsift_p)

        dsift.useFlatWindow = fast

        if window_size is not None:
            dsift.windowSize = window_size

        # get calculated parameters
        descr_size = dsift.descrSize
        num_frames = dsift.numFrames
        geom = dsift.geom

        if verbose:
            pr = lambda *a, **k: print('vl_dsift:', *a, **k)
            pr("image size [W, H] = [{}, {}]".format(N, M))
            # bounds are reported 1-based, MATLAB style
            x0 = dsift.boundMinX + 1
            y0 = dsift.boundMinY + 1
            x1 = dsift.boundMaxX + 1
            y1 = dsift.boundMaxY + 1
            bound_args = [y0, x0, y1, x1] if matlab_style else [x0, y0, x1, y1]
            pr("bounds: [minX,minY,maxX,maxY] = [{}, {}, {}, {}]"
               .format(*bound_args))
            pr("subsampling steps: stepX={}, stepY={}".format(
                dsift.stepX, dsift.stepY))
            pr("num bins: [numBinT, numBinX, numBinY] = [{}, {}, {}]"
               .format(geom.numBinT, geom.numBinX, geom.numBinY))
            pr("descriptor size: {}".format(descr_size))
            pr("bin sizes: [binSizeX, binSizeY] = [{}, {}]".format(
                geom.binSizeX, geom.binSizeY))
            pr("flat window: {}".format(bool(fast)))
            pr("window size: {}".format(dsift.windowSize))
            pr("num of features: {}".format(num_frames))

        # do the actual processing
        vl_dsift_process(dsift_p, data)

        # copy frames' locations, norms out
        # the frames are a structure of just 4 doubles (VLDsiftKeypoint),
        # which luckily looks exactly like an array of doubles. :)
        # NOTE: this might be platform/compiler-dependent...but it works
        #       with the provided binaries on os x, at least
        frames_p = cast(dsift.frames, c_double_p)
        frames_p_a = npc.as_array(frames_p, shape=(num_frames, 4))
        # MATLAB reports (y, x); otherwise keep (x, y).
        cols = [1, 0] if matlab_style else [0, 1]
        if norm:
            cols.append(3)
        frames = np.require(frames_p_a[:, cols], requirements=['C', 'O'])

        # copy descriptors into a new array, scaled by 512 and clipped
        # at 255
        descrs_p = npc.as_array(dsift.descrs, shape=(num_frames, descr_size))
        descrs = descrs_p * 512
        assert descrs.flags.owndata
        np.minimum(descrs, 255, out=descrs)
        if not float_descriptors:
            descrs = descrs.astype(np.uint8)  # TODO: smarter about copying?
        if matlab_style:
            # permute descriptor components into MATLAB's ordering
            new_order = np.empty(descr_size, dtype=int)
            vl_dsift_transpose_descriptor(new_order, np.arange(descr_size),
                                          geom.numBinT, geom.numBinX, geom.numBinY)
            descrs = descrs[:, new_order]

        return frames, descrs
    finally:
        # always release the C-side dsift object
        vl_dsift_delete(dsift_p)
def rgb2gray(img):
    """Converts an RGB image to grayscale using matlab's algorithm."""
    # matlab derives the luminance weights by inverting the NTSC
    # RGB -> YIQ matrix and taking its first (luma) row.
    ntsc = np.array([
        [1.0, 0.956, 0.621],
        [1.0, -0.272, -0.647],
        [1.0, -1.106, 1.703],
    ])
    r_c, g_c, b_c = np.linalg.inv(ntsc)[0]
    r, g, b = np.rollaxis(as_float_image(img), axis=-1)
    return r_c * r + g_c * g + b_c * b
def rgb2hsv(arr):
    """Converts an RGB image to HSV using scikit-image's algorithm.

    :param arr: array of shape (M, N, 3) holding RGB values
    :return: float array of the same shape with H, S, V channels
    :raises ValueError: if arr does not have shape (., ., 3)
    """
    arr = np.asanyarray(arr)
    if arr.ndim != 3 or arr.shape[2] != 3:
        raise ValueError("the input array must have a shape == (.,.,3)")
    arr = as_float_image(arr)
    out = np.empty_like(arr)
    # -- V channel: per-pixel max over the color axis
    out_v = arr.max(-1)
    # -- S channel
    # FIX: ndarray.ptp() was removed in NumPy 2.0; np.ptp(arr, axis=-1)
    # is the supported, identical spelling.
    delta = np.ptp(arr, axis=-1)
    # Ignore warning for zero divided by zero; the bogus entries are
    # overwritten right away.  np.errstate restores the error state
    # even if an exception escapes (the original seterr/restore did
    # not).
    with np.errstate(invalid='ignore'):
        out_s = delta / out_v
        out_s[delta == 0.] = 0.
        # -- H channel
        # red is max
        idx = (arr[:, :, 0] == out_v)
        out[idx, 0] = (arr[idx, 1] - arr[idx, 2]) / delta[idx]
        # green is max
        idx = (arr[:, :, 1] == out_v)
        out[idx, 0] = 2. + (arr[idx, 2] - arr[idx, 0]) / delta[idx]
        # blue is max
        idx = (arr[:, :, 2] == out_v)
        out[idx, 0] = 4. + (arr[idx, 0] - arr[idx, 1]) / delta[idx]
    out_h = (out[:, :, 0] / 6.) % 1.
    out_h[delta == 0.] = 0.
    # -- output
    out[:, :, 0] = out_h
    out[:, :, 1] = out_s
    out[:, :, 2] = out_v
    # remove NaN
    out[np.isnan(out)] = 0
    return out
def _parse_command_response(response):
"""Parse an SCI command response into ElementTree XML
This is a helper method that takes a Requests Response object
of an SCI command response and will parse it into an ElementTree Element
representing the root of the XML response.
:param response: The requests response object
:return: An ElementTree Element that is the root of the response XML
:raises ResponseParseError: If the response XML is not well formed
"""
try:
root = ET.fromstring(response.text)
except ET.ParseError:
raise ResponseParseError(
"Unexpected response format, could not parse XML. Response: {}".format(response.text))
return root | Parse an SCI command response into ElementTree XML
This is a helper method that takes a Requests Response object
of an SCI command response and will parse it into an ElementTree Element
representing the root of the XML response.
:param response: The requests response object
:return: An ElementTree Element that is the root of the response XML
:raises ResponseParseError: If the response XML is not well formed | entailment |
def _parse_error_tree(error):
    """Parse an error ElementTree Node to create an ErrorInfo object

    :param error: The ElementTree error node
    :return: An ErrorInfo object containing the error ID and the message.
    """
    info = ErrorInfo(error.get('id'), None)
    if error.text is not None:
        info.message = error.text
        return info
    # fall back to the nested <desc> element, if any
    desc = error.find('./desc')
    if desc is not None:
        info.message = desc.text
    return info
def get_data(self):
    """Get the contents of this file

    :return: The contents of this file
    :rtype: six.binary_type
    """
    per_device = self._fssapi.get_file(DeviceTarget(self.device_id), self.path)
    return per_device[self.device_id]
def delete(self):
    """Delete this file from the device

    .. note::
       After deleting the file, this object will no longer contain valid
       information and further calls to delete or get_data will return
       :class:`~.ErrorInfo` objects
    """
    per_device = self._fssapi.delete_file(DeviceTarget(self.device_id), self.path)
    return per_device[self.device_id]
def list_contents(self):
    """List the contents of this directory

    :return: A LsInfo object that contains directories and files
    :rtype: :class:`~.LsInfo` or :class:`~.ErrorInfo`

    Example::

        ldata = dirinfo.list_contents()
        if isinstance(ldata, ErrorInfo):
            logger.warn("Error listing file info: (%s) %s",
                        ldata.errno, ldata.message)
        else:
            for finfo in ldata.files:
                logger.info("Found file %s of size %s", finfo.path, finfo.size)
            for dinfo in ldata.directories:
                logger.info("Found directory %s of last modified %s",
                            dinfo.path, dinfo.last_modified)
    """
    per_device = self._fssapi.list_files(DeviceTarget(self.device_id), self.path)
    return per_device[self.device_id]
def parse_response(cls, response, device_id=None, fssapi=None, **kwargs):
    """Parse the server response for this ls command

    Handles XML of the form::

        <ls hash="hash_type">
          <file path="file_path" last_modified=last_modified_time ... />
          <dir path="dir_path" last_modified=last_modified_time />
        </ls>

    or, on failure, an ``<ls>`` element containing an ``<error>`` child.

    :param response: The XML root of the response for an ls command
    :type response: :class:`xml.etree.ElementTree.Element`
    :param device_id: The device id of the device this ls response came from
    :param fssapi: A :class:`~FileSystemServiceAPI` handed to the
        :class:`~FileInfo` and :class:`~DirectoryInfo` objects for future
        commands
    :return: An :class:`~LsInfo` with the directories and files on the
        device, or an :class:`~ErrorInfo` if the xml contained an error
    :raises ResponseParseError: if the response tag is not ``ls``
    :raises FileSystemServiceException: if fssapi or device_id is missing
    """
    if response.tag != cls.command_name:
        raise ResponseParseError(
            "Received response of type {}, LsCommand can only parse responses of type {}".format(response.tag,
                                                                                                 cls.command_name))
    if fssapi is None:
        raise FileSystemServiceException("fssapi is required to parse an LsCommand response")
    if device_id is None:
        raise FileSystemServiceException("device_id is required to parse an LsCommand response")

    error = response.find('./error')
    if error is not None:
        return _parse_error_tree(error)

    hash_type = response.get('hash')
    # Collect every file listed in this response ...
    files = [FileInfo(fssapi,
                      device_id,
                      node.get('path'),
                      int(node.get('last_modified')),
                      int(node.get('size')),
                      node.get('hash'),
                      hash_type)
             for node in response.findall('./file')]
    # ... and every directory.
    dirs = [DirectoryInfo(fssapi,
                          device_id,
                          node.get('path'),
                          int(node.get('last_modified')))
            for node in response.findall('./dir')]
    return LsInfo(directories=dirs, files=files)
def parse_response(cls, response, **kwargs):
    """Parse the server response for this get file command

    Handles XML of the form::

        <get_file>
          <data>asdfasdfasdfasdfasf</data>
        </get_file>

    or, on failure, a ``<get_file>`` element containing an ``<error>``
    child.

    :param response: The XML root of the response for a get file command
    :type response: :class:`xml.etree.ElementTree.Element`
    :return: a six.binary_type string of the data of a file or an
        :class:`~ErrorInfo` if the xml contained an error
    :raises ResponseParseError: if the response tag is not ``get_file``
    """
    if response.tag != cls.command_name:
        raise ResponseParseError(
            "Received response of type {}, GetCommand can only parse responses of type {}".format(response.tag,
                                                                                                  cls.command_name))
    error = response.find('./error')
    if error is not None:
        return _parse_error_tree(error)

    payload = response.find('./data').text
    if not payload:
        return six.b('')
    # file contents come back base64 encoded
    return base64.b64decode(six.b(payload))
def parse_response(cls, response, **kwargs):
    """Parse the server response for this put file command

    Handles XML of the form::

        <put_file />

    or, on failure, a ``<put_file>`` element containing an ``<error>``
    child.

    :param response: The XML root of the response for a put file command
    :type response: :class:`xml.etree.ElementTree.Element`
    :return: None if everything was ok or an :class:`~ErrorInfo` if the
        xml contained an error
    :raises ResponseParseError: if the response tag is not ``put_file``
    """
    if response.tag != cls.command_name:
        raise ResponseParseError(
            "Received response of type {}, PutCommand can only parse responses of type {}".format(response.tag,
                                                                                                  cls.command_name))
    error = response.find('./error')
    return _parse_error_tree(error) if error is not None else None
def send_command_block(self, target, command_block):
    """Send an arbitrary file system command block

    The primary use for this method is to send multiple file system
    commands with a single web service request, which helps avoid
    throttling.

    :param target: The device(s) to be targeted with this request
    :type target: :class:`devicecloud.sci.TargetABC` or list of
        :class:`devicecloud.sci.TargetABC` instances
    :param command_block: The block of commands to execute on the target
    :type command_block: :class:`~FileSystemServiceCommandBlock`
    :return: A dictionary mapping device_id to a list of parsed command
        responses, in the order the commands appear in that device's
        response.  Device Cloud does not document that this matches the
        order the commands were sent in, so it cannot be guaranteed.
        Mixed command types produce mixed result types; see
        :meth:`LsCommand.parse_response`, :class:`GetCommand.parse_response`,
        :class:`PutCommand.parse_response`, :class:`DeleteCommand.parse_response`.
    """
    root = _parse_command_response(
        self._sci_api.send_sci("file_system", target, command_block.get_command_string()))
    out_dict = {}
    for device in root.findall('./file_system/device'):
        device_id = device.get('id')
        parsed = []
        for command in device.find('./commands'):
            tag = command.tag.lower()
            # dispatch on the command's registered name
            for handler in FILE_SYSTEM_COMMANDS:
                if handler.command_name == tag:
                    parsed.append(handler.parse_response(
                        command, fssapi=self, device_id=device_id))
        out_dict[device_id] = parsed
    return out_dict
def list_files(self, target, path, hash='any'):
    """List all files and directories in the path on the target

    :param target: The device(s) to be targeted with this request
    :type target: :class:`devicecloud.sci.TargetABC` or list of
        :class:`devicecloud.sci.TargetABC` instances
    :param path: The path on the target to list files and directories from
    :param hash: optional; request a hash over the file contents.  Values
        include none, any, md5, and crc32; any lets the device choose its
        best available hash.
    :return: A dictionary mapping device id to an :class:`~.LsInfo` with
        the files and directories, or an :class:`~.ErrorInfo` if that
        device returned an error
    :raises: :class:`~.ResponseParseError` If the SCI response has
        unrecognized formatting

    Example::

        fssapi = dc.get_fss_api()
        ls_data = fssapi.list_files(AllTarget(), '/root/home/user/important_files/')
        for device_id, device_data in ls_data.iteritems():
            if isinstance(device_data, ErrorInfo):
                logger.warn("Error listing file info on device %s. errno: %s message:%s",
                            device_id, device_data.errno, device_data.message)
            else:
                for finfo in device_data.files:
                    logger.info("Found file %s of size %s on device %s",
                                finfo.path, finfo.size, device_id)
                for dinfo in device_data.directories:
                    logger.info("Found directory %s of last modified %s on device %s",
                                dinfo.path, dinfo.last_modified, device_id)
    """
    command_block = FileSystemServiceCommandBlock()
    command_block.add_command(LsCommand(path, hash=hash))
    root = _parse_command_response(
        self._sci_api.send_sci("file_system", target, command_block.get_command_string()))
    # The reply nests one <device id=...> element per target under
    # <sci_reply><file_system>, each holding its <commands><ls> result.
    results = {}
    for device in root.findall('./file_system/device'):
        device_id = device.get('id')
        error = device.find('./error')
        if error is not None:
            results[device_id] = _parse_error_tree(error)
            continue
        results[device_id] = LsCommand.parse_response(
            device.find('./commands/ls'), device_id=device_id, fssapi=self)
    return results
def get_file(self, target, path, offset=None, length=None):
    """Get the contents of a file on the device

    :param target: The device(s) to be targeted with this request
    :type target: :class:`devicecloud.sci.TargetABC` or list of
        :class:`devicecloud.sci.TargetABC` instances
    :param path: The path on the target to the file to retrieve
    :param offset: Start retrieving data from this byte position in the
        file, if None start from the beginning
    :param length: How many bytes to retrieve, if None retrieve until the
        end of the file
    :return: A dictionary mapping device id to the bytes of the (possibly
        partial) file, or an :class:`~.ErrorInfo` if that device returned
        an error
    :raises: :class:`~.ResponseParseError` If the SCI response has
        unrecognized formatting
    """
    command_block = FileSystemServiceCommandBlock()
    command_block.add_command(GetCommand(path, offset, length))
    root = _parse_command_response(
        self._sci_api.send_sci("file_system", target, command_block.get_command_string()))
    results = {}
    for device in root.findall('./file_system/device'):
        device_id = device.get('id')
        error = device.find('./error')
        if error is not None:
            results[device_id] = _parse_error_tree(error)
        else:
            results[device_id] = GetCommand.parse_response(
                device.find('./commands/get_file'))
    return results
def put_file(self, target, path, file_data=None, server_file=None, offset=None, truncate=False):
"""Put data into a file on the device
:param target: The device(s) to be targeted with this request
:type target: :class:`devicecloud.sci.TargetABC` or list of :class:`devicecloud.sci.TargetABC` instances
:param path: The path on the target to the file to write to. If the file already exists it will be overwritten.
:param file_data: A `six.binary_type` containing the data to put into the file
:param server_file: The path to a file on the devicecloud server containing the data to put into the file on the
device
:param offset: Start writing bytes to the file at this position, if None start at the beginning
:param truncate: Boolean, if True after bytes are done being written end the file their even if previous data
exists beyond it. If False, leave any existing data in place.
:return: A dictionary with keys being device ids and value being None if successful or an :class:`~.ErrorInfo`
if the operation failed on that device
:raises: :class:`~.FileSystemServiceException` if either both file_data and server_file are specified or
neither are specified
:raises: :class:`~.ResponseParseError` If the SCI response has unrecognized formatting
"""
command_block = FileSystemServiceCommandBlock()
command_block.add_command(PutCommand(path, file_data, server_file, offset, truncate))
root = _parse_command_response(self._sci_api.send_sci("file_system", target, command_block.get_command_string()))
out_dict = {}
for device in root.findall('./file_system/device'):
device_id = device.get('id')
error = device.find('./error')
if error is not None:
out_dict[device_id] = _parse_error_tree(error)
else:
out_dict[device_id] = PutCommand.parse_response(device.find('./commands/put_file'))
return out_dict | Put data into a file on the device
:param target: The device(s) to be targeted with this request
:type target: :class:`devicecloud.sci.TargetABC` or list of :class:`devicecloud.sci.TargetABC` instances
:param path: The path on the target to the file to write to. If the file already exists it will be overwritten.
:param file_data: A `six.binary_type` containing the data to put into the file
:param server_file: The path to a file on the devicecloud server containing the data to put into the file on the
device
:param offset: Start writing bytes to the file at this position, if None start at the beginning
:param truncate: Boolean, if True after bytes are done being written end the file their even if previous data
exists beyond it. If False, leave any existing data in place.
:return: A dictionary with keys being device ids and value being None if successful or an :class:`~.ErrorInfo`
if the operation failed on that device
:raises: :class:`~.FileSystemServiceException` if either both file_data and server_file are specified or
neither are specified
:raises: :class:`~.ResponseParseError` If the SCI response has unrecognized formatting | entailment |
def delete_file(self, target, path):
"""Delete a file from a device
:param target: The device(s) to be targeted with this request
:type target: :class:`devicecloud.sci.TargetABC` or list of :class:`devicecloud.sci.TargetABC` instances
:param path: The path on the target to the file to delete.
:return: A dictionary with keys being device ids and value being None if successful or an :class:`~.ErrorInfo`
if the operation failed on that device
:raises: :class:`~.ResponseParseError` If the SCI response has unrecognized formatting
"""
command_block = FileSystemServiceCommandBlock()
command_block.add_command(DeleteCommand(path))
root = _parse_command_response(self._sci_api.send_sci("file_system", target, command_block.get_command_string()))
out_dict = {}
for device in root.findall('./file_system/device'):
device_id = device.get('id')
error = device.find('./error')
if error is not None:
out_dict[device_id] = _parse_error_tree(error)
else:
out_dict[device_id] = DeleteCommand.parse_response(device.find('./commands/rm'))
return out_dict | Delete a file from a device
:param target: The device(s) to be targeted with this request
:type target: :class:`devicecloud.sci.TargetABC` or list of :class:`devicecloud.sci.TargetABC` instances
:param path: The path on the target to the file to delete.
:return: A dictionary with keys being device ids and value being None if successful or an :class:`~.ErrorInfo`
if the operation failed on that device
:raises: :class:`~.ResponseParseError` If the SCI response has unrecognized formatting | entailment |
def get_modified_items(self, target, path, last_modified_cutoff):
"""Get all files and directories from a path on the device modified since a given time
:param target: The device(s) to be targeted with this request
:type target: :class:`devicecloud.sci.TargetABC` or list of :class:`devicecloud.sci.TargetABC` instances
:param path: The path on the target to the directory to check for modified files.
:param last_modified_cutoff: The time (as Unix epoch time) to get files modified since
:type last_modified_cutoff: int
:return: A dictionary where the key is a device id and the value is either an :class:`~.ErrorInfo` if there
was a problem with the operation or a :class:`~.LsInfo` with the items modified since the
specified date
"""
file_list = self.list_files(target, path)
out_dict = {}
for device_id, device_data in six.iteritems(file_list):
if isinstance(device_data, ErrorInfo):
out_dict[device_id] = device_data
else:
files = []
dirs = []
for cur_file in device_data.files:
if cur_file.last_modified > last_modified_cutoff:
files.append(cur_file)
for cur_dir in device_data.directories:
if cur_dir.last_modified > last_modified_cutoff:
dirs.append(cur_dir)
out_dict[device_id] = LsInfo(directories=dirs, files=files)
return out_dict | Get all files and directories from a path on the device modified since a given time
:param target: The device(s) to be targeted with this request
:type target: :class:`devicecloud.sci.TargetABC` or list of :class:`devicecloud.sci.TargetABC` instances
:param path: The path on the target to the directory to check for modified files.
:param last_modified_cutoff: The time (as Unix epoch time) to get files modified since
:type last_modified_cutoff: int
:return: A dictionary where the key is a device id and the value is either an :class:`~.ErrorInfo` if there
was a problem with the operation or a :class:`~.LsInfo` with the items modified since the
specified date | entailment |
def exists(self, target, path, path_sep="/"):
"""Check if path refers to an existing path on the device
:param target: The device(s) to be targeted with this request
:type target: :class:`devicecloud.sci.TargetABC` or list of :class:`devicecloud.sci.TargetABC` instances
:param path: The path on the target to check for existence.
:param path_sep: The path separator of the device
:return: A dictionary where the key is a device id and the value is either an :class:`~.ErrorInfo` if there
was a problem with the operation or a boolean with the existence status of the path on that device
"""
if path.endswith(path_sep):
path = path[:-len(path_sep)]
par_dir, filename = path.rsplit(path_sep, 1)
file_list = self.list_files(target, par_dir)
out_dict = {}
for device_id, device_data in six.iteritems(file_list):
if isinstance(device_data, ErrorInfo):
out_dict[device_id] = device_data
else:
out_dict[device_id] = False
for cur_file in device_data.files:
if cur_file.path == path:
out_dict[device_id] = True
for cur_dir in device_data.directories:
if cur_dir.path == path:
out_dict[device_id] = True
return out_dict | Check if path refers to an existing path on the device
:param target: The device(s) to be targeted with this request
:type target: :class:`devicecloud.sci.TargetABC` or list of :class:`devicecloud.sci.TargetABC` instances
:param path: The path on the target to check for existence.
:param path_sep: The path separator of the device
:return: A dictionary where the key is a device id and the value is either an :class:`~.ErrorInfo` if there
was a problem with the operation or a boolean with the existence status of the path on that device | entailment |
def get_devices(self, condition=None, page_size=1000):
"""Iterates over each :class:`Device` for this device cloud account
Examples::
# get a list of all devices
all_devices = list(dc.devicecore.get_devices())
# build a mapping of devices by their vendor id using a
# dict comprehension
devices = dc.devicecore.get_devices() # generator object
devs_by_vendor_id = {d.get_vendor_id(): d for d in devices}
# iterate over all devices in 'minnesota' group and
# print the device mac and location
for device in dc.get_devices(group_path == 'minnesota'):
print "%s at %s" % (device.get_mac(), device.get_location())
:param condition: An :class:`.Expression` which defines the condition
which must be matched on the devicecore. If unspecified,
an iterator over all devices will be returned.
:param int page_size: The number of results to fetch in a
single page. In general, the default will suffice.
:returns: Iterator over each :class:`~Device` in this device cloud
account in the form of a generator object.
"""
condition = validate_type(condition, type(None), Expression, *six.string_types)
page_size = validate_type(page_size, *six.integer_types)
params = {"embed": "true"}
if condition is not None:
params["condition"] = condition.compile()
for device_json in self._conn.iter_json_pages("/ws/DeviceCore", page_size=page_size, **params):
yield Device(self._conn, self._sci, device_json) | Iterates over each :class:`Device` for this device cloud account
Examples::
# get a list of all devices
all_devices = list(dc.devicecore.get_devices())
# build a mapping of devices by their vendor id using a
# dict comprehension
devices = dc.devicecore.get_devices() # generator object
devs_by_vendor_id = {d.get_vendor_id(): d for d in devices}
# iterate over all devices in 'minnesota' group and
# print the device mac and location
for device in dc.get_devices(group_path == 'minnesota'):
print "%s at %s" % (device.get_mac(), device.get_location())
:param condition: An :class:`.Expression` which defines the condition
which must be matched on the devicecore. If unspecified,
an iterator over all devices will be returned.
:param int page_size: The number of results to fetch in a
single page. In general, the default will suffice.
:returns: Iterator over each :class:`~Device` in this device cloud
account in the form of a generator object. | entailment |
def get_group_tree_root(self, page_size=1000):
r"""Return the root group for this accounts' group tree
This will return the root group for this tree but with all links
between nodes (i.e. children starting from root) populated.
Examples::
# print the group hierarchy to stdout
dc.devicecore.get_group_tree_root().print_subtree()
# gather statistics about devices in each group including
# the count from its subgroups (recursively)
#
# This also shows how you can go from a group reference to devices
# for that particular group.
stats = {} # group -> devices count including children
def count_nodes(group):
count_for_this_node = \
len(list(dc.devicecore.get_devices(group_path == group.get_path())))
subnode_count = 0
for child in group.get_children():
subnode_count += count_nodes(child)
total = count_for_this_node + subnode_count
stats[group] = total
return total
count_nodes(dc.devicecore.get_group_tree_root())
:param int page_size: The number of results to fetch in a
single page. In general, the default will suffice.
:returns: The root group for this device cloud accounts group
hierarchy.
"""
# first pass, build mapping
group_map = {} # map id -> group
page_size = validate_type(page_size, *six.integer_types)
for group in self.get_groups(page_size=page_size):
group_map[group.get_id()] = group
# second pass, find root and populate list of children for each node
root = None
for group_id, group in group_map.items():
if group.is_root():
root = group
else:
parent = group_map[group.get_parent_id()]
parent.add_child(group)
return root | r"""Return the root group for this accounts' group tree
This will return the root group for this tree but with all links
between nodes (i.e. children starting from root) populated.
Examples::
# print the group hierarchy to stdout
dc.devicecore.get_group_tree_root().print_subtree()
# gather statistics about devices in each group including
# the count from its subgroups (recursively)
#
# This also shows how you can go from a group reference to devices
# for that particular group.
stats = {} # group -> devices count including children
def count_nodes(group):
count_for_this_node = \
len(list(dc.devicecore.get_devices(group_path == group.get_path())))
subnode_count = 0
for child in group.get_children():
subnode_count += count_nodes(child)
total = count_for_this_node + subnode_count
stats[group] = total
return total
count_nodes(dc.devicecore.get_group_tree_root())
:param int page_size: The number of results to fetch in a
single page. In general, the default will suffice.
:returns: The root group for this device cloud accounts group
hierarchy. | entailment |
def get_groups(self, condition=None, page_size=1000):
"""Return an iterator over all groups in this device cloud account
Optionally, a condition can be specified to limit the number of
groups returned.
Examples::
# Get all groups and print information about them
for group in dc.devicecore.get_groups():
print group
# Iterate over all devices which are in a group with a specific
# ID.
group = dc.devicore.get_groups(group_id == 123)[0]
for device in dc.devicecore.get_devices(group_path == group.get_path()):
print device.get_mac()
:param condition: A condition to use when filtering the results set. If
unspecified, all groups will be returned.
:param int page_size: The number of results to fetch in a
single page. In general, the default will suffice.
:returns: Generator over the groups in this device cloud account. No
guarantees about the order of results is provided and child links
between nodes will not be populated.
"""
query_kwargs = {}
if condition is not None:
query_kwargs["condition"] = condition.compile()
for group_data in self._conn.iter_json_pages("/ws/Group", page_size=page_size, **query_kwargs):
yield Group.from_json(group_data) | Return an iterator over all groups in this device cloud account
Optionally, a condition can be specified to limit the number of
groups returned.
Examples::
# Get all groups and print information about them
for group in dc.devicecore.get_groups():
print group
# Iterate over all devices which are in a group with a specific
# ID.
group = dc.devicore.get_groups(group_id == 123)[0]
for device in dc.devicecore.get_devices(group_path == group.get_path()):
print device.get_mac()
:param condition: A condition to use when filtering the results set. If
unspecified, all groups will be returned.
:param int page_size: The number of results to fetch in a
single page. In general, the default will suffice.
:returns: Generator over the groups in this device cloud account. No
guarantees about the order of results is provided and child links
between nodes will not be populated. | entailment |
def provision_devices(self, devices):
"""Provision multiple devices with a single API call
This method takes an iterable of dictionaries where the values in the dictionary are
expected to match the arguments of a call to :meth:`provision_device`. The
contents of each dictionary will be validated.
:param list devices: An iterable of dictionaries each containing information about
a device to be provision. The form of the dictionary should match the keyword
arguments taken by :meth:`provision_device`.
:raises DeviceCloudHttpException: If there is an unexpected error reported by Device Cloud.
:raises ValueError: If any input fields are known to have a bad form.
:return: A list of dictionaries in the form described for :meth:`provision_device` in the
order matching the requested device list. Note that it is possible for there to
be mixed success and error when provisioning multiple devices.
"""
# Validate all the input for each device provided
sio = six.StringIO()
def write_tag(tag, val):
sio.write("<{tag}>{val}</{tag}>".format(tag=tag, val=val))
def maybe_write_element(tag, val):
if val is not None:
write_tag(tag, val)
return True
return False
sio.write("<list>")
for d in devices:
sio.write("<DeviceCore>")
mac_address = d.get("mac_address")
device_id = d.get("device_id")
imei = d.get("imei")
if mac_address is not None:
write_tag("devMac", mac_address)
elif device_id is not None:
write_tag("devConnectwareId", device_id)
elif imei is not None:
write_tag("devCellularModemId", imei)
else:
raise ValueError("mac_address, device_id, or imei must be provided for device %r" % d)
# Write optional elements if present.
maybe_write_element("grpPath", d.get("group_path"))
maybe_write_element("dpUserMetaData", d.get("metadata"))
maybe_write_element("dpTags", d.get("tags"))
maybe_write_element("dpMapLong", d.get("map_long"))
maybe_write_element("dpMapLat", d.get("map_lat"))
maybe_write_element("dpContact", d.get("contact"))
maybe_write_element("dpDescription", d.get("description"))
sio.write("</DeviceCore>")
sio.write("</list>")
# Send the request, set the Accept XML as a nicety
results = []
response = self._conn.post("/ws/DeviceCore", sio.getvalue(), headers={'Accept': 'application/xml'})
root = ET.fromstring(response.content) # <result> tag is root of <list> response
for child in root:
if child.tag.lower() == "location":
results.append({
"error": False,
"error_msg": None,
"location": child.text
})
else: # we expect "error" but handle generically
results.append({
"error": True,
"location": None,
"error_msg": child.text
})
return results | Provision multiple devices with a single API call
This method takes an iterable of dictionaries where the values in the dictionary are
expected to match the arguments of a call to :meth:`provision_device`. The
contents of each dictionary will be validated.
:param list devices: An iterable of dictionaries each containing information about
a device to be provision. The form of the dictionary should match the keyword
arguments taken by :meth:`provision_device`.
:raises DeviceCloudHttpException: If there is an unexpected error reported by Device Cloud.
:raises ValueError: If any input fields are known to have a bad form.
:return: A list of dictionaries in the form described for :meth:`provision_device` in the
order matching the requested device list. Note that it is possible for there to
be mixed success and error when provisioning multiple devices. | entailment |
def from_json(cls, json_data):
"""Build and return a new Group object from json data (used internally)"""
# Example Data:
# { "grpId": "11817", "grpName": "7603_Digi", "grpDescription": "7603_Digi root group",
# "grpPath": "\/7603_Digi\/", "grpParentId": "1"}
return cls(
group_id=json_data["grpId"],
name=json_data["grpName"],
description=json_data.get("grpDescription", ""),
path=json_data["grpPath"],
parent_id=json_data["grpParentId"],
) | Build and return a new Group object from json data (used internally) | entailment |
def print_subtree(self, fobj=sys.stdout, level=0):
"""Print this group node and the subtree rooted at it"""
fobj.write("{}{!r}\n".format(" " * (level * 2), self))
for child in self.get_children():
child.print_subtree(fobj, level + 1) | Print this group node and the subtree rooted at it | entailment |
def get_device_json(self, use_cached=True):
"""Get the JSON metadata for this device as a python data structure
If ``use_cached`` is not True, then a web services request will be made
synchronously in order to get the latest device metatdata. This will
update the cached data for this device.
"""
if not use_cached:
devicecore_data = self._conn.get_json(
"/ws/DeviceCore/{}".format(self.get_device_id()))
self._device_json = devicecore_data["items"][0] # should only be 1
return self._device_json | Get the JSON metadata for this device as a python data structure
If ``use_cached`` is not True, then a web services request will be made
synchronously in order to get the latest device metatdata. This will
update the cached data for this device. | entailment |
def get_tags(self, use_cached=True):
"""Get the list of tags for this device"""
device_json = self.get_device_json(use_cached)
potential_tags = device_json.get("dpTags")
if potential_tags:
return list(filter(None, potential_tags.split(",")))
else:
return [] | Get the list of tags for this device | entailment |
def is_connected(self, use_cached=True):
"""Return True if the device is currrently connect and False if not"""
device_json = self.get_device_json(use_cached)
return int(device_json.get("dpConnectionStatus")) > 0 | Return True if the device is currrently connect and False if not | entailment |
def get_connectware_id(self, use_cached=True):
"""Get the connectware id of this device (primary key)"""
device_json = self.get_device_json(use_cached)
return device_json.get("devConnectwareId") | Get the connectware id of this device (primary key) | entailment |
def get_device_id(self, use_cached=True):
"""Get this device's device id"""
device_json = self.get_device_json(use_cached)
return device_json["id"].get("devId") | Get this device's device id | entailment |
def get_ip(self, use_cached=True):
"""Get the last known IP of this device"""
device_json = self.get_device_json(use_cached)
return device_json.get("dpLastKnownIp") | Get the last known IP of this device | entailment |
def get_mac(self, use_cached=True):
"""Get the MAC address of this device"""
device_json = self.get_device_json(use_cached)
return device_json.get("devMac") | Get the MAC address of this device | entailment |
def get_mac_last4(self, use_cached=True):
"""Get the last 4 characters in the device mac address hex (e.g. 00:40:9D:58:17:5B -> 175B)
This is useful for use as a short reference to the device. It is not guaranteed to
be unique (obviously) but will often be if you don't have too many devices.
"""
chunks = self.get_mac(use_cached).split(":")
mac4 = "%s%s" % (chunks[-2], chunks[-1])
return mac4.upper() | Get the last 4 characters in the device mac address hex (e.g. 00:40:9D:58:17:5B -> 175B)
This is useful for use as a short reference to the device. It is not guaranteed to
be unique (obviously) but will often be if you don't have too many devices. | entailment |
def get_registration_dt(self, use_cached=True):
"""Get the datetime of when this device was added to Device Cloud"""
device_json = self.get_device_json(use_cached)
start_date_iso8601 = device_json.get("devRecordStartDate")
if start_date_iso8601:
return iso8601_to_dt(start_date_iso8601)
else:
return None | Get the datetime of when this device was added to Device Cloud | entailment |
def get_latlon(self, use_cached=True):
"""Get a tuple with device latitude and longitude... these may be None"""
device_json = self.get_device_json(use_cached)
lat = device_json.get("dpMapLat")
lon = device_json.get("dpMapLong")
return (float(lat) if lat else None,
float(lon) if lon else None, ) | Get a tuple with device latitude and longitude... these may be None | entailment |
def add_to_group(self, group_path):
"""Add a device to a group, if the group doesn't exist it is created
:param group_path: Path or "name" of the group
"""
if self.get_group_path() != group_path:
post_data = ADD_GROUP_TEMPLATE.format(connectware_id=self.get_connectware_id(),
group_path=group_path)
self._conn.put('/ws/DeviceCore', post_data)
# Invalidate cache
self._device_json = None | Add a device to a group, if the group doesn't exist it is created
:param group_path: Path or "name" of the group | entailment |
def add_tag(self, new_tags):
"""Add a tag to existing device tags. This method will not add a duplicate, if already in the list.
:param new_tags: the tag(s) to be added. new_tags can be a comma-separated string or list
"""
tags = self.get_tags()
orig_tag_cnt = len(tags)
# print("self.get_tags() {}".format(tags))
if isinstance(new_tags, six.string_types):
new_tags = new_tags.split(',')
# print("spliting tags :: {}".format(new_tags))
for tag in new_tags:
if not tag in tags:
tags.append(tag.strip())
if len(tags) > orig_tag_cnt:
xml_tags = escape(",".join(tags))
post_data = TAGS_TEMPLATE.format(connectware_id=self.get_connectware_id(),
tags=xml_tags)
self._conn.put('/ws/DeviceCore', post_data)
# Invalidate cache
self._device_json = None | Add a tag to existing device tags. This method will not add a duplicate, if already in the list.
:param new_tags: the tag(s) to be added. new_tags can be a comma-separated string or list | entailment |
def remove_tag(self, tag):
"""Remove tag from existing device tags
:param tag: the tag to be removed from the list
:raises ValueError: If tag does not exist in list
"""
tags = self.get_tags()
tags.remove(tag)
post_data = TAGS_TEMPLATE.format(connectware_id=self.get_connectware_id(),
tags=escape(",".join(tags)))
self._conn.put('/ws/DeviceCore', post_data)
# Invalidate cache
self._device_json = None | Remove tag from existing device tags
:param tag: the tag to be removed from the list
:raises ValueError: If tag does not exist in list | entailment |
def hostname(self):
"""Get the hostname that this connection is associated with"""
from six.moves.urllib.parse import urlparse
return urlparse(self._base_url).netloc.split(':', 1)[0] | Get the hostname that this connection is associated with | entailment |
def iter_json_pages(self, path, page_size=1000, **params):
"""Return an iterator over JSON items from a paginated resource
Legacy resources (prior to V1) implemented a common paging interfaces for
several different resources. This method handles the details of iterating
over the paged result set, yielding only the JSON data for each item
within the aggregate resource.
:param str path: The base path to the resource being requested (e.g. /ws/Group)
:param int page_size: The number of items that should be requested for each page. A larger
page_size may mean fewer HTTP requests but could also increase the time to get a first
result back from Device Cloud.
:param params: These are additional query parameters that should be sent with each
request to Device Cloud.
"""
path = validate_type(path, *six.string_types)
page_size = validate_type(page_size, *six.integer_types)
offset = 0
remaining_size = 1 # just needs to be non-zero
while remaining_size > 0:
reqparams = {"start": offset, "size": page_size}
reqparams.update(params)
response = self.get_json(path, params=reqparams)
offset += page_size
remaining_size = int(response.get("remainingSize", "0"))
for item_json in response.get("items", []):
yield item_json | Return an iterator over JSON items from a paginated resource
Legacy resources (prior to V1) implemented a common paging interfaces for
several different resources. This method handles the details of iterating
over the paged result set, yielding only the JSON data for each item
within the aggregate resource.
:param str path: The base path to the resource being requested (e.g. /ws/Group)
:param int page_size: The number of items that should be requested for each page. A larger
page_size may mean fewer HTTP requests but could also increase the time to get a first
result back from Device Cloud.
:param params: These are additional query parameters that should be sent with each
request to Device Cloud. | entailment |
def get(self, path, **kwargs):
"""Perform an HTTP GET request of the specified path in Device Cloud
Make an HTTP GET request against Device Cloud with this accounts
credentials and base url. This method uses the
`requests <http://docs.python-requests.org/en/latest/>`_ library
`request method <http://docs.python-requests.org/en/latest/api/#requests.request>`_
and all keyword arguments will be passed on to that method.
:param str path: Device Cloud path to GET
:param int retries: The number of times the request should be retried if an
unsuccessful response is received. Most likely, you should leave this at 0.
:raises DeviceCloudHttpException: if a non-success response to the request is received
from Device Cloud
:returns: A requests ``Response`` object
"""
url = self._make_url(path)
return self._make_request("GET", url, **kwargs) | Perform an HTTP GET request of the specified path in Device Cloud
Make an HTTP GET request against Device Cloud with this accounts
credentials and base url. This method uses the
`requests <http://docs.python-requests.org/en/latest/>`_ library
`request method <http://docs.python-requests.org/en/latest/api/#requests.request>`_
and all keyword arguments will be passed on to that method.
:param str path: Device Cloud path to GET
:param int retries: The number of times the request should be retried if an
unsuccessful response is received. Most likely, you should leave this at 0.
:raises DeviceCloudHttpException: if a non-success response to the request is received
from Device Cloud
:returns: A requests ``Response`` object | entailment |
def get_json(self, path, **kwargs):
"""Perform an HTTP GET request with JSON headers of the specified path against Device Cloud
Make an HTTP GET request against Device Cloud with this accounts
credentials and base url. This method uses the
`requests <http://docs.python-requests.org/en/latest/>`_ library
`request method <http://docs.python-requests.org/en/latest/api/#requests.request>`_
and all keyword arguments will be passed on to that method.
This method will automatically add the ``Accept: application/json`` and parse the
JSON response from Device Cloud.
:param str path: Device Cloud path to GET
:param int retries: The number of times the request should be retried if an
unsuccessful response is received. Most likely, you should leave this at 0.
:raises DeviceCloudHttpException: if a non-success response to the request is received
from Device Cloud
:returns: A python data structure containing the results of calling ``json.loads`` on the
body of the response from Device Cloud.
"""
url = self._make_url(path)
headers = kwargs.setdefault('headers', {})
headers.update({'Accept': 'application/json'})
response = self._make_request("GET", url, **kwargs)
return json.loads(response.text) | Perform an HTTP GET request with JSON headers of the specified path against Device Cloud
Make an HTTP GET request against Device Cloud with this accounts
credentials and base url. This method uses the
`requests <http://docs.python-requests.org/en/latest/>`_ library
`request method <http://docs.python-requests.org/en/latest/api/#requests.request>`_
and all keyword arguments will be passed on to that method.
This method will automatically add the ``Accept: application/json`` and parse the
JSON response from Device Cloud.
:param str path: Device Cloud path to GET
:param int retries: The number of times the request should be retried if an
unsuccessful response is received. Most likely, you should leave this at 0.
:raises DeviceCloudHttpException: if a non-success response to the request is received
from Device Cloud
:returns: A python data structure containing the results of calling ``json.loads`` on the
body of the response from Device Cloud. | entailment |
def post(self, path, data, **kwargs):
"""Perform an HTTP POST request of the specified path in Device Cloud
Make an HTTP POST request against Device Cloud with this accounts
credentials and base url. This method uses the
`requests <http://docs.python-requests.org/en/latest/>`_ library
`request method <http://docs.python-requests.org/en/latest/api/#requests.request>`_
and all keyword arguments will be passed on to that method.
:param str path: Device Cloud path to POST
:param int retries: The number of times the request should be retried if an
unsuccessful response is received. Most likely, you should leave this at 0.
:param data: The data to be posted in the body of the POST request (see docs for
``requests.post``
:raises DeviceCloudHttpException: if a non-success response to the request is received
from Device Cloud
:returns: A requests ``Response`` object
"""
url = self._make_url(path)
return self._make_request("POST", url, data=data, **kwargs) | Perform an HTTP POST request of the specified path in Device Cloud
Make an HTTP POST request against Device Cloud with this accounts
credentials and base url. This method uses the
`requests <http://docs.python-requests.org/en/latest/>`_ library
`request method <http://docs.python-requests.org/en/latest/api/#requests.request>`_
and all keyword arguments will be passed on to that method.
:param str path: Device Cloud path to POST
:param int retries: The number of times the request should be retried if an
unsuccessful response is received. Most likely, you should leave this at 0.
:param data: The data to be posted in the body of the POST request (see docs for
``requests.post``
:raises DeviceCloudHttpException: if a non-success response to the request is received
from Device Cloud
:returns: A requests ``Response`` object | entailment |
def put(self, path, data, **kwargs):
"""Perform an HTTP PUT request of the specified path in Device Cloud
Make an HTTP PUT request against Device Cloud with this accounts
credentials and base url. This method uses the
`requests <http://docs.python-requests.org/en/latest/>`_ library
`request method <http://docs.python-requests.org/en/latest/api/#requests.request>`_
and all keyword arguments will be passed on to that method.
:param str path: Device Cloud path to PUT
:param int retries: The number of times the request should be retried if an
unsuccessful response is received. Most likely, you should leave this at 0.
:param data: The data to be posted in the body of the POST request (see docs for
``requests.post``
:raises DeviceCloudHttpException: if a non-success response to the request is received
from Device Cloud
:returns: A requests ``Response`` object
"""
url = self._make_url(path)
return self._make_request("PUT", url, data=data, **kwargs) | Perform an HTTP PUT request of the specified path in Device Cloud
Make an HTTP PUT request against Device Cloud with this accounts
credentials and base url. This method uses the
`requests <http://docs.python-requests.org/en/latest/>`_ library
`request method <http://docs.python-requests.org/en/latest/api/#requests.request>`_
and all keyword arguments will be passed on to that method.
:param str path: Device Cloud path to PUT
:param int retries: The number of times the request should be retried if an
unsuccessful response is received. Most likely, you should leave this at 0.
:param data: The data to be posted in the body of the POST request (see docs for
``requests.post``
:raises DeviceCloudHttpException: if a non-success response to the request is received
from Device Cloud
:returns: A requests ``Response`` object | entailment |
def delete(self, path, retries=DEFAULT_THROTTLE_RETRIES, **kwargs):
"""Perform an HTTP DELETE request of the specified path in Device Cloud
Make an HTTP DELETE request against Device Cloud with this accounts
credentials and base url. This method uses the
`requests <http://docs.python-requests.org/en/latest/>`_ library
`request method <http://docs.python-requests.org/en/latest/api/#requests.request>`_
and all keyword arguments will be passed on to that method.
:param str path: Device Cloud path to DELETE
:param int retries: The number of times the request should be retried if an
unsuccessful response is received. Most likely, you should leave this at 0.
:raises DeviceCloudHttpException: if a non-success response to the request is received
from Device Cloud
:returns: A requests ``Response`` object
"""
url = self._make_url(path)
return self._make_request("DELETE", url, **kwargs) | Perform an HTTP DELETE request of the specified path in Device Cloud
Make an HTTP DELETE request against Device Cloud with this accounts
credentials and base url. This method uses the
`requests <http://docs.python-requests.org/en/latest/>`_ library
`request method <http://docs.python-requests.org/en/latest/api/#requests.request>`_
and all keyword arguments will be passed on to that method.
:param str path: Device Cloud path to DELETE
:param int retries: The number of times the request should be retried if an
unsuccessful response is received. Most likely, you should leave this at 0.
:raises DeviceCloudHttpException: if a non-success response to the request is received
from Device Cloud
:returns: A requests ``Response`` object | entailment |
def streams(self):
"""Property providing access to the :class:`.StreamsAPI`"""
if self._streams_api is None:
self._streams_api = self.get_streams_api()
return self._streams_api | Property providing access to the :class:`.StreamsAPI` | entailment |
def filedata(self):
"""Property providing access to the :class:`.FileDataAPI`"""
if self._filedata_api is None:
self._filedata_api = self.get_filedata_api()
return self._filedata_api | Property providing access to the :class:`.FileDataAPI` | entailment |
def devicecore(self):
"""Property providing access to the :class:`.DeviceCoreAPI`"""
if self._devicecore_api is None:
self._devicecore_api = self.get_devicecore_api()
return self._devicecore_api | Property providing access to the :class:`.DeviceCoreAPI` | entailment |
def sci(self):
"""Property providing access to the :class:`.ServerCommandInterfaceAPI`"""
if self._sci_api is None:
self._sci_api = self.get_sci_api()
return self._sci_api | Property providing access to the :class:`.ServerCommandInterfaceAPI` | entailment |
def file_system_service(self):
"""Property providing access to the :class:`.FileSystemServiceAPI`"""
if self._fss_api is None:
self._fss_api = self.get_fss_api()
return self._fss_api | Property providing access to the :class:`.FileSystemServiceAPI` | entailment |
def monitor(self):
"""Property providing access to the :class:`.MonitorAPI`"""
if self._monitor_api is None:
self._monitor_api = self.get_monitor_api()
return self._monitor_api | Property providing access to the :class:`.MonitorAPI` | entailment |
def get_devicecore_api(self):
"""Returns a :class:`.DeviceCoreAPI` bound to this device cloud instance
This provides access to the same API as :attr:`.DeviceCloud.devicecore` but will create
a new object (with a new cache) each time called.
:return: devicecore API object bound to this device cloud account
:rtype: :class:`.DeviceCoreAPI`
"""
from devicecloud.devicecore import DeviceCoreAPI
return DeviceCoreAPI(self._conn, self.get_sci_api()) | Returns a :class:`.DeviceCoreAPI` bound to this device cloud instance
This provides access to the same API as :attr:`.DeviceCloud.devicecore` but will create
a new object (with a new cache) each time called.
:return: devicecore API object bound to this device cloud account
:rtype: :class:`.DeviceCoreAPI` | entailment |
def get_async_job(self, job_id):
"""Query an asynchronous SCI job by ID
This is useful if the job was not created with send_sci_async().
:param int job_id: The job ID to query
:returns: The SCI response from GETting the job information
"""
uri = "/ws/sci/{0}".format(job_id)
# TODO: do parsing here?
return self._conn.get(uri) | Query an asynchronous SCI job by ID
This is useful if the job was not created with send_sci_async().
:param int job_id: The job ID to query
:returns: The SCI response from GETting the job information | entailment |
def send_sci_async(self, operation, target, payload, **sci_options):
"""Send an asynchronous SCI request, and wraps the job in an object
to manage it
:param str operation: The operation is one of {send_message, update_firmware, disconnect, query_firmware_targets,
file_system, data_service, and reboot}
:param target: The device(s) to be targeted with this request
:type target: :class:`~.TargetABC` or list of :class:`~.TargetABC` instances
TODO: document other params
"""
sci_options['synchronous'] = False
resp = self.send_sci(operation, target, payload, **sci_options)
dom = ET.fromstring(resp.content)
job_element = dom.find('.//jobId')
if job_element is None:
return
job_id = int(job_element.text)
return AsyncRequestProxy(job_id, self._conn) | Send an asynchronous SCI request, and wraps the job in an object
to manage it
:param str operation: The operation is one of {send_message, update_firmware, disconnect, query_firmware_targets,
file_system, data_service, and reboot}
:param target: The device(s) to be targeted with this request
:type target: :class:`~.TargetABC` or list of :class:`~.TargetABC` instances
TODO: document other params | entailment |
def send_sci(self, operation, target, payload, reply=None, synchronous=None, sync_timeout=None,
cache=None, allow_offline=None, wait_for_reconnect=None):
"""Send SCI request to 1 or more targets
:param str operation: The operation is one of {send_message, update_firmware, disconnect, query_firmware_targets,
file_system, data_service, and reboot}
:param target: The device(s) to be targeted with this request
:type target: :class:`~.TargetABC` or list of :class:`~.TargetABC` instances
TODO: document other params
"""
if not isinstance(payload, six.string_types) and not isinstance(payload, six.binary_type):
raise TypeError("payload is required to be a string or bytes")
# validate targets and bulid targets xml section
try:
iter(target)
targets = target
except TypeError:
targets = [target, ]
if not all(isinstance(t, TargetABC) for t in targets):
raise TypeError("Target(s) must each be instances of TargetABC")
targets_xml = "".join(t.to_xml() for t in targets)
# reply argument
if not isinstance(reply, (type(None), six.string_types)):
raise TypeError("reply must be either None or a string")
if reply is not None:
reply_xml = ' reply="{}"'.format(reply)
else:
reply_xml = ''
# synchronous argument
if not isinstance(synchronous, (type(None), bool)):
raise TypeError("synchronous expected to be either None or a boolean")
if synchronous is not None:
synchronous_xml = ' synchronous="{}"'.format('true' if synchronous else 'false')
else:
synchronous_xml = ''
# sync_timeout argument
# TODO: What units is syncTimeout in? seconds?
if sync_timeout is not None and not isinstance(sync_timeout, six.integer_types):
raise TypeError("sync_timeout expected to either be None or a number")
if sync_timeout is not None:
sync_timeout_xml = ' syncTimeout="{}"'.format(sync_timeout)
else:
sync_timeout_xml = ''
# cache argument
if not isinstance(cache, (type(None), bool)):
raise TypeError("cache expected to either be None or a boolean")
if cache is not None:
cache_xml = ' cache="{}"'.format('true' if cache else 'false')
else:
cache_xml = ''
# allow_offline argument
if not isinstance(allow_offline, (type(None), bool)):
raise TypeError("allow_offline is expected to be either None or a boolean")
if allow_offline is not None:
allow_offline_xml = ' allowOffline="{}"'.format('true' if allow_offline else 'false')
else:
allow_offline_xml = ''
# wait_for_reconnect argument
if not isinstance(wait_for_reconnect, (type(None), bool)):
raise TypeError("wait_for_reconnect expected to be either None or a boolean")
if wait_for_reconnect is not None:
wait_for_reconnect_xml = ' waitForReconnect="{}"'.format('true' if wait_for_reconnect else 'false')
else:
wait_for_reconnect_xml = ''
full_request = SCI_TEMPLATE.format(
operation=operation,
targets=targets_xml,
reply=reply_xml,
synchronous=synchronous_xml,
sync_timeout=sync_timeout_xml,
cache=cache_xml,
allow_offline=allow_offline_xml,
wait_for_reconnect=wait_for_reconnect_xml,
payload=payload
)
# TODO: do parsing here?
return self._conn.post("/ws/sci", full_request) | Send SCI request to 1 or more targets
:param str operation: The operation is one of {send_message, update_firmware, disconnect, query_firmware_targets,
file_system, data_service, and reboot}
:param target: The device(s) to be targeted with this request
:type target: :class:`~.TargetABC` or list of :class:`~.TargetABC` instances
TODO: document other params | entailment |
def conditional_write(strm, fmt, value, *args, **kwargs):
"""Write to stream using fmt and value if value is not None"""
if value is not None:
strm.write(fmt.format(value, *args, **kwargs)) | Write to stream using fmt and value if value is not None | entailment |
def iso8601_to_dt(iso8601):
"""Given an ISO8601 string as returned by Device Cloud, convert to a datetime object"""
# We could just use arrow.get() but that is more permissive than we actually want.
# Internal (but still public) to arrow is the actual parser where we can be
# a bit more specific
parser = DateTimeParser()
try:
arrow_dt = arrow.Arrow.fromdatetime(parser.parse_iso(iso8601))
return arrow_dt.to('utc').datetime
except ParserError as pe:
raise ValueError("Provided was not a valid ISO8601 string: %r" % pe) | Given an ISO8601 string as returned by Device Cloud, convert to a datetime object | entailment |
def to_none_or_dt(input):
"""Convert ``input`` to either None or a datetime object
If the input is None, None will be returned.
If the input is a datetime object, it will be converted to a datetime
object with UTC timezone info. If the datetime object is naive, then
this method will assume the object is specified according to UTC and
not local or some other timezone.
If the input to the function is a string, this method will attempt to
parse the input as an ISO-8601 formatted string.
:param input: Input data (expected to be either str, None, or datetime object)
:return: datetime object from input or None if already None
:rtype: datetime or None
"""
if input is None:
return input
elif isinstance(input, datetime.datetime):
arrow_dt = arrow.Arrow.fromdatetime(input, input.tzinfo or 'utc')
return arrow_dt.to('utc').datetime
if isinstance(input, six.string_types):
# try to convert from ISO8601
return iso8601_to_dt(input)
else:
raise TypeError("Not a string, NoneType, or datetime object") | Convert ``input`` to either None or a datetime object
If the input is None, None will be returned.
If the input is a datetime object, it will be converted to a datetime
object with UTC timezone info. If the datetime object is naive, then
this method will assume the object is specified according to UTC and
not local or some other timezone.
If the input to the function is a string, this method will attempt to
parse the input as an ISO-8601 formatted string.
:param input: Input data (expected to be either str, None, or datetime object)
:return: datetime object from input or None if already None
:rtype: datetime or None | entailment |
def isoformat(dt):
"""Return an ISO-8601 formatted string from the provided datetime object"""
if not isinstance(dt, datetime.datetime):
raise TypeError("Must provide datetime.datetime object to isoformat")
if dt.tzinfo is None:
raise ValueError("naive datetime objects are not allowed beyond the library boundaries")
return dt.isoformat().replace("+00:00", "Z") | Return an ISO-8601 formatted string from the provided datetime object | entailment |
def get_filedata(self, condition=None, page_size=1000):
"""Return a generator over all results matching the provided condition
:param condition: An :class:`.Expression` which defines the condition
which must be matched on the filedata that will be retrieved from
file data store. If a condition is unspecified, the following condition
will be used ``fd_path == '~/'``. This condition will match all file
data in this accounts "home" directory (a sensible root).
:type condition: :class:`.Expression` or None
:param int page_size: The number of results to fetch in a single page. Regardless
of the size specified, :meth:`.get_filedata` will continue to fetch pages
and yield results until all items have been fetched.
:return: Generator yielding :class:`.FileDataObject` instances matching the
provided conditions.
"""
condition = validate_type(condition, type(None), Expression, *six.string_types)
page_size = validate_type(page_size, *six.integer_types)
if condition is None:
condition = (fd_path == "~/") # home directory
params = {"embed": "true", "condition": condition.compile()}
for fd_json in self._conn.iter_json_pages("/ws/FileData", page_size=page_size, **params):
yield FileDataObject.from_json(self, fd_json) | Return a generator over all results matching the provided condition
:param condition: An :class:`.Expression` which defines the condition
which must be matched on the filedata that will be retrieved from
file data store. If a condition is unspecified, the following condition
will be used ``fd_path == '~/'``. This condition will match all file
data in this accounts "home" directory (a sensible root).
:type condition: :class:`.Expression` or None
:param int page_size: The number of results to fetch in a single page. Regardless
of the size specified, :meth:`.get_filedata` will continue to fetch pages
and yield results until all items have been fetched.
:return: Generator yielding :class:`.FileDataObject` instances matching the
provided conditions. | entailment |
def write_file(self, path, name, data, content_type=None, archive=False,
raw=False):
"""Write a file to the file data store at the given path
:param str path: The path (directory) into which the file should be written.
:param str name: The name of the file to be written.
:param data: The binary data that should be written into the file.
:type data: str (Python2) or bytes (Python3)
:param content_type: The content type for the data being written to the file. May
be left unspecified.
:type content_type: str or None
:param bool archive: If true, history will be retained for various revisions of this
file. If this is not required, leave as false.
:param bool raw: If true, skip the FileData XML headers (necessary for binary files)
"""
path = validate_type(path, *six.string_types)
name = validate_type(name, *six.string_types)
data = validate_type(data, six.binary_type)
content_type = validate_type(content_type, type(None), *six.string_types)
archive_str = "true" if validate_type(archive, bool) else "false"
if not path.startswith("/"):
path = "/" + path
if not path.endswith("/"):
path += "/"
name = name.lstrip("/")
sio = six.moves.StringIO()
if not raw:
if six.PY3:
base64_encoded_data = base64.encodebytes(data).decode('utf-8')
else:
base64_encoded_data = base64.encodestring(data)
sio.write("<FileData>")
if content_type is not None:
sio.write("<fdContentType>{}</fdContentType>".format(content_type))
sio.write("<fdType>file</fdType>")
sio.write("<fdData>{}</fdData>".format(base64_encoded_data))
sio.write("<fdArchive>{}</fdArchive>".format(archive_str))
sio.write("</FileData>")
else:
sio.write(data)
params = {
"type": "file",
"archive": archive_str
}
self._conn.put(
"/ws/FileData{path}{name}".format(path=path, name=name),
sio.getvalue(),
params=params) | Write a file to the file data store at the given path
:param str path: The path (directory) into which the file should be written.
:param str name: The name of the file to be written.
:param data: The binary data that should be written into the file.
:type data: str (Python2) or bytes (Python3)
:param content_type: The content type for the data being written to the file. May
be left unspecified.
:type content_type: str or None
:param bool archive: If true, history will be retained for various revisions of this
file. If this is not required, leave as false.
:param bool raw: If true, skip the FileData XML headers (necessary for binary files) | entailment |
def delete_file(self, path):
"""Delete a file or directory from the filedata store
This method removes a file or directory (recursively) from
the filedata store.
:param path: The path of the file or directory to remove
from the file data store.
"""
path = validate_type(path, *six.string_types)
if not path.startswith("/"):
path = "/" + path
self._conn.delete("/ws/FileData{path}".format(path=path)) | Delete a file or directory from the filedata store
This method removes a file or directory (recursively) from
the filedata store.
:param path: The path of the file or directory to remove
from the file data store. | entailment |
def walk(self, root="~/"):
"""Emulation of os.walk behavior against Device Cloud filedata store
This method will yield tuples in the form ``(dirpath, FileDataDirectory's, FileData's)``
recursively in pre-order (depth first from top down).
:param str root: The root path from which the search should commence. By default, this
is the root directory for this device cloud account (~).
:return: Generator yielding 3-tuples of dirpath, directories, and files
:rtype: 3-tuple in form (dirpath, list of :class:`FileDataDirectory`, list of :class:`FileDataFile`)
"""
root = validate_type(root, *six.string_types)
directories = []
files = []
# fd_path is real picky
query_fd_path = root
if not query_fd_path.endswith("/"):
query_fd_path += "/"
for fd_object in self.get_filedata(fd_path == query_fd_path):
if fd_object.get_type() == "directory":
directories.append(fd_object)
else:
files.append(fd_object)
# Yield the walk results for this level of the tree
yield (root, directories, files)
# recurse on each directory and yield results up the chain
for directory in directories:
for dirpath, directories, files in self.walk(directory.get_full_path()):
yield (dirpath, directories, files) | Emulation of os.walk behavior against Device Cloud filedata store
This method will yield tuples in the form ``(dirpath, FileDataDirectory's, FileData's)``
recursively in pre-order (depth first from top down).
:param str root: The root path from which the search should commence. By default, this
is the root directory for this device cloud account (~).
:return: Generator yielding 3-tuples of dirpath, directories, and files
:rtype: 3-tuple in form (dirpath, list of :class:`FileDataDirectory`, list of :class:`FileDataFile`) | entailment |
def get_data(self):
"""Get the data associated with this filedata object
:returns: Data associated with this object or None if none exists
:rtype: str (Python2)/bytes (Python3) or None
"""
# NOTE: we assume that the "embed" option is used
base64_data = self._json_data.get("fdData")
if base64_data is None:
return None
else:
# need to convert to bytes() with python 3
return base64.decodestring(six.b(base64_data)) | Get the data associated with this filedata object
:returns: Data associated with this object or None if none exists
:rtype: str (Python2)/bytes (Python3) or None | entailment |
def write_file(self, *args, **kwargs):
"""Write a file into this directory
This method takes the same arguments as :meth:`.FileDataAPI.write_file`
with the exception of the ``path`` argument which is not needed here.
"""
return self._fdapi.write_file(self.get_path(), *args, **kwargs) | Write a file into this directory
This method takes the same arguments as :meth:`.FileDataAPI.write_file`
with the exception of the ``path`` argument which is not needed here. | entailment |
def create_tcp_monitor(self, topics, batch_size=1, batch_duration=0,
compression='gzip', format_type='json'):
"""Creates a TCP Monitor instance in Device Cloud for a given list of topics
:param topics: a string list of topics (e.g. ['DeviceCore[U]',
'FileDataCore']).
:param batch_size: How many Msgs received before sending data.
:param batch_duration: How long to wait before sending batch if it
does not exceed batch_size.
:param compression: Compression value (i.e. 'gzip').
:param format_type: What format server should send data in (i.e. 'xml' or 'json').
Returns an object of the created Monitor
"""
monitor_xml = """\
<Monitor>
<monTopic>{topics}</monTopic>
<monBatchSize>{batch_size}</monBatchSize>
<monFormatType>{format_type}</monFormatType>
<monTransportType>tcp</monTransportType>
<monCompression>{compression}</monCompression>
</Monitor>
""".format(
topics=','.join(topics),
batch_size=batch_size,
batch_duration=batch_duration,
format_type=format_type,
compression=compression,
)
monitor_xml = textwrap.dedent(monitor_xml)
response = self._conn.post("/ws/Monitor", monitor_xml)
location = ET.fromstring(response.text).find('.//location').text
monitor_id = int(location.split('/')[-1])
return TCPDeviceCloudMonitor(self._conn, monitor_id, self._tcp_client_manager) | Creates a TCP Monitor instance in Device Cloud for a given list of topics
:param topics: a string list of topics (e.g. ['DeviceCore[U]',
'FileDataCore']).
:param batch_size: How many Msgs received before sending data.
:param batch_duration: How long to wait before sending batch if it
does not exceed batch_size.
:param compression: Compression value (i.e. 'gzip').
:param format_type: What format server should send data in (i.e. 'xml' or 'json').
Returns an object of the created Monitor | entailment |
def create_http_monitor(self, topics, transport_url, transport_token=None, transport_method='PUT', connect_timeout=0,
response_timeout=0, batch_size=1, batch_duration=0, compression='none', format_type='json'):
"""Creates a HTTP Monitor instance in Device Cloud for a given list of topics
:param topics: a string list of topics (e.g. ['DeviceCore[U]',
'FileDataCore']).
:param transport_url: URL of the customer web server.
:param transport_token: Credentials for basic authentication in the following format: username:password
:param transport_method: HTTP method to use for sending data: PUT or POST. The default is PUT.
:param connect_timeout: A value of 0 means use the system default of 5000 (5 seconds).
:param response_timeout: A value of 0 means use the system default of 5000 (5 seconds).
:param batch_size: How many Msgs received before sending data.
:param batch_duration: How long to wait before sending batch if it
does not exceed batch_size.
:param compression: Compression value (i.e. 'gzip').
:param format_type: What format server should send data in (i.e. 'xml' or 'json').
Returns an object of the created Monitor
"""
monitor_xml = """\
<Monitor>
<monTopic>{topics}</monTopic>
<monBatchSize>{batch_size}</monBatchSize>
<monFormatType>{format_type}</monFormatType>
<monTransportType>http</monTransportType>
<monTransportUrl>{transport_url}</monTransportUrl>
<monTransportToken>{transport_token}</monTransportToken>
<monTransportMethod>{transport_method}</monTransportMethod>
<monConnectTimeout>{connect_timeout}</monConnectTimeout>
<monResponseTimeout>{response_timeout}</monResponseTimeout>
<monCompression>{compression}</monCompression>
</Monitor>
""".format(
topics=','.join(topics),
transport_url=transport_url,
transport_token=transport_token,
transport_method=transport_method,
connect_timeout=connect_timeout,
response_timeout=response_timeout,
batch_size=batch_size,
batch_duration=batch_duration,
format_type=format_type,
compression=compression,
)
monitor_xml = textwrap.dedent(monitor_xml)
response = self._conn.post("/ws/Monitor", monitor_xml)
location = ET.fromstring(response.text).find('.//location').text
monitor_id = int(location.split('/')[-1])
return HTTPDeviceCloudMonitor(self._conn, monitor_id) | Creates a HTTP Monitor instance in Device Cloud for a given list of topics
:param topics: a string list of topics (e.g. ['DeviceCore[U]',
'FileDataCore']).
:param transport_url: URL of the customer web server.
:param transport_token: Credentials for basic authentication in the following format: username:password
:param transport_method: HTTP method to use for sending data: PUT or POST. The default is PUT.
:param connect_timeout: A value of 0 means use the system default of 5000 (5 seconds).
:param response_timeout: A value of 0 means use the system default of 5000 (5 seconds).
:param batch_size: How many Msgs received before sending data.
:param batch_duration: How long to wait before sending batch if it
does not exceed batch_size.
:param compression: Compression value (i.e. 'gzip').
:param format_type: What format server should send data in (i.e. 'xml' or 'json').
Returns an object of the created Monitor | entailment |
def get_monitors(self, condition=None, page_size=1000):
"""Return an iterator over all monitors matching the provided condition
Get all inactive monitors and print id::
for mon in dc.monitor.get_monitors(MON_STATUS_ATTR == "DISABLED"):
print(mon.get_id())
Get all the HTTP monitors and print id::
for mon in dc.monitor.get_monitors(MON_TRANSPORT_TYPE_ATTR == "http"):
print(mon.get_id())
Many other possibilities exist. See the :mod:`devicecloud.condition` documention
for additional details on building compound expressions.
:param condition: An :class:`.Expression` which defines the condition
which must be matched on the monitor that will be retrieved from
Device Cloud. If a condition is unspecified, an iterator over
all monitors for this account will be returned.
:type condition: :class:`.Expression` or None
:param int page_size: The number of results to fetch in a single page.
:return: Generator yielding :class:`.DeviceCloudMonitor` instances matching the
provided conditions.
"""
req_kwargs = {}
if condition:
req_kwargs['condition'] = condition.compile()
for monitor_data in self._conn.iter_json_pages("/ws/Monitor", **req_kwargs):
yield DeviceCloudMonitor.from_json(self._conn, monitor_data, self._tcp_client_manager) | Return an iterator over all monitors matching the provided condition
Get all inactive monitors and print id::
for mon in dc.monitor.get_monitors(MON_STATUS_ATTR == "DISABLED"):
print(mon.get_id())
Get all the HTTP monitors and print id::
for mon in dc.monitor.get_monitors(MON_TRANSPORT_TYPE_ATTR == "http"):
print(mon.get_id())
Many other possibilities exist. See the :mod:`devicecloud.condition` documention
for additional details on building compound expressions.
:param condition: An :class:`.Expression` which defines the condition
which must be matched on the monitor that will be retrieved from
Device Cloud. If a condition is unspecified, an iterator over
all monitors for this account will be returned.
:type condition: :class:`.Expression` or None
:param int page_size: The number of results to fetch in a single page.
:return: Generator yielding :class:`.DeviceCloudMonitor` instances matching the
provided conditions. | entailment |
def get_monitor(self, topics):
"""Attempts to find a Monitor in device cloud that matches the provided topics
:param topics: a string list of topics (e.g. ``['DeviceCore[U]', 'FileDataCore'])``)
Returns a :class:`DeviceCloudMonitor` if found, otherwise None.
"""
for monitor in self.get_monitors(MON_TOPIC_ATTR == ",".join(topics)):
return monitor # return the first one, even if there are multiple
return None | Attempts to find a Monitor in device cloud that matches the provided topics
:param topics: a string list of topics (e.g. ``['DeviceCore[U]', 'FileDataCore'])``)
Returns a :class:`DeviceCloudMonitor` if found, otherwise None. | entailment |
def _get_encoder_method(stream_type):
"""A function to get the python type to device cloud type converter function.
:param stream_type: The streams data type
:return: A function that when called with the python object will return the serializable
type for sending to the cloud. If there is no function for the given type, or the `stream_type`
is `None` the returned function will simply return the object unchanged.
"""
if stream_type is not None:
return DSTREAM_TYPE_MAP.get(stream_type.upper(), (lambda x: x, lambda x: x))[1]
else:
return lambda x: x | A function to get the python type to device cloud type converter function.
:param stream_type: The streams data type
:return: A function that when called with the python object will return the serializable
type for sending to the cloud. If there is no function for the given type, or the `stream_type`
is `None` the returned function will simply return the object unchanged. | entailment |
def _get_decoder_method(stream_type):
""" A function to get Device Cloud type to python type converter function.
:param stream_type: The streams data type
:return: A function that when called with Device Cloud object will return the python
native type. If there is no function for the given type, or the `stream_type` is `None`
the returned function will simply return the object unchanged.
"""
if stream_type is not None:
return DSTREAM_TYPE_MAP.get(stream_type.upper(), (lambda x: x, lambda x: x))[0]
else:
return lambda x: x | A function to get Device Cloud type to python type converter function.
:param stream_type: The streams data type
:return: A function that when called with Device Cloud object will return the python
native type. If there is no function for the given type, or the `stream_type` is `None`
the returned function will simply return the object unchanged. | entailment |
def _get_streams(self, uri_suffix=None):
    """Fetch stream objects from Device Cloud, keyed by stream id.

    :param uri_suffix: optional suffix appended to the ``/ws/DataStream``
        request path; a leading ``/`` is added if missing.
    :return: dict mapping stream id to :class:`~DataStream`
    """
    # TODO: handle paging, perhaps change this to be a generator
    if uri_suffix is None:
        uri_suffix = ""
    elif not uri_suffix.startswith('/'):
        uri_suffix = '/' + uri_suffix
    response = self._conn.get_json("/ws/DataStream{}".format(uri_suffix))
    return {
        item["streamId"]: DataStream(self._conn, item["streamId"], item)
        for item in response["items"]
    }
def create_stream(self, stream_id, data_type, description=None, data_ttl=None,
                  rollup_ttl=None, units=None):
    """Create a new data stream on Device Cloud

    This method will attempt to create a new data stream on Device Cloud.
    This method will only succeed if the stream does not already exist.

    :param str stream_id: The path/id of the stream being created on Device Cloud.
    :param str data_type: The type of this stream. This must be in the set
        `{ INTEGER, LONG, FLOAT, DOUBLE, STRING, BINARY, UNKNOWN }`. These values are
        available in constants like :attr:`~STREAM_TYPE_INTEGER`.
    :param str description: An optional description of this stream. See :meth:`~DataStream.get_description`.
    :param int data_ttl: The TTL for data points in this stream. See :meth:`~DataStream.get_data_ttl`.
    :param int rollup_ttl: The TTL for performing rollups on data. See :meth:`~DataStream.get_rollup_ttl`.
    :param str units: Units for data in this stream. See :meth:`~DataStream.get_units`
    :raises TypeError: if any argument is of an unexpected type
    :raises ValueError: if `data_type` is not one of the supported stream types
    :return: a :class:`~DataStream` referring to the newly created stream
    """
    stream_id = validate_type(stream_id, *six.string_types)
    data_type = validate_type(data_type, type(None), *six.string_types)
    if isinstance(data_type, *six.string_types):
        data_type = str(data_type).upper()
    # None is allowed (Device Cloud will treat the type as unknown)
    if data_type not in ({None} | set(DSTREAM_TYPE_MAP)):
        raise ValueError("data_type %r is not valid" % data_type)
    description = validate_type(description, type(None), *six.string_types)
    data_ttl = validate_type(data_ttl, type(None), *six.integer_types)
    rollup_ttl = validate_type(rollup_ttl, type(None), *six.integer_types)
    units = validate_type(units, type(None), *six.string_types)

    # Build the DataStream creation request; conditional_write skips None values
    sio = StringIO()
    sio.write("<DataStream>")
    conditional_write(sio, "<streamId>{}</streamId>", stream_id)
    conditional_write(sio, "<dataType>{}</dataType>", data_type)
    conditional_write(sio, "<description>{}</description>", description)
    conditional_write(sio, "<dataTtl>{}</dataTtl>", data_ttl)
    conditional_write(sio, "<rollupTtl>{}</rollupTtl>", rollup_ttl)
    conditional_write(sio, "<units>{}</units>", units)
    sio.write("</DataStream>")

    self._conn.post("/ws/DataStream", sio.getvalue())
    logger.info("Data stream (%s) created successfully", stream_id)
    stream = DataStream(self._conn, stream_id)
    return stream
This method will attempt to create a new data stream on Device Cloud.
This method will only succeed if the stream does not already exist.
:param str stream_id: The path/id of the stream being created on Device Cloud.
:param str data_type: The type of this stream. This must be in the set
`{ INTEGER, LONG, FLOAT, DOUBLE, STRING, BINARY, UNKNOWN }`. These values are
available in constants like :attr:`~STREAM_TYPE_INTEGER`.
:param str description: An optional description of this stream. See :meth:`~DataStream.get_description`.
:param int data_ttl: The TTL for data points in this stream. See :meth:`~DataStream.get_data_ttl`.
:param int rollup_ttl: The TTL for performing rollups on data. See :meth:~DataStream.get_rollup_ttl`.
:param str units: Units for data in this stream. See :meth:`~DataStream.get_units` | entailment |
def get_stream_if_exists(self, stream_id):
    """Return a reference to a stream with the given ``stream_id`` if it exists

    This works similar to :py:meth:`get_stream` but will return None if the
    stream is not already created.

    :param stream_id: The path of the stream on Device Cloud
    :raises TypeError: if the stream_id provided is the wrong type
    :raises ValueError: if the stream_id is not properly formed
    :return: :class:`.DataStream` instance with the provided stream_id
    :rtype: :class:`~DataStream`
    """
    candidate = self.get_stream(stream_id)
    try:
        # Probing the data type forces a lookup that fails for unknown streams
        candidate.get_data_type(use_cached=True)
    except NoSuchStreamException:
        return None
    return candidate
This works similar to :py:meth:`get_stream` but will return None if the
stream is not already created.
:param stream_id: The path of the stream on Device Cloud
:raises TypeError: if the stream_id provided is the wrong type
:raises ValueError: if the stream_id is not properly formed
:return: :class:`.DataStream` instance with the provided stream_id
:rtype: :class:`~DataStream` | entailment |
def bulk_write_datapoints(self, datapoints):
    """Perform a bulk write (or set of writes) of a collection of data points

    This method takes a list (or other iterable) of datapoints and writes them
    to Device Cloud in an efficient manner, minimizing the number of HTTP
    requests that need to be made.

    As this call is performed from outside the context of any particular stream,
    each DataPoint object passed in must include information about the stream
    into which the point should be written.

    If all data points being written are for the same stream, you may want to
    consider using :meth:`~DataStream.bulk_write_datapoints` instead.

    Example::

        datapoints = []
        for i in range(300):
            datapoints.append(DataPoint(
                stream_id="my/stream%d" % (i % 3),
                data_type=STREAM_TYPE_INTEGER,
                units="meters",
                data=i,
            ))
        dc.streams.bulk_write_datapoints(datapoints)

    Depending on the size of the list of datapoints provided, this method may
    need to make multiple calls to Device Cloud (in chunks of 250).

    :param list datapoints: a list of datapoints to be written to Device Cloud
    :raises TypeError: if a list of datapoints is not provided
    :raises ValueError: if any of the provided data points do not have all required
        information (such as information about the stream)
    :raises DeviceCloudHttpException: in the case of an unexpected error in communicating
        with Device Cloud.
    """
    datapoints = list(datapoints)  # effectively performs validation that we have the right type

    # Validate everything up front so no partial writes happen on bad input
    for dp in datapoints:
        if not isinstance(dp, DataPoint):
            raise TypeError("All items in the datapoints list must be DataPoints")
        if dp.get_stream_id() is None:
            raise ValueError("stream_id must be set on all datapoints")

    # Post the points in fixed-size chunks to respect the per-request limit
    for start in range(0, len(datapoints), MAXIMUM_DATAPOINTS_PER_POST):
        chunk = datapoints[start:start + MAXIMUM_DATAPOINTS_PER_POST]
        payload = StringIO()
        payload.write("<list>")
        for dp in chunk:
            payload.write(dp.to_xml())
        payload.write("</list>")
        self._conn.post("/ws/DataPoint", payload.getvalue())
        logger.info('DataPoint batch of %s datapoints written', len(chunk))
This method takes a list (or other iterable) of datapoints and writes them
to Device Cloud in an efficient manner, minimizing the number of HTTP
requests that need to be made.
As this call is performed from outside the context of any particular stream,
each DataPoint object passed in must include information about the stream
into which the point should be written.
If all data points being written are for the same stream, you may want to
consider using :meth:`~DataStream.bulk_write_datapoints` instead.
Example::
datapoints = []
for i in range(300):
datapoints.append(DataPoint(
stream_id="my/stream%d" % (i % 3),
data_type=STREAM_TYPE_INTEGER,
units="meters",
data=i,
))
dc.streams.bulk_write_datapoints(datapoints)
Depending on the size of the list of datapoints provided, this method may
need to make multiple calls to Device Cloud (in chunks of 250).
:param list datapoints: a list of datapoints to be written to Device Cloud
:raises TypeError: if a list of datapoints is not provided
:raises ValueError: if any of the provided data points do not have all required
information (such as information about the stream)
:raises DeviceCloudHttpException: in the case of an unexpected error in communicating
with Device Cloud. | entailment |
def from_json(cls, stream, json_data):
    """Create a new DataPoint object from device cloud JSON data

    :param DataStream stream: The :class:`~DataStream` out of which this data is coming
    :param dict json_data: Deserialized JSON data from Device Cloud about this device
    :raises ValueError: if the data is malformed
    :return: (:class:`~DataPoint`) newly created :class:`~DataPoint`
    """
    decode = _get_decoder_method(stream.get_data_type())
    return cls(
        # stream-level attributes copied onto the point for convenience
        stream_id=stream.get_stream_id(),
        data_type=stream.get_data_type(),
        units=stream.get_units(),
        # attributes belonging to the data point itself
        data=decode(json_data.get("data")),
        description=json_data.get("description"),
        timestamp=json_data.get("timestampISO"),
        server_timestamp=json_data.get("serverTimestampISO"),
        quality=json_data.get("quality"),
        location=json_data.get("location"),
        dp_id=json_data.get("id"),
    )
:param DataStream stream: The :class:`~DataStream` out of which this data is coming
:param dict json_data: Deserialized JSON data from Device Cloud about this device
:raises ValueError: if the data is malformed
:return: (:class:`~DataPoint`) newly created :class:`~DataPoint` | entailment |
def from_rollup_json(cls, stream, json_data):
    """Rollup json data from the server looks slightly different

    :param DataStream stream: The :class:`~DataStream` out of which this data is coming
    :param dict json_data: Deserialized JSON data from Device Cloud about this device
    :raises ValueError: if the data is malformed
    :return: (:class:`~DataPoint`) newly created :class:`~DataPoint`
    """
    point = cls.from_json(stream, json_data)
    # Rollup timestamps arrive as epoch values rather than ISO strings
    point.set_timestamp(
        isoformat(dc_utc_timestamp_to_dt(int(json_data.get("timestamp")))))
    # Rollup data is always reported as float; convert to the stream's type
    decode = _get_decoder_method(stream.get_data_type())
    point.set_data(decode(float(json_data.get("data"))))
    return point
:param DataStream stream: The :class:`~DataStream` out of which this data is coming
:param dict json_data: Deserialized JSON data from Device Cloud about this device
:raises ValueError: if the data is malformed
:return: (:class:`~DataPoint`) newly created :class:`~DataPoint` | entailment |
def set_stream_id(self, stream_id):
    """Set the stream id associated with this data point"""
    validated = validate_type(stream_id, type(None), *six.string_types)
    # Normalize away any leading slash so ids are stored in relative form
    self._stream_id = None if validated is None else validated.lstrip('/')
def set_description(self, description):
    """Set the description for this data point"""
    validated = validate_type(description, type(None), *six.string_types)
    self._description = validated
def set_quality(self, quality):
    """Set the quality for this sample

    Quality is stored on Device Cloud as a 32-bit integer, so the input
    to this function should be either None, an integer, or a string that can
    be converted to an integer.
    """
    # Coerce numeric strings and floats to int before validation
    if isinstance(quality, float) or isinstance(quality, *six.string_types):
        quality = int(quality)
    self._quality = validate_type(quality, type(None), *six.integer_types)
Quality is stored on Device Cloud as a 32-bit integer, so the input
to this function should be either None, an integer, or a string that can
be converted to an integer. | entailment |
def set_location(self, location):
    """Set the location for this data point

    The location must be either None (if no location data is known) or a
    3-tuple of floating point values in the form
    (latitude-degrees, longitude-degrees, altitude-meters).

    :param location: None, a CSV string of three numbers (as received from
        Device Cloud), or a 3-element tuple/list of ints/floats
    :raises ValueError: if a location string does not have exactly 3 parts
    :raises TypeError: if location is of an unsupported type/shape
    """
    # BUG FIX: the previous implementation unconditionally re-assigned
    # ``self._location = location`` after the branch below, clobbering the
    # float-coerced tuple with the caller's raw list/tuple of ints.
    if location is None:
        self._location = None
    elif isinstance(location, *six.string_types):  # from device cloud, convert from csv
        parts = str(location).split(",")
        if len(parts) != 3:
            raise ValueError("Location string %r has unexpected format" % location)
        self._location = tuple(map(float, parts))
    # TODO: could maybe try to allow any iterable but this covers the most common cases
    elif (isinstance(location, (tuple, list))
          and len(location) == 3
          and all([isinstance(x, (float, six.integer_types)) for x in location])):
        self._location = tuple(map(float, location))  # coerce ints to float
    else:
        raise TypeError("Location must be None or 3-tuple of floats")
The location must be either None (if no location data is known) or a
3-tuple of floating point values in the form
(latitude-degrees, longitude-degrees, altitude-meters). | entailment |
def set_data_type(self, data_type):
    """Set the data type for ths data point

    The data type is actually associated with the stream itself and should
    not (generally) vary on a point-per-point basis. That being said, if
    creating a new stream by writing a datapoint, it may be beneficial to
    include this information.

    The data type provided should be in the set of available data types of
    { INTEGER, LONG, FLOAT, DOUBLE, STRING, BINARY, UNKNOWN }.
    """
    validate_type(data_type, type(None), *six.string_types)
    if isinstance(data_type, *six.string_types):
        data_type = str(data_type).upper()
    # None is permitted (type unknown); anything else must be a known type
    valid_types = {None} | set(DSTREAM_TYPE_MAP.keys())
    if data_type not in valid_types:
        raise ValueError("Provided data type not in available set of types")
    self._data_type = data_type
The data type is actually associated with the stream itself and should
not (generally) vary on a point-per-point basis. That being said, if
creating a new stream by writing a datapoint, it may be beneficial to
include this information.
The data type provided should be in the set of available data types of
{ INTEGER, LONG, FLOAT, DOUBLE, STRING, BINARY, UNKNOWN }. | entailment |
def set_units(self, unit):
    """Set the unit for this data point

    Unit, as with data_type, are actually associated with the stream and not
    the individual data point. As such, changing this within a stream is
    not encouraged. Setting the unit on the data point is useful when the
    stream might be created with the write of a data point.
    """
    validated = validate_type(unit, type(None), *six.string_types)
    self._units = validated
Unit, as with data_type, are actually associated with the stream and not
the individual data point. As such, changing this within a stream is
not encouraged. Setting the unit on the data point is useful when the
stream might be created with the write of a data point. | entailment |
def to_xml(self):
    """Convert this datapoint into a form suitable for pushing to device cloud

    An XML string will be returned that will contain all pieces of information
    set on this datapoint. Values not set (e.g. quality) will be ommitted.
    """
    encode = _get_encoder_method(self._data_type)
    out = StringIO()
    out.write("<DataPoint>")
    out.write("<streamId>{}</streamId>".format(self.get_stream_id()))
    # Serialize the python value into its Device Cloud representation
    out.write("<data>{}</data>".format(encode(self._data)))
    conditional_write(out, "<description>{}</description>", self.get_description())
    timestamp = self.get_timestamp()
    if timestamp is not None:
        out.write("<timestamp>{}</timestamp>".format(isoformat(timestamp)))
    conditional_write(out, "<quality>{}</quality>", self.get_quality())
    location = self.get_location()
    if location is not None:
        out.write("<location>%s</location>" % ",".join(map(str, location)))
    conditional_write(out, "<streamType>{}</streamType>", self.get_data_type())
    conditional_write(out, "<streamUnits>{}</streamUnits>", self.get_units())
    out.write("</DataPoint>")
    return out.getvalue()
An XML string will be returned that will contain all pieces of information
set on this datapoint. Values not set (e.g. quality) will be ommitted. | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.